Diffstat (limited to 'lib')
185 files changed, 6876 insertions, 3569 deletions
diff --git a/lib/common_test/priv/Makefile.in b/lib/common_test/priv/Makefile.in
index 5a9fabbe45..1bc6b82ebb 100644
--- a/lib/common_test/priv/Makefile.in
+++ b/lib/common_test/priv/Makefile.in
@@ -71,7 +71,7 @@ debug opt:
	$(V_at)sed -e 's;@CT_VSN@;$(VSN);' \
	-e 's;@TS_VSN@;$(TEST_SERVER_VSN);' \
	../install.sh.in > install.sh
-	$(V_at)chmod 775 install.sh
+	-$(V_at)chmod -f 775 install.sh
 
 docs:
diff --git a/lib/common_test/src/ct_framework.erl b/lib/common_test/src/ct_framework.erl
index e8ea7992b4..ec525784ec 100644
--- a/lib/common_test/src/ct_framework.erl
+++ b/lib/common_test/src/ct_framework.erl
@@ -1268,6 +1268,11 @@ report(What,Data) ->
     Data1 = if GrName == undefined -> {Suite,Func,Result};
		true -> Data
	     end,
+    %% Register the group leader for the process calling the report
+    %% function, making it possible for a hook function to print
+    %% in the test case log file
+    ReportingPid = self(),
+    ct_logs:register_groupleader(ReportingPid, group_leader()),
     case Result of
	{failed, _} ->
	    ct_hooks:on_tc_fail(What, Data1);
@@ -1282,6 +1287,7 @@ report(What,Data) ->
	_Else ->
	    ok
     end,
+    ct_logs:unregister_groupleader(ReportingPid),
     case {Func,Result} of
	{init_per_suite,_} ->
	    ok;
diff --git a/lib/common_test/src/ct_logs.erl b/lib/common_test/src/ct_logs.erl
index 7037cdca73..23332ad268 100644
--- a/lib/common_test/src/ct_logs.erl
+++ b/lib/common_test/src/ct_logs.erl
@@ -29,6 +29,7 @@
 -module(ct_logs).
 
 -export([init/2, close/2, init_tc/1, end_tc/1]).
+-export([register_groupleader/2, unregister_groupleader/1]).
 -export([get_log_dir/0, get_log_dir/1]).
 -export([log/3, start_log/1, cont_log/2, end_log/0]).
 -export([set_stylesheet/2, clear_stylesheet/1]).
@@ -267,7 +268,7 @@ init_tc(RefreshLog) ->
     ok.
 
 %%%-----------------------------------------------------------------
-%%% @spec end_tc(TCPid) -> ok | {error,Reason}
+%%% @spec end_tc(TCPid) -> ok
 %%%
 %%% @doc Test case clean up (tool-internal use only).
 %%%
@@ -278,6 +279,26 @@ end_tc(TCPid) ->
     call({end_tc,TCPid}).
 
 %%%-----------------------------------------------------------------
+%%% @spec register_groupleader(Pid,GroupLeader) -> ok
+%%%
+%%% @doc To enable logging to a group leader (tool-internal use only).
+%%%
+%%% <p>This function is called by ct_framework:report/2</p>
+register_groupleader(Pid,GroupLeader) ->
+    call({register_groupleader,Pid,GroupLeader}),
+    ok.
+
+%%%-----------------------------------------------------------------
+%%% @spec unregister_groupleader(Pid) -> ok
+%%%
+%%% @doc To disable logging to a group leader (tool-internal use only).
+%%%
+%%% <p>This function is called by ct_framework:report/2</p>
+unregister_groupleader(Pid) ->
+    call({unregister_groupleader,Pid}),
+    ok.
+
+%%%-----------------------------------------------------------------
 %%% @spec log(Heading,Format,Args) -> ok
 %%%
 %%% @doc Log internal activity (tool-internal use only).
@@ -764,6 +785,14 @@ logger_loop(State) ->
	    return(From,ok),
	    logger_loop(State#logger_state{tc_groupleaders =
					       rm_tc_gl(TCPid,State)});
+	{{register_groupleader,Pid,GL},From} ->
+	    GLs = add_tc_gl(Pid,GL,State),
+	    return(From,ok),
+	    logger_loop(State#logger_state{tc_groupleaders = GLs});
+	{{unregister_groupleader,Pid},From} ->
+	    return(From,ok),
+	    logger_loop(State#logger_state{tc_groupleaders =
+					       rm_tc_gl(Pid,State)});
	{{get_log_dir,true},From} ->
	    return(From,{ok,State#logger_state.log_dir}),
	    logger_loop(State);
diff --git a/lib/common_test/src/ct_run.erl b/lib/common_test/src/ct_run.erl
index 00d0aab507..4a12481214 100644
--- a/lib/common_test/src/ct_run.erl
+++ b/lib/common_test/src/ct_run.erl
@@ -293,10 +293,10 @@ script_start1(Parent, Args) ->
     application:set_env(common_test, auto_compile, true),
     InclDirs =
	case proplists:get_value(include, Args) of
-	    Incl when is_list(hd(Incl)) ->
-		Incl;
+	    Incls when is_list(hd(Incls)) ->
+		[filename:absname(IDir) || IDir <- Incls];
	    Incl when is_list(Incl) ->
-		[Incl];
+		[filename:absname(Incl)];
	    undefined ->
		[]
	end,
@@ -774,7 +774,8 @@ script_usage() ->
	      "\n\t[-basic_html]\n\n"),
     io:format("Run tests from command line:\n\n"
	       "\tct_run [-dir TestDir1 TestDir2 .. TestDirN] |"
-	       "\n\t[-suite Suite1 Suite2 .. SuiteN [-case Case1 Case2 .. CaseN]]"
+	       "\n\t[[-dir TestDir] -suite Suite1 Suite2 .. SuiteN"
+	       "\n\t [[-group Groups1 Groups2 .. GroupsN] [-case Case1 Case2 .. CaseN]]]"
	       "\n\t[-step [config | keep_inactive]]"
	       "\n\t[-config ConfigFile1 ConfigFile2 .. ConfigFileN]"
	       "\n\t[-userconfig CallbackModule ConfigFile1 .. ConfigFileN]"
@@ -1023,10 +1024,10 @@ run_test2(StartOpts) ->
	case proplists:get_value(include, StartOpts) of
	    undefined ->
		[];
-	    Incl when is_list(hd(Incl)) ->
-		Incl;
+	    Incls when is_list(hd(Incls)) ->
+		[filename:absname(IDir) || IDir <- Incls];
	    Incl when is_list(Incl) ->
-		[Incl]
+		[filename:absname(Incl)]
	end,
     case os:getenv("CT_INCLUDE_PATH") of
	false ->
@@ -1393,6 +1394,7 @@ run_testspec2(TestSpec) ->
			EnvInclude++Opts#opts.include
		end,
     application:set_env(common_test, include, AllInclude),
+    LogDir1 = which(logdir,Opts#opts.logdir),
 
     case check_and_install_configfiles(
	    Opts#opts.config, LogDir1, Opts) of
@@ -2134,6 +2136,14 @@ do_run_test(Tests, Skip, Opts0) ->
     case check_and_add(Tests, [], []) of
	{ok,AddedToPath} ->
	    ct_util:set_testdata({stats,{0,0,{0,0}}}),
+
+	    %% test_server needs to know the include path too
+	    InclPath = case application:get_env(common_test, include) of
+			   {ok,Incls} -> Incls;
+			   _ -> []
+		       end,
+	    application:set_env(test_server, include, InclPath),
+
	    test_server_ctrl:start_link(local),
 
	    %% let test_server expand the test tuples and count no of cases
diff --git a/lib/common_test/src/ct_telnet.erl b/lib/common_test/src/ct_telnet.erl
index babe73e575..4e03bf8630 100644
--- a/lib/common_test/src/ct_telnet.erl
+++ b/lib/common_test/src/ct_telnet.erl
@@ -1122,7 +1122,8 @@ teln_expect1(Name,Pid,Data,Pattern,Acc,EO=#eo{idle_timeout=IdleTO,
	NotFinished ->
	    %% Get more data
	    Fun = fun() -> get_data1(EO#eo.teln_pid) end,
-	    case timer:tc(ct_gen_conn, do_within_time, [Fun, IdleTO]) of
+	    BreakAfter = if TotalTO < IdleTO -> TotalTO; true -> IdleTO end,
+	    case timer:tc(ct_gen_conn, do_within_time, [Fun, BreakAfter]) of
		{_,{error,Reason}} ->
		    %% A timeout will occur when the telnet connection
		    %% is idle for EO#eo.idle_timeout milliseconds.
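The ct_telnet change above addresses OTP-12335: each wait for more data is now bounded by the smaller of the idle timeout and the total timeout, so a total_timeout shorter than idle_timeout can actually fire. A minimal sketch of that capping logic, assuming a hypothetical module and message protocol (this is not the ct_telnet code):

    -module(timeout_cap_sketch).
    -export([wait_for_data/2]).

    %% Bound each wait by whichever of the idle and total timeouts is
    %% smaller. 'infinity' is an atom and compares greater than any
    %% integer in Erlang, so it never wins the comparison.
    wait_for_data(IdleTO, TotalTO) ->
        BreakAfter = if TotalTO < IdleTO -> TotalTO;
                        true             -> IdleTO
                     end,
        receive
            {data, Bytes} -> {ok, Bytes}
        after BreakAfter ->
            {error, timeout}
        end.
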
diff --git a/lib/common_test/test/ct_event_handler_SUITE.erl b/lib/common_test/test/ct_event_handler_SUITE.erl
index b534a7141d..30a5e650fe 100644
--- a/lib/common_test/test/ct_event_handler_SUITE.erl
+++ b/lib/common_test/test/ct_event_handler_SUITE.erl
@@ -156,18 +156,21 @@ results(Config) when is_list(Config) ->
 
     TestEvents = [{eh_A,start_logging,{'DEF','RUNDIR'}},
		  {eh_A,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
-		  {eh_A,start_info,{1,1,3}},
+		  {eh_A,start_info,{1,1,4}},
		  {eh_A,tc_start,{eh_11_SUITE,init_per_suite}},
		  {eh_A,tc_done,{eh_11_SUITE,init_per_suite,ok}},
		  {eh_A,tc_start,{eh_11_SUITE,tc1}},
		  {eh_A,tc_done,{eh_11_SUITE,tc1,ok}},
		  {eh_A,test_stats,{1,0,{0,0}}},
		  {eh_A,tc_start,{eh_11_SUITE,tc2}},
-		  {eh_A,tc_done,{eh_11_SUITE,tc2,{skipped,"Skipped"}}},
+		  {eh_A,tc_done,{eh_11_SUITE,tc2,{skipped,"Skip"}}},
		  {eh_A,test_stats,{1,0,{1,0}}},
		  {eh_A,tc_start,{eh_11_SUITE,tc3}},
-		  {eh_A,tc_done,{eh_11_SUITE,tc3,{failed,{error,'Failing'}}}},
-		  {eh_A,test_stats,{1,1,{1,0}}},
+		  {eh_A,tc_done,{eh_11_SUITE,tc3,{skipped,"Skipped"}}},
+		  {eh_A,test_stats,{1,0,{2,0}}},
+		  {eh_A,tc_start,{eh_11_SUITE,tc4}},
+		  {eh_A,tc_done,{eh_11_SUITE,tc4,{failed,{error,'Failing'}}}},
+		  {eh_A,test_stats,{1,1,{2,0}}},
		  {eh_A,tc_start,{eh_11_SUITE,end_per_suite}},
		  {eh_A,tc_done,{eh_11_SUITE,end_per_suite,ok}},
		  {eh_A,test_done,{'DEF','STOP_TIME'}},
diff --git a/lib/common_test/test/ct_event_handler_SUITE_data/event_handling_1/test/eh_11_SUITE.erl b/lib/common_test/test/ct_event_handler_SUITE_data/event_handling_1/test/eh_11_SUITE.erl
index 16b7129993..a52fe96f30 100644
--- a/lib/common_test/test/ct_event_handler_SUITE_data/event_handling_1/test/eh_11_SUITE.erl
+++ b/lib/common_test/test/ct_event_handler_SUITE_data/event_handling_1/test/eh_11_SUITE.erl
@@ -124,7 +124,7 @@ end_per_testcase(_TestCase, _Config) ->
 %% Description: Returns the list of test cases that are to be executed.
 %%--------------------------------------------------------------------
 all() ->
-    [tc1, tc2, tc3].
+    [tc1, tc2, tc3, tc4].
 
 
 %%--------------------------------------------------------------------
@@ -135,7 +135,10 @@ tc1(_Config) ->
     ok.
 
 tc2(_Config) ->
-    {skip,"Skipped"}.
+    {skip,"Skip"}.
 
-tc3(_Config) ->
+tc3(_Config) ->
+    {skipped,"Skipped"}.
+
+tc4(_Config) ->
     exit('Failing').
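The updated eh_11_SUITE exercises both user skip forms: tc2 returns {skip,Reason} and tc3 returns {skipped,Reason}, and the expected events above show both reported as {skipped,...}. A hypothetical normalization of the two forms (illustrative only, not the common_test implementation):

    -module(skip_forms_sketch).
    -export([normalize_skip/1]).

    %% Both user-facing return forms end up as a 'skipped' result
    %% in the event data.
    normalize_skip({skip, Reason})    -> {skipped, Reason};
    normalize_skip({skipped, Reason}) -> {skipped, Reason}.
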
diff --git a/lib/common_test/test/ct_hooks_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE.erl
index c8fc4bd59b..d5ad8312e6 100644
--- a/lib/common_test/test/ct_hooks_SUITE.erl
+++ b/lib/common_test/test/ct_hooks_SUITE.erl
@@ -1075,7 +1075,37 @@ test_events(fail_n_skip_with_minimal_cth) ->
     {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
     {?eh,cth,{'_',init,['_',[]]}},
     {?eh,tc_start,{'_',init_per_suite}},
-
+
+     {parallel,
+      [{?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,{init_per_group,
+						      group1,[parallel]}}},
+       {?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,{init_per_group,
+						     group1,[parallel]},ok}},
+       {parallel,
+	[{?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,{init_per_group,
+							group2,[parallel]}}},
+	 {?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,{init_per_group,
+						       group2,[parallel]},ok}},
+	 %% Verify that 'skip' as well as 'skipped' works
+	 {?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,test_case2}},
+	 {?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,test_case2,{skipped,"skip it"}}},
+	 {?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,test_case3}},
+	 {?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,test_case3,{skipped,"skip it"}}},
+	 {?eh,cth,{empty_cth,on_tc_skip,[{test_case2,group2},
+					 {tc_user_skip,{skipped,"skip it"}},
+					 []]}},
+	 {?eh,cth,{empty_cth,on_tc_skip,[{test_case3,group2},
+					 {tc_user_skip,{skipped,"skip it"}},
+					 []]}},
+	 {?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,{end_per_group,
+							group2,[parallel]}}},
+	 {?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,{end_per_group,group2,
+						       [parallel]},ok}}]},
+      {?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,{end_per_group,
+						     group1,[parallel]}}},
+      {?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,{end_per_group,
+						    group1,[parallel]},ok}}]},
+
     {?eh,tc_done,{'_',end_per_suite,ok}},
     {?eh,cth,{'_',terminate,[[]]}},
     {?eh,stop_logging,[]}
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_cth_fail_one_skip_one_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_cth_fail_one_skip_one_SUITE.erl
index b2f22d8257..7b84c246ca 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_cth_fail_one_skip_one_SUITE.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_cth_fail_one_skip_one_SUITE.erl
@@ -41,6 +41,8 @@ end_per_group(_Group,_Config) ->
 
 init_per_testcase(test_case2, Config) ->
{skip,"skip it"};
+init_per_testcase(test_case3, Config) ->
+ {skipped,"skip it"};
init_per_testcase(_TestCase, Config) ->
Config.
@@ -48,7 +50,9 @@ end_per_testcase(_TestCase, _Config) ->
     ok.
 
groups() ->
- [{group1,[parallel],[{group2,[parallel],[test_case1,test_case2,test_case3]}]}].
+ [{group1,[parallel],
+ [{group2,[parallel],
+ [test_case1,test_case2,test_case3,test_case4]}]}].
all() ->
[{group,group1}].
@@ -62,3 +66,6 @@ test_case2(Config) ->
 
 test_case3(Config) ->
ok.
+
+test_case4(Config) ->
+ ok.
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl
index 6caac7e447..77783fccf5 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl
@@ -75,6 +75,7 @@ init(Id, Opts) ->
     gen_event:notify(?CT_EVMGR_REF, #event{ name = cth, node = node(),
					     data = {?MODULE, init, [Id, Opts]}}),
+    ct:log("~w:init called", [?MODULE]),
     {ok,Opts}.
 
 %% @doc The ID is used to uniquly identify an CTH instance, if two CTH's
@@ -85,6 +86,7 @@ init(Id, Opts) ->
 id(Opts) ->
     gen_event:notify(?CT_EVMGR_REF, #event{ name = cth, node = node(),
					     data = {?MODULE, id, [Opts]}}),
+    ct:log("~w:id called", [?MODULE]),
     now().
 
 %% @doc Called before init_per_suite is called. Note that this callback is
@@ -100,6 +102,7 @@ pre_init_per_suite(Suite,Config,State) ->
       ?CT_EVMGR_REF, #event{ name = cth, node = node(),
			      data = {?MODULE, pre_init_per_suite, [Suite,Config,State]}}),
+    ct:log("~w:pre_init_per_suite(~w) called", [?MODULE,Suite]),
     {Config, State}.
 
 %% @doc Called after init_per_suite.
@@ -114,6 +117,7 @@ post_init_per_suite(Suite,Config,Return,State) ->
       ?CT_EVMGR_REF, #event{ name = cth, node = node(),
			      data = {?MODULE, post_init_per_suite, [Suite,Config,Return,State]}}),
+    ct:log("~w:post_init_per_suite(~w) called", [?MODULE,Suite]),
     {Return, State}.
 
 %% @doc Called before end_per_suite. The config/state can be changed here,
@@ -127,6 +131,7 @@ pre_end_per_suite(Suite,Config,State) ->
       ?CT_EVMGR_REF, #event{ name = cth, node = node(),
			      data = {?MODULE, pre_end_per_suite, [Suite,Config,State]}}),
+    ct:log("~w:pre_end_per_suite(~w) called", [?MODULE,Suite]),
     {Config, State}.
 
 %% @doc Called after end_per_suite. Note that the config cannot be
@@ -141,6 +146,7 @@ post_end_per_suite(Suite,Config,Return,State) ->
       ?CT_EVMGR_REF, #event{ name = cth, node = node(),
			      data = {?MODULE, post_end_per_suite, [Suite,Config,Return,State]}}),
+    ct:log("~w:post_end_per_suite(~w) called", [?MODULE,Suite]),
     {Return, State}.
 
 %% @doc Called before each init_per_group.
@@ -154,6 +160,7 @@ pre_init_per_group(Group,Config,State) ->
       ?CT_EVMGR_REF, #event{ name = cth, node = node(),
			      data = {?MODULE, pre_init_per_group, [Group,Config,State]}}),
+    ct:log("~w:pre_init_per_group(~w) called", [?MODULE,Group]),
     {Config, State}.
 
 %% @doc Called after each init_per_group.
@@ -168,6 +175,7 @@ post_init_per_group(Group,Config,Return,State) ->
       ?CT_EVMGR_REF, #event{ name = cth, node = node(),
			      data = {?MODULE, post_init_per_group, [Group,Config,Return,State]}}),
+    ct:log("~w:post_init_per_group(~w) called", [?MODULE,Group]),
     {Return, State}.
 
 %% @doc Called after each end_per_group. The config/state can be changed here,
@@ -181,6 +189,7 @@ pre_end_per_group(Group,Config,State) ->
       ?CT_EVMGR_REF, #event{ name = cth, node = node(),
			      data = {?MODULE, pre_end_per_group, [Group,Config,State]}}),
+    ct:log("~w:pre_end_per_group(~w) called", [?MODULE,Group]),
     {Config, State}.
 
 %% @doc Called after each end_per_group. Note that the config cannot be
@@ -195,6 +204,7 @@ post_end_per_group(Group,Config,Return,State) ->
       ?CT_EVMGR_REF, #event{ name = cth, node = node(),
			      data = {?MODULE, post_end_per_group, [Group,Config,Return,State]}}),
+    ct:log("~w:post_end_per_group(~w) called", [?MODULE,Group]),
     {Return, State}.
 
 %% @doc Called before each test case.
@@ -208,6 +218,7 @@ pre_init_per_testcase(TC,Config,State) ->
       ?CT_EVMGR_REF, #event{ name = cth, node = node(),
			      data = {?MODULE, pre_init_per_testcase, [TC,Config,State]}}),
+    ct:log("~w:pre_init_per_testcase(~w) called", [?MODULE,TC]),
     {Config, State}.
 
 %% @doc Called after each test case. Note that the config cannot be
@@ -222,6 +233,7 @@ post_end_per_testcase(TC,Config,Return,State) ->
       ?CT_EVMGR_REF, #event{ name = cth, node = node(),
			      data = {?MODULE, post_end_per_testcase, [TC,Config,Return,State]}}),
+    ct:log("~w:post_end_per_testcase(~w) called", [?MODULE,TC]),
     {Return, State}.
 
 %% @doc Called after post_init_per_suite, post_end_per_suite, post_init_per_group,
@@ -237,6 +249,7 @@ on_tc_fail(TC, Reason, State) ->
       ?CT_EVMGR_REF, #event{ name = cth, node = node(),
			      data = {?MODULE, on_tc_fail, [TC,Reason,State]}}),
+    ct:log("~w:on_tc_fail(~w) called", [?MODULE,TC]),
     State.
 
 %% @doc Called when a test case is skipped by either user action
@@ -253,6 +266,7 @@ on_tc_skip(TC, Reason, State) ->
       ?CT_EVMGR_REF, #event{ name = cth, node = node(),
			      data = {?MODULE, on_tc_skip, [TC,Reason,State]}}),
+    ct:log("~w:on_tc_skip(~w) called", [?MODULE,TC]),
     State.
 
 %% @doc Called when the scope of the CTH is done, this depends on
@@ -274,4 +288,5 @@ terminate(State) ->
     gen_event:notify(
       ?CT_EVMGR_REF, #event{ name = cth, node = node(),
			      data = {?MODULE, terminate, [State]}}),
+    ct:log("~w:terminate called", [?MODULE]),
     ok.
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl
index 30721a6b3a..436470f46d 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl
@@ -28,10 +28,14 @@
 %% CT Hooks
-export([init/2]).
-export([terminate/1]).
+-export([on_tc_skip/3]).
init(Id, Opts) ->
empty_cth:init(Id, Opts).
+on_tc_skip(TC, Reason, State) ->
+ empty_cth:on_tc_skip(TC,Reason,State).
+
terminate(State) ->
empty_cth:terminate(State).
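Reassembled from the hunks above (header comments omitted), the minimal hook module now looks roughly as follows; it delegates everything, including the new on_tc_skip/3 callback, to empty_cth:

    -module(minimal_terminate_cth).

    %% CT Hooks
    -export([init/2]).
    -export([terminate/1]).
    -export([on_tc_skip/3]).

    init(Id, Opts) ->
        empty_cth:init(Id, Opts).

    on_tc_skip(TC, Reason, State) ->
        empty_cth:on_tc_skip(TC,Reason,State).

    terminate(State) ->
        empty_cth:terminate(State).
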
diff --git a/lib/common_test/test/ct_telnet_SUITE_data/ct_telnet_own_server_SUITE.erl b/lib/common_test/test/ct_telnet_SUITE_data/ct_telnet_own_server_SUITE.erl index 0ddb4e9b00..bd5d76266a 100644 --- a/lib/common_test/test/ct_telnet_SUITE_data/ct_telnet_own_server_SUITE.erl +++ b/lib/common_test/test/ct_telnet_SUITE_data/ct_telnet_own_server_SUITE.erl @@ -44,6 +44,7 @@ all() -> expect_error_timeout1, expect_error_timeout2, expect_error_timeout3, + total_timeout_less_than_idle, no_prompt_check, no_prompt_check_repeat, no_prompt_check_sequence, @@ -134,9 +135,32 @@ expect_error_timeout2(_) -> expect_error_timeout3(_) -> {ok, Handle} = ct_telnet:open(telnet_server_conn1), ok = ct_telnet:send(Handle, "echo_loop 5000 xxx"), + + T0 = now(), {error,timeout} = ct_telnet:expect(Handle, ["yyy"], [{idle_timeout,infinity}, - {total_timeout,3000}]), + {total_timeout,2001}]), + Diff = trunc(timer:now_diff(now(),T0)/1000), + {_,true} = {Diff, (Diff >= 2000) and (Diff =< 4000)}, + + ok = ct_telnet:send(Handle, "echo ayt"), + {ok,["ayt"]} = ct_telnet:expect(Handle, ["ayt"]), + ok = ct_telnet:close(Handle), + ok. + +%% OTP-12335: If total_timeout < idle_timeout, expect will never timeout +%% until after idle_timeout, which is incorrect. +total_timeout_less_than_idle(_) -> + {ok, Handle} = ct_telnet:open(telnet_server_conn1), + ok = ct_telnet:send(Handle, "echo_no_prompt xxx"), + + T0 = now(), + {error,timeout} = ct_telnet:expect(Handle, ["yyy"], + [{idle_timeout,5000}, + {total_timeout,2001}]), + Diff = trunc(timer:now_diff(now(),T0)/1000), + {_,true} = {Diff, (Diff >= 2000) and (Diff =< 4000)}, + ok = ct_telnet:send(Handle, "echo ayt"), {ok,["ayt"]} = ct_telnet:expect(Handle, ["ayt"]), ok = ct_telnet:close(Handle), diff --git a/lib/compiler/src/beam_a.erl b/lib/compiler/src/beam_a.erl index fe4f473846..dd7e03dd28 100644 --- a/lib/compiler/src/beam_a.erl +++ b/lib/compiler/src/beam_a.erl @@ -54,6 +54,9 @@ rename_instrs([{call_only,A,F}|Is]) -> [{call,A,F},return|rename_instrs(Is)]; rename_instrs([{call_ext_only,A,F}|Is]) -> [{call_ext,A,F},return|rename_instrs(Is)]; +rename_instrs([{'%live',_}|Is]) -> + %% When compiling from old .S files. + rename_instrs(Is); rename_instrs([I|Is]) -> [rename_instr(I)|rename_instrs(Is)]; rename_instrs([]) -> []. diff --git a/lib/compiler/src/beam_block.erl b/lib/compiler/src/beam_block.erl index 7d65dc983a..92f09e400c 100644 --- a/lib/compiler/src/beam_block.erl +++ b/lib/compiler/src/beam_block.erl @@ -184,7 +184,7 @@ embed_lines([], Acc) -> Acc. opt_blocks([{block,Bl0}|Is]) -> %% The live annotation at the beginning is not useful. - [{'%live',_}|Bl] = Bl0, + [{'%live',_,_}|Bl] = Bl0, [{block,opt_block(Bl)}|opt_blocks(Is)]; opt_blocks([I|Is]) -> [I|opt_blocks(Is)]; @@ -269,7 +269,7 @@ opt([{set,_,_,{line,_}}=Line1, opt([{set,Ds0,Ss,Op}|Is0]) -> {Ds,Is} = opt_moves(Ds0, Is0), [{set,Ds,Ss,Op}|opt(Is)]; -opt([{'%live',_}=I|Is]) -> +opt([{'%live',_,_}=I|Is]) -> [I|opt(Is)]; opt([]) -> []. 
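The compiler changes that follow extend the block-level liveness annotation from {'%live',Live} to {'%live',Live,Regs}, where Regs is a bitmap with bit N set when {x,N} is live. A small sketch of how such a bitmap is built and queried, in the same style as the beam_type helpers further down (is_live/2 is added here only for illustration):

    -module(live_regs_sketch).
    -export([init_regs/1, x_live/2, is_live/2]).

    %% The first Live x registers, {x,0}..{x,Live-1}, are live.
    init_regs(Live) -> (1 bsl Live) - 1.

    %% Mark the destination registers of an instruction as live.
    x_live([{x,N}|Rs], Regs) -> x_live(Rs, Regs bor (1 bsl N));
    x_live([_|Rs], Regs)     -> x_live(Rs, Regs);
    x_live([], Regs)         -> Regs.

    %% True if {x,N} is live in the bitmap.
    is_live(N, Regs) -> Regs band (1 bsl N) =/= 0.
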
diff --git a/lib/compiler/src/beam_jump.erl b/lib/compiler/src/beam_jump.erl index 4e699c4fbf..ba71d4efae 100644 --- a/lib/compiler/src/beam_jump.erl +++ b/lib/compiler/src/beam_jump.erl @@ -166,6 +166,12 @@ share_1([{label,L}=Lbl|Is], Dict0, Seq, Acc) -> end; share_1([{func_info,_,_,_}=I|Is], _, [], Acc) -> reverse(Is, [I|Acc]); +share_1([{'try',_,_}=I|Is], Dict0, Seq, Acc) -> + Dict = clean_non_sharable(Dict0), + share_1(Is, Dict, [I|Seq], Acc); +share_1([{try_case,_}=I|Is], Dict0, Seq, Acc) -> + Dict = clean_non_sharable(Dict0), + share_1(Is, Dict, [I|Seq], Acc); share_1([I|Is], Dict, Seq, Acc) -> case is_unreachable_after(I) of false -> @@ -174,6 +180,24 @@ share_1([I|Is], Dict, Seq, Acc) -> share_1(Is, Dict, [I], Acc) end. +clean_non_sharable(Dict) -> + %% We are passing in or out of a 'try' block. Remove + %% sequences that should not shared over the boundaries + %% of a 'try' block. Since the end of the sequence must match, + %% the only possible match between a sequence outside and + %% a sequence inside the 'try' block is a sequence that ends + %% with an instruction that causes an exception. Any sequence + %% that causes an exception must contain a line/1 instruction. + dict:filter(fun(K, _V) -> sharable_with_try(K) end, Dict). + +sharable_with_try([{line,_}|_]) -> + %% This sequence may cause an exception and may potentially + %% match a sequence on the other side of the 'try' block + %% boundary. + false; +sharable_with_try([_|Is]) -> + sharable_with_try(Is); +sharable_with_try([]) -> true. %% Eliminate all fallthroughs. Return the result reversed. @@ -295,12 +319,6 @@ opt([{test,_,{f,_}=Lbl,_,_,_}=I|Is], Acc, St) -> opt(Is, [I|Acc], label_used(Lbl, St)); opt([{select,_,_R,Fail,Vls}=I|Is], Acc, St) -> skip_unreachable(Is, [I|Acc], label_used([Fail|Vls], St)); -opt([{label,L}=I|Is], Acc, #st{entry=L}=St) -> - %% NEVER move the entry label. - opt(Is, [I|Acc], St); -opt([{label,L1},{jump,{f,L2}}=I|Is], [Prev|Acc], St0) -> - St = St0#st{mlbl=dict:append(L2, L1, St0#st.mlbl)}, - opt([Prev,I|Is], Acc, label_used({f,L2}, St)); opt([{label,Lbl}=I|Is], Acc, #st{mlbl=Mlbl}=St0) -> case dict:find(Lbl, Mlbl) of {ok,Lbls} -> @@ -310,9 +328,20 @@ opt([{label,Lbl}=I|Is], Acc, #st{mlbl=Mlbl}=St0) -> insert_labels([Lbl|Lbls], Is, Acc, St); error -> opt(Is, [I|Acc], St0) end; -opt([{jump,{f,Lbl}},{label,Lbl}=I|Is], Acc, St) -> - opt([I|Is], Acc, St); -opt([{jump,Lbl}=I|Is], Acc, St) -> +opt([{jump,{f,_}=X}|[{label,_},{jump,X}|_]=Is], Acc, St) -> + opt(Is, Acc, St); +opt([{jump,{f,Lbl}}|[{label,Lbl}|_]=Is], Acc, St) -> + opt(Is, Acc, St); +opt([{jump,{f,L}=Lbl}=I|Is], Acc0, #st{mlbl=Mlbl0}=St0) -> + %% All labels before this jump instruction should now be + %% moved to the location of the jump's target. + {Lbls,Acc} = collect_labels(Acc0, St0), + St = case Lbls of + [] -> St0; + [_|_] -> + Mlbl = dict:append_list(L, Lbls, Mlbl0), + St0#st{mlbl=Mlbl} + end, skip_unreachable(Is, [I|Acc], label_used(Lbl, St)); %% Optimization: quickly handle some common instructions that don't %% have any failure labels and where is_unreachable_after(I) =:= false. @@ -349,6 +378,17 @@ insert_fc_labels([L|Ls], Mlbl, Acc0) -> end; insert_fc_labels([], _, Acc) -> Acc. +collect_labels(Is, #st{entry=Entry}) -> + collect_labels_1(Is, Entry, []). + +collect_labels_1([{label,Entry}|_]=Is, Entry, Acc) -> + %% Never move the entry label. + {Acc,Is}; +collect_labels_1([{label,L}|Is], Entry, Acc) -> + collect_labels_1(Is, Entry, [L|Acc]); +collect_labels_1(Is, _Entry, Acc) -> + {Acc,Is}. 
+ %% label_defined(Is, Label) -> true | false. %% Test whether the label Label is defined at the start of the instruction %% sequence, possibly preceeded by other label definitions. diff --git a/lib/compiler/src/beam_type.erl b/lib/compiler/src/beam_type.erl index d9713cef0d..26c933481a 100644 --- a/lib/compiler/src/beam_type.erl +++ b/lib/compiler/src/beam_type.erl @@ -244,7 +244,7 @@ clearerror([], OrigIs) -> [{set,[],[],fclearerror}|OrigIs]. %% Combine two blocks and eliminate any move instructions that assign %% to registers that are killed later in the block. %% -merge_blocks(B1, [{'%live',_}|B2]) -> +merge_blocks(B1, [{'%live',_,_}|B2]) -> merge_blocks_1(B1++[{set,[],[],stop_here}|B2]). merge_blocks_1([{set,[],_,stop_here}|Is]) -> Is; @@ -329,27 +329,27 @@ build_alloc(Words, Floats) -> {alloc,[{words,Words},{floats,Floats}]}. %% flt_liveness([Instruction]) -> [Instruction] %% (Re)calculate the number of live registers for each heap allocation -%% function. We base liveness of the number of live registers at -%% entry to the instruction sequence. +%% function. We base liveness of the number of register map at the +%% beginning of the instruction sequence. %% %% A 'not_possible' term will be thrown if the set of live registers %% is not continous at an allocation function (e.g. if {x,0} and {x,2} %% are live, but not {x,1}). -flt_liveness([{'%live',Live}=LiveInstr|Is]) -> - flt_liveness_1(Is, init_regs(Live), [LiveInstr]). +flt_liveness([{'%live',_Live,Regs}=LiveInstr|Is]) -> + flt_liveness_1(Is, Regs, [LiveInstr]). -flt_liveness_1([{set,Ds,Ss,{alloc,_,Alloc}}|Is], Regs0, Acc) -> - Live = live_regs(Regs0), +flt_liveness_1([{set,Ds,Ss,{alloc,Live0,Alloc}}|Is], Regs0, Acc) -> + Live = min(Live0, live_regs(Regs0)), I = {set,Ds,Ss,{alloc,Live,Alloc}}, - Regs = foldl(fun(R, A) -> set_live(R, A) end, Regs0, Ds), + Regs1 = init_regs(Live), + Regs = x_live(Ds, Regs1), flt_liveness_1(Is, Regs, [I|Acc]); flt_liveness_1([{set,Ds,_,_}=I|Is], Regs0, Acc) -> - Regs = foldl(fun(R, A) -> set_live(R, A) end, Regs0, Ds), + Regs = x_live(Ds, Regs0), flt_liveness_1(Is, Regs, [I|Acc]); -flt_liveness_1([{'%live',_}=I|Is], Regs, Acc) -> - flt_liveness_1(Is, Regs, [I|Acc]); -flt_liveness_1([], _Regs, Acc) -> reverse(Acc). +flt_liveness_1([{'%live',_,_}], _Regs, Acc) -> + reverse(Acc). init_regs(Live) -> (1 bsl Live) - 1. @@ -364,14 +364,15 @@ live_regs_1(R, N) -> 1 -> live_regs_1(R bsr 1, N+1) end. -set_live({x,X}, Regs) -> Regs bor (1 bsl X); -set_live(_, Regs) -> Regs. +x_live([{x,N}|Rs], Regs) -> x_live(Rs, Regs bor (1 bsl N)); +x_live([_|Rs], Regs) -> x_live(Rs, Regs); +x_live([], Regs) -> Regs. %% update(Instruction, TypeDb) -> NewTypeDb %% Update the type database to account for executing an instruction. %% %% First the cases for instructions inside basic blocks. 
-update({'%live',_}, Ts) -> Ts; +update({'%live',_,_}, Ts) -> Ts; update({set,[D],[S],move}, Ts) -> tdb_copy(S, D, Ts); update({set,[D],[{integer,I},Reg],{bif,element,_}}, Ts0) -> diff --git a/lib/compiler/src/beam_utils.erl b/lib/compiler/src/beam_utils.erl index 26020e1d29..7704690f86 100644 --- a/lib/compiler/src/beam_utils.erl +++ b/lib/compiler/src/beam_utils.erl @@ -187,7 +187,7 @@ is_pure_test({test,is_lt,_,[_,_]}) -> true; is_pure_test({test,is_nil,_,[_]}) -> true; is_pure_test({test,is_nonempty_list,_,[_]}) -> true; is_pure_test({test,test_arity,_,[_,_]}) -> true; -is_pure_test({test,has_map_fields,_,[_,{list,_}]}) -> true; +is_pure_test({test,has_map_fields,_,[_|_]}) -> true; is_pure_test({test,Op,_,Ops}) -> erl_internal:new_type_test(Op, length(Ops)). @@ -196,7 +196,7 @@ is_pure_test({test,Op,_,Ops}) -> %% Go through the instruction sequence in reverse execution %% order, keep track of liveness and remove 'move' instructions %% whose destination is a register that will not be used. -%% Also insert {'%live',Live} annotations at the beginning +%% Also insert {'%live',Live,Regs} annotations at the beginning %% and end of each block. %% live_opt(Is0) -> @@ -217,7 +217,7 @@ delete_live_annos([{block,Bl0}|Is]) -> [] -> delete_live_annos(Is); [_|_]=Bl -> [{block,Bl}|delete_live_annos(Is)] end; -delete_live_annos([{'%live',_}|Is]) -> +delete_live_annos([{'%live',_,_}|Is]) -> delete_live_annos(Is); delete_live_annos([I|Is]) -> [I|delete_live_annos(Is)]; @@ -366,11 +366,6 @@ check_liveness(R, [{apply,Args}|Is], St) -> {x,_} -> {killed,St}; {y,_} -> check_liveness(R, Is, St) end; -check_liveness({x,R}, [{'%live',Live}|Is], St) -> - if - R < Live -> check_liveness(R, Is, St); - true -> {killed,St} - end; check_liveness(R, [{bif,Op,{f,Fail},Ss,D}|Is], St0) -> case check_liveness_fail(R, Op, Ss, Fail, St0) of {killed,St} = Killed -> @@ -554,7 +549,7 @@ check_killed_block(R, [{set,Ds,Ss,_Op}|Is]) -> false -> check_killed_block(R, Is) end end; -check_killed_block(R, [{'%live',Live}|Is]) -> +check_killed_block(R, [{'%live',Live,_}|Is]) -> case R of {x,X} when X >= Live -> killed; _ -> check_killed_block(R, Is) @@ -577,7 +572,7 @@ check_used_block({x,X}=R, [{set,Ds,Ss,{alloc,Live,Op}}|Is], St) -> end; check_used_block(R, [{set,Ds,Ss,Op}|Is], St) -> check_used_block_1(R, Ss, Ds, Op, Is, St); -check_used_block(R, [{'%live',Live}|Is], St) -> +check_used_block(R, [{'%live',Live,_}|Is], St) -> case R of {x,X} when X >= Live -> {killed,St}; _ -> check_used_block(R, Is, St) @@ -678,9 +673,9 @@ live_opt([{test,bs_start_match2,Fail,Live,[Src,_],_}=I|Is], _, D, Acc) -> %% Other instructions. live_opt([{block,Bl0}|Is], Regs0, D, Acc) -> - Live0 = {'%live',live_regs(Regs0)}, + Live0 = {'%live',live_regs(Regs0),Regs0}, {Bl,Regs} = live_opt_block(reverse(Bl0), Regs0, D, [Live0]), - Live = {'%live',live_regs(Regs)}, + Live = {'%live',live_regs(Regs),Regs}, live_opt(Is, Regs, D, [{block,[Live|Bl]}|Acc]); live_opt([{label,L}=I|Is], Regs, D0, Acc) -> D = gb_trees:insert(L, Regs, D0), diff --git a/lib/compiler/src/beam_validator.erl b/lib/compiler/src/beam_validator.erl index c156cf79fe..4d4536b79c 100644 --- a/lib/compiler/src/beam_validator.erl +++ b/lib/compiler/src/beam_validator.erl @@ -22,7 +22,6 @@ %% Avoid warning for local function error/1 clashing with autoimported BIF. -compile({no_auto_import,[error/1]}). --export([file/1, files/1]). %% Interface for compiler. -export([module/2, format_error/1]). @@ -40,38 +39,12 @@ -define(DBG_FORMAT(F, D), ok). -endif. -%%% -%%% API functions. 
-%%% - --spec file(file:filename()) -> 'ok' | {'error', term()}. - -file(Name) when is_list(Name) -> - case case filename:extension(Name) of - ".S" -> s_file(Name); - ".beam" -> beam_file(Name) - end of - [] -> ok; - Es -> {error,Es} - end. - --spec files([file:filename()]) -> 'ok'. - -files([F|Fs]) -> - ?DBG_FORMAT("# Verifying: ~p~n", [F]), - case file(F) of - ok -> ok; - {error,Es} -> - io:format("~tp:~n~ts~n", [F,format_error(Es)]) - end, - files(Fs); -files([]) -> ok. - %% To be called by the compiler. module({Mod,Exp,Attr,Fs,Lc}=Code, _Opts) when is_atom(Mod), is_list(Exp), is_list(Attr), is_integer(Lc) -> case validate(Mod, Fs) of - [] -> {ok,Code}; + [] -> + {ok,Code}; Es0 -> Es = [{?MODULE,E} || E <- Es0], {error,[{atom_to_list(Mod),Es}]} @@ -79,12 +52,6 @@ module({Mod,Exp,Attr,Fs,Lc}=Code, _Opts) -spec format_error(term()) -> iolist(). -format_error([]) -> []; -format_error([{{M,F,A},{I,Off,Desc}}|Es]) -> - [io_lib:format(" ~p:~p/~p+~p:~n ~p - ~p~n", - [M,F,A,Off,I,Desc])|format_error(Es)]; -format_error([Error|Es]) -> - [format_error(Error)|format_error(Es)]; format_error({{_M,F,A},{I,Off,limit}}) -> io_lib:format( "function ~p/~p+~p:~n" @@ -103,8 +70,6 @@ format_error({{_M,F,A},{I,Off,Desc}}) -> " Internal consistency check failed - please report this bug.~n" " Instruction: ~p~n" " Error: ~p:~n", [F,A,Off,I,Desc]); -format_error({Module,Error}) -> - [Module:format_error(Error)]; format_error(Error) -> io_lib:format("~p~n", [Error]). @@ -112,36 +77,6 @@ format_error(Error) -> %%% Local functions follow. %%% -s_file(Name) -> - {ok,Is} = file:consult(Name), - {module,Module} = lists:keyfind(module, 1, Is), - Fs = find_functions(Is), - validate(Module, Fs). - -find_functions(Fs) -> - find_functions_1(Fs, none, [], []). - -find_functions_1([{function,Name,Arity,Entry}|Is], Func, FuncAcc, Acc0) -> - Acc = add_func(Func, FuncAcc, Acc0), - find_functions_1(Is, {Name,Arity,Entry}, [], Acc); -find_functions_1([I|Is], Func, FuncAcc, Acc) -> - find_functions_1(Is, Func, [I|FuncAcc], Acc); -find_functions_1([], Func, FuncAcc, Acc) -> - reverse(add_func(Func, FuncAcc, Acc)). - -add_func(none, _, Acc) -> Acc; -add_func({Name,Arity,Entry}, Is, Acc) -> - [{function,Name,Arity,Entry,reverse(Is)}|Acc]. - -beam_file(Name) -> - try beam_disasm:file(Name) of - {error,beam_lib,Reason} -> [{beam_lib,Reason}]; - #beam_file{module=Module, code=Code0} -> - Code = normalize_disassembled_code(Code0), - validate(Module, Code) - catch _:_ -> [disassembly_failed] - end. - %%% %%% The validator follows. %%% @@ -196,23 +131,16 @@ validate_0(Module, [{function,Name,Ar,Entry,Code}|Fs], Ft) -> try validate_1(Code, Name, Ar, Entry, Ft) of _ -> validate_0(Module, Fs, Ft) catch - Error -> + throw:Error -> + %% Controlled error. [Error|validate_0(Module, Fs, Ft)]; - error:Error -> - [validate_error(Error, Module, Name, Ar)|validate_0(Module, Fs, Ft)] + Class:Error -> + %% Crash. + Stack = erlang:get_stacktrace(), + io:fwrite("Function: ~w/~w\n", [Name,Ar]), + erlang:raise(Class, Error, Stack) end. --ifdef(DEBUG). -validate_error(Error, Module, Name, Ar) -> - exit(validate_error_1(Error, Module, Name, Ar)). --else. -validate_error(Error, Module, Name, Ar) -> - validate_error_1(Error, Module, Name, Ar). --endif. -validate_error_1(Error, Module, Name, Ar) -> - {{Module,Name,Ar}, - {internal_error,'_',{Error,erlang:get_stacktrace()}}}. - -type index() :: non_neg_integer(). -type reg_tab() :: gb_trees:tree(index(), 'none' | {'value', _}). 
@@ -225,7 +153,6 @@ validate_error_1(Error, Module, Name, Ar) -> hf=0, %Available heap size for floats. fls=undefined, %Floating point state. ct=[], %List of hot catch/try labels - bsm=undefined, %Bit syntax matching state. bits=undefined, %Number of bits in bit syntax binary. setelem=false %Previous instruction was setelement/3. }). @@ -308,7 +235,7 @@ labels_1([{label,L}|Is], R) -> labels_1([{line,_}|Is], R) -> labels_1(Is, R); labels_1(Is, R) -> - {lists:reverse(R),Is}. + {reverse(R),Is}. init_state(Arity) -> Xs = init_regs(Arity, term), @@ -403,10 +330,6 @@ valfun_1({init,{y,_}=Reg}, Vst) -> set_type_y(initialized, Reg, Vst); valfun_1({test_heap,Heap,Live}, Vst) -> test_heap(Heap, Live, Vst); -valfun_1({bif,_Op,nofail,Src,Dst}, Vst) -> - %% The 'nofail' atom only occurs in disassembled code. - validate_src(Src, Vst), - set_type_reg(term, Dst, Vst); valfun_1({bif,Op,{f,_},Src,Dst}=I, Vst) -> case is_bif_safe(Op, length(Src)) of false -> @@ -432,18 +355,12 @@ valfun_1({put_tuple,Sz,Dst}, Vst0) when is_integer(Sz) -> valfun_1({put,Src}, Vst) -> assert_term(Src, Vst), eat_heap(1, Vst); -valfun_1({put_string,Sz,_,Dst}, Vst0) when is_integer(Sz) -> - Vst = eat_heap(2*Sz, Vst0), - set_type_reg(cons, Dst, Vst); %% Instructions for optimization of selective receives. valfun_1({recv_mark,{f,Fail}}, Vst) when is_integer(Fail) -> Vst; valfun_1({recv_set,{f,Fail}}, Vst) when is_integer(Fail) -> Vst; %% Misc. -valfun_1({'%live',Live}, Vst) -> - verify_live(Live, Vst), - Vst; valfun_1(remove_message, Vst) -> Vst; valfun_1({'%',_}, Vst) -> @@ -602,8 +519,6 @@ valfun_4({call_ext_last,Live,Func,StkSize}, tail_call(Func, Live, Vst); valfun_4({call_ext_last,_,_,_}, #vst{current=#st{numy=NumY}}) -> error({allocated,NumY}); -valfun_4({make_fun,_,_,Live}, Vst) -> - call('fun', Live, Vst); valfun_4({make_fun2,_,_,_,Live}, Vst) -> call(make_fun, Live, Vst); %% Other BIFs @@ -620,8 +535,6 @@ valfun_4({bif,element,{f,Fail},[Pos,Tuple],Dst}, Vst0) -> TupleType = upgrade_tuple_type({tuple,[get_tuple_size(PosType)]}, TupleType0), Vst = set_type(TupleType, Tuple, Vst1), set_type_reg(term, Dst, Vst); -valfun_4({raise,{f,_}=Fail,Src,Dst}, Vst) -> - valfun_4({bif,raise,Fail,Src,Dst}, Vst); valfun_4({bif,Op,{f,Fail},Src,Dst}, Vst0) -> validate_src(Src, Vst0), Vst = branch_state(Fail, Vst0), @@ -738,32 +651,6 @@ valfun_4({bs_save2,Ctx,SavePoint}, Vst) -> valfun_4({bs_restore2,Ctx,SavePoint}, Vst) -> bsm_restore(Ctx, SavePoint, Vst); -%% Bit syntax instructions. -valfun_4({bs_start_match,{f,_Fail}=F,Src}, Vst) -> - valfun_4({test,bs_start_match,F,[Src]}, Vst); -valfun_4({test,bs_start_match,{f,Fail},[Src]}, Vst) -> - assert_term(Src, Vst), - bs_start_match(branch_state(Fail, Vst)); - -valfun_4({bs_save,SavePoint}, Vst) -> - bs_assert_state(Vst), - bs_save(SavePoint, Vst); -valfun_4({bs_restore,SavePoint}, Vst) -> - bs_assert_state(Vst), - bs_assert_savepoint(SavePoint, Vst), - Vst; -valfun_4({test,bs_skip_bits,{f,Fail},[Src,_,_]}, Vst) -> - bs_assert_state(Vst), - assert_term(Src, Vst), - branch_state(Fail, Vst); -valfun_4({test,bs_test_tail,{f,Fail},_}, Vst) -> - bs_assert_state(Vst), - branch_state(Fail, Vst); -valfun_4({test,_,{f,Fail},[_,_,_,Dst]}, Vst0) -> - bs_assert_state(Vst0), - Vst = branch_state(Fail, Vst0), - set_type_reg({integer,[]}, Dst, Vst); - %% Other test instructions. 
valfun_4({test,is_float,{f,Lbl},[Float]}, Vst) -> assert_term(Float, Vst), @@ -779,9 +666,17 @@ valfun_4({test,test_arity,{f,Lbl},[Tuple,Sz]}, Vst) when is_integer(Sz) -> assert_type(tuple, Tuple, Vst), set_type_reg({tuple,Sz}, Tuple, branch_state(Lbl, Vst)); valfun_4({test,has_map_fields,{f,Lbl},Src,{list,List}}, Vst) -> - validate_src([Src], Vst), + assert_type(map, Src, Vst), assert_strict_literal_termorder(List), branch_state(Lbl, Vst); +valfun_4({test,is_map,{f,Lbl},[Src]}, Vst0) -> + Vst = branch_state(Lbl, Vst0), + case Src of + {Tag,_} when Tag =:= x; Tag =:= y -> + set_type_reg(map, Src, Vst); + _ -> + Vst + end; valfun_4({test,_Op,{f,Lbl},Src}, Vst) -> validate_src(Src, Vst), branch_state(Lbl, Vst); @@ -795,9 +690,6 @@ valfun_4({bs_utf8_size,{f,Fail},A,Dst}, Vst) -> valfun_4({bs_utf16_size,{f,Fail},A,Dst}, Vst) -> assert_term(A, Vst), set_type_reg({integer,[]}, Dst, branch_state(Fail, Vst)); -valfun_4({bs_bits_to_bytes,{f,Fail},Src,Dst}, Vst) -> - assert_term(Src, Vst), - set_type_reg({integer,[]}, Dst, branch_state(Fail, Vst)); valfun_4({bs_init2,{f,Fail},Sz,Heap,Live,_,Dst}, Vst0) -> verify_live(Live, Vst0), if @@ -868,16 +760,6 @@ valfun_4({bs_put_utf32,{f,Fail},_,Src}=I, Vst0) -> assert_term(Src, Vst0), Vst = bs_align_check(I, Vst0), branch_state(Fail, Vst); -%% Old bit syntax construction (before R10B). -valfun_4({bs_init,_,_}, Vst) -> - bs_zero_bits(Vst); -valfun_4({bs_need_buf,_}, Vst) -> Vst; -valfun_4({bs_final,{f,Fail},Dst}, Vst0) -> - Vst = branch_state(Fail, Vst0), - set_type_reg(binary, Dst, Vst); -valfun_4({bs_final2,Src,Dst}, Vst0) -> - assert_term(Src, Vst0), - set_type_reg(binary, Dst, Vst0); %% Map instructions. valfun_4({put_map_assoc,{f,Fail},Src,Dst,Live,{list,List}}, Vst) -> verify_put_map(Fail, Src, Dst, Live, List, Vst); @@ -889,26 +771,30 @@ valfun_4(_, _) -> error(unknown_instruction). verify_get_map(Fail, Src, List, Vst0) -> - assert_term(Src, Vst0), + assert_type(map, Src, Vst0), Vst1 = branch_state(Fail, Vst0), - Lits = mmap(fun(L,_R) -> [L] end, List), - assert_strict_literal_termorder(Lits), + Keys = extract_map_keys(List), + assert_strict_literal_termorder(Keys), verify_get_map_pair(List,Vst0,Vst1). +extract_map_keys([Key,_Val|T]) -> + [Key|extract_map_keys(T)]; +extract_map_keys([]) -> []. + verify_get_map_pair([],_,Vst) -> Vst; verify_get_map_pair([Src,Dst|Vs],Vst0,Vsti) -> assert_term(Src, Vst0), verify_get_map_pair(Vs,Vst0,set_type_reg(term,Dst,Vsti)). verify_put_map(Fail, Src, Dst, Live, List, Vst0) -> + assert_type(map, Src, Vst0), verify_live(Live, Vst0), verify_y_init(Vst0), foreach(fun (Term) -> assert_term(Term, Vst0) end, List), - assert_term(Src, Vst0), Vst1 = heap_alloc(0, Vst0), Vst2 = branch_state(Fail, Vst1), Vst = prune_x_regs(Live, Vst2), - set_type_reg(term, Dst, Vst). + set_type_reg(map, Dst, Vst). %% %% Common code for validating bs_get* instructions. @@ -936,9 +822,6 @@ validate_bs_skip_utf(Fail, Ctx, Live, Vst0) -> %% val_dsetel({move,_,_}, Vst) -> Vst; -val_dsetel({put_string,0,{string,""},_}, Vst) -> - %% An empty string is OK since it doesn't build anything. - Vst; val_dsetel({call_ext,3,{extfunc,erlang,setelement,3}}, #vst{current=St}=Vst) -> Vst#vst{current=St#st{setelem=true}}; val_dsetel({set_tuple_element,_,_,_}, #vst{current=#st{setelem=false}}) -> @@ -972,7 +855,7 @@ call(Name, Live, #vst{current=St}=Vst) -> Type when Type =/= exception -> %% Type is never 'exception' because it has been handled earlier. 
Xs = gb_trees_from_list([{0,Type}]), - Vst#vst{current=St#st{x=Xs,f=init_fregs(),bsm=undefined}} + Vst#vst{current=St#st{x=Xs,f=init_fregs()}} end. %% Tail call. @@ -1030,7 +913,7 @@ allocate(_, _, _, _, #vst{current=#st{numy=Numy}}) -> error({existing_stack_frame,{size,Numy}}). deallocate(#vst{current=St}=Vst) -> - Vst#vst{current=St#st{y=init_regs(0, initialized),numy=none,bsm=undefined}}. + Vst#vst{current=St#st{y=init_regs(0, initialized),numy=none}}. test_heap(Heap, Live, Vst0) -> verify_live(Live, Vst0), @@ -1038,7 +921,7 @@ test_heap(Heap, Live, Vst0) -> heap_alloc(Heap, Vst). heap_alloc(Heap, #vst{current=St0}=Vst) -> - St1 = kill_heap_allocation(St0#st{bsm=undefined}), + St1 = kill_heap_allocation(St0), St = heap_alloc_1(Heap, St1), Vst#vst{current=St}. @@ -1122,74 +1005,30 @@ assert_freg_set(Fr, _) -> error({bad_source,Fr}). %%% Maps -%% ensure that a list of literals has a strict -%% ascending term order (also meaning unique literals). -%% Single item lists may have registers. -assert_strict_literal_termorder([_]) -> ok; -assert_strict_literal_termorder(Ls) -> - Vs = lists:map(fun (L) -> get_literal(L) end, Ls), +%% A single item list may be either a list or a register. +%% +%% A list with more than item must contain literals in +%% ascending term order. +%% +%% An empty list is not allowed. + +assert_strict_literal_termorder([]) -> + %% There is no reason to use the get_map_elements and + %% has_map_fields instructions with empty lists. + error(empty_field_list); +assert_strict_literal_termorder([_]) -> + ok; +assert_strict_literal_termorder([_,_|_]=Ls) -> + Vs = [get_literal(L) || L <- Ls], case check_strict_value_termorder(Vs) of true -> ok; - false -> error({not_strict_order, Ls}) + false -> error(not_strict_order) end. -%% usage: -%% mmap(fun(A,B) -> [{A,B}] end, [1,2,3,4]), -%% [{1,2},{3,4}] - -mmap(F,List) -> - {arity,Ar} = erlang:fun_info(F,arity), - mmap(F,Ar,List). -mmap(_F,_,[]) -> []; -mmap(F,Ar,List) -> - {Hd,Tl} = lists:split(Ar,List), - apply(F,Hd) ++ mmap(F,Ar,Tl). - -check_strict_value_termorder([]) -> true; -check_strict_value_termorder([_]) -> true; -check_strict_value_termorder([V1,V2]) -> - erts_internal:cmp_term(V1,V2) < 0; -check_strict_value_termorder([V1,V2|Vs]) -> - case erts_internal:cmp_term(V1,V2) < 0 of - true -> check_strict_value_termorder([V2|Vs]); - false -> false - end. - -%%% -%%% Binary matching. -%%% -%%% Possible values for the bsm field (=bit syntax matching state). -%%% -%%% undefined - Undefined (initial state). No matching instructions allowed. -%%% -%%% (gb set) - The gb set contains the defined save points. -%%% -%%% The bsm field is reset to 'undefined' by instructions that may cause a -%%% a garbage collection (might move the binary) and/or context switch -%%% (may invalidate the save points). - -bs_start_match(#vst{current=#st{bsm=undefined}=St}=Vst) -> - Vst#vst{current=St#st{bsm=gb_sets:empty()}}; -bs_start_match(Vst) -> - %% Must retain save points here - it is possible to restore back - %% to a previous binary. - Vst. - -bs_save(Reg, #vst{current=#st{bsm=Saved}=St}=Vst) - when is_integer(Reg), Reg < ?MAXREG -> - Vst#vst{current=St#st{bsm=gb_sets:add(Reg, Saved)}}; -bs_save(_, _) -> error(limit). - -bs_assert_savepoint(Reg, #vst{current=#st{bsm=Saved}}) -> - case gb_sets:is_member(Reg, Saved) of - false -> error({no_save_point,Reg}); - true -> ok - end. - -bs_assert_state(#vst{current=#st{bsm=undefined}}) -> - error(no_bs_match_state); -bs_assert_state(_) -> ok. 
- +check_strict_value_termorder([V1|[V2|_]=Vs]) -> + erts_internal:cmp_term(V1, V2) < 0 andalso + check_strict_value_termorder(Vs); +check_strict_value_termorder([_]) -> true. %%% %%% New binary matching instructions. @@ -1389,7 +1228,8 @@ assert_term(Src, Vst) -> %% %% number Integer or Float of unknown value %% - +%% map Map. +%% assert_type(WantedType, Term, Vst) -> assert_type(WantedType, get_term_type(Term, Vst)). @@ -1471,6 +1311,7 @@ get_term_type_1(nil=T, _) -> T; get_term_type_1({atom,A}=T, _) when is_atom(A) -> T; get_term_type_1({float,F}=T, _) when is_float(F) -> T; get_term_type_1({integer,I}=T, _) when is_integer(I) -> T; +get_term_type_1({literal,Map}, _) when is_map(Map) -> map; get_term_type_1({literal,_}=T, _) -> T; get_term_type_1({x,X}=Reg, #vst{current=#st{x=Xs}}) when is_integer(X) -> case gb_trees:lookup(X, Xs) of @@ -1525,14 +1366,13 @@ merge_states(L, St, Branched) when L =/= 0 -> {value,OtherSt} -> merge_states_1(St, OtherSt) end. -merge_states_1(#st{x=Xs0,y=Ys0,numy=NumY0,h=H0,ct=Ct0,bsm=Bsm0}=St, - #st{x=Xs1,y=Ys1,numy=NumY1,h=H1,ct=Ct1,bsm=Bsm1}) -> +merge_states_1(#st{x=Xs0,y=Ys0,numy=NumY0,h=H0,ct=Ct0}=St, + #st{x=Xs1,y=Ys1,numy=NumY1,h=H1,ct=Ct1}) -> NumY = merge_stk(NumY0, NumY1), Xs = merge_regs(Xs0, Xs1), Ys = merge_y_regs(Ys0, Ys1), Ct = merge_ct(Ct0, Ct1), - Bsm = merge_bsm(Bsm0, Bsm1), - St#st{x=Xs,y=Ys,numy=NumY,h=min(H0, H1),ct=Ct,bsm=Bsm}. + St#st{x=Xs,y=Ys,numy=NumY,h=min(H0, H1),ct=Ct}. merge_stk(S, S) -> S; merge_stk(_, _) -> undecided. @@ -1615,10 +1455,6 @@ merge_types(T1, T2) when T1 =/= T2 -> %% Too different. All we know is that the type is a 'term'. term. -merge_bsm(undefined, _) -> undefined; -merge_bsm(_, undefined) -> undefined; -merge_bsm(Bsm0, Bsm1) -> gb_sets:intersection(Bsm0, Bsm1). - tuple_sz([Sz]) -> Sz; tuple_sz(Sz) -> Sz. @@ -1725,6 +1561,7 @@ bif_type(is_float, [_], _) -> bool; bif_type(is_function, [_], _) -> bool; bif_type(is_integer, [_], _) -> bool; bif_type(is_list, [_], _) -> bool; +bif_type(is_map, [_], _) -> bool; bif_type(is_number, [_], _) -> bool; bif_type(is_pid, [_], _) -> bool; bif_type(is_port, [_], _) -> bool; @@ -1754,6 +1591,7 @@ is_bif_safe(is_float, 1) -> true; is_bif_safe(is_function, 1) -> true; is_bif_safe(is_integer, 1) -> true; is_bif_safe(is_list, 1) -> true; +is_bif_safe(is_map, 1) -> true; is_bif_safe(is_number, 1) -> true; is_bif_safe(is_pid, 1) -> true; is_bif_safe(is_port, 1) -> true; @@ -1840,52 +1678,3 @@ error(Error) -> exit(Error). -else. error(Error) -> throw(Error). -endif. - - -%%% -%%% Rewrite disassembled code to the same format as we used internally -%%% to not have to worry later. -%%% - -normalize_disassembled_code(Fs) -> - Index = ndc_index(Fs, []), - ndc(Fs, Index, []). - -ndc_index([{function,Name,Arity,Entry,_Code}|Fs], Acc) -> - ndc_index(Fs, [{{Name,Arity},Entry}|Acc]); -ndc_index([], Acc) -> - gb_trees:from_orddict(lists:sort(Acc)). - -ndc([{function,Name,Arity,Entry,Code0}|Fs], D, Acc) -> - Code = ndc_1(Code0, D, []), - ndc(Fs, D, [{function,Name,Arity,Entry,Code}|Acc]); -ndc([], _, Acc) -> reverse(Acc). 
- -ndc_1([{call=Op,A,{_,F,A}}|Is], D, Acc) -> - ndc_1(Is, D, [{Op,A,{f,gb_trees:get({F,A}, D)}}|Acc]); -ndc_1([{call_only=Op,A,{_,F,A}}|Is], D, Acc) -> - ndc_1(Is, D, [{Op,A,{f,gb_trees:get({F,A}, D)}}|Acc]); -ndc_1([{call_last=Op,A,{_,F,A},Sz}|Is], D, Acc) -> - ndc_1(Is, D, [{Op,A,{f,gb_trees:get({F,A}, D)},Sz}|Acc]); -ndc_1([{arithbif,Op,F,Src,Dst}|Is], D, Acc) -> - ndc_1(Is, D, [{bif,Op,F,Src,Dst}|Acc]); -ndc_1([{arithfbif,Op,F,Src,Dst}|Is], D, Acc) -> - ndc_1(Is, D, [{bif,Op,F,Src,Dst}|Acc]); -ndc_1([{test,bs_start_match2=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]); -ndc_1([{test,bs_get_binary2=Op,F,[A1,Live,A3,A4,A5,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3,A4,A5],Dst}|Acc]); -ndc_1([{test,bs_get_float2=Op,F,[A1,Live,A3,A4,A5,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3,A4,A5],Dst}|Acc]); -ndc_1([{test,bs_get_integer2=Op,F,[A1,Live,A3,A4,A5,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3,A4,A5],Dst}|Acc]); -ndc_1([{test,bs_get_utf8=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]); -ndc_1([{test,bs_get_utf16=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]); -ndc_1([{test,bs_get_utf32=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]); -ndc_1([I|Is], D, Acc) -> - ndc_1(Is, D, [I|Acc]); -ndc_1([], _, Acc) -> - reverse(Acc). diff --git a/lib/compiler/src/sys_core_fold.erl b/lib/compiler/src/sys_core_fold.erl index 7d1819ea15..ea1959d0f8 100644 --- a/lib/compiler/src/sys_core_fold.erl +++ b/lib/compiler/src/sys_core_fold.erl @@ -1603,6 +1603,7 @@ eval_case(#c_case{arg=E,clauses=[#c_clause{pats=Ps0, %% is correct, the clause will always match at run-time. Case; {true,Bs} -> + eval_case_warn(B), {Ps,As} = unzip(Bs), InnerLet = cerl:c_let(Ps, core_lib:make_values(As), B), Let = cerl:c_let(Vs, E, InnerLet), @@ -1610,6 +1611,19 @@ eval_case(#c_case{arg=E,clauses=[#c_clause{pats=Ps0, end; eval_case(Case, _) -> Case. +eval_case_warn(#c_primop{anno=Anno, + name=#c_literal{val=match_fail}, + args=[#c_literal{val=Reason}]}=Core) + when is_atom(Reason) -> + case member(eval_failure, Anno) of + false -> + ok; + true -> + %% Example: M = not_map, M#{k:=v} + add_warning(Core, {eval_failure,Reason}) + end; +eval_case_warn(_) -> ok. + %% case_opt(CaseArg, [Clause]) -> {CaseArg,[Clause]}. %% Try and optimise a case by avoid building tuples or lists %% in the case expression. Instead combine the variable parts @@ -2288,10 +2302,17 @@ opt_simple_let_2(Let0, Vs0, Arg0, Body, Ctxt, Sub) -> %% No variables left. Body; {_,Arg,#c_literal{}} -> - %% Since the variable is not used in the body, we can rewrite the - %% let to a sequence: - %% let <Var> = Arg in Literal ==> seq Arg Literal - expr(#c_seq{arg=Arg,body=Body}, Ctxt, sub_new_preserve_types(Sub)); + E = case Ctxt of + effect -> + %% Throw away the literal body. + Arg; + value -> + %% Since the variable is not used in the body, we + %% can rewrite the let to a sequence. 
+ %% let <Var> = Arg in Literal ==> seq Arg Literal + #c_seq{arg=Arg,body=Body} + end, + expr(E, Ctxt, sub_new_preserve_types(Sub)); {Vs,Arg,Body} -> %% If none of the variables are used in the body, we can %% rewrite the let to a sequence: diff --git a/lib/compiler/src/v3_codegen.erl b/lib/compiler/src/v3_codegen.erl index 8c1a0c08ac..cbe50b93b0 100644 --- a/lib/compiler/src/v3_codegen.erl +++ b/lib/compiler/src/v3_codegen.erl @@ -1523,9 +1523,11 @@ set_cg([{var,R}], {map,Op,Map,[{map_pair,{var,_}=K,V}]}, Le, Vdb, Bef, List = [cg_reg_arg(K,Int0),cg_reg_arg(V,Int0)], Live = max_reg(Bef#sr.reg), - Int1 = Int0#sr{reg=put_reg(R, Int0#sr.reg)}, - Aft = clear_dead(Int1, Le#l.i, Vdb), - Target = fetch_reg(R, Int1#sr.reg), + + %% The target register can reuse one of the source registers. + Aft0 = clear_dead(Int0, Le#l.i, Vdb), + Aft = Aft0#sr{reg=put_reg(R, Aft0#sr.reg)}, + Target = fetch_reg(R, Aft#sr.reg), I = case Op of assoc -> put_map_assoc; @@ -1557,9 +1559,11 @@ set_cg([{var,R}], {map,Op,Map,Es}, Le, Vdb, Bef, List = flatmap(fun({K,V}) -> [K,cg_reg_arg(V,Int0)] end, Pairs), Live = max_reg(Bef#sr.reg), - Int1 = Int0#sr{reg=put_reg(R, Int0#sr.reg)}, - Aft = clear_dead(Int1, Le#l.i, Vdb), - Target = fetch_reg(R, Int1#sr.reg), + + %% The target register can reuse one of the source registers. + Aft0 = clear_dead(Int0, Le#l.i, Vdb), + Aft = Aft0#sr{reg=put_reg(R, Aft0#sr.reg)}, + Target = fetch_reg(R, Aft#sr.reg), I = case Op of assoc -> put_map_assoc; diff --git a/lib/compiler/src/v3_core.erl b/lib/compiler/src/v3_core.erl index 9dd6b319a3..3c19a209c0 100644 --- a/lib/compiler/src/v3_core.erl +++ b/lib/compiler/src/v3_core.erl @@ -758,30 +758,28 @@ make_bool_switch_guard(L, E, V, T, F) -> {clause,NegL,[V],[],[V]} ]}. -expr_map(M0,Es0,A,St0) -> - {M1,Mps,St1} = safe(M0, St0), +expr_map(M0, Es0, A, St0) -> + {M1,Eps0,St1} = safe(M0, St0), case is_valid_map_src(M1) of true -> - case {M1,Es0} of - {#c_var{}, []} -> - %% transform M#{} to is_map(M) - {Vpat,St2} = new_var(St1), - {Fpat,St3} = new_var(St2), - Cs = [#iclause{ - anno=A, - pats=[Vpat], - guard=[#icall{anno=#a{anno=A}, + {M2,Eps1,St2} = map_build_pairs(M1, Es0, A, St1), + M3 = case Es0 of + [] -> M1; + [_|_] -> M2 + end, + Cs = [#iclause{ + anno=#a{anno=[compiler_generated|A]}, + pats=[], + guard=[#icall{anno=#a{anno=A}, module=#c_literal{anno=A,val=erlang}, name=#c_literal{anno=A,val=is_map}, - args=[Vpat]}], - body=[Vpat]}], - Fc = fail_clause([Fpat], A, #c_literal{val=badarg}), - {#icase{anno=#a{anno=A},args=[M1],clauses=Cs,fc=Fc},Mps,St3}; - {_,_} -> - {M2,Eps,St2} = map_build_pairs(M1, Es0, A, St1), - {M2,Mps++Eps,St2} - end; - false -> throw({bad_map,bad_map}) + args=[M1]}], + body=[M3]}], + Fc = fail_clause([], [eval_failure|A], #c_literal{val=badarg}), + Eps = Eps0 ++ Eps1, + {#icase{anno=#a{anno=A},args=[],clauses=Cs,fc=Fc},Eps,St2}; + false -> + throw({bad_map,bad_map}) end. map_build_pairs(Map, Es0, Ann, St0) -> @@ -1623,49 +1621,30 @@ pattern_map_pairs(Ps, St) -> {CMapPair,EpsP,Sti1} = pattern_map_pair(P,Sti0), {CMapPair, {EpsM++EpsP,Sti1}} end, {[],St}, Ps), - {pat_alias_map_pairs(CMapPairs,[]),Eps,St1}. - -%% remove cluddering annotations -pattern_map_clean_key(#c_literal{val=V}) -> {literal,V}; -pattern_map_clean_key(#c_var{name=V}) -> {var,V}. 
- -pat_alias_map_pairs(Ps1,Ps2) -> - Ps = Ps1 ++ Ps2, - F = fun(#c_map_pair{key=Ck,val=Cv},Dbi) -> - K = pattern_map_clean_key(Ck), - case dict:find(K,Dbi) of - {ok,Cvs} -> dict:store(K,[Cv|Cvs],Dbi); - _ -> dict:store(K,[Cv],Dbi) - end - end, - Kdb = lists:foldl(F,dict:new(),Ps), - pat_alias_map_pairs(Ps,Kdb,sets:new()). - -pat_alias_map_pairs([],_,_) -> []; -pat_alias_map_pairs([#c_map_pair{key=Ck}=Pair|Pairs],Kdb,Set) -> - K = pattern_map_clean_key(Ck), - case sets:is_element(K,Set) of - true -> - pat_alias_map_pairs(Pairs,Kdb,Set); - false -> - Cvs = dict:fetch(K,Kdb), - Cv = pat_alias_map_pair_values(Cvs), - Set1 = sets:add_element(K,Set), - [Pair#c_map_pair{val=Cv}|pat_alias_map_pairs(Pairs,Kdb,Set1)] - end. - -pat_alias_map_pair_values([Cv]) -> Cv; -pat_alias_map_pair_values([Cv1,Cv2|Cvs]) -> - pat_alias_map_pair_values([pat_alias(Cv1,Cv2)|Cvs]). + {pat_alias_map_pairs(CMapPairs),Eps,St1}. pattern_map_pair({map_field_exact,L,K,V}, St0) -> - {Ck,EpsK,St1} = safe_pattern_expr(K,St0), + {Ck,EpsK,St1} = safe_pattern_expr(K, St0), {Cv,EpsV,St2} = pattern(V, St1), - {#c_map_pair{anno=lineno_anno(L,St2), + {#c_map_pair{anno=lineno_anno(L, St2), op=#c_literal{val=exact}, key=Ck, val=Cv},EpsK++EpsV,St2}. +pat_alias_map_pairs(Ps) -> + D = foldl(fun(#c_map_pair{key=K0}=Pair, D0) -> + K = cerl:set_ann(K0, []), + dict:append(K, Pair, D0) + end, dict:new(), Ps), + pat_alias_map_pairs_1(dict:to_list(D)). + +pat_alias_map_pairs_1([{_,[#c_map_pair{val=V0}=Pair|Vs]}|T]) -> + V = foldl(fun(#c_map_pair{val=V}, Pat) -> + pat_alias(V, Pat) + end, V0, Vs), + [Pair#c_map_pair{val=V}|pat_alias_map_pairs_1(T)]; +pat_alias_map_pairs_1([]) -> []. + %% pat_bin([BinElement], State) -> [BinSeg]. pat_bin(Ps, St) -> [pat_segment(P, St) || P <- Ps]. @@ -1707,7 +1686,7 @@ pat_alias(#c_tuple{anno=Anno,es=Es1}, #c_tuple{es=Es2}) -> %% alias maps %% There are no literals in maps patterns (patterns are always abstract) pat_alias(#c_map{es=Es1}=M,#c_map{es=Es2}) -> - M#c_map{es=pat_alias_map_pairs(Es1,Es2)}; + M#c_map{es=pat_alias_map_pairs(Es1++Es2)}; pat_alias(#c_alias{var=V1,pat=P1}, #c_alias{var=V2,pat=P2}) -> @@ -1819,7 +1798,7 @@ uclauses(Lcs, Ks, St0) -> uclause(Cl0, Ks, St0) -> {Cl1,_Pvs,Used,New,St1} = uclause(Cl0, Ks, Ks, St0), - A0 = get_ianno(Cl1), + A0 = get_anno(Cl1), A = A0#a{us=Used,ns=New}, {Cl1#iclause{anno=A},St1}. @@ -2006,7 +1985,7 @@ ufun_clauses(Lcs, Ks, St0) -> ufun_clause(Cl0, Ks, St0) -> {Cl1,Pvs,Used,_,St1} = uclause(Cl0, [], Ks, St0), - A0 = get_ianno(Cl1), + A0 = get_anno(Cl1), A = A0#a{us=subtract(intersection(Used, Ks), Pvs),ns=[]}, {Cl1#iclause{anno=A},St1}. @@ -2354,12 +2333,6 @@ lineno_anno(L, St) -> [Line] ++ St#core.file end. -get_ianno(Ce) -> - case get_anno(Ce) of - #a{}=A -> A; - A when is_list(A) -> #a{anno=A} - end. 
- get_lineno_anno(Ce) -> case get_anno(Ce) of #a{anno=A} -> A; diff --git a/lib/compiler/src/v3_kernel.erl b/lib/compiler/src/v3_kernel.erl index 08e84efc1b..0ac1aaf158 100644 --- a/lib/compiler/src/v3_kernel.erl +++ b/lib/compiler/src/v3_kernel.erl @@ -273,17 +273,7 @@ expr(#c_tuple{anno=A,es=Ces}, Sub, St0) -> {Kes,Ep,St1} = atomic_list(Ces, Sub, St0), {#k_tuple{anno=A,es=Kes},Ep,St1}; expr(#c_map{anno=A,arg=Var,es=Ces}, Sub, St0) -> - try expr_map(A,Var,Ces,Sub,St0) of - {_,_,_}=Res -> Res - catch - throw:bad_map -> - St1 = add_warning(get_line(A), bad_map, A, St0), - Erl = #c_literal{val=erlang}, - Name = #c_literal{val=error}, - Args = [#c_literal{val=badarg}], - Error = #c_call{anno=A,module=Erl,name=Name,args=Args}, - expr(Error, Sub, St1) - end; + expr_map(A, Var, Ces, Sub, St0); expr(#c_binary{anno=A,segments=Cv}, Sub, St0) -> try atomic_bin(Cv, Sub, St0) of {Kv,Ep,St1} -> @@ -506,19 +496,9 @@ translate_fc(Args) -> [#c_literal{val=function_clause},make_list(Args)]. expr_map(A,Var0,Ces,Sub,St0) -> - %% An extra pass of validation of Map src because of inlining {Var,Mps,St1} = expr(Var0, Sub, St0), - case is_valid_map_src(Var) of - true -> - {Km,Eps,St2} = map_split_pairs(A, Var, Ces, Sub, St1), - {Km,Eps++Mps,St2}; - false -> throw(bad_map) - end. - -is_valid_map_src(#k_map{}) -> true; -is_valid_map_src(#k_literal{val=M}) when is_map(M) -> true; -is_valid_map_src(#k_var{}) -> true; -is_valid_map_src(_) -> false. + {Km,Eps,St2} = map_split_pairs(A, Var, Ces, Sub, St1), + {Km,Eps++Mps,St2}. map_split_pairs(A, Var, Ces, Sub, St0) -> %% 1. Force variables. @@ -2024,9 +2004,7 @@ format_error(nomatch_shadow) -> format_error(bad_call) -> "invalid module and/or function name; this call will always fail"; format_error(bad_segment_size) -> - "binary construction will fail because of a type mismatch"; -format_error(bad_map) -> - "map construction will fail because of a type mismatch". + "binary construction will fail because of a type mismatch". add_warning(none, Term, Anno, #kern{ws=Ws}=St) -> File = get_file(Anno), diff --git a/lib/compiler/test/beam_validator_SUITE.erl b/lib/compiler/test/beam_validator_SUITE.erl index 27979647c6..1b1c7db0e8 100644 --- a/lib/compiler/test/beam_validator_SUITE.erl +++ b/lib/compiler/test/beam_validator_SUITE.erl @@ -21,16 +21,17 @@ -export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, init_per_group/2,end_per_group/2, init_per_testcase/2,end_per_testcase/2, - beam_files/1,compiler_bug/1,stupid_but_valid/1, + compiler_bug/1,stupid_but_valid/1, xrange/1,yrange/1,stack/1,call_last/1,merge_undefined/1, uninit/1,unsafe_catch/1, - dead_code/1,mult_labels/1, + dead_code/1, overwrite_catchtag/1,overwrite_trytag/1,accessing_tags/1,bad_catch_try/1, cons_guard/1, freg_range/1,freg_uninit/1,freg_state/1, - bin_match/1,bad_bin_match/1,bin_aligned/1,bad_dsetel/1, + bad_bin_match/1,bin_aligned/1,bad_dsetel/1, state_after_fault_in_catch/1,no_exception_in_catch/1, - undef_label/1,illegal_instruction/1,failing_gc_guard_bif/1]). + undef_label/1,illegal_instruction/1,failing_gc_guard_bif/1, + map_field_lists/1]). -include_lib("test_server/include/test_server.hrl"). @@ -47,18 +48,19 @@ suite() -> [{ct_hooks,[ts_install_cth]}]. all() -> test_lib:recompile(?MODULE), - [beam_files,{group,p}]. + [{group,p}]. 
groups() -> [{p,test_lib:parallel(), [compiler_bug,stupid_but_valid,xrange, yrange,stack,call_last,merge_undefined,uninit, - unsafe_catch,dead_code,mult_labels, + unsafe_catch,dead_code, overwrite_catchtag,overwrite_trytag,accessing_tags, bad_catch_try,cons_guard,freg_range,freg_uninit, - freg_state,bin_match,bad_bin_match,bin_aligned,bad_dsetel, + freg_state,bad_bin_match,bin_aligned,bad_dsetel, state_after_fault_in_catch,no_exception_in_catch, - undef_label,illegal_instruction,failing_gc_guard_bif]}]. + undef_label,illegal_instruction,failing_gc_guard_bif, + map_field_lists]}]. init_per_suite(Config) -> Config. @@ -72,27 +74,6 @@ init_per_group(_GroupName, Config) -> end_per_group(_GroupName, Config) -> Config. - -beam_files(Config) when is_list(Config) -> - ?line DataDir = proplists:get_value(data_dir, Config), - ?line Wc = filename:join([DataDir,"..","..","*","*.beam"]), - %% Must have at least two files here, or there will be - %% a grammatical error in the output of the io:format/2 call below. ;-) - ?line [_,_|_] = Fs = filelib:wildcard(Wc), - ?line io:format("~p files\n", [length(Fs)]), - test_lib:p_run(fun do_beam_file/1, Fs). - - -do_beam_file(F) -> - case beam_validator:file(F) of - ok -> - ok; - {error,Es} -> - io:format("File: ~s", [F]), - io:format("Error: ~p\n", [Es]), - error - end. - compiler_bug(Config) when is_list(Config) -> %% Check that the compiler returns an error if we try to %% assemble one of the bad '.S' files. @@ -141,7 +122,7 @@ yrange(Config) when is_list(Config) -> {{move,{x,1},{y,-1}},5, {invalid_store,{y,-1},term}}}, {{t,sum_2,2}, - {{bif,'+',{f,0},[{x,0},{y,1024}],{x,0}},8, + {{bif,'+',{f,0},[{x,0},{y,1024}],{x,0}},7, {uninitialized_reg,{y,1024}}}}, {{t,sum_3,2}, {{move,{x,1},{y,1024}},5,limit}}, @@ -152,31 +133,31 @@ yrange(Config) when is_list(Config) -> stack(Config) when is_list(Config) -> Errors = do_val(stack, Config), - ?line [{{t,a,2},{return,11,{stack_frame,2}}}, - {{t,b,2},{{deallocate,2},4,{allocated,none}}}, - {{t,c,2},{{deallocate,2},12,{allocated,none}}}, - {{t,d,2}, - {{allocate,2,2},5,{existing_stack_frame,{size,2}}}}, - {{t,e,2},{{deallocate,5},6,{allocated,2}}}, - {{t,bad_1,0},{{allocate_zero,2,10},4,{{x,9},not_live}}}, - {{t,bad_2,0},{{move,{y,0},{x,0}},5,{unassigned,{y,0}}}}] = Errors, + [{{t,a,2},{return,9,{stack_frame,2}}}, + {{t,b,2},{{deallocate,2},4,{allocated,none}}}, + {{t,bad_1,0},{{allocate_zero,2,10},4,{{x,9},not_live}}}, + {{t,bad_2,0},{{move,{y,0},{x,0}},5,{unassigned,{y,0}}}}, + {{t,c,2},{{deallocate,2},10,{allocated,none}}}, + {{t,d,2}, + {{allocate,2,2},5,{existing_stack_frame,{size,2}}}}, + {{t,e,2},{{deallocate,5},6,{allocated,2}}}] = Errors, ok. call_last(Config) when is_list(Config) -> Errors = do_val(call_last, Config), - ?line [{{t,a,1},{{call_last,1,{f,8},2},11,{allocated,1}}}, - {{t,b,1}, - {{call_ext_last,2,{extfunc,lists,seq,2},2}, - 11, - {allocated,1}}}] = Errors, + [{{t,a,1},{{call_last,1,{f,8},2},9,{allocated,1}}}, + {{t,b,1}, + {{call_ext_last,2,{extfunc,lists,seq,2},2}, + 10, + {allocated,1}}}] = Errors, ok. merge_undefined(Config) when is_list(Config) -> Errors = do_val(merge_undefined, Config), - ?line [{{t,handle_call,2}, - {{call_ext,2,{extfunc,debug,filter,2}}, - 22, - {uninitialized_reg,{y,0}}}}] = Errors, + [{{t,handle_call,2}, + {{call_ext,2,{extfunc,debug,filter,2}}, + 22, + {uninitialized_reg,{y,0}}}}] = Errors, ok. 
uninit(Config) when is_list(Config) -> @@ -185,10 +166,10 @@ uninit(Config) when is_list(Config) -> [{{t,sum_1,2}, {{move,{y,0},{x,0}},5,{uninitialized_reg,{y,0}}}}, {{t,sum_2,2}, - {{call,1,{f,10}},6,{uninitialized_reg,{y,0}}}}, + {{call,1,{f,8}},5,{uninitialized_reg,{y,0}}}}, {{t,sum_3,2}, {{bif,'+',{f,0},[{x,0},{y,0}],{x,0}}, - 7, + 6, {unassigned,{y,0}}}}] = Errors, ok. @@ -206,10 +187,6 @@ dead_code(Config) when is_list(Config) -> [] = do_val(dead_code, Config), ok. -mult_labels(Config) when is_list(Config) -> - [] = do_val(erl_prim_loader, Config, ".beam"), - ok. - overwrite_catchtag(Config) when is_list(Config) -> Errors = do_val(overwrite_catchtag, Config), ?line @@ -221,16 +198,15 @@ overwrite_trytag(Config) when is_list(Config) -> Errors = do_val(overwrite_trytag, Config), ?line [{{overwrite_trytag,foo,1}, - {{kill,{y,2}},9,{trytag,_}}}] = Errors, + {{kill,{y,2}},8,{trytag,_}}}] = Errors, ok. accessing_tags(Config) when is_list(Config) -> Errors = do_val(accessing_tags, Config), - ?line - [{{accessing_tags,foo,1}, - {{move,{y,0},{x,0}},6,{catchtag,_}}}, - {{accessing_tags,bar,1}, - {{move,{y,0},{x,0}},6,{trytag,_}}}] = Errors, + [{{accessing_tags,bar,1}, + {{move,{y,0},{x,0}},6,{trytag,_}}}, + {{accessing_tags,foo,1}, + {{move,{y,0},{x,0}},6,{catchtag,_}}}] = Errors, ok. bad_catch_try(Config) when is_list(Config) -> @@ -317,13 +293,6 @@ freg_state(Config) when is_list(Config) -> {fclearerror,5,{bad_floating_point_state,cleared}}}] = Errors, ok. -bin_match(Config) when is_list(Config) -> - Errors = do_val(bin_match, Config), - ?line - [{{t,t,1},{{bs_save,0},4,no_bs_match_state}}, - {{t,x,1},{{bs_restore,1},16,{no_save_point,1}}}] = Errors, - ok. - bad_bin_match(Config) when is_list(Config) -> [{{t,t,1},{return,5,{match_context,{x,0}}}}] = do_val(bad_bin_match, Config), @@ -347,36 +316,69 @@ bad_dsetel(Config) when is_list(Config) -> ?line [{{t,t,1}, {{set_tuple_element,{x,1},{x,0},1}, - 15, + 17, illegal_context_for_set_tuple_element}}] = Errors, ok. state_after_fault_in_catch(Config) when is_list(Config) -> Errors = do_val(state_after_fault_in_catch, Config), - [{{t,foo,1}, - {{move,{x,1},{x,0}},10,{uninitialized_reg,{x,1}}}}, - {{state_after_fault_in_catch,if_end,1}, + [{{state_after_fault_in_catch,badmatch,1}, {{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}}, {{state_after_fault_in_catch,case_end,1}, {{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}}, - {{state_after_fault_in_catch,badmatch,1}, - {{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}}] = Errors, + {{state_after_fault_in_catch,if_end,1}, + {{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}}, + {{t,foo,1}, + {{move,{x,1},{x,0}},10,{uninitialized_reg,{x,1}}}}] = Errors, ok. no_exception_in_catch(Config) when is_list(Config) -> Errors = do_val(no_exception_in_catch, Config), [{{no_exception_in_catch,nested_of_1,4}, - {{move,{x,3},{x,0}},91,{uninitialized_reg,{x,3}}}}] = Errors, + {{move,{x,3},{x,0}},88,{uninitialized_reg,{x,3}}}}] = Errors, ok. undef_label(Config) when is_list(Config) -> - Errors = do_val(undef_label, Config), + M = {undef_label, + [{t,1}], + [], + [{function,t,1,2, + [{label,1}, + {func_info,{atom,undef_label},{atom,t},1}, + {label,2}, + {test,is_eq_exact,{f,42},[{x,0},{atom,x}]}, + {move,{atom,ok},{x,0}}, + return]}, + {function,x,1,17, + [{label,3}, + {func_info,{atom,undef_label},{atom,x},1}, + {label,4}, + return]}], + 5}, + Errors = beam_val(M), [{{undef_label,t,1},{undef_labels,[42]}}, {{undef_label,x,1},{return,4,no_entry_label}}] = Errors, ok. 
illegal_instruction(Config) when is_list(Config) -> - Errors = do_val(illegal_instruction, Config), + M = {illegal_instruction, + [{t,1},{x,1},{y,0}], + [], + [{function,t,1,2, + [{label,1}, + {func_info,{atom,illegal_instruction},{atom,t},1}, + {label,2}, + {my_illegal_instruction,{x,0}}, + return]}, + {function,x,1,4, + [{label,3}, + bad_func_info, + {label,4}, + {my_illegal_instruction,{x,0}}, + return]}, + {function,y,0,17,[]}], + 5}, + Errors = beam_val(M), [{{illegal_instruction,t,1}, {{my_illegal_instruction,{x,0}},4,unknown_instruction}}, {{'_',x,1},{bad_func_info,1,illegal_instruction}}, @@ -414,19 +416,40 @@ process_request_foo(_) -> process_request_bar(Pid, [Response]) when is_pid(Pid) -> Response. +map_field_lists(Config) -> + Errors = do_val(map_field_lists, Config), + [{{map_field_lists,x,1}, + {{test,has_map_fields,{f,1},{x,0}, + {list,[{atom,z},{atom,a}]}}, + 5, + not_strict_order}}, + {{map_field_lists,y,1}, + {{test,has_map_fields,{f,3},{x,0},{list,[]}}, + 5, + empty_field_list}} + ] = Errors. %%%------------------------------------------------------------------------- -do_val(Name, Config) -> - do_val(Name, Config, ".S"). - -do_val(Name, Config, Type) -> - ?line Data = ?config(data_dir, Config), - ?line File = filename:join(Data, atom_to_list(Name)++Type), - ?line case beam_validator:file(File) of - {error,Errors} -> - ?line io:format("~p:~n~s", - [File,beam_validator:format_error(Errors)]), - Errors; - ok -> [] - end. +do_val(Mod, Config) -> + Data = ?config(data_dir, Config), + Base = atom_to_list(Mod), + File = filename:join(Data, Base), + case compile:file(File, [from_asm,no_postopt,return_errors]) of + {error,L,[]} -> + [{Base,Errors0}] = L, + Errors = [E || {beam_validator,E} <- Errors0], + _ = [io:put_chars(beam_validator:format_error(E)) || + E <- Errors], + Errors; + {ok,Mod} -> + [] + end. + +beam_val(M) -> + Name = atom_to_list(element(1, M)), + {error,[{Name,Errors0}]} = beam_validator:module(M, []), + Errors = [E || {beam_validator,E} <- Errors0], + _ = [io:put_chars(beam_validator:format_error(E)) || + E <- Errors], + Errors. diff --git a/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S b/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S index 279b2fa97f..9630d73a93 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S +++ b/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S @@ -1,4 +1,4 @@ -{module, t}. %% version = 0 +{module, bad_dsetel}. %% version = 0 {exports, [{module_info,0},{module_info,1},{t,1}]}. @@ -21,7 +21,9 @@ {move,{integer,3},{x,0}}. {call_ext,3,{extfunc,erlang,setelement,3}}. {test_heap,6,1}. - {put_string,3,{string,"abc"},{x,1}}. + {put_list,{integer,99},nil,{x,1}}. + {put_list,{integer,98},{x,1},{x,1}}. + {put_list,{integer,97},{x,1},{x,1}}. {set_tuple_element,{x,1},{x,0},1}. {'%live',1}. {deallocate,0}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S b/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S index 2f353fbd25..a59f7ccc03 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S +++ b/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S @@ -1,4 +1,4 @@ -{module, t}. %% version = 0 +{module, bin_aligned}. %% version = 0 {exports, [{decode,1},{module_info,0},{module_info,1}]}. 
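A minimal sketch of how the reworked beam_validator_SUITE drives the validator, condensing the do_val/2 and beam_val/1 helpers above: .S test data is now assembled through the compiler (from_asm, no_postopt, return_errors) instead of being validated as pre-built .beam files, and hand-written module terms go straight to beam_validator:module/2. The helper below is hypothetical; the return shapes are the ones matched in the diff.

    %% Assemble a .S file (path given without extension) and return the
    %% beam_validator diagnostics, mirroring do_val/2 above.
    validate_asm(File) ->
        case compile:file(File, [from_asm,no_postopt,return_errors]) of
            {ok,_Mod} ->
                [];                                   %% validator accepted the code
            {error,[{_Source,Errors0}],_Warnings} ->
                [E || {beam_validator,E} <- Errors0]  %% keep validator errors only
        end.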
diff --git a/lib/compiler/test/beam_validator_SUITE_data/bin_match.S b/lib/compiler/test/beam_validator_SUITE_data/bin_match.S deleted file mode 100644 index 96df0f7933..0000000000 --- a/lib/compiler/test/beam_validator_SUITE_data/bin_match.S +++ /dev/null @@ -1,64 +0,0 @@ -{module, bin_match}. %% version = 0 - -{exports, [{t,1}]}. - -{attributes, []}. - -{labels, 8}. - - -{function, t, 1, 2}. - {label,1}. - {func_info,{atom,t},{atom,t},1}. - {label,2}. -%% {test,bs_start_match,{f,1},[{x,0}]}. - {bs_save,0}. - {test,bs_get_integer, - {f,3}, - [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,1}]}. - {test,bs_get_integer, - {f,3}, - [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,2}]}. - {test,bs_test_tail,{f,3},[0]}. - {test_heap,3,3}. - {put_tuple,2,{x,0}}. - {put,{x,1}}. - {put,{x,2}}. - {'%live',1}. - return. - {label,3}. - {bs_restore,0}. - {test,bs_get_integer, - {f,1}, - [{integer,32},1,{field_flags,[aligned,unsigned,big]},{x,1}]}. - {test,bs_test_tail,{f,1},[0]}. - {move,{x,1},{x,0}}. - return. - -{function, x, 1, 5}. - {label,4}. - {func_info,{atom,t},{atom,x},1}. - {label,5}. - {test,bs_start_match,{f,4},[{x,0}]}. - {bs_save,0}. - {test,bs_get_integer, - {f,6}, - [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,1}]}. - {test,bs_get_integer, - {f,6}, - [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,2}]}. - {test,bs_test_tail,{f,6},[0]}. - {test_heap,3,3}. - {put_tuple,2,{x,0}}. - {put,{x,1}}. - {put,{x,2}}. - {'%live',1}. - return. - {label,6}. - {bs_restore,1}. - {test,bs_get_integer, - {f,4}, - [{integer,32},1,{field_flags,[aligned,unsigned,big]},{x,1}]}. - {test,bs_test_tail,{f,4},[0]}. - {move,{x,1},{x,0}}. - return. diff --git a/lib/compiler/test/beam_validator_SUITE_data/dead_code.S b/lib/compiler/test/beam_validator_SUITE_data/dead_code.S index f964f98fba..c114664ba0 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/dead_code.S +++ b/lib/compiler/test/beam_validator_SUITE_data/dead_code.S @@ -1,10 +1,10 @@ {module, dead_code}. %% version = 0 -{exports, [{execute,0},{module_info,0},{module_info,1}]}. +{exports, [{execute,0}]}. {attributes, []}. -{labels, 10}. +{labels, 6}. {function, execute, 0, 2}. @@ -12,7 +12,6 @@ {func_info,{atom,dead_code},{atom,execute},0}. {label,2}. {allocate,0,0}. - {'%live',0}. {call_ext,0,{extfunc,foo,fie,0}}. {test,is_ne,{f,4},[{x,0},{integer,0}]}. {test,is_ne,{f,4},[{x,0},{integer,1}]}. @@ -22,27 +21,7 @@ {case_end,{x,0}}. {label,4}. {move,{atom,ok},{x,0}}. - {'%live',1}. {deallocate,0}. return. - {'%','Moved code'}. {label,5}. {case_end,{x,0}}. - - -{function, module_info, 0, 7}. - {label,6}. - {func_info,{atom,dead_code},{atom,module_info},0}. - {label,7}. - {move,nil,{x,0}}. - {'%live',1}. - return. - - -{function, module_info, 1, 9}. - {label,8}. - {func_info,{atom,dead_code},{atom,module_info},1}. - {label,9}. - {move,nil,{x,0}}. - {'%live',1}. - return. diff --git a/lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beam b/lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beam Binary files differdeleted file mode 100644 index dd58a88e42..0000000000 --- a/lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beam +++ /dev/null diff --git a/lib/compiler/test/beam_validator_SUITE_data/freg_range.S b/lib/compiler/test/beam_validator_SUITE_data/freg_range.S index ee583a923e..b3ebff3ade 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/freg_range.S +++ b/lib/compiler/test/beam_validator_SUITE_data/freg_range.S @@ -1,10 +1,10 @@ {module, freg_range}. 
%% version = 0 -{exports, [{module_info,0},{module_info,1},{prod,2},{sum,2},{sum_prod,3}]}. +{exports, [{sum_1,2},{sum_2,2},{sum_3,2},{sum_4,2}]}. {attributes, []}. -{labels, 8}. +{labels, 9}. {function, sum_1, 2, 2}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/freg_state.S b/lib/compiler/test/beam_validator_SUITE_data/freg_state.S index ff4d7548ae..7466763482 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/freg_state.S +++ b/lib/compiler/test/beam_validator_SUITE_data/freg_state.S @@ -1,6 +1,6 @@ {module, freg_state}. %% version = 0 -{exports, []}. +{exports, [{sum_1,2},{sum_2,2},{sum_3,2},{sum_4,2},{sum_5,2}]}. {attributes, []}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S b/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S index f8d805d9ec..71e833446a 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S +++ b/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S @@ -1,10 +1,10 @@ {module, freg_uninit}. %% version = 0 -{exports, []}. +{exports, [{sum_1,2},{sum_2,2}]}. {attributes, []}. -{labels, 8}. +{labels, 7}. {function, sum_1, 2, 2}. @@ -14,7 +14,6 @@ {fconv,{x,0},{fr,0}}. fclearerror. {bif,fadd,{f,0},[{fr,0},{fr,1}],{fr,0}}. - {'%live',1}. return. @@ -26,7 +25,12 @@ {fconv,{x,1},{fr,1}}. fclearerror. {fcheckerror,{f,0}}. - {call,2,{f,8}}. + {call,2,{f,6}}. {bif,fadd,{f,0},[{fr,0},{fr,1}],{fr,0}}. - {'%live',1}. + return. + +{function, foo, 2, 6}. + {label,5}. + {func_info,{atom,t},{atom,foo},2}. + {label,6}. return. diff --git a/lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S b/lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S deleted file mode 100644 index d6e92abc71..0000000000 --- a/lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S +++ /dev/null @@ -1,26 +0,0 @@ -{module, illegal_instruction}. %% version = 0 - -{exports, []}. - -{attributes, []}. - -{labels, 7}. - - -{function, t, 1, 2}. - {label,1}. - {func_info,{atom,illegal_instruction},{atom,t},1}. - {label,2}. - {my_illegal_instruction,{x,0}}. - return. - - -{function, x, 1, 4}. - {label,3}. - bad_func_info. - {label,4}. - {my_illegal_instruction,{x,0}}. - return. - -{function, y, 0, 17}. -
\ No newline at end of file diff --git a/lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S b/lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S new file mode 100644 index 0000000000..9af68c82d4 --- /dev/null +++ b/lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S @@ -0,0 +1,29 @@ +{module, map_field_lists}. %% version = 0 + +{exports, [{x,1},{y,1}]}. + +{attributes, []}. + +{labels, 5}. + + +{function, x, 1, 2}. + {label,1}. + {line,[{location,"map_field_lists.erl",4}]}. + {func_info,{atom,map_field_lists},{atom,x},1}. + {label,2}. + {test,is_map,{f,1},[{x,0}]}. + {test,has_map_fields,{f,1},{x,0},{list,[{atom,z},{atom,a}]}}. + {move,{atom,ok},{x,0}}. + return. + + +{function, y, 1, 4}. + {label,3}. + {line,[{location,"map_field_lists.erl",7}]}. + {func_info,{atom,map_field_lists},{atom,y},1}. + {label,4}. + {test,is_map,{f,3},[{x,0}]}. + {test,has_map_fields,{f,3},{x,0},{list,[]}}. + {move,{atom,ok},{x,0}}. + return. diff --git a/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S b/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S index 3d76127824..481d55045d 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S +++ b/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S @@ -22,7 +22,8 @@ {label,4}. {allocate_heap,1,6,2}. {move,{x,1},{y,0}}. - {put_string,2,{string,"~p"},{x,0}}. + {put_list,{integer,112},nil,{x,0}}. + {put_list,{integer,126},{x,0},{x,0}}. {put_list,{y,0},nil,{x,1}}. {'%live',2}. {call_ext,2,{extfunc,io,format,2}}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S b/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S index e08a718a39..1a5b417a5f 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S +++ b/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S @@ -26,7 +26,7 @@ {call_ext,1,{extfunc,erlang,erase,1}}. {move,{atom,nested},{x,0}}. {call_ext,1,{extfunc,erlang,erase,1}}. - {bif,self,nofail,[],{x,0}}. + {bif,self,{f,0},[],{x,0}}. {'try',{y,8},{f,13}}. {'try',{y,7},{f,11}}. {'try',{y,6},{f,9}}. @@ -34,7 +34,7 @@ %% Because the following instructions can't possible throw an exception, %% label 7 used to get no state. Now the try_end itself will save the state. {move,{x,0},{y,4}}. - {bif,self,nofail,[],{x,0}}. + {bif,self,{f,0},[],{x,0}}. {'%live',1}. {try_end,{y,5}}. {test,is_eq_exact,{f,15},[{x,0},{y,4}]}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/stack.S b/lib/compiler/test/beam_validator_SUITE_data/stack.S index 244c22a2f9..e4356a9d00 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/stack.S +++ b/lib/compiler/test/beam_validator_SUITE_data/stack.S @@ -1,10 +1,10 @@ {module, stack}. %% version = 0 -{exports, [{a,2},{b,2},{c,2},{d,2},{e,2}]}. +{exports, [{a,2},{b,2},{c,2},{d,2},{e,2},{bad_1,0},{bad_2,0},{foo,0}]}. {attributes, []}. -{labels, 21}. +{labels, 17}. {function, a, 2, 2}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/undef_label.S b/lib/compiler/test/beam_validator_SUITE_data/undef_label.S deleted file mode 100644 index dd29066bf4..0000000000 --- a/lib/compiler/test/beam_validator_SUITE_data/undef_label.S +++ /dev/null @@ -1,22 +0,0 @@ -{module, undef_label}. %% version = 0 - -{exports, []}. - -{attributes, []}. - -{labels, 7}. - - -{function, t, 1, 2}. - {label,1}. - {func_info,{atom,undef_label},{atom,t},1}. - {label,2}. - {test,is_eq_exact,{f,42},[{x,0},{atom,x}]}. - {move,{atom,ok},{x,0}}. - return. - -{function, x, 1, 17}. - {label,3}. 
- {func_info,{atom,undef_label},{atom,x},1}. - {label,4}. - return. diff --git a/lib/compiler/test/beam_validator_SUITE_data/uninit.S b/lib/compiler/test/beam_validator_SUITE_data/uninit.S index 1a45c31411..9a66f4f7d6 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/uninit.S +++ b/lib/compiler/test/beam_validator_SUITE_data/uninit.S @@ -1,9 +1,11 @@ {module, uninit}. %% version = 0 -{exports, []}. +{exports, [{sum_1,2},{sum_2,2},{sum_3,2}]}. {attributes, []}. +{labels, 9}. + {function, sum_1, 2, 2}. {label,1}. {func_info,{atom,t},{atom,sum_1},2}. @@ -11,7 +13,7 @@ {allocate,1,2}. {move,{y,0},{x,0}}. {'%live',1}. - {call,1,{f,10}}. + {call,1,{f,8}}. {bif,'+',{f,0},[{x,0},{y,0}],{x,0}}. {'%live',1}. {deallocate,1}. @@ -23,7 +25,7 @@ {label,4}. {allocate,1,2}. {'%live',1}. - {call,1,{f,10}}. + {call,1,{f,8}}. {bif,'+',{f,0},[{x,0},{y,0}],{x,0}}. {'%live',1}. {deallocate,1}. @@ -35,14 +37,14 @@ {label,6}. {allocate_zero,1,2}. {'%live',1}. - {call,1,{f,10}}. + {call,1,{f,8}}. {bif,'+',{f,0},[{x,0},{y,0}],{x,0}}. {'%live',1}. {deallocate,1}. return. -{function, id, 1, 10}. - {label,9}. +{function, id, 1, 8}. + {label,7}. {func_info,{atom,t},{atom,id},1}. - {label,10}. + {label,8}. return. diff --git a/lib/compiler/test/beam_validator_SUITE_data/xrange.S b/lib/compiler/test/beam_validator_SUITE_data/xrange.S index 3abbdffbc2..c6f20288f7 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/xrange.S +++ b/lib/compiler/test/beam_validator_SUITE_data/xrange.S @@ -1,10 +1,10 @@ {module, xrange}. %% version = 0 -{exports, [{module_info,0},{module_info,1},{prod,2},{sum,2},{sum_prod,3}]}. +{exports, [{sum_1,2},{sum_2,2},{sum_3,2},{sum_4,2}]}. {attributes, []}. -{labels, 8}. +{labels, 9}. {function, sum_1, 2, 2}. diff --git a/lib/compiler/test/guard_SUITE.erl b/lib/compiler/test/guard_SUITE.erl index 8db47ffa40..08279d9408 100644 --- a/lib/compiler/test/guard_SUITE.erl +++ b/lib/compiler/test/guard_SUITE.erl @@ -1802,6 +1802,12 @@ bad_guards(Config) when is_list(Config) -> fc(catch bad_guards_2(#{a=>0,b=>0}, [x])), fc(catch bad_guards_2(not_a_map, [x])), fc(catch bad_guards_2(42, [x])), + + fc(catch bad_guards_3(#{a=>0,b=>0}, [])), + fc(catch bad_guards_3(#{a=>0,b=>0}, [x])), + fc(catch bad_guards_3(not_a_map, [x])), + fc(catch bad_guards_3(42, [x])), + ok. %% beam_bool used to produce GC BIF instructions whose @@ -1813,6 +1819,12 @@ bad_guards_1(X, [_]) when {{X}}, -X -> bad_guards_2(M, [_]) when M#{a := 0, b => 0}, map_size(M) -> ok. +%% beam_type used to produce an GC BIF instruction whose Live operand +%% included uninitialized registers. + +bad_guards_3(M, [_]) when is_map(M) andalso M#{a := 0, b => 0}, length(M) -> + ok. + %% Call this function to turn off constant propagation. id(I) -> I. diff --git a/lib/compiler/test/lc_SUITE.erl b/lib/compiler/test/lc_SUITE.erl index 6c5b34498b..62bada1407 100644 --- a/lib/compiler/test/lc_SUITE.erl +++ b/lib/compiler/test/lc_SUITE.erl @@ -208,6 +208,17 @@ effect(Config) when is_list(Config) -> #{<<1:500>>:=V1,<<2:301>>:=V2} <- L], ok end, id([#{},x,#{<<1:500>>=>42,<<2:301>>=>{a,b,c}}])), + + %% Will trigger the time-trap timeout if not tail-recursive. + case ?MODULE of + lc_SUITE -> + _ = [{'EXIT',{badarg,_}} = + (catch binary_to_atom(<<C/utf8>>, utf8)) || + C <- lists:seq(16#10000, 16#FFFFF)]; + _ -> + ok + end, + ok. 
do_effect(Lc, L) -> diff --git a/lib/compiler/test/map_SUITE.erl b/lib/compiler/test/map_SUITE.erl index bc5ae803c6..cfa8262701 100644 --- a/lib/compiler/test/map_SUITE.erl +++ b/lib/compiler/test/map_SUITE.erl @@ -641,6 +641,7 @@ t_build_and_match_nil(Config) when is_list(Config) -> "treat" => V2, [] => V1 }), #{ [] := V3, [] := V3 } = id(#{ [] => V1, [] => V3 }), + #{ <<1>> := V3, [] := V1 } = id(#{ [] => V1, <<1>> => V3 }), ok. t_build_and_match_structure(Config) when is_list(Config) -> diff --git a/lib/compiler/test/misc_SUITE.erl b/lib/compiler/test/misc_SUITE.erl index d721a86f5a..68a31f14d5 100644 --- a/lib/compiler/test/misc_SUITE.erl +++ b/lib/compiler/test/misc_SUITE.erl @@ -280,6 +280,14 @@ silly_coverage(Config) when is_list(Config) -> {label,2}|non_proper_list]}],99}, expect_error(fun() -> beam_z:module(BeamZInput, []) end), + %% beam_validator. + BeamValInput = {?MODULE,[{foo,0}],[], + [{function,foo,0,2, + [{label,1}, + {func_info,{atom,?MODULE},{atom,foo},0}, + {label,2}|non_proper_list]}],99}, + expect_error(fun() -> beam_validator:module(BeamValInput, []) end), + ok. expect_error(Fun) -> diff --git a/lib/compiler/test/trycatch_SUITE.erl b/lib/compiler/test/trycatch_SUITE.erl index 8ab618bb01..80d93fbfa4 100644 --- a/lib/compiler/test/trycatch_SUITE.erl +++ b/lib/compiler/test/trycatch_SUITE.erl @@ -24,7 +24,8 @@ catch_oops/1,after_oops/1,eclectic/1,rethrow/1, nested_of/1,nested_catch/1,nested_after/1, nested_horrid/1,last_call_optimization/1,bool/1, - plain_catch_coverage/1,andalso_orelse/1,get_in_try/1]). + plain_catch_coverage/1,andalso_orelse/1,get_in_try/1, + hockey/1]). -include_lib("test_server/include/test_server.hrl"). @@ -39,7 +40,8 @@ groups() -> [basic,lean_throw,try_of,try_after,catch_oops, after_oops,eclectic,rethrow,nested_of,nested_catch, nested_after,nested_horrid,last_call_optimization, - bool,plain_catch_coverage,andalso_orelse,get_in_try]}]. + bool,plain_catch_coverage,andalso_orelse,get_in_try, + hockey]}]. init_per_suite(Config) -> @@ -943,3 +945,14 @@ get_valid_line([_|T]=Path, Annotations) -> _:not_found -> get_valid_line(T, Annotations) end. + +hockey(_) -> + {'EXIT',{{badmatch,_},[_|_]}} = (catch hockey()), + ok. + +hockey() -> + %% beam_jump used to generate a call into the try block. + %% beam_validator disapproved. + receive _ -> (b = fun() -> ok end) + + hockey, +x after 0 -> ok end, try (a = fun() -> ok end) + hockey, + + y catch _ -> ok end. diff --git a/lib/compiler/test/warnings_SUITE.erl b/lib/compiler/test/warnings_SUITE.erl index 6663985ad7..dcd3910926 100644 --- a/lib/compiler/test/warnings_SUITE.erl +++ b/lib/compiler/test/warnings_SUITE.erl @@ -580,11 +580,11 @@ maps(Config) when is_list(Config) -> <<" t() -> M = {a,[]}, - {'EXIT',{badarg,_}} = (catch(M#{ a => 1})), + {'EXIT',{badarg,_}} = (catch(M#{ a => 1 })), ok. ">>, [], - {warnings,[{4,v3_kernel,bad_map}]}}, + {warnings,[{4,sys_core_fold,{eval_failure,badarg}}]}}, {bad_map_src2, <<" t() -> @@ -594,7 +594,7 @@ maps(Config) when is_list(Config) -> id(I) -> I. 
">>, [inline], - {warnings,[{4,v3_kernel,bad_map}]}}, + []}, {bad_map_src3, <<" t() -> diff --git a/lib/crypto/c_src/crypto.c b/lib/crypto/c_src/crypto.c index ece29b28e0..aa99f2236e 100644 --- a/lib/crypto/c_src/crypto.c +++ b/lib/crypto/c_src/crypto.c @@ -1688,14 +1688,15 @@ static ERL_NIF_TERM aes_cfb_8_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM int new_ivlen = 0; ERL_NIF_TERM ret; - if (!enif_inspect_iolist_as_binary(env, argv[0], &key) || key.size != 16 + if (!enif_inspect_iolist_as_binary(env, argv[0], &key) + || !(key.size == 16 || key.size == 24 || key.size == 32) || !enif_inspect_binary(env, argv[1], &ivec) || ivec.size != 16 || !enif_inspect_iolist_as_binary(env, argv[2], &text)) { return enif_make_badarg(env); } memcpy(ivec_clone, ivec.data, 16); - AES_set_encrypt_key(key.data, 128, &aes_key); + AES_set_encrypt_key(key.data, key.size * 8, &aes_key); AES_cfb8_encrypt((unsigned char *) text.data, enif_make_new_binary(env, text.size, &ret), text.size, &aes_key, ivec_clone, &new_ivlen, @@ -1714,14 +1715,15 @@ static ERL_NIF_TERM aes_cfb_128_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TE CHECK_OSE_CRYPTO(); - if (!enif_inspect_iolist_as_binary(env, argv[0], &key) || key.size != 16 + if (!enif_inspect_iolist_as_binary(env, argv[0], &key) + || !(key.size == 16 || key.size == 24 || key.size == 32) || !enif_inspect_binary(env, argv[1], &ivec) || ivec.size != 16 || !enif_inspect_iolist_as_binary(env, argv[2], &text)) { return enif_make_badarg(env); } memcpy(ivec_clone, ivec.data, 16); - AES_set_encrypt_key(key.data, 128, &aes_key); + AES_set_encrypt_key(key.data, key.size * 8, &aes_key); AES_cfb128_encrypt((unsigned char *) text.data, enif_make_new_binary(env, text.size, &ret), text.size, &aes_key, ivec_clone, &new_ivlen, diff --git a/lib/crypto/src/crypto.erl b/lib/crypto/src/crypto.erl index aaae9c027d..e8845ed52f 100644 --- a/lib/crypto/src/crypto.erl +++ b/lib/crypto/src/crypto.erl @@ -1399,9 +1399,6 @@ aes_ctr_decrypt(_Key, _IVec, _Cipher) -> ?nif_stub. %% %% AES - in electronic codebook mode (ECB) %% --spec aes_ecb_crypt(iodata(), iodata(), integer()) -> - binary(). - aes_ecb_encrypt(Key, Data) -> aes_ecb_crypt(Key, Data, true). 
diff --git a/lib/crypto/test/crypto_SUITE.erl b/lib/crypto/test/crypto_SUITE.erl index 7fcfc1ffc5..72944eea8e 100644 --- a/lib/crypto/test/crypto_SUITE.erl +++ b/lib/crypto/test/crypto_SUITE.erl @@ -1338,6 +1338,38 @@ aes_cfb8() -> {aes_cfb8, hexstr2bin("2b7e151628aed2a6abf7158809cf4f3c"), hexstr2bin("26751F67A3CBB140B1808CF187A4F4DF"), + hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")}, + {aes_cfb8, + hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"), + hexstr2bin("000102030405060708090a0b0c0d0e0f"), + hexstr2bin("6bc1bee22e409f96e93d7e117393172a")}, + {aes_cfb8, + hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"), + hexstr2bin("cdc80d6fddf18cab34c25909c99a4174"), + hexstr2bin("ae2d8a571e03ac9c9eb76fac45af8e51")}, + {aes_cfb8, + hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"), + hexstr2bin("67ce7f7f81173621961a2b70171d3d7a"), + hexstr2bin("30c81c46a35ce411e5fbc1191a0a52ef")}, + {aes_cfb8, + hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"), + hexstr2bin("2e1e8a1dd59b88b1c8e60fed1efac4c9"), + hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")}, + {aes_cfb8, + hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"), + hexstr2bin("000102030405060708090a0b0c0d0e0f"), + hexstr2bin("6bc1bee22e409f96e93d7e117393172a")}, + {aes_cfb8, + hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"), + hexstr2bin("dc7e84bfda79164b7ecd8486985d3860"), + hexstr2bin("ae2d8a571e03ac9c9eb76fac45af8e51")}, + {aes_cfb8, + hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"), + hexstr2bin("39ffed143b28b1c832113c6331e5407b"), + hexstr2bin("30c81c46a35ce411e5fbc1191a0a52ef")}, + {aes_cfb8, + hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"), + hexstr2bin("df10132415e54b92a13ed0a8267ae2f9"), hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")} ]. 
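The tuples added above are {Type, Key, IVec, PlainText}; the 192- and 256-bit keys look like the standard NIST SP 800-38A AES test keys, matching the relaxed key-size check in crypto.c earlier in this patch. A hedged usage sketch, assuming block_encrypt/4 and block_decrypt/4 are the public entry points in this crypto version (hexstr2bin/1 is the suite's own helper):

    %% AES-256 in CFB-8 mode: before the NIF change, any key that was not
    %% exactly 16 bytes raised badarg; 24- and 32-byte keys now work too,
    %% while the IV is still one 16-byte AES block.
    Key    = hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"),
    IVec   = hexstr2bin("000102030405060708090a0b0c0d0e0f"),
    Plain  = hexstr2bin("6bc1bee22e409f96e93d7e117393172a"),
    Cipher = crypto:block_encrypt(aes_cfb8, Key, IVec, Plain),
    Plain  = crypto:block_decrypt(aes_cfb8, Key, IVec, Cipher).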
@@ -1357,6 +1389,38 @@ aes_cfb128() -> {aes_cfb128, hexstr2bin("2b7e151628aed2a6abf7158809cf4f3c"), hexstr2bin("26751F67A3CBB140B1808CF187A4F4DF"), + hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")}, + {aes_cfb128, + hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"), + hexstr2bin("000102030405060708090a0b0c0d0e0f"), + hexstr2bin("6bc1bee22e409f96e93d7e117393172a")}, + {aes_cfb128, + hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"), + hexstr2bin("cdc80d6fddf18cab34c25909c99a4174"), + hexstr2bin("ae2d8a571e03ac9c9eb76fac45af8e51")}, + {aes_cfb128, + hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"), + hexstr2bin("67ce7f7f81173621961a2b70171d3d7a"), + hexstr2bin("30c81c46a35ce411e5fbc1191a0a52ef")}, + {aes_cfb128, + hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"), + hexstr2bin("2e1e8a1dd59b88b1c8e60fed1efac4c9"), + hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")}, + {aes_cfb128, + hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"), + hexstr2bin("000102030405060708090a0b0c0d0e0f"), + hexstr2bin("6bc1bee22e409f96e93d7e117393172a")}, + {aes_cfb128, + hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"), + hexstr2bin("dc7e84bfda79164b7ecd8486985d3860"), + hexstr2bin("ae2d8a571e03ac9c9eb76fac45af8e51")}, + {aes_cfb128, + hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"), + hexstr2bin("39ffed143b28b1c832113c6331e5407b"), + hexstr2bin("30c81c46a35ce411e5fbc1191a0a52ef")}, + {aes_cfb128, + hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"), + hexstr2bin("df10132415e54b92a13ed0a8267ae2f9"), hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")} ]. diff --git a/lib/dialyzer/src/dialyzer_contracts.erl b/lib/dialyzer/src/dialyzer_contracts.erl index 39a178cb7d..4a1ba9c539 100644 --- a/lib/dialyzer/src/dialyzer_contracts.erl +++ b/lib/dialyzer/src/dialyzer_contracts.erl @@ -2,7 +2,7 @@ %%----------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2007-2014. All Rights Reserved. +%% Copyright Ericsson AB 2007-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -394,21 +394,22 @@ insert_constraints([], Dict) -> Dict. store_tmp_contract(MFA, FileLine, {TypeSpec, Xtra}, SpecDict, RecordsDict) -> %% io:format("contract from form: ~p\n", [TypeSpec]), - TmpContract = contract_from_form(TypeSpec, RecordsDict, FileLine), + {Module, _, _} = MFA, + TmpContract = contract_from_form(TypeSpec, Module, RecordsDict, FileLine), %% io:format("contract: ~p\n", [TmpContract]), dict:store(MFA, {FileLine, TmpContract, Xtra}, SpecDict). -contract_from_form(Forms, RecDict, FileLine) -> - {CFuns, Forms1} = contract_from_form(Forms, RecDict, FileLine, [], []), +contract_from_form(Forms, Module, RecDict, FileLine) -> + {CFuns, Forms1} = contract_from_form(Forms, Module, RecDict, FileLine, [], []), #tmp_contract{contract_funs = CFuns, forms = Forms1}. 
-contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], RecDict, +contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], Module, RecDict, FileLine, TypeAcc, FormAcc) -> TypeFun = fun(ExpTypes, AllRecords) -> - Type = + NewType = try - erl_types:t_from_form(Form, RecDict) + erl_types:t_from_form(Form, ExpTypes, Module, AllRecords) catch throw:{error, Msg} -> {File, Line} = FileLine, @@ -416,61 +417,60 @@ contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], RecDict, Line, Msg]), throw({error, NewMsg}) end, - NewType = erl_types:t_solve_remote(Type, ExpTypes, AllRecords), NewTypeNoVars = erl_types:subst_all_vars_to_any(NewType), {NewTypeNoVars, []} end, NewTypeAcc = [TypeFun | TypeAcc], NewFormAcc = [{Form, []} | FormAcc], - contract_from_form(Left, RecDict, FileLine, NewTypeAcc, NewFormAcc); + contract_from_form(Left, Module, RecDict, FileLine, NewTypeAcc, NewFormAcc); contract_from_form([{type, _L1, bounded_fun, [{type, _L2, 'fun', [_, _]} = Form, Constr]}| Left], - RecDict, FileLine, TypeAcc, FormAcc) -> + Module, RecDict, FileLine, TypeAcc, FormAcc) -> TypeFun = fun(ExpTypes, AllRecords) -> {Constr1, VarDict} = - process_constraints(Constr, RecDict, ExpTypes, AllRecords), - Type = erl_types:t_from_form(Form, RecDict, VarDict), - NewType = erl_types:t_solve_remote(Type, ExpTypes, AllRecords), + process_constraints(Constr, Module, RecDict, ExpTypes, AllRecords), + NewType = erl_types:t_from_form(Form, ExpTypes, Module, AllRecords, + VarDict), NewTypeNoVars = erl_types:subst_all_vars_to_any(NewType), {NewTypeNoVars, Constr1} end, NewTypeAcc = [TypeFun | TypeAcc], NewFormAcc = [{Form, Constr} | FormAcc], - contract_from_form(Left, RecDict, FileLine, NewTypeAcc, NewFormAcc); -contract_from_form([], _RecDict, _FileLine, TypeAcc, FormAcc) -> + contract_from_form(Left, Module, RecDict, FileLine, NewTypeAcc, NewFormAcc); +contract_from_form([], _Module, _RecDict, _FileLine, TypeAcc, FormAcc) -> {lists:reverse(TypeAcc), lists:reverse(FormAcc)}. -process_constraints(Constrs, RecDict, ExpTypes, AllRecords) -> - Init0 = initialize_constraints(Constrs, RecDict, ExpTypes, AllRecords), +process_constraints(Constrs, Module, RecDict, ExpTypes, AllRecords) -> + Init0 = initialize_constraints(Constrs, Module, RecDict, ExpTypes, AllRecords), Init = remove_cycles(Init0), - constraints_fixpoint(Init, RecDict, ExpTypes, AllRecords). + constraints_fixpoint(Init, Module, RecDict, ExpTypes, AllRecords). -initialize_constraints(Constrs, RecDict, ExpTypes, AllRecords) -> - initialize_constraints(Constrs, RecDict, ExpTypes, AllRecords, []). +initialize_constraints(Constrs, Module, RecDict, ExpTypes, AllRecords) -> + initialize_constraints(Constrs, Module, RecDict, ExpTypes, AllRecords, []). 
-initialize_constraints([], _RecDict, _ExpTypes, _AllRecords, Acc) -> +initialize_constraints([], _Module, _RecDict, _ExpTypes, _AllRecords, Acc) -> Acc; -initialize_constraints([Constr|Rest], RecDict, ExpTypes, AllRecords, Acc) -> +initialize_constraints([Constr|Rest], Module, RecDict, ExpTypes, AllRecords, Acc) -> case Constr of {type, _, constraint, [{atom, _, is_subtype}, [Type1, Type2]]} -> - T1 = final_form(Type1, RecDict, ExpTypes, AllRecords, dict:new()), + T1 = final_form(Type1, Module, ExpTypes, AllRecords, dict:new()), Entry = {T1, Type2}, - initialize_constraints(Rest, RecDict, ExpTypes, AllRecords, [Entry|Acc]); + initialize_constraints(Rest, Module, RecDict, ExpTypes, AllRecords, [Entry|Acc]); {type, _, constraint, [{atom,_,Name}, List]} -> N = length(List), throw({error, io_lib:format("Unsupported type guard ~w/~w\n", [Name, N])}) end. -constraints_fixpoint(Constrs, RecDict, ExpTypes, AllRecords) -> +constraints_fixpoint(Constrs, Module, RecDict, ExpTypes, AllRecords) -> VarDict = - constraints_to_dict(Constrs, RecDict, ExpTypes, AllRecords, dict:new()), - constraints_fixpoint(VarDict, Constrs, RecDict, ExpTypes, AllRecords). + constraints_to_dict(Constrs, Module, RecDict, ExpTypes, AllRecords, dict:new()), + constraints_fixpoint(VarDict, Module, Constrs, RecDict, ExpTypes, AllRecords). -constraints_fixpoint(OldVarDict, Constrs, RecDict, ExpTypes, AllRecords) -> +constraints_fixpoint(OldVarDict, Module, Constrs, RecDict, ExpTypes, AllRecords) -> NewVarDict = - constraints_to_dict(Constrs, RecDict, ExpTypes, AllRecords, OldVarDict), + constraints_to_dict(Constrs, Module, RecDict, ExpTypes, AllRecords, OldVarDict), case NewVarDict of OldVarDict -> DictFold = @@ -480,25 +480,24 @@ constraints_fixpoint(OldVarDict, Constrs, RecDict, ExpTypes, AllRecords) -> FinalConstrs = dict:fold(DictFold, [], NewVarDict), {FinalConstrs, NewVarDict}; _Other -> - constraints_fixpoint(NewVarDict, Constrs, RecDict, ExpTypes, AllRecords) + constraints_fixpoint(NewVarDict, Module, Constrs, RecDict, ExpTypes, AllRecords) end. -final_form(Form, RecDict, ExpTypes, AllRecords, VarDict) -> - T1 = erl_types:t_from_form(Form, RecDict, VarDict), - erl_types:t_solve_remote(T1, ExpTypes, AllRecords). +final_form(Form, Module, ExpTypes, AllRecords, VarDict) -> + erl_types:t_from_form(Form, ExpTypes, Module, AllRecords, VarDict). -constraints_to_dict(Constrs, RecDict, ExpTypes, AllRecords, VarDict) -> +constraints_to_dict(Constrs, Module, RecDict, ExpTypes, AllRecords, VarDict) -> Subtypes = - constraints_to_subs(Constrs, RecDict, ExpTypes, AllRecords, VarDict, []), + constraints_to_subs(Constrs, Module, RecDict, ExpTypes, AllRecords, VarDict, []), insert_constraints(Subtypes, dict:new()). -constraints_to_subs([], _RecDict, _ExpTypes, _AllRecords, _VarDict, Acc) -> +constraints_to_subs([], _Module, _RecDict, _ExpTypes, _AllRecords, _VarDict, Acc) -> Acc; -constraints_to_subs([C|Rest], RecDict, ExpTypes, AllRecords, VarDict, Acc) -> +constraints_to_subs([C|Rest], Module, RecDict, ExpTypes, AllRecords, VarDict, Acc) -> {T1, Form2} = C, - T2 = final_form(Form2, RecDict, ExpTypes, AllRecords, VarDict), + T2 = final_form(Form2, Module, ExpTypes, AllRecords, VarDict), NewAcc = [{subtype, T1, T2}|Acc], - constraints_to_subs(Rest, RecDict, ExpTypes, AllRecords, VarDict, NewAcc). + constraints_to_subs(Rest, Module, RecDict, ExpTypes, AllRecords, VarDict, NewAcc). %% Replaces variables with '_' when necessary to break up cycles among %% the constraints. 
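The common thread in these dialyzer_contracts changes is that spec forms are now converted with their defining module in hand, so erl_types:t_from_form/4,5 can resolve remote and local types directly and the separate t_solve_remote pass disappears. A minimal sketch condensing the call shape used in contract_from_form/6 and final_form/5 above; ExpTypes and AllRecords stand for the exported-types set and records table already threaded through this code, and the helper itself is hypothetical.

    %% Turn one spec form from Module into an erl_type() with every type
    %% variable widened to any(), as the bounded_fun branch above does.
    spec_form_to_type(Form, Module, ExpTypes, AllRecords, VarDict) ->
        Type = erl_types:t_from_form(Form, ExpTypes, Module, AllRecords, VarDict),
        erl_types:subst_all_vars_to_any(Type).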
@@ -754,8 +753,7 @@ is_remote_types_related(Contract, CSig, Sig, RecDict) -> end. t_from_forms_without_remote([{FType, []}], RecDict) -> - Type0 = erl_types:t_from_form(FType, RecDict), - Type1 = erl_types:subst_all_remote(Type0, erl_types:t_none()), + Type1 = erl_types:t_from_form_without_remote(FType, RecDict), {ok, erl_types:subst_all_vars_to_any(Type1)}; t_from_forms_without_remote([{_FType, _Constrs}], _RecDict) -> %% 'When' constraints diff --git a/lib/dialyzer/src/dialyzer_dataflow.erl b/lib/dialyzer/src/dialyzer_dataflow.erl index ea1b09fcdd..336b4641d4 100644 --- a/lib/dialyzer/src/dialyzer_dataflow.erl +++ b/lib/dialyzer/src/dialyzer_dataflow.erl @@ -2,7 +2,7 @@ %%-------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2006-2014. All Rights Reserved. +%% Copyright Ericsson AB 2006-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -2977,8 +2977,10 @@ state__lookup_name(Fun, #state{callgraph = Callgraph}) -> state__lookup_record(Tag, Arity, #state{records = Records}) -> case erl_types:lookup_record(Tag, Arity, Records) of {ok, Fields} -> - {ok, t_tuple([t_atom(Tag)| - [FieldType || {_FieldName, FieldType} <- Fields]])}; + RecType = + t_tuple([t_atom(Tag)| + [FieldType || {_FieldName, _Abstr, FieldType} <- Fields]]), + {ok, RecType}; error -> error end. diff --git a/lib/dialyzer/src/dialyzer_typesig.erl b/lib/dialyzer/src/dialyzer_typesig.erl index 217d238712..1737bfd3a9 100644 --- a/lib/dialyzer/src/dialyzer_typesig.erl +++ b/lib/dialyzer/src/dialyzer_typesig.erl @@ -2,7 +2,7 @@ %%----------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2006-2014. All Rights Reserved. +%% Copyright Ericsson AB 2006-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -3264,7 +3264,7 @@ lookup_record(Records, Tag, Arity) -> {ok, Fields} -> RecType = t_tuple([t_from_term(Tag)| - [FieldType || {_FieldName, FieldType} <- Fields]]), + [FieldType || {_FieldName, _Abstr, FieldType} <- Fields]]), {ok, RecType}; error -> error diff --git a/lib/dialyzer/src/dialyzer_utils.erl b/lib/dialyzer/src/dialyzer_utils.erl index 01ade00664..1cc9528fed 100644 --- a/lib/dialyzer/src/dialyzer_utils.erl +++ b/lib/dialyzer/src/dialyzer_utils.erl @@ -2,7 +2,7 @@ %%----------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2006-2014. All Rights Reserved. +%% Copyright Ericsson AB 2006-2015. All Rights Reserved. 
%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -63,13 +63,13 @@ print_types(RecDict) -> print_types1([], _) -> ok; -print_types1([{type, _Name} = Key|T], RecDict) -> - {ok, {_Mod, Form, _Args}} = dict:find(Key, RecDict), - io:format("\n~w: ~w\n", [Key, erl_types:t_from_form(Form, RecDict)]), +print_types1([{type, _Name, _NArgs} = Key|T], RecDict) -> + {ok, {{_Mod, _Form, _Args}, Type}} = dict:find(Key, RecDict), + io:format("\n~w: ~w\n", [Key, Type]), print_types1(T, RecDict); -print_types1([{opaque, _Name} = Key|T], RecDict) -> - {ok, {_Mod, Form, _Args}} = dict:find(Key, RecDict), - io:format("\n~w: ~w\n", [Key, erl_types:t_from_form(Form, RecDict)]), +print_types1([{opaque, _Name, _NArgs} = Key|T], RecDict) -> + {ok, {{_Mod, _Form, _Args}, Type}} = dict:find(Key, RecDict), + io:format("\n~w: ~w\n", [Key, Type]), print_types1(T, RecDict); print_types1([{record, _Name} = Key|T], RecDict) -> {ok, [{_Arity, _Fields} = AF]} = dict:find(Key, RecDict), @@ -221,28 +221,29 @@ get_record_and_type_info([{attribute, _, type, {{record, Name}, Fields0, []}} get_record_and_type_info([{attribute, _, Attr, {Name, TypeForm}}|Left], Module, Records, RecDict) when Attr =:= 'type'; Attr =:= 'opaque' -> - try - NewRecDict = add_new_type(Attr, Name, TypeForm, [], Module, RecDict), - get_record_and_type_info(Left, Module, Records, NewRecDict) + try add_new_type(Attr, Name, TypeForm, [], Module, RecDict) of + NewRecDict -> + get_record_and_type_info(Left, Module, Records, NewRecDict) catch throw:{error, _} = Error -> Error end; get_record_and_type_info([{attribute, _, Attr, {Name, TypeForm, Args}}|Left], Module, Records, RecDict) when Attr =:= 'type'; Attr =:= 'opaque' -> - try - NewRecDict = add_new_type(Attr, Name, TypeForm, Args, Module, RecDict), - get_record_and_type_info(Left, Module, Records, NewRecDict) + try add_new_type(Attr, Name, TypeForm, Args, Module, RecDict) of + NewRecDict -> + get_record_and_type_info(Left, Module, Records, NewRecDict) catch throw:{error, _} = Error -> Error end; get_record_and_type_info([_Other|Left], Module, Records, RecDict) -> get_record_and_type_info(Left, Module, Records, RecDict); get_record_and_type_info([], _Module, Records, RecDict) -> - case type_record_fields(lists:reverse(Records), RecDict) of - {ok, _NewRecDict} = Ok -> - ?debug(_NewRecDict), - Ok; + case + check_type_of_record_fields(lists:reverse(Records), RecDict) + of + ok -> + {ok, RecDict}; {error, Name, Error} -> {error, flat_format(" Error while parsing #~w{}: ~s\n", [Name, Error])} end. @@ -254,20 +255,21 @@ add_new_type(TypeOrOpaque, Name, TypeForm, ArgForms, Module, RecDict) -> Msg = flat_format("Type ~s/~w already defined\n", [Name, Arity]), throw({error, Msg}); false -> - ArgTypes = [erl_types:t_from_form(X) || X <- ArgForms], - case lists:all(fun erl_types:t_is_var/1, ArgTypes) of - true -> - ArgNames = [erl_types:t_var_name(X) || X <- ArgTypes], + try erl_types:t_var_names(ArgForms) of + ArgNames -> dict:store({TypeOrOpaque, Name, Arity}, - {Module, TypeForm, ArgNames}, RecDict); - false -> + {{Module, TypeForm, ArgNames}, + erl_types:t_any()}, RecDict) + catch + _:_ -> throw({error, flat_format("Type declaration for ~w does not " "have variables as parameters", [Name])}) end end. get_record_fields(Fields, RecDict) -> - get_record_fields(Fields, RecDict, []). + Fs = get_record_fields(Fields, RecDict, []), + {ok, [{Name, Form, erl_types:t_any()} || {Name, Form} <- Fs]}. 
get_record_fields([{typed_record_field, OrdRecField, TypeForm}|Left], RecDict, Acc) -> @@ -276,7 +278,7 @@ get_record_fields([{typed_record_field, OrdRecField, TypeForm}|Left], {record_field, _Line, Name0} -> erl_parse:normalise(Name0); {record_field, _Line, Name0, _Init} -> erl_parse:normalise(Name0) end, - get_record_fields(Left, RecDict, [{Name, TypeForm}|Acc]); + get_record_fields(Left, RecDict, [{Name, TypeForm}|Acc]); get_record_fields([{record_field, _Line, Name}|Left], RecDict, Acc) -> NewAcc = [{erl_parse:normalise(Name), {var, -1, '_'}}|Acc], get_record_fields(Left, RecDict, NewAcc); @@ -284,22 +286,20 @@ get_record_fields([{record_field, _Line, Name, _Init}|Left], RecDict, Acc) -> NewAcc = [{erl_parse:normalise(Name), {var, -1, '_'}}|Acc], get_record_fields(Left, RecDict, NewAcc); get_record_fields([], _RecDict, Acc) -> - {ok, lists:reverse(Acc)}. + lists:reverse(Acc). -type_record_fields([], RecDict) -> - {ok, RecDict}; -type_record_fields([RecKey|Recs], RecDict) -> - {ok, [{Arity, Fields}]} = dict:find(RecKey, RecDict), +%% Just check the local types. process_record_remote_types will add +%% the types later. +check_type_of_record_fields([], _RecDict) -> + ok; +check_type_of_record_fields([RecKey|Recs], RecDict) -> + {ok, [{_Arity, Fields}]} = dict:find(RecKey, RecDict), try - TypedFields = - [{FieldName, erl_types:t_from_form(FieldTypeForm, RecDict)} - || {FieldName, FieldTypeForm} <- Fields], - RecDict1 = dict:store(RecKey, [{Arity, TypedFields}], RecDict), - Fun = fun(OldOrdDict) -> - orddict:store(Arity, TypedFields, OldOrdDict) - end, - RecDict2 = dict:update(RecKey, Fun, RecDict1), - type_record_fields(Recs, RecDict2) + [erl_types:t_from_form_without_remote(FieldTypeForm, RecDict) + || {_FieldName, FieldTypeForm, _} <- Fields] + of + L when is_list(L) -> + check_type_of_record_fields(Recs, RecDict) catch throw:{error, Error} -> {record, Name} = RecKey, @@ -308,30 +308,44 @@ type_record_fields([RecKey|Recs], RecDict) -> -spec process_record_remote_types(codeserver()) -> codeserver(). +%% The field types are cached. Used during analysis when handling records. process_record_remote_types(CServer) -> TempRecords = dialyzer_codeserver:get_temp_records(CServer), TempExpTypes = dialyzer_codeserver:get_temp_exported_types(CServer), - RecordFun = - fun(Key, Value) -> - case Key of - {record, _Name} -> - FieldFun = - fun(_Arity, Fields) -> - [{Name, erl_types:t_solve_remote(Field, TempExpTypes, - TempRecords)} - || {Name, Field} <- Fields] - end, - orddict:map(FieldFun, Value); - _Other -> Value - end - end, ModuleFun = - fun(_Module, Record) -> + fun(Module, Record) -> + RecordFun = + fun(Key, Value) -> + case Key of + {record, _Name} -> + FieldFun = + fun(_Arity, Fields) -> + [{Name, Field, + erl_types:t_from_form(Field, + TempExpTypes, + Module, + TempRecords)} + || {Name, Field, _} <- Fields] + end, + orddict:map(FieldFun, Value); + {opaque, _, _} -> + {{_Module, Form, _ArgNames}=F, _Type} = Value, + Type = erl_types:t_from_form(Form, TempExpTypes, Module, + TempRecords), + {F, Type}; + _Other -> Value + end + end, dict:map(RecordFun, Record) end, - NewRecords = dict:map(ModuleFun, TempRecords), - CServer1 = dialyzer_codeserver:finalize_records(NewRecords, CServer), - dialyzer_codeserver:finalize_exported_types(TempExpTypes, CServer1). 
+ try dict:map(ModuleFun, TempRecords) of + NewRecords -> + CServer1 = dialyzer_codeserver:finalize_records(NewRecords, CServer), + dialyzer_codeserver:finalize_exported_types(TempExpTypes, CServer1) + catch + throw:{error, _RecName, _Error} = Error-> + Error + end. -spec merge_records(dict:dict(), dict:dict()) -> dict:dict(). diff --git a/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes b/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes index fbdd182358..a9fbfb6068 100644 --- a/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes +++ b/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes @@ -6,23 +6,27 @@ contracts_with_subtypes.erl:135: The call contracts_with_subtypes:rec2({'a','b'} contracts_with_subtypes.erl:136: The call contracts_with_subtypes:rec2({'b','a'}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab()) contracts_with_subtypes.erl:137: The call contracts_with_subtypes:rec2({'a',{'b','a'}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab()) contracts_with_subtypes.erl:138: The call contracts_with_subtypes:rec2({'b',{'a','b'}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab()) -contracts_with_subtypes.erl:171: The pattern 1 can never match the type string() -contracts_with_subtypes.erl:174: The pattern 'alpha' can never match the type {'ok',_} | {'ok',_,string()} -contracts_with_subtypes.erl:176: The pattern 42 can never match the type {'ok',_} | {'ok',_,string()} -contracts_with_subtypes.erl:192: The pattern 'alpha' can never match the type {'ok',_} -contracts_with_subtypes.erl:194: The pattern 42 can never match the type {'ok',_} -contracts_with_subtypes.erl:212: The pattern 'alpha' can never match the type {'ok',_} -contracts_with_subtypes.erl:214: The pattern 42 can never match the type {'ok',_} -contracts_with_subtypes.erl:231: The pattern 1 can never match the type string() -contracts_with_subtypes.erl:234: The pattern {'ok', _} can never match the type {'ok',_,string()} -contracts_with_subtypes.erl:235: The pattern 'alpha' can never match the type {'ok',_,string()} -contracts_with_subtypes.erl:236: The pattern {'ok', 42} can never match the type {'ok',_,string()} -contracts_with_subtypes.erl:237: The pattern 42 can never match the type {'ok',_,string()} +contracts_with_subtypes.erl:139: The call contracts_with_subtypes:rec2({'a',{'b',{'a','b'}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab()) +contracts_with_subtypes.erl:140: The call contracts_with_subtypes:rec2({'b',{'a',{'b','a'}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab()) +contracts_with_subtypes.erl:141: The call contracts_with_subtypes:rec2({'a',{'b',{'a',{'b','a'}}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab()) +contracts_with_subtypes.erl:142: The call contracts_with_subtypes:rec2({'b',{'a',{'b',{'a','b'}}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab()) +contracts_with_subtypes.erl:175: The pattern 1 can never match the type string() +contracts_with_subtypes.erl:178: The pattern 'alpha' can never match the type {'ok',_} | {'ok',_,string()} +contracts_with_subtypes.erl:180: The pattern 42 can never match the type {'ok',_} | {'ok',_,string()} +contracts_with_subtypes.erl:196: The pattern 'alpha' can never match the type {'ok',_} +contracts_with_subtypes.erl:198: The pattern 42 can never match the type {'ok',_} +contracts_with_subtypes.erl:216: The pattern 'alpha' can never match the type {'ok',_} +contracts_with_subtypes.erl:218: The pattern 42 can never match the 
type {'ok',_} +contracts_with_subtypes.erl:235: The pattern 1 can never match the type string() +contracts_with_subtypes.erl:238: The pattern {'ok', _} can never match the type {'ok',_,string()} +contracts_with_subtypes.erl:239: The pattern 'alpha' can never match the type {'ok',_,string()} contracts_with_subtypes.erl:23: Invalid type specification for function contracts_with_subtypes:extract2/0. The success typing is () -> 'something' -contracts_with_subtypes.erl:263: Function flat_ets_new_t/0 has no local return -contracts_with_subtypes.erl:264: The call contracts_with_subtypes:flat_ets_new(12,[]) breaks the contract (Name,Options) -> atom() when is_subtype(Name,atom()), is_subtype(Options,[Option]), is_subtype(Option,'set' | 'ordered_set' | 'bag' | 'duplicate_bag' | 'public' | 'protected' | 'private' | 'named_table' | {'keypos',integer()} | {'heir',pid(),term()} | {'heir','none'} | {'write_concurrency',boolean()} | {'read_concurrency',boolean()} | 'compressed') -contracts_with_subtypes.erl:290: Function factored_ets_new_t/0 has no local return -contracts_with_subtypes.erl:291: The call contracts_with_subtypes:factored_ets_new(12,[]) breaks the contract (Name,Options) -> atom() when is_subtype(Name,atom()), is_subtype(Options,[Option]), is_subtype(Option,Type | Access | 'named_table' | {'keypos',Pos} | {'heir',Pid::pid(),HeirData} | {'heir','none'} | Tweaks), is_subtype(Type,type()), is_subtype(Access,access()), is_subtype(Tweaks,{'write_concurrency',boolean()} | {'read_concurrency',boolean()} | 'compressed'), is_subtype(Pos,pos_integer()), is_subtype(HeirData,term()) +contracts_with_subtypes.erl:240: The pattern {'ok', 42} can never match the type {'ok',_,string()} +contracts_with_subtypes.erl:241: The pattern 42 can never match the type {'ok',_,string()} +contracts_with_subtypes.erl:267: Function flat_ets_new_t/0 has no local return +contracts_with_subtypes.erl:268: The call contracts_with_subtypes:flat_ets_new(12,[]) breaks the contract (Name,Options) -> atom() when is_subtype(Name,atom()), is_subtype(Options,[Option]), is_subtype(Option,'set' | 'ordered_set' | 'bag' | 'duplicate_bag' | 'public' | 'protected' | 'private' | 'named_table' | {'keypos',integer()} | {'heir',pid(),term()} | {'heir','none'} | {'write_concurrency',boolean()} | {'read_concurrency',boolean()} | 'compressed') +contracts_with_subtypes.erl:294: Function factored_ets_new_t/0 has no local return +contracts_with_subtypes.erl:295: The call contracts_with_subtypes:factored_ets_new(12,[]) breaks the contract (Name,Options) -> atom() when is_subtype(Name,atom()), is_subtype(Options,[Option]), is_subtype(Option,Type | Access | 'named_table' | {'keypos',Pos} | {'heir',Pid::pid(),HeirData} | {'heir','none'} | Tweaks), is_subtype(Type,type()), is_subtype(Access,access()), is_subtype(Tweaks,{'write_concurrency',boolean()} | {'read_concurrency',boolean()} | 'compressed'), is_subtype(Pos,pos_integer()), is_subtype(HeirData,term()) contracts_with_subtypes.erl:77: The call contracts_with_subtypes:foo1(5) breaks the contract (Arg1) -> Res when is_subtype(Arg1,atom()), is_subtype(Res,atom()) contracts_with_subtypes.erl:78: The call contracts_with_subtypes:foo2(5) breaks the contract (Arg1) -> Res when is_subtype(Arg1,Arg2), is_subtype(Arg2,atom()), is_subtype(Res,atom()) contracts_with_subtypes.erl:79: The call contracts_with_subtypes:foo3(5) breaks the contract (Arg1) -> Res when is_subtype(Arg2,atom()), is_subtype(Arg1,Arg2), is_subtype(Res,atom()) diff --git a/lib/dialyzer/test/small_SUITE_data/src/big_external_type.erl 
b/lib/dialyzer/test/small_SUITE_data/src/big_external_type.erl new file mode 100644 index 0000000000..91a157b17f --- /dev/null +++ b/lib/dialyzer/test/small_SUITE_data/src/big_external_type.erl @@ -0,0 +1,528 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2001-2015. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +%%% A stripped version of erl_parse.yrl. +%%% +%%% A type for the abstract format with *external* types has been added. +%%% The type of the abstract format is not up-to-date, but it does not +%%% matter since the purpose of the type is to stress the conversion +%%% of type forms to erl_type(). + +-module(big_external_type). + +-export([parse_form/1,parse_exprs/1,parse_term/1]). +-export([normalise/1,tokens/1,tokens/2]). +-export([inop_prec/1,preop_prec/1,func_prec/0,max_prec/0]). + +-export_type([abstract_clause/0, abstract_expr/0, abstract_form/0, + error_info/0]). + +%% Start of Abstract Format + +-type line() :: erl_scan:line(). + +-export_type([af_record_index/0, af_record_field/1, af_record_name/0, + af_field_name/0, af_function_decl/0]). + +-export_type([af_module/0, af_export/0, af_import/0, af_fa_list/0, + af_compile/0, af_file/0, af_record_decl/0, + af_field_decl/0, af_wild_attribute/0, + af_record_update/1, af_catch/0, af_local_call/0, + af_remote_call/0, af_args/0, af_local_function/0, + af_remote_function/0, af_list_comprehension/0, + af_binary_comprehension/0, af_template/0, + af_qualifier_seq/0, af_qualifier/0, af_generator/0, + af_filter/0, af_block/0, af_if/0, af_case/0, af_try/0, + af_clause_seq/0, af_catch_clause_seq/0, af_receive/0, + af_local_fun/0, af_remote_fun/0, af_fun/0, af_query/0, + af_query_access/0, af_clause/0, + af_catch_clause/0, af_catch_pattern/0, af_catch_class/0, + af_body/0, af_guard_seq/0, af_guard/0, af_guard_test/0, + af_record_access/1, af_guard_call/0, + af_remote_guard_call/0, af_pattern/0, af_literal/0, + af_atom/0, af_lit_atom/1, af_integer/0, af_float/0, + af_string/0, af_match/1, af_variable/0, + af_anon_variable/0, af_tuple/1, af_nil/0, af_cons/1, + af_bin/1, af_binelement/1, af_binelement_size/0, + af_binary_op/1, af_binop/0, af_unary_op/1, af_unop/0]). + +-type abstract_form() :: ?MODULE:af_module() + | ?MODULE:af_export() + | ?MODULE:af_import() + | ?MODULE:af_compile() + | ?MODULE:af_file() + | ?MODULE:af_record_decl() + | ?MODULE:af_wild_attribute() + | ?MODULE:af_function_decl(). + +-type af_module() :: {attribute, line(), module, module()}. + +-type af_export() :: {attribute, line(), export, ?MODULE:af_fa_list()}. + +-type af_import() :: {attribute, line(), import, ?MODULE:af_fa_list()}. + +-type af_fa_list() :: [{function(), arity()}]. + +-type af_compile() :: {attribute, line(), compile, any()}. + +-type af_file() :: {attribute, line(), file, {string(), line()}}. + +-type af_record_decl() :: + {attribute, line(), record, ?MODULE:af_record_name(), [?MODULE:af_field_decl()]}. 
+ +-type af_field_decl() :: {record_field, line(), ?MODULE:af_atom()} + | {record_field, line(), ?MODULE:af_atom(), ?MODULE:abstract_expr()}. + +%% Types and specs, among other things... +-type af_wild_attribute() :: {attribute, line(), ?MODULE:af_atom(), any()}. + +-type af_function_decl() :: + {function, line(), function(), arity(), ?MODULE:af_clause_seq()}. + +-type abstract_expr() :: ?MODULE:af_literal() + | ?MODULE:af_match(?MODULE:abstract_expr()) + | ?MODULE:af_variable() + | ?MODULE:af_tuple(?MODULE:abstract_expr()) + | ?MODULE:af_nil() + | ?MODULE:af_cons(?MODULE:abstract_expr()) + | ?MODULE:af_bin(?MODULE:abstract_expr()) + | ?MODULE:af_binary_op(?MODULE:abstract_expr()) + | ?MODULE:af_unary_op(?MODULE:abstract_expr()) + | ?MODULE:af_record_access(?MODULE:abstract_expr()) + | ?MODULE:af_record_update(?MODULE:abstract_expr()) + | ?MODULE:af_record_index() + | ?MODULE:af_record_field(?MODULE:abstract_expr()) + | ?MODULE:af_catch() + | ?MODULE:af_local_call() + | ?MODULE:af_remote_call() + | ?MODULE:af_list_comprehension() + | ?MODULE:af_binary_comprehension() + | ?MODULE:af_block() + | ?MODULE:af_if() + | ?MODULE:af_case() + | ?MODULE:af_try() + | ?MODULE:af_receive() + | ?MODULE:af_local_fun() + | ?MODULE:af_remote_fun() + | ?MODULE:af_fun() + | ?MODULE:af_query() + | ?MODULE:af_query_access(). + +-type af_record_update(T) :: {record, + line(), + ?MODULE:abstract_expr(), + ?MODULE:af_record_name(), + [?MODULE:af_record_field(T)]}. + +-type af_catch() :: {'catch', line(), ?MODULE:abstract_expr()}. + +-type af_local_call() :: {call, line(), ?MODULE:af_local_function(), ?MODULE:af_args()}. + +-type af_remote_call() :: {call, line(), ?MODULE:af_remote_function(), ?MODULE:af_args()}. + +-type af_args() :: [?MODULE:abstract_expr()]. + +-type af_local_function() :: ?MODULE:abstract_expr(). + +-type af_remote_function() :: + {remote, line(), ?MODULE:abstract_expr(), ?MODULE:abstract_expr()}. + +-type af_list_comprehension() :: + {lc, line(), ?MODULE:af_template(), ?MODULE:af_qualifier_seq()}. + +-type af_binary_comprehension() :: + {bc, line(), ?MODULE:af_template(), ?MODULE:af_qualifier_seq()}. + +-type af_template() :: ?MODULE:abstract_expr(). + +-type af_qualifier_seq() :: [?MODULE:af_qualifier()]. + +-type af_qualifier() :: ?MODULE:af_generator() | ?MODULE:af_filter(). + +-type af_generator() :: {generate, line(), ?MODULE:af_pattern(), ?MODULE:abstract_expr()} + | {b_generate, line(), ?MODULE:af_pattern(), ?MODULE:abstract_expr()}. + +-type af_filter() :: ?MODULE:abstract_expr(). + +-type af_block() :: {block, line(), ?MODULE:af_body()}. + +-type af_if() :: {'if', line(), ?MODULE:af_clause_seq()}. + +-type af_case() :: {'case', line(), ?MODULE:abstract_expr(), ?MODULE:af_clause_seq()}. + +-type af_try() :: {'try', + line(), + ?MODULE:af_body(), + ?MODULE:af_clause_seq(), + ?MODULE:af_catch_clause_seq(), + ?MODULE:af_body()}. + +-type af_clause_seq() :: [?MODULE:af_clause(), ...]. + +-type af_catch_clause_seq() :: [?MODULE:af_clause(), ...]. + +-type af_receive() :: + {'receive', line(), ?MODULE:af_clause_seq()} + | {'receive', line(), ?MODULE:af_clause_seq(), ?MODULE:abstract_expr(), ?MODULE:af_body()}. + +-type af_local_fun() :: {'fun', line(), {function, function(), arity()}}. + +-type af_remote_fun() :: + {'fun', line(), {function, module(), function(), arity()}} + | {'fun', line(), {function, ?MODULE:af_atom(), ?MODULE:af_atom(), ?MODULE:af_integer()}}. + +-type af_fun() :: {'fun', line(), {clauses, ?MODULE:af_clause_seq()}}. 
+ +-type af_query() :: {'query', line(), ?MODULE:af_list_comprehension()}. + +-type af_query_access() :: + {record_field, line(), ?MODULE:abstract_expr(), ?MODULE:af_field_name()}. + +-type abstract_clause() :: ?MODULE:af_clause() | ?MODULE:af_catch_clause(). + +-type af_clause() :: + {clause, line(), [?MODULE:af_pattern()], ?MODULE:af_guard_seq(), ?MODULE:af_body()}. + +-type af_catch_clause() :: + {clause, line(), [?MODULE:af_catch_pattern()], ?MODULE:af_guard_seq(), ?MODULE:af_body()}. + +-type af_catch_pattern() :: + {?MODULE:af_catch_class(), ?MODULE:af_pattern(), ?MODULE:af_anon_variable()}. + +-type af_catch_class() :: + ?MODULE:af_variable() + | ?MODULE:af_lit_atom(throw) | ?MODULE:af_lit_atom(error) | ?MODULE:af_lit_atom(exit). + +-type af_body() :: [?MODULE:abstract_expr(), ...]. + +-type af_guard_seq() :: [?MODULE:af_guard()]. + +-type af_guard() :: [?MODULE:af_guard_test(), ...]. + +-type af_guard_test() :: ?MODULE:af_literal() + | ?MODULE:af_variable() + | ?MODULE:af_tuple(?MODULE:af_guard_test()) + | ?MODULE:af_nil() + | ?MODULE:af_cons(?MODULE:af_guard_test()) + | ?MODULE:af_bin(?MODULE:af_guard_test()) + | ?MODULE:af_binary_op(?MODULE:af_guard_test()) + | ?MODULE:af_unary_op(?MODULE:af_guard_test()) + | ?MODULE:af_record_access(?MODULE:af_guard_test()) + | ?MODULE:af_record_index() + | ?MODULE:af_record_field(?MODULE:af_guard_test()) + | ?MODULE:af_guard_call() + | ?MODULE:af_remote_guard_call(). + +-type af_record_access(T) :: + {record, line(), ?MODULE:af_record_name(), [?MODULE:af_record_field(T)]}. + +-type af_guard_call() :: {call, line(), function(), [?MODULE:af_guard_test()]}. + +-type af_remote_guard_call() :: + {call, line(), atom(), ?MODULE:af_lit_atom(erlang), [?MODULE:af_guard_test()]}. + +-type af_pattern() :: ?MODULE:af_literal() + | ?MODULE:af_match(?MODULE:af_pattern()) + | ?MODULE:af_variable() + | ?MODULE:af_anon_variable() + | ?MODULE:af_tuple(?MODULE:af_pattern()) + | ?MODULE:af_nil() + | ?MODULE:af_cons(?MODULE:af_pattern()) + | ?MODULE:af_bin(?MODULE:af_pattern()) + | ?MODULE:af_binary_op(?MODULE:af_pattern()) + | ?MODULE:af_unary_op(?MODULE:af_pattern()) + | ?MODULE:af_record_index() + | ?MODULE:af_record_field(?MODULE:af_pattern()). + +-type af_literal() :: ?MODULE:af_atom() | ?MODULE:af_integer() | ?MODULE:af_float() | ?MODULE:af_string(). + +-type af_atom() :: ?MODULE:af_lit_atom(atom()). + +-type af_lit_atom(A) :: {atom, line(), A}. + +-type af_integer() :: {integer, line(), non_neg_integer()}. + +-type af_float() :: {float, line(), float()}. + +-type af_string() :: {string, line(), [byte()]}. + +-type af_match(T) :: {match, line(), T, T}. + +-type af_variable() :: {var, line(), atom()}. + +-type af_anon_variable() :: {var, line(), '_'}. + +-type af_tuple(T) :: {tuple, line(), [T]}. + +-type af_nil() :: {nil, line()}. + +-type af_cons(T) :: {cons, line, T, T}. + +-type af_bin(T) :: {bin, line(), [?MODULE:af_binelement(T)]}. + +-type af_binelement(T) :: {bin_element, + line(), + T, + ?MODULE:af_binelement_size(), + type_specifier_list()}. + +-type af_binelement_size() :: default | ?MODULE:abstract_expr(). + +-type af_binary_op(T) :: {op, line(), T, ?MODULE:af_binop(), T}. + +-type af_binop() :: '/' | '*' | 'div' | 'rem' | 'band' | 'and' | '+' | '-' + | 'bor' | 'bxor' | 'bsl' | 'bsr' | 'or' | 'xor' | '++' + | '--' | '==' | '/=' | '=<' | '<' | '>=' | '>' | '=:=' + | '=/='. + +-type af_unary_op(T) :: {op, line(), ?MODULE:af_unop(), T}. + +-type af_unop() :: '+' | '*' | 'bnot' | 'not'. + +%% See also lib/stdlib/{src/erl_bits.erl,include/erl_bits.hrl}. 
+-type type_specifier_list() :: default | [type_specifier(), ...]. + +-type type_specifier() :: af_type() + | af_signedness() + | af_endianness() + | af_unit(). + +-type af_type() :: integer + | float + | binary + | bytes + | bitstring + | bits + | utf8 + | utf16 + | utf32. + +-type af_signedness() :: signed | unsigned. + +-type af_endianness() :: big | little | native. + +-type af_unit() :: {unit, 1..256}. + +-type af_record_index() :: + {record_index, line(), af_record_name(), af_field_name()}. + +-type af_record_field(T) :: {record_field, line(), af_field_name(), T}. + +-type af_record_name() :: atom(). + +-type af_field_name() :: atom(). + +%% End of Abstract Format + +-type error_description() :: term(). +-type error_info() :: {erl_scan:line(), module(), error_description()}. +-type token() :: {Tag :: atom(), Line :: erl_scan:line()}. + +%% mkop(Op, Arg) -> {op,Line,Op,Arg}. +%% mkop(Left, Op, Right) -> {op,Line,Op,Left,Right}. + +-define(mkop2(L, OpPos, R), + begin + {Op,Pos} = OpPos, + {op,Pos,Op,L,R} + end). + +-define(mkop1(OpPos, A), + begin + {Op,Pos} = OpPos, + {op,Pos,Op,A} + end). + +%% keep track of line info in tokens +-define(line(Tup), element(2, Tup)). + +%% Entry points compatible to old erl_parse. +%% These really suck and are only here until Calle gets multiple +%% entry points working. + +-spec parse_form(Tokens) -> {ok, AbsForm} | {error, ErrorInfo} when + Tokens :: [token()], + AbsForm :: abstract_form(), + ErrorInfo :: error_info(). +parse_form([{'-',L1},{atom,L2,spec}|Tokens]) -> + parse([{'-',L1},{'spec',L2}|Tokens]); +parse_form([{'-',L1},{atom,L2,callback}|Tokens]) -> + parse([{'-',L1},{'callback',L2}|Tokens]); +parse_form(Tokens) -> + parse(Tokens). + +-spec parse_exprs(Tokens) -> {ok, ExprList} | {error, ErrorInfo} when + Tokens :: [token()], + ExprList :: [abstract_expr()], + ErrorInfo :: error_info(). +parse_exprs(Tokens) -> + case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of + {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],Exprs}]}} -> + {ok,Exprs}; + {error,_} = Err -> Err + end. + +-spec parse_term(Tokens) -> {ok, Term} | {error, ErrorInfo} when + Tokens :: [token()], + Term :: term(), + ErrorInfo :: error_info(). +parse_term(Tokens) -> + case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of + {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[Expr]}]}} -> + try normalise(Expr) of + Term -> {ok,Term} + catch + _:_R -> {error,{?line(Expr),?MODULE,"bad term"}} + end; + {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[_E1,E2|_Es]}]}} -> + {error,{?line(E2),?MODULE,"bad term"}}; + {error,_} = Err -> Err + end. + +%% Convert between the abstract form of a term and a term. + +-spec normalise(AbsTerm) -> Data when + AbsTerm :: abstract_expr(), + Data :: term(). +normalise({char,_,C}) -> C; +normalise({integer,_,I}) -> I; +normalise({float,_,F}) -> F; +normalise({atom,_,A}) -> A; +normalise({string,_,S}) -> S; +normalise({nil,_}) -> []; +normalise({bin,_,Fs}) -> + {value, B, _} = + eval_bits:expr_grp(Fs, [], + fun(E, _) -> + {value, normalise(E), []} + end, [], true), + B; +normalise({cons,_,Head,Tail}) -> + [normalise(Head)|normalise(Tail)]; +normalise({tuple,_,Args}) -> + list_to_tuple(normalise_list(Args)); +%% Atom dot-notation, as in 'foo.bar.baz' +%% Special case for unary +/-. +normalise({op,_,'+',{char,_,I}}) -> I; +normalise({op,_,'+',{integer,_,I}}) -> I; +normalise({op,_,'+',{float,_,F}}) -> F; +normalise({op,_,'-',{char,_,I}}) -> -I; %Weird, but compatible! 
+normalise({op,_,'-',{integer,_,I}}) -> -I; +normalise({op,_,'-',{float,_,F}}) -> -F; +normalise(X) -> erlang:error({badarg, X}). + +normalise_list([H|T]) -> + [normalise(H)|normalise_list(T)]; +normalise_list([]) -> + []. + +%% Generate a list of tokens representing the abstract term. + +-spec tokens(AbsTerm) -> Tokens when + AbsTerm :: abstract_expr(), + Tokens :: [token()]. +tokens(Abs) -> + tokens(Abs, []). + +-spec tokens(AbsTerm, MoreTokens) -> Tokens when + AbsTerm :: abstract_expr(), + MoreTokens :: [token()], + Tokens :: [token()]. +tokens({char,L,C}, More) -> [{char,L,C}|More]; +tokens({integer,L,N}, More) -> [{integer,L,N}|More]; +tokens({float,L,F}, More) -> [{float,L,F}|More]; +tokens({atom,L,A}, More) -> [{atom,L,A}|More]; +tokens({var,L,V}, More) -> [{var,L,V}|More]; +tokens({string,L,S}, More) -> [{string,L,S}|More]; +tokens({nil,L}, More) -> [{'[',L},{']',L}|More]; +tokens({cons,L,Head,Tail}, More) -> + [{'[',L}|tokens(Head, tokens_tail(Tail, More))]; +tokens({tuple,L,[]}, More) -> + [{'{',L},{'}',L}|More]; +tokens({tuple,L,[E|Es]}, More) -> + [{'{',L}|tokens(E, tokens_tuple(Es, ?line(E), More))]. + +tokens_tail({cons,L,Head,Tail}, More) -> + [{',',L}|tokens(Head, tokens_tail(Tail, More))]; +tokens_tail({nil,L}, More) -> + [{']',L}|More]; +tokens_tail(Other, More) -> + L = ?line(Other), + [{'|',L}|tokens(Other, [{']',L}|More])]. + +tokens_tuple([E|Es], Line, More) -> + [{',',Line}|tokens(E, tokens_tuple(Es, ?line(E), More))]; +tokens_tuple([], Line, More) -> + [{'}',Line}|More]. + +%% Give the relative precedences of operators. + +inop_prec('=') -> {150,100,100}; +inop_prec('!') -> {150,100,100}; +inop_prec('orelse') -> {160,150,150}; +inop_prec('andalso') -> {200,160,160}; +inop_prec('==') -> {300,200,300}; +inop_prec('/=') -> {300,200,300}; +inop_prec('=<') -> {300,200,300}; +inop_prec('<') -> {300,200,300}; +inop_prec('>=') -> {300,200,300}; +inop_prec('>') -> {300,200,300}; +inop_prec('=:=') -> {300,200,300}; +inop_prec('=/=') -> {300,200,300}; +inop_prec('++') -> {400,300,300}; +inop_prec('--') -> {400,300,300}; +inop_prec('+') -> {400,400,500}; +inop_prec('-') -> {400,400,500}; +inop_prec('bor') -> {400,400,500}; +inop_prec('bxor') -> {400,400,500}; +inop_prec('bsl') -> {400,400,500}; +inop_prec('bsr') -> {400,400,500}; +inop_prec('or') -> {400,400,500}; +inop_prec('xor') -> {400,400,500}; +inop_prec('*') -> {500,500,600}; +inop_prec('/') -> {500,500,600}; +inop_prec('div') -> {500,500,600}; +inop_prec('rem') -> {500,500,600}; +inop_prec('band') -> {500,500,600}; +inop_prec('and') -> {500,500,600}; +inop_prec('#') -> {800,700,800}; +inop_prec(':') -> {900,800,900}; +inop_prec('.') -> {900,900,1000}. + +-type pre_op() :: 'catch' | '+' | '-' | 'bnot' | 'not' | '#'. + +-spec preop_prec(pre_op()) -> {0 | 600 | 700, 100 | 700 | 800}. + +preop_prec('catch') -> {0,100}; +preop_prec('+') -> {600,700}; +preop_prec('-') -> {600,700}; +preop_prec('bnot') -> {600,700}; +preop_prec('not') -> {600,700}; +preop_prec('#') -> {700,800}. + +-spec func_prec() -> {800,700}. + +func_prec() -> {800,700}. + +-spec max_prec() -> 1000. + +max_prec() -> 1000. + +parse(T) -> + bar:foo(T). diff --git a/lib/dialyzer/test/small_SUITE_data/src/big_local_type.erl b/lib/dialyzer/test/small_SUITE_data/src/big_local_type.erl new file mode 100644 index 0000000000..6de263eda1 --- /dev/null +++ b/lib/dialyzer/test/small_SUITE_data/src/big_local_type.erl @@ -0,0 +1,525 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2001-2015. All Rights Reserved. 
+%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +%%% A stripped version of erl_parse.yrl. +%%% +%%% A type for the abstract format with *local* types has been added. +%%% The type of the abstract format is not up-to-date, but it does not +%%% matter since the purpose of the type is to stress the conversion +%%% of type forms to erl_type(). + +-module(big_local_type). + +-export([parse_form/1,parse_exprs/1,parse_term/1]). +-export([normalise/1,tokens/1,tokens/2]). +-export([inop_prec/1,preop_prec/1,func_prec/0,max_prec/0]). + +-export_type([abstract_clause/0, abstract_expr/0, abstract_form/0, + error_info/0]). + +%% Start of Abstract Format + +-type line() :: erl_scan:line(). + +-export_type([af_module/0, af_export/0, af_import/0, af_fa_list/0, + af_compile/0, af_file/0, af_record_decl/0, + af_field_decl/0, af_wild_attribute/0, + af_record_update/1, af_catch/0, af_local_call/0, + af_remote_call/0, af_args/0, af_local_function/0, + af_remote_function/0, af_list_comprehension/0, + af_binary_comprehension/0, af_template/0, + af_qualifier_seq/0, af_qualifier/0, af_generator/0, + af_filter/0, af_block/0, af_if/0, af_case/0, af_try/0, + af_clause_seq/0, af_catch_clause_seq/0, af_receive/0, + af_local_fun/0, af_remote_fun/0, af_fun/0, af_query/0, + af_query_access/0, af_clause/0, + af_catch_clause/0, af_catch_pattern/0, af_catch_class/0, + af_body/0, af_guard_seq/0, af_guard/0, af_guard_test/0, + af_record_access/1, af_guard_call/0, + af_remote_guard_call/0, af_pattern/0, af_literal/0, + af_atom/0, af_lit_atom/1, af_integer/0, af_float/0, + af_string/0, af_match/1, af_variable/0, + af_anon_variable/0, af_tuple/1, af_nil/0, af_cons/1, + af_bin/1, af_binelement/1, af_binelement_size/0, + af_binary_op/1, af_binop/0, af_unary_op/1, af_unop/0]). + +-type abstract_form() :: af_module() + | af_export() + | af_import() + | af_compile() + | af_file() + | af_record_decl() + | af_wild_attribute() + | af_function_decl(). + +-type af_module() :: {attribute, line(), module, module()}. + +-type af_export() :: {attribute, line(), export, af_fa_list()}. + +-type af_import() :: {attribute, line(), import, af_fa_list()}. + +-type af_fa_list() :: [{function(), arity()}]. + +-type af_compile() :: {attribute, line(), compile, any()}. + +-type af_file() :: {attribute, line(), file, {string(), line()}}. + +-type af_record_decl() :: + {attribute, line(), record, af_record_name(), [af_field_decl()]}. + +-type af_field_decl() :: {record_field, line(), af_atom()} + | {record_field, line(), af_atom(), abstract_expr()}. + +%% Types and specs, among other things... +-type af_wild_attribute() :: {attribute, line(), af_atom(), any()}. + +-type af_function_decl() :: + {function, line(), function(), arity(), af_clause_seq()}. 
+ +-type abstract_expr() :: af_literal() + | af_match(abstract_expr()) + | af_variable() + | af_tuple(abstract_expr()) + | af_nil() + | af_cons(abstract_expr()) + | af_bin(abstract_expr()) + | af_binary_op(abstract_expr()) + | af_unary_op(abstract_expr()) + | af_record_access(abstract_expr()) + | af_record_update(abstract_expr()) + | af_record_index() + | af_record_field(abstract_expr()) + | af_catch() + | af_local_call() + | af_remote_call() + | af_list_comprehension() + | af_binary_comprehension() + | af_block() + | af_if() + | af_case() + | af_try() + | af_receive() + | af_local_fun() + | af_remote_fun() + | af_fun() + | af_query() + | af_query_access(). + +-type af_record_update(T) :: {record, + line(), + abstract_expr(), + af_record_name(), + [af_record_field(T)]}. + +-type af_catch() :: {'catch', line(), abstract_expr()}. + +-type af_local_call() :: {call, line(), af_local_function(), af_args()}. + +-type af_remote_call() :: {call, line(), af_remote_function(), af_args()}. + +-type af_args() :: [abstract_expr()]. + +-type af_local_function() :: abstract_expr(). + +-type af_remote_function() :: + {remote, line(), abstract_expr(), abstract_expr()}. + +-type af_list_comprehension() :: + {lc, line(), af_template(), af_qualifier_seq()}. + +-type af_binary_comprehension() :: + {bc, line(), af_template(), af_qualifier_seq()}. + +-type af_template() :: abstract_expr(). + +-type af_qualifier_seq() :: [af_qualifier()]. + +-type af_qualifier() :: af_generator() | af_filter(). + +-type af_generator() :: {generate, line(), af_pattern(), abstract_expr()} + | {b_generate, line(), af_pattern(), abstract_expr()}. + +-type af_filter() :: abstract_expr(). + +-type af_block() :: {block, line(), af_body()}. + +-type af_if() :: {'if', line(), af_clause_seq()}. + +-type af_case() :: {'case', line(), abstract_expr(), af_clause_seq()}. + +-type af_try() :: {'try', + line(), + af_body(), + af_clause_seq(), + af_catch_clause_seq(), + af_body()}. + +-type af_clause_seq() :: [af_clause(), ...]. + +-type af_catch_clause_seq() :: [af_clause(), ...]. + +-type af_receive() :: + {'receive', line(), af_clause_seq()} + | {'receive', line(), af_clause_seq(), abstract_expr(), af_body()}. + +-type af_local_fun() :: {'fun', line(), {function, function(), arity()}}. + +-type af_remote_fun() :: + {'fun', line(), {function, module(), function(), arity()}} + | {'fun', line(), {function, af_atom(), af_atom(), af_integer()}}. + +-type af_fun() :: {'fun', line(), {clauses, af_clause_seq()}}. + +-type af_query() :: {'query', line(), af_list_comprehension()}. + +-type af_query_access() :: + {record_field, line(), abstract_expr(), af_field_name()}. + +-type abstract_clause() :: af_clause() | af_catch_clause(). + +-type af_clause() :: + {clause, line(), [af_pattern()], af_guard_seq(), af_body()}. + +-type af_catch_clause() :: + {clause, line(), [af_catch_pattern()], af_guard_seq(), af_body()}. + +-type af_catch_pattern() :: + {af_catch_class(), af_pattern(), af_anon_variable()}. + +-type af_catch_class() :: + af_variable() + | af_lit_atom(throw) | af_lit_atom(error) | af_lit_atom(exit). + +-type af_body() :: [abstract_expr(), ...]. + +-type af_guard_seq() :: [af_guard()]. + +-type af_guard() :: [af_guard_test(), ...]. 
+ +-type af_guard_test() :: af_literal() + | af_variable() + | af_tuple(af_guard_test()) + | af_nil() + | af_cons(af_guard_test()) + | af_bin(af_guard_test()) + | af_binary_op(af_guard_test()) + | af_unary_op(af_guard_test()) + | af_record_access(af_guard_test()) + | af_record_index() + | af_record_field(af_guard_test()) + | af_guard_call() + | af_remote_guard_call(). + +-type af_record_access(T) :: + {record, line(), af_record_name(), [af_record_field(T)]}. + +-type af_guard_call() :: {call, line(), function(), [af_guard_test()]}. + +-type af_remote_guard_call() :: + {call, line(), atom(), af_lit_atom(erlang), [af_guard_test()]}. + +-type af_pattern() :: af_literal() + | af_match(af_pattern()) + | af_variable() + | af_anon_variable() + | af_tuple(af_pattern()) + | af_nil() + | af_cons(af_pattern()) + | af_bin(af_pattern()) + | af_binary_op(af_pattern()) + | af_unary_op(af_pattern()) + | af_record_index() + | af_record_field(af_pattern()). + +-type af_literal() :: af_atom() | af_integer() | af_float() | af_string(). + +-type af_atom() :: af_lit_atom(atom()). + +-type af_lit_atom(A) :: {atom, line(), A}. + +-type af_integer() :: {integer, line(), non_neg_integer()}. + +-type af_float() :: {float, line(), float()}. + +-type af_string() :: {string, line(), [byte()]}. + +-type af_match(T) :: {match, line(), T, T}. + +-type af_variable() :: {var, line(), atom()}. + +-type af_anon_variable() :: {var, line(), '_'}. + +-type af_tuple(T) :: {tuple, line(), [T]}. + +-type af_nil() :: {nil, line()}. + +-type af_cons(T) :: {cons, line, T, T}. + +-type af_bin(T) :: {bin, line(), [af_binelement(T)]}. + +-type af_binelement(T) :: {bin_element, + line(), + T, + af_binelement_size(), + type_specifier_list()}. + +-type af_binelement_size() :: default | abstract_expr(). + +-type af_binary_op(T) :: {op, line(), T, af_binop(), T}. + +-type af_binop() :: '/' | '*' | 'div' | 'rem' | 'band' | 'and' | '+' | '-' + | 'bor' | 'bxor' | 'bsl' | 'bsr' | 'or' | 'xor' | '++' + | '--' | '==' | '/=' | '=<' | '<' | '>=' | '>' | '=:=' + | '=/='. + +-type af_unary_op(T) :: {op, line(), af_unop(), T}. + +-type af_unop() :: '+' | '*' | 'bnot' | 'not'. + +%% See also lib/stdlib/{src/erl_bits.erl,include/erl_bits.hrl}. +-type type_specifier_list() :: default | [type_specifier(), ...]. + +-type type_specifier() :: af_type() + | af_signedness() + | af_endianness() + | af_unit(). + +-type af_type() :: integer + | float + | binary + | bytes + | bitstring + | bits + | utf8 + | utf16 + | utf32. + +-type af_signedness() :: signed | unsigned. + +-type af_endianness() :: big | little | native. + +-type af_unit() :: {unit, 1..256}. + +-type af_record_index() :: + {record_index, line(), af_record_name(), af_field_name()}. + +-type af_record_field(T) :: {record_field, line(), af_field_name(), T}. + +-type af_record_name() :: atom(). + +-type af_field_name() :: atom(). + +%% End of Abstract Format + +-type error_description() :: term(). +-type error_info() :: {erl_scan:line(), module(), error_description()}. +-type token() :: {Tag :: atom(), Line :: erl_scan:line()}. + +%% mkop(Op, Arg) -> {op,Line,Op,Arg}. +%% mkop(Left, Op, Right) -> {op,Line,Op,Left,Right}. + +-define(mkop2(L, OpPos, R), + begin + {Op,Pos} = OpPos, + {op,Pos,Op,L,R} + end). + +-define(mkop1(OpPos, A), + begin + {Op,Pos} = OpPos, + {op,Pos,Op,A} + end). + +%% keep track of line info in tokens +-define(line(Tup), element(2, Tup)). + +%% Entry points compatible to old erl_parse. +%% These really suck and are only here until Calle gets multiple +%% entry points working. 
+ +-spec parse_form(Tokens) -> {ok, AbsForm} | {error, ErrorInfo} when + Tokens :: [token()], + AbsForm :: abstract_form(), + ErrorInfo :: error_info(). +parse_form([{'-',L1},{atom,L2,spec}|Tokens]) -> + parse([{'-',L1},{'spec',L2}|Tokens]); +parse_form([{'-',L1},{atom,L2,callback}|Tokens]) -> + parse([{'-',L1},{'callback',L2}|Tokens]); +parse_form(Tokens) -> + parse(Tokens). + +-spec parse_exprs(Tokens) -> {ok, ExprList} | {error, ErrorInfo} when + Tokens :: [token()], + ExprList :: [abstract_expr()], + ErrorInfo :: error_info(). +parse_exprs(Tokens) -> + case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of + {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],Exprs}]}} -> + {ok,Exprs}; + {error,_} = Err -> Err + end. + +-spec parse_term(Tokens) -> {ok, Term} | {error, ErrorInfo} when + Tokens :: [token()], + Term :: term(), + ErrorInfo :: error_info(). +parse_term(Tokens) -> + case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of + {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[Expr]}]}} -> + try normalise(Expr) of + Term -> {ok,Term} + catch + _:_R -> {error,{?line(Expr),?MODULE,"bad term"}} + end; + {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[_E1,E2|_Es]}]}} -> + {error,{?line(E2),?MODULE,"bad term"}}; + {error,_} = Err -> Err + end. + +%% Convert between the abstract form of a term and a term. + +-spec normalise(AbsTerm) -> Data when + AbsTerm :: abstract_expr(), + Data :: term(). +normalise({char,_,C}) -> C; +normalise({integer,_,I}) -> I; +normalise({float,_,F}) -> F; +normalise({atom,_,A}) -> A; +normalise({string,_,S}) -> S; +normalise({nil,_}) -> []; +normalise({bin,_,Fs}) -> + {value, B, _} = + eval_bits:expr_grp(Fs, [], + fun(E, _) -> + {value, normalise(E), []} + end, [], true), + B; +normalise({cons,_,Head,Tail}) -> + [normalise(Head)|normalise(Tail)]; +normalise({tuple,_,Args}) -> + list_to_tuple(normalise_list(Args)); +%% Atom dot-notation, as in 'foo.bar.baz' +%% Special case for unary +/-. +normalise({op,_,'+',{char,_,I}}) -> I; +normalise({op,_,'+',{integer,_,I}}) -> I; +normalise({op,_,'+',{float,_,F}}) -> F; +normalise({op,_,'-',{char,_,I}}) -> -I; %Weird, but compatible! +normalise({op,_,'-',{integer,_,I}}) -> -I; +normalise({op,_,'-',{float,_,F}}) -> -F; +normalise(X) -> erlang:error({badarg, X}). + +normalise_list([H|T]) -> + [normalise(H)|normalise_list(T)]; +normalise_list([]) -> + []. + +%% Generate a list of tokens representing the abstract term. + +-spec tokens(AbsTerm) -> Tokens when + AbsTerm :: abstract_expr(), + Tokens :: [token()]. +tokens(Abs) -> + tokens(Abs, []). + +-spec tokens(AbsTerm, MoreTokens) -> Tokens when + AbsTerm :: abstract_expr(), + MoreTokens :: [token()], + Tokens :: [token()]. +tokens({char,L,C}, More) -> [{char,L,C}|More]; +tokens({integer,L,N}, More) -> [{integer,L,N}|More]; +tokens({float,L,F}, More) -> [{float,L,F}|More]; +tokens({atom,L,A}, More) -> [{atom,L,A}|More]; +tokens({var,L,V}, More) -> [{var,L,V}|More]; +tokens({string,L,S}, More) -> [{string,L,S}|More]; +tokens({nil,L}, More) -> [{'[',L},{']',L}|More]; +tokens({cons,L,Head,Tail}, More) -> + [{'[',L}|tokens(Head, tokens_tail(Tail, More))]; +tokens({tuple,L,[]}, More) -> + [{'{',L},{'}',L}|More]; +tokens({tuple,L,[E|Es]}, More) -> + [{'{',L}|tokens(E, tokens_tuple(Es, ?line(E), More))]. + +tokens_tail({cons,L,Head,Tail}, More) -> + [{',',L}|tokens(Head, tokens_tail(Tail, More))]; +tokens_tail({nil,L}, More) -> + [{']',L}|More]; +tokens_tail(Other, More) -> + L = ?line(Other), + [{'|',L}|tokens(Other, [{']',L}|More])]. 
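%% A minimal, illustrative sketch (not part of the test module) of the
%% kind of abstract terms the af_*() types above describe, passed
%% through normalise/1 and tokens/1; the function name is hypothetical.
normalise_example() ->
    {foo, 42} = normalise({tuple,0,[{atom,0,foo},{integer,0,42}]}),
    [{'[',0},{atom,0,a},{']',0}] = tokens({cons,0,{atom,0,a},{nil,0}}),
    ok.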
+ +tokens_tuple([E|Es], Line, More) -> + [{',',Line}|tokens(E, tokens_tuple(Es, ?line(E), More))]; +tokens_tuple([], Line, More) -> + [{'}',Line}|More]. + +%% Give the relative precedences of operators. + +inop_prec('=') -> {150,100,100}; +inop_prec('!') -> {150,100,100}; +inop_prec('orelse') -> {160,150,150}; +inop_prec('andalso') -> {200,160,160}; +inop_prec('==') -> {300,200,300}; +inop_prec('/=') -> {300,200,300}; +inop_prec('=<') -> {300,200,300}; +inop_prec('<') -> {300,200,300}; +inop_prec('>=') -> {300,200,300}; +inop_prec('>') -> {300,200,300}; +inop_prec('=:=') -> {300,200,300}; +inop_prec('=/=') -> {300,200,300}; +inop_prec('++') -> {400,300,300}; +inop_prec('--') -> {400,300,300}; +inop_prec('+') -> {400,400,500}; +inop_prec('-') -> {400,400,500}; +inop_prec('bor') -> {400,400,500}; +inop_prec('bxor') -> {400,400,500}; +inop_prec('bsl') -> {400,400,500}; +inop_prec('bsr') -> {400,400,500}; +inop_prec('or') -> {400,400,500}; +inop_prec('xor') -> {400,400,500}; +inop_prec('*') -> {500,500,600}; +inop_prec('/') -> {500,500,600}; +inop_prec('div') -> {500,500,600}; +inop_prec('rem') -> {500,500,600}; +inop_prec('band') -> {500,500,600}; +inop_prec('and') -> {500,500,600}; +inop_prec('#') -> {800,700,800}; +inop_prec(':') -> {900,800,900}; +inop_prec('.') -> {900,900,1000}. + +-type pre_op() :: 'catch' | '+' | '-' | 'bnot' | 'not' | '#'. + +-spec preop_prec(pre_op()) -> {0 | 600 | 700, 100 | 700 | 800}. + +preop_prec('catch') -> {0,100}; +preop_prec('+') -> {600,700}; +preop_prec('-') -> {600,700}; +preop_prec('bnot') -> {600,700}; +preop_prec('not') -> {600,700}; +preop_prec('#') -> {700,800}. + +-spec func_prec() -> {800,700}. + +func_prec() -> {800,700}. + +-spec max_prec() -> 1000. + +max_prec() -> 1000. + +parse(T) -> + bar:foo(T). diff --git a/lib/dialyzer/test/small_SUITE_data/src/contracts_with_subtypes.erl b/lib/dialyzer/test/small_SUITE_data/src/contracts_with_subtypes.erl index d7dfd9752e..dbabd904c2 100644 --- a/lib/dialyzer/test/small_SUITE_data/src/contracts_with_subtypes.erl +++ b/lib/dialyzer/test/small_SUITE_data/src/contracts_with_subtypes.erl @@ -136,10 +136,14 @@ q(ab) -> rec2({a, b}); % breaks the contract q(ba) -> rec2({b, a}); % breaks the contract q(aba) -> rec2({a, {b, a}}); % breaks the contract q(bab) -> rec2({b, {a, b}}); % breaks the contract -q(abab) -> rec2({a, {b, {a, b}}}); -q(baba) -> rec2({b, {a, {b, a}}}); -q(ababa) -> rec2({a, {b, {a, {b, a}}}}); -q(babab) -> rec2({b, {a, {b, {a, b}}}}). +q(abab) -> rec2({a, {b, {a, b}}}); % breaks the contract +q(baba) -> rec2({b, {a, {b, a}}}); % breaks the contract +q(ababa) -> rec2({a, {b, {a, {b, a}}}}); % breaks the contract +q(babab) -> rec2({b, {a, {b, {a, b}}}}); % breaks the contract +q(ababab) -> rec2({a, {b, {a, {b, {a, b}}}}}); +q(bababa) -> rec2({b, {a, {b, {a, {b, a}}}}}); +q(abababa) -> rec2({a, {b, {a, {b, {a, {b, a}}}}}}); +q(bababab) -> rec2({b, {a, {b, {a, {b, {a, b}}}}}}). %=============================================================================== diff --git a/lib/dialyzer/test/small_SUITE_data/src/ditrap.erl b/lib/dialyzer/test/small_SUITE_data/src/ditrap.erl new file mode 100644 index 0000000000..2d75f25bd5 --- /dev/null +++ b/lib/dialyzer/test/small_SUITE_data/src/ditrap.erl @@ -0,0 +1,47 @@ +%% A bug reported by Tail-f Systems. The problem is that record types +%% are included without properly limiting their depth. + +-module(ditrap). + +-define(tref(T), ?MODULE:T). +-define(fref(T), ?MODULE:T). + +-export_type([ module_rec/0 + , typedef_rec/0 + , type_spec_fun/0 + ]). 
+ +-record(type, { + base :: 'builtin' | external:random_type() | ?tref(typedef_rec()), + type_spec_fun :: ?fref(type_spec_fun()) + }). + +-record(typedef, {type :: #type{}}). + +-record(typedefs, { + map :: ?tref(typedef_rec()), + parent :: 'undefined' | #typedefs{} + }). + +-record(sn, { + module :: ?tref(module_rec()), + typedefs :: #typedefs{}, + type :: 'undefined' | #type{}, + keys :: 'undefined' | [#sn{}], + children = [] :: [#sn{}] + }). + +-record(augment, {children = [] :: [#sn{}]}). + +-record(module, { + submodules = [] :: [{#module{}, external:pos()}], + typedefs = #typedefs{} :: #typedefs{}, + children = [] :: [#sn{}], + remote_augments = [] :: [{ModuleName :: atom(), [#augment{}]}], + local_augments = [] :: [#augment{}] + }). + +-type typedef_rec() :: #typedef{}. +-type module_rec() :: #module{}. + +-type type_spec_fun() :: undefined | fun((#type{}, #module{}) -> any()). diff --git a/lib/diameter/doc/src/diameter.xml b/lib/diameter/doc/src/diameter.xml index 00b54ffbc4..638c1c4c2b 100644 --- a/lib/diameter/doc/src/diameter.xml +++ b/lib/diameter/doc/src/diameter.xml @@ -21,7 +21,7 @@ <copyright> <year>2011</year> -<year>2014</year> +<year>2015</year> <holder>Ericsson AB. All Rights Reserved.</holder> </copyright> <legalnotice> @@ -111,7 +111,7 @@ Defined in &dict_data_types;.</p> <tag><c>application_alias() = term()</c></tag> <item> <p> -A name identifying a Diameter application in +Name identifying a Diameter application in service configuration. Passed to &call; when sending requests defined by the application.</p> @@ -129,7 +129,7 @@ ExtraArgs = list() </pre> <p> -A module implementing the callback interface defined in &man_app;, +Module implementing the callback interface defined in &man_app;, along with any extra arguments to be appended to those documented. Note that extra arguments specific to an outgoing request can be @@ -156,7 +156,7 @@ Has one the following types.</p> <tag><c>{alias, &application_alias;}</c></tag> <item> <p> -A unique identifier for the application in the scope of the +Unique identifier for the application in the scope of the service. Defaults to the value of the <c>dictionary</c> option if unspecified.</p> @@ -165,7 +165,7 @@ unspecified.</p> <tag><c>{dictionary, atom()}</c></tag> <item> <p> -The name of an encode/decode module for the Diameter +Name of an encode/decode module for the Diameter messages defined by the application. These modules are generated from files whose format is documented in &man_dict;.</p> @@ -174,7 +174,7 @@ These modules are generated from files whose format is documented in <tag><c>{module, &application_module;}</c></tag> <item> <p> -The callback module with which messages of the Diameter application are +Callback module in which messages of the Diameter application are handled. See &man_app; for the required interface and semantics.</p> </item> @@ -182,7 +182,7 @@ See &man_app; for the required interface and semantics.</p> <tag><c>{state, term()}</c></tag> <item> <p> -The initial callback state. +Initial callback state. The prevailing state is passed to some &man_app; callbacks, which can then return a new state. @@ -192,7 +192,7 @@ Defaults to the value of the <c>alias</c> option if unspecified.</p> <tag><c>{call_mutates_state, true|false}</c></tag> <item> <p> -Specifies whether or not the &app_pick_peer; +Whether or not the &app_pick_peer; application callback can modify the application state. 
Defaults to <c>false</c> if unspecified.</p> @@ -209,7 +209,7 @@ probably avoid it.</p> <tag><c>{answer_errors, callback|report|discard}</c></tag> <item> <p> -Determines the manner in which incoming answer messages containing +Manner in which incoming answer messages containing decode errors are handled.</p> <p> @@ -233,7 +233,7 @@ Defaults to <c>discard</c> if unspecified.</p> <tag><c>{request_errors, answer_3xxx|answer|callback}</c></tag> <item> <p> -Determines the manner in which incoming requests are handled when an +Manner in which incoming requests are handled when an error other than 3007 (DIAMETER_APPLICATION_UNSUPPORTED, which cannot be associated with an application callback module), is detected.</p> @@ -293,7 +293,7 @@ Multiple options append to the argument list.</p> <tag><c>{filter, &peer_filter;}</c></tag> <item> <p> -A filter to apply to the list of available peers before passing it to +Filter to apply to the list of available peers before passing it to the &app_pick_peer; callback for the application in question. Multiple options are equivalent a single <c>all</c> filter on the corresponding list of filters. @@ -311,7 +311,7 @@ Defaults to 5000.</p> <tag><c>detach</c></tag> <item> <p> -Causes &call; to return <c>ok</c> as +Cause &call; to return <c>ok</c> as soon as the request in question has been encoded, instead of waiting for and returning the result from a subsequent &app_handle_answer; or @@ -427,7 +427,7 @@ configuration passed to &start_service; or &add_transport;.</p> <tag><c>peer_filter() = term()</c></tag> <item> <p> -A filter passed to &call; in order to select candidate peers for a +Filter passed to &call; in order to select candidate peers for a &app_pick_peer; callback. Has one of the following types.</p> @@ -1032,7 +1032,7 @@ case the corresponding callbacks are applied until either all return <tag><c>{capx_timeout, &dict_Unsigned32;}</c></tag> <item> <p> -The number of milliseconds after which a transport process having an +Number of milliseconds after which a transport process having an established transport connection will be terminated if the expected capabilities exchange message (CER or CEA) is not received from the peer. For a connecting transport, the timing of connection attempts is @@ -1079,7 +1079,7 @@ transport.</p> <item> <p> -A callback invoked prior to terminating the transport process of a +Callback invoked prior to terminating the transport process of a transport connection having watchdog state <c>OKAY</c>. Applied to <c>application|service|transport</c> and the <c>&transport_ref;</c> and <c>&app_peer;</c> in question: @@ -1095,7 +1095,7 @@ The return value can have one of the following types.</p> <tag><c>{dpr, [option()]}</c></tag> <item> <p> -Causes Disconnect-Peer-Request to be sent to the peer, the transport +Send Disconnect-Peer-Request to the peer, the transport process being terminated following reception of Disconnect-Peer-Answer or timeout. An <c>option()</c> can be one of the following.</p> @@ -1104,7 +1104,7 @@ An <c>option()</c> can be one of the following.</p> <tag><c>{cause, 0|rebooting|1|busy|2|goaway}</c></tag> <item> <p> -The Disconnect-Cause to send, <c>REBOOTING</c>, <c>BUSY</c> and +Disconnect-Cause to send, <c>REBOOTING</c>, <c>BUSY</c> and <c>DO_NOT_WANT_TO_TALK_TO_YOU</c> respectively. 
Defaults to <c>rebooting</c> for <c>Reason=service|application</c> and <c>goaway</c> for <c>Reason=transport</c>.</p> @@ -1113,7 +1113,7 @@ Defaults to <c>rebooting</c> for <c>Reason=service|application</c> and <tag><c>{timeout, &dict_Unsigned32;}</c></tag> <item> <p> -The number of milliseconds after which the transport process is +Number of milliseconds after which the transport process is terminated if DPA has not been received. Defaults to 1000.</p> </item> @@ -1129,7 +1129,7 @@ Equivalent to <c>{dpr, []}</c>.</p> <tag><c>close</c></tag> <item> <p> -Causes the transport process to be terminated without +Terminate the transport process without Disconnect-Peer-Request being sent to the peer.</p> </item> @@ -1156,7 +1156,7 @@ Defaults to a single callback returning <c>dpr</c>.</p> <tag><c>{length_errors, exit|handle|discard}</c></tag> <item> <p> -Specifies how to deal with errors in the Message Length field of the +How to deal with errors in the Message Length field of the Diameter Header in an incoming message. An error in this context is that the length is not at least 20 bytes (the length of a Header), is not a multiple of 4 (a valid length) or @@ -1188,11 +1188,26 @@ See &man_tcp; for the behaviour of that module.</p> </note> </item> +<tag><c>{pool_size, pos_integer()}</c></tag> +<item> +<p> +Number of transport processes to start. +For a listening transport, determines the size of the pool of +accepting transport processes, a larger number being desirable for +processing multiple concurrent peer connection attempts. +For a connecting transport, determines the number of connections to +the peer in question that will be attempted to be established: +the &service_opt; <c>restrict_connections</c> should also be +configured on the service in question to allow multiple connections to +the same peer.</p> + +</item> + <marker id="spawn_opt"/> <tag><c>{spawn_opt, [term()]}</c></tag> <item> <p> -An options list passed to &spawn_opt; when spawning a process for an +Options list passed to &spawn_opt; when spawning a process for an incoming Diameter request. Options <c>monitor</c> and <c>link</c> are ignored.</p> @@ -1205,7 +1220,7 @@ Defaults to the list configured on the service if not specified.</p> <tag><c>{transport_config, term(), &dict_Unsigned32; | infinity}</c></tag> <item> <p> -A term passed as the third argument to the &transport_start; function of +Term passed as the third argument to the &transport_start; function of the relevant &transport_module; in order to start a transport process. Defaults to the empty list if unspecified.</p> @@ -1233,7 +1248,7 @@ To listen on both SCTP and TCP, define one transport for each.</p> <tag><c>{transport_module, atom()}</c></tag> <item> <p> -A module implementing a transport process as defined in &man_transport;. +Module implementing a transport process as defined in &man_transport;. Defaults to <c>diameter_tcp</c> if unspecified.</p> <p> @@ -1253,7 +1268,7 @@ corresponding timeout (see below) or all fail.</p> <tag><c>{watchdog_config, [{okay|suspect, non_neg_integer()}]}</c></tag> <item> <p> -Specifies configuration that alters the behaviour of the watchdog +Configuration that alters the behaviour of the watchdog state machine. On key <c>okay</c>, the non-negative number of answered DWR messages before transitioning from REOPEN to OKAY.
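%% A hedged, editorial sketch of using the pool_size transport option
%% documented above on a connecting transport; the service name and
%% peer address here are illustrative assumptions, and the service
%% would also be started with {restrict_connections, false} among its
%% diameter:start_service/2 options, as the text notes.
connect_pool(SvcName) ->
    diameter:add_transport(SvcName,
                           {connect, [{transport_module, diameter_tcp},
                                      {transport_config, [{raddr, {127,0,0,1}},
                                                          {rport, 3868}]},
                                      {pool_size, 4}]}).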
@@ -1308,7 +1323,7 @@ in predicate functions passed to &remove_transport;.</p> <tag><c>transport_ref() = reference()</c></tag> <item> <p> -An reference returned by &add_transport; that +Reference returned by &add_transport; that identifies the configuration.</p> </item> @@ -1737,6 +1752,14 @@ connection might look as follows.</p> The information presented here is as in the <c>connect</c> case except that the client connections are grouped under an <c>accept</c> tuple.</p> +<p> +Whether or not the &transport_opt; <c>pool_size</c> affects the format +of the listing in the case of a connecting transport, since a value +greater than 1 implies multiple transport processes for the same +<c>&transport_ref;</c>, as in the listening case. +The format in this case is similar to the listening case, with a +<c>pool</c> tuple in place of an <c>accept</c> tuple.</p> + </item> <tag><c>connections</c></tag> diff --git a/lib/diameter/examples/code/GNUmakefile b/lib/diameter/examples/code/GNUmakefile index 98e36a99e3..81f1da5a39 100644 --- a/lib/diameter/examples/code/GNUmakefile +++ b/lib/diameter/examples/code/GNUmakefile @@ -1,7 +1,7 @@ # # %CopyrightBegin% # -# Copyright Ericsson AB 2010-2012. All Rights Reserved. +# Copyright Ericsson AB 2010-2015. All Rights Reserved. # # The contents of this file are subject to the Erlang Public License, # Version 1.1, (the "License"); you may not use this file except in @@ -20,7 +20,7 @@ EXAMPLES = client server relay # redirect proxy CALLBACKS = $(EXAMPLES:%=%_cb) -MODULES = peer $(EXAMPLES) $(EXAMPLES:%=%_cb) +MODULES = node $(EXAMPLES) $(EXAMPLES:%=%_cb) BEAM = $(MODULES:%=%.beam) diff --git a/lib/diameter/examples/code/client.erl b/lib/diameter/examples/code/client.erl index 46eb4a55db..be5b4cbba5 100644 --- a/lib/diameter/examples/code/client.erl +++ b/lib/diameter/examples/code/client.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2014. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -38,7 +38,7 @@ -module(client). -include_lib("diameter/include/diameter.hrl"). --include_lib("diameter/include/diameter_gen_base_rfc3588.hrl"). +-include_lib("diameter/include/diameter_gen_base_rfc6733.hrl"). -export([start/1, %% start a service connect/2, %% add a connecting transport @@ -50,17 +50,14 @@ %% both the record and list encoding here, one detached and one not, %% is just for demonstration purposes. -%% Convenience functions using the default service name, ?SVC_NAME. +%% Convenience functions using the default service name. -export([start/0, connect/1, stop/0, call/0, cast/0]). --define(SVC_NAME, ?MODULE). --define(APP_ALIAS, ?MODULE). --define(CALLBACK_MOD, client_cb). - +-define(DEF_SVC_NAME, ?MODULE). -define(L, atom_to_list). %% The service configuration. As in the server example, a client @@ -70,27 +67,27 @@ {'Origin-Realm', "example.com"}, {'Vendor-Id', 0}, {'Product-Name', "Client"}, - {'Auth-Application-Id', [?DIAMETER_APP_ID_COMMON]}, - {application, [{alias, ?APP_ALIAS}, - {dictionary, ?DIAMETER_DICT_COMMON}, - {module, ?CALLBACK_MOD}]}]). + {'Auth-Application-Id', [0]}, + {application, [{alias, common}, + {dictionary, diameter_gen_base_rfc6733}, + {module, client_cb}]}]). %% start/1 start(Name) when is_atom(Name) -> - peer:start(Name, ?SERVICE(Name)). + node:start(Name, ?SERVICE(Name)). start() -> - start(?SVC_NAME). + start(?DEF_SVC_NAME). 
%% connect/2 connect(Name, T) -> - peer:connect(Name, T). + node:connect(Name, T). connect(T) -> - connect(?SVC_NAME, T). + connect(?DEF_SVC_NAME, T). %% call/1 @@ -99,10 +96,10 @@ call(Name) -> RAR = #diameter_base_RAR{'Session-Id' = SId, 'Auth-Application-Id' = 0, 'Re-Auth-Request-Type' = 0}, - diameter:call(Name, ?APP_ALIAS, RAR, []). + diameter:call(Name, common, RAR, []). call() -> - call(?SVC_NAME). + call(?DEF_SVC_NAME). %% cast/1 cast(Name) -> RAR = ['RAR', {'Session-Id', SId}, {'Auth-Application-Id', 0}, {'Re-Auth-Request-Type', 1}], - diameter:call(Name, ?APP_ALIAS, RAR, [detach]). + diameter:call(Name, common, RAR, [detach]). cast() -> - cast(?SVC_NAME). + cast(?DEF_SVC_NAME). %% stop/1 stop(Name) -> - peer:stop(Name). + node:stop(Name). stop() -> - stop(?SVC_NAME). + stop(?DEF_SVC_NAME). diff --git a/lib/diameter/examples/code/node.erl b/lib/diameter/examples/code/node.erl new file mode 100644 index 0000000000..4fe9007059 --- /dev/null +++ b/lib/diameter/examples/code/node.erl @@ -0,0 +1,174 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +%% +%% A library module used by the example Diameter nodes. Does little +%% more than provide an alternate/simplified transport configuration. +%% + +-module(node). + +-export([start/2, + listen/2, + connect/2, + stop/1]). + +-type protocol() + :: tcp | sctp. + +-type ip_address() + :: default + | inet:ip_address(). + +-type server_transport() + :: protocol() + | {protocol(), ip_address(), non_neg_integer()}. + +-type server_opts() + :: server_transport() + | {server_transport(), [diameter:transport_opt()]} + | [diameter:transport_opt()]. + +-type client_transport() + :: protocol() | any + | {protocol() | any, ip_address(), non_neg_integer()} + | {protocol() | any, ip_address(), ip_address(), non_neg_integer()}. + +-type client_opts() + :: client_transport() + | {client_transport(), [diameter:transport_opt()]} + | [diameter:transport_opt()]. + +%% The server_transport() and client_transport() config is just +%% convenience: arbitrary options can be specified as a +%% [diameter:transport_opt()]. + +-define(DEFAULT_PORT, 3868). + +%% --------------------------------------------------------------------------- +%% Interface functions +%% --------------------------------------------------------------------------- + +%% start/2 + +-spec start(diameter:service_name(), [diameter:service_opt()]) + -> ok + | {error, term()}. + +start(Name, Opts) + when is_atom(Name), is_list(Opts) -> + diameter:start_service(Name, Opts). + +%% connect/2 + +-spec connect(diameter:service_name(), client_opts()) + -> {ok, diameter:transport_ref()} + | {error, term()}.
+ +connect(Name, Opts) + when is_list(Opts) -> + diameter:add_transport(Name, {connect, Opts}); + +connect(Name, {T, Opts}) -> + connect(Name, Opts ++ client_opts(T)); + +connect(Name, T) -> + connect(Name, [{connect_timer, 5000} | client_opts(T)]). + +%% listen/2 + +-spec listen(diameter:service_name(), server_opts()) + -> {ok, diameter:transport_ref()} + | {error, term()}. + +listen(Name, Opts) + when is_list(Opts) -> + diameter:add_transport(Name, {listen, Opts}); + +listen(Name, {T, Opts}) -> + listen(Name, Opts ++ server_opts(T)); + +listen(Name, T) -> + listen(Name, server_opts(T)). + +%% stop/1 + +-spec stop(diameter:service_name()) + -> ok + | {error, term()}. + +stop(Name) -> + diameter:stop_service(Name). + +%% --------------------------------------------------------------------------- +%% Internal functions +%% --------------------------------------------------------------------------- + +%% server_opts/1 +%% +%% Return transport options for a listening transport. + +server_opts({T, Addr, Port}) -> + [{transport_module, tmod(T)}, + {transport_config, [{reuseaddr, true}, + {ip, addr(Addr)}, + {port, Port}]}]; + +server_opts(T) -> + server_opts({T, loopback, ?DEFAULT_PORT}). + +%% client_opts/1 +%% +%% Return transport options for a connecting transport. + +client_opts({T, LA, RA, RP}) + when T == all; %% backwards compatibility + T == any -> + [[S, {C,Os}], T] = [client_opts({P, LA, RA, RP}) || P <- [sctp,tcp]], + [S, {C,Os,2000} | T]; + +client_opts({T, LA, RA, RP}) -> + [{transport_module, tmod(T)}, + {transport_config, [{raddr, addr(RA)}, + {rport, RP}, + {reuseaddr, true} + | ip(LA)]}]; + +client_opts({T, RA, RP}) -> + client_opts({T, default, RA, RP}); + +client_opts(T) -> + client_opts({T, loopback, loopback, ?DEFAULT_PORT}). + +%% --------------------------------------------------------------------------- + +tmod(tcp) -> diameter_tcp; +tmod(sctp) -> diameter_sctp. + +ip(default) -> + []; +ip(loopback) -> + [{ip, {127,0,0,1}}]; +ip(Addr) -> + [{ip, Addr}]. + +addr(loopback) -> + {127,0,0,1}; +addr(A) -> + A. diff --git a/lib/diameter/examples/code/peer.erl b/lib/diameter/examples/code/peer.erl deleted file mode 100644 index 7519abfb2c..0000000000 --- a/lib/diameter/examples/code/peer.erl +++ /dev/null @@ -1,150 +0,0 @@ -%% -%% %CopyrightBegin% -%% -%% Copyright Ericsson AB 2010-2013. All Rights Reserved. -%% -%% The contents of this file are subject to the Erlang Public License, -%% Version 1.1, (the "License"); you may not use this file except in -%% compliance with the License. You should have received a copy of the -%% Erlang Public License along with this software. If not, it can be -%% retrieved online at http://www.erlang.org/. -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and limitations -%% under the License. -%% -%% %CopyrightEnd% -%% - -%% -%% A library module that factors out commonality in the example -%% Diameter peers. -%% - --module(peer). - --include_lib("diameter/include/diameter.hrl"). --include_lib("diameter/include/diameter_gen_base_rfc3588.hrl"). - --export([start/2, - listen/2, - connect/2, - stop/1]). - --type service_name() - :: term(). - --type protocol() - :: tcp | sctp. - --type ip_address() - :: default - | inet:ip_address(). - --type server_config() - :: protocol() - | {protocol(), ip_address(), non_neg_integer()}. 
- --type client_config() - :: protocol() - | {protocol(), ip_address(), non_neg_integer()} - | {protocol(), ip_address(), ip_address(), non_neg_integer()}. - --define(DEFAULT_PORT, 3868). - -%% --------------------------------------------------------------------------- -%% Interface functions -%% --------------------------------------------------------------------------- - -%% start/2 - --spec start(service_name(), list()) - -> ok - | {error, term()}. - -start(Name, Opts) - when is_atom(Name), is_list(Opts) -> - diameter:start_service(Name, Opts). - -%% connect/2 - --spec connect(service_name(), client_config()) - -> {ok, reference()} - | {error, term()}. - -connect(Name, T) -> - diameter:add_transport(Name, {connect, [{connect_timer, 5000} - | client(T)]}). - -%% listen/2 - --spec listen(service_name(), server_config()) - -> {ok, reference()} - | {error, term()}. - -listen(Name, T) -> - diameter:add_transport(Name, {listen, server(T)}). - -%% stop/1 - --spec stop(service_name()) - -> ok - | {error, term()}. - -stop(Name) -> - diameter:stop_service(Name). - -%% --------------------------------------------------------------------------- -%% Internal functions -%% --------------------------------------------------------------------------- - -%% server/1 -%% -%% Return config for a listening transport. - -server({T, Addr, Port}) -> - [{transport_module, tmod(T)}, - {transport_config, [{reuseaddr, true}, - {ip, addr(Addr)}, - {port, Port}]}]; - -server(T) -> - server({T, loopback, ?DEFAULT_PORT}). - -%% client/1 -%% -%% Return config for a connecting transport. - -client({all, LA, RA, RP}) -> - [[M,{K,C}], T] - = [client({P, LA, RA, RP}) || P <- [sctp,tcp]], - [M, {K,C,2000} | T]; - -client({T, LA, RA, RP}) -> - [{transport_module, tmod(T)}, - {transport_config, [{raddr, addr(RA)}, - {rport, RP}, - {reuseaddr, true} - | ip(LA)]}]; - -client({T, RA, RP}) -> - client({T, default, RA, RP}); - -client(T) -> - client({T, loopback, loopback, ?DEFAULT_PORT}). - -tmod(tcp) -> diameter_tcp; -tmod(sctp) -> diameter_sctp. - -ip(default) -> - []; -ip(loopback) -> - [{ip, {127,0,0,1}}]; -ip(Addr) -> - [{ip, Addr}]. - -addr(loopback) -> - {127,0,0,1}; -addr(A) -> - A. diff --git a/lib/diameter/examples/code/relay.erl b/lib/diameter/examples/code/relay.erl index d3438f83f3..0aa3cd06d3 100644 --- a/lib/diameter/examples/code/relay.erl +++ b/lib/diameter/examples/code/relay.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2012. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -31,9 +31,6 @@ -module(relay). --include_lib("diameter/include/diameter.hrl"). --include_lib("diameter/include/diameter_gen_base_rfc3588.hrl"). - -export([start/1, listen/2, connect/2, @@ -44,49 +41,47 @@ connect/1, stop/0]). --define(APP_ALIAS, ?MODULE). --define(SVC_NAME, ?MODULE). --define(CALLBACK_MOD, relay_cb). +-define(DEF_SVC_NAME, ?MODULE). %% The service configuration. -define(SERVICE(Name), [{'Origin-Host', atom_to_list(Name) ++ ".example.com"}, {'Origin-Realm', "example.com"}, {'Vendor-Id', 193}, {'Product-Name', "RelayAgent"}, - {'Auth-Application-Id', [?DIAMETER_APP_ID_RELAY]}, - {application, [{alias, ?MODULE}, - {dictionary, ?DIAMETER_DICT_RELAY}, - {module, ?CALLBACK_MOD}]}]). + {'Auth-Application-Id', [16#FFFFFFFF]}, + {application, [{alias, relay}, + {dictionary, diameter_relay}, + {module, relay_cb}]}]). 
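%% A hedged, editorial sketch of exercising the example client, server
%% and relay modules in this patch from an Erlang shell: the server
%% listens on the default port, the relay listens on 3869 (an arbitrary
%% choice here) and connects to the server, and the client connects to
%% the relay and sends a request; the function name is hypothetical.
run_examples() ->
    server:start(), server:listen(tcp),
    relay:start(), relay:listen({tcp, loopback, 3869}), relay:connect(tcp),
    client:start(), client:connect({tcp, loopback, loopback, 3869}),
    client:call().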
%% start/1 start(Name) when is_atom(Name) -> - peer:start(Name, ?SERVICE(Name)). + node:start(Name, ?SERVICE(Name)). start() -> - start(?SVC_NAME). + start(?DEF_SVC_NAME). %% listen/2 listen(Name, T) -> - peer:listen(Name, T). + node:listen(Name, T). listen(T) -> - listen(?SVC_NAME, T). + listen(?DEF_SVC_NAME, T). %% connect/2 connect(Name, T) -> - peer:connect(Name, T). + node:connect(Name, T). connect(T) -> - connect(?SVC_NAME, T). + connect(?DEF_SVC_NAME, T). %% stop/1 stop(Name) -> - peer:stop(Name). + node:stop(Name). stop() -> - stop(?SVC_NAME). + stop(?DEF_SVC_NAME). diff --git a/lib/diameter/examples/code/server.erl b/lib/diameter/examples/code/server.erl index 3959461cec..8c91e68895 100644 --- a/lib/diameter/examples/code/server.erl +++ b/lib/diameter/examples/code/server.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2012. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -34,21 +34,16 @@ -module(server). --include_lib("diameter/include/diameter.hrl"). --include_lib("diameter/include/diameter_gen_base_rfc3588.hrl"). - -export([start/1, %% start a service listen/2, %% add a listening transport stop/1]). %% stop a service -%% Convenience functions using the default service name, ?SVC_NAME. +%% Convenience functions using the default service name. -export([start/0, listen/1, stop/0]). --define(SVC_NAME, ?MODULE). --define(APP_ALIAS, ?MODULE). --define(CALLBACK_MOD, server_cb). +-define(DEF_SVC_NAME, ?MODULE). %% The service configuration. In a server supporting multiple Diameter %% applications each application may have its own, although they could all @@ -57,32 +52,32 @@ {'Origin-Realm', "example.com"}, {'Vendor-Id', 193}, {'Product-Name', "Server"}, - {'Auth-Application-Id', [?DIAMETER_APP_ID_COMMON]}, - {application, [{alias, ?APP_ALIAS}, - {dictionary, ?DIAMETER_DICT_COMMON}, - {module, ?CALLBACK_MOD}]}]). + {'Auth-Application-Id', [0]}, + {application, [{alias, common}, + {dictionary, diameter_gen_base_rfc6733}, + {module, server_cb}]}]). %% start/1 start(Name) when is_atom(Name) -> - peer:start(Name, ?SERVICE(Name)). + node:start(Name, ?SERVICE(Name)). start() -> - start(?SVC_NAME). + start(?DEF_SVC_NAME). %% listen/2 listen(Name, T) -> - peer:listen(Name, T). + node:listen(Name, T). listen(T) -> - listen(?SVC_NAME, T). + listen(?DEF_SVC_NAME, T). %% stop/1 stop(Name) -> - peer:stop(Name). + node:stop(Name). stop() -> - stop(?SVC_NAME). + stop(?DEF_SVC_NAME). diff --git a/lib/diameter/examples/code/server_cb.erl b/lib/diameter/examples/code/server_cb.erl index 9d8d395d06..071e152493 100644 --- a/lib/diameter/examples/code/server_cb.erl +++ b/lib/diameter/examples/code/server_cb.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2014. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -24,7 +24,7 @@ -module(server_cb). -include_lib("diameter/include/diameter.hrl"). --include_lib("diameter/include/diameter_gen_base_rfc3588.hrl"). +-include_lib("diameter/include/diameter_gen_base_rfc6733.hrl"). 
%% diameter callbacks -export([peer_up/3, diff --git a/lib/diameter/include/diameter_gen.hrl b/lib/diameter/include/diameter_gen.hrl index bc25f7d472..8272904856 100644 --- a/lib/diameter/include/diameter_gen.hrl +++ b/lib/diameter/include/diameter_gen.hrl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2014. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -25,6 +25,9 @@ -define(THROW(T), throw({?MODULE, T})). +%% Tag common to generated dictionaries. +-define(TAG, diameter_gen). + %% Key to a value in the process dictionary that determines whether or %% not an unrecognized AVP setting the M-bit should be regarded as an %% error or not. See is_strict/0. @@ -48,13 +51,20 @@ %% dictionary. putr(K,V) -> - put({?MODULE, K}, V). + put({?TAG, K}, V). getr(K) -> - get({?MODULE, K}). + case get({?TAG, K}) of + undefined -> + V = erase({?MODULE, K}), %% written in old code + V == undefined orelse putr(K,V), + V; + V -> + V + end. eraser(K) -> - erase({?MODULE, K}). + erase({?TAG, K}). %% --------------------------------------------------------------------------- %% # encode_avps/2 @@ -313,12 +323,20 @@ d(Name, Avp, Acc) -> %% decode is packed into 'AVP'. Mod = dict(Failed), %% Dictionary to decode in. + %% On decode, a Grouped AVP is represented as a #diameter_avp{} + %% list with AVP as head and component AVPs as tail. On encode, + %% data can be a list of component AVPs. + try Mod:avp(decode, Data, AvpName) of V -> {Avps, T} = Acc, {H, A} = ungroup(V, Avp), {[H | Avps], pack_avp(Name, A, T)} catch + throw: {?TAG, {grouped, RC, ComponentAvps}} -> + {Avps, {Rec, Errors}} = Acc, + A = trim(Avp), + {[[A | trim(ComponentAvps)] | Avps], {Rec, [{RC, A} | Errors]}}; error: Reason -> d(undefined == Failed orelse is_failed(), Reason, @@ -338,6 +356,10 @@ d(Name, Avp, Acc) -> trim(#diameter_avp{data = <<0:1, Bin/binary>>} = Avp) -> Avp#diameter_avp{data = Bin}; +trim(Avps) + when is_list(Avps) -> + lists:map(fun trim/1, Avps); + trim(Avp) -> Avp. @@ -582,22 +604,37 @@ value(_, Avp) -> %% # grouped_avp/3 %% --------------------------------------------------------------------------- --spec grouped_avp(decode, avp_name(), binary()) +-spec grouped_avp(decode, avp_name(), bitstring()) -> {avp_record(), [avp()]}; (encode, avp_name(), avp_record() | avp_values()) -> binary() | no_return(). +%% Length error induced by diameter_codec:collect_avps/1. +grouped_avp(decode, _Name, <<0:1, _/binary>>) -> + throw({?TAG, {grouped, 5014, []}}); + grouped_avp(decode, Name, Data) -> - {Rec, Avps, []} = decode_avps(Name, diameter_codec:collect_avps(Data)), - {Rec, Avps}; -%% A failed match here will result in 5004. Note that this is the only -%% AVP type that doesn't just return the decoded record, also -%% returning the list of component AVP's. + grouped_decode(Name, diameter_codec:collect_avps(Data)); grouped_avp(encode, Name, Data) -> encode_avps(Name, Data). +%% grouped_decode/2 +%% +%% Note that Grouped is the only AVP type that doesn't just return a +%% decoded value, also returning the list of component diameter_avp +%% records. + +grouped_decode(_Name, {Error, Acc}) -> + {RC, Avp} = Error, + throw({?TAG, {grouped, RC, [Avp | Acc]}}); + +grouped_decode(Name, ComponentAvps) -> + {Rec, Avps, Es} = decode_avps(Name, ComponentAvps), + [] == Es orelse throw({?TAG, {grouped, 5004, Avps}}), %% decode failure + {Rec, Avps}. 
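%% The extra clause in getr/1 above is for code upgrade: a value written
%% under the old {?MODULE, Key} process-dictionary key by previously loaded
%% dictionary code is moved to the new {diameter_gen, Key} key on first
%% read. A generic sketch of that read-with-migration pattern (the helper
%% name is hypothetical):

get_migrated(NewKey, OldKey) ->
    case get(NewKey) of
        undefined ->
            case erase(OldKey) of          %% written by old code?
                undefined -> undefined;
                V -> put(NewKey, V), V     %% migrate to the new key
            end;
        V ->
            V
    end.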
+ %% --------------------------------------------------------------------------- %% # empty_group/1 %% --------------------------------------------------------------------------- diff --git a/lib/diameter/src/base/diameter.erl b/lib/diameter/src/base/diameter.erl index d74e091e11..1bbdf6e34d 100644 --- a/lib/diameter/src/base/diameter.erl +++ b/lib/diameter/src/base/diameter.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2013. All Rights Reserved. +%% Copyright Ericsson AB 2010-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -337,6 +337,7 @@ call(SvcName, App, Message) -> :: {transport_module, atom()} | {transport_config, any()} | {transport_config, any(), 'Unsigned32'() | infinity} + | {pool_size, pos_integer()} | {applications, [app_alias()]} | {capabilities, [capability()]} | {capabilities_cb, evaluable()} diff --git a/lib/diameter/src/base/diameter_codec.erl b/lib/diameter/src/base/diameter_codec.erl index a2b04bfd63..b4ecb63961 100644 --- a/lib/diameter/src/base/diameter_codec.erl +++ b/lib/diameter/src/base/diameter_codec.erl @@ -390,6 +390,9 @@ sequence_numbers(#diameter_packet{bin = Bin}) sequence_numbers(#diameter_packet{header = #diameter_header{} = H}) -> sequence_numbers(H); +sequence_numbers(#diameter_packet{msg = [#diameter_header{} = H | _]}) -> + sequence_numbers(H); + sequence_numbers(#diameter_header{hop_by_hop_id = H, end_to_end_id = E}) -> {H,E}; @@ -561,14 +564,14 @@ split_data(Bin, Len) -> <<Data:Len/binary, _:Pad/binary, Rest/binary>> -> {Data, Rest}; _ -> - %% Header length points past the end of the message. As - %% stated in the 6733 text above, it's sufficient to - %% return a zero-filled minimal payload if this is a - %% request. Do this (in cases that we know the type) by - %% inducing a decode failure and letting the dictionary's - %% decode (in diameter_gen) deal with it. Here we don't - %% know type. If the type isn't known, then the decode - %% just strips the extra bit. + %% Header length points past the end of the message, or + %% doesn't span the header. As stated in the 6733 text + %% above, it's sufficient to return a zero-filled minimal + %% payload if this is a request. Do this (in cases that we + %% know the type) by inducing a decode failure and letting + %% the dictionary's decode (in diameter_gen) deal with it. + %% Here we don't know type. If the type isn't known, then + %% the decode just strips the extra bit. {<<0:1, Bin/binary>>, <<>>} end. @@ -582,6 +585,8 @@ split_data(Bin, Len) -> %% dictionary doesn't know about specific AVP's. %% Grouped AVP whose components need packing ... +pack_avp([#diameter_avp{} = A | Avps]) -> + pack_avp(A#diameter_avp{data = Avps}); pack_avp(#diameter_avp{data = [#diameter_avp{} | _] = Avps} = A) -> pack_avp(A#diameter_avp{data = encode_avps(Avps)}); diff --git a/lib/diameter/src/base/diameter_config.erl b/lib/diameter/src/base/diameter_config.erl index dd1c9b73bb..c0a4f7df69 100644 --- a/lib/diameter/src/base/diameter_config.erl +++ b/lib/diameter/src/base/diameter_config.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2014. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -35,10 +35,11 @@ %% -module(diameter_config). --compile({no_auto_import, [monitor/2]}). 
- -behaviour(gen_server). +-compile({no_auto_import, [monitor/2, now/0]}). +-import(diameter_lib, [now/0]). + -export([start_service/2, stop_service/1, add_transport/2, @@ -554,6 +555,9 @@ opt({watchdog_config, L}) -> opt({spawn_opt, Opts}) -> is_list(Opts); +opt({pool_size, N}) -> + is_integer(N) andalso 0 < N; + %% Options that we can't validate. opt({K, _}) when K == transport_config; diff --git a/lib/diameter/src/base/diameter_lib.erl b/lib/diameter/src/base/diameter_lib.erl index 5b3a2063f8..d0d730f47c 100644 --- a/lib/diameter/src/base/diameter_lib.erl +++ b/lib/diameter/src/base/diameter_lib.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2014. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -18,12 +18,18 @@ %% -module(diameter_lib). +-compile({no_auto_import, [now/0]}). -export([info_report/2, error_report/2, warning_report/2, + now/0, + timestamp/1, now_diff/1, + micro_diff/1, + micro_diff/2, time/1, + seed/0, eval/1, eval_name/1, get_stacktrace/0, @@ -31,6 +37,8 @@ spawn_opts/2, wait/1, fold_tuple/3, + fold_n/3, + for_n/2, log/4]). %% --------------------------------------------------------------------------- @@ -90,13 +98,50 @@ fmt(T) -> end. %% --------------------------------------------------------------------------- +%% # now/0 +%% --------------------------------------------------------------------------- + +-type timestamp() :: {non_neg_integer(), 0..999999, 0..999999}. +-type now() :: integer() %% monotonic time + | timestamp(). + +-spec now() + -> now(). + +%% Use monotonic time if it exists, fall back to erlang:now() +%% otherwise. + +now() -> + try + erlang:monotonic_time() + catch + error: undef -> erlang:now() + end. + +%% --------------------------------------------------------------------------- +%% # timestamp/1 +%% --------------------------------------------------------------------------- + +-spec timestamp(NowT :: now()) + -> timestamp(). + +timestamp({_,_,_} = T) -> %% erlang:now() + T; + +timestamp(MonoT) -> %% monotonic time + MicroSecs = erlang:convert_time_resolution(MonoT + erlang:time_offset(), + erlang:time_resolution(), + 1000000), + Secs = MicroSecs div 1000000, + {Secs div 1000000, Secs rem 1000000, MicroSecs rem 1000000}. + +%% --------------------------------------------------------------------------- %% # now_diff/1 %% --------------------------------------------------------------------------- --spec now_diff(NowT) +-spec now_diff(NowT :: now()) -> {Hours, Mins, Secs, MicroSecs} - when NowT :: {non_neg_integer(), 0..999999, 0..999999}, - Hours :: non_neg_integer(), + when Hours :: non_neg_integer(), Mins :: 0..59, Secs :: 0..59, MicroSecs :: 0..999999. @@ -104,8 +149,41 @@ fmt(T) -> %% Return timer:now_diff(now(), NowT) as an {H, M, S, MicroS} tuple %% instead of as integer microseconds. -now_diff({_,_,_} = Time) -> - time(timer:now_diff(now(), Time)). +now_diff(Time) -> + time(micro_diff(Time)). + +%% --------------------------------------------------------------------------- +%% # micro_diff/1 +%% --------------------------------------------------------------------------- + +-spec micro_diff(NowT :: now()) + -> MicroSecs + when MicroSecs :: non_neg_integer(). 
+ +micro_diff({_,_,_} = T0) -> + timer:now_diff(erlang:now(), T0); + +micro_diff(T0) -> %% monotonic time + erlang:convert_time_resolution(erlang:monotonic_time() - T0, + erlang:time_resolution(), + 1000000). + +%% --------------------------------------------------------------------------- +%% # micro_diff/2 +%% --------------------------------------------------------------------------- + +-spec micro_diff(T1 :: now(), T0 :: now()) + -> MicroSecs + when MicroSecs :: non_neg_integer(). + +micro_diff(T1, T0) + when is_integer(T1), is_integer(T0) -> %% monotonic time + erlang:convert_time_resolution(T1 - T0, + erlang:time_resolution(), + 1000000); + +micro_diff(T1, T0) -> %% at least one erlang:now() + timer:now_diff(timestamp(T1), timestamp(T0)). %% --------------------------------------------------------------------------- %% # time/1 @@ -115,7 +193,7 @@ now_diff({_,_,_} = Time) -> -spec time(NowT | Diff) -> {Hours, Mins, Secs, MicroSecs} - when NowT :: {non_neg_integer(), 0..999999, 0..999999}, + when NowT :: timestamp(), Diff :: non_neg_integer(), Hours :: non_neg_integer(), Mins :: 0..59, @@ -134,6 +212,27 @@ time(Micro) -> %% elapsed time {H, M, S, Micro rem 1000000}. %% --------------------------------------------------------------------------- +%% # seed/0 +%% --------------------------------------------------------------------------- + +-spec seed() + -> {timestamp(), {integer(), integer(), integer()}}. + +%% Return an argument for random:seed/1. + +seed() -> + T = now(), + {timestamp(T), seed(T)}. + +%% seed/1 + +seed({_,_,_} = T) -> + T; + +seed(T) -> %% monotonic time + {erlang:phash2(node()), T, erlang:unique_integer()}. + +%% --------------------------------------------------------------------------- %% # eval/1 %% %% Evaluate a function in various forms. @@ -247,17 +346,19 @@ opts(HeapSize, Opts) -> %% # wait/1 %% --------------------------------------------------------------------------- --spec wait([pid()]) +-spec wait([pid() | reference()]) -> ok. wait(L) -> - down([erlang:monitor(process, P) || P <- L]). + lists:foreach(fun down/1, L). -down([]) -> - ok; -down([MRef|T]) -> - receive {'DOWN', MRef, process, _, _} -> ok end, - down(T). +down(Pid) + when is_pid(Pid) -> + down(monitor(process, Pid)); + +down(MRef) + when is_reference(MRef) -> + receive {'DOWN', MRef, process, _, _} = T -> T end. %% --------------------------------------------------------------------------- %% # fold_tuple/3 @@ -290,6 +391,35 @@ ft(Value, {Idx, T}) -> setelement(Idx, T, Value). %% --------------------------------------------------------------------------- +%% # fold_n/3 +%% --------------------------------------------------------------------------- + +-spec fold_n(F, Acc0, N) + -> term() + when F :: fun((non_neg_integer(), term()) -> term()), + Acc0 :: term(), + N :: non_neg_integer(). + +fold_n(F, Acc, N) + when is_integer(N), 0 < N -> + fold_n(F, F(N, Acc), N-1); + +fold_n(_, Acc, _) -> + Acc. + +%% --------------------------------------------------------------------------- +%% # for_n/2 +%% --------------------------------------------------------------------------- + +-spec for_n(F, N) + -> non_neg_integer() + when F :: fun((non_neg_integer()) -> term()), + N :: non_neg_integer(). + +for_n(F, N) -> + fold_n(fun(M,A) -> F(M), A+1 end, 0, N). + +%% --------------------------------------------------------------------------- %% # log/4 %% %% Called to have something to trace on for happenings of interest. 
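%% A minimal sketch of using the new time helpers (hypothetical module: the
%% attribute pair mirrors what the diameter modules below do to make an
%% unqualified now() resolve to diameter_lib:now/0 rather than the
%% deprecated BIF):

-module(timing_sketch).
-compile({no_auto_import, [now/0]}).
-import(diameter_lib, [now/0]).

-export([measure/1, seed_prng/0]).

%% Return {Result, ElapsedMicroseconds} for evaluating F/0, working both
%% with and without the OTP 18 monotonic time API.
measure(F) ->
    T0 = now(),                           %% diameter_lib:now/0
    Res = F(),
    {Res, diameter_lib:micro_diff(T0)}.

%% Seed the random module, as diameter_session and diameter_watchdog now do.
seed_prng() ->
    {_Timestamp, Seed} = diameter_lib:seed(),
    random:seed(Seed).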
diff --git a/lib/diameter/src/base/diameter_peer.erl b/lib/diameter/src/base/diameter_peer.erl index e5d4b28766..ea326dd03e 100644 --- a/lib/diameter/src/base/diameter_peer.erl +++ b/lib/diameter/src/base/diameter_peer.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2013. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -18,9 +18,11 @@ %% -module(diameter_peer). - -behaviour(gen_server). +-compile({no_auto_import, [now/0]}). +-import(diameter_lib, [now/0]). + %% Interface towards transport modules ... -export([recv/2, up/1, diff --git a/lib/diameter/src/base/diameter_reg.erl b/lib/diameter/src/base/diameter_reg.erl index 3197c1aee1..f785777874 100644 --- a/lib/diameter/src/base/diameter_reg.erl +++ b/lib/diameter/src/base/diameter_reg.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2013. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -22,10 +22,11 @@ %% -module(diameter_reg). --compile({no_auto_import, [monitor/2]}). - -behaviour(gen_server). +-compile({no_auto_import, [monitor/2, now/0]}). +-import(diameter_lib, [now/0]). + -export([add/1, add_new/1, del/1, diff --git a/lib/diameter/src/base/diameter_service.erl b/lib/diameter/src/base/diameter_service.erl index 76b05a2ad4..04401a3d87 100644 --- a/lib/diameter/src/base/diameter_service.erl +++ b/lib/diameter/src/base/diameter_service.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2014. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -24,6 +24,9 @@ -module(diameter_service). -behaviour(gen_server). +-compile({no_auto_import, [now/0]}). +-import(diameter_lib, [now/0]). + %% towards diameter_service_sup -export([start_link/1]). @@ -610,8 +613,9 @@ st(#watchdog{ref = Ref, pid = Pid}, Refs) -> %% st/3 st(#watchdog{pid = Pid}, Reason, Acc) -> + MRef = monitor(process, Pid), Pid ! {shutdown, self(), Reason}, - [Pid | Acc]. + [MRef | Acc]. %% --------------------------------------------------------------------------- %% # call_service/2 @@ -765,8 +769,9 @@ reason(failure) -> start(Ref, {T, Opts}, S) when T == connect; T == listen -> + N = proplists:get_value(pool_size, Opts, 1), try - {ok, start(Ref, type(T), Opts, S)} + {ok, start(Ref, type(T), Opts, N, S)} catch ?FAILURE(Reason) -> {error, Reason} @@ -784,11 +789,16 @@ type(connect = T) -> T. %% start/4 -start(Ref, Type, Opts, #state{watchdogT = WatchdogT, - peerT = PeerT, - options = SvcOpts, - service_name = SvcName, - service = Svc0}) +start(Ref, Type, Opts, State) -> + start(Ref, Type, Opts, 1, State). 
+ +%% start/5 + +start(Ref, Type, Opts, N, #state{watchdogT = WatchdogT, + peerT = PeerT, + options = SvcOpts, + service_name = SvcName, + service = Svc0}) when Type == connect; Type == accept -> #diameter_service{applications = Apps} @@ -796,14 +806,19 @@ start(Ref, Type, Opts, #state{watchdogT = WatchdogT, = merge_service(Opts, Svc0), {_,_} = Mask = proplists:get_value(sequence, SvcOpts), RecvData = diameter_traffic:make_recvdata([SvcName, PeerT, Apps, Mask]), - Pid = s(Type, Ref, {{spawn_opts([Opts, SvcOpts]), RecvData}, - Opts, - SvcOpts, - Svc}), - insert(WatchdogT, #watchdog{pid = Pid, - type = Type, - ref = Ref, - options = Opts}), + T = {{spawn_opts([Opts, SvcOpts]), RecvData}, Opts, SvcOpts, Svc}, + Rec = #watchdog{type = Type, + ref = Ref, + options = Opts}, + diameter_lib:fold_n(fun(_,A) -> + [wd(Type, Ref, T, WatchdogT, Rec) | A] + end, + [], + N). + +wd(Type, Ref, T, WatchdogT, Rec) -> + Pid = wd(Type, Ref, T), + insert(WatchdogT, Rec#watchdog{pid = Pid}), Pid. %% Note that the service record passed into the watchdog is the merged @@ -816,7 +831,7 @@ spawn_opts(Optss) -> T /= link, T /= monitor]. -s(Type, Ref, T) -> +wd(Type, Ref, T) -> {_MRef, Pid} = diameter_watchdog:start({Type, Ref}, T), Pid. @@ -1185,7 +1200,7 @@ connect_timer(Opts, Def0) -> %% continuous restarted in case of faulty config or other problems. tc(Time, Tc) -> choose(Tc > ?RESTART_TC - orelse timer:now_diff(now(), Time) > 1000*?RESTART_TC, + orelse diameter_lib:micro_diff(Time) > 1000*?RESTART_TC, Tc, ?RESTART_TC). @@ -1718,31 +1733,43 @@ info_transport(S) -> [], PeerD). -%% Only a config entry for a listening transport: use it. -transport([[{type, listen}, _] = L]) -> - L ++ [{accept, []}]; - -%% Only one config or peer entry for a connecting transport: use it. -transport([[{type, connect} | _] = L]) -> - L; +%% Single config entry. Distinguish between pool_size config or not on +%% a connecting transport for backwards compatibility: with the option +%% the form is similar to the listening case, with connections grouped +%% in a pool tuple (for lack of a better name), without as before. +transport([[{type, Type}, {options, Opts}] = L]) + when Type == listen; + Type == connect -> + L ++ [{K, []} || [{_,K}] <- [keys(Type, Opts)]]; %% Peer entries: discard config. Note that the peer entries have %% length at least 3. transport([[_,_] | L]) -> transport(L); -%% Possibly many peer entries for a listening transport. Note that all -%% have the same options by construction, which is not terribly space -%% efficient. -transport([[{type, accept}, {options, Opts} | _] | _] = Ls) -> - [{type, listen}, +%% Multiple tranports. Note that all have the same options by +%% construction, which is not terribly space efficient. +transport([[{type, Type}, {options, Opts} | _] | _] = Ls) -> + transport(keys(Type, Opts), Ls). + +%% Group transports in an accept or pool tuple ... +transport([{Type, Key}], [[{type, _}, {options, Opts} | _] | _] = Ls) -> + [{type, Type}, {options, Opts}, - {accept, [lists:nthtail(2,L) || L <- Ls]}]. + {Key, [tl(tl(L)) || L <- Ls]}]; + +%% ... or not: there can only be one. +transport([], [L]) -> + L. + +keys(connect = T, Opts) -> + [{T, pool} || lists:keymember(pool_size, 1, Opts)]; +keys(_, _) -> + [{listen, accept}]. 
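%% Configuration sketch for the pool_size option introduced above
%% (illustrative only: SvcName, addresses and counts are placeholders).
%% On a listening transport it sets the number of accepting processes kept
%% ready; on a connecting transport it establishes that many connections,
%% which service info then groups in a pool tuple (cf. keys/2 above).

pool_example(SvcName) ->
    {ok, _} = diameter:add_transport(SvcName,
                                     {listen,
                                      [{pool_size, 4},
                                       {transport_module, diameter_tcp},
                                       {transport_config,
                                        [{reuseaddr, true},
                                         {ip, {127,0,0,1}},
                                         {port, 3868}]}]}),
    {ok, _} = diameter:add_transport(SvcName,
                                     {connect,
                                      [{pool_size, 2},
                                       {transport_module, diameter_tcp},
                                       {transport_config,
                                        [{raddr, {127,0,0,1}},
                                         {rport, 3868}]}]}).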
peer_dict(#state{watchdogT = WatchdogT, peerT = PeerT}, Dict0) -> try ets:tab2list(WatchdogT) of - L -> - lists:foldl(fun(T,A) -> peer_acc(PeerT, A, T) end, Dict0, L) + L -> lists:foldl(fun(T,A) -> peer_acc(PeerT, A, T) end, Dict0, L) catch error: badarg -> Dict0 %% service has gone down end. diff --git a/lib/diameter/src/base/diameter_service_sup.erl b/lib/diameter/src/base/diameter_service_sup.erl index 153fff902f..e3177f0083 100644 --- a/lib/diameter/src/base/diameter_service_sup.erl +++ b/lib/diameter/src/base/diameter_service_sup.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2011. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -58,7 +58,7 @@ init([]) -> ChildSpec = {Mod, {Mod, start_link, []}, temporary, - 1000, + 5000, worker, [Mod]}, {ok, {Flags, [ChildSpec]}}. diff --git a/lib/diameter/src/base/diameter_session.erl b/lib/diameter/src/base/diameter_session.erl index 3b236f109a..c5ea0428b5 100644 --- a/lib/diameter/src/base/diameter_session.erl +++ b/lib/diameter/src/base/diameter_session.erl @@ -157,8 +157,8 @@ session_id(Host) -> %% --------------------------------------------------------------------------- init() -> - Now = now(), - random:seed(Now), + {Now, Seed} = diameter_lib:seed(), + random:seed(Seed), Time = time32(Now), Seq = (?INT32 band (Time bsl 20)) bor (random:uniform(1 bsl 20) - 1), ets:insert(diameter_sequence, [{origin_state_id, Time}, diff --git a/lib/diameter/src/base/diameter_stats.erl b/lib/diameter/src/base/diameter_stats.erl index 8353613d32..64ea082be0 100644 --- a/lib/diameter/src/base/diameter_stats.erl +++ b/lib/diameter/src/base/diameter_stats.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2014. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -22,9 +22,11 @@ %% -module(diameter_stats). - -behaviour(gen_server). +-compile({no_auto_import, [now/0]}). +-import(diameter_lib, [now/0]). + -export([reg/2, reg/1, incr/3, incr/1, read/1, diff --git a/lib/diameter/src/base/diameter_sup.erl b/lib/diameter/src/base/diameter_sup.erl index e5afd23dcd..4ede4086d8 100644 --- a/lib/diameter/src/base/diameter_sup.erl +++ b/lib/diameter/src/base/diameter_sup.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2011. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -64,7 +64,7 @@ spec(Mod) -> {Mod, {Mod, start_link, []}, permanent, - 1000, + infinity, supervisor, [Mod]}. diff --git a/lib/diameter/src/base/diameter_sync.erl b/lib/diameter/src/base/diameter_sync.erl index ce2db4b3a2..90eabece3d 100644 --- a/lib/diameter/src/base/diameter_sync.erl +++ b/lib/diameter/src/base/diameter_sync.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2011. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -27,6 +27,9 @@ -module(diameter_sync). -behaviour(gen_server). 
+-compile({no_auto_import, [now/0]}). +-import(diameter_lib, [now/0]). + -export([call/4, call/5, cast/4, cast/5, carp/1, carp/2]). diff --git a/lib/diameter/src/base/diameter_traffic.erl b/lib/diameter/src/base/diameter_traffic.erl index 3b62afca47..0b503338a6 100644 --- a/lib/diameter/src/base/diameter_traffic.erl +++ b/lib/diameter/src/base/diameter_traffic.erl @@ -162,24 +162,28 @@ incr_error(Dir, Id, TPid) -> %% incr_rc/4 %% --------------------------------------------------------------------------- --spec incr_rc(send|recv, Pkt, TPid, Dict0) +-spec incr_rc(send|recv, Pkt, TPid, DictT) -> {Counter, non_neg_integer()} | Reason when Pkt :: #diameter_packet{}, TPid :: pid(), - Dict0 :: module(), + DictT :: module() | {module(), module(), module()}, Counter :: {'Result-Code', integer()} | {'Experimental-Result', integer(), integer()}, Reason :: atom(). -incr_rc(Dir, Pkt, TPid, Dict0) -> +incr_rc(Dir, Pkt, TPid, {Dict, _, _} = DictT) -> try - incr_result(Dir, Pkt, TPid, {Dict0, Dict0, Dict0}) + incr_result(Dir, Pkt, TPid, DictT) catch exit: {E,_} when E == no_result_code; E == invalid_error_bit -> + incr(TPid, {msg_id(Pkt#diameter_packet.header, Dict), Dir, E}), E - end. + end; + +incr_rc(Dir, Pkt, TPid, Dict0) -> + incr_rc(Dir, Pkt, TPid, {Dict0, Dict0, Dict0}). %% --------------------------------------------------------------------------- %% pending/1 @@ -678,7 +682,7 @@ local(Msg, TPid, {Dict, AppDict, Dict0} = DictT, Fs, ReqPkt) -> reset(make_answer_packet(Msg, ReqPkt), Dict, Dict0), Fs), incr(send, Pkt, TPid, AppDict), - incr_result(send, Pkt, TPid, DictT), %% count outgoing + incr_rc(send, Pkt, TPid, DictT), %% count outgoing send(TPid, Pkt). %% reset/3 @@ -1388,6 +1392,21 @@ make_request_packet(#diameter_packet{header = Hdr} = Pkt, make_request_packet(Msg, Pkt) -> Pkt#diameter_packet{msg = Msg}. +%% make_retransmit_packet/2 + +make_retransmit_packet(#diameter_packet{msg = [#diameter_header{} = Hdr + | Avps]} + = Pkt) -> + Pkt#diameter_packet{msg = [make_retransmit_header(Hdr) | Avps]}; + +make_retransmit_packet(#diameter_packet{header = Hdr} = Pkt) -> + Pkt#diameter_packet{header = make_retransmit_header(Hdr)}. + +%% make_retransmit_header/1 + +make_retransmit_header(Hdr) -> + Hdr#diameter_header{is_retransmitted = true}. + %% fold_record/2 fold_record(undefined, R) -> @@ -1674,9 +1693,7 @@ retransmit({TPid, Caps, App} have_request(Pkt0, TPid) %% Don't failover to a peer we've andalso ?THROW(timeout), %% already sent to. - #diameter_packet{header = Hdr0} = Pkt0, - Hdr = Hdr0#diameter_header{is_retransmitted = true}, - Pkt = Pkt0#diameter_packet{header = Hdr}, + Pkt = make_retransmit_packet(Pkt0), retransmit(cb(App, prepare_retransmit, [Pkt, SvcName, {TPid, Caps}]), Transport, diff --git a/lib/diameter/src/base/diameter_types.erl b/lib/diameter/src/base/diameter_types.erl index ca3338be5f..442d90c98b 100644 --- a/lib/diameter/src/base/diameter_types.erl +++ b/lib/diameter/src/base/diameter_types.erl @@ -75,7 +75,7 @@ %% message indicating this error MUST include the offending AVPs %% within a Failed-AVP AVP. %% --define(INVALID_LENGTH(Bin), erlang:error({'DIAMETER', 5014, Bin})). +-define(INVALID_LENGTH(Bitstr), erlang:error({'DIAMETER', 5014, Bitstr})). %% ------------------------------------------------------------------------- %% 3588, 4.2. 
Basic AVP Data Formats diff --git a/lib/diameter/src/base/diameter_watchdog.erl b/lib/diameter/src/base/diameter_watchdog.erl index b7f2d24941..67715906e8 100644 --- a/lib/diameter/src/base/diameter_watchdog.erl +++ b/lib/diameter/src/base/diameter_watchdog.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2014. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -122,7 +122,8 @@ i({Ack, T, Pid, {RecvData, = Svc}}) -> erlang:monitor(process, Pid), wait(Ack, Pid), - random:seed(now()), + {_, Seed} = diameter_lib:seed(), + random:seed(Seed), putr(restart, {T, Opts, Svc}), %% save seeing it in trace putr(dwr, dwr(Caps)), %% {_,_} = Mask = proplists:get_value(sequence, SvcOpts), diff --git a/lib/diameter/src/modules.mk b/lib/diameter/src/modules.mk index a2a7a51892..c9dd4e683a 100644 --- a/lib/diameter/src/modules.mk +++ b/lib/diameter/src/modules.mk @@ -1,7 +1,7 @@ # %CopyrightBegin% # -# Copyright Ericsson AB 2010-2014. All Rights Reserved. +# Copyright Ericsson AB 2010-2015. All Rights Reserved. # # The contents of this file are subject to the Erlang Public License, # Version 1.1, (the "License"); you may not use this file except in @@ -94,7 +94,7 @@ BINS = \ # Released files relative to ../examples. EXAMPLES = \ code/GNUmakefile \ - code/peer.erl \ + code/node.erl \ code/client.erl \ code/client_cb.erl \ code/server.erl \ diff --git a/lib/diameter/src/transport/diameter_sctp.erl b/lib/diameter/src/transport/diameter_sctp.erl index 32e7aaca39..2c8d6f0a14 100644 --- a/lib/diameter/src/transport/diameter_sctp.erl +++ b/lib/diameter/src/transport/diameter_sctp.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2014. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -18,9 +18,11 @@ %% -module(diameter_sctp). - -behaviour(gen_server). +-compile({no_auto_import, [now/0]}). +-import(diameter_lib, [now/0]). + %% interface -export([start/3]). @@ -37,7 +39,8 @@ code_change/3, terminate/2]). --export([info/1]). %% service_info callback +-export([listener/1,%% diameter_sync callback + info/1]). %% service_info callback -export([ports/0, ports/1]). @@ -99,22 +102,31 @@ -record(listener, {ref :: reference(), socket :: gen_sctp:sctp_socket(), - count = 0 :: uint(), + count = 0 :: uint(), %% attached transport processes tmap = ets:new(?MODULE, []) :: ets:tid(), %% {MRef, Pid|AssocId}, {AssocId, Pid} pending = {0, ets:new(?MODULE, [ordered_set])}, tref :: reference(), accept :: [match()]}). %% Field tmap is used to map an incoming message or event to the -%% relevent transport process. Field pending implements a queue of -%% transport processes to which an association has been assigned (at -%% comm_up and written into tmap) but for which diameter hasn't yet -%% spawned a transport process: a short-lived state of affairs as a -%% new transport is spawned as a consequence of a peer being taken up, -%% transport processes being spawned by the listener on demand. In -%% case diameter starts a transport before comm_up on a new -%% association, pending is set to an improper list with the spawned -%% transport as head and the queue as tail. +%% relevant transport process. 
Field pending implements two queues: +%% the first of transport-to-be processes to which an association has +%% been assigned (at comm_up and written into tmap) but for which +%% diameter hasn't yet spawned a transport process, a short-lived +%% state of affairs as a new transport is spawned as a consequence of +%% a peer being taken up, transport processes being spawned by the +%% listener on demand; the second of started transport processes that +%% have not yet been assigned an association. +%% +%% When diameter calls start/3, the transport process is either taken +%% from the first queue or spawned and placed in the second queue +%% until an association is established. When an association is +%% established, a controlling process is either taken from the second +%% queue or spawned and placed in the first queue. Thus, there are +%% only elements in one queue at a time, so share an ets table queue +%% and tag it with a positive length if it contains the first queue, a +%% negative length if it contains the second queue. The case -1 is +%% handled differently for backwards compatibility reasons. %% --------------------------------------------------------------------------- %% # start/3 @@ -139,9 +151,9 @@ ip(T) -> T. %% A listener spawns transports either as a consequence of this call -%% when there is not yet an association to associate with it, or at -%% comm_up on a new association in which case the call retrieves a -%% transport from the pending queue. +%% when there is not yet an association to assign it, or at comm_up on +%% a new association in which case the call retrieves a transport from +%% the pending queue. s({accept, Ref} = A, Addrs, Opts) -> {LPid, LAs} = listener(Ref, {Opts, Addrs}), try gen_server:call(LPid, {A, self()}, infinity) of @@ -226,7 +238,7 @@ i({connect, Pid, Opts, Addrs, Ref}) -> {LAs, Sock} = open(Addrs, Rest, 0), putr(?REF_KEY, Ref), proc_lib:init_ack({ok, self(), LAs}), - erlang:monitor(process, Pid), + monitor(process, Pid), #transport{parent = Pid, mode = {connect, connect(Sock, RAs, RP, [])}, socket = Sock}; @@ -236,8 +248,8 @@ i({accept, Pid, LPid, Sock, Ref}) when is_pid(Pid) -> putr(?REF_KEY, Ref), proc_lib:init_ack({ok, self()}), - erlang:monitor(process, Pid), - erlang:monitor(process, LPid), + monitor(process, Pid), + monitor(process, LPid), #transport{parent = Pid, mode = {accept, LPid}, socket = Sock}; @@ -246,7 +258,7 @@ i({accept, Pid, LPid, Sock, Ref}) i({accept, Ref, LPid, Sock, Id}) -> putr(?REF_KEY, Ref), proc_lib:init_ack({ok, self()}), - MRef = erlang:monitor(process, LPid), + MRef = monitor(process, LPid), %% Wait for a signal that the transport has been started before %% processing other messages. receive @@ -270,15 +282,23 @@ close(Sock, Id) -> %% listener/2 +%% Accepting processes can be started concurrently: ensure only one +%% listener is started. listener(LRef, T) -> + diameter_sync:call({?MODULE, listener, LRef}, + {?MODULE, listener, [{LRef, T}]}, + infinity, + infinity). + +listener({LRef, T}) -> l(diameter_reg:match({?MODULE, listener, {LRef, '_'}}), LRef, T). -%% Existing process with the listening socket ... +%% Existing listening process ... l([{{?MODULE, listener, {_, AS}}, LPid}], _, _) -> - {LAs, _Sock} = AS, - {LPid, LAs}; - -%% ... or not: start one. + {LAs, _Sock} = AS, + {LPid, LAs}; + +%% ... or not. l([], LRef, T) -> {ok, LPid, LAs} = diameter_sctp_sup:start_child({listen, LRef, T}), {LPid, LAs}. 
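%% A minimal sketch of the shared-queue convention described above (helper
%% names hypothetical): one ets table Q holds whichever of the two queues
%% is currently non-empty, and the sign of N says which one it is.
%%
%%   N > 0 : transports spawned at comm_up, waiting for diameter to ask
%%           for an accepting transport;
%%   N < 0 : transports spawned at diameter's request, waiting for an
%%           association;
%%   N = 0 : both queues empty.

push(TPid, {N, Q}, Sign) when N == 0; N * Sign > 0 ->
    ets:insert(Q, {TPid, os:timestamp()}),
    {N + Sign, Q}.

pop({N, Q}) when N /= 0 ->
    TPid = ets:first(Q),                  %% a waiting transport process
    ets:delete(Q, TPid),
    {TPid, {N - sign(N), Q}}.

sign(N) when N > 0 -> 1;
sign(N) when N < 0 -> -1.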
@@ -347,11 +367,17 @@ type(T) -> %% # handle_call/3 %% --------------------------------------------------------------------------- +handle_call(T, From, #listener{pending = L} = S) + when is_list(L) -> + handle_call(T, From, upgrade(S)); + handle_call({{accept, Ref}, Pid}, _, #listener{ref = Ref, - count = N} + pending = {N,Q}, + count = K} = S) -> - {TPid, NewS} = accept(Ref, Pid, S), - {reply, {ok, TPid}, NewS#listener{count = N+1}}; + TPid = accept(Ref, Pid, S), + {reply, {ok, TPid}, downgrade(S#listener{pending = {N-1,Q}, + count = K+1})}; handle_call(_, _, State) -> {reply, nok, State}. @@ -370,8 +396,46 @@ handle_cast(_, State) -> handle_info(T, #transport{} = S) -> {noreply, #transport{} = t(T,S)}; +handle_info(T, #listener{pending = L} = S) + when is_list(L) -> + handle_info(T, upgrade(S)); + handle_info(T, #listener{} = S) -> - {noreply, #listener{} = l(T,S)}. + {noreply, downgrade(#listener{} = l(T,S))}. + +%% upgrade/1 + +upgrade(#listener{pending = [TPid | {0,Q}]} = S) -> + ets:insert(Q, {TPid, now()}), + S#listener{pending = {-1,Q}}. +%% Prior to the possiblity of setting pool_size on in transport +%% configuration, a new accepting transport was only started following +%% the death of a predecessor, so that there was only at most one +%% previously started transport process waiting for an association. +%% This assumption no longer holds with pool_size > 1, in which case +%% several accepting transports are started concurrently. Deal with +%% this by placing the started transports in a new queue of transport +%% processes waiting for an association. +%% +%% Since only one of this queue and the existing queue of controlling +%% processes waiting for a transport to be started can be non-empty at +%% any given time, implement both queues in the same ets table. The +%% absolute value of the first element of the 2-tuple is the queue +%% length, the sign says which queue it is. + +%% downgrade/1 +%% +%% Revert to the pre-pool_size representation when possible, for +%% backwards compatibility in the case that the pool_size option +%% hasn't been used. + +downgrade(#listener{pending = {-1,Q}} = S) -> + TPid = ets:first(Q), + ets:delete(Q, TPid), + S#listener{pending = [TPid | {0,Q}]}; + +downgrade(S) -> + S. %% --------------------------------------------------------------------------- %% # code_change/3 @@ -436,54 +500,46 @@ l({sctp, Sock, _RA, _RP, Data} = Msg, #listener{socket = Sock} = S) -> setopts(Sock) end; -%% Transport is asking message to be sent. See send/3 for why the send -%% isn't directly from the transport. -l({send, AssocId, StreamId, Bin}, #listener{socket = Sock} = S) -> - send(Sock, AssocId, StreamId, Bin), - S; +l({'DOWN', MRef, process, TPid, _}, #listener{pending = {_,Q}} = S) -> + down(ets:member(Q, TPid), MRef, TPid, S); + +%% Timeout after the last accepting process has died. +l({timeout, TRef, close = T}, #listener{tref = TRef, + count = 0}) -> + x(T); +l({timeout, _, close}, #listener{} = S) -> + S. + +%% down/4 %% Accepting transport has died. One that's awaiting an association ... -l({'DOWN', MRef, process, TPid, _}, #listener{pending = [TPid | Q], - tmap = T, - count = N} - = S) -> +down(true, MRef, TPid, #listener{pending = {N,Q}, + tmap = T, + count = K} + = S) + when N < 0 -> + ets:delete(Q, TPid), ets:delete(T, MRef), ets:delete(T, TPid), - start_timer(S#listener{count = N-1, - pending = Q}); - -%% ... ditto and a new transport has already been started ... 
-l({'DOWN', _, process, _, _} = T, #listener{pending = [TPid | Q]} - = S) -> - #listener{pending = NQ} - = NewS - = l(T, S#listener{pending = Q}), - NewS#listener{pending = [TPid | NQ]}; - -%% ... or not. -l({'DOWN', MRef, process, TPid, _}, #listener{socket = Sock, - tmap = T, - count = N, - pending = {P,Q}} - = S) -> + start_timer(S#listener{count = K-1, + pending = {N+1,Q}}); + +%% ... or one that already has one. +down(B, MRef, TPid, #listener{socket = Sock, + tmap = T, + count = K, + pending = {N,Q}} + = S) -> [{MRef, Id}] = ets:lookup(T, MRef), %% Id = TPid | AssocId ets:delete(T, MRef), ets:delete(T, Id), Id == TPid orelse close(Sock, Id), - case ets:lookup(Q, TPid) of - [{TPid, _}] -> %% transport in the pending queue ... + if B -> %% Waiting for attachment in the pending queue ... ets:delete(Q, TPid), - S#listener{pending = {P-1, Q}}; - [] -> %% ... or not - start_timer(S#listener{count = N-1}) - end; - -%% Timeout after the last accepting process has died. -l({timeout, TRef, close = T}, #listener{tref = TRef, - count = 0}) -> - x(T); -l({timeout, _, close}, #listener{} = S) -> - S. + S#listener{pending = {N-1,Q}}; + true -> %% ... or already attached + start_timer(S#listener{count = K-1}) + end. %% t/2 %% @@ -582,29 +638,24 @@ accept(Opts) -> %% No pending associations: spawn a new transport. accept(Ref, Pid, #listener{socket = Sock, tmap = T, - pending = {0,_} = Q} - = S) -> + pending = {N,Q}}) + when N =< 0 -> Arg = {accept, Pid, self(), Sock, Ref}, {ok, TPid} = diameter_sctp_sup:start_child(Arg), - MRef = erlang:monitor(process, TPid), + MRef = monitor(process, TPid), ets:insert(T, [{MRef, TPid}, {TPid, MRef}]), - {TPid, S#listener{pending = [TPid | Q]}}; -%% Placing the transport in the pending field makes it available to -%% the next association. The stack starts a new accepting transport -%% only after this one brings the connection up (or dies). - -%% Accepting transport has died. This can happen if a new transport is -%% started before the DOWN has arrived. -accept(Ref, Pid, #listener{pending = [TPid | {0,_} = Q]} = S) -> - false = is_process_alive(TPid), %% assert - accept(Ref, Pid, S#listener{pending = Q}); + ets:insert(Q, {TPid, now()}), + TPid; +%% Placing the transport in the second pending table makes it +%% available to the next association. %% Pending associations: attach to the first in the queue. -accept(_, Pid, #listener{ref = Ref, pending = {N,Q}} = S) -> +accept(_, Pid, #listener{ref = Ref, + pending = {_,Q}}) -> TPid = ets:first(Q), TPid ! {Ref, Pid}, ets:delete(Q, TPid), - {TPid, S#listener{pending = {N-1, Q}}}. + TPid. %% send/2 @@ -718,34 +769,12 @@ up(#transport{parent = Pid, find(Id, Data, #listener{tmap = T} = S) -> f(ets:lookup(T, Id), Data, S). -%% New association and a transport waiting for one: use it. -f([], - {_, #sctp_assoc_change{state = comm_up, - assoc_id = Id}}, - #listener{tmap = T, - pending = [TPid | {_,_} = Q]} - = S) -> - [{TPid, MRef}] = ets:lookup(T, TPid), - ets:insert(T, [{MRef, Id}, {Id, TPid}]), - ets:delete(T, TPid), - {TPid, S#listener{pending = Q}}; - -%% New association and no transport start yet: spawn one and place it -%% in the queue. +%% New association ... 
f([], - {_, #sctp_assoc_change{state = comm_up, - assoc_id = Id}}, - #listener{ref = Ref, - socket = Sock, - tmap = T, - pending = {N,Q}} + {_, #sctp_assoc_change{state = comm_up, assoc_id = Id}}, + #listener{pending = {N,Q}} = S) -> - Arg = {accept, Ref, self(), Sock, Id}, - {ok, TPid} = diameter_sctp_sup:start_child(Arg), - MRef = erlang:monitor(process, TPid), - ets:insert(T, [{MRef, Id}, {Id, TPid}]), - ets:insert(Q, {TPid, now()}), - {TPid, S#listener{pending = {N+1, Q}}}; + {find(Id, S), S#listener{pending = {N+1,Q}}}; %% Known association ... f([{_, TPid}], _, S) -> @@ -755,6 +784,31 @@ f([{_, TPid}], _, S) -> f([], _, _) -> false. +%% find/2 + +%% Transport waiting for an association: use it. +find(Id, #listener{tmap = T, + pending = {N,Q}}) + when N < 0 -> + TPid = ets:first(Q), + [{TPid, MRef}] = ets:lookup(T, TPid), + ets:insert(T, [{MRef, Id}, {Id, TPid}]), + ets:delete(T, TPid), + ets:delete(Q, TPid), + TPid; + +%% No transport start yet: spawn one and queue. +find(Id, #listener{ref = Ref, + socket = Sock, + tmap = T, + pending = {_,Q}}) -> + Arg = {accept, Ref, self(), Sock, Id}, + {ok, TPid} = diameter_sctp_sup:start_child(Arg), + MRef = monitor(process, TPid), + ets:insert(T, [{MRef, Id}, {Id, TPid}]), + ets:insert(Q, {TPid, now()}), + TPid. + %% assoc_id/1 assoc_id({[#sctp_sndrcvinfo{assoc_id = Id}], _}) -> diff --git a/lib/diameter/src/transport/diameter_tcp.erl b/lib/diameter/src/transport/diameter_tcp.erl index 4d1b8bec51..0b26f429fb 100644 --- a/lib/diameter/src/transport/diameter_tcp.erl +++ b/lib/diameter/src/transport/diameter_tcp.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2013. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -37,7 +37,8 @@ code_change/3, terminate/2]). --export([info/1]). %% service_info callback +-export([listener/1,%% diameter_sync callback + info/1]). %% service_info callback -export([ports/0, ports/1]). @@ -191,7 +192,7 @@ init(T) -> i({T, Ref, Mod, Pid, Opts, Addrs}) when T == accept; T == connect -> - erlang:monitor(process, Pid), + monitor(process, Pid), %% Since accept/connect might block indefinitely, spawn a process %% that does nothing but kill us with the parent until call %% returns. @@ -218,8 +219,8 @@ i({T, Ref, Mod, Pid, Opts, Addrs}) %% A monitor process to kill the transport if the parent dies. i(#monitor{parent = Pid, transport = TPid} = S) -> proc_lib:init_ack({ok, self()}), - erlang:monitor(process, Pid), - erlang:monitor(process, TPid), + monitor(process, Pid), + monitor(process, TPid), S; %% In principle a link between the transport and killer processes %% could do the same thing: have the accepting/connecting process be @@ -235,7 +236,7 @@ i({listen, LRef, APid, {Mod, Opts, Addrs}}) -> LAddr = laddr(LAddrOpt, Mod, LSock), true = diameter_reg:add_new({?MODULE, listener, {LRef, {LAddr, LSock}}}), proc_lib:init_ack({ok, self(), {LAddr, LSock}}), - erlang:monitor(process, APid), + monitor(process, APid), start_timer(#listener{socket = LSock}). laddr([], Mod, Sock) -> @@ -336,17 +337,25 @@ accept(Opts) -> %% listener/2 +%% Accepting processes can be started concurrently: ensure only one +%% listener is started. listener(LRef, T) -> - l(diameter_reg:match({?MODULE, listener, {LRef, '_'}}), LRef, T). + diameter_sync:call({?MODULE, listener, LRef}, + {?MODULE, listener, [{LRef, T, self()}]}, + infinity, + infinity). 
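%% Both diameter_tcp and diameter_sctp now funnel listener startup through
%% diameter_sync:call/4, as above, so that accepting transports started
%% concurrently (for example with pool_size > 1) cannot race to create the
%% listening socket: requests queued under the same name run one at a time.
%% A sketch of the pattern in isolation (function names hypothetical;
%% maybe_start/1 must be exported since diameter_sync applies it as an MFA):

ensure_started(Key) ->
    diameter_sync:call({?MODULE, listener, Key},   %% serialization name
                       {?MODULE, maybe_start, [Key]},
                       infinity,                   %% wait in queue
                       infinity).                  %% no request timeout

maybe_start(Key) ->
    %% Runs exclusively for a given Key, so check-then-start is race free.
    case diameter_reg:match({?MODULE, listener, {Key, '_'}}) of
        [{_, Pid}] ->
            Pid;                                   %% already started
        [] ->
            Pid = spawn(fun() -> receive stop -> ok end end),  %% placeholder
            true = diameter_reg:add_new({?MODULE, listener, {Key, Pid}}),
            Pid
    end.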
-%% Existing process with the listening socket ... -l([{{?MODULE, listener, {_, AS}}, LPid}], _, _) -> - LPid ! {accept, self()}, +listener({LRef, T, TPid}) -> + l(diameter_reg:match({?MODULE, listener, {LRef, '_'}}), LRef, T, TPid). + +%% Existing listening process ... +l([{{?MODULE, listener, {_, AS}}, LPid}], _, _, TPid) -> + LPid ! {accept, TPid}, AS; -%% ... or not: start one. -l([], LRef, T) -> - {ok, _, AS} = diameter_tcp_sup:start_child({listen, LRef, self(), T}), +%% ... or not. +l([], LRef, T, TPid) -> + {ok, _, AS} = diameter_tcp_sup:start_child({listen, LRef, TPid, T}), AS. %% get_addr/1 @@ -502,7 +511,7 @@ m({'DOWN', _, process, Pid, _}, #monitor{parent = Pid, %% Another accept transport is attaching. l({accept, TPid}, #listener{count = N} = S) -> - erlang:monitor(process, TPid), + monitor(process, TPid), S#listener{count = N+1}; %% Accepting process has died. diff --git a/lib/diameter/src/transport/diameter_transport_sup.erl b/lib/diameter/src/transport/diameter_transport_sup.erl index 6457ab78b0..284a41a752 100644 --- a/lib/diameter/src/transport/diameter_transport_sup.erl +++ b/lib/diameter/src/transport/diameter_transport_sup.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2011. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -54,7 +54,7 @@ start_child(Name, Module) -> Spec = {Name, {Module, start_link, [Name]}, permanent, - 1000, + infinity, supervisor, [Module]}, supervisor:start_child(?MODULE, Spec). diff --git a/lib/diameter/test/diameter_app_SUITE.erl b/lib/diameter/test/diameter_app_SUITE.erl index f68a18b5c2..cf34c762e1 100644 --- a/lib/diameter/test/diameter_app_SUITE.erl +++ b/lib/diameter/test/diameter_app_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2014. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -187,15 +187,14 @@ xref(Config) -> xref:stop(XRef), + Rel = release(), %% otp_release-ish + %% Only care about calls from our own application. - [] = lists:filter(fun({{F,_,_},{T,_,_}}) -> + [] = lists:filter(fun({{F,_,_} = From, {_,_,_} = To}) -> lists:member(F, Mods) - andalso {F,T} /= {diameter_tcp, ssl} + andalso not ignored(From, To, Rel) end, Undefs), - %% diameter_tcp does call ssl despite the latter not being listed - %% as a dependency in the app file since ssl is only required for - %% TLS security: it's up to a client who wants TLS to start ssl. %% Ensure that only runtime or info modules call runtime modules. %% It's not strictly necessary that diameter compiler modules not @@ -214,6 +213,38 @@ xref(Config) -> [] = lists:filter(fun(M) -> not lists:member(app(M), Deps) end, RTdeps -- Mods). +ignored({FromMod,_,_}, {ToMod,_,_} = To, Rel)-> + %% diameter_tcp does call ssl despite the latter not being listed + %% as a dependency in the app file since ssl is only required for + %% TLS security: it's up to a client who wants TLS to start ssl. + %% The OTP 18 time api is also called if it exists, so that the + %% same code can be run on older releases. + {FromMod, ToMod} == {diameter_tcp, ssl} + orelse (FromMod == diameter_lib + andalso Rel < 18 + andalso lists:member(To, time_api())). + +%% New time api in OTP 18. 
+time_api() -> + [{erlang, F, A} || {F,A} <- [{convert_time_resolution,3}, + {monotonic_time,0}, + {monotonic_time,1}, + {time_offset,0}, + {time_offset,1}, + {time_resolution,0}, + {timestamp,0}, + {unique_integer,0}, + {unique_integer,1}]]. + +release() -> + Rel = erlang:system_info(otp_release), + try list_to_integer(Rel) of + N -> N + catch + error:_ -> + 0 %% aka < 17 + end. + unversion(App) -> T = lists:dropwhile(fun is_vsn_ch/1, lists:reverse(App)), lists:reverse(case T of [$-|TT] -> TT; _ -> T end). diff --git a/lib/diameter/test/diameter_capx_SUITE.erl b/lib/diameter/test/diameter_capx_SUITE.erl index deabdd720b..02501ce779 100644 --- a/lib/diameter/test/diameter_capx_SUITE.erl +++ b/lib/diameter/test/diameter_capx_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2013. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -144,8 +144,8 @@ end_per_suite(_Config) -> %% Generate a unique hostname for each testcase so that watchdogs %% don't prevent a connection from being brought up immediately. init_per_testcase(Name, Config) -> - Uniq = ["." ++ integer_to_list(N) || N <- tuple_to_list(now())], - [{host, lists:flatten([?L(Name) | Uniq])} | Config]. + [{host, ?L(Name) ++ "." ++ diameter_util:unique_string()} + | Config]. init_per_group(Name, Config) -> [{rfc, Name} | Config]. diff --git a/lib/diameter/test/diameter_codec_SUITE.erl b/lib/diameter/test/diameter_codec_SUITE.erl index cd8ca41f66..64ea90554d 100644 --- a/lib/diameter/test/diameter_codec_SUITE.erl +++ b/lib/diameter/test/diameter_codec_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2013. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -29,6 +29,9 @@ -export([suite/0, all/0, + groups/0, + init_per_group/2, + end_per_group/2, init_per_testcase/2, end_per_testcase/2]). @@ -36,9 +39,13 @@ -export([base/1, gen/1, lib/1, - unknown/1]). + unknown/1, + success/1, + grouped_error/1, + failed_error/1]). -include("diameter_ct.hrl"). +-include("diameter.hrl"). -define(L, atom_to_list). @@ -48,7 +55,19 @@ suite() -> [{timetrap, {seconds, 10}}]. all() -> - [base, gen, lib, unknown]. + [base, gen, lib, unknown, {group, recode}]. + +groups() -> + [{recode, [], [success, + grouped_error, + failed_error]}]. + +init_per_group(recode, Config) -> + ok = diameter:start(), + Config. + +end_per_group(_, _) -> + ok = diameter:stop(). init_per_testcase(gen, Config) -> [{application, ?APP, App}] = diameter_util:consult(?APP, app), @@ -98,3 +117,166 @@ compile(File) -> compile(File, Opts) -> compile:file(File, [return | Opts]). + +%% =========================================================================== + +%% Ensure a Grouped AVP is represented by a list in the avps field. +success(_) -> + Avps = [{295, <<1:32>>}, %% Termination-Cause + {284, [{280, "Proxy-Host"}, %% Proxy-Info + {33, "Proxy-State"}, %% + {295, <<2:32>>}]}], %% Termination-Cause + #diameter_packet{avps = [#diameter_avp{code = 295, + value = 1, + data = <<1:32>>}, + [#diameter_avp{code = 284}, + #diameter_avp{code = 280}, + #diameter_avp{code = 33}, + #diameter_avp{code = 295, + value = 2, + data = <<2:32>>}]], + errors = []} + = str(recode(str(Avps))). 
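%% The testcase above pins down the new representation: in the avps field
%% of a decoded #diameter_packet{}, a Grouped AVP is a list whose head is
%% the Grouped AVP's own #diameter_avp{} and whose tail holds its component
%% AVPs. A small sketch of walking such a list (helper name hypothetical):

-include_lib("diameter/include/diameter.hrl").

%% Return {Code, ComponentAvps} for each Grouped AVP in a decoded avps
%% list; non-grouped entries don't match the generator pattern and are
%% skipped.
grouped(Avps) ->
    [{Avp#diameter_avp.code, Components}
     || [#diameter_avp{} = Avp | Components] <- Avps].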
+ +%% =========================================================================== + +%% Ensure a Grouped AVP is represented by a list in the avps field +%% even in the case of a decode error on a component AVP. +grouped_error(_) -> + Avps = [{295, <<1:32>>}, %% Termination-Cause + {284, [{295, <<0:32>>}, %% Proxy-Info, Termination-Cause + {280, "Proxy-Host"}, + {33, "Proxy-State"}]}], + #diameter_packet{avps = [#diameter_avp{code = 295, + value = 1, + data = <<1:32>>}, + [#diameter_avp{code = 284}, + #diameter_avp{code = 295, + value = undefined, + data = <<0:32>>}, + #diameter_avp{code = 280}, + #diameter_avp{code = 33}]], + errors = [{5004, #diameter_avp{code = 284}}]} + = str(recode(str(Avps))). + +%% =========================================================================== + +%% Ensure that a failed decode in Failed-AVP is acceptable, and that +%% the component AVPs are decoded if possible. +failed_error(_) -> + Avps = [{279, [{295, <<0:32>>}, %% Failed-AVP, Termination-Cause + {258, <<1:32>>}, %% Auth-Application-Id + {284, [{280, "Proxy-Host"}, %% Proxy-Info + {33, "Proxy-State"}, + {295, <<0:32>>}, %% Termination-Cause, invalid + {258, <<2:32>>}]}]}], %% Auth-Application-Id + #diameter_packet{avps = [[#diameter_avp{code = 279}, + #diameter_avp{code = 295, + value = undefined, + data = <<0:32>>}, + #diameter_avp{code = 258, + value = 1, + data = <<1:32>>}, + [#diameter_avp{code = 284}, + #diameter_avp{code = 280}, + #diameter_avp{code = 33}, + #diameter_avp{code = 295, + value = undefined}, + #diameter_avp{code = 258, + value = 2, + data = <<2:32>>}]]], + errors = []} + = sta(recode(sta(Avps))). + +%% =========================================================================== + +%% str/1 + +str(#diameter_packet{avps = [#diameter_avp{code = 263}, + #diameter_avp{code = 264}, + #diameter_avp{code = 296}, + #diameter_avp{code = 283}, + #diameter_avp{code = 258, + value = 0} + | T]} + = Pkt) -> + Pkt#diameter_packet{avps = T}; + +str(Avps) -> + OH = "diameter.erlang.org", + OR = "erlang.org", + DR = "example.com", + Sid = "diameter.erlang.org;123;456", + + [#diameter_header{version = 1, + cmd_code = 275, %% STR + is_request = true, + application_id = 0, + hop_by_hop_id = 17, + end_to_end_id = 42, + is_proxiable = false, + is_error = false, + is_retransmitted = false} + | avp([{263, Sid}, %% Session-Id + {264, OH}, %% Origin-Host + {296, OR}, %% Origin-Realm + {283, DR}, %% Destination-Realm + {258, <<0:32>>}] %% Auth-Application-Id + ++ Avps)]. + +%% sta/1 + +sta(#diameter_packet{avps = [#diameter_avp{code = 263}, + #diameter_avp{code = 268}, + #diameter_avp{code = 264}, + #diameter_avp{code = 296}, + #diameter_avp{code = 278, + value = 4} + | T]} + = Pkt) -> + Pkt#diameter_packet{avps = T}; + +sta(Avps) -> + OH = "diameter.erlang.org", + OR = "erlang.org", + Sid = "diameter.erlang.org;123;456", + + [#diameter_header{version = 1, + cmd_code = 275, %% STA + is_request = false, + application_id = 0, + hop_by_hop_id = 17, + end_to_end_id = 42, + is_proxiable = false, + is_error = false, + is_retransmitted = false} + | avp([{263, Sid}, %% Session-Id + {268, <<2002:32>>}, %% Result-Code + {264, OH}, %% Origin-Host + {296, OR}, %% Origin-Realm + {278, <<4:32>>}] %% Origin-State-Id + ++ Avps)]. + +avp({Code, Data}) -> + #diameter_avp{code = Code, + data = avp(Data)}; + +avp(#diameter_avp{} = A) -> + A; + +avp([{_,_} | _] = Avps) -> + lists:map(fun avp/1, Avps); + +avp(V) -> + V. + +%% recode/1 + +recode(Msg) -> + recode(Msg, diameter_gen_base_rfc6733). 
+ +recode(#diameter_packet{} = Pkt, Dict) -> + diameter_codec:decode(Dict, diameter_codec:encode(Dict, Pkt)); + +recode(Msg, Dict) -> + recode(#diameter_packet{msg = Msg}, Dict). diff --git a/lib/diameter/test/diameter_codec_test.erl b/lib/diameter/test/diameter_codec_test.erl index 90536dcf2b..472755c62a 100644 --- a/lib/diameter/test/diameter_codec_test.erl +++ b/lib/diameter/test/diameter_codec_test.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2014. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -229,8 +229,7 @@ v(Max, Ord, E) when Ord =< Max -> diameter_enum:to_list(E); v(Max, Ord, E) -> - {M,S,U} = now(), - random:seed(M,S,U), + random:seed(diameter_util:seed()), v(Max, Ord, E, []). v(0, _, _, Acc) -> @@ -512,7 +511,7 @@ random(Mn,Mx) -> seed(undefined) -> put({?MODULE, seed}, true), - random:seed(now()); + random:seed(diameter_util:seed()); seed(true) -> ok. diff --git a/lib/diameter/test/diameter_ct.erl b/lib/diameter/test/diameter_ct.erl index ed2f884681..85c502ea7f 100644 --- a/lib/diameter/test/diameter_ct.erl +++ b/lib/diameter/test/diameter_ct.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2013. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -43,7 +43,7 @@ ct_run(Opts) -> info(Start , info()). info() -> - [{time, now()}, + [{time, diameter_lib:now()}, {process_count, erlang:system_info(process_count)} | erlang:memory()]. @@ -56,6 +56,6 @@ info(L0, L1) -> io:format("INFO: ~p~n", [Diff]). diff(time, T0, T1) -> - timer:now_diff(T1, T0); + diameter_lib:micro_diff(T1, T0); diff(_, N0, N1) -> N1 - N0. diff --git a/lib/diameter/test/diameter_event_SUITE.erl b/lib/diameter/test/diameter_event_SUITE.erl index f43f111d20..bfe160203c 100644 --- a/lib/diameter/test/diameter_event_SUITE.erl +++ b/lib/diameter/test/diameter_event_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2013. All Rights Reserved. +%% Copyright Ericsson AB 2013-15. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -168,16 +168,15 @@ connect(Config, Opts) -> {Name, Ref}. uniq() -> - {MS,S,US} = now(), - lists:flatten(io_lib:format("-~p-~p-~p-", [MS,S,US])). + "-" ++ diameter_util:unique_string(). event(Name) -> receive #diameter_event{service = Name, info = T} -> T end. event(Name, TL, TH) -> - T0 = now(), + T0 = diameter_lib:now(), Event = event(Name), - DT = timer:now_diff(now(), T0) div 1000, + DT = diameter_lib:micro_diff(T0) div 1000, {true, true, DT, Event} = {TL < DT, DT < TH, DT, Event}, Event. diff --git a/lib/diameter/test/diameter_examples_SUITE.erl b/lib/diameter/test/diameter_examples_SUITE.erl index aef4bc35ef..ef8e459175 100644 --- a/lib/diameter/test/diameter_examples_SUITE.erl +++ b/lib/diameter/test/diameter_examples_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2013-2014. All Rights Reserved. +%% Copyright Ericsson AB 2013-2015. All Rights Reserved. 
%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -295,15 +295,15 @@ slave() -> [{timetrap, {minutes, 10}}]. slave(_) -> - T0 = now(), + T0 = diameter_lib:now(), {ok, Node} = ct_slave:start(?MODULE, ?TIMEOUTS), - T1 = now(), + T1 = diameter_lib:now(), T2 = rpc:call(Node, erlang, now, []), {ok, Node} = ct_slave:stop(?MODULE), - now_diff([T0, T1, T2, now()]). + now_diff([T0, T1, T2, diameter_lib:now()]). now_diff([T1,T2|_] = Ts) -> - [timer:now_diff(T2,T1) | now_diff(tl(Ts))]; + [diameter_lib:micro_diff(T2,T1) | now_diff(tl(Ts))]; now_diff(_) -> []. @@ -397,4 +397,4 @@ stop(Name) stop(Config) -> Prot = proplists:get_value(group, Config), - [] = [RC || N <- ?NODES, RC <- [stop(concat(Prot, N))], RC /= ok]. + [] = [RC || N <- ?NODES, RC <- [catch stop(concat(Prot, N))], RC /= ok]. diff --git a/lib/diameter/test/diameter_gen_sctp_SUITE.erl b/lib/diameter/test/diameter_gen_sctp_SUITE.erl index 51ccb1e6ec..4ea5e80095 100644 --- a/lib/diameter/test/diameter_gen_sctp_SUITE.erl +++ b/lib/diameter/test/diameter_gen_sctp_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2013. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -119,10 +119,10 @@ send_not_from_controlling_process(_) -> send_not_from_controlling_process() -> FPid = self(), - {L, MRef} = spawn_monitor(fun() -> listen(FPid) end),%% listening process + {L, MRef} = spawn_monitor(fun() -> listen(FPid) end), receive {?MODULE, C, S} -> - erlang:demonitor(MRef, [flush]), + demonitor(MRef, [flush]), [L,C,S]; {'DOWN', MRef, process, _, _} = T -> error(T) @@ -137,13 +137,7 @@ listen(FPid) -> LPid = self(), spawn(fun() -> connect1(PortNr, FPid, LPid) end), %% connecting process Id = assoc(Sock), - ?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = Id}], _Bin}) - = recv(). %% Waits with this as current_function. - -%% recv/0 - -recv() -> - receive T -> T end. + recv(Sock, Id). %% connect1/3 @@ -154,7 +148,7 @@ connect1(PortNr, FPid, LPid) -> FPid ! {?MODULE, self(), spawn(fun() -> send(Sock, Id) end)}, %% sending process - MRef = erlang:monitor(process, LPid), + MRef = monitor(process, LPid), down(MRef). %% Waits with this as current_function. %% down/1 @@ -277,7 +271,8 @@ acc(N, Acc) -> loop(Sock, MRef, Bin) -> receive - ?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = Id}], B}) -> + ?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = Id}], B}) + when is_binary(B) -> Sz = size(Bin), {Sz, Bin} = {size(B), B}, %% assert ok = send(Sock, Id, mark(Bin)), @@ -291,7 +286,7 @@ loop(Sock, MRef, Bin) -> %% connect2/3 connect2(Pid, PortNr, Bin) -> - erlang:monitor(process, Pid), + monitor(process, Pid), {ok, Sock} = open(), ok = gen_sctp:connect_init(Sock, ?ADDR, PortNr, []), @@ -301,19 +296,25 @@ connect2(Pid, PortNr, Bin) -> %% T2 = time after listening process received our message %% T3 = time after reply is received - T1 = now(), + T1 = diameter_util:timestamp(), ok = send(Sock, Id, Bin), T2 = unmark(recv(Sock, Id)), - T3 = now(), - {timer:now_diff(T2, T1), timer:now_diff(T3, T2)}. %% {Outbound, Inbound} + T3 = diameter_util:timestamp(), + {diameter_lib:micro_diff(T2, T1), %% Outbound + diameter_lib:micro_diff(T3, T2)}. 
%% Inbound %% recv/2 recv(Sock, Id) -> receive - ?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = Id}], Bin}) -> + ?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = I}], Bin}) + when is_binary(Bin) -> + Id = I, %% assert Bin; - T -> %% eg. 'DOWN' + ?SCTP(S, _) -> + Sock = S, %% assert + recv(Sock, Id); + T -> exit(T) end. @@ -325,7 +326,7 @@ send(Sock, Id, Bin) -> %% mark/1 mark(Bin) -> - Info = term_to_binary(now()), + Info = term_to_binary(diameter_util:timestamp()), <<Info/binary, Bin/binary>>. %% unmark/1 diff --git a/lib/diameter/test/diameter_gen_tcp_SUITE.erl b/lib/diameter/test/diameter_gen_tcp_SUITE.erl index 7e232edb44..4b542e0156 100644 --- a/lib/diameter/test/diameter_gen_tcp_SUITE.erl +++ b/lib/diameter/test/diameter_gen_tcp_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2013. All Rights Reserved. +%% Copyright Ericsson AB 2014-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -18,10 +18,10 @@ %% %% -%% Some gen_sctp-specific tests demonstrating problems that were +%% Some gen_tcp-specific tests demonstrating problems that were %% encountered during diameter development but have nothing -%% specifically to do with diameter. At least one of them can cause -%% diameter_traffic_SUITE testcases to fail. +%% specifically to do with diameter. These can cause testcases in +%% other suites to fail. %% -module(diameter_gen_tcp_SUITE). @@ -30,7 +30,8 @@ all/0]). %% testcases --export([send_long/1]). +-export([send_long/1, + connect/1]). -define(LOOPBACK, {127,0,0,1}). -define(GEN_OPTS, [binary, {active, true}, {ip, ?LOOPBACK}]). @@ -41,7 +42,8 @@ suite() -> [{timetrap, {minutes, 2}}]. all() -> - [send_long]. + [connect, %% Appears to fail only when run first. + send_long]. %% =========================================================================== @@ -87,15 +89,6 @@ connect(PortNr, LPid) -> LPid ! {self(), fun(B) -> send(Sock, B) end}, down(LPid). -%% down/1 - -down(Pid) - when is_pid(Pid) -> - down(erlang:monitor(process, Pid)); - -down(MRef) -> - receive {'DOWN', MRef, process, _, Reason} -> Reason end. - %% send/2 %% %% Send from a spawned process just to avoid sending from the @@ -104,3 +97,47 @@ down(MRef) -> send(Sock, Bin) -> {_, MRef} = spawn_monitor(fun() -> exit(gen_tcp:send(Sock, Bin)) end), down(MRef). + +%% =========================================================================== + +%% connect/1 +%% +%% Test that simultaneous connections succeed. This fails sporadically +%% on OS X at the time of writing, when gen_tcp:connect/2 returns +%% {error, econnreset}. + +connect(_) -> + {ok, LSock} = gen_tcp:listen(0, ?GEN_OPTS), + {ok, {_,PortNr}} = inet:sockname(LSock), + Count = lists:seq(1,8), %% 8 simultaneous connects + As = [gen_accept(LSock) || _ <- Count], + %% Wait for spawned processes to have called gen_tcp:accept/1 + %% (presumably). + receive after 2000 -> ok end, + Cs = [gen_connect(PortNr) || _ <- Count], + [] = failures(Cs), + [] = failures(As). + +failures(Monitors) -> + [RC || {_, MRef} <- Monitors, RC <- [down(MRef)], ok /= element(1, RC)]. + +gen_accept(LSock) -> + spawn_monitor(fun() -> + exit(gen_tcp:accept(LSock)) + end). + +gen_connect(PortNr) -> + spawn_monitor(fun() -> + exit(gen_tcp:connect(?LOOPBACK, PortNr, ?GEN_OPTS)) + end). 
+ +%% =========================================================================== + +%% down/1 + +down(Pid) + when is_pid(Pid) -> + down(monitor(process, Pid)); + +down(MRef) -> + receive {'DOWN', MRef, process, _, Reason} -> Reason end. diff --git a/lib/diameter/test/diameter_pool_SUITE.erl b/lib/diameter/test/diameter_pool_SUITE.erl new file mode 100644 index 0000000000..a59cd66a2e --- /dev/null +++ b/lib/diameter/test/diameter_pool_SUITE.erl @@ -0,0 +1,133 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2015. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +%% +%% Test of the pool_size option in connecting nodes with multiple +%% connections. +%% + +-module(diameter_pool_SUITE). + +-export([suite/0, + all/0, + init_per_testcase/2, + end_per_testcase/2, + init_per_suite/1, + end_per_suite/1]). + +%% testcases +-export([tcp_connect/1, + sctp_connect/1, + any_connect/1]). + +%% =========================================================================== + +-define(util, diameter_util). + +%% Config for diameter:start_service/2. +-define(SERVICE(Host), + [{'Origin-Host', Host ++ ".ericsson.com"}, + {'Origin-Realm', "ericsson.com"}, + {'Host-IP-Address', [{127,0,0,1}]}, + {'Vendor-Id', 12345}, + {'Product-Name', "OTP/diameter"}, + {'Auth-Application-Id', [0]}, %% common + {'Acct-Application-Id', [3]}, %% accounting + {restrict_connections, false}, + {application, [{alias, common}, + {dictionary, diameter_gen_base_rfc6733}, + {module, diameter_callback}]}, + {application, [{alias, accounting}, + {dictionary, diameter_gen_acct_rfc6733}, + {module, diameter_callback}]}]). + +%% =========================================================================== + +suite() -> + [{timetrap, {seconds, 30}}]. + +all() -> + [tcp_connect, + sctp_connect, + any_connect]. + +init_per_testcase(_Name, Config) -> + Config. + +end_per_testcase(_Name, _Config) -> + diameter:stop(). + +init_per_suite(Config) -> + [{sctp, ?util:have_sctp()} | Config]. + +end_per_suite(_Config) -> + ok. + +%% =========================================================================== + +tcp_connect(_Config) -> + connect(tcp, tcp). + +sctp_connect(Config) -> + case lists:member({sctp, true}, Config) of + true -> connect(sctp, sctp); + false -> {skip, no_sctp} + end. + +any_connect(_Config) -> + connect(any, tcp). + +%% connect/2 + +%% Establish multiple connections between a client and server. +connect(ClientProt, ServerProt) -> + ok = diameter:start(), + [] = [{S,T} || S <- ["server", "client"], + T <- [diameter:start_service(S, ?SERVICE(S))], + T /= ok], + %% Listen with a single transport with pool_size = 4. Ensure the + %% expected number of transport processes are started. + LRef = ?util:listen("server", ServerProt, [{pool_size, 4}]), + {4,0} = count("server", LRef, accept), %% 4 transports, no connections + %% Establish 5 connections. 
+ Ref = ?util:connect("client", ClientProt, LRef, [{pool_size, 5}]),
+ {5,5} = count("client", Ref, pool), %% 5 connections
+ %% Ensure the server has started replacement transports within a
+ %% reasonable time. Sleep, since there's no guarantee the
+ %% replacements have been started before the client has received
+ %% 'up' events. (Although it's likely.)
+ sleep(),
+ {9,5} = count("server", LRef, accept), %% 5 connections + 4 accepting
+ %% Ensure there are still the expected number of accepting transports
+ %% after stopping the client service.
+ ok = diameter:stop_service("client"),
+ sleep(),
+ {4,0} = count("server", LRef, accept), %% 4 transports, no connections
+ %% Done.
+ ok = diameter:stop_service("server").
+
+count(Name, Ref, Key) ->
+ [{transport, [[{ref, Ref} | T]]},
+ {connections, Cs}]
+ = diameter:service_info(Name, [transport, connections]),
+ {Key, Ps} = lists:keyfind(Key, 1, T),
+ {length(Ps), length(Cs)}. %% number of processes, connections
+
+sleep() ->
+ receive after 1000 -> ok end.
diff --git a/lib/diameter/test/diameter_traffic_SUITE.erl b/lib/diameter/test/diameter_traffic_SUITE.erl
index 4b67372016..9822b95301 100644
--- a/lib/diameter/test/diameter_traffic_SUITE.erl
+++ b/lib/diameter/test/diameter_traffic_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -414,12 +414,13 @@ send_eval(Config) ->
= call(Config, Req).
%% Send an accounting ACR that the server tries to answer with an
-%% inappropriate header, resulting in no answer being sent and the
-%% request timing out.
+%% inappropriate header. That the error is detected is coded in
+%% handle_answer.
send_bad_answer(Config) ->
Req = ['ACR', {'Accounting-Record-Type', ?EVENT_RECORD},
{'Accounting-Record-Number', 2}],
- {timeout, _} = call(Config, Req).
+ ?answer_message(?SUCCESS)
+ = call(Config, Req).
%% Send an ACR that the server callback answers explicitly with a
%% protocol error.
@@ -759,7 +760,7 @@ call(Config, Req, Opts) ->
diameter:call(?CLIENT,
dict(Req, Dict0),
msg(Req, ReqEncoding, Dict0),
- [{extra, [{Name, Group}, now()]} | Opts]).
+ [{extra, [{Name, Group}, diameter_lib:now()]} | Opts]).
origin({A,C}) ->
2*codec(A) + container(C);
@@ -1057,15 +1058,12 @@ answer(Pkt, Req, _Peer, Name, #group{client_dict0 = Dict0}) ->
[R | Vs] = Dict:'#get-'(answer(Ans, Es, Name)),
[Dict:rec2msg(R) | Vs].
-answer(Rec, [_|_], N)
- when N == send_long_avp_length;
- N == send_short_avp_length;
- N == send_zero_avp_length;
- N == send_invalid_avp_length;
- N == send_invalid_reject;
- N == send_unknown_short_mandatory;
- N == send_unexpected_mandatory_decode ->
+%% An inappropriate E-bit results in a decode error ...
+answer(Rec, Es, send_bad_answer) ->
+ [{5004, #diameter_avp{name = 'Result-Code'}} | _] = Es,
Rec;
+
+%% ... while other errors are reflected in Failed-AVP.
answer(Rec, [], _) ->
Rec.
@@ -1078,8 +1076,10 @@ app(Req, _, Dict0) ->
%% handle_error/6
handle_error(timeout = Reason, _Req, ?CLIENT, _Peer, _, Time) ->
- Now = now(),
- {Reason, {Time, Now, timer:now_diff(Now, Time)}};
+ Now = diameter_lib:now(),
+ {Reason, {diameter_lib:timestamp(Time),
+ diameter_lib:timestamp(Now),
+ diameter_lib:micro_diff(Now, Time)}};
handle_error(Reason, _Req, ?CLIENT, _Peer, _, _Time) ->
{error, Reason}.
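The recurring change across these test suites is the move away from the deprecated erlang:now/0 to wrappers in diameter_lib. A minimal sketch of the before/after timing idiom, assuming diameter_lib:now/0 and diameter_lib:micro_diff/1 behave as the changed call sites above suggest; the time_it/1 helper name is made up for illustration and is not part of the patch:

    %% Old idiom, as removed in the hunks above:
    %%
    %%     T0 = now(),
    %%     Result = F(),
    %%     Elapsed = timer:now_diff(now(), T0),   %% microseconds
    %%
    %% New idiom, built on the diameter_lib wrappers introduced above,
    %% which avoid the unique-timestamp guarantee of erlang:now/0.

    time_it(F) ->
        T0 = diameter_lib:now(),
        Result = F(),
        {diameter_lib:micro_diff(T0), Result}.  %% {elapsed microseconds, result}
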
diff --git a/lib/diameter/test/diameter_transport_SUITE.erl b/lib/diameter/test/diameter_transport_SUITE.erl index fcffa69c24..f098851bea 100644 --- a/lib/diameter/test/diameter_transport_SUITE.erl +++ b/lib/diameter/test/diameter_transport_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2013. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -53,7 +53,7 @@ %% Receive a message. -define(RECV(Pat, Ret), receive Pat -> Ret end). --define(RECV(Pat), ?RECV(Pat, now())). +-define(RECV(Pat), ?RECV(Pat, diameter_util:timestamp())). %% Sockets are opened on the loopback address. -define(ADDR, {127,0,0,1}). @@ -104,7 +104,7 @@ tc() -> reconnect]. init_per_suite(Config) -> - [{sctp, have_sctp()} | Config]. + [{sctp, ?util:have_sctp()} | Config]. end_per_suite(_Config) -> ok. @@ -127,7 +127,10 @@ tcp_accept(_) -> accept(tcp). sctp_accept(Config) -> - if_sctp(fun accept/1, Config). + case lists:member({sctp, true}, Config) of + true -> accept(sctp); + false -> {skip, no_sctp} + end. %% Start multiple accepting transport processes that are connected to %% with an equal number of connecting processes using gen_tcp/sctp @@ -157,7 +160,10 @@ tcp_connect(_) -> connect(tcp). sctp_connect(Config) -> - if_sctp(fun connect/1, Config). + case lists:member({sctp, true}, Config) of + true -> connect(sctp); + false -> {skip, no_sctp} + end. connect(Prot) -> T = {Prot, make_ref()}, @@ -219,7 +225,7 @@ reconnect(_) -> || T <- [listen, connect]]). start_service(SvcName) -> - OH = io_lib:format("~p-~p-~p", tuple_to_list(now())), + OH = diameter_util:unique_string(), Opts = [{application, [{dictionary, diameter_gen_base_rfc6733}, {module, diameter_callback}]}, {'Origin-Host', OH}, @@ -251,28 +257,6 @@ abort(SvcName, LRef, Ref) %% =========================================================================== %% =========================================================================== -%% have_sctp/0 - -have_sctp() -> - case gen_sctp:open() of - {ok, Sock} -> - gen_sctp:close(Sock), - true; - {error, E} when E == eprotonosupport; - E == esocktnosupport -> %% fail on any other reason - false - end. - -%% if_sctp/2 - -if_sctp(F, Config) -> - case proplists:get_value(sctp, Config) of - true -> - F(sctp); - false -> - {skip, no_sctp} - end. - %% init/2 init(accept, {Prot, Ref}) -> @@ -351,7 +335,7 @@ make_msg() -> %% crypto:rand_bytes/1 isn't available on all platforms (since openssl %% isn't) so roll our own. rand_bytes(N) -> - random:seed(now()), + random:seed(diameter_util:seed()), rand_bytes(N, <<>>). rand_bytes(0, Bin) -> @@ -381,37 +365,14 @@ start_connect(tcp, T, Svc, Opts) -> diameter_tcp:start(T, Svc, Opts). %% start_accept/2 -%% -%% Start transports sequentially by having each wait for a message -%% from a job in a queue before commencing. Only one transport with a -%% pending accept is started at a time since diameter_{tcp,sctp} -%% currently assume (and diameter currently implements) this. start_accept(Prot, Ref) -> - Pid = sync(accept, Ref), {Mod, Opts} = tmod(Prot), - - try - {ok, TPid, [?ADDR]} = Mod:start({accept, Ref}, - ?SVC([?ADDR]), - [{port, 0} | Opts]), - ?RECV(?TMSG({TPid, connected})), - TPid - after - Pid ! Ref - end. - -sync(What, Ref) -> - ok = diameter_sync:cast({?MODULE, What, Ref}, - [fun lock/2, Ref, self()], - infinity, - infinity), - receive {start, Ref, Pid} -> Pid end. 
- -lock(Ref, Pid) -> - Pid ! {start, Ref, self()}, - erlang:monitor(process, Pid), - Ref = receive T -> T end. + {ok, TPid, [?ADDR]} = Mod:start({accept, Ref}, + ?SVC([?ADDR]), + [{port, 0} | Opts]), + ?RECV(?TMSG({TPid, connected})), + TPid. tmod(sctp) -> {diameter_sctp, [{sctp_initmsg, ?SCTP_INIT}]}; @@ -454,7 +415,7 @@ gen_accept(tcp, LSock) -> gen_send(sctp, Sock, Bin) -> {OS, _IS, Id} = getr(assoc), - {_, _, Us} = now(), + {_, _, Us} = diameter_util:timestamp(), gen_sctp:send(Sock, Id, Us rem OS, Bin); gen_send(tcp, Sock, Bin) -> gen_tcp:send(Sock, Bin). @@ -463,7 +424,11 @@ gen_send(tcp, Sock, Bin) -> gen_recv(sctp, Sock) -> {_OS, _IS, Id} = getr(assoc), - ?RECV(?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = Id}], Bin}), Bin); + receive + ?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = Id}], Bin}) + when is_binary(Bin) -> + Bin + end; gen_recv(tcp, Sock) -> tcp_recv(Sock, <<>>). diff --git a/lib/diameter/test/diameter_util.erl b/lib/diameter/test/diameter_util.erl index 92c72c84e7..c496876ee1 100644 --- a/lib/diameter/test/diameter_util.erl +++ b/lib/diameter/test/diameter_util.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2013. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -29,7 +29,11 @@ run/1, fold/3, foldl/3, - scramble/1]). + scramble/1, + timestamp/0, + seed/0, + unique_string/0, + have_sctp/0]). %% diameter-specific -export([lport/2, @@ -174,7 +178,7 @@ scramble(L) -> [[fun s/1, L]]). s(L) -> - random:seed(now()), + random:seed(seed()), s([], L). s(Acc, []) -> @@ -184,6 +188,44 @@ s(Acc, L) -> s([T|Acc], H ++ Rest). %% --------------------------------------------------------------------------- +%% timestamp/0 + +timestamp() -> + diameter_lib:timestamp(diameter_lib:now()). + +%% --------------------------------------------------------------------------- +%% seed/0 + +seed() -> + {_,T} = diameter_lib:seed(), + T. + +%% --------------------------------------------------------------------------- +%% unique_string/0 + +unique_string() -> + us(diameter_lib:now()). + +us({M,S,U}) -> + tl(lists:append(["-" ++ integer_to_list(N) || N <- [M,S,U]])); + +us(MonoT) -> + integer_to_list(MonoT). + +%% --------------------------------------------------------------------------- +%% have_sctp/0 + +have_sctp() -> + case gen_sctp:open() of + {ok, Sock} -> + gen_sctp:close(Sock), + true; + {error, E} when E == eprotonosupport; + E == esocktnosupport -> %% fail on any other reason + false + end. + +%% --------------------------------------------------------------------------- %% eval/1 %% %% Evaluate a function in one of a number of forms. @@ -254,13 +296,12 @@ path(Config, Name) -> %% %% Lookup the port number of a tcp/sctp listening transport. -lport(M, {Node, Ref}) -> - rpc:call(Node, ?MODULE, lport, [M, Ref]); +lport(Prot, {Node, Ref}) -> + rpc:call(Node, ?MODULE, lport, [Prot, Ref]); lport(Prot, Ref) -> - Mod = tmod(Prot), [_] = diameter_reg:wait({'_', listener, {Ref, '_'}}), - [N || {listen, N, _} <- Mod:ports(Ref)]. + [N || M <- tmod(Prot), {listen, N, _} <- M:ports(Ref)]. 
%% --------------------------------------------------------------------------- %% listen/2-3 @@ -292,13 +333,17 @@ connect(Client, Prot, LRef, Opts) -> Ref = add_transport(Client, {connect, opts(Prot, PortNr) ++ Opts}), true = transport(Client, Ref), %% assert - ok = receive - {diameter_event, Client, {up, Ref, _, _, _}} -> ok - after 10000 -> - {Client, Prot, PortNr, process_info(self(), messages)} - end, + diameter_lib:for_n(fun(_) -> ok = up(Client, Ref, Prot, PortNr) end, + proplists:get_value(pool_size, Opts, 1)), Ref. +up(Client, Ref, Prot, PortNr) -> + receive + {diameter_event, Client, {up, Ref, _, _, _}} -> ok + after 10000 -> + {Client, Prot, PortNr, process_info(self(), messages)} + end. + transport(SvcName, Ref) -> [Ref] == [R || [{ref, R} | _] <- diameter:service_info(SvcName, transport), R == Ref]. @@ -327,13 +372,15 @@ add_transport(SvcName, T) -> Ref. tmod(tcp) -> - diameter_tcp; + [diameter_tcp]; tmod(sctp) -> - diameter_sctp. + [diameter_sctp]; +tmod(any) -> + [diameter_sctp, diameter_tcp]. opts(Prot, T) -> - [{transport_module, tmod(Prot)}, - {transport_config, [{ip, ?ADDR}, {port, 0} | opts(T)]}]. + [{transport_module, M} || M <- tmod(Prot)] + ++ [{transport_config, [{ip, ?ADDR}, {port, 0} | opts(T)]}]. opts(listen) -> [{accept, M} || M <- [{256,0,0,1}, ["256.0.0.1", ["^.+$"]]]]; diff --git a/lib/diameter/test/diameter_watchdog_SUITE.erl b/lib/diameter/test/diameter_watchdog_SUITE.erl index b6e8730ec2..5a3ff2c92f 100644 --- a/lib/diameter/test/diameter_watchdog_SUITE.erl +++ b/lib/diameter/test/diameter_watchdog_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2013. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -420,6 +420,7 @@ suspect(TRef, false, SvcName, N) -> %% abuse/1 abuse(F) -> + [] = run([[abuse, F, T] || T <- [listen, connect]]). abuse(F, [_,_,_|_] = Args) -> @@ -672,7 +673,8 @@ jitter(T,D) -> %% Generate a unique hostname for the faked peer. hostname() -> - lists:flatten(io_lib:format("~p-~p-~p", tuple_to_list(now()))). + {M,S,U} = diameter_util:timestamp(), + lists:flatten(io_lib:format("~p-~p-~p", [M,S,U])). putr(Key, Val) -> put({?MODULE, Key}, Val). diff --git a/lib/diameter/test/modules.mk b/lib/diameter/test/modules.mk index 4fea62461c..6da96bd676 100644 --- a/lib/diameter/test/modules.mk +++ b/lib/diameter/test/modules.mk @@ -1,8 +1,7 @@ -#-*-makefile-*- ; force emacs to enter makefile-mode # %CopyrightBegin% # -# Copyright Ericsson AB 2010-2013. All Rights Reserved. +# Copyright Ericsson AB 2010-2015. All Rights Reserved. 
# # The contents of this file are subject to the Erlang Public License, # Version 1.1, (the "License"); you may not use this file except in @@ -40,6 +39,7 @@ MODULES = \ diameter_gen_sctp_SUITE \ diameter_gen_tcp_SUITE \ diameter_length_SUITE \ + diameter_pool_SUITE \ diameter_reg_SUITE \ diameter_relay_SUITE \ diameter_stats_SUITE \ diff --git a/lib/edoc/src/edoc_extract.erl b/lib/edoc/src/edoc_extract.erl index a2a1a6b718..6d34de3a85 100644 --- a/lib/edoc/src/edoc_extract.erl +++ b/lib/edoc/src/edoc_extract.erl @@ -351,8 +351,6 @@ preprocess_forms_2(F, Fs) -> [F | preprocess_forms_1(Fs)]; {function, _} -> [F | preprocess_forms_1(Fs)]; - {rule, _} -> - [F | preprocess_forms_1(Fs)]; {attribute, {module, _}} -> [F | preprocess_forms_1(Fs)]; text -> @@ -390,15 +388,6 @@ collect([F | Fs], Cs, Ss, Ts, As, Header, Mod) -> export = Export, data = {comment_text(Cs),Ss,Ts}} | As], Header, Mod); - {rule, Name} -> - L = erl_syntax:get_pos(F), - Export = ordsets:is_element(Name, Mod#module.exports), - Args = parameters(erl_syntax:rule_clauses(F)), - collect(Fs, [], [], [], - [#entry{name = Name, args = Args, line = L, - export = Export, - data = {comment_text(Cs),Ss,Ts}} | As], - Header, Mod); {attribute, {module, _}} when Header =:= undefined -> L = erl_syntax:get_pos(F), collect(Fs, [], [], [], As, diff --git a/lib/hipe/cerl/erl_types.erl b/lib/hipe/cerl/erl_types.erl index 5124e7238a..09dffe1280 100644 --- a/lib/hipe/cerl/erl_types.erl +++ b/lib/hipe/cerl/erl_types.erl @@ -2,7 +2,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2003-2014. All Rights Reserved. +%% Copyright Ericsson AB 2003-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -78,10 +78,11 @@ t_non_neg_fixnum/0, t_pos_fixnum/0, t_float/0, + t_var_names/1, t_form_to_string/1, - t_from_form/1, - t_from_form/2, - t_from_form/3, + t_from_form/4, + t_from_form/5, + t_from_form_without_remote/2, t_from_range/2, t_from_range_unsafe/2, t_from_term/1, @@ -181,7 +182,6 @@ t_remote/3, t_string/0, t_struct_from_opaque/2, - t_solve_remote/3, t_subst/2, t_subtract/2, t_subtract_list/2, @@ -248,6 +248,8 @@ %% -define(REC_TYPE_LIMIT, 2). +-define(EXPAND_DEPTH, 16). +-define(EXPAND_LIMIT, 10000). -define(TUPLE_TAG_LIMIT, 5). -define(TUPLE_ARITY_LIMIT, 8). @@ -366,7 +368,7 @@ -type record_key() :: {'record', atom()}. -type type_key() :: {'type' | 'opaque', atom(), arity()}. --type record_value() :: orddict:orddict(). % XXX. To be refined +-type record_value() :: [{atom(), erl_parse:abstract_expr(), erl_type()}]. -type type_value() :: {module(), erl_type(), atom()}. -type type_table() :: dict:dict(record_key(), record_value()) | dict:dict(type_key(), type_value()). @@ -747,7 +749,7 @@ t_opaque_from_records(RecDict) -> end end, RecDict), OpaqueTypeDict = - dict:map(fun({opaque, Name, _Arity}, {Module, _Type, ArgNames}) -> + dict:map(fun({opaque, Name, _Arity}, {{Module, _Form, ArgNames}, _Type}) -> %% Args = args_to_types(ArgNames), %% List = lists:zip(ArgNames, Args), %% TmpVarDict = dict:from_list(List), @@ -808,134 +810,6 @@ is_remote(_) -> false. -type mod_records() :: dict:dict(module(), type_table()). --spec t_solve_remote(erl_type(), sets:set(mfa()), mod_records()) -> erl_type(). - -t_solve_remote(Type, ExpTypes, Records) -> - {RT, _RR} = t_solve_remote(Type, ExpTypes, Records, []), - RT. 
- -t_solve_remote(?function(Domain, Range), ET, R, C) -> - {RT1, RR1} = t_solve_remote(Domain, ET, R, C), - {RT2, RR2} = t_solve_remote(Range, ET, R, C), - {?function(RT1, RT2), RR1 ++ RR2}; -t_solve_remote(?list(Types, Term, Size), ET, R, C) -> - {RT1, RR1} = t_solve_remote(Types, ET, R, C), - {RT2, RR2} = t_solve_remote(Term, ET, R, C), - {?list(RT1, RT2, Size), RR1 ++ RR2}; -t_solve_remote(?product(Types), ET, R, C) -> - {RL, RR} = list_solve_remote(Types, ET, R, C), - {?product(RL), RR}; -t_solve_remote(?opaque(Set), ET, R, C) -> - List = ordsets:to_list(Set), - {NewList, RR} = opaques_solve_remote(List, ET, R, C), - {?opaque(ordsets:from_list(NewList)), RR}; -t_solve_remote(?tuple(?any, _, _) = T, _ET, _R, _C) -> {T, []}; -t_solve_remote(?tuple(Types, _Arity, _Tag), ET, R, C) -> - {RL, RR} = list_solve_remote(Types, ET, R, C), - {t_tuple(RL), RR}; -t_solve_remote(?tuple_set(Set), ET, R, C) -> - {NewTuples, RR} = tuples_solve_remote(Set, ET, R, C), - {t_sup(NewTuples), RR}; -t_solve_remote(?remote(Set), ET, R, C) -> - RemoteList = ordsets:to_list(Set), - {RL, RR} = list_solve_remote_type(RemoteList, ET, R, C), - {t_sup(RL), RR}; -t_solve_remote(?union(List), ET, R, C) -> - {RL, RR} = list_solve_remote(List, ET, R, C), - {t_sup(RL), RR}; -t_solve_remote(T, _ET, _R, _C) -> {T, []}. - -t_solve_remote_type(#remote{mod = RemMod, name = Name, args = Args0} = RemType, - ET, R, C) -> - Args = lists:map(fun(A) -> - {Arg, _} = t_solve_remote(A, ET, R, C), - Arg - end, Args0), - ArgsLen = length(Args), - case dict:find(RemMod, R) of - error -> - self() ! {self(), ext_types, {RemMod, Name, ArgsLen}}, - {t_any(), []}; - {ok, RemDict} -> - MFA = {RemMod, Name, ArgsLen}, - case sets:is_element(MFA, ET) of - true -> - case lookup_type(Name, ArgsLen, RemDict) of - {type, {_Mod, Type, ArgNames}} -> - {NewType, NewCycle, NewRR} = - case can_unfold_more(RemType, C) of - true -> - List = lists:zip(ArgNames, Args), - TmpVarDict = dict:from_list(List), - {t_from_form(Type, RemDict, TmpVarDict), [RemType|C], []}; - false -> - {t_any(), C, [RemType]} - end, - {RT, RR} = t_solve_remote(NewType, ET, R, NewCycle), - RetRR = NewRR ++ RR, - RT1 = - case lists:member(RemType, RetRR) of - true -> t_limit(RT, ?REC_TYPE_LIMIT); - false -> RT - end, - {RT1, RetRR}; - {opaque, {Mod, Type, ArgNames}} -> - List = lists:zip(ArgNames, Args), - TmpVarDict = dict:from_list(List), - {Rep, NewCycle, NewRR} = - case can_unfold_more(RemType, C) of - true -> - {t_from_form(Type, RemDict, TmpVarDict), [RemType|C], []}; - false -> - {t_any(), C, [RemType]} - end, - {NewRep, RR} = t_solve_remote(Rep, ET, R, NewCycle), - RetRR = NewRR ++ RR, - RT1 = - case lists:member(RemType, RetRR) of - true -> t_limit(NewRep, ?REC_TYPE_LIMIT); - false -> NewRep - end, - {skip_opaque_alias(RT1, Mod, Name, Args), RetRR}; - error -> - Msg = io_lib:format("Unable to find remote type ~w:~w()\n", - [RemMod, Name]), - throw({error, Msg}) - end; - false -> - self() ! {self(), ext_types, {RemMod, Name, ArgsLen}}, - {t_any(), []} - end - end. - -list_solve_remote([], _ET, _R, _C) -> - {[], []}; -list_solve_remote([Type|Types], ET, R, C) -> - {RT, RR1} = t_solve_remote(Type, ET, R, C), - {RL, RR2} = list_solve_remote(Types, ET, R, C), - {[RT|RL], RR1 ++ RR2}. - -list_solve_remote_type([], _ET, _R, _C) -> - {[], []}; -list_solve_remote_type([Type|Types], ET, R, C) -> - {RT, RR1} = t_solve_remote_type(Type, ET, R, C), - {RL, RR2} = list_solve_remote_type(Types, ET, R, C), - {[RT|RL], RR1 ++ RR2}. 
- -opaques_solve_remote([], _ET, _R, _C) -> - {[], []}; -opaques_solve_remote([#opaque{struct = Struct} = Remote|Tail], ET, R, C) -> - {RT, RR1} = t_solve_remote(Struct, ET, R, C), - {LOp, RR2} = opaques_solve_remote(Tail, ET, R, C), - {[Remote#opaque{struct = RT}|LOp], RR1 ++ RR2}. - -tuples_solve_remote([], _ET, _R, _C) -> - {[], []}; -tuples_solve_remote([{_Sz, Tuples}|Tail], ET, R, C) -> - {RL, RR1} = list_solve_remote(Tuples, ET, R, C), - {LSzTpls, RR2} = tuples_solve_remote(Tail, ET, R, C), - {RL ++ LSzTpls, RR1 ++ RR2}. - %%----------------------------------------------------------------------------- %% Unit type. Signals non termination. %% @@ -2264,14 +2138,19 @@ expand_range_from_set(Range = ?int_range(From, To), Set) -> -spec t_sup([erl_type()]) -> erl_type(). -t_sup([?any|_]) -> - ?any; -t_sup([H1, H2|T]) -> - t_sup([t_sup(H1, H2)|T]); -t_sup([H]) -> - subst_all_vars_to_any(H); -t_sup([]) -> - ?none. +t_sup([]) -> ?none; +t_sup(Ts) -> + case lists:any(fun is_any/1, Ts) of + true -> ?any; + false -> + t_sup1(Ts, []) + end. + +t_sup1([H1, H2|T], L) -> + t_sup1(T, [t_sup(H1, H2)|L]); +t_sup1([T], []) -> subst_all_vars_to_any(T); +t_sup1(Ts, L) -> + t_sup1(Ts++L, []). -spec t_sup(erl_type(), erl_type()) -> erl_type(). @@ -3089,12 +2968,12 @@ t_subst_aux(T, _VarMap) -> subst_all_remote(Type0, Substitute) -> Map = fun(Type) -> - case erl_types:t_is_remote(Type) of + case t_is_remote(Type) of true -> Substitute; false -> Type end end, - erl_types:t_map(Map, Type0). + t_map(Map, Type0). %%----------------------------------------------------------------------------- %% Unification @@ -3776,7 +3655,7 @@ t_abstract_records(?tuple(Elements, Arity, ?atom(_) = Tag), RecDict) -> [TagAtom] = atom_vals(Tag), case lookup_record(TagAtom, Arity - 1, RecDict) of error -> t_tuple([t_abstract_records(E, RecDict) || E <- Elements]); - {ok, Fields} -> t_tuple([Tag|[T || {_Name, T} <- Fields]]) + {ok, Fields} -> t_tuple([Tag|[T || {_Name, _Abstr, T} <- Fields]]) end; t_abstract_records(?tuple(Elements, _Arity, _Tag), RecDict) -> t_tuple([t_abstract_records(E, RecDict) || E <- Elements]); @@ -3997,7 +3876,8 @@ record_to_string(Tag, [_|Fields], FieldNames, RecDict) -> FieldStrings = record_fields_to_string(Fields, FieldNames, RecDict, []), "#" ++ atom_to_string(Tag) ++ "{" ++ string:join(FieldStrings, ",") ++ "}". -record_fields_to_string([F|Fs], [{FName, _DefType}|FDefs], RecDict, Acc) -> +record_fields_to_string([F|Fs], [{FName, _Abstr, _DefType}|FDefs], + RecDict, Acc) -> NewAcc = case t_is_equal(F, t_any()) orelse t_is_any_atom('undefined', F) of true -> Acc; @@ -4023,7 +3903,7 @@ record_field_diffs_to_string(?tuple([_|Fs], Arity, Tag), RecDict) -> FieldDiffs = field_diffs(Fs, FieldNames, RecDict, []), string:join(FieldDiffs, " and "). -field_diffs([F|Fs], [{FName, DefType}|FDefs], RecDict, Acc) -> +field_diffs([F|Fs], [{FName, _Abstr, DefType}|FDefs], RecDict, Acc) -> %% Don't care about opaqueness for now. NewAcc = case not t_is_none(t_inf(F, DefType)) of @@ -4071,357 +3951,476 @@ mod_name(Mod, Name) -> %% %%============================================================================= --spec t_from_form(parse_form()) -> erl_type(). +-type type_names() :: [type_key() | record_key()]. -t_from_form(Form) -> - t_from_form(Form, dict:new()). +-spec t_from_form(parse_form(), sets:set(mfa()), + module(), mod_records()) -> erl_type(). --spec t_from_form(parse_form(), type_table()) -> erl_type(). +t_from_form(Form, ExpTypes, Module, RecDict) -> + t_from_form(Form, ExpTypes, Module, RecDict, dict:new()). 
-t_from_form(Form, RecDict) ->
- t_from_form(Form, RecDict, dict:new()).
+-spec t_from_form(parse_form(), sets:set(mfa()),
+ module(), mod_records(), var_table()) -> erl_type().
--spec t_from_form(parse_form(), type_table(), var_table()) -> erl_type().
+t_from_form(Form, ExpTypes, Module, RecDict, VarDict) ->
+ {T, _} = t_from_form1(Form, [], ExpTypes, Module, RecDict, VarDict),
+ T.
-t_from_form(Form, RecDict, VarDict) ->
- {T, _R} = t_from_form(Form, [], RecDict, VarDict),
+%% Replace external types with none().
+-spec t_from_form_without_remote(parse_form(), type_table()) -> erl_type().
+
+t_from_form_without_remote(Form, TypeTable) ->
+ Module = mod,
+ RecDict = dict:from_list([{Module, TypeTable}]),
+ ExpTypes = replace_by_none,
+ {T, _} = t_from_form1(Form, [], ExpTypes, Module, RecDict, dict:new()),
+ T.
--type type_names() :: [type_key() | record_key()].
+%% REC_TYPE_LIMIT is used for limiting the depth of recursive types.
+%% EXPAND_LIMIT is used for limiting the size of types by
+%% limiting the number of elements of lists within one type form.
+%% EXPAND_DEPTH is used in conjunction with EXPAND_LIMIT to make the
+%% types balanced (unions will otherwise collapse to any()) by limiting
+%% the depth the same way as t_limit/2 does.
+
+-type expand_limit() :: integer().
--spec t_from_form(parse_form(), type_names(), type_table(), var_table()) ->
- {erl_type(), type_names()}.
-type expand_depth() :: integer().
-t_from_form({var, _L, '_'}, _TypeNames, _RecDict, _VarDict) ->
- {t_any(), []};
-t_from_form({var, _L, Name}, _TypeNames, _RecDict, VarDict) ->
- case dict:find(Name, VarDict) of
- error -> {t_var(Name), []};
- {ok, Val} -> {Val, []}
+t_from_form1(Form, TypeNames, ET, M, MR, V) ->
+ t_from_form1(Form, TypeNames, ET, M, MR, V, ?EXPAND_DEPTH).
+
+t_from_form1(Form, TypeNames, ET, M, MR, V, D) ->
+ L = ?EXPAND_LIMIT,
+ {T, L1} = t_from_form(Form, TypeNames, ET, M, MR, V, D, L),
+ if
+ L1 =< 0, D > 1 ->
+ D1 = D div 2,
+ t_from_form1(Form, TypeNames, ET, M, MR, V, D1);
+ true ->
+ {T, L1}
+ end.
+
+-spec t_from_form(parse_form(), type_names(),
+ sets:set(mfa()) | 'replace_by_none',
+ module(), mod_records(), var_table(),
+ expand_depth(), expand_limit())
+ -> {erl_type(), expand_limit()}.
+
+%% If there is something wrong with parse_form()
+%% throw({error, io_lib:chars()}) is called;
+%% for unknown remote types
+%% self() ! {self(), ext_types, {RemMod, Name, ArgsLen}}
+%% is called, unless 'replace_by_none' is given.
+%%
+%% It is assumed that M can be found in MR.
+ +t_from_form(_, _TypeNames, _ET, _M, _MR, _V, D, L) when D =< 0 ; L =< 0 -> + {t_any(), L}; +t_from_form({var, _L, '_'}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_any(), L}; +t_from_form({var, _L, Name}, _TypeNames, _ET, _M, _MR, V, _D, L) -> + case dict:find(Name, V) of + error -> {t_var(Name), L}; + {ok, Val} -> {Val, L} end; -t_from_form({ann_type, _L, [_Var, Type]}, TypeNames, RecDict, VarDict) -> - t_from_form(Type, TypeNames, RecDict, VarDict); -t_from_form({paren_type, _L, [Type]}, TypeNames, RecDict, VarDict) -> - t_from_form(Type, TypeNames, RecDict, VarDict); +t_from_form({ann_type, _L, [_Var, Type]}, TypeNames, ET, M, MR, V, D, L) -> + t_from_form(Type, TypeNames, ET, M, MR, V, D, L); +t_from_form({paren_type, _L, [Type]}, TypeNames, ET, M, MR, V, D, L) -> + t_from_form(Type, TypeNames, ET, M, MR, V, D, L); t_from_form({remote_type, _L, [{atom, _, Module}, {atom, _, Type}, Args]}, - TypeNames, RecDict, VarDict) -> - {L, R} = list_from_form(Args, TypeNames, RecDict, VarDict), - {t_remote(Module, Type, L), R}; -t_from_form({atom, _L, Atom}, _TypeNames, _RecDict, _VarDict) -> - {t_atom(Atom), []}; -t_from_form({integer, _L, Int}, _TypeNames, _RecDict, _VarDict) -> - {t_integer(Int), []}; -t_from_form({op, _L, _Op, _Arg} = Op, _TypeNames, _RecDict, _VarDict) -> + TypeNames, ET, M, MR, V, D, L) -> + remote_from_form(Module, Type, Args, TypeNames, ET, M, MR, V, D, L); +t_from_form({atom, _L, Atom}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_atom(Atom), L}; +t_from_form({integer, _L, Int}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_integer(Int), L}; +t_from_form({op, _L, _Op, _Arg} = Op, _TypeNames, _ET, _M, _MR, _V, _D, L) -> case erl_eval:partial_eval(Op) of {integer, _, Val} -> - {t_integer(Val), []}; + {t_integer(Val), L}; _ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Op])}) end; t_from_form({op, _L, _Op, _Arg1, _Arg2} = Op, _TypeNames, - _RecDict, _VarDict) -> + _ET, _M, _MR, _V, _D, L) -> case erl_eval:partial_eval(Op) of {integer, _, Val} -> - {t_integer(Val), []}; + {t_integer(Val), L}; _ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Op])}) end; -t_from_form({type, _L, any, []}, _TypeNames, _RecDict, _VarDict) -> - {t_any(), []}; -t_from_form({type, _L, arity, []}, _TypeNames, _RecDict, _VarDict) -> - {t_arity(), []}; -t_from_form({type, _L, atom, []}, _TypeNames, _RecDict, _VarDict) -> - {t_atom(), []}; -t_from_form({type, _L, binary, []}, _TypeNames, _RecDict, _VarDict) -> - {t_binary(), []}; +t_from_form({type, _L, any, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_any(), L}; +t_from_form({type, _L, arity, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_arity(), L}; +t_from_form({type, _L, atom, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_atom(), L}; +t_from_form({type, _L, binary, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_binary(), L}; t_from_form({type, _L, binary, [Base, Unit]} = Type, - _TypeNames, _RecDict, _VarDict) -> + _TypeNames, _ET, _M, _MR, _V, _D, L) -> case {erl_eval:partial_eval(Base), erl_eval:partial_eval(Unit)} of {{integer, _, B}, {integer, _, U}} when B >= 0, U >= 0 -> - {t_bitstr(U, B), []}; + {t_bitstr(U, B), L}; _ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Type])}) end; -t_from_form({type, _L, bitstring, []}, _TypeNames, _RecDict, _VarDict) -> - {t_bitstr(), []}; -t_from_form({type, _L, bool, []}, _TypeNames, _RecDict, _VarDict) -> - {t_boolean(), []}; % XXX: Temporarily -t_from_form({type, _L, boolean, []}, _TypeNames, _RecDict, _VarDict) -> - {t_boolean(), []}; 
-t_from_form({type, _L, byte, []}, _TypeNames, _RecDict, _VarDict) -> - {t_byte(), []}; -t_from_form({type, _L, char, []}, _TypeNames, _RecDict, _VarDict) -> - {t_char(), []}; -t_from_form({type, _L, float, []}, _TypeNames, _RecDict, _VarDict) -> - {t_float(), []}; -t_from_form({type, _L, function, []}, _TypeNames, _RecDict, _VarDict) -> - {t_fun(), []}; -t_from_form({type, _L, 'fun', []}, _TypeNames, _RecDict, _VarDict) -> - {t_fun(), []}; +t_from_form({type, _L, bitstring, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_bitstr(), L}; +t_from_form({type, _L, bool, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_boolean(), L}; % XXX: Temporarily +t_from_form({type, _L, boolean, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_boolean(), L}; +t_from_form({type, _L, byte, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_byte(), L}; +t_from_form({type, _L, char, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_char(), L}; +t_from_form({type, _L, float, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_float(), L}; +t_from_form({type, _L, function, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_fun(), L}; +t_from_form({type, _L, 'fun', []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_fun(), L}; t_from_form({type, _L, 'fun', [{type, _, any}, Range]}, TypeNames, - RecDict, VarDict) -> - {T, R} = t_from_form(Range, TypeNames, RecDict, VarDict), - {t_fun(T), R}; + ET, M, MR, V, D, L) -> + {T, L1} = t_from_form(Range, TypeNames, ET, M, MR, V, D - 1, L - 1), + {t_fun(T), L1}; t_from_form({type, _L, 'fun', [{type, _, product, Domain}, Range]}, - TypeNames, RecDict, VarDict) -> - {L, R1} = list_from_form(Domain, TypeNames, RecDict, VarDict), - {T, R2} = t_from_form(Range, TypeNames, RecDict, VarDict), - {t_fun(L, T), R1 ++ R2}; -t_from_form({type, _L, identifier, []}, _TypeNames, _RecDict, _VarDict) -> - {t_identifier(), []}; -t_from_form({type, _L, integer, []}, _TypeNames, _RecDict, _VarDict) -> - {t_integer(), []}; -t_from_form({type, _L, iodata, []}, _TypeNames, _RecDict, _VarDict) -> - {t_iodata(), []}; -t_from_form({type, _L, iolist, []}, _TypeNames, _RecDict, _VarDict) -> - {t_iolist(), []}; -t_from_form({type, _L, list, []}, _TypeNames, _RecDict, _VarDict) -> - {t_list(), []}; -t_from_form({type, _L, list, [Type]}, TypeNames, RecDict, VarDict) -> - {T, R} = t_from_form(Type, TypeNames, RecDict, VarDict), - {t_list(T), R}; -t_from_form({type, _L, map, _}, TypeNames, RecDict, VarDict) -> - builtin_type(map, t_map([]), TypeNames, RecDict, VarDict); -t_from_form({type, _L, mfa, []}, _TypeNames, _RecDict, _VarDict) -> - {t_mfa(), []}; -t_from_form({type, _L, module, []}, _TypeNames, _RecDict, _VarDict) -> - {t_module(), []}; -t_from_form({type, _L, nil, []}, _TypeNames, _RecDict, _VarDict) -> - {t_nil(), []}; -t_from_form({type, _L, neg_integer, []}, _TypeNames, _RecDict, _VarDict) -> - {t_neg_integer(), []}; -t_from_form({type, _L, non_neg_integer, []}, _TypeNames, _RecDict, - _VarDict) -> - {t_non_neg_integer(), []}; -t_from_form({type, _L, no_return, []}, _TypeNames, _RecDict, _VarDict) -> - {t_unit(), []}; -t_from_form({type, _L, node, []}, _TypeNames, _RecDict, _VarDict) -> - {t_node(), []}; -t_from_form({type, _L, none, []}, _TypeNames, _RecDict, _VarDict) -> - {t_none(), []}; -t_from_form({type, _L, nonempty_list, []}, _TypeNames, _RecDict, _VarDict) -> - {t_nonempty_list(), []}; -t_from_form({type, _L, nonempty_list, [Type]}, TypeNames, RecDict, VarDict) -> - {T, R} = t_from_form(Type, TypeNames, RecDict, VarDict), - {t_nonempty_list(T), R}; + TypeNames, ET, M, MR, V, D, L) -> + 
{Dom1, L1} = list_from_form(Domain, TypeNames, ET, M, MR, V, D, L), + {Ran1, L2} = t_from_form(Range, TypeNames, ET, M, MR, V, D - 1, L1), + {t_fun(Dom1, Ran1), L2}; +t_from_form({type, _L, identifier, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_identifier(), L}; +t_from_form({type, _L, integer, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_integer(), L}; +t_from_form({type, _L, iodata, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_iodata(), L}; +t_from_form({type, _L, iolist, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_iolist(), L}; +t_from_form({type, _L, list, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_list(), L}; +t_from_form({type, _L, list, [Type]}, TypeNames, ET, M, MR, V, D, L) -> + {T, L1} = t_from_form(Type, TypeNames, ET, M, MR, V, D - 1, L - 1), + {t_list(T), L1}; +t_from_form({type, _L, map, _}, TypeNames, ET, M, MR, V, D, L) -> + builtin_type(map, t_map([]), TypeNames, ET, M, MR, V, D, L); +t_from_form({type, _L, mfa, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_mfa(), L}; +t_from_form({type, _L, module, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_module(), L}; +t_from_form({type, _L, nil, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_nil(), L}; +t_from_form({type, _L, neg_integer, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_neg_integer(), L}; +t_from_form({type, _L, non_neg_integer, []}, _TypeNames, _ET, _M, _MR, + _V, _D, L) -> + {t_non_neg_integer(), L}; +t_from_form({type, _L, no_return, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_unit(), L}; +t_from_form({type, _L, node, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_node(), L}; +t_from_form({type, _L, none, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_none(), L}; +t_from_form({type, _L, nonempty_list, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_nonempty_list(), L}; +t_from_form({type, _L, nonempty_list, [Type]}, TypeNames, ET, M, MR, V, D, L) -> + {T, L1} = t_from_form(Type, TypeNames, ET, M, MR, V, D, L - 1), + {t_nonempty_list(T), L1}; t_from_form({type, _L, nonempty_improper_list, [Cont, Term]}, TypeNames, - RecDict, VarDict) -> - {T1, R1} = t_from_form(Cont, TypeNames, RecDict, VarDict), - {T2, R2} = t_from_form(Term, TypeNames, RecDict, VarDict), - {t_cons(T1, T2), R1 ++ R2}; + ET, M, MR, V, D, L) -> + {T1, L1} = t_from_form(Cont, TypeNames, ET, M, MR, V, D, L - 1), + {T2, L2} = t_from_form(Term, TypeNames, ET, M, MR, V, D, L1), + {t_cons(T1, T2), L2}; t_from_form({type, _L, nonempty_maybe_improper_list, []}, _TypeNames, - _RecDict, _VarDict) -> - {t_cons(?any, ?any), []}; + _ET, _M, _MR, _V, _D, L) -> + {t_cons(?any, ?any), L}; t_from_form({type, _L, nonempty_maybe_improper_list, [Cont, Term]}, - TypeNames, RecDict, VarDict) -> - {T1, R1} = t_from_form(Cont, TypeNames, RecDict, VarDict), - {T2, R2} = t_from_form(Term, TypeNames, RecDict, VarDict), - {t_cons(T1, T2), R1 ++ R2}; -t_from_form({type, _L, nonempty_string, []}, _TypeNames, _RecDict, - _VarDict) -> - {t_nonempty_string(), []}; -t_from_form({type, _L, number, []}, _TypeNames, _RecDict, _VarDict) -> - {t_number(), []}; -t_from_form({type, _L, pid, []}, _TypeNames, _RecDict, _VarDict) -> - {t_pid(), []}; -t_from_form({type, _L, port, []}, _TypeNames, _RecDict, _VarDict) -> - {t_port(), []}; -t_from_form({type, _L, pos_integer, []}, _TypeNames, _RecDict, _VarDict) -> - {t_pos_integer(), []}; + TypeNames, ET, M, MR, V, D, L) -> + {T1, L1} = t_from_form(Cont, TypeNames, ET, M, MR, V, D, L - 1), + {T2, L2} = t_from_form(Term, TypeNames, ET, M, MR, V, D, L1), + {t_cons(T1, T2), L2}; 
+t_from_form({type, _L, nonempty_string, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_nonempty_string(), L}; +t_from_form({type, _L, number, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_number(), L}; +t_from_form({type, _L, pid, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_pid(), L}; +t_from_form({type, _L, port, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_port(), L}; +t_from_form({type, _L, pos_integer, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_pos_integer(), L}; t_from_form({type, _L, maybe_improper_list, []}, _TypeNames, - _RecDict, _VarDict) -> - {t_maybe_improper_list(), []}; + _ET, _M, _MR, _V, _D, L) -> + {t_maybe_improper_list(), L}; t_from_form({type, _L, maybe_improper_list, [Content, Termination]}, - TypeNames, RecDict, VarDict) -> - {T1, R1} = t_from_form(Content, TypeNames, RecDict, VarDict), - {T2, R2} = t_from_form(Termination, TypeNames, RecDict, VarDict), - {t_maybe_improper_list(T1, T2), R1 ++ R2}; -t_from_form({type, _L, product, Elements}, TypeNames, RecDict, VarDict) -> - {L, R} = list_from_form(Elements, TypeNames, RecDict, VarDict), - {t_product(L), R}; + TypeNames, ET, M, MR, V, D, L) -> + {T1, L1} = t_from_form(Content, TypeNames, ET, M, MR, V, D, L - 1), + {T2, L2} = t_from_form(Termination, TypeNames, ET, M, MR, V, D, L1), + {t_maybe_improper_list(T1, T2), L2}; +t_from_form({type, _L, product, Elements}, TypeNames, ET, M, MR, V, D, L) -> + {Lst, L1} = list_from_form(Elements, TypeNames, ET, M, MR, V, D - 1, L), + {t_product(Lst), L1}; t_from_form({type, _L, range, [From, To]} = Type, - _TypeNames, _RecDict, _VarDict) -> + _TypeNames, _ET, _M, _MR, _V, _D, L) -> case {erl_eval:partial_eval(From), erl_eval:partial_eval(To)} of {{integer, _, FromVal}, {integer, _, ToVal}} -> - {t_from_range(FromVal, ToVal), []}; + {t_from_range(FromVal, ToVal), L}; _ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Type])}) end; -t_from_form({type, _L, record, [Name|Fields]}, TypeNames, RecDict, VarDict) -> - record_from_form(Name, Fields, TypeNames, RecDict, VarDict); -t_from_form({type, _L, reference, []}, _TypeNames, _RecDict, _VarDict) -> - {t_reference(), []}; -t_from_form({type, _L, string, []}, _TypeNames, _RecDict, _VarDict) -> - {t_string(), []}; -t_from_form({type, _L, term, []}, _TypeNames, _RecDict, _VarDict) -> - {t_any(), []}; -t_from_form({type, _L, timeout, []}, _TypeNames, _RecDict, _VarDict) -> - {t_timeout(), []}; -t_from_form({type, _L, tuple, any}, _TypeNames, _RecDict, _VarDict) -> - {t_tuple(), []}; -t_from_form({type, _L, tuple, Args}, TypeNames, RecDict, VarDict) -> - {L, R} = list_from_form(Args, TypeNames, RecDict, VarDict), - {t_tuple(L), R}; -t_from_form({type, _L, union, Args}, TypeNames, RecDict, VarDict) -> - {L, R} = list_from_form(Args, TypeNames, RecDict, VarDict), - {t_sup(L), R}; -t_from_form({user_type, _L, Name, Args}, TypeNames, RecDict, VarDict) -> - type_from_form(Name, Args, TypeNames, RecDict, VarDict); -t_from_form({type, _L, Name, Args}, TypeNames, RecDict, VarDict) -> +t_from_form({type, _L, record, [Name|Fields]}, TypeNames, ET, M, MR, V, D, L) -> + record_from_form(Name, Fields, TypeNames, ET, M, MR, V, D, L); +t_from_form({type, _L, reference, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_reference(), L}; +t_from_form({type, _L, string, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_string(), L}; +t_from_form({type, _L, term, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_any(), L}; +t_from_form({type, _L, timeout, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_timeout(), L}; 
+t_from_form({type, _L, tuple, any}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_tuple(), L}; +t_from_form({type, _L, tuple, Args}, TypeNames, ET, M, MR, V, D, L) -> + {Lst, L1} = list_from_form(Args, TypeNames, ET, M, MR, V, D - 1, L), + {t_tuple(Lst), L1}; +t_from_form({type, _L, union, Args}, TypeNames, ET, M, MR, V, D, L) -> + {Lst, L1} = list_from_form(Args, TypeNames, ET, M, MR, V, D, L), + {t_sup(Lst), L1}; +t_from_form({user_type, _L, Name, Args}, TypeNames, ET, M, MR, V, D, L) -> + type_from_form(Name, Args, TypeNames, ET, M, MR, V, D, L); +t_from_form({type, _L, Name, Args}, TypeNames, ET, M, MR, V, D, L) -> %% Compatibility: modules compiled before Erlang/OTP 18.0. - type_from_form(Name, Args, TypeNames, RecDict, VarDict); + type_from_form(Name, Args, TypeNames, ET, M, MR, V, D, L); t_from_form({opaque, _L, Name, {Mod, Args, Rep}}, _TypeNames, - _RecDict, _VarDict) -> - {t_opaque(Mod, Name, Args, Rep), []}. - -builtin_type(Name, Type, TypeNames, RecDict, VarDict) -> - case lookup_type(Name, 0, RecDict) of - {_, {_M, _T, _A}} -> - type_from_form(Name, [], TypeNames, RecDict, VarDict); + _ET, _M, _MR, _V, _D, L) -> + %% XXX. To be removed. + {t_opaque(Mod, Name, Args, Rep), L}. + +builtin_type(Name, Type, TypeNames, ET, M, MR, V, D, L) -> + case dict:find(M, MR) of + {ok, R} -> + case lookup_type(Name, 0, R) of + {_, {{_M, _F, _A}, _T}} -> + type_from_form(Name, [], TypeNames, ET, M, MR, V, D, L); + error -> + {Type, L} + end; error -> - {Type, []} + {Type, L} end. -type_from_form(Name, Args, TypeNames, RecDict, VarDict) -> +type_from_form(Name, Args, TypeNames, ET, M, MR, V, D, L) -> ArgsLen = length(Args), - ArgTypes = forms_to_types(Args, TypeNames, RecDict, VarDict), - case lookup_type(Name, ArgsLen, RecDict) of - {type, {_Module, Type, ArgNames}} -> - TypeName = {type, Name, ArgsLen}, + {ArgTypes, L1} = list_from_form(Args, TypeNames, ET, M, MR, V, D, L), + {ok, R} = dict:find(M, MR), + case lookup_type(Name, ArgsLen, R) of + {type, {{Module, Form, ArgNames}, _Type}} -> + TypeName = {type, Module, Name, ArgsLen}, case can_unfold_more(TypeName, TypeNames) of true -> List = lists:zip(ArgNames, ArgTypes), - TmpVarDict = dict:from_list(List), - {T, R} = t_from_form(Type, [TypeName|TypeNames], - RecDict, TmpVarDict), - case lists:member(TypeName, R) of - true -> {t_limit(T, ?REC_TYPE_LIMIT), R}; - false -> {T, R} - end; - false -> {t_any(), [TypeName]} + TmpV = dict:from_list(List), + t_from_form(Form, [TypeName|TypeNames], ET, M, MR, TmpV, D, L1); + false -> + {t_any(), L1} end; - {opaque, {Module, Type, ArgNames}} -> - TypeName = {opaque, Name, ArgsLen}, - {Rep, Rret} = + {opaque, {{Module, Form, ArgNames}, Type}} -> + TypeName = {opaque, Module, Name, ArgsLen}, + {Rep, L2} = case can_unfold_more(TypeName, TypeNames) of true -> List = lists:zip(ArgNames, ArgTypes), - TmpVarDict = dict:from_list(List), - {T, R} = t_from_form(Type, [TypeName|TypeNames], - RecDict, TmpVarDict), - case lists:member(TypeName, R) of - true -> {t_limit(T, ?REC_TYPE_LIMIT), R}; - false -> {T, R} - end; - false -> {t_any(), [TypeName]} + TmpV = dict:from_list(List), + t_from_form(Form, [TypeName|TypeNames], ET, M, MR, TmpV, D, L1); + false -> {t_any(), L1} end, + Rep1 = choose_opaque_type(Rep, Type), Args2 = [subst_all_vars_to_any(ArgType) || ArgType <- ArgTypes], - {skip_opaque_alias(Rep, Module, Name, Args2), Rret}; + {skip_opaque_alias(Rep1, Module, Name, Args2), L2}; error -> Msg = io_lib:format("Unable to find type ~w/~w\n", [Name, ArgsLen]), throw({error, Msg}) end. 
-forms_to_types(Forms, TypeNames, RecDict, VarDict) -> - {Types, _} = list_from_form(Forms, TypeNames, RecDict, VarDict), - Types. - skip_opaque_alias(?opaque(_) = T, _Mod, _Name, _Args) -> T; skip_opaque_alias(T, Module, Name, Args) -> t_opaque(Module, Name, Args, T). -record_from_form({atom, _, Name}, ModFields, TypeNames, RecDict, VarDict) -> +remote_from_form(RemMod, Name, Args, TypeNames, ET, M, MR, V, D, L) -> + {ArgTypes, L1} = list_from_form(Args, TypeNames, ET, M, MR, V, D, L), + if + ET =:= replace_by_none -> + {t_none(), L1}; + true -> + ArgsLen = length(Args), + case dict:find(RemMod, MR) of + error -> + self() ! {self(), ext_types, {RemMod, Name, ArgsLen}}, + {t_any(), L1}; + {ok, RemDict} -> + MFA = {RemMod, Name, ArgsLen}, + case sets:is_element(MFA, ET) of + true -> + case lookup_type(Name, ArgsLen, RemDict) of + {type, {{_Mod, Form, ArgNames}, _Type}} -> + RemType = {type, RemMod, Name, ArgsLen}, + case can_unfold_more(RemType, TypeNames) of + true -> + List = lists:zip(ArgNames, ArgTypes), + TmpVarDict = dict:from_list(List), + NewTypeNames = [RemType|TypeNames], + t_from_form(Form, NewTypeNames, ET, + RemMod, MR, TmpVarDict, D, L1); + false -> + {t_any(), L1} + end; + {opaque, {{Mod, Form, ArgNames}, Type}} -> + RemType = {opaque, RemMod, Name, ArgsLen}, + List = lists:zip(ArgNames, ArgTypes), + TmpVarDict = dict:from_list(List), + {NewRep, L2} = + case can_unfold_more(RemType, TypeNames) of + true -> + NewTypeNames = [RemType|TypeNames], + t_from_form(Form, NewTypeNames, ET, RemMod, MR, + TmpVarDict, D, L1); + false -> + {t_any(), L1} + end, + NewRep1 = choose_opaque_type(NewRep, Type), + {skip_opaque_alias(NewRep1, Mod, Name, ArgTypes), L2}; + error -> + Msg = io_lib:format("Unable to find remote type ~w:~w()\n", + [RemMod, Name]), + throw({error, Msg}) + end; + false -> + self() ! {self(), ext_types, {RemMod, Name, ArgsLen}}, + {t_any(), L1} + end + end + end. + +%% Opaque types (both local and remote) are problematic when it comes +%% to the limits (TypeNames, D, and L). The reason is that if any() is +%% substituted for a more specialized subtype of an opaque type, the +%% property stated along with decorate_with_opaque() (the type has to +%% be a subtype of the declared type) no longer holds. +%% +%% The less than perfect remedy: if the opaque type created from a +%% form is not a subset of the declared type, the declared type is +%% used instead, effectively bypassing the limits, and potentially +%% resulting in huge types. +choose_opaque_type(Type, DeclType) -> + case + t_is_subtype(subst_all_vars_to_any(Type), + subst_all_vars_to_any(DeclType)) + of + true -> Type; + false -> DeclType + end. 
+ +record_from_form({atom, _, Name}, ModFields, TypeNames, ET, M, MR, V, D, L) -> case can_unfold_more({record, Name}, TypeNames) of true -> - case lookup_record(Name, RecDict) of + {ok, R} = dict:find(M, MR), + case lookup_record(Name, R) of {ok, DeclFields} -> - TypeNames1 = [{record, Name}|TypeNames], - AreTyped = [is_erl_type(FieldType) - || {_FieldName, FieldType} <- DeclFields], - {DeclFields1, R1} = - case lists:all(fun(Elem) -> Elem end, AreTyped) of - true -> {DeclFields, []}; - false -> fields_from_form(DeclFields, TypeNames1, - RecDict, dict:new()) - end, - {GetModRec, R2} = get_mod_record(ModFields, DeclFields1, - TypeNames1, - RecDict, VarDict), + NewTypeNames = [{record, Name}|TypeNames], + {GetModRec, L1} = get_mod_record(ModFields, DeclFields, + NewTypeNames, ET, M, MR, V, D, L), case GetModRec of {error, FieldName} -> throw({error, io_lib:format("Illegal declaration of #~w{~w}\n", [Name, FieldName])}); {ok, NewFields} -> - {t_tuple( - [t_atom(Name)|[Type || {_FieldName, Type} <- NewFields]]), - R1 ++ R2} + {NewFields1, L2} = + fields_from_form(NewFields, NewTypeNames, ET, M, MR, + dict:new(), D, L1), + Rec = t_tuple( + [t_atom(Name)|[Type + || {_FieldName, Type} <- NewFields1]]), + {Rec, L2} end; error -> throw({error, io_lib:format("Unknown record #~w{}\n", [Name])}) end; - false -> {t_any(), []} + false -> + {t_any(), L} end. -get_mod_record([], DeclFields, _TypeNames, _RecDict, _VarDict) -> - {{ok, DeclFields}, []}; -get_mod_record(ModFields, DeclFields, TypeNames, RecDict, VarDict) -> - DeclFieldsDict = orddict:from_list(DeclFields), - {ModFieldsDict, R} = build_field_dict(ModFields, TypeNames, - RecDict, VarDict), - case get_mod_record(DeclFieldsDict, ModFieldsDict, []) of - {error, _FieldName} = Error -> {Error, R}; - {ok, FinalOrdDict} -> - {{ok, [{FieldName, orddict:fetch(FieldName, FinalOrdDict)} - || {FieldName, _} <- DeclFields]}, - R} +get_mod_record([], DeclFields, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {{ok, DeclFields}, L}; +get_mod_record(ModFields, DeclFields, TypeNames, ET, M, MR, V, D, L) -> + DeclFieldsDict = lists:keysort(1, DeclFields), + {ModFieldsDict, L1} = + build_field_dict(ModFields, TypeNames, ET, M, MR, V, D, L), + case get_mod_record_types(DeclFieldsDict, ModFieldsDict, []) of + {error, _FieldName} = Error -> {Error, L1}; + {ok, FinalKeyDict} -> + Fields = [lists:keyfind(FieldName, 1, FinalKeyDict) + || {FieldName, _, _} <- DeclFields], + {{ok, Fields}, L1} end. -build_field_dict(FieldTypes, TypeNames, RecDict, VarDict) -> - build_field_dict(FieldTypes, TypeNames, RecDict, VarDict, []). - -build_field_dict([{type, _, field_type, [{atom, _, Name}, Type]}|Left], - TypeNames, RecDict, VarDict, Acc) -> - {T, R1} = t_from_form(Type, TypeNames, RecDict, VarDict), - NewAcc = [{Name, T}|Acc], - {D, R2} = build_field_dict(Left, TypeNames, RecDict, VarDict, NewAcc), - {D, R1 ++ R2}; -build_field_dict([], _TypeNames, _RecDict, _VarDict, Acc) -> - {orddict:from_list(Acc), []}. - -get_mod_record([{FieldName, DeclType}|Left1], - [{FieldName, ModType}|Left2], Acc) -> - ModTypeNoVars = subst_all_vars_to_any(ModType), - case - contains_remote(ModTypeNoVars) - orelse contains_remote(DeclType) - orelse t_is_subtype(ModTypeNoVars, DeclType) - of +build_field_dict(FieldTypes, TypeNames, ET, M, MR, V, D, L) -> + build_field_dict(FieldTypes, TypeNames, ET, M, MR, V, D, L, []). 
+ +build_field_dict([{type, _, field_type, [{atom, _, Name}, Type]}|Left], + TypeNames, ET, M, MR, V, D, L, Acc) -> + {T, L1} = t_from_form(Type, TypeNames, ET, M, MR, V, D, L - 1), + %% The cached record field type (DeclType) in + %% get_mod_record_types()), was created with a similar call as TT. + %% Using T for the subtype test does not work since any() is not + %% always a subset of the field type. + TT = t_from_form(Type, ET, M, MR, V), + NewAcc = [{Name, Type, T, TT}|Acc], + {Dict, L2} = + build_field_dict(Left, TypeNames, ET, M, MR, V, D, L1, NewAcc), + {Dict, L2}; +build_field_dict([], _TypeNames, _ET, _M, _MR, _V, _D, L, Acc) -> + {lists:keysort(1, Acc), L}. + +get_mod_record_types([{FieldName, _Abstr, DeclType}|Left1], + [{FieldName, TypeForm, ModType, ModTypeTest}|Left2], + Acc) -> + ModTypeNoVars = subst_all_vars_to_any(ModTypeTest), + case t_is_subtype(ModTypeNoVars, DeclType) of false -> {error, FieldName}; - true -> get_mod_record(Left1, Left2, [{FieldName, ModType}|Acc]) + true -> get_mod_record_types(Left1, Left2, + [{FieldName, TypeForm, ModType}|Acc]) end; -get_mod_record([{FieldName1, _DeclType} = DT|Left1], - [{FieldName2, _ModType}|_] = List2, - Acc) when FieldName1 < FieldName2 -> - get_mod_record(Left1, List2, [DT|Acc]); -get_mod_record(DeclFields, [], Acc) -> - {ok, orddict:from_list(Acc ++ DeclFields)}; -get_mod_record(_, [{FieldName2, _ModType}|_], _Acc) -> +get_mod_record_types([{FieldName1, _Abstr, _DeclType} = DT|Left1], + [{FieldName2, _FormType, _ModType, _TT}|_] = List2, + Acc) when FieldName1 < FieldName2 -> + get_mod_record_types(Left1, List2, [DT|Acc]); +get_mod_record_types(Left1, [], Acc) -> + {ok, lists:keysort(1, Left1++Acc)}; +get_mod_record_types(_, [{FieldName2, _FormType, _ModType, _TT}|_], _Acc) -> {error, FieldName2}. -contains_remote(Type) -> - TypeNoRemote = subst_all_remote(Type, t_none()), - not t_is_equal(Type, TypeNoRemote). - -fields_from_form([], _TypeNames, _RecDict, _VarDict) -> - {[], []}; -fields_from_form([{Name, Type}|Tail], TypeNames, RecDict, - VarDict) -> - {T, R1} = t_from_form(Type, TypeNames, RecDict, VarDict), - {F, R2} = fields_from_form(Tail, TypeNames, RecDict, VarDict), - {[{Name, T}|F], R1 ++ R2}. - -list_from_form([], _TypeNames, _RecDict, _VarDict) -> - {[], []}; -list_from_form([H|Tail], TypeNames, RecDict, VarDict) -> - {T, R1} = t_from_form(H, TypeNames, RecDict, VarDict), - {L, R2} = list_from_form(Tail, TypeNames, RecDict, VarDict), - {[T|L], R1 ++ R2}. +%% It is important to create a limited version of the record type +%% since nested record types can otherwise easily result in huge +%% terms. +fields_from_form([], _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {[], L}; +fields_from_form([{Name, Abstr, _Type}|Tail], TypeNames, ET, M, MR, + V, D, L) -> + {T, L1} = t_from_form(Abstr, TypeNames, ET, M, MR, V, D, L), + {F, L2} = fields_from_form(Tail, TypeNames, ET, M, MR, V, D, L1), + {[{Name, T}|F], L2}. + +list_from_form([], _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {[], L}; +list_from_form([H|Tail], TypeNames, ET, M, MR, V, D, L) -> + {H1, L1} = t_from_form(H, TypeNames, ET, M, MR, V, D, L - 1), + {T1, L2} = list_from_form(Tail, TypeNames, ET, M, MR, V, D, L1), + {[H1|T1], L2}. + +-spec t_var_names([erl_type()]) -> [atom()]. + +t_var_names([{var, _, Name}|L]) when L =/= '_' -> + [Name|t_var_names(L)]; +t_var_names([]) -> + []. -spec t_form_to_string(parse_form()) -> string(). 
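The depth (D) and size (L) budgets threaded through the functions above, and the comment on fields_from_form/8, are easiest to see with a nested record type (hypothetical example, not part of the patch): every unfolding of the record repeats its whole tuple type, so without a budget the resulting term can grow without bound.

    -module(nested_ex).
    -export_type([tree/0]).

    %% Each field of type #tree{} expands to the full tuple type of the
    %% record again; the budget decremented in list_from_form/8 and
    %% build_field_dict/9 is what stops the expansion.
    -record(tree, {value = 0   :: integer(),
                   left  = nil :: #tree{} | nil,
                   right = nil :: #tree{} | nil}).
    -type tree() :: #tree{}.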
@@ -4505,7 +4504,13 @@ t_form_to_string({type, _L, tuple, Args}) -> t_form_to_string({type, _L, union, Args}) -> string:join(t_form_to_string_list(Args), " | "); t_form_to_string({type, _L, Name, []} = T) -> - try t_to_string(t_from_form(T)) + try + M = mod, + D0 = dict:new(), + MR = dict:from_list([{M, D0}]), + {T1, _} = + t_from_form(T, [], sets:new(), M, MR, D0, _Deep=1000, _ALot=100000), + t_to_string(T1) catch throw:{error, _} -> atom_to_string(Name) ++ "()" end; t_form_to_string({user_type, _L, Name, List}) -> @@ -4556,7 +4561,7 @@ is_erl_type(#c{}) -> true; is_erl_type(_) -> false. -spec lookup_record(atom(), type_table()) -> - 'error' | {'ok', [{atom(), parse_form() | erl_type()}]}. + 'error' | {'ok', [{atom(), parse_form(), erl_type()}]}. lookup_record(Tag, RecDict) when is_atom(Tag) -> case dict:find({record, Tag}, RecDict) of @@ -4571,7 +4576,7 @@ lookup_record(Tag, RecDict) when is_atom(Tag) -> end. -spec lookup_record(atom(), arity(), type_table()) -> - 'error' | {'ok', [{atom(), erl_type()}]}. + 'error' | {'ok', [{atom(), parse_form(), erl_type()}]}. lookup_record(Tag, Arity, RecDict) when is_atom(Tag) -> case dict:find({record, Tag}, RecDict) of diff --git a/lib/kernel/doc/src/inet.xml b/lib/kernel/doc/src/inet.xml index 8dd311e5cd..77a8caaaf6 100644 --- a/lib/kernel/doc/src/inet.xml +++ b/lib/kernel/doc/src/inet.xml @@ -332,23 +332,23 @@ fe80::204:acff:fe17:bf38 <taglist> <tag><c>recv_avg</c></tag> <item> - <p>Average size of packets in bytes received to the socket.</p> + <p>Average size of packets in bytes received by the socket.</p> </item> <tag><c>recv_cnt</c></tag> <item> - <p>Number of packets received to the socket.</p> + <p>Number of packets received by the socket.</p> </item> <tag><c>recv_dvi</c></tag> <item> - <p>Average packet size deviation in bytes received to the socket.</p> + <p>Average packet size deviation in bytes received by the socket.</p> </item> <tag><c>recv_max</c></tag> <item> - <p>The size of the largest packet in bytes received to the socket.</p> + <p>The size of the largest packet in bytes received by the socket.</p> </item> <tag><c>recv_oct</c></tag> <item> - <p>Number of bytes received to the socket.</p> + <p>Number of bytes received by the socket.</p> </item> <tag><c>send_avg</c></tag> diff --git a/lib/kernel/doc/src/kernel_app.xml b/lib/kernel/doc/src/kernel_app.xml index 00c6bc33d6..96e3651140 100644 --- a/lib/kernel/doc/src/kernel_app.xml +++ b/lib/kernel/doc/src/kernel_app.xml @@ -4,7 +4,7 @@ <appref> <header> <copyright> - <year>1996</year><year>2014</year> + <year>1996</year><year>2015</year> <holder>Ericsson AB. All Rights Reserved.</holder> </copyright> <legalnotice> @@ -188,6 +188,18 @@ <p>Define the <c>First..Last</c> port range for the listener socket of a distributed Erlang node.</p> </item> + <tag><c>{inet_dist_listen_options, Opts}</c></tag> + <item> + <p>Define a list of extra socket options to be used when opening the + listening socket for a distributed Erlang node. + See <seealso marker="gen_tcp#listen/2">gen_tcp:listen/2</seealso></p> + </item> + <tag><c>{inet_dist_connect_options, Opts}</c></tag> + <item> + <p>Define a list of extra socket options to be used when connecting to + other distributed Erlang nodes. 
+ See <seealso marker="gen_tcp#connect/4">gen_tcp:connect/4</seealso></p> + </item> <tag><c>inet_parse_error_log = silent</c></tag> <item> <p>If this configuration parameter is set, no diff --git a/lib/kernel/src/application_controller.erl b/lib/kernel/src/application_controller.erl index daad45b6c2..6635885aaf 100644 --- a/lib/kernel/src/application_controller.erl +++ b/lib/kernel/src/application_controller.erl @@ -1615,7 +1615,6 @@ conv([Key, Val | T]) -> [{make_term(Key), make_term(Val)} | conv(T)]; conv(_) -> []. -%%% Fix some day: eliminate the duplicated code here make_term(Str) -> case erl_scan:string(Str) of {ok, Tokens, _} -> @@ -1623,16 +1622,17 @@ make_term(Str) -> {ok, Term} -> Term; {error, {_,M,Reason}} -> - error_logger:format("application_controller: ~ts: ~ts~n", - [M:format_error(Reason), Str]), - throw({error, {bad_environment_value, Str}}) + handle_make_term_error(M, Reason, Str) end; {error, {_,M,Reason}, _} -> - error_logger:format("application_controller: ~ts: ~ts~n", - [M:format_error(Reason), Str]), - throw({error, {bad_environment_value, Str}}) + handle_make_term_error(M, Reason, Str) end. +handle_make_term_error(Mod, Reason, Str) -> + error_logger:format("application_controller: ~ts: ~ts~n", + [Mod:format_error(Reason), Str]), + throw({error, {bad_environment_value, Str}}). + get_env_i(Name, #state{conf_data = ConfData}) when is_list(ConfData) -> case lists:keyfind(Name, 1, ConfData) of {_Name, Env} -> Env; diff --git a/lib/kernel/src/gen_udp.erl b/lib/kernel/src/gen_udp.erl index 70dceb3679..860eec10a0 100644 --- a/lib/kernel/src/gen_udp.erl +++ b/lib/kernel/src/gen_udp.erl @@ -78,7 +78,7 @@ ipv6_v6only. -type socket() :: port(). --export_type([option/0, option_name/0]). +-export_type([option/0, option_name/0, socket/0]). -spec open(Port) -> {ok, Socket} | {error, Reason} when Port :: inet:port_number(), diff --git a/lib/kernel/src/inet.erl b/lib/kernel/src/inet.erl index 43bab8bcf0..ec2c350931 100644 --- a/lib/kernel/src/inet.erl +++ b/lib/kernel/src/inet.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1997-2014. All Rights Reserved. +%% Copyright Ericsson AB 1997-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -1070,7 +1070,7 @@ gethostbyname_tm(Name, Type, Timer, [wins|_]=Opts) -> gethostbyname_tm_native(Name, Type, Timer, Opts); gethostbyname_tm(Name, Type, Timer, [native|_]=Opts) -> gethostbyname_tm_native(Name, Type, Timer, Opts); -gethostbyname_tm(Name, Type, Timer, [_|_]=Opts) -> +gethostbyname_tm(Name, Type, Timer, [_|Opts]) -> gethostbyname_tm(Name, Type, Timer, Opts); %% Make sure we always can look up our own hostname. gethostbyname_tm(Name, Type, Timer, []) -> diff --git a/lib/kernel/src/inet_tcp_dist.erl b/lib/kernel/src/inet_tcp_dist.erl index 63f236b069..835dcf2705 100644 --- a/lib/kernel/src/inet_tcp_dist.erl +++ b/lib/kernel/src/inet_tcp_dist.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1997-2013. All Rights Reserved. +%% Copyright Ericsson AB 1997-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -77,7 +77,7 @@ listen(Name) -> Error end. 
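The one-token change in inet:gethostbyname_tm/4 a few hunks above ([_|_]=Opts becoming [_|Opts]) makes the function skip an unrecognized lookup method instead of retrying the unchanged list forever; the lookup_bad_search_option test case added further down exercises exactly that. A hypothetical sketch of the pattern (not the real resolver code):

    -module(lookup_ex).
    -export([first_known/1]).

    %% The fixed clause head [_|Opts] drops the unknown entry, whereas the
    %% old head [_|_]=Opts recursed with the full list and never terminated.
    first_known([native | _]) -> native;
    first_known([file | _])   -> file;
    first_known([_ | Opts])   -> first_known(Opts);  %% skip unknown entries
    first_known([])           -> none.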
-do_listen(Options0) -> +do_listen(Options) -> {First,Last} = case application:get_env(kernel,inet_dist_listen_min) of {ok,N} when is_integer(N) -> case application:get_env(kernel, @@ -90,13 +90,7 @@ do_listen(Options0) -> _ -> {0,0} end, - Options = case application:get_env(kernel, inet_dist_use_interface) of - {ok, Ip} -> - [{ip, Ip} | Options0]; - _ -> - Options0 - end, - do_listen(First, Last, [{backlog,128}|Options]). + do_listen(First, Last, listen_options([{backlog,128}|Options])). do_listen(First,Last,_) when First > Last -> {error,eaddrinuse}; @@ -108,6 +102,23 @@ do_listen(First,Last,Options) -> Other end. +listen_options(Opts0) -> + Opts1 = + case application:get_env(kernel, inet_dist_use_interface) of + {ok, Ip} -> + [{ip, Ip} | Opts0]; + _ -> + Opts0 + end, + case application:get_env(kernel, inet_dist_listen_options) of + {ok,ListenOpts} -> + erlang:display({inet_dist_listen_options, ListenOpts}), + ListenOpts ++ Opts1; + _ -> + Opts1 + end. + + %% ------------------------------------------------------------ %% Accepts new connection attempts from other Erlang nodes. %% ------------------------------------------------------------ @@ -219,7 +230,7 @@ nodelay() -> _ -> {nodelay, true} end. - + %% ------------------------------------------------------------ %% Get remote information about a Socket. @@ -260,9 +271,11 @@ do_setup(Kernel, Node, Type, MyNode, LongOrShortNames,SetupTime) -> ?trace("port_please(~p) -> version ~p~n", [Node,Version]), dist_util:reset_timer(Timer), - case inet_tcp:connect(Ip, TcpPort, - [{active, false}, - {packet,2}]) of + case + inet_tcp:connect( + Ip, TcpPort, + connect_options([{active, false}, {packet, 2}])) + of {ok, Socket} -> HSData = #hs_data{ kernel_pid = Kernel, @@ -324,6 +337,15 @@ do_setup(Kernel, Node, Type, MyNode, LongOrShortNames,SetupTime) -> ?shutdown(Node) end. +connect_options(Opts) -> + case application:get_env(kernel, inet_dist_connect_options) of + {ok,ConnectOpts} -> + erlang:display({inet_dist_listen_options, ConnectOpts}), + ConnectOpts ++ Opts; + _ -> + Opts + end. + %% %% Close a socket. %% diff --git a/lib/kernel/test/erl_distribution_SUITE.erl b/lib/kernel/test/erl_distribution_SUITE.erl index 9cccdab76b..15c2adc957 100644 --- a/lib/kernel/test/erl_distribution_SUITE.erl +++ b/lib/kernel/test/erl_distribution_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1997-2011. All Rights Reserved. +%% Copyright Ericsson AB 1997-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -26,7 +26,8 @@ -export([tick/1, tick_change/1, illegal_nodenames/1, hidden_node/1, table_waste/1, net_setuptime/1, - + inet_dist_options_options/1, + monitor_nodes_nodedown_reason/1, monitor_nodes_complex_nodedown_reason/1, monitor_nodes_node_type/1, @@ -38,7 +39,8 @@ monitor_nodes_many/1]). %% Performs the test at another node. --export([tick_cli_test/1, tick_cli_test1/1, +-export([get_socket_priorities/0, + tick_cli_test/1, tick_cli_test1/1, tick_serv_test/2, tick_serv_test1/1, keep_conn/1, time_ping/1]). @@ -62,7 +64,8 @@ suite() -> [{ct_hooks,[ts_install_cth]}]. all() -> [tick, tick_change, illegal_nodenames, hidden_node, - table_waste, net_setuptime, {group, monitor_nodes}]. + table_waste, net_setuptime, inet_dist_options_options, + {group, monitor_nodes}]. groups() -> [{monitor_nodes, [], @@ -554,6 +557,71 @@ check_monitor_nodes_res(Pid, Node) -> end. 
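The listen_options/1 and connect_options/1 helpers above read the two new kernel parameters documented earlier in this patch. For illustration (not part of the patch), they can be set either in sys.config or on the command line, which is how the test case that follows does it:

    %% sys.config: raise the priority of all distribution sockets.
    [{kernel,
      [{inet_dist_listen_options,  [{priority, 1}]},
       {inet_dist_connect_options, [{priority, 1}]}]}].

    %% Equivalent command line flags (quoting depends on the shell):
    %%   erl -kernel inet_dist_listen_options '[{priority,1}]' \
    %%       -kernel inet_dist_connect_options '[{priority,1}]'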
+ +inet_dist_options_options(suite) -> []; +inet_dist_options_options(doc) -> + ["Check the kernel inet_dist_{listen,connect}_options options"]; +inet_dist_options_options(Config) when is_list(Config) -> + Prio = 1, + case gen_udp:open(0, [{priority,Prio}]) of + {ok,Socket} -> + case inet:getopts(Socket, [priority]) of + {ok,[{priority,Prio}]} -> + ok = gen_udp:close(Socket), + do_inet_dist_options_options(Prio); + _ -> + ok = gen_udp:close(Socket), + {skip, + "Can not set priority "++integer_to_list(Prio)++ + " on socket"} + end; + {error,_} -> + {skip, "Can not set priority on socket"} + end. + +do_inet_dist_options_options(Prio) -> + PriorityString0 = "[{priority,"++integer_to_list(Prio)++"}]", + PriorityString = + case os:cmd("echo [{a,1}]") of + "[{a,1}]"++_ -> + PriorityString0; + _ -> + %% Some shells need quoting of [{}] + "'"++PriorityString0++"'" + end, + InetDistOptions = + "-hidden " + "-kernel inet_dist_connect_options "++PriorityString++" " + "-kernel inet_dist_listen_options "++PriorityString, + ?line {ok,Node1} = + start_node(inet_dist_options_1, InetDistOptions), + ?line {ok,Node2} = + start_node(inet_dist_options_2, InetDistOptions), + %% + ?line pong = + rpc:call(Node1, net_adm, ping, [Node2]), + ?line PrioritiesNode1 = + rpc:call(Node1, ?MODULE, get_socket_priorities, []), + ?line PrioritiesNode2 = + rpc:call(Node2, ?MODULE, get_socket_priorities, []), + ?line ?t:format("PrioritiesNode1 = ~p", [PrioritiesNode1]), + ?line ?t:format("PrioritiesNode2 = ~p", [PrioritiesNode2]), + ?line Elevated = [P || P <- PrioritiesNode1, P =:= Prio], + ?line Elevated = [P || P <- PrioritiesNode2, P =:= Prio], + ?line [_|_] = Elevated, + %% + ?line stop_node(Node2), + ?line stop_node(Node1), + ok. + +get_socket_priorities() -> + [Priority || + {ok,[{priority,Priority}]} <- + [inet:getopts(Port, [priority]) || + Port <- erlang:ports(), + element(2, erlang:port_info(Port, name)) =:= "tcp_inet"]]. + + %% %% Testcase: diff --git a/lib/kernel/test/inet_SUITE.erl b/lib/kernel/test/inet_SUITE.erl index d45dfc2173..849013ac79 100644 --- a/lib/kernel/test/inet_SUITE.erl +++ b/lib/kernel/test/inet_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1997-2014. All Rights Reserved. +%% Copyright Ericsson AB 1997-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -36,6 +36,7 @@ gethostnative_parallell/1, cname_loop/1, gethostnative_soft_restart/0, gethostnative_soft_restart/1, gethostnative_debug_level/0, gethostnative_debug_level/1, + lookup_bad_search_option/1, getif/1, getif_ifr_name_overflow/1,getservbyname_overflow/1, getifaddrs/1, parse_strict_address/1, simple_netns/1, simple_netns_open/1]). @@ -52,6 +53,7 @@ all() -> ipv4_to_ipv6, host_and_addr, {group, parse}, t_gethostnative, gethostnative_parallell, cname_loop, gethostnative_debug_level, gethostnative_soft_restart, + lookup_bad_search_option, getif, getif_ifr_name_overflow, getservbyname_overflow, getifaddrs, parse_strict_address, simple_netns, simple_netns_open]. @@ -908,6 +910,21 @@ lookup_loop([H|Hs], Delay, Tag, Parent, Cnt, Hosts) -> +lookup_bad_search_option(suite) -> + []; +lookup_bad_search_option(doc) -> + ["Test lookup with erroneously configured lookup option (OTP-12133)"]; +lookup_bad_search_option(Config) when is_list(Config) -> + Db = inet_db, + %% The bad option can not enter through inet_db:set_lookup/1, + %% but through e.g .inetrc. 
+ ets:insert(Db, {res_lookup,[lookup_bad_search_option]}), + {ok,Hostname} = inet:gethostname(), + {ok,_Hent} = inet:gethostbyname(Hostname), % Will hang loop for this bug + ok. + + + getif(suite) -> []; getif(doc) -> diff --git a/lib/kernel/test/zlib_SUITE.erl b/lib/kernel/test/zlib_SUITE.erl index 3be6f39d95..e99151284f 100644 --- a/lib/kernel/test/zlib_SUITE.erl +++ b/lib/kernel/test/zlib_SUITE.erl @@ -82,7 +82,7 @@ groups() -> api_deflateSetDictionary, api_deflateReset, api_deflateParams, api_deflate, api_deflateEnd, api_inflateInit, api_inflateSetDictionary, - api_inflateSync, api_inflateReset, api_inflate, + api_inflateSync, api_inflateReset, api_inflate, api_inflateChunk, api_inflateEnd, api_setBufsz, api_getBufsz, api_crc32, api_adler32, api_getQSize, api_un_compress, api_un_zip, api_g_un_zip]}, @@ -357,6 +357,39 @@ api_inflate(Config) when is_list(Config) -> ?m({'EXIT',{data_error,_}}, zlib:inflate(Z1, <<2,1,2,1,2>>)), ?m(ok, zlib:close(Z1)). +api_inflateChunk(doc) -> "Test inflateChunk"; +api_inflateChunk(suite) -> []; +api_inflateChunk(Config) when is_list(Config) -> + ChunkSize = 1024, + Data = << <<(I rem 150)>> || I <- lists:seq(1, 3 * ChunkSize) >>, + Part1 = binary:part(Data, 0, ChunkSize), + Part2 = binary:part(Data, ChunkSize, ChunkSize), + Part3 = binary:part(Data, ChunkSize * 2, ChunkSize), + ?line Compressed = zlib:compress(Data), + ?line Z1 = zlib:open(), + ?line zlib:setBufSize(Z1, ChunkSize), + ?m(ok, zlib:inflateInit(Z1)), + ?m([], zlib:inflateChunk(Z1, <<>>)), + ?m({more, Part1}, zlib:inflateChunk(Z1, Compressed)), + ?m({more, Part2}, zlib:inflateChunk(Z1)), + ?m(Part3, zlib:inflateChunk(Z1)), + ?m(ok, zlib:inflateEnd(Z1)), + + ?m(ok, zlib:inflateInit(Z1)), + ?m({more, Part1}, zlib:inflateChunk(Z1, Compressed)), + + ?m(ok, zlib:inflateReset(Z1)), + + ?line zlib:setBufSize(Z1, size(Data)), + ?m(Data, zlib:inflateChunk(Z1, Compressed)), + ?m(ok, zlib:inflateEnd(Z1)), + + ?m(ok, zlib:inflateInit(Z1)), + ?m(?BARG, zlib:inflateChunk(gurka, Compressed)), + ?m(?BARG, zlib:inflateChunk(Z1, 4384)), + ?m({'EXIT',{data_error,_}}, zlib:inflateEnd(Z1)), + ?m(ok, zlib:close(Z1)). + api_inflateEnd(doc) -> "Test inflateEnd"; api_inflateEnd(suite) -> []; api_inflateEnd(Config) when is_list(Config) -> diff --git a/lib/mnesia/src/mnesia_locker.erl b/lib/mnesia/src/mnesia_locker.erl index e27396731f..1efb939e00 100644 --- a/lib/mnesia/src/mnesia_locker.erl +++ b/lib/mnesia/src/mnesia_locker.erl @@ -982,8 +982,14 @@ sticky_flush(Ns=[Node | Tail], Store) -> flush_remaining([], _SkipNode, Res) -> del_debug(), exit(Res); -flush_remaining([SkipNode | Tail ], SkipNode, Res) -> - flush_remaining(Tail, SkipNode, Res); +flush_remaining(Ns=[SkipNode | Tail ], SkipNode, Res) -> + add_debug(Ns), + receive + {?MODULE, SkipNode, _} -> + flush_remaining(Tail, SkipNode, Res) + after 0 -> + flush_remaining(Tail, SkipNode, Res) + end; flush_remaining(Ns=[Node | Tail], SkipNode, Res) -> add_debug(Ns), receive diff --git a/lib/mnesia/test/mnesia_recovery_test.erl b/lib/mnesia/test/mnesia_recovery_test.erl index 0d0ad32fb0..946a9f97ba 100644 --- a/lib/mnesia/test/mnesia_recovery_test.erl +++ b/lib/mnesia/test/mnesia_recovery_test.erl @@ -320,7 +320,9 @@ read_during_down(Op, Config) when is_list(Config) -> ?log("W2R ~p~n", [W2R]), loop_and_kill_mnesia(10, hd(W2R), Tabs), [Pid ! 
self() || Pid <- Readers], - ?match([ok, ok, ok], [receive ok -> ok after 1000 -> {Pid, mnesia_lib:dist_coredump()} end || Pid <- Readers]), + ?match([ok, ok, ok], + [receive ok -> ok after 5000 -> {Pid, mnesia_lib:dist_coredump()} end + || Pid <- Readers]), ?verify_mnesia(Ns, []). reader(Tab, OP) -> @@ -338,8 +340,12 @@ reader(Tab, OP) -> ?error("Expected ~p Got ~p ~n", [[{Tab, key, val}], Else]), erlang:error(test_failed) end, - receive Pid -> - Pid ! ok + receive + Pid when is_pid(Pid) -> + Pid ! ok; + Other -> + io:format("Msg: ~p~n", [Other]), + error(Other) after 50 -> reader(Tab, OP) end. @@ -1537,6 +1543,7 @@ disc_less(Config) when is_list(Config) -> timer:sleep(500), ?match(ok, rpc:call(Node3, mnesia, start, [[{extra_db_nodes, [Node1, Node2]}]])), ?match(ok, rpc:call(Node3, mnesia, wait_for_tables, [[Tab1, Tab2, Tab3], 20000])), + ?match(ok, rpc:call(Node1, mnesia, wait_for_tables, [[Tab1, Tab2, Tab3], 20000])), ?match(ok, rpc:call(Node3, ?MODULE, verify_data, [Tab1, 100])), ?match(ok, rpc:call(Node3, ?MODULE, verify_data, [Tab2, 100])), diff --git a/lib/mnesia/test/mnesia_test_lib.hrl b/lib/mnesia/test/mnesia_test_lib.hrl index 94a195f01f..cd76377df6 100644 --- a/lib/mnesia/test/mnesia_test_lib.hrl +++ b/lib/mnesia/test/mnesia_test_lib.hrl @@ -66,12 +66,14 @@ ?verbose("ok, ~n Result as expected:~p~n",[_AR_2]), {success,_AR_2}; _AR_2 -> - ?error("Not Matching Actual result was:~n ~p~n", [_AR_2]), + ?error("Not Matching Actual result was:~n ~p~n ~p~n", + [_AR_2, erlang:get_stacktrace()]), {fail,_AR_2} end; - _:_AR_1 -> - ?error("Not Matching Actual result was:~n ~p~n", [_AR_1]), - {fail,_AR_1} + _T1_:_AR_1 -> + ?error("Not Matching Actual result was:~n ~p~n ~p~n", + [{_T1_,_AR_1}, erlang:get_stacktrace()]), + {fail,{_T1_,_AR_1}} end end()). diff --git a/lib/observer/src/cdv_proc_cb.erl b/lib/observer/src/cdv_proc_cb.erl index dfc2df9c4c..d1549f79eb 100644 --- a/lib/observer/src/cdv_proc_cb.erl +++ b/lib/observer/src/cdv_proc_cb.erl @@ -129,6 +129,7 @@ info_fields() -> {"Started", start_time}, {"Parent", {click,parent}}, {"Message Queue Len",msg_q_len}, + {"Run queue", run_queue}, {"Reductions", reds}, {"Program counter", prog_count}, {"Continuation pointer",cp}, diff --git a/lib/observer/src/crashdump_viewer.erl b/lib/observer/src/crashdump_viewer.erl index 48606d7e0a..ef14ba46e2 100644 --- a/lib/observer/src/crashdump_viewer.erl +++ b/lib/observer/src/crashdump_viewer.erl @@ -1133,6 +1133,8 @@ all_procinfo(Fd,Fun,Proc,WS,LineHead) -> "arity = " ++ Arity -> %%! Temporary workaround get_procinfo(Fd,Fun,Proc#proc{arity=Arity--"\r\n"},WS); + "Run queue" -> + get_procinfo(Fd,Fun,Proc#proc{run_queue=val(Fd)},WS); "=" ++ _next_tag -> Proc; Other -> diff --git a/lib/observer/src/crashdump_viewer.hrl b/lib/observer/src/crashdump_viewer.hrl index 0e2eba6dee..47705d0da7 100644 --- a/lib/observer/src/crashdump_viewer.hrl +++ b/lib/observer/src/crashdump_viewer.hrl @@ -85,7 +85,9 @@ old_heap_top, old_heap_end, memory, - stack_dump}). + stack_dump, + run_queue=?unknown + }). 
-record(port, {id, diff --git a/lib/public_key/doc/src/public_key.xml b/lib/public_key/doc/src/public_key.xml index e3473f80d7..b86d0fe0ab 100644 --- a/lib/public_key/doc/src/public_key.xml +++ b/lib/public_key/doc/src/public_key.xml @@ -5,7 +5,7 @@ <header> <copyright> <year>2008</year> - <year>2014</year> + <year>2015</year> <holder>Ericsson AB, All Rights Reserved</holder> </copyright> <legalnotice> @@ -127,6 +127,8 @@ affiliationChanged | superseded | cessationOfOperation | certificateHold | privilegeWithdrawn | aACompromise</code></p> + <p><code>issuer_name() = {rdnSequence,[#'AttributeTypeAndValue'{}]} </code> </p> + <p><code>ssh_file() = openssh_public_key | rfc4716_public_key | known_hosts | auth_keys</code></p> @@ -368,8 +370,8 @@ <name>pkix_is_issuer(Cert, IssuerCert) -> boolean()</name> <fsummary> Checks if <c>IssuerCert</c> issued <c>Cert</c> </fsummary> <type> - <v>Cert = der_encode() | #'OTPCertificate'{}</v> - <v>IssuerCert = der_encode() | #'OTPCertificate'{}</v> + <v>Cert = der_encoded() | #'OTPCertificate'{}</v> + <v>IssuerCert = der_encoded() | #'OTPCertificate'{}</v> </type> <desc> <p> Checks if <c>IssuerCert</c> issued <c>Cert</c> </p> @@ -380,7 +382,7 @@ <name>pkix_is_fixed_dh_cert(Cert) -> boolean()</name> <fsummary> Checks if a Certificate is a fixed Diffie-Hellman Cert.</fsummary> <type> - <v>Cert = der_encode() | #'OTPCertificate'{}</v> + <v>Cert = der_encoded() | #'OTPCertificate'{}</v> </type> <desc> <p> Checks if a Certificate is a fixed Diffie-Hellman Cert.</p> @@ -391,7 +393,7 @@ <name>pkix_is_self_signed(Cert) -> boolean()</name> <fsummary> Checks if a Certificate is self signed.</fsummary> <type> - <v>Cert = der_encode() | #'OTPCertificate'{}</v> + <v>Cert = der_encoded() | #'OTPCertificate'{}</v> </type> <desc> <p> Checks if a Certificate is self signed.</p> @@ -402,24 +404,25 @@ <name>pkix_issuer_id(Cert, IssuedBy) -> {ok, IssuerID} | {error, Reason}</name> <fsummary> Returns the issuer id.</fsummary> <type> - <v>Cert = der_encode() | #'OTPCertificate'{}</v> + <v>Cert = der_encoded() | #'OTPCertificate'{}</v> <v>IssuedBy = self | other</v> - <v>IssuerID = {integer(), {rdnSequence, [#'AttributeTypeAndValue'{}]}}</v> + <v>IssuerID = {integer(), issuer_name()}</v> <d>The issuer id consists of the serial number and the issuers name.</d> <v>Reason = term()</v> - </type> - <desc> - <p> Returns the issuer id.</p> - </desc> + </type> + <desc> + <p> Returns the issuer id.</p> + </desc> </func> - + + <func> <name>pkix_normalize_name(Issuer) -> Normalized</name> <fsummary>Normalizes a issuer name so that it can be easily compared to another issuer name. </fsummary> <type> - <v>Issuer = {rdnSequence,[#'AttributeTypeAndValue'{}]}</v> - <v>Normalized = {rdnSequence, [#'AttributeTypeAndValue'{}]}</v> + <v>Issuer = issuer_name()</v> + <v>Normalized = issuer_name()</v> </type> <desc> <p>Normalizes a issuer name so that it can be easily @@ -431,13 +434,13 @@ <name>pkix_path_validation(TrustedCert, CertChain, Options) -> {ok, {PublicKeyInfo, PolicyTree}} | {error, {bad_cert, Reason}} </name> <fsummary> Performs a basic path validation according to RFC 5280.</fsummary> <type> - <v> TrustedCert = #'OTPCertificate'{} | der_encode() | atom() </v> + <v> TrustedCert = #'OTPCertificate'{} | der_encoded() | atom() </v> <d>Normally a trusted certificate but it can also be a path validation error that can be discovered while constructing the input to this function and that should be run through the <c>verify_fun</c>. 
For example <c>unknown_ca </c> or <c>selfsigned_peer </c> </d> - <v> CertChain = [der_encode()]</v> + <v> CertChain = [der_encoded()]</v> <d>A list of DER encoded certificates in trust order ending with the peer certificate.</d> <v> Options = proplists:proplist()</v> <v>PublicKeyInfo = {?'rsaEncryption' | ?'id-dsa', @@ -527,6 +530,17 @@ fun(OtpCert :: #'OTPCertificate'{}, </desc> </func> + <func> + <name>pkix_crl_issuer(CRL) -> issuer_name()</name> + <fsummary>Returns the issuer of the <c>CRL</c>.</fsummary> + <type> + <v>CRL = der_encoded() | #'CertificateList'{} </v> + </type> + <desc> + <p>Returns the issuer of the <c>CRL</c>.</p> + </desc> + </func> + <func> <name>pkix_crls_validate(OTPCertificate, DPAndCRLs, Options) -> CRLStatus()</name> <fsummary> Performs CRL validation.</fsummary> @@ -574,9 +588,48 @@ fun(#'DistributionPoint'{}, #'CertificateList'{}, </taglist> </desc> </func> + + <func> + <name>pkix_crl_verify(CRL, Cert) -> boolean()</name> + <fsummary> Verify that <c>Cert</c> is the <c> CRL</c> signer. </fsummary> + <type> + <v>CRL = der_encoded() | #'CertificateList'{} </v> + <v>Cert = der_encoded() | #'OTPCertificate'{} </v> + </type> + <desc> + <p>Verify that <c>Cert</c> is the <c>CRL</c> signer.</p> + </desc> + </func> + <func> + <name>pkix_dist_point(Cert) -> DistPoint</name> + <fsummary>Creates a distribution point for CRLs issued by the same issuer as <c>Cert</c>.</fsummary> + <type> + <v> Cert = der_encoded() | #'OTPCertificate'{} </v> + <v> DistPoint = #'DistributionPoint'{}</v> + </type> + <desc> + <p>Creates a distribution point for CRLs issued by the same issuer as <c>Cert</c>. + Can be used as input to <seealso + marker="#pkix_crls_validate-3">pkix_crls_validate/3 </seealso> + </p> + </desc> + </func> + + <func> + <name>pkix_dist_points(Cert) -> DistPoints</name> + <fsummary> Extracts distribution points from the certificates extensions.</fsummary> + <type> + <v> Cert = der_encoded() | #'OTPCertificate'{} </v> + <v> DistPoints = [#'DistributionPoint'{}]</v> + </type> + <desc> + <p> Extracts distribution points from the certificates extensions.</p> + </desc> + </func> + <func> - <name>pkix_sign(#'OTPTBSCertificate'{}, Key) -> der_encode()</name> + <name>pkix_sign(#'OTPTBSCertificate'{}, Key) -> der_encoded()</name> <fsummary>Signs certificate.</fsummary> <type> <v>Key = rsa_public_key() | dsa_public_key()</v> @@ -606,7 +659,7 @@ fun(#'DistributionPoint'{}, #'CertificateList'{}, <name>pkix_verify(Cert, Key) -> boolean()</name> <fsummary> Verify pkix x.509 certificate signature.</fsummary> <type> - <v>Cert = der_encode()</v> + <v>Cert = der_encoded()</v> <v>Key = rsa_public_key() | dsa_public_key()</v> </type> <desc> diff --git a/lib/public_key/src/pubkey_cert.erl b/lib/public_key/src/pubkey_cert.erl index ae517ca642..8b11538499 100644 --- a/lib/public_key/src/pubkey_cert.erl +++ b/lib/public_key/src/pubkey_cert.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2008-2013. All Rights Reserved. +%% Copyright Ericsson AB 2008-2014. All Rights Reserved. 
%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -28,8 +28,9 @@ validate_issuer/4, validate_names/6, validate_extensions/4, normalize_general_name/1, is_self_signed/1, - is_issuer/2, issuer_id/2, is_fixed_dh_cert/1, - verify_data/1, verify_fun/4, select_extension/2, match_name/3, + is_issuer/2, issuer_id/2, distribution_points/1, + is_fixed_dh_cert/1, verify_data/1, verify_fun/4, + select_extension/2, match_name/3, extensions_list/1, cert_auth_key_id/1, time_str_2_gregorian_sec/1]). -define(NULL, 0). @@ -272,6 +273,16 @@ issuer_id(Otpcert, self) -> SerialNr = TBSCert#'OTPTBSCertificate'.serialNumber, {ok, {SerialNr, normalize_general_name(Issuer)}}. +distribution_points(Otpcert) -> + TBSCert = Otpcert#'OTPCertificate'.tbsCertificate, + Extensions = extensions_list(TBSCert#'OTPTBSCertificate'.extensions), + case select_extension(?'id-ce-cRLDistributionPoints', Extensions) of + undefined -> + []; + #'Extension'{extnValue = Value} -> + Value + end. + %%-------------------------------------------------------------------- -spec is_fixed_dh_cert(#'OTPCertificate'{}) -> boolean(). %% @@ -296,7 +307,9 @@ is_fixed_dh_cert(#'OTPCertificate'{tbsCertificate = %% -------------------------------------------------------------------- verify_fun(Otpcert, Result, UserState0, VerifyFun) -> case VerifyFun(Otpcert, Result, UserState0) of - {valid,UserState} -> + {valid, UserState} -> + UserState; + {valid_peer, UserState} -> UserState; {fail, Reason} -> case Reason of diff --git a/lib/public_key/src/pubkey_crl.erl b/lib/public_key/src/pubkey_crl.erl index f0df4bc3f2..488cc97c70 100644 --- a/lib/public_key/src/pubkey_crl.erl +++ b/lib/public_key/src/pubkey_crl.erl @@ -41,10 +41,10 @@ validate(OtpCert, OtherDPCRLs, DP, {DerCRL, CRL}, {DerDeltaCRL, DeltaCRL}, CRLIssuer = TBSCRL#'TBSCertList'.issuer, AltNames = case pubkey_cert:select_extension(?'id-ce-subjectAltName', TBSCert#'OTPTBSCertificate'.extensions) of - undefined -> - []; - Ext -> - Ext#'Extension'.extnValue + #'Extension'{extnValue = Value} -> + Value; + _ -> + [] end, revoked_status(DP, IDP, {directoryName, CRLIssuer}, [ {directoryName, CertIssuer} | AltNames], SerialNumber, Revoked, diff --git a/lib/public_key/src/public_key.erl b/lib/public_key/src/public_key.erl index 1bbf4ef416..a0a87e5351 100644 --- a/lib/public_key/src/public_key.erl +++ b/lib/public_key/src/public_key.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2008-2014. All Rights Reserved. +%% Copyright Ericsson AB 2008-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -46,7 +46,11 @@ pkix_normalize_name/1, pkix_path_validation/3, ssh_decode/2, ssh_encode/2, - pkix_crls_validate/3 + pkix_crls_validate/3, + pkix_dist_point/1, + pkix_dist_points/1, + pkix_crl_verify/2, + pkix_crl_issuer/1 ]). -export_type([public_key/0, private_key/0, pem_entry/0, @@ -470,6 +474,45 @@ verify(DigestOrPlainText, sha = DigestType, Signature, {Key, #'Dss-Parms'{p = P crypto:verify(dss, DigestType, DigestOrPlainText, Signature, [P, Q, G, Key]). %%-------------------------------------------------------------------- +-spec pkix_dist_point(der_encoded() | #'OTPCertificate'{}) -> + #'DistributionPoint'{}. +%% Description: Creates a distribution point for CRLs issued by the same issuer as <c>Cert</c>. 
+%%-------------------------------------------------------------------- +pkix_dist_point(OtpCert) when is_binary(OtpCert) -> + pkix_dist_point(pkix_decode_cert(OtpCert, otp)); +pkix_dist_point(OtpCert) -> + Issuer = public_key:pkix_normalize_name( + pubkey_cert_records:transform( + OtpCert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.issuer, encode)), + + TBSCert = OtpCert#'OTPCertificate'.tbsCertificate, + Extensions = pubkey_cert:extensions_list(TBSCert#'OTPTBSCertificate'.extensions), + AltNames = case pubkey_cert:select_extension(?'id-ce-issuerAltName', Extensions) of + undefined -> + []; + #'Extension'{extnValue = Value} -> + Value + end, + Point = {fullName, [{directoryName, Issuer} | AltNames]}, + #'DistributionPoint'{cRLIssuer = asn1_NOVALUE, + reasons = asn1_NOVALUE, + distributionPoint = Point}. +%%-------------------------------------------------------------------- +-spec pkix_dist_points(der_encoded() | #'OTPCertificate'{}) -> + [#'DistributionPoint'{}]. +%% Description: Extracts distributionpoints specified in the certificates extensions. +%%-------------------------------------------------------------------- +pkix_dist_points(OtpCert) when is_binary(OtpCert) -> + pkix_dist_points(pkix_decode_cert(OtpCert, otp)); +pkix_dist_points(OtpCert) -> + Value = pubkey_cert:distribution_points(OtpCert), + lists:foldl(fun(Point, Acc0) -> + DistPoint = pubkey_cert_records:transform(Point, decode), + [DistPoint | Acc0] + end, + [], Value). + +%%-------------------------------------------------------------------- -spec pkix_sign(#'OTPTBSCertificate'{}, rsa_private_key() | dsa_private_key()) -> Der::binary(). %% @@ -511,6 +554,25 @@ pkix_verify(DerCert, Key = {#'ECPoint'{}, _}) verify(PlainText, DigestType, Signature, Key). %%-------------------------------------------------------------------- +-spec pkix_crl_verify(CRL::binary() | #'CertificateList'{}, Cert::binary() | #'OTPCertificate'{}) -> boolean(). +%% +%% Description: Verify that Cert is the CRL signer. +%%-------------------------------------------------------------------- +pkix_crl_verify(CRL, Cert) when is_binary(CRL) -> + pkix_crl_verify(der_decode('CertificateList', CRL), Cert); +pkix_crl_verify(CRL, Cert) when is_binary(Cert) -> + pkix_crl_verify(CRL, pkix_decode_cert(Cert, otp)); +pkix_crl_verify(#'CertificateList'{} = CRL, #'OTPCertificate'{} = Cert) -> + TBSCert = Cert#'OTPCertificate'.tbsCertificate, + PublicKeyInfo = TBSCert#'OTPTBSCertificate'.subjectPublicKeyInfo, + PublicKey = PublicKeyInfo#'OTPSubjectPublicKeyInfo'.subjectPublicKey, + AlgInfo = PublicKeyInfo#'OTPSubjectPublicKeyInfo'.algorithm, + PublicKeyParams = AlgInfo#'PublicKeyAlgorithm'.parameters, + pubkey_crl:verify_crl_signature(CRL, + der_encode('CertificateList', CRL), + PublicKey, PublicKeyParams). + +%%-------------------------------------------------------------------- -spec pkix_is_issuer(Cert :: der_encoded()| #'OTPCertificate'{} | #'CertificateList'{}, IssuerCert :: der_encoded()| #'OTPCertificate'{}) -> boolean(). @@ -564,15 +626,21 @@ pkix_is_fixed_dh_cert(Cert) when is_binary(Cert) -> % %% Description: Returns the issuer id. %%-------------------------------------------------------------------- -pkix_issuer_id(#'OTPCertificate'{} = OtpCert, self) -> - pubkey_cert:issuer_id(OtpCert, self); - -pkix_issuer_id(#'OTPCertificate'{} = OtpCert, other) -> - pubkey_cert:issuer_id(OtpCert, other); +pkix_issuer_id(Cert, Signed)-> + pkix_issuer_id(Cert, Signed, decode). 
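A minimal usage sketch (not part of the patch) of the new CRL entry points: pkix_dist_point/1, pkix_dist_points/1 and pkix_crl_verify/2 above, and pkix_crl_issuer/1 just below. It assumes DerCert and DerSignerCert are DER-encoded certificates and DerCRL a DER-encoded CRL, much like the PEM-decoded test data used by the pkix_crl test case further down; paste it into any module.

    crl_example(DerCert, DerCRL, DerSignerCert) ->
        {rdnSequence, _} = public_key:pkix_crl_issuer(DerCRL),
        true = public_key:pkix_crl_verify(DerCRL, DerSignerCert),
        %% Distribution points taken from the certificate's extensions,
        %% plus the same-issuer fallback point built by pkix_dist_point/1.
        DPs = public_key:pkix_dist_points(DerCert),
        DP  = public_key:pkix_dist_point(DerCert),
        {DPs, DP}.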
-pkix_issuer_id(Cert, Signed) when is_binary(Cert) -> - OtpCert = pkix_decode_cert(Cert, otp), - pkix_issuer_id(OtpCert, Signed). +%%-------------------------------------------------------------------- +-spec pkix_crl_issuer(CRL::binary()| #'CertificateList'{}) -> + {rdnSequence, + [#'AttributeTypeAndValue'{}]}. +% +%% Description: Returns the issuer. +%%-------------------------------------------------------------------- +pkix_crl_issuer(CRL) when is_binary(CRL) -> + pkix_crl_issuer(der_decode('CertificateList', CRL)); +pkix_crl_issuer(#'CertificateList'{} = CRL) -> + pubkey_cert_records:transform( + CRL#'CertificateList'.tbsCertList#'TBSCertList'.issuer, decode). %%-------------------------------------------------------------------- -spec pkix_normalize_name({rdnSequence, @@ -921,3 +989,18 @@ ec_key({PubKey, PrivateKey}, Params) -> privateKey = binary_to_list(PrivateKey), parameters = Params, publicKey = {0, PubKey}}. + +pkix_issuer_id(#'OTPCertificate'{} = OtpCert, Signed, decode) when (Signed == self) or + (Signed == other) -> + pubkey_cert:issuer_id(OtpCert, Signed); +pkix_issuer_id(#'OTPCertificate'{} = OtpCert, Signed, encode) when (Signed == self) or + (Signed == other) -> + case pubkey_cert:issuer_id(OtpCert, Signed) of + {ok, {Serial, Issuer}} -> + {ok, {Serial, pubkey_cert_records:transform(Issuer, encode)}}; + Error -> + Error + end; +pkix_issuer_id(Cert, Signed, Decode) when is_binary(Cert) -> + OtpCert = pkix_decode_cert(Cert, otp), + pkix_issuer_id(OtpCert, Signed, Decode). diff --git a/lib/public_key/test/public_key_SUITE.erl b/lib/public_key/test/public_key_SUITE.erl index 163f5f4413..40c28e86b3 100644 --- a/lib/public_key/test/public_key_SUITE.erl +++ b/lib/public_key/test/public_key_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2008-2013. All Rights Reserved. +%% Copyright Ericsson AB 2008-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -42,7 +42,7 @@ all() -> encrypt_decrypt, {group, sign_verify}, pkix, pkix_countryname, pkix_emailaddress, pkix_path_validation, - pkix_iso_rsa_oid, pkix_iso_dsa_oid]. + pkix_iso_rsa_oid, pkix_iso_dsa_oid, pkix_crl]. groups() -> [{pem_decode_encode, [], [dsa_pem, rsa_pem, encrypted_pem, @@ -712,6 +712,42 @@ pkix_iso_dsa_oid(Config) when is_list(Config) -> {_, dsa} = public_key:pkix_sign_types(SigAlg#'SignatureAlgorithm'.algorithm). %%-------------------------------------------------------------------- + +pkix_crl() -> + [{doc, "test pkix_crl_* functions"}]. 
+ +pkix_crl(Config) when is_list(Config) -> + Datadir = ?config(data_dir, Config), + {ok, PemCRL} = file:read_file(filename:join(Datadir, "idp_crl.pem")), + [{_, CRL, _}] = public_key:pem_decode(PemCRL), + + {ok, IDPPemCert} = file:read_file(filename:join(Datadir, "idp_cert.pem")), + [{_, IDPCert, _}] = public_key:pem_decode(IDPPemCert), + + {ok, SignPemCert} = file:read_file(filename:join(Datadir, "crl_signer.pem")), + [{_, SignCert, _}] = public_key:pem_decode(SignPemCert), + + OTPIDPCert = public_key:pkix_decode_cert(IDPCert, otp), + OTPSignCert = public_key:pkix_decode_cert(SignCert, otp), + ERLCRL = public_key:der_decode('CertificateList',CRL), + + {rdnSequence,_} = public_key:pkix_crl_issuer(CRL), + {rdnSequence,_} = public_key:pkix_crl_issuer(ERLCRL), + + true = public_key:pkix_crl_verify(CRL, SignCert), + true = public_key:pkix_crl_verify(ERLCRL, OTPSignCert), + + [#'DistributionPoint'{}|_] = public_key:pkix_dist_points(IDPCert), + [#'DistributionPoint'{}|_] = public_key:pkix_dist_points(OTPIDPCert), + + #'DistributionPoint'{cRLIssuer = asn1_NOVALUE, + reasons = asn1_NOVALUE, + distributionPoint = Point} = public_key:pkix_dist_point(IDPCert), + #'DistributionPoint'{cRLIssuer = asn1_NOVALUE, + reasons = asn1_NOVALUE, + distributionPoint = Point} = public_key:pkix_dist_point(OTPIDPCert). + +%%-------------------------------------------------------------------- %% Internal functions ------------------------------------------------ %%-------------------------------------------------------------------- asn1_encode_decode({Asn1Type, Der, not_encrypted} = Entry) -> diff --git a/lib/public_key/test/public_key_SUITE_data/crl_signer.pem b/lib/public_key/test/public_key_SUITE_data/crl_signer.pem new file mode 100644 index 0000000000..d77f86b45d --- /dev/null +++ b/lib/public_key/test/public_key_SUITE_data/crl_signer.pem @@ -0,0 +1,25 @@ +-----BEGIN CERTIFICATE----- +MIID8zCCAtugAwIBAgIJAKU8w89SmyPyMA0GCSqGSIb3DQEBBAUAMIGGMREwDwYD +VQQDEwhlcmxhbmdDQTETMBEGA1UECxMKRXJsYW5nIE9UUDEUMBIGA1UEChMLRXJp +Y3Nzb24gQUIxEjAQBgNVBAcTCVN0b2NraG9sbTELMAkGA1UEBhMCU0UxJTAjBgkq +hkiG9w0BCQEWFnBldGVyQGVyaXguZXJpY3Nzb24uc2UwHhcNMTUwMjIzMTMyNTMx +WhcNMTUwMzI1MTMyNTMxWjCBhjERMA8GA1UEAxMIZXJsYW5nQ0ExEzARBgNVBAsT +CkVybGFuZyBPVFAxFDASBgNVBAoTC0VyaWNzc29uIEFCMRIwEAYDVQQHEwlTdG9j +a2hvbG0xCzAJBgNVBAYTAlNFMSUwIwYJKoZIhvcNAQkBFhZwZXRlckBlcml4LmVy +aWNzc29uLnNlMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAyzwkmKzy +WTLOafHmgqZVENdt3OYECPA4BamVKyEdi8zgXI0S71wzPZ+XvuGbHDTBzsTHf71L +xRQgoG30tv5jqWSlfh8iyS6fO+FHxBKd+xg6hLJXk5PCUa5X1D4BO8B4aapEzev+ +T8+pTaOLeVPdfGfKp0yWF50eCpdSF/kMCCIIA8QNSahfcwuLbEEzUNZof6YPZBNm +e+XUMXCjpb/mU7krfu8nLaspG1HgxQqErEEBzGJE7mguqSVETK/xpGXEMTNIuj8N +ziFrfqAezDob3z48xHUaHKZRBb9NIxWIjVxkTYaqOtf9UNCT96CHeZ7rk9iNscQu +USabMIamFY8cNQIDAQABo2IwYDAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIB +BjAdBgNVHQ4EFgQUm2M3f6UBEIsHI1HIvphbBz60RsAwIQYDVR0RBBowGIEWcGV0 +ZXJAZXJpeC5lcmljc3Nvbi5zZTANBgkqhkiG9w0BAQQFAAOCAQEAPmm0V36HZySF +BoV03DGyeFUSeMtO0DO058NaXXv2VNPpUXT72Mt1ovXNvVFcReggb01polF7TFFI +4NRb6qbsLPxny29Clf/9WKY4zDhbb2MIy8yueoOyyeNQtrzY+iQjo4q9U+Aa6xj1 +pxmG1URDfOmCgX33ItCrZXFGa4ic0HrbWgJMDNo4lSOiio8bl3IYN4vBcobRfhDs +pw5jochE5ZpPh4i76Pg6D99EFkNaLyQioWEu4n2OxR0EBSFLJkVJQ0alUx18AKio +bje+h5nzRgTm5HApYzcorF57KfUKPDaW1Q6tRckRyHApueDuK8p49ITQE71lmkLc +ywxoJMrNnA== +-----END CERTIFICATE----- + diff --git a/lib/public_key/test/public_key_SUITE_data/idp_cert.pem b/lib/public_key/test/public_key_SUITE_data/idp_cert.pem new file mode 100644 index 0000000000..c2afc56a3a --- /dev/null +++ 
b/lib/public_key/test/public_key_SUITE_data/idp_cert.pem @@ -0,0 +1,30 @@ +-----BEGIN CERTIFICATE----- +MIIFGjCCBAKgAwIBAgIBAzANBgkqhkiG9w0BAQQFADCBgzEOMAwGA1UEAxMFb3Rw +Q0ExEzARBgNVBAsTCkVybGFuZyBPVFAxFDASBgNVBAoTC0VyaWNzc29uIEFCMQsw +CQYDVQQGEwJTRTESMBAGA1UEBxMJU3RvY2tob2xtMSUwIwYJKoZIhvcNAQkBFhZw +ZXRlckBlcml4LmVyaWNzc29uLnNlMB4XDTE1MDIyMzEzMjUzMVoXDTI1MDEwMTEz +MjUzMVowgYQxDzANBgNVBAMTBnNlcnZlcjETMBEGA1UECxMKRXJsYW5nIE9UUDEU +MBIGA1UEChMLRXJpY3Nzb24gQUIxCzAJBgNVBAYTAlNFMRIwEAYDVQQHEwlTdG9j +a2hvbG0xJTAjBgkqhkiG9w0BCQEWFnBldGVyQGVyaXguZXJpY3Nzb24uc2UwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDK8EDdNZEebdfxb57e3UA8uTCq +TsFtJv5tyjnZtSFsGDrwrZYjRMOCJFh8Yv6Ddq4mZiAvUCJxMzW4zVzraMmmQC8z +Hi3xQyuIq2UCW3ESxLvchCcuSjNOWke0z+rXHzA8Yz9y1fqhhO6AF8q5lLwGo+VQ +sJkVV8QwB9UXZN4pAc3zTeqZkGCrNY/ZIgtCrk4jw7sY/gumS8BjhXCYGyFZRDvX +jzIXQx6jn7/2huNbEAiBXbYYAMd7OEwhpHHAWOVA6g+/TNydgRO3W4xVmlEhDpYs +bnMV/Tq570E1bhz1XWb642K2MnxI74g8FXmhN6x6P8d4zU/eFcs+gxO0X6KzAgMB +AAGjggGUMIIBkDAJBgNVHRMEAjAAMAsGA1UdDwQEAwIF4DAdBgNVHQ4EFgQUo8dr +DDQXK25dB6qMY8dNIjAKIPEwgbMGA1UdIwSBqzCBqIAU5YMIq7A5eYQhQsHsc/XC +7GeZ+kuhgYykgYkwgYYxETAPBgNVBAMTCGVybGFuZ0NBMRMwEQYDVQQLEwpFcmxh +bmcgT1RQMRQwEgYDVQQKEwtFcmljc3NvbiBBQjESMBAGA1UEBxMJU3RvY2tob2xt +MQswCQYDVQQGEwJTRTElMCMGCSqGSIb3DQEJARYWcGV0ZXJAZXJpeC5lcmljc3Nv +bi5zZYIBATAhBgNVHREEGjAYgRZwZXRlckBlcml4LmVyaWNzc29uLnNlMCEGA1Ud +EgQaMBiBFnBldGVyQGVyaXguZXJpY3Nzb24uc2UwWwYDVR0fBFQwUjAkoCKgIIYe +aHR0cDovL2xvY2FsaG9zdC9vdHBDQS9jcmwucGVtMCqgKKAmhiRodHRwOi8vbG9j +YWxob3N0OjM3ODEzL290cENBL2NybC5wZW0wDQYJKoZIhvcNAQEEBQADggEBACwq +o4nQTTereSIL8ZLQHweJKXYstTaZrRrAaoRUe9oClY7H++zXmMa8iZvUqqdT3fXW +4KMXXyoB1o+cLxLnAPKOiFFL9rcbaeAMxZMIrTaFDQsOXAPVqJLSWWS5I5LsNvS6 +MlB6O6+0binTyilDKg683VV9nKNiNdL8WzGa5ig+HvK6xUpJwpOTmDmfdg09zQ+8 +aCbJrthXg0tNnGIorttAd2wFvmLUezoJrlfwLChB0M/qa+RVRCFMiPvkWupo5eVK +Malwpz2xp2rAUlb6qQY7eI6lV8JsVK06QxBmUHP68Y9kYT5/gy5ketjOB0Ypin05 +6+3VrZKFxrkqKaEoL50= +-----END CERTIFICATE----- diff --git a/lib/public_key/test/public_key_SUITE_data/idp_crl.pem b/lib/public_key/test/public_key_SUITE_data/idp_crl.pem new file mode 100644 index 0000000000..0872279501 --- /dev/null +++ b/lib/public_key/test/public_key_SUITE_data/idp_crl.pem @@ -0,0 +1,18 @@ +-----BEGIN X509 CRL----- +MIIC3TCCAcUCAQEwDQYJKoZIhvcNAQEEBQAwgYYxETAPBgNVBAMTCGVybGFuZ0NB +MRMwEQYDVQQLEwpFcmxhbmcgT1RQMRQwEgYDVQQKEwtFcmljc3NvbiBBQjESMBAG +A1UEBxMJU3RvY2tob2xtMQswCQYDVQQGEwJTRTElMCMGCSqGSIb3DQEJARYWcGV0 +ZXJAZXJpeC5lcmljc3Nvbi5zZRcNMTUwMjIzMTMyNTMxWhcNMTUwMjI0MTMyNTMx +WqCCAQgwggEEMIG7BgNVHSMEgbMwgbCAFJtjN3+lARCLByNRyL6YWwc+tEbAoYGM +pIGJMIGGMREwDwYDVQQDEwhlcmxhbmdDQTETMBEGA1UECxMKRXJsYW5nIE9UUDEU +MBIGA1UEChMLRXJpY3Nzb24gQUIxEjAQBgNVBAcTCVN0b2NraG9sbTELMAkGA1UE +BhMCU0UxJTAjBgkqhkiG9w0BCQEWFnBldGVyQGVyaXguZXJpY3Nzb24uc2WCCQCl +PMPPUpsj8jA4BgNVHRwBAf8ELjAsoCqgKIYmaHR0cDovL2xvY2FsaG9zdDo4MDAw +L2VybGFuZ0NBL2NybC5wZW0wCgYDVR0UBAMCAQEwDQYJKoZIhvcNAQEEBQADggEB +AE9WKJhW1oivBEE91akeDcYCtSVp98F7DxzQyJTBLQJGMEXSg8G/oAp64F4qs3oV +LXS5YFYwxjD9tXByGVEJoIUUMtfMeCvZMgd2V8mBlAJiyHkTrFFA8PgBv+htrJji +nrheAhrEedqZbqwmrcU34h9fWHp0Zl6UDYyF3I/S0/5ilIz3DvNZ9SBfKKt3DYeW +hon7qpNo6YrtEzbXyOaa2mFX9c1w39LBZ1FdY0jEzUfh2eImBLxnBjZArNxzYuU8 +a+lNMjc6JUAJwITS6C1YfI4ECsqXe0K/n90pMcm/jgiGFCZhVbXq+Nrm/24qPKBA +zqoNos7aV7LEYLYOjknaIhY= +-----END X509 CRL----- diff --git a/lib/runtime_tools/src/dbg.erl b/lib/runtime_tools/src/dbg.erl index 186563ab74..c2de57d40b 100644 --- a/lib/runtime_tools/src/dbg.erl +++ b/lib/runtime_tools/src/dbg.erl @@ -778,50 +778,50 @@ tracer_init(Handler, HandlerData) -> tracer_loop(Handler, HandlerData). 
tracer_loop(Handler, Hdata) -> - receive - Msg -> - %% Don't match in receive to avoid giving EXIT message higher - %% priority than the trace messages. - case Msg of - {'EXIT',_Pid,_Reason} -> - ok; - Trace -> - NewData = recv_all_traces(Trace, Handler, Hdata), - tracer_loop(Handler, NewData) - end + {State, Suspended, Traces} = recv_all_traces(), + NewHdata = handle_traces(Suspended, Traces, Handler, Hdata), + case State of + done -> + exit(normal); + loop -> + tracer_loop(Handler, NewHdata) end. - -recv_all_traces(Trace, Handler, Hdata) -> - Suspended = suspend(Trace, []), - recv_all_traces(Suspended, Handler, Hdata, [Trace]). -recv_all_traces(Suspended0, Handler, Hdata, Traces) -> +recv_all_traces() -> + recv_all_traces([], [], infinity). + +recv_all_traces(Suspended0, Traces, Timeout) -> receive Trace when is_tuple(Trace), element(1, Trace) == trace -> Suspended = suspend(Trace, Suspended0), - recv_all_traces(Suspended, Handler, Hdata, [Trace|Traces]); + recv_all_traces(Suspended, [Trace|Traces], 0); Trace when is_tuple(Trace), element(1, Trace) == trace_ts -> Suspended = suspend(Trace, Suspended0), - recv_all_traces(Suspended, Handler, Hdata, [Trace|Traces]); + recv_all_traces(Suspended, [Trace|Traces], 0); Trace when is_tuple(Trace), element(1, Trace) == seq_trace -> Suspended = suspend(Trace, Suspended0), - recv_all_traces(Suspended, Handler, Hdata, [Trace|Traces]); + recv_all_traces(Suspended, [Trace|Traces], 0); Trace when is_tuple(Trace), element(1, Trace) == drop -> Suspended = suspend(Trace, Suspended0), - recv_all_traces(Suspended, Handler, Hdata, [Trace|Traces]); + recv_all_traces(Suspended, [Trace|Traces], 0); + {'EXIT', _Pid, _Reason} -> + {done, Suspended0, Traces}; Other -> %%% Is this really a good idea? io:format(user,"** tracer received garbage: ~p~n", [Other]), - recv_all_traces(Suspended0, Handler, Hdata, Traces) - after 0 -> - case catch invoke_handler(Traces, Handler, Hdata) of - {'EXIT',Reason} -> - resume(Suspended0), - exit({trace_handler_crashed,Reason}); - NewHdata -> - resume(Suspended0), - NewHdata - end + recv_all_traces(Suspended0, Traces, Timeout) + after Timeout -> + {loop, Suspended0, Traces} + end. + +handle_traces(Suspended, Traces, Handler, Hdata) -> + case catch invoke_handler(Traces, Handler, Hdata) of + {'EXIT',Reason} -> + resume(Suspended), + exit({trace_handler_crashed,Reason}); + NewHdata -> + resume(Suspended), + NewHdata end. invoke_handler([Tr|Traces], Handler, Hdata0) -> diff --git a/lib/runtime_tools/test/dbg_SUITE.erl b/lib/runtime_tools/test/dbg_SUITE.erl index dfae52ed1d..0bcbd67d05 100644 --- a/lib/runtime_tools/test/dbg_SUITE.erl +++ b/lib/runtime_tools/test/dbg_SUITE.erl @@ -25,7 +25,7 @@ ip_port/1, file_port/1, file_port2/1, file_port_schedfix/1, ip_port_busy/1, wrap_port/1, wrap_port_time/1, with_seq_trace/1, dead_suspend/1, local_trace/1, - saved_patterns/1]). + saved_patterns/1, tracer_exit_on_stop/1]). -export([init_per_testcase/2, end_per_testcase/2]). -export([tracee1/1, tracee2/1]). -export([dummy/0, exported/1]). @@ -47,7 +47,7 @@ all() -> [big, tiny, simple, message, distributed, ip_port, file_port, file_port2, file_port_schedfix, ip_port_busy, wrap_port, wrap_port_time, with_seq_trace, dead_suspend, - local_trace, saved_patterns]. + local_trace, saved_patterns, tracer_exit_on_stop]. groups() -> []. @@ -742,6 +742,38 @@ run_dead_suspend() -> dummy() -> ok. 
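For reference (illustration only, not part of the patch), this is the kind of process tracer with a handler fun that the reworked tracer_loop/2 above serves, and whose orderly exit on stop the new test case below checks:

    trace_example() ->
        %% The handler fun gets each trace message and the accumulated state.
        {ok, _} = dbg:tracer(process,
                             {fun(Msg, N) ->
                                      io:format("~p. ~p~n", [N, Msg]),
                                      N + 1
                              end, 1}),
        {ok, _} = dbg:p(self(), [call]),
        {ok, _} = dbg:tp(lists, seq, 2, []),
        lists:seq(1, 3),
        dbg:stop_clear().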
+%% Test that a tracer process does not ignore an exit signal message when it has +%% received (but not handled) trace messages +tracer_exit_on_stop(_) -> + %% Tracer blocks waiting for fun to complete so that the trace message and + %% the exit signal message from the dbg process are in its message queue. + Fun = fun() -> + ?MODULE:dummy(), + Ref = erlang:trace_delivered(self()), + receive {trace_delivered, _, Ref} -> stop() end + end, + {ok, _} = dbg:tracer(process, {fun spawn_once_handler/2, {self(), Fun}}), + {ok, Tracer} = dbg:get_tracer(), + MRef = monitor(process, Tracer), + {ok, _} = dbg:p(self(), [call]), + {ok, _} = dbg:p(new, [call]), + {ok, _} = dbg:tp(?MODULE, dummy, []), + ?MODULE:dummy(), + receive {'DOWN', MRef, _, _, normal} -> ok end, + [{trace,_,call,{?MODULE, dummy,[]}}, + {trace,_,call,{?MODULE, dummy,[]}}] = flush(), + ok. + +spawn_once_handler(Event, {Pid, done} = State) -> + Pid ! Event, + State; +spawn_once_handler(Event, {Pid, Fun}) -> + {_, Ref} = spawn_monitor(Fun), + receive + {'DOWN', Ref, _, _, _} -> + Pid ! Event, + {Pid, done} + end. %% %% Support functions diff --git a/lib/ssh/src/ssh_info.erl b/lib/ssh/src/ssh_info.erl index 9ed598b3ab..e5a8666af0 100644 --- a/lib/ssh/src/ssh_info.erl +++ b/lib/ssh/src/ssh_info.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2008-2014. All Rights Reserved. +%% Copyright Ericsson AB 2008-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -179,7 +179,14 @@ line(Len, Char) -> datetime() -> - {{YYYY,MM,DD}, {H,M,S}} = calendar:now_to_universal_time(now()), + %% Adapt to new OTP 18 erlang time API and be back-compatible + TimeStamp = try + erlang:timestamp() + catch + error:undef -> + erlang:now() + end, + {{YYYY,MM,DD}, {H,M,S}} = calendar:now_to_universal_time(TimeStamp), lists:flatten(io_lib:format('~4w-~2..0w-~2..0w ~2..0w:~2..0w:~2..0w UTC',[YYYY,MM,DD, H,M,S])). diff --git a/lib/ssh/test/ssh_basic_SUITE.erl b/lib/ssh/test/ssh_basic_SUITE.erl index cb1b4ae945..b449012ffc 100644 --- a/lib/ssh/test/ssh_basic_SUITE.erl +++ b/lib/ssh/test/ssh_basic_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2008-2014. All Rights Reserved. +%% Copyright Ericsson AB 2008-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -715,7 +715,14 @@ ssh_connect_arg4_timeout(_Config) -> %% try to connect with a timeout, but "supervise" it Client = spawn(fun() -> - T0 = now(), + %% Adapt to OTP 18 erlang time API and be back-compatible + T0 = try + erlang:monotonic_time() + catch + error:undef -> + %% Use Erlang system time as monotonic time + erlang:now() + end, Rc = ssh:connect("localhost",Port,[],Timeout), ct:log("Client ssh:connect got ~p",[Rc]), Parent ! 
{done,self(),Rc,T0} @@ -724,11 +731,12 @@ ssh_connect_arg4_timeout(_Config) -> %% Wait for client reaction on the connection try: receive {done, Client, {error,timeout}, T0} -> - Msp = ms_passed(T0, now()), + Msp = ms_passed(T0), exit(Server,hasta_la_vista___baby), Low = 0.9*Timeout, High = 1.1*Timeout, - ct:log("Timeout limits: ~p--~p, timeout was ~p, expected ~p",[Low,High,Msp,Timeout]), + ct:log("Timeout limits: ~.4f - ~.4f ms, timeout " + "was ~.4f ms, expected ~p ms",[Low,High,Msp,Timeout]), if Low<Msp, Msp<High -> ok; true -> {fail, "timeout not within limits"} @@ -748,12 +756,16 @@ ssh_connect_arg4_timeout(_Config) -> end. -%% Help function -%% N2-N1 -ms_passed(N1={_,_,M1}, N2={_,_,M2}) -> - {0,{0,Min,Sec}} = calendar:time_difference(calendar:now_to_local_time(N1), - calendar:now_to_local_time(N2)), - 1000 * (Min*60 + Sec + (M2-M1)/1000000). +%% Help function, elapsed milliseconds since T0 +ms_passed({_,_,_} = T0 ) -> + %% OTP 17 and earlier + timer:now_diff(erlang:now(), T0)/1000; + +ms_passed(T0) -> + %% OTP 18 + erlang:convert_time_resolution(erlang:monotonic_time() - T0, + erlang:time_resolution(), + 1000000)/1000. %%-------------------------------------------------------------------- ssh_connect_negtimeout_parallel(Config) -> ssh_connect_negtimeout(Config,true). diff --git a/lib/ssl/doc/src/Makefile b/lib/ssl/doc/src/Makefile index fb12499ef7..cfbf98f6e3 100644 --- a/lib/ssl/doc/src/Makefile +++ b/lib/ssl/doc/src/Makefile @@ -1,7 +1,7 @@ # # %CopyrightBegin% # -# Copyright Ericsson AB 1999-2012. All Rights Reserved. +# Copyright Ericsson AB 1999-2015. All Rights Reserved. # # The contents of this file are subject to the Erlang Public License, # Version 1.1, (the "License"); you may not use this file except in @@ -37,7 +37,7 @@ RELSYSDIR = $(RELEASE_PATH)/lib/$(APPLICATION)-$(VSN) # Target Specs # ---------------------------------------------------- XML_APPLICATION_FILES = refman.xml -XML_REF3_FILES = ssl.xml ssl_session_cache_api.xml +XML_REF3_FILES = ssl.xml ssl_crl_cache.xml ssl_crl_cache.xml ssl_session_cache_api.xml XML_REF6_FILES = ssl_app.xml XML_PART_FILES = release_notes.xml usersguide.xml diff --git a/lib/ssl/doc/src/refman.xml b/lib/ssl/doc/src/refman.xml index ae11198edb..d5f2219af9 100644 --- a/lib/ssl/doc/src/refman.xml +++ b/lib/ssl/doc/src/refman.xml @@ -4,7 +4,7 @@ <application xmlns:xi="http://www.w3.org/2001/XInclude"> <header> <copyright> - <year>1999</year><year>2013</year> + <year>1999</year><year>2015</year> <holder>Ericsson AB. All Rights Reserved.</holder> </copyright> <legalnotice> @@ -28,23 +28,10 @@ <rev>B</rev> <file>refman.sgml</file> </header> - <description> - <p>The <em>SSL</em> application provides secure communication over - sockets. - </p> - <p>This product includes software developed by the OpenSSL Project for - use in the OpenSSL Toolkit (http://www.openssl.org/). - </p> - <p>This product includes cryptographic software written by Eric Young - ([email protected]). - </p> - <p>This product includes software written by Tim Hudson - ([email protected]). 
-    </p>
-    <p>For full OpenSSL and SSLeay license texts, see <seealso marker="licenses#licenses">Licenses</seealso>.</p>
-  </description>
   <xi:include href="ssl_app.xml"/>
   <xi:include href="ssl.xml"/>
+  <xi:include href="ssl_crl_cache.xml"/>
+  <xi:include href="ssl_crl_cache_api.xml"/>
   <xi:include href="ssl_session_cache_api.xml"/> </application>
diff --git a/lib/ssl/doc/src/ssl.xml b/lib/ssl/doc/src/ssl.xml
index 9e6d294f09..c9b02d44ec 100644
--- a/lib/ssl/doc/src/ssl.xml
+++ b/lib/ssl/doc/src/ssl.xml
@@ -51,9 +51,9 @@
 <item>IDEA cipher suites are not supported as they have become deprecated by the latest TLS spec so there is not any real motivation to implement them.</item>
-    <item>CRL and policy certificate extensions are not supported
-    yet. However CRL verification is supported by public_key, only not integrated
-    in ssl yet. </item>
+    <item>CRL validation is supported.</item>
+    <item>Policy certificate extensions are not supported
+    yet. </item>
 <item>Support for 'Server Name Indication' extension client side (RFC 6066 section 3).</item> </list>
@@ -301,10 +301,47 @@ fun(OtpCert :: #'OTPCertificate'{}, Event :: {bad_cert, Reason :: atom() | {revo
 <item> Possible such reasons see <seealso marker="public_key:public_key#pkix_path_validation-3"> public_key:pkix_path_validation/3 </seealso></item> </taglist>
+    </item>
+
+    <tag>{crl_check, boolean() | peer | best_effort}</tag>
+    <item>
+      Perform CRL (Certificate Revocation List) verification
+      <seealso marker="public_key:public_key#pkix_crls_validate-3">
+      public_key:pkix_crls_validate/3</seealso>, during the
+      <seealso
+      marker="public_key:public_key#pkix_path_validation-3">public_key:pkix_path_validation/3 </seealso>
+      invocation on all the certificates in the peer certificate chain. Defaults to
+      false.
+      <p><c>peer</c> - check is only performed on
+      the peer certificate.</p>
+
+      <p><c>best_effort</c> - if certificate revocation status can not be determined
+      it will be accepted as valid.</p>
+
+      <p>The CA certificates specified for the connection will be used to
+      construct the certificate chain validating the CRLs.</p>
+
+      <p>The CRLs will be fetched from a local or external cache
+      <seealso marker="ssl:ssl_crl_cache_api">ssl_crl_cache_api(3)</seealso>.</p>
 </item>
-    <tag>{partial_chain, fun(Chain::[DerCert]) -> {trusted_ca, DerCert} | unknown_ca }</tag>
+    <tag>{crl_cache, {Module::atom(), {DbHandle::internal | term(), Args::list()}}}</tag>
+    <item>
+      <p>Module defaults to ssl_crl_cache with <c> DbHandle </c> internal and an
+      empty argument list. The following arguments may be specified for the internal cache.</p>
+      <taglist>
+        <tag>{http, timeout()}</tag>
+        <item>
+          Enables fetching of CRLs specified as http URIs in <seealso
+          marker="public_key:cert_records"> X509 certificate extensions.</seealso>
+          Requires the OTP inets application.
+        </item>
+      </taglist>
+    </item>
+
+    <tag>{partial_chain, fun(Chain::[DerCert]) -> {trusted_ca, DerCert} | unknown_ca}</tag>
+    <item>
 Claim an intermediat CA in the chain as trusted. TLS will then perform the public_key:pkix_path_validation/3 with the selected CA as trusted anchor and the rest of the chain.
@@ -427,6 +464,23 @@ fun(srp, Username :: string(), UserState :: term()) ->
 Indication extension will be sent if possible, this option may also be used to disable that behavior.</p> </item>
+    <tag>{fallback, boolean()}</tag>
+    <item>
+      <p> Send special cipher suite TLS_FALLBACK_SCSV to avoid undesired TLS version downgrade.
+ Defaults to false</p> + <warning><p>Note this option is not needed in normal TLS usage and should not be used + to implement new clients. But legacy clients that that retries connections in the following manner</p> + + <p><c> ssl:connect(Host, Port, [...{versions, ['tlsv2', 'tlsv1.1', 'tlsv1', 'sslv3']}])</c></p> + <p><c> ssl:connect(Host, Port, [...{versions, [tlsv1.1', 'tlsv1', 'sslv3']}, {fallback, true}])</c></p> + <p><c> ssl:connect(Host, Port, [...{versions, ['tlsv1', 'sslv3']}, {fallback, true}]) </c></p> + <p><c> ssl:connect(Host, Port, [...{versions, ['sslv3']}, {fallback, true}]) </c></p> + + <p>may use it to avoid undesired TLS version downgrade. Note that TLS_FALLBACK_SCSV must also + be supported by the server for the prevention to work. + </p></warning> + </item> + </taglist> </section> diff --git a/lib/ssl/doc/src/ssl_crl_cache.xml b/lib/ssl/doc/src/ssl_crl_cache.xml new file mode 100644 index 0000000000..1ed76d3e2a --- /dev/null +++ b/lib/ssl/doc/src/ssl_crl_cache.xml @@ -0,0 +1,66 @@ +<?xml version="1.0" encoding="utf-8" ?> +<!DOCTYPE erlref SYSTEM "erlref.dtd"> + +<erlref> + <header> + <copyright> + <year>2015</year><year>2015</year> + <holder>Ericsson AB. All Rights Reserved.</holder> + </copyright> + <legalnotice> + The contents of this file are subject to the Erlang Public License, + Version 1.1, (the "License"); you may not use this file except in + compliance with the License. You should have received a copy of the + Erlang Public License along with this software. If not, it can be + retrieved online at http://www.erlang.org/. + + Software distributed under the License is distributed on an "AS IS" + basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See + the License for the specific language governing rights and limitations + under the License. + </legalnotice> + <title>ssl_crl_cache</title> + <file>ssl_crl_cache.xml</file> + </header> + + <module>ssl_crl_cache</module> + <modulesummary>CRL cache </modulesummary> + <description> + <p> + Implements an internal CRL (Certificate Revocation List) cache. + In addition to implementing the <seealso + marker="ssl_cache_crl_api"> ssl_cache_crl_api</seealso> + the following functions are available. + </p> + </description> + + <funcs> + <func> + <name>insert(CRLSrc) -> ok | {error, Reason}</name> + <name>insert(URI, CRLSrc) -> ok | {error, Reason}</name> + <fsummary> </fsummary> + <type> + <v> CRLSrc = {file, string()} | {der, [ <seealso + marker="public_key:public_key"> der_encoded() </seealso> ]}</v> + <v> URI = http_uri:uri()</v> + <v> Reason = term()</v> + </type> + <desc> + Insert CRLs into the ssl applications local cache. + </desc> + </func> + + <func> + <name>delete(Entries) -> ok | {error, Reason} </name> + <fsummary> </fsummary> + <type> + <v> Entries = http_uri:uri() | {file, string()} | {der, [<seealso + marker="public_key:public_key"> der_encoded() </seealso>]}</v> + <v> Reason = term()</v> + </type> + <desc> + Delete CRLs from the ssl applications local cache. + </desc> + </func> + </funcs> +</erlref>
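Editor's note: the insert/2, insert/1 and delete/1 functions documented above manage the ssl application's internal CRL cache directly. A minimal usage sketch, assuming a hypothetical distribution-point URI and file name; DerCrl1 and DerCrl2 stand for DER encoded CRL binaries obtained elsewhere:

    %% Pre-load the internal cache with CRLs from a local PEM file and
    %% associate them with the HTTP distribution point named in the
    %% certificates.
    ok = ssl_crl_cache:insert("http://crl.example.com/ca.crl",
                              {file, "/etc/pki/ca-crl.pem"}),

    %% CRLs without a usable distribution point can be inserted as DER
    %% binaries without a URI.
    ok = ssl_crl_cache:insert({der, [DerCrl1, DerCrl2]}),

    %% Drop the entries again when they are no longer wanted.
    ok = ssl_crl_cache:delete("http://crl.example.com/ca.crl").

The entries become visible to connections that keep the default {crl_cache, {ssl_crl_cache, {internal, ...}}} setting together with one of the crl_check modes described earlier.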
\ No newline at end of file diff --git a/lib/ssl/doc/src/ssl_crl_cache_api.xml b/lib/ssl/doc/src/ssl_crl_cache_api.xml new file mode 100644 index 0000000000..24365c9f59 --- /dev/null +++ b/lib/ssl/doc/src/ssl_crl_cache_api.xml @@ -0,0 +1,97 @@ +<?xml version="1.0" encoding="utf-8" ?> +<!DOCTYPE erlref SYSTEM "erlref.dtd"> + +<erlref> + <header> + <copyright> + <year>2015</year><year>2015</year> + <holder>Ericsson AB. All Rights Reserved.</holder> + </copyright> + <legalnotice> + The contents of this file are subject to the Erlang Public License, + Version 1.1, (the "License"); you may not use this file except in + compliance with the License. You should have received a copy of the + Erlang Public License along with this software. If not, it can be + retrieved online at http://www.erlang.org/. + + Software distributed under the License is distributed on an "AS IS" + basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See + the License for the specific language governing rights and limitations + under the License. + </legalnotice> + <title>ssl_crl_cache_api</title> + <file>ssl_crl_cache_api.xml</file> + </header> + + <module>ssl_crl_cache_api</module> + <modulesummary>API for a SSL/TLS CRL (Certificate Revocation List) cache.</modulesummary> + <description> + <p> + When SSL/TLS performs certificate path validation according to + <url href="http://www.ietf.org/rfc/rfc5280.txt">RFC 5280 </url> it should + also perform CRL validation checks. To enable the CRL checks the application + needs access to CRLs. A database of CRLs can be set up in many different ways. + This module provides an API to integrate an arbitrary CRL cache with the erlang + ssl application. It is also used by the application itself to provide a simple + default implementation of a CRL cache. + </p> + </description> + + <section> + <title>Common Data Types</title> + + <p>The following data types are used in the functions below: + </p> + + <p><c>cache_ref() = opaque()</c></p> + <p> dist_point() = #'DistributionPoint'{} see <seealso + marker="public_key:cert_records"> X509 certificates records</seealso></p> + </section> + + <funcs> + <func> + <name>lookup(DistributionPoint, DbHandle) -> not_available | CRLs </name> + <fsummary> </fsummary> + <type> + <v> DistributionPoint = dist_point() </v> + <v> DbHandle = cache_ref() </v> + <v> CRLs = [<seealso + marker="public_key:public_key">public_key:der_encoded()</seealso>] </v> + </type> + <desc> <p>Lookup the CRLs belonging to the distribution point <c> Distributionpoint </c> </p>. + This function may choose to only look in the cache or to follow distribution point + links depending on how the cache is administrated. 
+ </desc> + </func> + + <func> + <name>select(Issuer, DbHandle) -> CRLs </name> + <fsummary>Select the CRLs in the cache that are issued by <c>Issuer</c></fsummary> + <type> + <v> Issuer = <seealso + marker="public_key:public_key">public_key:issuer_name()</seealso></v> + <v> DbHandle = cache_ref() </v> + </type> + <desc> + <p>Select the CRLs in the cache that are issued by <c>Issuer</c> </p> + </desc> + </func> + + <func> + <name>fresh_crl(DistributionPoint, CRL) -> FreshCRL</name> + <fsummary> <c>fun fresh_crl/2 </c> will be used as input option <c>update_crl</c> to + public_key:pkix_crls_validate/3 </fsummary> + <type> + <v> DistributionPoint = dist_point() </v> + <v> CRL = [<seealso + marker="public_key:public_key">public_key:der_encoded()</seealso>] </v> + <v> FreshCRL = [<seealso + marker="public_key:public_key">public_key:der_encoded()</seealso>] </v> + </type> + <desc> + <p> <c>fun fresh_crl/2 </c> will be used as input option <c>update_crl</c> to + <seealso marker="public_key#pkix_path_validation-3">public_key:pkix_crls_validate/3 </seealso> </p> + </desc> + </func> + </funcs> +</erlref>
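Editor's note: the behaviour documented above has three callbacks, lookup/2, select/2 and fresh_crl/2. A minimal sketch of a user-defined cache module, assuming a hypothetical module name and a named, public ETS table of {URI, IssuerName, DerCRL} entries owned by the application (the DbHandle from the crl_cache option is ignored in this sketch):

    -module(my_crl_cache).
    -behaviour(ssl_crl_cache_api).

    -include_lib("public_key/include/public_key.hrl").

    -export([lookup/2, select/2, fresh_crl/2]).

    -define(TAB, my_crl_table).   %% assumed to be created by the application

    lookup(#'DistributionPoint'{distributionPoint = {fullName, Names}}, _DbHandle) ->
        URIs = [URI || {uniformResourceIdentifier, URI} <- Names],
        case [CRL || URI <- URIs, {_, _, CRL} <- ets:lookup(?TAB, URI)] of
            []   -> not_available;
            CRLs -> CRLs
        end;
    lookup(_DistributionPoint, _DbHandle) ->
        not_available.

    select(Issuer, _DbHandle) ->
        [CRL || {_, _, CRL} <- ets:match_object(?TAB, {'_', Issuer, '_'})].

    fresh_crl(_DistributionPoint, CRL) ->
        %% No refresh strategy in this sketch; return the cached CRL as is.
        CRL.

Such a module would be selected per connection with something like {crl_cache, {my_crl_cache, {undefined, []}}} together with a crl_check setting.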
\ No newline at end of file diff --git a/lib/ssl/doc/src/ssl_session_cache_api.xml b/lib/ssl/doc/src/ssl_session_cache_api.xml index cb97bbfbb2..9f87d31e90 100644 --- a/lib/ssl/doc/src/ssl_session_cache_api.xml +++ b/lib/ssl/doc/src/ssl_session_cache_api.xml @@ -4,7 +4,7 @@ <erlref> <header> <copyright> - <year>1999</year><year>2014</year> + <year>1999</year><year>2015</year> <holder>Ericsson AB. All Rights Reserved.</holder> </copyright> <legalnotice> @@ -119,14 +119,14 @@ <func> <name>select_session(Cache, PartialKey) -> [session()]</name> - <fsummary>>Selects sessions that could be reused.</fsummary> + <fsummary>Selects a sessions that could be reused.</fsummary> <type> <v> Cache = cache_ref()</v> <v> PartialKey = partialkey()</v> <v> Session = session()</v> </type> <desc> - <p>Selects sessions that could be reused. Should be callable + <p>Selects a sessions that could be reused. Should be callable from any process. </p> </desc> diff --git a/lib/ssl/src/Makefile b/lib/ssl/src/Makefile index 0c00a650b9..d71d3fc445 100644 --- a/lib/ssl/src/Makefile +++ b/lib/ssl/src/Makefile @@ -1,7 +1,7 @@ # # %CopyrightBegin% # -# Copyright Ericsson AB 1999-2014. All Rights Reserved. +# Copyright Ericsson AB 1999-2015. All Rights Reserved. # # The contents of this file are subject to the Erlang Public License, # Version 1.1, (the "License"); you may not use this file except in @@ -38,7 +38,8 @@ RELSYSDIR = $(RELEASE_PATH)/lib/ssl-$(VSN) # ---------------------------------------------------- BEHAVIOUR_MODULES= \ - ssl_session_cache_api + ssl_session_cache_api \ + ssl_crl_cache_api MODULES= \ ssl \ @@ -65,6 +66,8 @@ MODULES= \ ssl_manager \ ssl_session \ ssl_session_cache \ + ssl_crl\ + ssl_crl_cache \ ssl_socket \ ssl_listen_tracker_sup \ tls_record \ @@ -164,5 +167,5 @@ $(EBIN)/ssl_session_cache.$(EMULATOR): ssl_internal.hrl ssl_handshake.hrl $(EBIN)/ssl_session_cache_api.$(EMULATOR): ssl_internal.hrl ssl_handshake.hrl $(EBIN)/ssl_ssl3.$(EMULATOR): ssl_internal.hrl ssl_record.hrl ssl_cipher.hrl $(EBIN)/ssl_tls1.$(EMULATOR): ssl_internal.hrl ssl_record.hrl ssl_cipher.hrl - +$(EBIN)/ssl_cache.$(EMULATOR): ssl_cache.erl ssl_internal.hrl ../../public_key/include/public_key.hrl diff --git a/lib/ssl/src/ssl.app.src b/lib/ssl/src/ssl.app.src index 36681e2897..955875fa95 100644 --- a/lib/ssl/src/ssl.app.src +++ b/lib/ssl/src/ssl.app.src @@ -39,6 +39,10 @@ ssl_manager, ssl_pkix_db, ssl_certificate, + %% CRL handling + ssl_crl, + ssl_crl_cache, + ssl_crl_cache_api, %% App structure ssl_app, ssl_sup, diff --git a/lib/ssl/src/ssl.erl b/lib/ssl/src/ssl.erl index ab26b6abc4..623fa92121 100644 --- a/lib/ssl/src/ssl.erl +++ b/lib/ssl/src/ssl.erl @@ -653,7 +653,10 @@ handle_options(Opts0) -> server_name_indication = handle_option(server_name_indication, Opts, undefined), honor_cipher_order = handle_option(honor_cipher_order, Opts, false), protocol = proplists:get_value(protocol, Opts, tls), - padding_check = proplists:get_value(padding_check, Opts, true) + padding_check = proplists:get_value(padding_check, Opts, true), + fallback = proplists:get_value(fallback, Opts, false), + crl_check = handle_option(crl_check, Opts, false), + crl_cache = handle_option(crl_cache, Opts, {ssl_crl_cache, {internal, []}}) }, CbInfo = proplists:get_value(cb_info, Opts, {gen_tcp, tcp, tcp_closed, tcp_error}), @@ -666,7 +669,8 @@ handle_options(Opts0) -> cb_info, renegotiate_at, secure_renegotiate, hibernate_after, erl_dist, next_protocols_advertised, client_preferred_next_protocols, log_alert, - server_name_indication, honor_cipher_order, 
padding_check], + server_name_indication, honor_cipher_order, padding_check, crl_check, crl_cache, + fallback], SockOpts = lists:foldl(fun(Key, PropList) -> proplists:delete(Key, PropList) @@ -846,6 +850,14 @@ validate_option(honor_cipher_order, Value) when is_boolean(Value) -> Value; validate_option(padding_check, Value) when is_boolean(Value) -> Value; +validate_option(fallback, Value) when is_boolean(Value) -> + Value; +validate_option(crl_check, Value) when is_boolean(Value) -> + Value; +validate_option(crl_check, Value) when (Value == best_effort) or (Value == peer) -> + Value; +validate_option(crl_cache, {Cb, {_Handle, Options}} = Value) when is_atom(Cb) and is_list(Options) -> + Value; validate_option(Opt, Value) -> throw({error, {options, {Opt, Value}}}). diff --git a/lib/ssl/src/ssl_alert.erl b/lib/ssl/src/ssl_alert.erl index 78dc98bc25..9e372f739a 100644 --- a/lib/ssl/src/ssl_alert.erl +++ b/lib/ssl/src/ssl_alert.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2007-2014. All Rights Reserved. +%% Copyright Ericsson AB 2007-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -161,5 +161,7 @@ description_txt(?BAD_CERTIFICATE_HASH_VALUE) -> "bad certificate hash value"; description_txt(?UNKNOWN_PSK_IDENTITY) -> "unknown psk identity"; +description_txt(?INAPPROPRIATE_FALLBACK) -> + "inappropriate fallback"; description_txt(Enum) -> lists:flatten(io_lib:format("unsupported/unknown alert: ~p", [Enum])). diff --git a/lib/ssl/src/ssl_alert.hrl b/lib/ssl/src/ssl_alert.hrl index f4f1d74264..a3619e4a35 100644 --- a/lib/ssl/src/ssl_alert.hrl +++ b/lib/ssl/src/ssl_alert.hrl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2007-2014. All Rights Reserved. +%% Copyright Ericsson AB 2007-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -58,6 +58,7 @@ %% protocol_version(70), %% insufficient_security(71), %% internal_error(80), +%% inappropriate_fallback(86), %% user_canceled(90), %% no_renegotiation(100), %% RFC 4366 @@ -93,6 +94,7 @@ -define(PROTOCOL_VERSION, 70). -define(INSUFFICIENT_SECURITY, 71). -define(INTERNAL_ERROR, 80). +-define(INAPPROPRIATE_FALLBACK, 86). -define(USER_CANCELED, 90). -define(NO_RENEGOTIATION, 100). -define(UNSUPPORTED_EXTENSION, 110). diff --git a/lib/ssl/src/ssl_certificate.erl b/lib/ssl/src/ssl_certificate.erl index 30d224fee2..764bd82de0 100644 --- a/lib/ssl/src/ssl_certificate.erl +++ b/lib/ssl/src/ssl_certificate.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2007-2014 All Rights Reserved. +%% Copyright Ericsson AB 2007-2015 All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -33,7 +33,8 @@ -export([trusted_cert_and_path/4, certificate_chain/3, file_to_certificats/2, - validate_extension/3, + file_to_crls/2, + validate/3, is_valid_extkey_usage/2, is_valid_key_usage/2, select_extension/2, @@ -84,15 +85,18 @@ trusted_cert_and_path(CertChain, CertDbHandle, CertDbRef, PartialChainHandler) - %%-------------------------------------------------------------------- -spec certificate_chain(undefined | binary(), db_handle(), certdb_ref()) -> - {error, no_cert} | {ok, [der_cert()]}. + {error, no_cert} | {ok, #'OTPCertificate'{} | undefined, [der_cert()]}. 
%% %% Description: Return the certificate chain to send to peer. %%-------------------------------------------------------------------- certificate_chain(undefined, _, _) -> {error, no_cert}; -certificate_chain(OwnCert, CertDbHandle, CertsDbRef) -> +certificate_chain(OwnCert, CertDbHandle, CertsDbRef) when is_binary(OwnCert) -> ErlCert = public_key:pkix_decode_cert(OwnCert, otp), - certificate_chain(ErlCert, OwnCert, CertDbHandle, CertsDbRef, [OwnCert]). + certificate_chain(ErlCert, OwnCert, CertDbHandle, CertsDbRef, [OwnCert]); +certificate_chain(OwnCert, CertDbHandle, CertsDbRef) -> + DerCert = public_key:pkix_encode('OTPCertificate', OwnCert, otp), + certificate_chain(OwnCert, DerCert, CertDbHandle, CertsDbRef, [DerCert]). %%-------------------------------------------------------------------- -spec file_to_certificats(binary(), term()) -> [der_cert()]. %% @@ -101,29 +105,39 @@ certificate_chain(OwnCert, CertDbHandle, CertsDbRef) -> file_to_certificats(File, DbHandle) -> {ok, List} = ssl_manager:cache_pem_file(File, DbHandle), [Bin || {'Certificate', Bin, not_encrypted} <- List]. + %%-------------------------------------------------------------------- --spec validate_extension(term(), {extension, #'Extension'{}} | {bad_cert, atom()} | valid, - term()) -> {valid, term()} | - {fail, tuple()} | - {unknown, term()}. +-spec file_to_crls(binary(), term()) -> [der_cert()]. +%% +%% Description: Return list of DER encoded certificates. +%%-------------------------------------------------------------------- +file_to_crls(File, DbHandle) -> + {ok, List} = ssl_manager:cache_pem_file(File, DbHandle), + [Bin || {'CertificateList', Bin, not_encrypted} <- List]. + +%%-------------------------------------------------------------------- +-spec validate(term(), {extension, #'Extension'{}} | {bad_cert, atom()} | valid, + term()) -> {valid, term()} | + {fail, tuple()} | + {unknown, term()}. %% %% Description: Validates ssl/tls specific extensions %%-------------------------------------------------------------------- -validate_extension(_,{extension, #'Extension'{extnID = ?'id-ce-extKeyUsage', - extnValue = KeyUse}}, Role) -> +validate(_,{extension, #'Extension'{extnID = ?'id-ce-extKeyUsage', + extnValue = KeyUse}}, {Role, _,_, _, _}) -> case is_valid_extkey_usage(KeyUse, Role) of true -> {valid, Role}; false -> {fail, {bad_cert, invalid_ext_key_usage}} end; -validate_extension(_, {bad_cert, _} = Reason, _) -> - {fail, Reason}; -validate_extension(_, {extension, _}, Role) -> +validate(_, {extension, _}, Role) -> {unknown, Role}; -validate_extension(_, valid, Role) -> +validate(_, {bad_cert, _} = Reason, _) -> + {fail, Reason}; +validate(_, valid, Role) -> {valid, Role}; -validate_extension(_, valid_peer, Role) -> +validate(_, valid_peer, Role) -> {valid, Role}. %%-------------------------------------------------------------------- @@ -194,14 +208,14 @@ certificate_chain(OtpCert, _Cert, CertDbHandle, CertsDbRef, Chain) -> %% certificate. The verification of the %% cert chain will fail if guess is %% incorrect. - {ok, lists:reverse(Chain)} + {ok, undefined, lists:reverse(Chain)} end; {{ok, {SerialNr, Issuer}}, SelfSigned} -> certificate_chain(CertDbHandle, CertsDbRef, Chain, SerialNr, Issuer, SelfSigned) end. 
-certificate_chain(_,_, Chain, _SerialNr, _Issuer, true) -> - {ok, lists:reverse(Chain)}; +certificate_chain(_, _, [RootCert | _] = Chain, _, _, true) -> + {ok, RootCert, lists:reverse(Chain)}; certificate_chain(CertDbHandle, CertsDbRef, Chain, SerialNr, Issuer, _SelfSigned) -> case ssl_manager:lookup_trusted_cert(CertDbHandle, CertsDbRef, @@ -214,7 +228,7 @@ certificate_chain(CertDbHandle, CertsDbRef, Chain, SerialNr, Issuer, _SelfSigned %% The trusted cert may be obmitted from the chain as the %% counter part needs to have it anyway to be able to %% verify it. - {ok, lists:reverse(Chain)} + {ok, undefined, lists:reverse(Chain)} end. find_issuer(OtpCert, CertDbHandle) -> diff --git a/lib/ssl/src/ssl_cipher.erl b/lib/ssl/src/ssl_cipher.erl index 5ec6e1c31b..8584e56d6c 100644 --- a/lib/ssl/src/ssl_cipher.erl +++ b/lib/ssl/src/ssl_cipher.erl @@ -37,7 +37,7 @@ suite/1, suites/1, all_suites/1, ec_keyed_suites/0, anonymous_suites/1, psk_suites/1, srp_suites/0, rc4_suites/1, openssl_suite/1, openssl_suite_name/1, filter/2, filter_suites/1, - hash_algorithm/1, sign_algorithm/1, is_acceptable_hash/2]). + hash_algorithm/1, sign_algorithm/1, is_acceptable_hash/2, is_fallback/1]). -export_type([cipher_suite/0, erl_cipher_suite/0, openssl_cipher_suite/0, @@ -1442,6 +1442,9 @@ is_acceptable_prf(default_prf, _) -> is_acceptable_prf(Prf, Algos) -> proplists:get_bool(Prf, Algos). +is_fallback(CipherSuites)-> + lists:member(?TLS_FALLBACK_SCSV, CipherSuites). + %%-------------------------------------------------------------------- %%% Internal functions %%-------------------------------------------------------------------- diff --git a/lib/ssl/src/ssl_cipher.hrl b/lib/ssl/src/ssl_cipher.hrl index 448c2405aa..8689a3c68b 100644 --- a/lib/ssl/src/ssl_cipher.hrl +++ b/lib/ssl/src/ssl_cipher.hrl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2007-2014. All Rights Reserved. +%% Copyright Ericsson AB 2007-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -356,6 +356,10 @@ %% hello extension data as they should. -define(TLS_EMPTY_RENEGOTIATION_INFO_SCSV, <<?BYTE(16#00), ?BYTE(16#FF)>>). +%% TLS Fallback Signaling Cipher Suite Value (SCSV) for Preventing Protocol +%% Downgrade Attacks +-define(TLS_FALLBACK_SCSV, <<?BYTE(16#56), ?BYTE(16#00)>>). + %%% PSK Cipher Suites RFC 4279 %% TLS_PSK_WITH_RC4_128_SHA = { 0x00, 0x8A }; diff --git a/lib/ssl/src/ssl_config.erl b/lib/ssl/src/ssl_config.erl index 545b8aa0f6..fc8b214a29 100644 --- a/lib/ssl/src/ssl_config.erl +++ b/lib/ssl/src/ssl_config.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2007-2013. All Rights Reserved. +%% Copyright Ericsson AB 2007-2015. All Rights Reserved. 
%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -31,13 +31,13 @@ init(SslOpts, Role) -> init_manager_name(SslOpts#ssl_options.erl_dist), - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, OwnCert} + {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbHandle, OwnCert} = init_certificates(SslOpts, Role), PrivateKey = init_private_key(PemCacheHandle, SslOpts#ssl_options.key, SslOpts#ssl_options.keyfile, SslOpts#ssl_options.password, Role), DHParams = init_diffie_hellman(PemCacheHandle, SslOpts#ssl_options.dh, SslOpts#ssl_options.dhfile, Role), - {ok, CertDbRef, CertDbHandle, FileRefHandle, CacheHandle, OwnCert, PrivateKey, DHParams}. + {ok, CertDbRef, CertDbHandle, FileRefHandle, CacheHandle, CRLDbHandle, OwnCert, PrivateKey, DHParams}. init_manager_name(false) -> put(ssl_manager, ssl_manager:manager_name(normal)); @@ -46,9 +46,11 @@ init_manager_name(true) -> init_certificates(#ssl_options{cacerts = CaCerts, cacertfile = CACertFile, - certfile = CertFile, - cert = Cert}, Role) -> - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle} = + certfile = CertFile, + cert = Cert, + crl_cache = CRLCache + }, Role) -> + {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo} = try Certs = case CaCerts of undefined -> @@ -56,39 +58,40 @@ init_certificates(#ssl_options{cacerts = CaCerts, _ -> {der, CaCerts} end, - {ok, _, _, _, _, _} = ssl_manager:connection_init(Certs, Role) + {ok, _, _, _, _, _, _} = ssl_manager:connection_init(Certs, Role, CRLCache) catch _:Reason -> file_error(CACertFile, {cacertfile, Reason}) end, init_certificates(Cert, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, - CacheHandle, CertFile, Role). + CacheHandle, CRLDbInfo, CertFile, Role). 
-init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, <<>>, _) -> - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, undefined}; +init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, + CRLDbInfo, <<>>, _) -> + {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, undefined}; init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, - CacheHandle, CertFile, client) -> + CacheHandle, CRLDbInfo, CertFile, client) -> try %% Ignoring potential proxy-certificates see: %% http://dev.globus.org/wiki/Security/ProxyFileFormat [OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCacheHandle), - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, OwnCert} + {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, OwnCert} catch _Error:_Reason -> - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, undefined} + {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, undefined} end; init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, - PemCacheHandle, CacheRef, CertFile, server) -> + PemCacheHandle, CacheRef, CRLDbInfo, CertFile, server) -> try [OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCacheHandle), - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, OwnCert} + {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, OwnCert} catch _:Reason -> file_error(CertFile, {certfile, Reason}) end; -init_certificates(Cert, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, _, _) -> - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, Cert}. +init_certificates(Cert, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, _, _) -> + {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, Cert}. init_private_key(_, undefined, <<>>, _Password, _Client) -> undefined; diff --git a/lib/ssl/src/ssl_connection.erl b/lib/ssl/src/ssl_connection.erl index b6059eac58..08d0145aa7 100644 --- a/lib/ssl/src/ssl_connection.erl +++ b/lib/ssl/src/ssl_connection.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2013-2014. All Rights Reserved. +%% Copyright Ericsson AB 2013-2015. All Rights Reserved. 
%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -411,11 +411,15 @@ certify(#certificate{} = Cert, role = Role, cert_db = CertDbHandle, cert_db_ref = CertDbRef, + crl_db = CRLDbInfo, ssl_options = Opts} = State, Connection) -> - case ssl_handshake:certify(Cert, CertDbHandle, CertDbRef, Opts#ssl_options.depth, + case ssl_handshake:certify(Cert, CertDbHandle, CertDbRef, + Opts#ssl_options.depth, Opts#ssl_options.verify, Opts#ssl_options.verify_fun, Opts#ssl_options.partial_chain, + Opts#ssl_options.crl_check, + CRLDbInfo, Role) of {PeerCert, PublicKeyInfo} -> handle_peer_cert(Role, PeerCert, PublicKeyInfo, @@ -964,7 +968,7 @@ format_status(terminate, [_, State]) -> %%% Internal functions %%-------------------------------------------------------------------- ssl_config(Opts, Role, State) -> - {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, OwnCert, Key, DHParams} = + {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, CRLDbInfo, OwnCert, Key, DHParams} = ssl_config:init(Opts, Role), Handshake = ssl_handshake:init_handshake_history(), TimeStamp = calendar:datetime_to_gregorian_seconds({date(), time()}), @@ -975,6 +979,7 @@ ssl_config(Opts, Role, State) -> file_ref_db = FileRefHandle, cert_db_ref = Ref, cert_db = CertDbHandle, + crl_db = CRLDbInfo, session_cache = CacheHandle, private_key = Key, diffie_hellman_params = DHParams, diff --git a/lib/ssl/src/ssl_connection.hrl b/lib/ssl/src/ssl_connection.hrl index b9a1ef3a84..ac3b26e4bf 100644 --- a/lib/ssl/src/ssl_connection.hrl +++ b/lib/ssl/src/ssl_connection.hrl @@ -1,8 +1,7 @@ - %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2013-2014. All Rights Reserved. +%% Copyright Ericsson AB 2013-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -53,6 +52,7 @@ session :: #session{} | secret_printout(), session_cache :: db_handle(), session_cache_cb :: atom(), + crl_db :: term(), negotiated_version :: ssl_record:ssl_version(), client_certificate_requested = false :: boolean(), key_algorithm :: ssl_cipher:key_algo(), diff --git a/lib/ssl/src/ssl_crl.erl b/lib/ssl/src/ssl_crl.erl new file mode 100644 index 0000000000..b8761f0601 --- /dev/null +++ b/lib/ssl/src/ssl_crl.erl @@ -0,0 +1,82 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2015-2015. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% + +%---------------------------------------------------------------------- +%% Purpose: CRL handling +%%---------------------------------------------------------------------- + +-module(ssl_crl). + +-include("ssl_alert.hrl"). +-include("ssl_internal.hrl"). +-include_lib("public_key/include/public_key.hrl"). + +-export([trusted_cert_and_path/3]). 
+ +trusted_cert_and_path(CRL, {SerialNumber, Issuer},{Db, DbRef} = DbHandle) -> + case ssl_pkix_db:lookup_trusted_cert(Db, DbRef, SerialNumber, Issuer) of + undefined -> + trusted_cert_and_path(CRL, issuer_not_found, DbHandle); + {ok, {_, OtpCert}} -> + {ok, Root, Chain} = ssl_certificate:certificate_chain(OtpCert, Db, DbRef), + {ok, Root, lists:reverse(Chain)} + end; + +trusted_cert_and_path(CRL, issuer_not_found, {Db, DbRef} = DbHandle) -> + try find_issuer(CRL, DbHandle) of + OtpCert -> + {ok, Root, Chain} = ssl_certificate:certificate_chain(OtpCert, Db, DbRef), + {ok, Root, lists:reverse(Chain)} + catch + throw:_ -> + {error, issuer_not_found} + end. + +find_issuer(CRL, {Db,_}) -> + Issuer = public_key:pkix_normalize_name(public_key:pkix_crl_issuer(CRL)), + IsIssuerFun = + fun({_Key, {_Der,ErlCertCandidate}}, Acc) -> + verify_crl_issuer(CRL, ErlCertCandidate, Issuer, Acc); + (_, Acc) -> + Acc + end, + + try ssl_pkix_db:foldl(IsIssuerFun, issuer_not_found, Db) of + issuer_not_found -> + {error, issuer_not_found} + catch + {ok, IssuerCert} -> + IssuerCert + end. + + +verify_crl_issuer(CRL, ErlCertCandidate, Issuer, NotIssuer) -> + TBSCert = ErlCertCandidate#'OTPCertificate'.tbsCertificate, + case public_key:pkix_normalize_name(TBSCert#'OTPTBSCertificate'.subject) of + Issuer -> + case public_key:pkix_crl_verify(CRL, ErlCertCandidate) of + true -> + throw({ok, ErlCertCandidate}); + false -> + NotIssuer; + _ -> + NotIssuer + end; + _ -> + NotIssuer + end. diff --git a/lib/ssl/src/ssl_crl_cache.erl b/lib/ssl/src/ssl_crl_cache.erl new file mode 100644 index 0000000000..b2bdb19979 --- /dev/null +++ b/lib/ssl/src/ssl_crl_cache.erl @@ -0,0 +1,179 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2015-2015. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% + +%---------------------------------------------------------------------- +%% Purpose: Simple default CRL cache +%%---------------------------------------------------------------------- + +-module(ssl_crl_cache). + +-include("ssl_internal.hrl"). +-include_lib("public_key/include/public_key.hrl"). + +-behaviour(ssl_crl_cache_api). + +-export([lookup/2, select/2, fresh_crl/2]). +-export([insert/1, insert/2, delete/1]). + +%%==================================================================== +%% Cache callback API +%%==================================================================== + +lookup(#'DistributionPoint'{distributionPoint={fullName, Names}}, + CRLDbInfo) -> + get_crls(Names, CRLDbInfo); +lookup(_,_) -> + not_available. + +select(Issuer, {{_Cache, Mapping},_}) -> + case ssl_pkix_db:lookup(Issuer, Mapping) of + undefined -> + []; + CRLs -> + CRLs + end. + +fresh_crl(DistributionPoint, CRL) -> + case get_crls(DistributionPoint, undefined) of + not_available -> + CRL; + [NewCRL] -> + NewCRL + end. 
+ +%%==================================================================== +%% API +%%==================================================================== + +insert(CRLs) -> + insert(?NO_DIST_POINT, CRLs). + +insert(URI, {file, File}) when is_list(URI) -> + case file:read_file(File) of + {ok, PemBin} -> + PemEntries = public_key:pem_decode(PemBin), + CRLs = [ CRL || {'CertificateList', CRL, not_encrypted} + <- PemEntries], + do_insert(URI, CRLs); + Error -> + Error + end; +insert(URI, {der, CRLs}) -> + do_insert(URI, CRLs). + +delete({file, File}) -> + case file:read_file(File) of + {ok, PemBin} -> + PemEntries = public_key:pem_decode(PemBin), + CRLs = [ CRL || {'CertificateList', CRL, not_encrypted} + <- PemEntries], + ssl_manager:delete_crls({?NO_DIST_POINT, CRLs}); + Error -> + Error + end; +delete({der, CRLs}) -> + ssl_manager:delete_crls({?NO_DIST_POINT, CRLs}); + +delete(URI) -> + case http_uri:parse(URI) of + {ok, {http, _, _ , _, Path,_}} -> + ssl_manager:delete_crls(string:strip(Path, left, $/)); + _ -> + {error, {only_http_distribution_points_supported, URI}} + end. + +%%-------------------------------------------------------------------- +%%% Internal functions +%%-------------------------------------------------------------------- +do_insert(URI, CRLs) -> + case http_uri:parse(URI) of + {ok, {http, _, _ , _, Path,_}} -> + ssl_manager:insert_crls(string:strip(Path, left, $/), CRLs); + _ -> + {error, {only_http_distribution_points_supported, URI}} + end. + +get_crls([], _) -> + not_available; +get_crls([{uniformResourceIdentifier, "http"++_ = URL} | Rest], + CRLDbInfo) -> + case cache_lookup(URL, CRLDbInfo) of + [] -> + handle_http(URL, Rest, CRLDbInfo); + CRLs -> + CRLs + end; +get_crls([ _| Rest], CRLDbInfo) -> + %% unsupported CRL location + get_crls(Rest, CRLDbInfo). + +http_lookup(URL, Rest, CRLDbInfo, Timeout) -> + case application:ensure_started(inets) of + ok -> + http_get(URL, Rest, CRLDbInfo, Timeout); + _ -> + get_crls(Rest, CRLDbInfo) + end. + +http_get(URL, Rest, CRLDbInfo, Timeout) -> + case httpc:request(get, {URL, [{"connection", "close"}]}, + [{timeout, Timeout}], [{body_format, binary}]) of + {ok, {_Status, _Headers, Body}} -> + case Body of + <<"-----BEGIN", _/binary>> -> + Pem = public_key:pem_decode(Body), + lists:filtermap(fun({'CertificateList', + CRL, not_encrypted}) -> + {true, CRL}; + (_) -> + false + end, Pem); + _ -> + try public_key:der_decode('CertificateList', Body) of + _ -> + [Body] + catch + _:_ -> + get_crls(Rest, CRLDbInfo) + end + end; + {error, _Reason} -> + get_crls(Rest, CRLDbInfo) + end. + +cache_lookup(_, undefined) -> + []; +cache_lookup(URL, {{Cache, _}, _}) -> + {ok, {_, _, _ , _, Path,_}} = http_uri:parse(URL), + case ssl_pkix_db:lookup(string:strip(Path, left, $/), Cache) of + undefined -> + []; + CRLs -> + CRLs + end. + +handle_http(URI, Rest, {_, [{http, Timeout}]} = CRLDbInfo) -> + CRLs = http_lookup(URI, Rest, CRLDbInfo, Timeout), + %% Uncomment to improve performance, but need to + %% implement cache limit and or cleaning to prevent + %% DoS attack possibilities + %%insert(URI, {der, CRLs}), + CRLs; +handle_http(_, Rest, CRLDbInfo) -> + get_crls(Rest, CRLDbInfo). + diff --git a/lib/ssl/src/ssl_crl_cache_api.erl b/lib/ssl/src/ssl_crl_cache_api.erl new file mode 100644 index 0000000000..0915ba12e5 --- /dev/null +++ b/lib/ssl/src/ssl_crl_cache_api.erl @@ -0,0 +1,30 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2015-2015. All Rights Reserved. 
+%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +%% + +-module(ssl_crl_cache_api). + +-include_lib("public_key/include/public_key.hrl"). + +-type db_handle() :: term(). + +-callback lookup(#'DistributionPoint'{}, db_handle()) -> not_available | [public_key:der_encode()]. +-callback select(term(), db_handle()) -> [public_key:der_encode()]. +-callback fresh_crl(#'DistributionPoint'{}, public_key:der_encode()) -> public_key:der_encode(). diff --git a/lib/ssl/src/ssl_handshake.erl b/lib/ssl/src/ssl_handshake.erl index 07535e79b4..6cab8eb7a1 100644 --- a/lib/ssl/src/ssl_handshake.erl +++ b/lib/ssl/src/ssl_handshake.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2013-2014. All Rights Reserved. +%% Copyright Ericsson AB 2013-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -49,7 +49,7 @@ finished/5, next_protocol/1]). %% Handle handshake messages --export([certify/8, client_certificate_verify/6, certificate_verify/6, verify_signature/5, +-export([certify/10, client_certificate_verify/6, certificate_verify/6, verify_signature/5, master_secret/5, server_key_exchange_hash/2, verify_connection/6, init_handshake_history/0, update_handshake_history/2, verify_server_key/5 ]). @@ -149,7 +149,7 @@ client_hello_extensions(Host, Version, CipherSuites, SslOpts, ConnectionStates, certificate(OwnCert, CertDbHandle, CertDbRef, client) -> Chain = case ssl_certificate:certificate_chain(OwnCert, CertDbHandle, CertDbRef) of - {ok, CertChain} -> + {ok, _, CertChain} -> CertChain; {error, _} -> %% If no suitable certificate is available, the client @@ -161,7 +161,7 @@ certificate(OwnCert, CertDbHandle, CertDbRef, client) -> certificate(OwnCert, CertDbHandle, CertDbRef, server) -> case ssl_certificate:certificate_chain(OwnCert, CertDbHandle, CertDbRef) of - {ok, Chain} -> + {ok, _, Chain} -> #certificate{asn1_certificates = Chain}; {error, _} -> ?ALERT_REC(?FATAL, ?INTERNAL_ERROR) @@ -383,49 +383,24 @@ verify_signature(_Version, Hash, {HashAlgo, ecdsa}, Signature, %%-------------------------------------------------------------------- -spec certify(#certificate{}, db_handle(), certdb_ref(), integer() | nolimit, - verify_peer | verify_none, {fun(), term}, fun(), + verify_peer | verify_none, {fun(), term}, fun(), term(), term(), client | server) -> {der_cert(), public_key_info()} | #alert{}. 
%% %% Description: Handles a certificate handshake message %%-------------------------------------------------------------------- certify(#certificate{asn1_certificates = ASN1Certs}, CertDbHandle, CertDbRef, - MaxPathLen, _Verify, VerifyFunAndState, PartialChain, Role) -> + MaxPathLen, _Verify, ValidationFunAndState0, PartialChain, CRLCheck, CRLDbHandle, Role) -> [PeerCert | _] = ASN1Certs, - - ValidationFunAndState = - case VerifyFunAndState of - undefined -> - {fun(OtpCert, ExtensionOrVerifyResult, SslState) -> - ssl_certificate:validate_extension(OtpCert, - ExtensionOrVerifyResult, SslState) - end, Role}; - {Fun, UserState0} -> - {fun(OtpCert, {extension, _} = Extension, {SslState, UserState}) -> - case ssl_certificate:validate_extension(OtpCert, - Extension, - SslState) of - {valid, NewSslState} -> - {valid, {NewSslState, UserState}}; - {fail, Reason} -> - apply_user_fun(Fun, OtpCert, Reason, UserState, - SslState); - {unknown, _} -> - apply_user_fun(Fun, OtpCert, - Extension, UserState, SslState) - end; - (OtpCert, VerifyResult, {SslState, UserState}) -> - apply_user_fun(Fun, OtpCert, VerifyResult, UserState, - SslState) - end, {Role, UserState0}} - end, + + ValidationFunAndState = validation_fun_and_state(ValidationFunAndState0, Role, + CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle), try - {TrustedErlCert, CertPath} = + {TrustedCert, CertPath} = ssl_certificate:trusted_cert_and_path(ASN1Certs, CertDbHandle, CertDbRef, PartialChain), - case public_key:pkix_path_validation(TrustedErlCert, - CertPath, - [{max_path_length, - MaxPathLen}, + case public_key:pkix_path_validation(TrustedCert, + CertPath, + [{max_path_length, MaxPathLen}, {verify_fun, ValidationFunAndState}]) of {ok, {PublicKeyInfo,_}} -> {PeerCert, PublicKeyInfo}; @@ -1374,15 +1349,66 @@ sni1(Hostname) -> %%-------------------------------------------------------------------- %%% Internal functions %%-------------------------------------------------------------------- +validation_fun_and_state({Fun, UserState0}, Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle) -> + {fun(OtpCert, {extension, _} = Extension, {SslState, UserState}) -> + case ssl_certificate:validate(OtpCert, + Extension, + SslState) of + {valid, NewSslState} -> + {valid, {NewSslState, UserState}}; + {fail, Reason} -> + apply_user_fun(Fun, OtpCert, Reason, UserState, + SslState); + {unknown, _} -> + apply_user_fun(Fun, OtpCert, + Extension, UserState, SslState) + end; + (OtpCert, VerifyResult, {SslState, UserState}) -> + apply_user_fun(Fun, OtpCert, VerifyResult, UserState, + SslState) + end, {{Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle}, UserState0}}; +validation_fun_and_state(undefined, Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle) -> + {fun(OtpCert, {extension, _} = Extension, SslState) -> + ssl_certificate:validate(OtpCert, + Extension, + SslState); + (OtpCert, VerifyResult, SslState) when (VerifyResult == valid) or (VerifyResult == valid_peer) -> + case crl_check(OtpCert, CRLCheck, CertDbHandle, CertDbRef, CRLDbHandle, VerifyResult) of + valid -> + {VerifyResult, SslState}; + Reason -> + {fail, Reason} + end; + (OtpCert, VerifyResult, SslState) -> + ssl_certificate:validate(OtpCert, + VerifyResult, + SslState) + end, {Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle}}. 
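Editor's note: when a user verify_fun is supplied, the apply_user_fun clauses that follow feed both path-validation events and CRL outcomes through it. A sketch of such a fun, matching the event shapes used above ({bad_cert, {revoked, _}}, {bad_cert, revocation_status_undetermined}, {extension, _}, valid, valid_peer); how strict to be about an undetermined revocation status is a policy choice:

    VerifyFun =
        {fun(_Cert, {bad_cert, {revoked, _}} = Event, _UserState) ->
                 {fail, Event};                      %% reject revoked peers
            (_Cert, {bad_cert, revocation_status_undetermined}, UserState) ->
                 {valid, UserState};                 %% tolerate missing CRLs
            (_Cert, {bad_cert, _} = Event, _UserState) ->
                 {fail, Event};
            (_Cert, {extension, _}, UserState) ->
                 {unknown, UserState};
            (_Cert, valid, UserState) ->
                 {valid, UserState};
            (_Cert, valid_peer, UserState) ->
                 {valid, UserState}
         end, []}.

It would be passed in the verify_fun option together with {verify, verify_peer} and a crl_check setting.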
+ +apply_user_fun(Fun, OtpCert, VerifyResult, UserState0, + {_, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle} = SslState) when + (VerifyResult == valid) or (VerifyResult == valid_peer) -> + case Fun(OtpCert, VerifyResult, UserState0) of + {Valid, UserState} when (Valid == valid) or (Valid == valid_peer) -> + case crl_check(OtpCert, CRLCheck, CertDbHandle, CertDbRef, CRLDbHandle, VerifyResult) of + valid -> + {Valid, {SslState, UserState}}; + Result -> + apply_user_fun(Fun, OtpCert, Result, UserState, SslState) + end; + {fail, _} = Fail -> + Fail + end; apply_user_fun(Fun, OtpCert, ExtensionOrError, UserState0, SslState) -> case Fun(OtpCert, ExtensionOrError, UserState0) of - {valid, UserState} -> - {valid, {SslState, UserState}}; + {Valid, UserState} when (Valid == valid) or (Valid == valid_peer)-> + {Valid, {SslState, UserState}}; {fail, _} = Fail -> Fail; {unknown, UserState} -> {unknown, {SslState, UserState}} end. + path_validation_alert({bad_cert, cert_expired}) -> ?ALERT_REC(?FATAL, ?CERTIFICATE_EXPIRED); path_validation_alert({bad_cert, invalid_issuer}) -> @@ -1393,8 +1419,10 @@ path_validation_alert({bad_cert, name_not_permitted}) -> ?ALERT_REC(?FATAL, ?BAD_CERTIFICATE); path_validation_alert({bad_cert, unknown_critical_extension}) -> ?ALERT_REC(?FATAL, ?UNSUPPORTED_CERTIFICATE); -path_validation_alert({bad_cert, cert_revoked}) -> +path_validation_alert({bad_cert, {revoked, _}}) -> ?ALERT_REC(?FATAL, ?CERTIFICATE_REVOKED); +path_validation_alert({bad_cert, revocation_status_undetermined}) -> + ?ALERT_REC(?FATAL, ?BAD_CERTIFICATE); path_validation_alert({bad_cert, selfsigned_peer}) -> ?ALERT_REC(?FATAL, ?BAD_CERTIFICATE); path_validation_alert({bad_cert, unknown_ca}) -> @@ -1954,3 +1982,70 @@ handle_psk_identity(_PSKIdentity, LookupFun) error; handle_psk_identity(PSKIdentity, {Fun, UserState}) -> Fun(psk, PSKIdentity, UserState). + +crl_check(_, false, _,_,_, _) -> + valid; +crl_check(_, peer, _, _,_, valid) -> %% Do not check CAs with this option. + valid; +crl_check(OtpCert, Check, CertDbHandle, CertDbRef, {Callback, CRLDbHandle}, _) -> + Options = [{issuer_fun, {fun(_DP, CRL, Issuer, DBInfo) -> + ssl_crl:trusted_cert_and_path(CRL, Issuer, DBInfo) + end, {CertDbHandle, CertDbRef}}}, + {update_crl, fun(DP, CRL) -> Callback:fresh_crl(DP, CRL) end} + ], + case dps_and_crls(OtpCert, Callback, CRLDbHandle, ext) of + no_dps -> + case dps_and_crls(OtpCert, Callback, CRLDbHandle, same_issuer) of + [] -> + valid; %% No relevant CRL existed + Dps -> + crl_check_same_issuer(OtpCert, Check, Dps, Options) + end; + Dps -> %% This DP list may be empty if relevant CRLs existed + %% but could not be retrived, will result in {bad_cert, revocation_status_undetermined} + case public_key:pkix_crls_validate(OtpCert, Dps, Options) of + {bad_cert, revocation_status_undetermined} -> + crl_check_same_issuer(OtpCert, Check, dps_and_crls(OtpCert, Callback, + CRLDbHandle, same_issuer), Options); + Other -> + Other + end + end. + +crl_check_same_issuer(OtpCert, best_effort, Dps, Options) -> + case public_key:pkix_crls_validate(OtpCert, Dps, Options) of + {bad_cert, revocation_status_undetermined} -> + valid; + Other -> + Other + end; +crl_check_same_issuer(OtpCert, _, Dps, Options) -> + public_key:pkix_crls_validate(OtpCert, Dps, Options). 
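Editor's note: the crl_check and crl_check_same_issuer clauses above implement the different checking modes. A sketch of how they are selected per connection; the function name, port and file name are placeholders:

    %% CrlMode is false | true | peer | best_effort:
    %%   peer        - only the peer certificate is checked,
    %%   best_effort - an undetermined revocation status is accepted as valid,
    %%   true        - the whole chain is checked and an undetermined status
    %%                 fails the handshake.
    connect_with_crl(Host, CrlMode) ->
        ssl:connect(Host, 443,
                    [{verify, verify_peer},
                     {cacertfile, "cacerts.pem"},
                     {crl_check, CrlMode},
                     {crl_cache, {ssl_crl_cache, {internal, [{http, 5000}]}}}]).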
+ +dps_and_crls(OtpCert, Callback, CRLDbHandle, ext) -> + case public_key:pkix_dist_points(OtpCert) of + [] -> + no_dps; + DistPoints -> + distpoints_lookup(DistPoints, Callback, CRLDbHandle) + end; + +dps_and_crls(OtpCert, Callback, CRLDbHandle, same_issuer) -> + DP = #'DistributionPoint'{distributionPoint = {fullName, GenNames}} = + public_key:pkix_dist_point(OtpCert), + CRLs = lists:flatmap(fun({directoryName, Issuer}) -> + Callback:select(Issuer, CRLDbHandle); + (_) -> + [] + end, GenNames), + [{DP, {CRL, public_key:der_decode('CertificateList', CRL)}} || CRL <- CRLs]. + +distpoints_lookup([], _, _) -> + []; +distpoints_lookup([DistPoint | Rest], Callback, CRLDbHandle) -> + case Callback:lookup(DistPoint, CRLDbHandle) of + not_available -> + distpoints_lookup(Rest, Callback, CRLDbHandle); + CRLs -> + [{DistPoint, {CRL, public_key:der_decode('CertificateList', CRL)}} || CRL <- CRLs] + end. diff --git a/lib/ssl/src/ssl_internal.hrl b/lib/ssl/src/ssl_internal.hrl index 3cf6020169..8df79f9e8c 100644 --- a/lib/ssl/src/ssl_internal.hrl +++ b/lib/ssl/src/ssl_internal.hrl @@ -61,6 +61,8 @@ -define(CDR_HDR_SIZE, 12). -define(DEFAULT_TIMEOUT, 5000). +-define(NO_DIST_POINT, "http://dummy/no_distribution_point"). +-define(NO_DIST_POINT_PATH, "dummy/no_distribution_point"). %% Common enumerate values in for SSL-protocols -define(NULL, 0). @@ -121,7 +123,10 @@ %% Should the server prefer its own cipher order over the one provided by %% the client? honor_cipher_order = false, - padding_check = true + padding_check = true, + fallback = false, + crl_check, + crl_cache }). -record(socket_options, diff --git a/lib/ssl/src/ssl_manager.erl b/lib/ssl/src/ssl_manager.erl index bf0333ba8d..9c4b2a8bad 100644 --- a/lib/ssl/src/ssl_manager.erl +++ b/lib/ssl/src/ssl_manager.erl @@ -26,10 +26,11 @@ %% Internal application API -export([start_link/1, start_link_dist/1, - connection_init/2, cache_pem_file/2, + connection_init/3, cache_pem_file/2, lookup_trusted_cert/4, new_session_id/1, clean_cert_db/2, register_session/2, register_session/3, invalidate_session/2, + insert_crls/2, insert_crls/3, delete_crls/1, delete_crls/2, invalidate_session/3, invalidate_pem/1, clear_pem_cache/0, manager_name/1]). % Spawn export @@ -100,19 +101,19 @@ start_link_dist(Opts) -> gen_server:start_link({local, DistMangerName}, ?MODULE, [DistMangerName, Opts], []). %%-------------------------------------------------------------------- --spec connection_init(binary()| {der, list()}, client | server) -> +-spec connection_init(binary()| {der, list()}, client | server, {Cb :: atom(), Handle:: term()}) -> {ok, certdb_ref(), db_handle(), db_handle(), db_handle(), db_handle()}. %% %% Description: Do necessary initializations for a new connection. %%-------------------------------------------------------------------- -connection_init({der, _} = Trustedcerts, Role) -> - call({connection_init, Trustedcerts, Role}); +connection_init({der, _} = Trustedcerts, Role, CRLCache) -> + call({connection_init, Trustedcerts, Role, CRLCache}); -connection_init(<<>> = Trustedcerts, Role) -> - call({connection_init, Trustedcerts, Role}); +connection_init(<<>> = Trustedcerts, Role, CRLCache) -> + call({connection_init, Trustedcerts, Role, CRLCache}); -connection_init(Trustedcerts, Role) -> - call({connection_init, Trustedcerts, Role}). +connection_init(Trustedcerts, Role, CRLCache) -> + call({connection_init, Trustedcerts, Role, CRLCache}). 
%%-------------------------------------------------------------------- -spec cache_pem_file(binary(), term()) -> {ok, term()} | {error, reason()}. @@ -124,7 +125,7 @@ cache_pem_file(File, DbHandle) -> [{Content,_}] -> {ok, Content}; [Content] -> - {ok, Content}; + {ok, Content}; undefined -> call({cache_pem, File}) end. @@ -193,11 +194,28 @@ invalidate_session(Host, Port, Session) -> invalidate_session(Port, Session) -> cast({invalidate_session, Port, Session}). - -spec invalidate_pem(File::binary()) -> ok. invalidate_pem(File) -> cast({invalidate_pem, File}). +insert_crls(Path, CRLs)-> + insert_crls(Path, CRLs, normal). +insert_crls(?NO_DIST_POINT_PATH = Path, CRLs, ManagerType)-> + put(ssl_manager, manager_name(ManagerType)), + cast({insert_crls, Path, CRLs}); +insert_crls(Path, CRLs, ManagerType)-> + put(ssl_manager, manager_name(ManagerType)), + call({insert_crls, Path, CRLs}). + +delete_crls(Path)-> + delete_crls(Path, normal). +delete_crls(?NO_DIST_POINT_PATH = Path, ManagerType)-> + put(ssl_manager, manager_name(ManagerType)), + cast({delete_crls, Path}); +delete_crls(Path, ManagerType)-> + put(ssl_manager, manager_name(ManagerType)), + call({delete_crls, Path}). + %%==================================================================== %% gen_server callbacks %%==================================================================== @@ -245,50 +263,38 @@ init([Name, Opts]) -> %% %% Description: Handling call messages %%-------------------------------------------------------------------- -handle_call({{connection_init, <<>>, client}, _Pid}, _From, - #state{certificate_db = [CertDb, FileRefDb, PemChace], - session_cache_client = Cache} = State) -> - Result = {ok, make_ref(),CertDb, FileRefDb, PemChace, Cache}, - {reply, Result, State}; -handle_call({{connection_init, <<>>, server}, _Pid}, _From, - #state{certificate_db = [CertDb, FileRefDb, PemChace], - session_cache_server = Cache} = State) -> - Result = {ok, make_ref(),CertDb, FileRefDb, PemChace, Cache}, - {reply, Result, State}; - -handle_call({{connection_init, Trustedcerts, client}, Pid}, _From, - #state{certificate_db = [CertDb, FileRefDb, PemChace] = Db, - session_cache_client = Cache} = State) -> - Result = - try - {ok, Ref} = ssl_pkix_db:add_trusted_certs(Pid, Trustedcerts, Db), - {ok, Ref, CertDb, FileRefDb, PemChace, Cache} - catch - _:Reason -> - {error, Reason} - end, - {reply, Result, State}; -handle_call({{connection_init, Trustedcerts, server}, Pid}, _From, - #state{certificate_db = [CertDb, FileRefDb, PemChace] = Db, - session_cache_server = Cache} = State) -> - Result = - try - {ok, Ref} = ssl_pkix_db:add_trusted_certs(Pid, Trustedcerts, Db), - {ok, Ref, CertDb, FileRefDb, PemChace, Cache} - catch - _:Reason -> - {error, Reason} - end, - {reply, Result, State}; - - -handle_call({{new_session_id,Port}, _}, +handle_call({{connection_init, <<>>, Role, {CRLCb, UserCRLDb}}, _Pid}, _From, + #state{certificate_db = [CertDb, FileRefDb, PemChace | _] = Db} = State) -> + Ref = make_ref(), + Result = {ok, Ref, CertDb, FileRefDb, PemChace, session_cache(Role, State), {CRLCb, crl_db_info(Db, UserCRLDb)}}, + {reply, Result, State#state{certificate_db = Db}}; + +handle_call({{connection_init, Trustedcerts, Role, {CRLCb, UserCRLDb}}, Pid}, _From, + #state{certificate_db = [CertDb, FileRefDb, PemChace | _] = Db} = State) -> + case add_trusted_certs(Pid, Trustedcerts, Db) of + {ok, Ref} -> + {reply, {ok, Ref, CertDb, FileRefDb, PemChace, session_cache(Role, State), + {CRLCb, crl_db_info(Db, UserCRLDb)}}, State}; + {error, _} = Error -> + 
{reply, Error, State} + end; + +handle_call({{insert_crls, Path, CRLs}, _}, _From, + #state{certificate_db = Db} = State) -> + ssl_pkix_db:add_crls(Db, Path, CRLs), + {reply, ok, State}; + +handle_call({{delete_crls, CRLsOrPath}, _}, _From, + #state{certificate_db = Db} = State) -> + ssl_pkix_db:remove_crls(Db, CRLsOrPath), + {reply, ok, State}; + +handle_call({{new_session_id, Port}, _}, _, #state{session_cache_cb = CacheCb, session_cache_server = Cache} = State) -> Id = new_id(Port, ?GEN_UNIQUE_ID_MAX_TRIES, Cache, CacheCb), {reply, Id, State}; - handle_call({{cache_pem,File}, _Pid}, _, #state{certificate_db = Db} = State) -> try ssl_pkix_db:cache_pem_file(File, Db) of @@ -298,7 +304,7 @@ handle_call({{cache_pem,File}, _Pid}, _, _:Reason -> {reply, {error, Reason}, State} end; -handle_call({unconditionally_clear_pem_cache, _},_, #state{certificate_db = [_,_,PemChace]} = State) -> +handle_call({unconditionally_clear_pem_cache, _},_, #state{certificate_db = [_,_,PemChace | _]} = State) -> ssl_pkix_db:clear(PemChace), {reply, ok, State}. @@ -344,8 +350,19 @@ handle_cast({invalidate_session, Port, #session{session_id = ID} = Session}, session_cache_cb = CacheCb} = State) -> invalidate_session(Cache, CacheCb, {Port, ID}, Session, State); + +handle_cast({insert_crls, Path, CRLs}, + #state{certificate_db = Db} = State) -> + ssl_pkix_db:add_crls(Db, Path, CRLs), + {noreply, State}; + +handle_cast({delete_crls, CRLsOrPath}, + #state{certificate_db = Db} = State) -> + ssl_pkix_db:remove_crls(Db, CRLsOrPath), + {noreply, State}; + handle_cast({invalidate_pem, File}, - #state{certificate_db = [_, _, PemCache]} = State) -> + #state{certificate_db = [_, _, PemCache | _]} = State) -> ssl_pkix_db:remove(File, PemCache), {noreply, State}. @@ -374,7 +391,7 @@ handle_info({delayed_clean_session, Key, Cache}, #state{session_cache_cb = Cache CacheCb:delete(Cache, Key), {noreply, State}; -handle_info(clear_pem_cache, #state{certificate_db = [_,_,PemChace], +handle_info(clear_pem_cache, #state{certificate_db = [_,_,PemChace | _], clear_pem_cache = Interval, last_pem_check = CheckPoint} = State) -> NewCheckPoint = os:timestamp(), @@ -382,9 +399,8 @@ handle_info(clear_pem_cache, #state{certificate_db = [_,_,PemChace], erlang:send_after(Interval, self(), clear_pem_cache), {noreply, State#state{last_pem_check = NewCheckPoint}}; - handle_info({clean_cert_db, Ref, File}, - #state{certificate_db = [CertDb,RefDb, PemCache]} = State) -> + #state{certificate_db = [CertDb,RefDb, PemCache | _]} = State) -> case ssl_pkix_db:lookup(Ref, RefDb) of undefined -> %% Alredy cleaned @@ -606,3 +622,21 @@ is_before_checkpoint(Time, CheckPoint) -> calendar:datetime_to_gregorian_seconds(calendar:now_to_datetime(CheckPoint)) - calendar:datetime_to_gregorian_seconds(Time) > 0. +add_trusted_certs(Pid, Trustedcerts, Db) -> + try + ssl_pkix_db:add_trusted_certs(Pid, Trustedcerts, Db) + catch + _:Reason -> + {error, Reason} + end. + +session_cache(client, #state{session_cache_client = Cache}) -> + Cache; +session_cache(server, #state{session_cache_server = Cache}) -> + Cache. + +crl_db_info([_,_,_,Local], {internal, Info}) -> + {Local, Info}; +crl_db_info(_, UserCRLDb) -> + UserCRLDb. + diff --git a/lib/ssl/src/ssl_pkix_db.erl b/lib/ssl/src/ssl_pkix_db.erl index 8531445ba4..d7b7e3eae3 100644 --- a/lib/ssl/src/ssl_pkix_db.erl +++ b/lib/ssl/src/ssl_pkix_db.erl @@ -27,9 +27,9 @@ -include_lib("public_key/include/public_key.hrl"). -include_lib("kernel/include/file.hrl"). 
--export([create/0, remove/1, add_trusted_certs/3, +-export([create/0, add_crls/3, remove_crls/2, remove/1, add_trusted_certs/3, remove_trusted_certs/2, insert/3, remove/2, clear/1, db_size/1, - ref_count/3, lookup_trusted_cert/4, foldl/3, + ref_count/3, lookup_trusted_cert/4, foldl/3, select_cert_by_issuer/2, lookup_cached_pem/2, cache_pem_file/2, cache_pem_file/3, lookup/2]). @@ -51,16 +51,24 @@ create() -> ets:new(ssl_otp_cacertificate_db, [set, public]), %% Let connection processes call ref_count/3 directly ets:new(ssl_otp_ca_file_ref, [set, public]), - ets:new(ssl_otp_pem_cache, [set, protected]) + ets:new(ssl_otp_pem_cache, [set, protected]), + %% Default cache + {ets:new(ssl_otp_crl_cache, [set, protected]), + ets:new(ssl_otp_crl_issuer_mapping, [bag, protected])} ]. %%-------------------------------------------------------------------- --spec remove([db_handle()]) -> ok. +-spec remove([db_handle()]) -> ok. %% %% Description: Removes database db %%-------------------------------------------------------------------- remove(Dbs) -> - lists:foreach(fun(Db) -> + lists:foreach(fun({Db0, Db1}) -> + true = ets:delete(Db0), + true = ets:delete(Db1); + (undefined) -> + ok; + (Db) -> true = ets:delete(Db) end, Dbs). @@ -81,7 +89,7 @@ lookup_trusted_cert(DbHandle, Ref, SerialNumber, Issuer) -> {ok, Certs} end. -lookup_cached_pem([_, _, PemChache], File) -> +lookup_cached_pem([_, _, PemChache | _], File) -> lookup_cached_pem(PemChache, File); lookup_cached_pem(PemChache, File) -> lookup(File, PemChache). @@ -94,12 +102,12 @@ lookup_cached_pem(PemChache, File) -> %% runtime database. Returns Ref that should be handed to lookup_trusted_cert %% together with the cert serialnumber and issuer. %%-------------------------------------------------------------------- -add_trusted_certs(_Pid, {der, DerList}, [CerDb, _,_]) -> +add_trusted_certs(_Pid, {der, DerList}, [CertDb, _,_ | _]) -> NewRef = make_ref(), - add_certs_from_der(DerList, NewRef, CerDb), + add_certs_from_der(DerList, NewRef, CertDb), {ok, NewRef}; -add_trusted_certs(_Pid, File, [CertsDb, RefDb, PemChache] = Db) -> +add_trusted_certs(_Pid, File, [CertsDb, RefDb, PemChache | _] = Db) -> case lookup_cached_pem(Db, File) of [{_Content, Ref}] -> ref_count(Ref, RefDb, 1), @@ -118,14 +126,15 @@ add_trusted_certs(_Pid, File, [CertsDb, RefDb, PemChache] = Db) -> %% Description: Cache file as binary in DB %%-------------------------------------------------------------------- -spec cache_pem_file(binary(), [db_handle()]) -> {ok, term()}. -cache_pem_file(File, [_CertsDb, _RefDb, PemChache]) -> +cache_pem_file(File, [_CertsDb, _RefDb, PemChache | _]) -> {ok, PemBin} = file:read_file(File), Content = public_key:pem_decode(PemBin), insert(File, Content, PemChache), {ok, Content}. + -spec cache_pem_file(reference(), binary(), [db_handle()]) -> {ok, term()}. -cache_pem_file(Ref, File, [_CertsDb, _RefDb, PemChache]) -> +cache_pem_file(Ref, File, [_CertsDb, _RefDb, PemChache| _]) -> {ok, PemBin} = file:read_file(File), Content = public_key:pem_decode(PemBin), insert(File, {Content, Ref}, PemChache), @@ -149,6 +158,15 @@ remove(Key, Db) -> ok. %%-------------------------------------------------------------------- +-spec remove(term(), term(), db_handle()) -> ok. +%% +%% Description: Removes an element in a <Db>. +%%-------------------------------------------------------------------- +remove(Key, Data, Db) -> + ets:delete_object(Db, {Key, Data}), + ok. 
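Editor's note: create/0 above now also sets up two CRL tables, a cache keyed on the distribution-point path and a mapping keyed on the CRL issuer name. A sketch of how that issuer key can be derived from a DER encoded CRL (the module and function names are ours; the records come from public_key):

    -module(crl_util).
    -include_lib("public_key/include/public_key.hrl").
    -export([issuer_of/1]).

    %% Issuer name of a DER encoded CRL, usable as the key of the
    %% issuer-to-CRL mapping table.
    issuer_of(DerCRL) ->
        #'CertificateList'{tbsCertList = TBS} =
            public_key:der_decode('CertificateList', DerCRL),
        TBS#'TBSCertList'.issuer.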
+ +%%-------------------------------------------------------------------- -spec lookup(term(), db_handle()) -> [term()] | undefined. %% %% Description: Looks up an element in a <Db>. @@ -175,6 +193,10 @@ lookup(Key, Db) -> foldl(Fun, Acc0, Cache) -> ets:foldl(Fun, Acc0, Cache). + +select_cert_by_issuer(Cache, Issuer) -> + ets:select(Cache, [{{{'_','_', Issuer},{'_', '$1'}},[],['$$']}]). + %%-------------------------------------------------------------------- -spec ref_count(term(), db_handle(), integer()) -> integer(). %% @@ -244,9 +266,39 @@ add_certs(Cert, Ref, CertsDb) -> error_logger:info_report(Report) end. -new_trusted_cert_entry(File, [CertsDb, RefDb, _] = Db) -> +new_trusted_cert_entry(File, [CertsDb, RefDb, _ | _] = Db) -> Ref = make_ref(), update_counter(Ref, 1, RefDb), {ok, Content} = cache_pem_file(Ref, File, Db), add_certs_from_pem(Content, Ref, CertsDb), {ok, Ref}. + +add_crls([_,_,_, {_, Mapping} | _], ?NO_DIST_POINT, CRLs) -> + [add_crls(CRL, Mapping) || CRL <- CRLs]; +add_crls([_,_,_, {Cache, Mapping} | _], Path, CRLs) -> + insert(Path, CRLs, Cache), + [add_crls(CRL, Mapping) || CRL <- CRLs]. + +add_crls(CRL, Mapping) -> + insert(crl_issuer(CRL), CRL, Mapping). + +remove_crls([_,_,_, {_, Mapping} | _], {?NO_DIST_POINT, CRLs}) -> + [rm_crls(CRL, Mapping) || CRL <- CRLs]; + +remove_crls([_,_,_, {Cache, Mapping} | _], Path) -> + case lookup(Path, Cache) of + undefined -> + ok; + CRLs -> + remove(Path, Cache), + [rm_crls(CRL, Mapping) || CRL <- CRLs] + end. + +rm_crls(CRL, Mapping) -> + remove(crl_issuer(CRL), CRL, Mapping). + +crl_issuer(DerCRL) -> + CRL = public_key:der_decode('CertificateList', DerCRL), + TBSCRL = CRL#'CertificateList'.tbsCertList, + TBSCRL#'TBSCertList'.issuer. + diff --git a/lib/ssl/src/tls_handshake.erl b/lib/ssl/src/tls_handshake.erl index 183cabcfcd..b0b6d5a8e3 100644 --- a/lib/ssl/src/tls_handshake.erl +++ b/lib/ssl/src/tls_handshake.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2007-2014. All Rights Reserved. +%% Copyright Ericsson AB 2007-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -28,6 +28,7 @@ -include("tls_record.hrl"). -include("ssl_alert.hrl"). -include("ssl_internal.hrl"). +-include("ssl_cipher.hrl"). -include_lib("public_key/include/public_key.hrl"). 
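A small self-contained sketch, not part of the patch, of the issuer-to-CRL mapping used above: CRLs are kept in a bag table keyed by the issuer name decoded from the DER data, so several CRLs from the same issuer come back in a single lookup. Module, table and function names are invented for the example.

    -module(crl_mapping_sketch).
    -include_lib("public_key/include/public_key.hrl").
    -export([new/1, by_issuer/2]).

    %% Build a bag table mapping Issuer -> DER-encoded CRL.
    new(DerCRLs) ->
        Mapping = ets:new(example_crl_issuer_mapping, [bag]),
        [true = ets:insert(Mapping, {crl_issuer(CRL), CRL}) || CRL <- DerCRLs],
        Mapping.

    %% All cached CRLs issued by Issuer (an RDNSequence, as in the patch).
    by_issuer(Mapping, Issuer) ->
        [CRL || {_, CRL} <- ets:lookup(Mapping, Issuer)].

    crl_issuer(DerCRL) ->
        CRL = public_key:der_decode('CertificateList', DerCRL),
        (CRL#'CertificateList'.tbsCertList)#'TBSCertList'.issuer.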
-export([client_hello/8, hello/4, @@ -47,22 +48,28 @@ %%-------------------------------------------------------------------- client_hello(Host, Port, ConnectionStates, #ssl_options{versions = Versions, - ciphers = UserSuites + ciphers = UserSuites, + fallback = Fallback } = SslOpts, Cache, CacheCb, Renegotiation, OwnCert) -> Version = tls_record:highest_protocol_version(Versions), Pending = ssl_record:pending_connection_state(ConnectionStates, read), SecParams = Pending#connection_state.security_parameters, - CipherSuites = ssl_handshake:available_suites(UserSuites, Version), + AvailableCipherSuites = ssl_handshake:available_suites(UserSuites, Version), Extensions = ssl_handshake:client_hello_extensions(Host, Version, - CipherSuites, + AvailableCipherSuites, SslOpts, ConnectionStates, Renegotiation), - - Id = ssl_session:client_id({Host, Port, SslOpts}, Cache, CacheCb, OwnCert), - + CipherSuites = + case Fallback of + true -> + [?TLS_FALLBACK_SCSV | ssl_handshake:cipher_suites(AvailableCipherSuites, Renegotiation)]; + false -> + ssl_handshake:cipher_suites(AvailableCipherSuites, Renegotiation) + end, + Id = ssl_session:client_id({Host, Port, SslOpts}, Cache, CacheCb, OwnCert), #client_hello{session_id = Id, client_version = Version, - cipher_suites = ssl_handshake:cipher_suites(CipherSuites, Renegotiation), + cipher_suites = CipherSuites, compression_methods = ssl_record:compressions(), random = SecParams#security_parameters.client_random, extensions = Extensions @@ -96,33 +103,22 @@ hello(#server_hello{server_version = Version, random = Random, end; hello(#client_hello{client_version = ClientVersion, - session_id = SugesstedId, - cipher_suites = CipherSuites, - compression_methods = Compressions, - random = Random, - extensions = #hello_extensions{elliptic_curves = Curves} = HelloExt}, + cipher_suites = CipherSuites} = Hello, #ssl_options{versions = Versions} = SslOpts, - {Port, Session0, Cache, CacheCb, ConnectionStates0, Cert}, Renegotiation) -> + Info, Renegotiation) -> Version = ssl_handshake:select_version(tls_record, ClientVersion, Versions), - case tls_record:is_acceptable_version(Version, Versions) of - true -> - ECCCurve = ssl_handshake:select_curve(Curves, ssl_handshake:supported_ecc(Version)), - {Type, #session{cipher_suite = CipherSuite} = Session1} - = ssl_handshake:select_session(SugesstedId, CipherSuites, Compressions, - Port, Session0#session{ecc = ECCCurve}, Version, - SslOpts, Cache, CacheCb, Cert), - case CipherSuite of - no_suite -> - ?ALERT_REC(?FATAL, ?INSUFFICIENT_SECURITY); - _ -> - handle_client_hello_extensions(Version, Type, Random, CipherSuites, HelloExt, - SslOpts, Session1, ConnectionStates0, - Renegotiation) + case ssl_cipher:is_fallback(CipherSuites) of + true -> + Highest = tls_record:highest_protocol_version(Versions), + case tls_record:is_higher(Highest, Version) of + true -> + ?ALERT_REC(?FATAL, ?INAPPROPRIATE_FALLBACK); + false -> + handle_client_hello(Version, Hello, SslOpts, Info, Renegotiation) end; false -> - ?ALERT_REC(?FATAL, ?PROTOCOL_VERSION) + handle_client_hello(Version, Hello, SslOpts, Info, Renegotiation) end. - %%-------------------------------------------------------------------- -spec encode_handshake(tls_handshake(), tls_record:tls_version()) -> iolist(). 
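A minimal sketch, not part of the patch, of how a client is expected to use the new fallback option when retrying a failed handshake with a lower protocol version: if the server in fact supports something higher than what is offered, the TLS_FALLBACK_SCSV added above makes it abort with an "inappropriate fallback" alert instead of accepting the downgrade.

    %% Hypothetical retry helper; ssl:connect/3 and the fallback/versions
    %% options are real, the function name and the Opts argument are made up.
    retry_with_downgrade(Host, Port, Opts) ->
        ssl:connect(Host, Port, [{versions, ['tlsv1']}, {fallback, true} | Opts]).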
%% @@ -149,6 +145,32 @@ get_tls_handshake(Version, Data, Buffer) -> %%-------------------------------------------------------------------- %%% Internal functions %%-------------------------------------------------------------------- +handle_client_hello(Version, #client_hello{session_id = SugesstedId, + cipher_suites = CipherSuites, + compression_methods = Compressions, + random = Random, + extensions = #hello_extensions{elliptic_curves = Curves} = HelloExt}, + #ssl_options{versions = Versions} = SslOpts, + {Port, Session0, Cache, CacheCb, ConnectionStates0, Cert}, Renegotiation) -> + case tls_record:is_acceptable_version(Version, Versions) of + true -> + ECCCurve = ssl_handshake:select_curve(Curves, ssl_handshake:supported_ecc(Version)), + {Type, #session{cipher_suite = CipherSuite} = Session1} + = ssl_handshake:select_session(SugesstedId, CipherSuites, Compressions, + Port, Session0#session{ecc = ECCCurve}, Version, + SslOpts, Cache, CacheCb, Cert), + case CipherSuite of + no_suite -> + ?ALERT_REC(?FATAL, ?INSUFFICIENT_SECURITY); + _ -> + handle_client_hello_extensions(Version, Type, Random, CipherSuites, HelloExt, + SslOpts, Session1, ConnectionStates0, + Renegotiation) + end; + false -> + ?ALERT_REC(?FATAL, ?PROTOCOL_VERSION) + end. + get_tls_handshake_aux(Version, <<?BYTE(Type), ?UINT24(Length), Body:Length/binary,Rest/binary>>, Acc) -> Raw = <<?BYTE(Type), ?UINT24(Length), Body/binary>>, diff --git a/lib/ssl/src/tls_record.erl b/lib/ssl/src/tls_record.erl index 3d5c5c0da3..14a49ac7da 100644 --- a/lib/ssl/src/tls_record.erl +++ b/lib/ssl/src/tls_record.erl @@ -41,7 +41,7 @@ %% Protocol version handling -export([protocol_version/1, lowest_protocol_version/2, - highest_protocol_version/1, supported_protocol_versions/0, + highest_protocol_version/1, is_higher/2, supported_protocol_versions/0, is_acceptable_version/1, is_acceptable_version/2]). -export_type([tls_version/0, tls_atom_version/0]). @@ -278,6 +278,13 @@ highest_protocol_version(Version = {M,_}, [{N,_} | Rest]) when M > N -> highest_protocol_version(_, [Version | Rest]) -> highest_protocol_version(Version, Rest). +is_higher({M, N}, {M, O}) when N > O -> + true; +is_higher({M, _}, {N, _}) when M > N -> + true; +is_higher(_, _) -> + false. + %%-------------------------------------------------------------------- -spec supported_protocol_versions() -> [tls_version()]. %% diff --git a/lib/ssl/test/make_certs.erl b/lib/ssl/test/make_certs.erl index 15a7e118ff..77631f62d3 100644 --- a/lib/ssl/test/make_certs.erl +++ b/lib/ssl/test/make_certs.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2007-2012. All Rights Reserved. +%% Copyright Ericsson AB 2007-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -324,8 +324,9 @@ eval_cmd(Port, Cmd) -> ok end, receive - {Port, {exit_status, Status}} when Status /= 0 -> - %% io:fwrite("exit status: ~w~n", [Status]), + {Port, {exit_status, 0}} -> + ok; + {Port, {exit_status, Status}} -> exit({eval_cmd, Cmd, Status}) after 0 -> ok @@ -369,7 +370,7 @@ req_cnf(C) -> "subjectKeyIdentifier = hash\n" "subjectAltName = email:copy\n"]. -ca_cnf(C) -> +ca_cnf(C = #config{issuing_distribution_point = true}) -> ["# Purpose: Configuration for CAs.\n" "\n" "ROOTDIR = $ENV::ROOTDIR\n" @@ -446,5 +447,83 @@ ca_cnf(C) -> "subjectAltName = email:copy\n" "issuerAltName = issuer:copy\n" "crlDistributionPoints=@crl_section\n" - ]. 
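The new tls_record:is_higher/2 above compares {Major,Minor} record-protocol version tuples; a few quick checks, not part of the patch, show the intended ordering ({3,1} is TLS 1.0, {3,2} is TLS 1.1, {3,3} is TLS 1.2).

    true  = tls_record:is_higher({3,3}, {3,1}),
    true  = tls_record:is_higher({3,2}, {3,1}),
    false = tls_record:is_higher({3,1}, {3,1}),
    false = tls_record:is_higher({3,1}, {3,3}).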
+ ]; +ca_cnf(C = #config{issuing_distribution_point = false}) -> + ["# Purpose: Configuration for CAs.\n" + "\n" + "ROOTDIR = $ENV::ROOTDIR\n" + "default_ca = ca\n" + "\n" + + "[ca]\n" + "dir = $ROOTDIR/", C#config.commonName, "\n" + "certs = $dir/certs\n" + "crl_dir = $dir/crl\n" + "database = $dir/index.txt\n" + "new_certs_dir = $dir/newcerts\n" + "certificate = $dir/cert.pem\n" + "serial = $dir/serial\n" + "crl = $dir/crl.pem\n", + ["crlnumber = $dir/crlnumber\n" || C#config.v2_crls], + "private_key = $dir/private/key.pem\n" + "RANDFILE = $dir/private/RAND\n" + "\n" + "x509_extensions = user_cert\n", + ["crl_extensions = crl_ext\n" || C#config.v2_crls], + "unique_subject = no\n" + "default_days = 3600\n" + "default_md = md5\n" + "preserve = no\n" + "policy = policy_match\n" + "\n" + + "[policy_match]\n" + "commonName = supplied\n" + "organizationalUnitName = optional\n" + "organizationName = match\n" + "countryName = match\n" + "localityName = match\n" + "emailAddress = supplied\n" + "\n" + + "[crl_ext]\n" + "authorityKeyIdentifier=keyid:always,issuer:always\n", + %["issuingDistributionPoint=critical, @idpsec\n" || C#config.issuing_distribution_point], + + %"[idpsec]\n" + %"fullname=URI:http://localhost:8000/",C#config.commonName,"/crl.pem\n" + + "[user_cert]\n" + "basicConstraints = CA:false\n" + "keyUsage = nonRepudiation, digitalSignature, keyEncipherment\n" + "subjectKeyIdentifier = hash\n" + "authorityKeyIdentifier = keyid,issuer:always\n" + "subjectAltName = email:copy\n" + "issuerAltName = issuer:copy\n" + %"crlDistributionPoints=@crl_section\n" + + %%"[crl_section]\n" + %% intentionally invalid + %%"URI.1=http://localhost/",C#config.commonName,"/crl.pem\n" + %%"URI.2=http://localhost:",integer_to_list(C#config.crl_port),"/",C#config.commonName,"/crl.pem\n" + %%"\n" + + "[user_cert_digital_signature_only]\n" + "basicConstraints = CA:false\n" + "keyUsage = digitalSignature\n" + "subjectKeyIdentifier = hash\n" + "authorityKeyIdentifier = keyid,issuer:always\n" + "subjectAltName = email:copy\n" + "issuerAltName = issuer:copy\n" + "\n" + + "[ca_cert]\n" + "basicConstraints = critical,CA:true\n" + "keyUsage = cRLSign, keyCertSign\n" + "subjectKeyIdentifier = hash\n" + "authorityKeyIdentifier = keyid:always,issuer:always\n" + "subjectAltName = email:copy\n" + "issuerAltName = issuer:copy\n" + %"crlDistributionPoints=@crl_section\n" + ]. diff --git a/lib/ssl/test/ssl_basic_SUITE.erl b/lib/ssl/test/ssl_basic_SUITE.erl index 77ef8088b4..50d5fb411f 100644 --- a/lib/ssl/test/ssl_basic_SUITE.erl +++ b/lib/ssl/test/ssl_basic_SUITE.erl @@ -91,7 +91,8 @@ basic_tests() -> connect_twice, connect_dist, clear_pem_cache, - defaults + defaults, + fallback ]. 
options_tests() -> @@ -284,6 +285,14 @@ init_per_testcase(empty_protocol_versions, Config) -> ssl:start(), Config; +init_per_testcase(fallback, Config) -> + case tls_record:highest_protocol_version([]) of + {3, N} when N > 1 -> + Config; + _ -> + {skip, "Not relevant if highest supported version is less than 3.2"} + end; + %% init_per_testcase(different_ca_peer_sign, Config0) -> %% ssl_test_lib:make_mix_cert(Config0); @@ -629,7 +638,7 @@ clear_pem_cache(Config) when is_list(Config) -> {status, _, _, StatusInfo} = sys:get_status(whereis(ssl_manager)), [_, _,_, _, Prop] = StatusInfo, State = ssl_test_lib:state(Prop), - [_,FilRefDb, _] = element(6, State), + [_,FilRefDb |_] = element(6, State), {Server, Client} = basic_verify_test_no_close(Config), 2 = ets:info(FilRefDb, size), ssl:clear_pem_cache(), @@ -648,6 +657,34 @@ clear_pem_cache(Config) when is_list(Config) -> 0 = ets:info(FilRefDb, size). %%-------------------------------------------------------------------- + +fallback() -> + [{doc, "Test TLS_FALLBACK_SCSV downgrade prevention"}]. + +fallback(Config) when is_list(Config) -> + ClientOpts = ?config(client_opts, Config), + ServerOpts = ?config(server_opts, Config), + {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config), + + Server = + ssl_test_lib:start_server_error([{node, ServerNode}, {port, 0}, + {from, self()}, + {options, ServerOpts}]), + + Port = ssl_test_lib:inet_port(Server), + + Client = + ssl_test_lib:start_client_error([{node, ClientNode}, + {port, Port}, {host, Hostname}, + {from, self()}, {options, + [{fallback, true}, + {versions, ['tlsv1']} + | ClientOpts]}]), + + ssl_test_lib:check_result(Server, {error,{tls_alert,"inappropriate fallback"}}, + Client, {error,{tls_alert,"inappropriate fallback"}}). + +%%-------------------------------------------------------------------- peername() -> [{doc,"Test API function peername/1"}]. diff --git a/lib/ssl/test/ssl_crl_SUITE.erl b/lib/ssl/test/ssl_crl_SUITE.erl index bad0949ec4..c6bf8898ad 100644 --- a/lib/ssl/test/ssl_crl_SUITE.erl +++ b/lib/ssl/test/ssl_crl_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2008-2013. All Rights Reserved. +%% Copyright Ericsson AB 2008-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -26,43 +26,40 @@ -include_lib("common_test/include/ct.hrl"). -include_lib("public_key/include/public_key.hrl"). --define(TIMEOUT, 120000). -define(LONG_TIMEOUT, 600000). --define(SLEEP, 1000). --define(OPENSSL_RENEGOTIATE, "R\n"). --define(OPENSSL_QUIT, "Q\n"). --define(OPENSSL_GARBAGE, "P\n"). --define(EXPIRE, 10). %%-------------------------------------------------------------------- %% Common Test interface functions ----------------------------------- %%-------------------------------------------------------------------- -suite() -> [{ct_hooks,[ts_install_cth]}]. +suite() -> + [{ct_hooks,[ts_install_cth]}]. all() -> [ - {group, basic}, - {group, v1_crl}, - {group, idp_crl} + {group, check_true}, + {group, check_peer}, + {group, check_best_effort} ]. groups() -> - [{basic, [], basic_tests()}, - {v1_crl, [], v1_crl_tests()}, - {idp_crl, [], idp_crl_tests()}]. 
+ [ + {check_true, [], [{group, v2_crl}, + {group, v1_crl}, + {group, idp_crl}]}, + {check_peer, [], [{group, v2_crl}, + {group, v1_crl}, + {group, idp_crl}]}, + {check_best_effort, [], [{group, v2_crl}, + {group, v1_crl}, + {group, idp_crl}]}, + {v2_crl, [], basic_tests()}, + {v1_crl, [], basic_tests()}, + {idp_crl, [], basic_tests()}]. basic_tests() -> [crl_verify_valid, crl_verify_revoked]. -v1_crl_tests() -> - [crl_verify_valid, crl_verify_revoked]. - -idp_crl_tests() -> - [crl_verify_valid, crl_verify_revoked]. - -%%%================================================================ -%%% Suite init/end init_per_suite(Config0) -> Dog = ct:timetrap(?LONG_TIMEOUT *2), @@ -70,10 +67,7 @@ init_per_suite(Config0) -> false -> {skip, "Openssl not found"}; _ -> - TLSVersion = ?config(tls_version, Config0), OpenSSL_version = (catch os:cmd("openssl version")), - ct:log("TLS version: ~p~nOpenSSL version: ~p~n~n~p:module_info(): ~p~n~nssl:module_info(): ~p~n", - [TLSVersion, OpenSSL_version, ?MODULE, ?MODULE:module_info(), ssl:module_info()]), case ssl_test_lib:enough_openssl_crl_support(OpenSSL_version) of false -> {skip, io_lib:format("Bad openssl version: ~p",[OpenSSL_version])}; @@ -81,7 +75,6 @@ init_per_suite(Config0) -> catch crypto:stop(), try crypto:start() of ok -> - ssl:start(), {ok, Hostname0} = inet:gethostname(), IPfamily = case lists:member(list_to_atom(Hostname0), ct:get_config(ipv6_hosts,[])) of @@ -89,8 +82,7 @@ init_per_suite(Config0) -> false -> inet end, [{ipfamily,IPfamily}, {watchdog, Dog}, {openssl_version,OpenSSL_version} | Config0] - catch _C:_E -> - ct:log("crypto:start() caught ~p:~p",[_C,_E]), + catch _:_ -> {skip, "Crypto did not start"} end end @@ -100,443 +92,175 @@ end_per_suite(_Config) -> ssl:stop(), application:stop(crypto). -%%%================================================================ -%%% Group init/end - -init_per_group(Group, Config) -> - ssl:start(), - inets:start(), - CertDir = filename:join(?config(priv_dir, Config), Group), - DataDir = ?config(data_dir, Config), - ServerRoot = make_dir_path([?config(priv_dir,Config), Group, tmp]), - %% start a HTTP server to serve the CRLs - {ok, Httpd} = inets:start(httpd, [{ipfamily, ?config(ipfamily,Config)}, - {server_name, "localhost"}, {port, 0}, - {server_root, ServerRoot}, - {document_root, CertDir}, - {modules, [mod_get]} - ]), - [{port,Port}] = httpd:info(Httpd, [port]), - ct:log("~p:~p~nHTTPD IP family=~p, port=~p~n", [?MODULE, ?LINE, ?config(ipfamily,Config), Port]), - CertOpts = [{crl_port,Port}|cert_opts(Group)], - Result = make_certs:all(DataDir, CertDir, CertOpts), - ct:log("~p:~p~nmake_certs:all(~n DataDir=~p,~n CertDir=~p,~n ServerRoot=~p~n Opts=~p~n) returned ~p~n", [?MODULE,?LINE,DataDir, CertDir, ServerRoot, CertOpts, Result]), - [{make_cert_result, Result}, {cert_dir, CertDir}, {httpd, Httpd} | Config]. - -cert_opts(v1_crl) -> [{v2_crls, false}]; -cert_opts(idp_crl) -> [{issuing_distribution_point, true}]; -cert_opts(_) -> []. - -make_dir_path(PathComponents) -> - lists:foldl(fun(F,P0) -> file:make_dir(P=filename:join(P0,F)), P end, - "", - PathComponents). 
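The three new top-level groups correspond to the values the suite passes in the client's crl_check option. As a rough summary (the editor's, not the patch's): true fails the handshake if any CRL in the chain is missing, peer only checks the peer certificate, and best_effort checks what it can without failing on missing CRLs. A minimal sketch of such client options:

    %% Hypothetical helper; Check :: true | peer | best_effort.
    crl_client_opts(CaCertFile, Check) ->
        [{cacertfile, CaCertFile},
         {verify, verify_peer},
         {crl_check, Check}].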
- +init_per_group(check_true, Config) -> + [{crl_check, true} | Config]; +init_per_group(check_peer, Config) -> + [{crl_check, peer} | Config]; +init_per_group(check_best_effort, Config) -> + [{crl_check, best_effort} | Config]; +init_per_group(Group, Config0) -> + case is_idp(Group) of + true -> + [{idp_crl, true} | Config0]; + false -> + DataDir = ?config(data_dir, Config0), + CertDir = filename:join(?config(priv_dir, Config0), Group), + {CertOpts, Config} = init_certs(CertDir, Group, Config0), + Result = make_certs:all(DataDir, CertDir, CertOpts), + [{make_cert_result, Result}, {cert_dir, CertDir}, {idp_crl, false} | Config] + end. end_per_group(_GroupName, Config) -> - case ?config(httpd, Config) of - undefined -> ok; - Pid -> - ct:log("Stop httpd ~p",[Pid]), - ok = inets:stop(httpd, Pid) - ,ct:log("Stopped",[]) - end, - inets:stop(), + Config. +init_per_testcase(Case, Config0) -> + case ?config(idp_crl, Config0) of + true -> + end_per_testcase(Case, Config0), + inets:start(), + ssl:start(), + ServerRoot = make_dir_path([?config(priv_dir, Config0), idp_crl, tmp]), + %% start a HTTP server to serve the CRLs + {ok, Httpd} = inets:start(httpd, [{ipfamily, ?config(ipfamily, Config0)}, + {server_name, "localhost"}, {port, 0}, + {server_root, ServerRoot}, + {document_root, + filename:join(?config(priv_dir, Config0), idp_crl)} + ]), + [{port,Port}] = httpd:info(Httpd, [port]), + Config = [{httpd_port, Port} | Config0], + DataDir = ?config(data_dir, Config), + CertDir = filename:join(?config(priv_dir, Config0), idp_crl), + {CertOpts, Config} = init_certs(CertDir, idp_crl, Config), + Result = make_certs:all(DataDir, CertDir, CertOpts), + [{make_cert_result, Result}, {cert_dir, CertDir} | Config]; + false -> + end_per_testcase(Case, Config0), + ssl:start(), + Config0 + end. + +end_per_testcase(_, Config) -> + case ?config(idp_crl, Config) of + true -> + ssl:stop(), + inets:stop(); + false -> + ssl:stop() + end. + %%%================================================================ %%% Test cases +%%%================================================================ crl_verify_valid() -> [{doc,"Verify a simple valid CRL chain"}]. 
crl_verify_valid(Config) when is_list(Config) -> - process_flag(trap_exit, true), PrivDir = ?config(cert_dir, Config), - ServerOpts = [{keyfile, filename:join([PrivDir, "server", "key.pem"])}, - {certfile, filename:join([PrivDir, "server", "cert.pem"])}, - {cacertfile, filename:join([PrivDir, "server", "cacerts.pem"])}], - + Check = ?config(crl_check, Config), + ServerOpts = [{keyfile, filename:join([PrivDir, "server", "key.pem"])}, + {certfile, filename:join([PrivDir, "server", "cert.pem"])}, + {cacertfile, filename:join([PrivDir, "server", "cacerts.pem"])}], + ClientOpts = case ?config(idp_crl, Config) of + true -> + [{cacertfile, filename:join([PrivDir, "server", "cacerts.pem"])}, + {crl_check, Check}, + {crl_cache, {ssl_crl_cache, {internal, [{http, 5000}]}}}, + {verify, verify_peer}]; + false -> + [{cacertfile, filename:join([PrivDir, "server", "cacerts.pem"])}, + {crl_check, Check}, + {verify, verify_peer}] + end, {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config), - Data = "From openssl to erlang", - Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0}, - {from, self()}, - {mfa, {?MODULE, erlang_ssl_receive, [Data]}}, - %{mfa, {ssl_test_lib, no_result, []}}, - {options, ServerOpts}]), - ct:log("~p:~p~nreturn from ssl_test_lib:start_server:~n~p",[?MODULE,?LINE,Server]), - Port = ssl_test_lib:inet_port(Server), - - CACerts = load_cert(filename:join([PrivDir, "erlangCA", "cacerts.pem"])), - - ClientOpts = [{cacerts, CACerts}, - {verify, verify_peer}, - {verify_fun, {fun validate_function/3, {CACerts, []}}}], - - - ct:log("~p:~p~ncalling ssl_test_lib:start_client",[?MODULE,?LINE]), - Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, - {host, Hostname}, - {from, self()}, - {mfa, {?MODULE, - erlang_ssl_send, [Data]}}, - %{mfa, {ssl_test_lib, no_result, []}}, - {options, ClientOpts}]), - ct:log("~p:~p~nreturn from ssl_test_lib:start_client:~n~p",[?MODULE,?LINE,Client]), - - ssl_test_lib:check_result(Client, ok, Server, ok), - - %% Clean close down! Server needs to be closed first !! - ssl_test_lib:close(Server), - ssl_test_lib:close(Client), - process_flag(trap_exit, false). + ssl_crl_cache:insert({file, filename:join([PrivDir, "erlangCA", "crl.pem"])}), + ssl_crl_cache:insert({file, filename:join([PrivDir, "otpCA", "crl.pem"])}), + + crl_verify_valid(Hostname, ServerNode, ServerOpts, ClientNode, ClientOpts). crl_verify_revoked() -> - [{doc,"Verify a simple valid CRL chain"}]. -crl_verify_revoked(Config) when is_list(Config) -> - process_flag(trap_exit, true), + [{doc,"Verify a simple CRL chain when peer cert is reveoked"}]. 
+crl_verify_revoked(Config) when is_list(Config) -> PrivDir = ?config(cert_dir, Config), + Check = ?config(crl_check, Config), ServerOpts = [{keyfile, filename:join([PrivDir, "revoked", "key.pem"])}, - {certfile, filename:join([PrivDir, "revoked", "cert.pem"])}, - {cacertfile, filename:join([PrivDir, "revoked", "cacerts.pem"])}], - ct:log("~p:~p~nserver opts ~p~n", [?MODULE,?LINE, ServerOpts]), + {certfile, filename:join([PrivDir, "revoked", "cert.pem"])}, + {cacertfile, filename:join([PrivDir, "revoked", "cacerts.pem"])}], {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config), - Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0}, - {from, self()}, - %{mfa, {?MODULE, erlang_ssl_receive, [Data]}}, - {mfa, {ssl_test_lib, no_result, []}}, - {options, ServerOpts}]), + Server = ssl_test_lib:start_server_error([{node, ServerNode}, {port, 0}, + {from, self()}, + {options, ServerOpts}]), Port = ssl_test_lib:inet_port(Server), + + ssl_crl_cache:insert({file, filename:join([PrivDir, "erlangCA", "crl.pem"])}), + ssl_crl_cache:insert({file, filename:join([PrivDir, "otpCA", "crl.pem"])}), + + ClientOpts = case ?config(idp_crl, Config) of + true -> + [{cacertfile, filename:join([PrivDir, "revoked", "cacerts.pem"])}, + {crl_cache, {ssl_crl_cache, {internal, [{http, 5000}]}}}, + {crl_check, Check}, + {verify, verify_peer}]; + false -> + [{cacertfile, filename:join([PrivDir, "revoked", "cacerts.pem"])}, + {crl_check, Check}, + {verify, verify_peer}] + end, + + Client = ssl_test_lib:start_client_error([{node, ClientNode}, {port, Port}, + {host, Hostname}, + {from, self()}, + {options, ClientOpts}]), + receive + {Server, AlertOrColse} -> + ct:pal("Server Alert or Close ~p", [AlertOrColse]) + end, + ssl_test_lib:check_result(Client, {error, {tls_alert, "certificate revoked"}}). - CACerts = load_cert(filename:join([PrivDir, "erlangCA", "cacerts.pem"])), - ClientOpts = [{cacerts, CACerts}, - {verify, verify_peer}, - {verify_fun, {fun validate_function/3, {CACerts, []}}}], - {connect_failed, _} = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, +crl_verify_valid(Hostname, ServerNode, ServerOpts, ClientNode, ClientOpts) -> + Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0}, + {from, self()}, + {mfa, {ssl_test_lib, + send_recv_result_active, []}}, + {options, ServerOpts}]), + Port = ssl_test_lib:inet_port(Server), + Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, {host, Hostname}, {from, self()}, - %{mfa, {?MODULE, - %erlang_ssl_receive, [Data]}}, - {mfa, {ssl_test_lib, no_result, []}}, + {mfa, {ssl_test_lib, + send_recv_result_active, []}}, {options, ClientOpts}]), + + ssl_test_lib:check_result(Client, ok, Server, ok), - %% Clean close down! Server needs to be closed first !! ssl_test_lib:close(Server), - process_flag(trap_exit, false). 
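A condensed sketch, not part of the patch, of the two ways the test cases above provision CRLs: pre-loading PEM files into the built-in cache with ssl_crl_cache:insert/1, or letting the cache fetch them over HTTP from the certificates' issuing distribution points (with the 5000 ms timeout used by the suite). The helper names are invented; the option values mirror the test code.

    %% Pre-load local CRL files (paths as in the tests above).
    preload_crls(PrivDir) ->
        ssl_crl_cache:insert({file, filename:join([PrivDir, "erlangCA", "crl.pem"])}),
        ssl_crl_cache:insert({file, filename:join([PrivDir, "otpCA", "crl.pem"])}).

    %% Or rely on HTTP fetching via the issuing distribution point.
    idp_client_opts(CaCertFile) ->
        [{cacertfile, CaCertFile},
         {verify, verify_peer},
         {crl_check, true},
         {crl_cache, {ssl_crl_cache, {internal, [{http, 5000}]}}}].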
- -%%%================================================================ -%%% Lib - -erlang_ssl_receive(Socket, Data) -> - ct:log("~p:~p~nConnection info: ~p~n", - [?MODULE,?LINE, ssl:connection_info(Socket)]), - receive - {ssl, Socket, Data} -> - ct:log("~p:~p~nReceived ~p~n",[?MODULE,?LINE, Data]), - %% open_ssl server sometimes hangs waiting in blocking read - ssl:send(Socket, "Got it"), - ok; - {ssl, Socket, Byte} when length(Byte) == 1 -> - erlang_ssl_receive(Socket, tl(Data)); - {Port, {data,Debug}} when is_port(Port) -> - ct:log("~p:~p~nopenssl ~s~n",[?MODULE,?LINE, Debug]), - erlang_ssl_receive(Socket,Data); - Other -> - ct:fail({unexpected_message, Other}) - after 4000 -> - ct:fail({did_not_get, Data}) - end. - - -erlang_ssl_send(Socket, Data) -> - ct:log("~p:~p~nConnection info: ~p~n", - [?MODULE,?LINE, ssl:connection_info(Socket)]), - ssl:send(Socket, Data), - ok. - -load_certs(undefined) -> - undefined; -load_certs(CertDir) -> - case file:list_dir(CertDir) of - {ok, Certs} -> - load_certs(lists:map(fun(Cert) -> filename:join(CertDir, Cert) - end, Certs), []); - {error, _} -> - undefined - end. - -load_certs([], Acc) -> - ct:log("~p:~p~nSuccessfully loaded ~p CA certificates~n", [?MODULE,?LINE, length(Acc)]), - Acc; -load_certs([Cert|Certs], Acc) -> - case filelib:is_dir(Cert) of - true -> - load_certs(Certs, Acc); - _ -> - %ct:log("~p:~p~nLoading certificate ~p~n", [?MODULE,?LINE, Cert]), - load_certs(Certs, load_cert(Cert) ++ Acc) - end. - -load_cert(Cert) -> - {ok, Bin} = file:read_file(Cert), - case filename:extension(Cert) of - ".der" -> - %% no decoding necessary - [Bin]; - _ -> - %% assume PEM otherwise - Contents = public_key:pem_decode(Bin), - [DER || {Type, DER, Cipher} <- Contents, Type == 'Certificate', Cipher == 'not_encrypted'] - end. - -%% @doc Validator function for SSL negotiation. -%% -validate_function(Cert, valid_peer, State) -> - ct:log("~p:~p~nvaliding peer ~p with ~p intermediate certs~n", - [?MODULE,?LINE, get_common_name(Cert), - length(element(2, State))]), - %% peer certificate validated, now check the CRL - Res = (catch check_crl(Cert, State)), - ct:log("~p:~p~nCRL validate result for ~p: ~p~n", - [?MODULE,?LINE, get_common_name(Cert), Res]), - {Res, State}; -validate_function(Cert, valid, {TrustedCAs, IntermediateCerts}=State) -> - case public_key:pkix_is_self_signed(Cert) of - true -> - ct:log("~p:~p~nroot certificate~n",[?MODULE,?LINE]), - %% this is a root cert, no CRL - {valid, {TrustedCAs, [Cert|IntermediateCerts]}}; - false -> - %% check is valid CA certificate, add to the list of - %% intermediates - Res = (catch check_crl(Cert, State)), - ct:log("~p:~p~nCRL intermediate CA validate result for ~p: ~p~n", - [?MODULE,?LINE, get_common_name(Cert), Res]), - {Res, {TrustedCAs, [Cert|IntermediateCerts]}} - end; -validate_function(_Cert, _Event, State) -> - %ct:log("~p:~p~nignoring event ~p~n", [?MODULE,?LINE, _Event]), - {valid, State}. + ssl_test_lib:close(Client). -%% @doc Given a certificate, find CRL distribution points for the given -%% certificate, fetch, and attempt to validate each CRL through -%% issuer_function/4. 
-%% -check_crl(Cert, State) -> - %% pull the CRL distribution point(s) out of the certificate, if any - ct:log("~p:~p~ncheck_crl(~n Cert=~p,~nState=~p~n)",[?MODULE,?LINE,Cert,State]), - case pubkey_cert:select_extension( - ?'id-ce-cRLDistributionPoints', - pubkey_cert:extensions_list(Cert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.extensions)) of - undefined -> - ct:log("~p:~p~nno CRL distribution points for ~p~n", - [?MODULE,?LINE, get_common_name(Cert)]), - %% fail; we can't validate if there's no CRL - no_crl; - CRLExtension -> - ct:log("~p:~p~nCRLExtension=~p)",[?MODULE,?LINE,CRLExtension]), - CRLDistPoints = CRLExtension#'Extension'.extnValue, - DPointsAndCRLs = lists:foldl(fun(Point, Acc) -> - %% try to read the CRL over http or from a - %% local file - case fetch_point(Point) of - not_available -> - ct:log("~p:~p~nfetch_point returned~n~p~n)",[?MODULE,?LINE,not_available]), - Acc; - Res -> - ct:log("~p:~p~nfetch_point returned~n~p~n)",[?MODULE,?LINE,Res]), - [{Point, Res} | Acc] - end - end, [], CRLDistPoints), - public_key:pkix_crls_validate(Cert, - DPointsAndCRLs, - [{issuer_fun, - {fun issuer_function/4, State}}]) - end. - -%% @doc Given a list of distribution points for CRLs, certificates and -%% both trusted and intermediary certificates, attempt to build and -%% authority chain back via build_chain to verify that it is valid. -%% -issuer_function(_DP, CRL, _Issuer, {TrustedCAs, IntermediateCerts}) -> - %% XXX the 'Issuer' we get passed here is the AuthorityKeyIdentifier, - %% which we are not currently smart enough to understand - %% Read the CA certs out of the file - ct:log("~p:~p~nissuer_function(~nCRL=~p,~nLast param=~p)",[?MODULE,?LINE,CRL, {TrustedCAs, IntermediateCerts}]), - Certs = [public_key:pkix_decode_cert(DER, otp) || DER <- TrustedCAs], - %% get the real issuer out of the CRL - Issuer = public_key:pkix_normalize_name( - pubkey_cert_records:transform( - CRL#'CertificateList'.tbsCertList#'TBSCertList'.issuer, decode)), - %% assume certificates are ordered from root to tip - case find_issuer(Issuer, IntermediateCerts ++ Certs) of - undefined -> - ct:log("~p:~p~nunable to find certificate matching CRL issuer ~p~n", - [?MODULE,?LINE, Issuer]), - error; - IssuerCert -> - ct:log("~p:~p~nIssuerCert=~p~n)",[?MODULE,?LINE,IssuerCert]), - case build_chain({public_key:pkix_encode('OTPCertificate', - IssuerCert, - otp), - IssuerCert}, IntermediateCerts, Certs, []) of - undefined -> - error; - {OTPCert, Path} -> - {ok, OTPCert, Path} - end - end. - -%% @doc Attempt to build authority chain back using intermediary -%% certificates, falling back on trusted certificates if the -%% intermediary chain of certificates does not fully extend to the -%% root. 
-%% -%% Returns: {RootCA :: #OTPCertificate{}, Chain :: [der_encoded()]} -%% -build_chain({DER, Cert}, IntCerts, TrustedCerts, Acc) -> - %% check if this cert is self-signed, if it is, we've reached the - %% root of the chain - Issuer = public_key:pkix_normalize_name( - Cert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.issuer), - Subject = public_key:pkix_normalize_name( - Cert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.subject), - case Issuer == Subject of - true -> - case find_issuer(Issuer, TrustedCerts) of - undefined -> - ct:log("~p:~p~nself-signed certificate is NOT trusted~n",[?MODULE,?LINE]), - undefined; - TrustedCert -> - %% return the cert from the trusted list, to prevent - %% issuer spoofing - {TrustedCert, - [public_key:pkix_encode( - 'OTPCertificate', TrustedCert, otp)|Acc]} - end; - false -> - Match = lists:foldl( - fun(C, undefined) -> - S = public_key:pkix_normalize_name(C#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.subject), - %% compare the subject to the current issuer - case Issuer == S of - true -> - %% we've found our man - {public_key:pkix_encode('OTPCertificate', C, otp), C}; - false -> - undefined - end; - (_E, A) -> - %% already matched - A - end, undefined, IntCerts), - case Match of - undefined when IntCerts /= TrustedCerts -> - %% continue the chain by using the trusted CAs - ct:log("~p:~p~nRan out of intermediate certs, switching to trusted certs~n",[?MODULE,?LINE]), - build_chain({DER, Cert}, TrustedCerts, TrustedCerts, Acc); - undefined -> - ct:log("Can't construct chain of trust beyond ~p~n", - [?MODULE,?LINE, get_common_name(Cert)]), - %% can't find the current cert's issuer - undefined; - Match -> - build_chain(Match, IntCerts, TrustedCerts, [DER|Acc]) - end - end. - -%% @doc Given a certificate and a list of trusted or intermediary -%% certificates, attempt to find a match in the list or bail with -%% undefined. -find_issuer(Issuer, Certs) -> - lists:foldl( - fun(OTPCert, undefined) -> - %% check if this certificate matches the issuer - Normal = public_key:pkix_normalize_name( - OTPCert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.subject), - case Normal == Issuer of - true -> - OTPCert; - false -> - undefined - end; - (_E, Acc) -> - %% already found a match - Acc - end, undefined, Certs). - -%% @doc Find distribution points for a given CRL and then attempt to -%% fetch the CRL from the first available. -fetch_point(#'DistributionPoint'{distributionPoint={fullName, Names}}) -> - Decoded = [{NameType, - pubkey_cert_records:transform(Name, decode)} - || {NameType, Name} <- Names], - ct:log("~p:~p~ncall fetch(~nDecoded=~p~n)",[?MODULE,?LINE,Decoded]), - fetch(Decoded). - -%% @doc Given a list of locations to retrieve a CRL from, attempt to -%% retrieve either from a file or http resource and bail as soon as -%% it can be found. -%% -%% Currently, only hand a armored PEM or DER encoded file, with -%% defaulting to DER. 
-%% -fetch([]) -> - not_available; -fetch([{uniformResourceIdentifier, "http"++_=URL}|Rest]) -> - ct:log("~p:~p~ngetting CRL from ~p~n", [?MODULE,?LINE, URL]), - case httpc:request(get, {URL, []}, [], [{body_format, binary}]) of - {ok, {_Status, _Headers, Body}} -> - case Body of - <<"-----BEGIN", _/binary>> -> - ct:log("~p:~p~npublic_key:pem_decode,~nBody=~p~n)",[?MODULE,?LINE,Body]), - [{'CertificateList', - DER, _}=CertList] = public_key:pem_decode(Body), - ct:log("~p:~p~npublic_key:pem_entry_decode,~nCertList=~p~n)",[?MODULE,?LINE,CertList]), - {DER, public_key:pem_entry_decode(CertList)}; - _ -> - ct:log("~p:~p~npublic_key:pem_entry_decode,~nBody=~p~n)",[?MODULE,?LINE,{'CertificateList', Body, not_encrypted}]), - %% assume DER encoded - try - public_key:pem_entry_decode({'CertificateList', Body, not_encrypted}) - of - CertList -> {Body, CertList} - catch - _C:_E -> - ct:log("~p:~p~nfailed DER assumption~nRest=~p", [?MODULE,?LINE,Rest]), - fetch(Rest) - end - end; - {error, _Reason} -> - ct:log("~p:~p~nfailed to get CRL ~p~n", [?MODULE,?LINE, _Reason]), - fetch(Rest); - Other -> - ct:log("~p:~p~nreally failed to get CRL ~p~n", [?MODULE,?LINE, Other]), - fetch(Rest) - end; -fetch([Loc|Rest]) -> - %% unsupported CRL location - ct:log("~p:~p~nunable to fetch CRL from unsupported location ~p~n", - [?MODULE,?LINE, Loc]), - fetch(Rest). +%%-------------------------------------------------------------------- +%% Internal functions ------------------------------------------------ +%%-------------------------------------------------------------------- +is_idp(idp_crl) -> + true; +is_idp(_) -> + false. + +init_certs(_,v1_crl, Config) -> + {[{v2_crls, false}], Config}; +init_certs(_, idp_crl, Config) -> + Port = ?config(httpd_port, Config), + {[{crl_port,Port}, + {issuing_distribution_point, true}], Config + }; +init_certs(_,_,Config) -> + {[], Config}. -%% get the common name attribute out of an OTPCertificate record -get_common_name(OTPCert) -> - %% You'd think there'd be an easier way than this giant mess, but I - %% couldn't find one. - {rdnSequence, Subject} = OTPCert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.subject, - case [Attribute#'AttributeTypeAndValue'.value || [Attribute] <- Subject, - Attribute#'AttributeTypeAndValue'.type == ?'id-at-commonName'] of - [Att] -> - case Att of - {teletexString, Str} -> Str; - {printableString, Str} -> Str; - {utf8String, Bin} -> binary_to_list(Bin) - end; - _ -> - unknown - end. +make_dir_path(PathComponents) -> + lists:foldl(fun(F,P0) -> file:make_dir(P=filename:join(P0,F)), P end, + "", + PathComponents). diff --git a/lib/ssl/test/ssl_pem_cache_SUITE.erl b/lib/ssl/test/ssl_pem_cache_SUITE.erl index 843079e2fe..23584dfcdf 100644 --- a/lib/ssl/test/ssl_pem_cache_SUITE.erl +++ b/lib/ssl/test/ssl_pem_cache_SUITE.erl @@ -113,9 +113,9 @@ get_pem_cache() -> {status, _, _, StatusInfo} = sys:get_status(whereis(ssl_manager)), [_, _,_, _, Prop] = StatusInfo, State = ssl_test_lib:state(Prop), - case element(5, State) of - [_CertDb, _FileRefDb, PemChace] -> - PemChace; + case element(6, State) of + [_CertDb, _FileRefDb, PemCache| _] -> + PemCache; _ -> undefined end. diff --git a/lib/ssl/test/ssl_session_cache_SUITE.erl b/lib/ssl/test/ssl_session_cache_SUITE.erl index 06a41f1260..36d086338e 100644 --- a/lib/ssl/test/ssl_session_cache_SUITE.erl +++ b/lib/ssl/test/ssl_session_cache_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2014. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. 
%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in diff --git a/lib/ssl/test/ssl_test_lib.erl b/lib/ssl/test/ssl_test_lib.erl index d6fbb73249..7d0546210c 100644 --- a/lib/ssl/test/ssl_test_lib.erl +++ b/lib/ssl/test/ssl_test_lib.erl @@ -254,7 +254,6 @@ check_result(Server, ServerMsg, Client, ClientMsg) -> {Port, {data,Debug}} when is_port(Port) -> ct:log("~p:~p~nopenssl ~s~n",[?MODULE,?LINE, Debug]), check_result(Server, ServerMsg, Client, ClientMsg); - Unexpected -> Reason = {{expected, {Client, ClientMsg}}, {expected, {Server, ServerMsg}}, {got, Unexpected}}, @@ -268,6 +267,9 @@ check_result(Pid, Msg) -> {Port, {data,Debug}} when is_port(Port) -> ct:log("~p:~p~nopenssl ~s~n",[?MODULE,?LINE, Debug]), check_result(Pid,Msg); + %% {Port, {exit_status, Status}} when is_port(Port) -> + %% ct:log("~p:~p Exit status: ~p~n",[?MODULE,?LINE, Status]), + %% check_result(Pid, Msg); Unexpected -> Reason = {{expected, {Pid, Msg}}, {got, Unexpected}}, @@ -837,7 +839,7 @@ string_regex_filter(Str, Search) when is_list(Str) -> _ -> true end; -string_regex_filter(Str, _Search) -> +string_regex_filter(_Str, _Search) -> false. anonymous_suites() -> diff --git a/lib/ssl/test/ssl_upgrade_SUITE.erl b/lib/ssl/test/ssl_upgrade_SUITE.erl index 6a6a1b4a7a..c83fb367dc 100644 --- a/lib/ssl/test/ssl_upgrade_SUITE.erl +++ b/lib/ssl/test/ssl_upgrade_SUITE.erl @@ -38,8 +38,8 @@ all() -> init_per_suite(Config0) -> catch crypto:stop(), - try crypto:start() of - ok -> + try {crypto:start(), erlang:system_info({wordsize, internal}) == erlang:system_info({wordsize, external})} of + {ok, true} -> case ct_release_test:init(Config0) of {skip, Reason} -> {skip, Reason}; @@ -49,7 +49,9 @@ init_per_suite(Config0) -> ?config(priv_dir, Config))), ct:log("Make certs ~p~n", [Result]), ssl_test_lib:cert_options(Config) - end + end; + {ok, false} -> + {skip, "Test server will not handle halfwordemulator correctly. Skip as halfwordemulator is deprecated"} catch _:_ -> {skip, "Crypto did not start"} end. diff --git a/lib/stdlib/doc/src/io.xml b/lib/stdlib/doc/src/io.xml index a28180b42a..8ebfdb2e7f 100644 --- a/lib/stdlib/doc/src/io.xml +++ b/lib/stdlib/doc/src/io.xml @@ -505,7 +505,8 @@ ok <p>Writes the data with standard syntax in the same way as <c>~w</c>, but breaks terms whose printed representation is longer than one line into many lines and indents each - line sensibly. It also tries to detect lists of + line sensibly. Left justification is not supported. + It also tries to detect lists of printable characters and to output these as strings. The Unicode translation modifier is used for determining what characters are printable. For example:</p> diff --git a/lib/stdlib/doc/src/io_lib.xml b/lib/stdlib/doc/src/io_lib.xml index 3312b08064..2117d66381 100644 --- a/lib/stdlib/doc/src/io_lib.xml +++ b/lib/stdlib/doc/src/io_lib.xml @@ -4,7 +4,7 @@ <erlref> <header> <copyright> - <year>1996</year><year>2013</year> + <year>1996</year><year>2014</year> <holder>Ericsson AB. 
All Rights Reserved.</holder> </copyright> <legalnotice> @@ -59,6 +59,35 @@ <datatype> <name name="latin1_string"/> </datatype> + <datatype> + <name name="format_spec"/> + <desc><p>Description:</p> + <list type="bulleted"> + <item><p><c>control_char</c> is the type of control + sequence: <c>$P</c>, <c>$w</c>, and so on;</p> + </item> + <item><p><c>args</c> is a list of the arguments used by the + control sequence, or an empty list if the control sequence + does not take any arguments;</p> + </item> + <item><p><c>width</c> is the field width;</p> + </item> + <item><p><c>adjust</c> is the adjustment;</p> + </item> + <item><p><c>precision</c> is the precision of the printed + argument;</p> + </item> + <item><p><c>pad_char</c> is the padding character;</p> + </item> + <item><p><c>encoding</c> is set to <c>true</c> if the translation + modifier <c>t</c> is present;</p> + </item> + <item><p><c>strings</c> is set to <c>false</c> if the modifier + <c>l</c> is present.</p> + </item> + </list> + </desc> + </datatype> </datatypes> <funcs> <func> @@ -260,6 +289,45 @@ </desc> </func> <func> + <name name="scan_format" arity="2"/> + <fsummary>Parse all control sequences in the format string</fsummary> + <desc> + <p>Returns a list corresponding to the given format string, + where control sequences have been replaced with + corresponding tuples. This list can be passed to <seealso + marker="#build_text/1">io_lib:build_text/1</seealso> to have + the same effect as <c>io_lib:format(Format, Args)</c>, or to + <seealso + marker="#unscan_format/1">io_lib:unscan_format/1</seealso> + in order to get the corresponding pair of <c>Format</c> and + <c>Args</c> (with every <c>*</c> and corresponding argument + expanded to numeric values).</p> + <p>A typical use of this function is to replace unbounded-size + control sequences like <c>~w</c> and <c>~p</c> with the + depth-limited variants <c>~W</c> and <c>~P</c> before + formatting to text, e.g. in a logger.</p> + </desc> + </func> + <func> + <name name="unscan_format" arity="1"/> + <fsummary>Revert a pre-parsed format list to a plain character list + and a list of arguments</fsummary> + <desc> + <p>See <seealso + marker="#scan_format/2">io_lib:scan_format/2</seealso> for + details.</p> + </desc> + </func> + <func> + <name name="build_text" arity="1"/> + <fsummary>Build the output text for a pre-parsed format list</fsummary> + <desc> + <p>See <seealso + marker="#scan_format/2">io_lib:scan_format/2</seealso> for + details.</p> + </desc> + </func> + <func> <name name="indentation" arity="2"/> <fsummary>Indentation after printing string</fsummary> <desc> diff --git a/lib/stdlib/doc/src/re.xml b/lib/stdlib/doc/src/re.xml index a1833f6a51..5af1468e9b 100644 --- a/lib/stdlib/doc/src/re.xml +++ b/lib/stdlib/doc/src/re.xml @@ -150,7 +150,11 @@ This option makes it possible to include comments inside complicated patterns. N <tag><c>no_start_optimize</c></tag> <item>This option disables optimization that may malfunction if "Special start-of-pattern items" are present in the regular expression. A typical example would be when matching "DEFABC" against "(*COMMIT)ABC", where the start optimization of PCRE would skip the subject up to the "A" and would never realize that the (*COMMIT) instruction should have made the matching fail. 
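The "typical use" mentioned in the io_lib documentation above can be made concrete with a small sketch, not part of the patch: rewrite unbounded ~p/~w control sequences into the depth-limited ~P/~W variants before building the text. Module and function names are invented for the example.

    -module(depth_format_sketch).
    -export([format/3]).

    %% Format Args according to Format, but never print any term deeper
    %% than Depth, by rewriting the parsed control sequences.
    format(Format, Args, Depth) ->
        Scanned = io_lib:scan_format(Format, Args),
        io_lib:build_text([limit(Spec, Depth) || Spec <- Scanned]).

    limit(#{control_char := $p, args := [Arg]} = Spec, Depth) ->
        Spec#{control_char := $P, args := [Arg, Depth]};
    limit(#{control_char := $w, args := [Arg]} = Spec, Depth) ->
        Spec#{control_char := $W, args := [Arg, Depth]};
    limit(Spec, _Depth) ->
        Spec.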
This option is only relevant if you use "start-of-pattern items", as discussed in the section "PCRE regular expression details" below.</item> <tag><c>ucp</c></tag> - <item>Specifies that Unicode Character Properties should be used when resolving \B, \b, \D, \d, \S, \s, \Wand \w. Without this flag, only ISO-Latin-1 properties are used. Using Unicode properties hurts performance, but is semantically correct when working with Unicode characters beyond the ISO-Latin-1 range.</item> + <item>Specifies that Unicode Character Properties should be used when + resolving \B, \b, \D, \d, \S, \s, \W and \w. Without this flag, only + ISO-Latin-1 properties are used. Using Unicode properties hurts + performance, but is semantically correct when working with Unicode + characters beyond the ISO-Latin-1 range.</item> <tag><c>never_utf</c></tag> <item>Specifies that the (*UTF) and/or (*UTF8) "start-of-pattern items" are forbidden. This flag can not be combined with <c>unicode</c>. Useful if ISO-Latin-1 patterns from an external source are to be compiled.</item> </taglist> @@ -966,7 +970,7 @@ appearance causes an error. </quote> <p>This has the same effect as setting the <c>ucp</c> option: it causes sequences such as \d and \w to use Unicode properties to determine character types, -instead of recognizing only characters with codes less than 128 via a lookup +instead of recognizing only characters with codes less than 256 via a lookup table. </p> @@ -1307,7 +1311,8 @@ By default, the definition of letters and digits is controlled by PCRE's low-valued character tables, in Erlang's case (and without the <c>unicode</c> option), the ISO-Latin-1 character set.</p> -<p>By default, in <c>unicode</c> mode, characters with values greater than 128 never match +<p>By default, in <c>unicode</c> mode, characters with values greater than 255, +i.e. all characters outside the ISO-Latin-1 character set, never match \d, \s, or \w, and always match \D, \S, and \W. These sequences retain their original meanings from before UTF support was available, mainly for efficiency reasons. However, if the <c>ucp</c> option is set, the behaviour is changed so that Unicode @@ -1954,10 +1959,10 @@ can be included in a class as a literal string of data units, or by using the upper case and lower case versions, so for example, a caseless [aeiou] matches "A" as well as "a", and a caseless [^aeiou] does not match "A", whereas a caseful version would. In a UTF mode, PCRE always understands the concept of -case for characters whose values are less than 128, so caseless matching is +case for characters whose values are less than 256, so caseless matching is always possible. For characters with higher values, the concept of case is supported if PCRE is compiled with Unicode property support, but not otherwise. -If you want to use caseless matching in a UTF mode for characters 128 and +If you want to use caseless matching in a UTF mode for characters 256 and above, you must ensure that PCRE is compiled with Unicode property support as well as with UTF support.</p> @@ -1989,7 +1994,7 @@ matches the letters in either case. For example, [W-c] is equivalent to [][\\^_`wxyzabc], matched caselessly, and in a non-UTF mode, if character tables for a French locale are in use, [\xc8-\xcb] matches accented E characters in both cases. 
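A small example, not part of the patch, of the ucp behaviour described above: U+03BB (Greek small letter lambda) lies outside ISO-Latin-1, so \w only matches it when Unicode character properties are enabled.

    %% Without ucp, \w uses only the low-valued character tables.
    nomatch    = re:run([16#3BB], "\\w", [unicode]),
    {match, _} = re:run([16#3BB], "\\w", [unicode, ucp]).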
In UTF modes, PCRE supports the concept of case for -characters with values greater than 128 only when it is compiled with Unicode +characters with values greater than 255 only when it is compiled with Unicode property support.</p> <p>The character escape sequences \d, \D, \h, \H, \p, \P, \s, \S, \v, @@ -2062,7 +2067,7 @@ by a ^ character after the colon. For example,</p> syntax [.ch.] and [=ch=] where "ch" is a "collating element", but these are not supported, and an error is given if they are encountered.</p> -<p>By default, in UTF modes, characters with values greater than 128 do not match +<p>By default, in UTF modes, characters with values greater than 255 do not match any of the POSIX character classes. However, if the PCRE_UCP option is passed to <b>pcre_compile()</b>, some of the classes are changed so that Unicode character properties are used. This is achieved by replacing the POSIX classes @@ -2081,7 +2086,7 @@ by other sequences, as follows:</p> <p>Negated versions, such as [:^alpha:] use \P instead of \p. The other POSIX classes are unchanged, and match only characters with code points less than -128.</p> +256.</p> </section> diff --git a/lib/stdlib/src/dict.erl b/lib/stdlib/src/dict.erl index cf8fb3114a..5a9f63c5e2 100644 --- a/lib/stdlib/src/dict.erl +++ b/lib/stdlib/src/dict.erl @@ -417,6 +417,8 @@ on_bucket(F, T, Slot) -> %% could have implemented map and filter using fold but these are %% faster. We hope! +fold_dict(F, Acc, #dict{size=0}) when is_function(F, 3) -> + Acc; fold_dict(F, Acc, D) -> Segs = D#dict.segs, fold_segs(F, Acc, Segs, tuple_size(Segs)). @@ -434,6 +436,8 @@ fold_bucket(F, Acc, [?kv(Key,Val)|Bkt]) -> fold_bucket(F, F(Key, Val, Acc), Bkt); fold_bucket(F, Acc, []) when is_function(F, 3) -> Acc. +map_dict(F, #dict{size=0} = Dict) when is_function(F, 2) -> + Dict; map_dict(F, D) -> Segs0 = tuple_to_list(D#dict.segs), Segs1 = map_seg_list(F, Segs0), @@ -453,6 +457,8 @@ map_bucket(F, [?kv(Key,Val)|Bkt]) -> [?kv(Key,F(Key, Val))|map_bucket(F, Bkt)]; map_bucket(F, []) when is_function(F, 2) -> []. +filter_dict(F, #dict{size=0} = Dict) when is_function(F, 2) -> + Dict; filter_dict(F, D) -> Segs0 = tuple_to_list(D#dict.segs), {Segs1,Fc} = filter_seg_list(F, Segs0, [], 0), diff --git a/lib/stdlib/src/erl_lint.erl b/lib/stdlib/src/erl_lint.erl index b870ccf1f9..cbe6eeec3c 100644 --- a/lib/stdlib/src/erl_lint.erl +++ b/lib/stdlib/src/erl_lint.erl @@ -2270,11 +2270,10 @@ expr({remote,Line,_M,_F}, _Vt, St) -> %% {UsedVarTable,State} expr_list(Es, Vt, St) -> - {Vt1,St1} = foldl(fun (E, {Esvt,St0}) -> - {Evt,St1} = expr(E, Vt, St0), - {vtmerge_pat(Evt, Esvt),St1} - end, {[],St}, Es), - {vtmerge(vtnew(Vt1, Vt), vtold(Vt1, Vt)),St1}. + foldl(fun (E, {Esvt,St0}) -> + {Evt,St1} = expr(E, Vt, St0), + {vtmerge_pat(Evt, Esvt),St1} + end, {[],St}, Es). record_expr(Line, Rec, Vt, St0) -> St1 = warn_invalid_record(Line, Rec, St0), @@ -2292,8 +2291,8 @@ map_fields([{Tag,_,K,V}|Fs], Vt, St, F) when Tag =:= map_field_assoc; {Pvt,St2} = F([K,V], Vt, St), {Vts,St3} = map_fields(Fs, Vt, St2, F), {vtupdate(Pvt, Vts),St3}; -map_fields([], Vt, St, _) -> - {Vt,St}. +map_fields([], _, St, _) -> + {[],St}. %% warn_invalid_record(Line, Record, State0) -> State %% Adds warning if the record is invalid. diff --git a/lib/stdlib/src/ets.erl b/lib/stdlib/src/ets.erl index 26b0393b35..09c8924650 100644 --- a/lib/stdlib/src/ets.erl +++ b/lib/stdlib/src/ets.erl @@ -1625,13 +1625,18 @@ choice(Height, Width, P, Mode, Tab, Key, Turn, Opos) -> end. 
get_line(P, Default) -> - case io:get_line(P) of + case line_string(io:get_line(P)) of "\n" -> Default; L -> L end. +%% If the standard input is set to binary mode +%% convert it to a list so we can properly match. +line_string(Binary) when is_binary(Binary) -> unicode:characters_to_list(Binary); +line_string(Other) -> Other. + nonl(S) -> string:strip(S, right, $\n). print_number(Tab, Key, Num) -> diff --git a/lib/stdlib/src/io_lib.erl b/lib/stdlib/src/io_lib.erl index adc9a0cf5f..e90cda0533 100644 --- a/lib/stdlib/src/io_lib.erl +++ b/lib/stdlib/src/io_lib.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2013. All Rights Reserved. +%% Copyright Ericsson AB 1996-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -60,6 +60,7 @@ -module(io_lib). -export([fwrite/2,fread/2,fread/3,format/2]). +-export([scan_format/2,unscan_format/1,build_text/1]). -export([print/1,print/4,indentation/2]). -export([write/1,write/2,write/3,nl/0,format_prompt/1,format_prompt/2]). @@ -83,7 +84,7 @@ deep_unicode_char_list/1]). -export_type([chars/0, latin1_string/0, continuation/0, - fread_error/0, fread_item/0]). + fread_error/0, fread_item/0, format_spec/0]). %%---------------------------------------------------------------------- @@ -108,6 +109,18 @@ -type fread_item() :: string() | atom() | integer() | float(). +-type format_spec() :: + #{ + control_char => char(), + args => [any()], + width => 'none' | integer(), + adjust => 'left' | 'right', + precision => 'none' | integer(), + pad_char => char(), + encoding => 'unicode' | 'latin1', + strings => boolean() + }. + %%---------------------------------------------------------------------- %% Interface calls to sub-modules. @@ -156,6 +169,31 @@ format(Format, Args) -> Other end. +-spec scan_format(Format, Data) -> FormatList when + Format :: io:format(), + Data :: [term()], + FormatList :: [char() | format_spec()]. + +scan_format(Format, Args) -> + try io_lib_format:scan(Format, Args) + catch + _:_ -> erlang:error(badarg, [Format, Args]) + end. + +-spec unscan_format(FormatList) -> {Format, Data} when + FormatList :: [char() | format_spec()], + Format :: io:format(), + Data :: [term()]. + +unscan_format(FormatList) -> + io_lib_format:unscan(FormatList). + +-spec build_text(FormatList) -> chars() when + FormatList :: [char() | format_spec()]. + +build_text(FormatList) -> + io_lib_format:build(FormatList). + -spec print(Term) -> chars() when Term :: term(). diff --git a/lib/stdlib/src/io_lib_format.erl b/lib/stdlib/src/io_lib_format.erl index 89ae6fb187..015afb317a 100644 --- a/lib/stdlib/src/io_lib_format.erl +++ b/lib/stdlib/src/io_lib_format.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2013. All Rights Reserved. +%% Copyright Ericsson AB 1996-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -20,10 +20,9 @@ %% Formatting functions of io library. --export([fwrite/2,fwrite_g/1,indentation/2]). +-export([fwrite/2,fwrite_g/1,indentation/2,scan/2,unscan/1,build/1]). -%% fwrite(Format, ArgList) -> string(). -%% Format the arguments in ArgList after string Format. Just generate +%% Format the arguments in Args after string Format. Just generate %% an error if there is an error in the arguments. 
%% %% To do the printing command correctly we need to calculate the @@ -37,15 +36,84 @@ %% and it also splits the handling of the control characters into two %% parts. -fwrite(Format, Args) when is_atom(Format) -> - fwrite(atom_to_list(Format), Args); -fwrite(Format, Args) when is_binary(Format) -> - fwrite(binary_to_list(Format), Args); +-spec fwrite(Format, Data) -> FormatList when + Format :: io:format(), + Data :: [term()], + FormatList :: [char() | io_lib:format_spec()]. + fwrite(Format, Args) -> - Cs = collect(Format, Args), + build(scan(Format, Args)). + +%% Build the output text for a pre-parsed format list. + +-spec build(FormatList) -> io_lib:chars() when + FormatList :: [char() | io_lib:format_spec()]. + +build(Cs) -> Pc = pcount(Cs), build(Cs, Pc, 0). +%% Parse all control sequences in the format string. + +-spec scan(Format, Data) -> FormatList when + Format :: io:format(), + Data :: [term()], + FormatList :: [char() | io_lib:format_spec()]. + +scan(Format, Args) when is_atom(Format) -> + scan(atom_to_list(Format), Args); +scan(Format, Args) when is_binary(Format) -> + scan(binary_to_list(Format), Args); +scan(Format, Args) -> + collect(Format, Args). + +%% Revert a pre-parsed format list to a plain character list and a +%% list of arguments. + +-spec unscan(FormatList) -> {Format, Data} when + FormatList :: [char() | io_lib:format_spec()], + Format :: io:format(), + Data :: [term()]. + +unscan(Cs) -> + {print(Cs), args(Cs)}. + +args([#{args := As} | Cs]) -> + As ++ args(Cs); +args([_C | Cs]) -> + args(Cs); +args([]) -> + []. + +print([#{control_char := C, width := F, adjust := Ad, precision := P, + pad_char := Pad, encoding := Encoding, strings := Strings} | Cs]) -> + print(C, F, Ad, P, Pad, Encoding, Strings) ++ print(Cs); +print([C | Cs]) -> + [C | print(Cs)]; +print([]) -> + []. + +print(C, F, Ad, P, Pad, Encoding, Strings) -> + [$~] ++ print_field_width(F, Ad) ++ print_precision(P) ++ + print_pad_char(Pad) ++ print_encoding(Encoding) ++ + print_strings(Strings) ++ [C]. + +print_field_width(none, _Ad) -> ""; +print_field_width(F, left) -> integer_to_list(-F); +print_field_width(F, right) -> integer_to_list(F). + +print_precision(none) -> ""; +print_precision(P) -> [$. | integer_to_list(P)]. + +print_pad_char($\s) -> ""; % default, no need to make explicit +print_pad_char(Pad) -> [$., Pad]. + +print_encoding(unicode) -> "t"; +print_encoding(latin1) -> "". + +print_strings(false) -> "l"; +print_strings(true) -> "". + collect([$~|Fmt0], Args0) -> {C,Fmt1,Args1} = collect_cseq(Fmt0, Args0), [C|collect(Fmt1, Args1)]; @@ -60,7 +128,10 @@ collect_cseq(Fmt0, Args0) -> {Encoding,Fmt4,Args4} = encoding(Fmt3, Args3), {Strings,Fmt5,Args5} = strings(Fmt4, Args4), {C,As,Fmt6,Args6} = collect_cc(Fmt5, Args5), - {{C,As,F,Ad,P,Pad,Encoding,Strings},Fmt6,Args6}. + FormatSpec = #{control_char => C, args => As, width => F, adjust => Ad, + precision => P, pad_char => Pad, encoding => Encoding, + strings => Strings}, + {FormatSpec,Fmt6,Args6}. encoding([$t|Fmt],Args) -> true = hd(Fmt) =/= $l, @@ -136,17 +207,19 @@ collect_cc([$i|Fmt], [A|Args]) -> {$i,[A],Fmt,Args}. pcount(Cs) -> pcount(Cs, 0). -pcount([{$p,_As,_F,_Ad,_P,_Pad,_Enc,_Str}|Cs], Acc) -> pcount(Cs, Acc+1); -pcount([{$P,_As,_F,_Ad,_P,_Pad,_Enc,_Str}|Cs], Acc) -> pcount(Cs, Acc+1); +pcount([#{control_char := $p}|Cs], Acc) -> pcount(Cs, Acc+1); +pcount([#{control_char := $P}|Cs], Acc) -> pcount(Cs, Acc+1); pcount([_|Cs], Acc) -> pcount(Cs, Acc); pcount([], Acc) -> Acc. -%% build([Control], Pc, Indentation) -> string(). 
+%% build([Control], Pc, Indentation) -> io_lib:chars(). %% Interpret the control structures. Count the number of print %% remaining and only calculate indentation when necessary. Must also %% be smart when calculating indentation for characters in format. -build([{C,As,F,Ad,P,Pad,Enc,Str}|Cs], Pc0, I) -> +build([#{control_char := C, args := As, width := F, adjust := Ad, + precision := P, pad_char := Pad, encoding := Enc, + strings := Str} | Cs], Pc0, I) -> S = control(C, As, F, Ad, P, Pad, Enc, Str, I), Pc1 = decr_pc(C, Pc0), if @@ -162,10 +235,14 @@ decr_pc($p, Pc) -> Pc - 1; decr_pc($P, Pc) -> Pc - 1; decr_pc(_, Pc) -> Pc. -%% indentation(String, Indentation) -> Indentation. + %% Calculate the indentation of the end of a string given its start %% indentation. We assume tabs at 8 cols. +-spec indentation(String, StartIndent) -> integer() when + String :: io_lib:chars(), + StartIndent :: integer(). + indentation([$\n|Cs], _I) -> indentation(Cs, 0); indentation([$\t|Cs], I) -> indentation(Cs, ((I + 8) div 8) * 8); indentation([C|Cs], I) when is_integer(C) -> @@ -366,7 +443,6 @@ float_data([D|Cs], Ds) when D >= $0, D =< $9 -> float_data([_|Cs], Ds) -> float_data(Cs, Ds). -%% fwrite_g(Float) %% Writes the shortest, correctly rounded string that converts %% to Float when read back with list_to_float/1. %% @@ -374,6 +450,8 @@ float_data([_|Cs], Ds) -> %% in Proceedings of the SIGPLAN '96 Conference on Programming %% Language Design and Implementation. +-spec fwrite_g(float()) -> string(). + fwrite_g(0.0) -> "0.0"; fwrite_g(Float) when is_float(Float) -> @@ -642,7 +720,7 @@ prefixed_integer(Int, F, Adj, Base, Pad, Prefix, Lowercase) term([Prefix|S], F, Adj, none, Pad) end. -%% char(Char, Field, Adjust, Precision, PadChar) -> string(). +%% char(Char, Field, Adjust, Precision, PadChar) -> chars(). char(C, none, _Adj, none, _Pad) -> [C]; char(C, F, _Adj, none, _Pad) -> chars(C, F); diff --git a/lib/stdlib/test/erl_lint_SUITE.erl b/lib/stdlib/test/erl_lint_SUITE.erl index f8a99f653a..a7c3fd3c2e 100644 --- a/lib/stdlib/test/erl_lint_SUITE.erl +++ b/lib/stdlib/test/erl_lint_SUITE.erl @@ -3708,7 +3708,13 @@ maps(Config) -> ">>, [], {errors,[{4,erl_lint,illegal_map_construction}, - {6,erl_lint,illegal_map_key}],[]}}], + {6,erl_lint,illegal_map_key}],[]}}, + {unused_vars_with_empty_maps, + <<"t(Foo, Bar, Baz) -> {#{},#{}}.">>, + [warn_unused_variables], + {warnings,[{1,erl_lint,{unused_var,'Bar'}}, + {1,erl_lint,{unused_var,'Baz'}}, + {1,erl_lint,{unused_var,'Foo'}}]}}], [] = run(Config, Ts), ok. diff --git a/lib/stdlib/test/io_SUITE.erl b/lib/stdlib/test/io_SUITE.erl index 2203dd8f51..8d53949c40 100644 --- a/lib/stdlib/test/io_SUITE.erl +++ b/lib/stdlib/test/io_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1999-2013. All Rights Reserved. +%% Copyright Ericsson AB 1999-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -31,7 +31,7 @@ printable_range/1, io_lib_print_binary_depth_one/1, otp_10302/1, otp_10755/1, otp_10836/1, io_lib_width_too_small/1, - io_with_huge_message_queue/1]). + io_with_huge_message_queue/1, format_string/1]). -export([pretty/2]). @@ -71,7 +71,8 @@ all() -> io_fread_newlines, otp_8989, io_lib_fread_literal, printable_range, io_lib_print_binary_depth_one, otp_10302, otp_10755, otp_10836, - io_lib_width_too_small, io_with_huge_message_queue]. + io_lib_width_too_small, io_with_huge_message_queue, + format_string]. groups() -> []. 
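A minimal sketch (the demo module and function below are made up for illustration) of how the io_lib:scan_format/2, unscan_format/1 and build_text/1 functions added above might be combined: scan a format call into a list of characters and format_spec() maps, widen every ~p field, and build the final text; unscan_format/1 turns the list back into {Format, Data}.

-module(format_spec_demo).
-export([widen_p/2]).

widen_p(Format, Args) ->
    Specs0 = io_lib:scan_format(Format, Args),
    Specs = [case S of
                 #{control_char := $p} -> S#{width := 15};  % widen ~p fields
                 _ -> S                                      % plain chars and other specs pass through
             end || S <- Specs0],
    {_Fmt, _Data} = io_lib:unscan_format(Specs),             % round-trip, for illustration only
    lists:flatten(io_lib:build_text(Specs)).

Calling format_spec_demo:widen_p("~p~n", [[a,b,c]]) should then give the same characters as io_lib:format("~15p~n", [[a,b,c]]).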
@@ -1035,7 +1036,14 @@ rp(Term, Col, Ll, D, M, RF) -> lists:flatten(io_lib:format("~s", [R])). fmt(Fmt, Args) -> - lists:flatten(io_lib:format(Fmt, Args)). + FormatList = io_lib:scan_format(Fmt, Args), + {Fmt2, Args2} = io_lib:unscan_format(FormatList), + Chars1 = lists:flatten(io_lib:build_text(FormatList)), + Chars2 = lists:flatten(io_lib:format(Fmt2, Args2)), + Chars3 = lists:flatten(io_lib:format(Fmt, Args)), + Chars1 = Chars2, + Chars2 = Chars3, + Chars3. rfd(a, 0) -> []; @@ -2261,3 +2269,9 @@ writes(0, _) -> ok; writes(N, F1) -> file:write(F1, "hello\n"), writes(N - 1, F1). + +format_string(Config) -> + %% All but padding is tested by fmt/2. + "xxxxxxsssx" = fmt("~10.4.xs", ["sss"]), + "xxxxxxsssx" = fmt("~10.4.*s", [$x, "sss"]), + ok. diff --git a/lib/syntax_tools/src/erl_prettypr.erl b/lib/syntax_tools/src/erl_prettypr.erl index 877675772f..81272e62de 100644 --- a/lib/syntax_tools/src/erl_prettypr.erl +++ b/lib/syntax_tools/src/erl_prettypr.erl @@ -50,8 +50,7 @@ | fun((erl_syntax:syntaxTree(), _, _) -> prettypr:document()). -type clause_t() :: 'case_expr' | 'cond_expr' | 'fun_expr' | 'if_expr' | 'receive_expr' | 'try_expr' - | {'function', prettypr:document()} - | {'rule', prettypr:document()}. + | {'function', prettypr:document()}. -record(ctxt, {prec = 0 :: integer(), sub_indent = 2 :: non_neg_integer(), @@ -587,8 +586,6 @@ lay_2(Node, Ctxt) -> make_case_clause(D1, D2, D3, Ctxt); try_expr -> make_case_clause(D1, D2, D3, Ctxt); - {rule, N} -> - make_rule_clause(N, D1, D2, D3, Ctxt); undefined -> %% If a clause is formatted out of context, we %% use a "fun-expression" clause style. @@ -851,14 +848,10 @@ lay_2(Node, Ctxt) -> floating(text(".")), lay(erl_syntax:record_access_field(Node), set_prec(Ctxt, PrecR))), - D3 = case erl_syntax:record_access_type(Node) of - none -> - D2; - T -> - beside(beside(floating(text("#")), - lay(T, reset_prec(Ctxt))), - D2) - end, + T = erl_syntax:record_access_type(Node), + D3 = beside(beside(floating(text("#")), + lay(T, reset_prec(Ctxt))), + D2), maybe_parentheses(beside(D1, D3), Prec, Ctxt); record_expr -> @@ -926,15 +919,6 @@ lay_2(Node, Ctxt) -> D2 = lay(erl_syntax:map_field_exact_value(Node), Ctxt1), par([D1, floating(text(":=")), D2], Ctxt1#ctxt.break_indent); - rule -> - %% Comments on the name will be repeated; cf. - %% `function'. - Ctxt1 = reset_prec(Ctxt), - D1 = lay(erl_syntax:rule_name(Node), Ctxt1), - D2 = lay_clauses(erl_syntax:rule_clauses(Node), - {rule, D1}, Ctxt1), - beside(D2, floating(text("."))); - size_qualifier -> Ctxt1 = set_prec(Ctxt, max_prec()), D1 = lay(erl_syntax:size_qualifier_body(Node), Ctxt1), @@ -1073,10 +1057,6 @@ make_fun_clause_head(N, P, Ctxt) -> beside(N, D) end. -make_rule_clause(N, P, G, B, Ctxt) -> - D = make_fun_clause_head(N, P, Ctxt), - append_rule_body(B, append_guard(G, D, Ctxt), Ctxt). - make_case_clause(P, G, B, Ctxt) -> append_clause_body(B, append_guard(G, P, Ctxt), Ctxt). @@ -1092,9 +1072,6 @@ make_if_clause(_P, G, B, Ctxt) -> append_clause_body(B, D, Ctxt) -> append_clause_body(B, D, floating(text(" ->")), Ctxt). -append_rule_body(B, D, Ctxt) -> - append_clause_body(B, D, floating(text(" :-")), Ctxt). - append_clause_body(B, D, S, Ctxt) -> sep([beside(D, S), nest(Ctxt#ctxt.break_indent, B)]). 
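With the Mnemosyne rule clauses and the two-argument record_access gone, a record field access node always carries a record type. A small sketch (record and field names are made up) of building such a node with the three-argument constructor and rendering it:

record_access_demo() ->
    Expr = erl_syntax:record_access(erl_syntax:variable('R'),
                                    erl_syntax:atom(person),
                                    erl_syntax:atom(name)),
    %% the analysis result now always includes the record name
    {record_access, {person, name}} = erl_syntax_lib:analyze_record_expr(Expr),
    erl_prettypr:format(Expr).   % should render as "R#person.name"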
diff --git a/lib/syntax_tools/src/erl_syntax.erl b/lib/syntax_tools/src/erl_syntax.erl index de271d7f2f..3f2a3e05dd 100644 --- a/lib/syntax_tools/src/erl_syntax.erl +++ b/lib/syntax_tools/src/erl_syntax.erl @@ -254,7 +254,6 @@ receive_expr_action/1, receive_expr_clauses/1, receive_expr_timeout/1, - record_access/2, record_access/3, record_access_argument/1, record_access_field/1, @@ -271,10 +270,6 @@ record_index_expr/2, record_index_expr_field/1, record_index_expr_type/1, - rule/2, - rule_arity/1, - rule_clauses/1, - rule_name/1, size_qualifier/2, size_qualifier_argument/1, size_qualifier_body/1, @@ -472,19 +467,16 @@ %% <td>record_field</td> %% </tr><tr> %% <td>record_index_expr</td> -%% <td>rule</td> %% <td>size_qualifier</td> %% <td>string</td> -%% </tr><tr> %% <td>text</td> +%% </tr><tr> %% <td>try_expr</td> %% <td>tuple</td> %% <td>underscore</td> -%% </tr><tr> %% <td>variable</td> +%% </tr><tr> %% <td>warning_marker</td> -%% <td></td> -%% <td></td> %% </tr> %% </table></center> %% @@ -540,7 +532,6 @@ %% @see record_expr/2 %% @see record_field/2 %% @see record_index_expr/2 -%% @see rule/2 %% @see size_qualifier/2 %% @see string/1 %% @see text/1 @@ -607,10 +598,8 @@ type(Node) -> {record, _, _, _, _} -> record_expr; {record, _, _, _} -> record_expr; {record_field, _, _, _, _} -> record_access; - {record_field, _, _, _} -> record_access; {record_index, _, _, _} -> record_index_expr; {remote, _, _, _} -> module_qualifier; - {rule, _, _, _, _} -> rule; {'try', _, _, _, _, _} -> try_expr; {tuple, _, _} -> tuple; _ -> @@ -693,10 +682,9 @@ is_leaf(Node) -> %% <td>`comment'</td> %% <td>`error_marker'</td> %% <td>`eof_marker'</td> -%% <td>`form_list'</td> %% </tr><tr> +%% <td>`form_list'</td> %% <td>`function'</td> -%% <td>`rule'</td> %% <td>`warning_marker'</td> %% <td>`text'</td> %% </tr> @@ -709,7 +697,6 @@ is_leaf(Node) -> %% @see error_marker/1 %% @see form_list/1 %% @see function/2 -%% @see rule/2 %% @see warning_marker/1 -spec is_form(syntaxTree()) -> boolean(). @@ -722,7 +709,6 @@ is_form(Node) -> eof_marker -> true; error_marker -> true; form_list -> true; - rule -> true; warning_marker -> true; text -> true; _ -> false @@ -3480,7 +3466,6 @@ module_qualifier_body(Node) -> %% @see function_clauses/1 %% @see function_arity/1 %% @see is_form/1 -%% @see rule/2 %% Don't use the name 'function' for this record, to avoid confusion with %% the tuples on the form {function,Name,Arity} used by erl_parse. @@ -4310,49 +4295,32 @@ record_index_expr_field(Node) -> %% ===================================================================== -%% @equiv record_access(Argument, none, Field) - --spec record_access(syntaxTree(), syntaxTree()) -> syntaxTree(). - -record_access(Argument, Field) -> - record_access(Argument, none, Field). - - -%% ===================================================================== -%% @doc Creates an abstract record field access expression. If -%% `Type' is not `none', the result represents -%% "<code><em>Argument</em>#<em>Type</em>.<em>Field</em></code>". -%% -%% If `Type' is `none', the result represents -%% "<code><em>Argument</em>.<em>Field</em></code>". This is a special -%% form only allowed within Mnemosyne queries. +%% @doc Creates an abstract record field access expression. The result +%% represents "<code><em>Argument</em>#<em>Type</em>.<em>Field</em></code>". 
%% -%% @see record_access/2 %% @see record_access_argument/1 %% @see record_access_type/1 %% @see record_access_field/1 %% @see record_expr/3 -record(record_access, {argument :: syntaxTree(), - type :: 'none' | syntaxTree(), + type :: syntaxTree(), field :: syntaxTree()}). %% type(Node) = record_access %% data(Node) = #record_access{argument :: Argument, type :: Type, %% field :: Field} %% -%% Argument = Field = syntaxTree() -%% Type = none | syntaxTree() +%% Argument = Type = Field = syntaxTree() %% %% `erl_parse' representation: %% %% {record_field, Pos, Argument, Type, Field} -%% {record_field, Pos, Argument, Field} %% %% Argument = Field = erl_parse() %% Type = atom() --spec record_access(syntaxTree(), 'none' | syntaxTree(), syntaxTree()) -> +-spec record_access(syntaxTree(), syntaxTree(), syntaxTree()) -> syntaxTree(). record_access(Argument, Type, Field) -> @@ -4365,16 +4333,11 @@ revert_record_access(Node) -> Argument = record_access_argument(Node), Type = record_access_type(Node), Field = record_access_field(Node), - if Type =:= none -> - {record_field, Pos, Argument, Field}; - true -> - case type(Type) of - atom -> - {record_field, Pos, - Argument, concrete(Type), Field}; - _ -> - Node - end + case type(Type) of + atom -> + {record_field, Pos, Argument, concrete(Type), Field}; + _ -> + Node end. @@ -4387,8 +4350,6 @@ revert_record_access(Node) -> record_access_argument(Node) -> case unwrap(Node) of - {record_field, _, Argument, _} -> - Argument; {record_field, _, Argument, _, _} -> Argument; Node1 -> @@ -4397,21 +4358,14 @@ record_access_argument(Node) -> %% ===================================================================== -%% @doc Returns the type subtree of a `record_access' node, -%% if any. If `Node' represents -%% "<code><em>Argument</em>.<em>Field</em></code>", `none' -%% is returned, otherwise if `Node' represents -%% "<code><em>Argument</em>#<em>Type</em>.<em>Field</em></code>", -%% `Type' is returned. +%% @doc Returns the type subtree of a `record_access' node. %% %% @see record_access/3 --spec record_access_type(syntaxTree()) -> 'none' | syntaxTree(). +-spec record_access_type(syntaxTree()) -> syntaxTree(). record_access_type(Node) -> case unwrap(Node) of - {record_field, _, _, _} -> - none; {record_field, Pos, _, Type, _} -> set_pos(atom(Type), Pos); Node1 -> @@ -4428,8 +4382,6 @@ record_access_type(Node) -> record_access_field(Node) -> case unwrap(Node) of - {record_field, _, _, Field} -> - Field; {record_field, _, _, _, Field} -> Field; Node1 -> @@ -4808,117 +4760,6 @@ binary_comp_body(Node) -> %% ===================================================================== -%% @doc Creates an abstract Mnemosyne rule. If `Clauses' is -%% `[C1, ..., Cn]', the results represents -%% "<code><em>Name</em> <em>C1</em>; ...; <em>Name</em> -%% <em>Cn</em>.</code>". More exactly, if each `Ci' -%% represents "<code>(<em>Pi1</em>, ..., <em>Pim</em>) <em>Gi</em> -> -%% <em>Bi</em></code>", then the result represents -%% "<code><em>Name</em>(<em>P11</em>, ..., <em>P1m</em>) <em>G1</em> :- -%% <em>B1</em>; ...; <em>Name</em>(<em>Pn1</em>, ..., <em>Pnm</em>) -%% <em>Gn</em> :- <em>Bn</em>.</code>". Rules are source code forms. -%% -%% @see rule_name/1 -%% @see rule_clauses/1 -%% @see rule_arity/1 -%% @see is_form/1 -%% @see function/2 - --record(rule, {name :: syntaxTree(), clauses :: [syntaxTree()]}). 
- -%% type(Node) = rule -%% data(Node) = #rule{name :: Name, clauses :: Clauses} -%% -%% Name = syntaxTree() -%% Clauses = [syntaxTree()] -%% -%% (See `function' for notes on why the arity is not stored.) -%% -%% `erl_parse' representation: -%% -%% {rule, Pos, Name, Arity, Clauses} -%% -%% Name = atom() -%% Arity = integer() -%% Clauses = [Clause] \ [] -%% Clause = {clause, ...} -%% -%% where the number of patterns in each clause should be equal to -%% the integer `Arity'; see `clause' for documentation on -%% `erl_parse' clauses. - --spec rule(syntaxTree(), [syntaxTree()]) -> syntaxTree(). - -rule(Name, Clauses) -> - tree(rule, #rule{name = Name, clauses = Clauses}). - -revert_rule(Node) -> - Name = rule_name(Node), - Clauses = [revert_clause(C) || C <- rule_clauses(Node)], - Pos = get_pos(Node), - case type(Name) of - atom -> - A = rule_arity(Node), - {rule, Pos, concrete(Name), A, Clauses}; - _ -> - Node - end. - - -%% ===================================================================== -%% @doc Returns the name subtree of a `rule' node. -%% -%% @see rule/2 - --spec rule_name(syntaxTree()) -> syntaxTree(). - -rule_name(Node) -> - case unwrap(Node) of - {rule, Pos, Name, _, _} -> - set_pos(atom(Name), Pos); - Node1 -> - (data(Node1))#rule.name - end. - -%% ===================================================================== -%% @doc Returns the list of clause subtrees of a `rule' node. -%% -%% @see rule/2 - --spec rule_clauses(syntaxTree()) -> [syntaxTree()]. - -rule_clauses(Node) -> - case unwrap(Node) of - {rule, _, _, _, Clauses} -> - Clauses; - Node1 -> - (data(Node1))#rule.clauses - end. - -%% ===================================================================== -%% @doc Returns the arity of a `rule' node. The result is the -%% number of parameter patterns in the first clause of the rule; -%% subsequent clauses are ignored. -%% -%% An exception is thrown if `rule_clauses(Node)' returns -%% an empty list, or if the first element of that list is not a syntax -%% tree `C' of type `clause' such that -%% `clause_patterns(C)' is a nonempty list. -%% -%% @see rule/2 -%% @see rule_clauses/1 -%% @see clause/3 -%% @see clause_patterns/1 - --spec rule_arity(syntaxTree()) -> arity(). - -rule_arity(Node) -> - %% Note that this never accesses the arity field of - %% `erl_parse' rule nodes. - length(clause_patterns(hd(rule_clauses(Node)))). - - -%% ===================================================================== %% @doc Creates an abstract generator. The result represents %% "<code><em>Pattern</em> <- <em>Body</em></code>". 
%% @@ -6403,8 +6244,6 @@ revert_root(Node) -> revert_record_expr(Node); record_index_expr -> revert_record_index_expr(Node); - rule -> - revert_rule(Node); string -> revert_string(Node); try_expr -> @@ -6661,15 +6500,9 @@ subtrees(T) -> receive_expr_action(T)] end; record_access -> - case record_access_type(T) of - none -> - [[record_access_argument(T)], - [record_access_field(T)]]; - R -> - [[record_access_argument(T)], - [R], - [record_access_field(T)]] - end; + [[record_access_argument(T)], + [record_access_type(T)], + [record_access_field(T)]]; record_expr -> case record_expr_argument(T) of none -> @@ -6690,8 +6523,6 @@ subtrees(T) -> record_index_expr -> [[record_index_expr_type(T)], [record_index_expr_field(T)]]; - rule -> - [[rule_name(T)], rule_clauses(T)]; size_qualifier -> [[size_qualifier_body(T)], [size_qualifier_argument(T)]]; @@ -6786,8 +6617,6 @@ make_tree(parentheses, [[E]]) -> parentheses(E); make_tree(prefix_expr, [[F], [A]]) -> prefix_expr(F, A); make_tree(receive_expr, [C]) -> receive_expr(C); make_tree(receive_expr, [C, [E], A]) -> receive_expr(C, E, A); -make_tree(record_access, [[E], [F]]) -> - record_access(E, F); make_tree(record_access, [[E], [T], [F]]) -> record_access(E, T, F); make_tree(record_expr, [[T], F]) -> record_expr(T, F); @@ -6796,7 +6625,6 @@ make_tree(record_field, [[N]]) -> record_field(N); make_tree(record_field, [[N], [E]]) -> record_field(N, E); make_tree(record_index_expr, [[T], [F]]) -> record_index_expr(T, F); -make_tree(rule, [[N], C]) -> rule(N, C); make_tree(size_qualifier, [[N], [A]]) -> size_qualifier(N, A); make_tree(try_expr, [B, C, H, A]) -> try_expr(B, C, H, A); make_tree(tuple, [E]) -> tuple(E). diff --git a/lib/syntax_tools/src/erl_syntax_lib.erl b/lib/syntax_tools/src/erl_syntax_lib.erl index 2f0488abec..5b5b18d15b 100644 --- a/lib/syntax_tools/src/erl_syntax_lib.erl +++ b/lib/syntax_tools/src/erl_syntax_lib.erl @@ -35,8 +35,7 @@ analyze_function_name/1, analyze_implicit_fun/1, analyze_import_attribute/1, analyze_module_attribute/1, analyze_record_attribute/1, analyze_record_expr/1, - analyze_record_field/1, analyze_rule/1, - analyze_wild_attribute/1, annotate_bindings/1, + analyze_record_field/1, analyze_wild_attribute/1, annotate_bindings/1, annotate_bindings/2, fold/3, fold_subtrees/3, foldl_listlist/3, function_name_expansions/1, is_fail_expr/1, limit/2, limit/3, map/2, map_subtrees/2, mapfold/3, mapfold_subtrees/3, @@ -527,8 +526,6 @@ vann(Tree, Env) -> vann_try_expr(Tree, Env); function -> vann_function(Tree, Env); - rule -> - vann_rule(Tree, Env); fun_expr -> vann_fun_expr(Tree, Env); list_comp -> @@ -569,15 +566,6 @@ vann_function(Tree, Env) -> Bound = [], {ann_bindings(Tree1, Env, Bound, Free), Bound, Free}. -vann_rule(Tree, Env) -> - Cs = erl_syntax:rule_clauses(Tree), - {Cs1, {_, Free}} = vann_clauses(Cs, Env), - N = erl_syntax:rule_name(Tree), - {N1, _, _} = vann(N, Env), - Tree1 = rewrite(Tree, erl_syntax:rule(N1, Cs1)), - Bound = [], - {ann_bindings(Tree1, Env, Bound, Free), Bound, Free}. - vann_fun_expr(Tree, Env) -> Cs = erl_syntax:fun_expr_clauses(Tree), {Cs1, {_, Free}} = vann_clauses(Cs, Env), @@ -946,7 +934,7 @@ is_fail_expr(E) -> %% %% Forms = syntaxTree() | [syntaxTree()] %% Key = attributes | errors | exports | functions | imports -%% | module | records | rules | warnings +%% | module | records | warnings %% %% @doc Analyzes a sequence of "program forms". The given %% `Forms' may be a single syntax tree of type @@ -1047,16 +1035,6 @@ is_fail_expr(E) -> %% that each record name occurs at most once in the list. 
The %% order of listing is not defined.</dd> %% -%% <dt>`{rules, Rules}'</dt> -%% <dd><ul> -%% <li>`Rules = [{atom(), integer()}]'</li> -%% </ul> -%% `Rules' is a list of the names of the rules that are -%% defined in `Forms' (cf. -%% `analyze_rule/1'). We do not guarantee that each -%% name occurs at most once in the list. The order of listing is -%% not defined.</dd> -%% %% <dt>`{warnings, Warnings}'</dt> %% <dd><ul> %% <li>`Warnings = [term()]'</li> @@ -1074,12 +1052,11 @@ is_fail_expr(E) -> %% @see analyze_import_attribute/1 %% @see analyze_record_attribute/1 %% @see analyze_function/1 -%% @see analyze_rule/1 %% @see erl_syntax:error_marker_info/1 %% @see erl_syntax:warning_marker_info/1 -type key() :: 'attributes' | 'errors' | 'exports' | 'functions' | 'imports' - | 'module' | 'records' | 'rules' | 'warnings'. + | 'module' | 'records' | 'warnings'. -type info_pair() :: {key(), term()}. -spec analyze_forms(erl_syntax:forms()) -> [info_pair()]. @@ -1099,8 +1076,6 @@ collect_form(Node, Info) -> Info; {function, Name} -> finfo_add_function(Name, Info); - {rule, Name} -> - finfo_add_rule(Name, Info); {error_marker, Data} -> finfo_add_error(Data, Info); {warning_marker, Data} -> @@ -1136,8 +1111,7 @@ collect_attribute(_, {N, V}, Info) -> records = [] :: [{atom(), [{atom(), field_default()}]}], errors = [] :: [term()], warnings = [] :: [term()], - functions = [] :: [{atom(), arity()}], - rules = [] :: [{atom(), arity()}]}). + functions = [] :: [{atom(), arity()}]}). -type field_default() :: 'none' | erl_syntax:syntaxTree(). @@ -1183,9 +1157,6 @@ finfo_add_warning(R, Info) -> finfo_add_function(F, Info) -> Info#forms{functions = [F | Info#forms.functions]}. -finfo_add_rule(F, Info) -> - Info#forms{rules = [F | Info#forms.rules]}. - finfo_to_list(Info) -> [{Key, Value} || {Key, {value, Value}} <- @@ -1197,8 +1168,7 @@ finfo_to_list(Info) -> {records, list_value(Info#forms.records)}, {errors, list_value(Info#forms.errors)}, {warnings, list_value(Info#forms.warnings)}, - {functions, list_value(Info#forms.functions)}, - {rules, list_value(Info#forms.rules)} + {functions, list_value(Info#forms.functions)} ]]. list_value([]) -> @@ -1229,10 +1199,6 @@ list_value(List) -> %% %% <dd>where `Info = analyze_function(Node)'.</dd> %% -%% <dt>`{rule, Info}'</dt> -%% -%% <dd>where `Info = analyze_rule(Node)'.</dd> -%% %% <dt>`{warning_marker, Info}'</dt> %% %% <dd>where `Info = @@ -1245,7 +1211,6 @@ list_value(List) -> %% %% @see analyze_attribute/1 %% @see analyze_function/1 -%% @see analyze_rule/1 %% @see erl_syntax:is_form/1 %% @see erl_syntax:error_marker_info/1 %% @see erl_syntax:warning_marker_info/1 @@ -1258,8 +1223,6 @@ analyze_form(Node) -> {attribute, analyze_attribute(Node)}; function -> {function, analyze_function(Node)}; - rule -> - {rule, analyze_rule(Node)}; error_marker -> {error_marker, erl_syntax:error_marker_info(Node)}; warning_marker -> @@ -1669,7 +1632,7 @@ analyze_record_attribute_tuple(Node) -> %% <dt>`record_expr':</dt> %% <dd>`{atom(), [{atom(), Value}]}'</dd> %% <dt>`record_access':</dt> -%% <dd>`{atom(), atom()} | atom()'</dd> +%% <dd>`{atom(), atom()}'</dd> %% <dt>`record_index_expr':</dt> %% <dd>`{atom(), atom()}'</dd> %% </dl> @@ -1679,9 +1642,7 @@ analyze_record_attribute_tuple(Node) -> %% listed in the order they appear. (See %% `analyze_record_field/1' for details on the field %% descriptors). 
For a `record_access' node, -%% `Info' represents the record name and the field name (or -%% if the record name is not included, only the field name; this is -%% allowed only in Mnemosyne-query syntax). For a +%% `Info' represents the record name and the field name. For a %% `record_index_expr' node, `Info' represents the %% record name and the name field name. %% @@ -1713,18 +1674,14 @@ analyze_record_expr(Node) -> F = erl_syntax:record_access_field(Node), case erl_syntax:type(F) of atom -> - case erl_syntax:record_access_type(Node) of - none -> - {record_access, erl_syntax:atom_value(F)}; - A -> - case erl_syntax:type(A) of - atom -> - {record_access, - {erl_syntax:atom_value(A), - erl_syntax:atom_value(F)}}; - _ -> - throw(syntax_error) - end + A = erl_syntax:record_access_type(Node), + case erl_syntax:type(A) of + atom -> + {record_access, + {erl_syntax:atom_value(A), + erl_syntax:atom_value(F)}}; + _ -> + throw(syntax_error) end; _ -> throw(syntax_error) @@ -1835,8 +1792,6 @@ analyze_file_attribute(Node) -> %% The evaluation throws `syntax_error' if %% `Node' does not represent a well-formed function %% definition. -%% -%% @see analyze_rule/1 -spec analyze_function(erl_syntax:syntaxTree()) -> {atom(), arity()}. @@ -1857,37 +1812,6 @@ analyze_function(Node) -> %% ===================================================================== -%% @spec analyze_rule(Node::syntaxTree()) -> {atom(), integer()} -%% -%% @doc Returns the name and arity of a Mnemosyne rule. The result is a -%% pair `{Name, A}' if `Node' represents a rule -%% "`Name(<em>P_1</em>, ..., <em>P_A</em>) :- ...'". -%% -%% The evaluation throws `syntax_error' if -%% `Node' does not represent a well-formed Mnemosyne -%% rule. -%% -%% @see analyze_function/1 - --spec analyze_rule(erl_syntax:syntaxTree()) -> {atom(), arity()}. - -analyze_rule(Node) -> - case erl_syntax:type(Node) of - rule -> - N = erl_syntax:rule_name(Node), - case erl_syntax:type(N) of - atom -> - {erl_syntax:atom_value(N), - erl_syntax:rule_arity(Node)}; - _ -> - throw(syntax_error) - end; - _ -> - throw(syntax_error) - end. - - -%% ===================================================================== %% @spec analyze_implicit_fun(Node::syntaxTree()) -> FunctionName %% %% FunctionName = atom() | {atom(), integer()} diff --git a/lib/syntax_tools/src/erl_tidy.erl b/lib/syntax_tools/src/erl_tidy.erl index 38e0c2099b..db7f0939a3 100644 --- a/lib/syntax_tools/src/erl_tidy.erl +++ b/lib/syntax_tools/src/erl_tidy.erl @@ -792,16 +792,11 @@ keep_form(Form, Used, Opts) -> N = erl_syntax_lib:analyze_function(Form), case sets:is_element(N, Used) of false -> - report_removed_def("function", N, Form, Opts), - false; - true -> - true - end; - rule -> - N = erl_syntax_lib:analyze_rule(Form), - case sets:is_element(N, Used) of - false -> - report_removed_def("rule", N, Form, Opts), + {F, A} = N, + File = proplists:get_value(file, Opts, ""), + report({File, erl_syntax:get_pos(Form), + "removing unused function `~w/~w'."}, + [F, A], Opts), false; true -> true @@ -823,12 +818,6 @@ keep_form(Form, Used, Opts) -> true end. -report_removed_def(Type, {N, A}, Form, Opts) -> - File = proplists:get_value(file, Opts, ""), - report({File, erl_syntax:get_pos(Form), - "removing unused ~s `~w/~w'."}, - [Type, N, A], Opts). 
- collect_functions(Forms) -> lists:foldl( fun (F, {Names, Defs}) -> @@ -837,10 +826,6 @@ collect_functions(Forms) -> N = erl_syntax_lib:analyze_function(F), {sets:add_element(N, Names), dict:store(N, {F, []}, Defs)}; - rule -> - N = erl_syntax_lib:analyze_rule(F), - {sets:add_element(N, Names), - dict:store(N, {F, []}, Defs)}; _ -> {Names, Defs} end @@ -855,11 +840,6 @@ update_forms([F | Fs], Defs, Imports, Opts) -> {F1, Fs1} = dict:fetch(N, Defs), [F1 | lists:reverse(Fs1)] ++ update_forms(Fs, Defs, Imports, Opts); - rule -> - N = erl_syntax_lib:analyze_rule(F), - {F1, Fs1} = dict:fetch(N, Defs), - [F1 | lists:reverse(Fs1)] ++ update_forms(Fs, Defs, Imports, - Opts); attribute -> [update_attribute(F, Imports, Opts) | update_forms(Fs, Defs, Imports, Opts)]; diff --git a/lib/syntax_tools/src/igor.erl b/lib/syntax_tools/src/igor.erl index 0420508f2a..eac5af5540 100644 --- a/lib/syntax_tools/src/igor.erl +++ b/lib/syntax_tools/src/igor.erl @@ -1713,8 +1713,6 @@ transform(Tree, Env, St) -> transform_function(Tree, Env, St); implicit_fun -> transform_implicit_fun(Tree, Env, St); - rule -> - transform_rule(Tree, Env, St); record_expr -> transform_record(Tree, Env, St); record_index_expr -> @@ -1778,27 +1776,6 @@ renaming_note(Name) -> rename_atom(Node, Atom) -> rewrite(Node, erl_syntax:atom(Atom)). -%% Renaming Mnemosyne rules (just like function definitions) - -transform_rule(T, Env, St) -> - {T1, St1} = default_transform(T, Env, St), - F = erl_syntax_lib:analyze_rule(T1), - {V, Text} = case (Env#code.map)(F) of - F -> - %% Not renamed - {none, []}; - {Atom, _Arity} -> - %% Renamed - Cs = erl_syntax:rule_clauses(T1), - N = rename_atom( - erl_syntax:rule_name(T1), - Atom), - T2 = rewrite(T1, - erl_syntax:rule(N, Cs)), - {{value, T2}, renaming_note(Atom)} - end, - {maybe_modified(V, T1, 2, Text, Env), St1}. - %% Renaming "implicit fun" expressions (done quietly). transform_implicit_fun(T, Env, St) -> diff --git a/lib/test_server/src/erl2html2.erl b/lib/test_server/src/erl2html2.erl index b9b45cda25..7cfaa2c325 100644 --- a/lib/test_server/src/erl2html2.erl +++ b/lib/test_server/src/erl2html2.erl @@ -22,11 +22,11 @@ %%%------------------------------------------------------------------ -module(erl2html2). --export([convert/2, convert/3]). +-export([convert/3, convert/4]). -convert([], _Dest) -> % Fake clause. +convert([], _Dest, _InclPath) -> % Fake clause. ok; -convert(File, Dest) -> +convert(File, Dest, InclPath) -> %% The generated code uses the BGCOLOR attribute in the %% BODY tag, which wasn't valid until HTML 3.2. Also, %% good HTML should either override all colour attributes @@ -48,12 +48,12 @@ convert(File, Dest) -> "</head>\n\n" "<body bgcolor=\"white\" text=\"black\"" " link=\"blue\" vlink=\"purple\" alink=\"red\">\n"], - convert(File, Dest, Header). + convert(File, Dest, InclPath, Header). -convert(File, Dest, Header) -> +convert(File, Dest, InclPath, Header) -> %% statistics(runtime), - case parse_file(File) of + case parse_file(File, InclPath) of {ok,Functions} -> %% {_, Time1} = statistics(runtime), %% io:format("Parsed file in ~.2f Seconds.~n",[Time1/1000]), @@ -92,8 +92,8 @@ convert(File, Dest, Header) -> %%% Use expanded preprocessor directives if possible (epp). Only if %%% this fails, fall back on using non-expanded code (epp_dodger). 
-parse_file(File) -> - case epp:open(File, [], []) of +parse_file(File, InclPath) -> + case epp:open(File, InclPath, []) of {ok,Epp} -> try parse_preprocessed_file(Epp,File,false) of Forms -> @@ -145,13 +145,15 @@ parse_non_preprocessed_file(File) -> parse_non_preprocessed_file(Epp, File, Location) -> case epp_dodger:parse_form(Epp, Location) of {ok,Tree,Location1} -> - case erl_syntax:revert(Tree) of + try erl_syntax:revert(Tree) of {function,L,F,A,[_|C]} -> Clauses = [{clause,CL} || {clause,CL,_,_,_} <- C], [{atom_to_list(F),A,L} | Clauses] ++ parse_non_preprocessed_file(Epp, File, Location1); _ -> parse_non_preprocessed_file(Epp, File, Location1) + catch + _:_ -> parse_non_preprocessed_file(Epp, File, Location1) end; {error,_E,Location1} -> parse_non_preprocessed_file(Epp, File, Location1); diff --git a/lib/test_server/src/test_server.erl b/lib/test_server/src/test_server.erl index 9192a76a17..0e685a2d8a 100644 --- a/lib/test_server/src/test_server.erl +++ b/lib/test_server/src/test_server.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2014. All Rights Reserved. +%% Copyright Ericsson AB 1996-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -130,7 +130,8 @@ cover_compile(CoverInfo=#cover{app=App,excl=all,incl=Include,cross=Cross}) -> io:fwrite("done\n\n",[]), {ok,CoverInfo#cover{mods=Include}} end; -cover_compile(CoverInfo=#cover{app=App,excl=Exclude,incl=Include,cross=Cross}) -> +cover_compile(CoverInfo=#cover{app=App,excl=Exclude, + incl=Include,cross=Cross}) -> CrossMods = lists:flatmap(fun({_,M}) -> M end,Cross), case code:lib_dir(App) of {error,bad_name} -> @@ -177,68 +178,35 @@ module_names(Beams) -> do_cover_compile(Modules) -> cover:start(), - pmap1(fun(M) -> do_cover_compile1(M) end,lists:usort(Modules)), + Sticky = prepare_cover_compile(Modules,[]), + R = cover:compile_beam(Modules), + [warn_compile(Error) || Error <- R,element(1,Error)=/=ok], + [code:stick_mod(M) || M <- Sticky], ok. -do_cover_compile1(M) -> +warn_compile({error,{Reason,Module}}) -> + io:fwrite("\nWARNING: Could not cover compile ~ts: ~p\n", + [Module,{error,Reason}]). + +%% Make sure all modules are loaded and unstick if sticky +prepare_cover_compile([M|Ms],Sticky) -> case {code:is_sticky(M),code:is_loaded(M)} of {true,_} -> code:unstick_mod(M), - case cover:compile_beam(M) of - {ok,_} -> - ok; - Error -> - io:fwrite("\nWARNING: Could not cover compile ~w: ~p\n", - [M,Error]) - end, - code:stick_mod(M); + prepare_cover_compile(Ms,[M|Sticky]); {false,false} -> case code:load_file(M) of {module,_} -> - do_cover_compile1(M); + prepare_cover_compile([M|Ms],Sticky); Error -> - io:fwrite("\nWARNING: Could not load ~w: ~p\n",[M,Error]) + io:fwrite("\nWARNING: Could not load ~w: ~p\n",[M,Error]), + prepare_cover_compile(Ms,Sticky) end; {false,_} -> - case cover:compile_beam(M) of - {ok,_} -> - ok; - Error -> - io:fwrite("\nWARNING: Could not cover compile ~w: ~p\n", - [M,Error]) - end - end. - -pmap1(Fun,List) -> - NTot = length(List), - NProcs = erlang:system_info(schedulers) * 2, - NPerProc = (NTot div NProcs) + 1, - - {[],Pids} = - lists:foldr( - fun(_,{L,Ps}) -> - {L1,L2} = if length(L)>=NPerProc -> lists:split(NPerProc,L); - true -> {L,[]} % last chunk - end, - {P,_Ref} = - spawn_monitor(fun() -> - exit(lists:map(Fun,L1)) - end), - {L2,[P|Ps]} - end, - {List,[]}, - lists:seq(1,NProcs)), - collect(Pids,[]). 
- -collect([],Acc) -> - lists:append(Acc); -collect([Pid|Pids],Acc) -> - receive - {'DOWN', _Ref, process, Pid, Result} -> - %% collect(lists:delete(Pid,Pids),[Result|Acc]) - collect(Pids,[Result|Acc]) - end. - + prepare_cover_compile(Ms,Sticky) + end; +prepare_cover_compile([],Sticky) -> + Sticky. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% cover_analyse(Dir,#cover{level=Analyse,mods=Modules,stop=Stop) -> @@ -268,45 +236,40 @@ collect([Pid|Pids],Acc) -> %% after the test is completed. cover_analyse(Dir,#cover{level=Analyse,mods=Modules,stop=Stop}) -> io:fwrite(user, "Cover analysing... ", []), - DetailsFun = + {ATFOk,ATFFail} = case Analyse of details -> case cover:export(filename:join(Dir,"all.coverdata")) of ok -> - fun(M) -> - OutFile = filename:join(Dir, - atom_to_list(M) ++ - ".COVER.html"), - case cover:analyse_to_file(M,OutFile,[html]) of - {ok,_} -> - {file,OutFile}; - Error -> - Error - end - end; + {result,Ok1,Fail1} = + cover:analyse_to_file(Modules,[{outdir,Dir},html]), + {lists:map(fun(OutFile) -> + M = list_to_atom( + filename:basename( + filename:rootname(OutFile, + ".COVER.html") + ) + ), + {M,{file,OutFile}} + end, Ok1), + lists:map(fun({Reason,M}) -> + {M,{error,Reason}} + end, Fail1)}; Error -> - fun(_) -> Error end + {[],lists:map(fun(M) -> {M,Error} end, Modules)} end; overview -> case cover:export(filename:join(Dir,"all.coverdata")) of ok -> - fun(_) -> undefined end; + {[],lists:map(fun(M) -> {M,undefined} end, Modules)}; Error -> - fun(_) -> Error end + {[],lists:map(fun(M) -> {M,Error} end, Modules)} end end, - R = pmap2( - fun(M) -> - case cover:analyse(M,module) of - {ok,{M,{Cov,NotCov}}} -> - {M,{Cov,NotCov,DetailsFun(M)}}; - Err -> - io:fwrite(user, - "\nWARNING: Analysis failed for ~w. Reason: ~p\n", - [M,Err]), - {M,Err} - end - end, Modules), + {result,AOk,AFail} = cover:analyse(Modules,module), + R0 = merge_analysis_results(AOk,ATFOk++ATFFail,[]) ++ + [{M,{error,Reason}} || {Reason,M} <- AFail], + R = lists:sort(R0), io:fwrite(user, "done\n\n", []), case Stop of @@ -319,19 +282,15 @@ cover_analyse(Dir,#cover{level=Analyse,mods=Modules,stop=Stop}) -> end, R. -pmap2(Fun,List) -> - Collector = self(), - Pids = lists:map(fun(E) -> - spawn(fun() -> - Collector ! {res,self(),Fun(E)} - end) - end, List), - lists:map(fun(Pid) -> - receive - {res,Pid,Res} -> - Res - end - end, Pids). +merge_analysis_results([{M,{Cov,NotCov}}|T],ATF,Acc) -> + case lists:keytake(M,1,ATF) of + {value,{_,R},ATF1} -> + merge_analysis_results(T,ATF1,[{M,{Cov,NotCov,R}}|Acc]); + false -> + merge_analysis_results(T,ATF,Acc) + end; +merge_analysis_results([],_,Acc) -> + Acc. do_cover_for_node(Node,CoverFunc) -> do_cover_for_node(Node,CoverFunc,true). @@ -779,7 +738,9 @@ do_call_end_conf(Starter,Mod,Func,Data,Conf,TVal) -> EndConfApply = fun() -> timetrap(TVal), - case catch apply(Mod,end_per_testcase,[Func,Conf]) of + case catch apply(Mod, + end_per_testcase, + [Func,Conf]) of {'EXIT',Why} -> timer:sleep(1), group_leader() ! 
{printout,12, @@ -817,7 +778,9 @@ spawn_fw_call(Mod,{init_per_testcase,Func},CurrConf,Pid, Skip = {skip,{failed,{Mod,init_per_testcase,Why}}}, %% if init_per_testcase fails, the test case %% should be skipped - case catch do_end_tc_call(Mod,Func, {Pid,Skip,[CurrConf]}, Why) of + case catch do_end_tc_call(Mod,Func, + {Pid,Skip,[CurrConf]}, + Why) of {'EXIT',FwEndTCErr} -> exit({fw_notify_done,end_tc,FwEndTCErr}); _ -> @@ -984,12 +947,15 @@ run_test_case_eval(Mod, Func, Args0, Name, Ref, RunInit, NewResult = do_end_tc_call(Mod,Func, {{error,Reason},[Conf]}, {fail,Reason}), {{0,NewResult},Where,[]}; - Skip = {skip,_Reason} -> - NewResult = do_end_tc_call(Mod,Func, {Skip,Args0}, Skip), + Skip = {SkipType,_Reason} when SkipType == skip; + SkipType == skipped -> + NewResult = do_end_tc_call(Mod,Func, + {Skip,Args0}, Skip), {{0,NewResult},Where,[]}; AutoSkip = {auto_skip,_Reason} -> %% special case where a conf case "pretends" to be skipped - NewResult = do_end_tc_call(Mod,Func, {AutoSkip,Args0}, AutoSkip), + NewResult = + do_end_tc_call(Mod,Func, {AutoSkip,Args0}, AutoSkip), {{0,NewResult},Where,[]} end, exit({Ref,Time,Value,Loc,Opts}). @@ -1000,10 +966,12 @@ run_test_case_eval1(Mod, Func, Args, Name, RunInit, TCCallback) -> set_tc_state(init_per_testcase, hd(Args)), ensure_timetrap(Args), case init_per_testcase(Mod, Func, Args) of - Skip = {skip,Reason} -> + Skip = {SkipType,Reason} when SkipType == skip; + SkipType == skipped -> Line = get_loc(), Conf = [{tc_status,{skipped,Reason}}|hd(Args)], - NewRes = do_end_tc_call(Mod,Func, {Skip,[Conf]}, Skip), + NewRes = do_end_tc_call(Mod,Func, + {Skip,[Conf]}, Skip), {{0,NewRes},Line,[]}; {skip_and_save,Reason,SaveCfg} -> Line = get_loc(), @@ -1021,11 +989,12 @@ run_test_case_eval1(Mod, Func, Args, Name, RunInit, TCCallback) -> {{0,NewRes},[{Mod,Func}],[]}; {ok,NewConf} -> %% call user callback function if defined - NewConf1 = user_callback(TCCallback, Mod, Func, init, NewConf), + NewConf1 = + user_callback(TCCallback, Mod, Func, init, NewConf), %% save current state in controller loop set_tc_state(tc, NewConf1), %% execute the test case - {{T,Return},Loc} = {ts_tc(Mod, Func, [NewConf1]),get_loc()}, + {{T,Return},Loc} = {ts_tc(Mod,Func,[NewConf1]), get_loc()}, {EndConf,TSReturn,FWReturn} = case Return of {E,TCError} when E=='EXIT' ; E==failed -> @@ -1041,30 +1010,39 @@ run_test_case_eval1(Mod, Func, Args, Name, RunInit, TCCallback) -> {[{tc_status,{skipped,Why}}, {save_config,SaveCfg}|NewConf1], Skip,Skip}; - {skip,Why} -> - {[{tc_status,{skipped,Why}}|NewConf1],Return,Return}; + {SkipType,Why} when SkipType == skip; + SkipType == skipped -> + {[{tc_status,{skipped,Why}}|NewConf1],Return, + Return}; _ -> {[{tc_status,ok}|NewConf1],Return,ok} end, %% call user callback function if defined - EndConf1 = user_callback(TCCallback, Mod, Func, 'end', EndConf), + EndConf1 = + user_callback(TCCallback, Mod, Func, 'end', EndConf), %% update current state in controller loop {FWReturn1,TSReturn1,EndConf2} = case end_per_testcase(Mod, Func, EndConf1) of SaveCfg1={save_config,_} -> - {FWReturn,TSReturn,[SaveCfg1|lists:keydelete(save_config,1, - EndConf1)]}; + {FWReturn,TSReturn, + [SaveCfg1|lists:keydelete(save_config,1, + EndConf1)]}; {fail,ReasonToFail} -> %% user has failed the testcase - fw_error_notify(Mod, Func, EndConf1, ReasonToFail), - {{error,ReasonToFail},{failed,ReasonToFail},EndConf1}; - {failed,{_,end_per_testcase,_}} = Failure when FWReturn == ok -> + fw_error_notify(Mod, Func, EndConf1, + ReasonToFail), + {{error,ReasonToFail}, + 
{failed,ReasonToFail}, + EndConf1}; + {failed,{_,end_per_testcase,_}} = Failure when + FWReturn == ok -> %% unexpected termination in end_per_testcase %% report this as the result to the framework {Failure,TSReturn,EndConf1}; _ -> - %% test case result should be reported to framework - %% no matter the status of end_per_testcase + %% test case result should be reported to + %% framework no matter the status of + %% end_per_testcase {FWReturn,TSReturn,EndConf1} end, %% clear current state in controller loop @@ -1131,7 +1109,8 @@ process_return_val([Return], M,F,A, Loc, Final) when is_list(Return) -> ReturnTags = [skip,skip_and_save,save_config,comment,return_group_result], %% check if all elements in the list are valid end conf return value tuples case lists:all(fun(Val) when is_tuple(Val) -> - lists:any(fun(T) -> T == element(1, Val) end, ReturnTags); + lists:any(fun(T) -> T == element(1, Val) end, + ReturnTags); (ok) -> true; (_) -> @@ -1165,14 +1144,19 @@ process_return_val1([Failed={E,TCError}|_], M,F,A=[Args], Loc, _, SaveOpts) NewReturn -> {NewReturn,SaveOpts} end; -process_return_val1([SaveCfg={save_config,_}|Opts], M,F,[Args], Loc, Final, SaveOpts) -> +process_return_val1([SaveCfg={save_config,_}|Opts], M,F,[Args], + Loc, Final, SaveOpts) -> process_return_val1(Opts, M,F,[[SaveCfg|Args]], Loc, Final, SaveOpts); -process_return_val1([{skip_and_save,Why,SaveCfg}|Opts], M,F,[Args], Loc, _, SaveOpts) -> - process_return_val1(Opts, M,F,[[{save_config,SaveCfg}|Args]], Loc, {skip,Why}, SaveOpts); -process_return_val1([GR={return_group_result,_}|Opts], M,F,A, Loc, Final, SaveOpts) -> +process_return_val1([{skip_and_save,Why,SaveCfg}|Opts], M,F,[Args], + Loc, _, SaveOpts) -> + process_return_val1(Opts, M,F,[[{save_config,SaveCfg}|Args]], + Loc, {skip,Why}, SaveOpts); +process_return_val1([GR={return_group_result,_}|Opts], M,F,A, + Loc, Final, SaveOpts) -> process_return_val1(Opts, M,F,A, Loc, Final, [GR|SaveOpts]); -process_return_val1([RetVal={Tag,_}|Opts], M,F,A, Loc, _, SaveOpts) when Tag==skip; - Tag==comment -> +process_return_val1([RetVal={Tag,_}|Opts], M,F,A, + Loc, _, SaveOpts) when Tag==skip; + Tag==comment -> process_return_val1(Opts, M,F,A, Loc, RetVal, SaveOpts); process_return_val1([_|Opts], M,F,A, Loc, Final, SaveOpts) -> process_return_val1(Opts, M,F,A, Loc, Final, SaveOpts); @@ -1186,7 +1170,8 @@ process_return_val1([], M,F,A, _Loc, Final, SaveOpts) -> user_callback(undefined, _, _, _, Args) -> Args; -user_callback({CBMod,CBFunc}, Mod, Func, InitOrEnd, [Args]) when is_list(Args) -> +user_callback({CBMod,CBFunc}, Mod, Func, InitOrEnd, + [Args]) when is_list(Args) -> case catch apply(CBMod, CBFunc, [InitOrEnd,Mod,Func,Args]) of Args1 when is_list(Args1) -> [Args1]; @@ -1778,7 +1763,8 @@ timetrap(Timeout0, TimeToReport0, TCPid, MultAndScale = {Multiplier,Scale}) -> put(test_server_timetraps,[{Handle,TCPid,{TimeToReport,Scale}}]); List -> List1 = lists:delete({infinity,TCPid,{infinity,false}}, List), - put(test_server_timetraps,[{Handle,TCPid,{TimeToReport,Scale}}|List1]) + put(test_server_timetraps,[{Handle,TCPid, + {TimeToReport,Scale}}|List1]) end, Handle. 
@@ -1837,7 +1823,9 @@ time_ms(Ms, _, _) when is_integer(Ms) -> Ms; time_ms(infinity, _, _) -> infinity; time_ms(Fun, TCPid, MultAndScale) when is_function(Fun) -> time_ms_apply(Fun, TCPid, MultAndScale); -time_ms({M,F,A}=MFA, TCPid, MultAndScale) when is_atom(M), is_atom(F), is_list(A) -> +time_ms({M,F,A}=MFA, TCPid, MultAndScale) when is_atom(M), + is_atom(F), + is_list(A) -> time_ms_apply(MFA, TCPid, MultAndScale); time_ms(Other, _, _) -> exit({invalid_time_format,Other}). diff --git a/lib/test_server/src/test_server_ctrl.erl b/lib/test_server/src/test_server_ctrl.erl index af8921fe75..488f38d05d 100644 --- a/lib/test_server/src/test_server_ctrl.erl +++ b/lib/test_server/src/test_server_ctrl.erl @@ -1927,15 +1927,20 @@ html_possibly_convert(Src, SrcInfo, Dest) -> {ok,DestInfo} when DestInfo#file_info.mtime >= SrcInfo#file_info.mtime -> ok; % dest file up to date _ -> + InclPath = case application:get_env(test_server, include) of + {ok,Incls} -> Incls; + _ -> [] + end, + OutDir = get(test_server_log_dir_base), case test_server_sup:framework_call(get_html_wrapper, ["Module "++Src,false, OutDir,undefined, encoding(Src)], "") of Empty when (Empty == "") ; (element(2,Empty) == "") -> - erl2html2:convert(Src, Dest); + erl2html2:convert(Src, Dest, InclPath); {_,Header,_} -> - erl2html2:convert(Src, Dest, Header) + erl2html2:convert(Src, Dest, InclPath, Header) end end. diff --git a/lib/test_server/src/ts_make.erl b/lib/test_server/src/ts_make.erl index 8727f7ebfe..9cb77ecb12 100644 --- a/lib/test_server/src/ts_make.erl +++ b/lib/test_server/src/ts_make.erl @@ -67,7 +67,17 @@ get_port_data(Port, Last0, Complete0) -> end. update_last([C|Rest], Line, true) -> - io:put_chars(list_to_binary(Line)), %% Utf-8 list to utf-8 binary + try + %% Utf-8 list to utf-8 binary + %% (e.g. we assume utf-8 bytes from port) + io:put_chars(list_to_binary(Line)) + catch + error:badarg -> + %% io:put_chars/1 badarged + %% this likely means we had unicode code points + %% in our bytes buffer (e.g warning from gcc with åäö) + io:put_chars(unicode:characters_to_binary(Line)) + end, io:nl(), update_last([C|Rest], [], false); update_last([$\r|Rest], Result, Complete) -> diff --git a/lib/test_server/test/erl2html2_SUITE.erl b/lib/test_server/test/erl2html2_SUITE.erl index 37c2b74d8e..908985c879 100644 --- a/lib/test_server/test/erl2html2_SUITE.erl +++ b/lib/test_server/test/erl2html2_SUITE.erl @@ -161,7 +161,7 @@ convert_module(Mod,Config) -> Src = filename:join(DataDir,Mod++".erl"), Dst = filename:join(PrivDir,Mod++".erl.html"), io:format("<a href=\"~s\">~s</a>\n",[Src,filename:basename(Src)]), - ok = erl2html2:convert(Src, Dst, "<html><body>"), + ok = erl2html2:convert(Src, Dst, [], "<html><body>"), io:format("<a href=\"~s\">~s</a>\n",[Dst,filename:basename(Dst)]), {Src,Dst}. 
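The new InclPath argument threaded through erl2html2:convert/3,4 above ends up in epp:open/3, so -include and -include_lib directives in the converted module can be expanded. A sketch of a direct call (all paths below are hypothetical):

InclPath = ["/ldisk/tests/my_app/include"],
ok = erl2html2:convert("/ldisk/tests/my_app/test/my_SUITE.erl",
                       "/ldisk/tests/logs/my_SUITE.erl.html",
                       InclPath).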
diff --git a/lib/tools/doc/src/cover.xml b/lib/tools/doc/src/cover.xml index 07ffa65e3d..914baa7977 100644 --- a/lib/tools/doc/src/cover.xml +++ b/lib/tools/doc/src/cover.xml @@ -5,7 +5,7 @@ <header> <copyright> <year>2001</year> - <year>2013</year> + <year>2015</year> <holder>Ericsson AB, All Rights Reserved</holder> </copyright> <legalnotice> @@ -138,17 +138,18 @@ </desc> </func> <func> - <name>compile(ModFile) -> Result</name> - <name>compile(ModFile, Options) -> Result</name> - <name>compile_module(ModFile) -> Result</name> - <name>compile_module(ModFile, Options) -> Result</name> - <fsummary>Compile a module for Cover analysis.</fsummary> + <name>compile(ModFiles) -> Result | [Result]</name> + <name>compile(ModFiles, Options) -> Result | [Result]</name> + <name>compile_module(ModFiles) -> Result | [Result]</name> + <name>compile_module(ModFiles, Options) -> Result | [Result]</name> + <fsummary>Compile one or more modules for Cover analysis.</fsummary> <type> + <v>ModFiles = ModFile | [ModFile]</v> <v>ModFile = Module | File</v> <v> Module = atom()</v> <v> File = string()</v> <v>Options = [Option]</v> - <v> Option = {i,Dir} | {d,Macro} | {d,Macro,Value}</v> + <v> Option = {i,Dir} | {d,Macro} | {d,Macro,Value} | export_all</v> <d>See <c>compile:file/2.</c></d> <v>Result = {ok,Module} | {error,File} | {error,not_main_node}</v> </type> @@ -165,6 +166,9 @@ returns <c>{ok,Module}</c>. Otherwise the function returns <c>{error,File}</c>. Errors and warnings are printed as they occur.</p> + <p>If a list of <c>ModFiles</c> is given as input, a list + of <c>Result</c> will be returned. The order of the returned + list is undefined.</p> <p>Note that the internal database is (re-)initiated during the compilation, meaning any previously collected coverage data for the module will be lost.</p> @@ -194,9 +198,10 @@ </desc> </func> <func> - <name>compile_beam(ModFile) -> Result</name> - <fsummary>Compile a module for Cover analysis, using an existing beam.</fsummary> + <name>compile_beam(ModFiles) -> Result | [Result]</name> + <fsummary>Compile one or more modules for Cover analysis, using existing beam(s).</fsummary> <type> + <v>ModFiles = ModFile | [ModFile]</v> <v>ModFile = Module | BeamFile</v> <v> Module = atom()</v> <v> BeamFile = string()</v> @@ -229,6 +234,9 @@ returned.</p> <p><c>{error,BeamFile}</c> is returned if the compiled code can not be loaded on the node.</p> + <p>If a list of <c>ModFiles</c> is given as input, a list + of <c>Result</c> will be returned. 
The order of the returned + list is undefined.</p> </desc> </func> <func> @@ -251,16 +259,21 @@ </desc> </func> <func> - <name>analyse(Module) -> {ok,Answer} | {error,Error}</name> - <name>analyse(Module, Analysis) -> {ok,Answer} | {error,Error}</name> - <name>analyse(Module, Level) -> {ok,Answer} | {error,Error}</name> - <name>analyse(Module, Analysis, Level) -> {ok,Answer} | {error,Error}</name> - <fsummary>Analyse a Cover compiled module.</fsummary> + <name>analyse() -> {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Modules) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Analysis) -> {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Level) -> {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Modules, Analysis) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Modules, Level) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Analysis, Level) -> {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Modules, Analysis, Level) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name> + <fsummary>Analyse one or more Cover compiled modules.</fsummary> <type> - <v>Module = atom()</v> + <v>Modules = Module | [Module]</v> + <v>Module = atom() </v> <v>Analysis = coverage | calls</v> <v>Level = line | clause | function | module</v> - <v>Answer = {Module,Value} | [{Item,Value}]</v> + <v>OneResult = {ok,{Module,Value}} | {ok,[{Item,Value}]} | {error, Error}</v> <v> Item = Line | Clause | Function</v> <v> Line = {M,N}</v> <v> Clause = {M,F,A,C}</v> @@ -269,49 +282,67 @@ <v> N = A = C = integer()</v> <v> Value = {Cov,NotCov} | Calls</v> <v> Cov = NotCov = Calls = integer()</v> - <v>Error = {not_cover_compiled,Module} | not_main_node</v> + <v> Error = {not_cover_compiled,Module}</v> + <v>Ok = [{Module,Value}] | [{Item,Value}]</v> + <v>Fail = [Error]</v> </type> <desc> - <p>Performs analysis of a Cover compiled module <c>Module</c>, as + <p>Performs analysis of one or more Cover compiled modules, as specified by <c>Analysis</c> and <c>Level</c> (see above), by examining the contents of the internal database.</p> <p><c>Analysis</c> defaults to <c>coverage</c> and <c>Level</c> defaults to <c>function</c>.</p> - <p>If <c>Module</c> is not Cover compiled, the function returns - <c>{error,{not_cover_compiled,Module}}</c>.</p> - <p>HINT: It is possible to issue multiple analyse_to_file commands at - the same time. </p> + <p>If <c>Modules</c> is an atom (one module), the return will + be <c>OneResult</c>, else the return will be + <c>{result,Ok,Fail}</c>.</p> + <p>If <c>Modules</c> is not given, all modules that have data + in the cover data table, are analysed. 
Note that this + includes both cover compiled modules and imported + modules.</p> + <p>If a given module is not Cover compiled, this is indicated + by the error reason <c>{not_cover_compiled,Module}</c>.</p> </desc> </func> <func> - <name>analyse_to_file(Module) -> </name> - <name>analyse_to_file(Module,Options) -> </name> - <name>analyse_to_file(Module, OutFile) -> </name> - <name>analyse_to_file(Module, OutFile, Options) -> {ok,OutFile} | {error,Error}</name> - <fsummary>Detailed coverage analysis of a Cover compiled module.</fsummary> + <name>analyse_to_file() -> {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse_to_file(Modules) -> Answer | {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse_to_file(Options) -> {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse_to_file(Modules,Options) -> Answer | {result,Ok,Fail} | {error,not_main_node}</name> + <fsummary>Detailed coverage analysis of one or more Cover compiled modules.</fsummary> <type> + <v>Modules = Module | [Module]</v> <v>Module = atom()</v> - <v>OutFile = string()</v> + <v>OutFile = OutDir = string()</v> <v>Options = [Option]</v> - <v>Option = html</v> - <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | no_source_code_found | not_main_node</v> + <v>Option = html | {outfile,OutFile} | {outdir,OutDir}</v> + <v>Answer = {ok,OutFile} | {error,Error}</v> + <v>Ok = [OutFile]</v> + <v>Fail = [Error]</v> + <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | {no_source_code_found,Module}</v> <v> File = string()</v> <v> Reason = term()</v> </type> <desc> - <p>Makes a copy <c>OutFile</c> of the source file for a module - <c>Module</c>, where it for each executable line is specified + <p>Makes copies of the source file for the given modules, + where it for each executable line is specified how many times it has been executed.</p> <p>The output file <c>OutFile</c> defaults to <c>Module.COVER.out</c>, or <c>Module.COVER.html</c> if the option <c>html</c> was used.</p> - <p>If <c>Module</c> is not Cover compiled, the function returns - <c>{error,{not_cover_compiled,Module}}</c>.</p> + <p>If <c>Modules</c> is an atom (one module), the return will + be <c>Answer</c>, else the return will be a + list, <c>{result,Ok,Fail}</c>.</p> + <p>If <c>Modules</c> is not given, all modules that have data + in the cover data table, are analysed. Note that this + includes both cover compiled modules and imported + modules.</p> + <p>If a module is not Cover compiled, this is indicated by the + error reason <c>{not_cover_compiled,Module}</c>.</p> <p>If the source file and/or the output file cannot be opened using <c>file:open/2</c>, the function returns <c>{error,{file,File,Reason}}</c> where <c>File</c> is the file name and <c>Reason</c> is the error reason.</p> - <p>If the module was cover compiled from the <c>.beam</c> + <p>If a module was cover compiled from the <c>.beam</c> file, i.e. using <c>compile_beam/1</c> or <c>compile_beam_directory/0,1</c>, it is assumed that the source code can be found in the same directory as the @@ -322,10 +353,8 @@ joining <c>../src</c> and the tail of the compiled path below a trailing <c>src</c> component, then the compiled path itself. - If no source code is found, - <c>{error,no_source_code_found}</c> is returned.</p> - <p>HINT: It is possible to issue multiple analyse_to_file commands at - the same time. 
</p> + If no source code is found, this is indicated by the error reason + <c>{no_source_code_found,Module}</c>.</p> </desc> </func> <func> @@ -339,7 +368,7 @@ <v>OutFile = string()</v> <v>Options = [Option]</v> <v>Option = html</v> - <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | no_source_code_found | not_main_node</v> + <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | {no_source_code_found,Module} | not_main_node</v> <v> File = string()</v> <v> Reason = term()</v> </type> diff --git a/lib/tools/src/cover.erl b/lib/tools/src/cover.erl index 31754015f7..6c32c47069 100644 --- a/lib/tools/src/cover.erl +++ b/lib/tools/src/cover.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2001-2013. All Rights Reserved. +%% Copyright Ericsson AB 2001-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -77,8 +77,11 @@ compile/1, compile/2, compile_module/1, compile_module/2, compile_directory/0, compile_directory/1, compile_directory/2, compile_beam/1, compile_beam_directory/0, compile_beam_directory/1, - analyse/1, analyse/2, analyse/3, analyze/1, analyze/2, analyze/3, + analyse/0, analyse/1, analyse/2, analyse/3, + analyze/0, analyze/1, analyze/2, analyze/3, + analyse_to_file/0, analyse_to_file/1, analyse_to_file/2, analyse_to_file/3, + analyze_to_file/0, analyze_to_file/1, analyze_to_file/2, analyze_to_file/3, async_analyse_to_file/1,async_analyse_to_file/2, async_analyse_to_file/3, async_analyze_to_file/1, @@ -109,6 +112,7 @@ line = '_' % integer() }). -define(BUMP_REC_NAME,bump). +-define(CHUNK_SIZE, 20000). -record(vars, {module, % atom() Module name @@ -181,10 +185,11 @@ start(Node) when is_atom(Node) -> start(Nodes) -> call({start_nodes,remove_myself(Nodes,[])}). -%% compile(ModFile) -> -%% compile(ModFile, Options) -> -%% compile_module(ModFile) -> Result -%% compile_module(ModFile, Options) -> Result +%% compile(ModFiles) -> +%% compile(ModFiles, Options) -> +%% compile_module(ModFiles) -> Result +%% compile_module(ModFiles, Options) -> Result +%% ModFiles = ModFile | [ModFile] %% ModFile = Module | File %% Module = atom() %% File = string() @@ -198,18 +203,27 @@ compile(ModFile, Options) -> compile_module(ModFile) when is_atom(ModFile); is_list(ModFile) -> compile_module(ModFile, []). -compile_module(Module, Options) when is_atom(Module), is_list(Options) -> - compile_module(atom_to_list(Module), Options); -compile_module(File, Options) when is_list(File), is_list(Options) -> - WithExt = case filename:extension(File) of - ".erl" -> - File; - _ -> - File++".erl" - end, - AbsFile = filename:absname(WithExt), - [R] = compile_modules([AbsFile], Options), - R. +compile_module(ModFile, Options) when is_atom(ModFile); + is_list(ModFile), is_integer(hd(ModFile)) -> + [R] = compile_module([ModFile], Options), + R; +compile_module(ModFiles, Options) when is_list(Options) -> + AbsFiles = + [begin + File = + case ModFile of + _ when is_atom(ModFile) -> atom_to_list(ModFile); + _ when is_list(ModFile) -> ModFile + end, + WithExt = case filename:extension(File) of + ".erl" -> + File; + _ -> + File++".erl" + end, + filename:absname(WithExt) + end || ModFile <- ModFiles], + compile_modules(AbsFiles, Options). 
%% compile_directory() -> %% compile_directory(Dir) -> @@ -240,13 +254,14 @@ compile_directory(Dir, Options) when is_list(Dir), is_list(Options) -> compile_modules(Files,Options) -> Options2 = filter_options(Options), - compile_modules(Files,Options2,[]). + %% compile_modules(Files,Options2,[]). + call({compile, Files, Options2}). -compile_modules([File|Files], Options, Result) -> - R = call({compile, File, Options}), - compile_modules(Files,Options,[R|Result]); -compile_modules([],_Opts,Result) -> - lists:reverse(Result). +%% compile_modules([File|Files], Options, Result) -> +%% R = call({compile, File, Options}), +%% compile_modules(Files,Options,[R|Result]); +%% compile_modules([],_Opts,Result) -> +%% lists:reverse(Result). filter_options(Options) -> lists:filter(fun(Option) -> @@ -264,30 +279,17 @@ filter_options(Options) -> %% ModFile - see compile/1 %% Result - see compile/1 %% Reason = non_existing | already_cover_compiled -compile_beam(Module) when is_atom(Module) -> - case code:which(Module) of - non_existing -> +compile_beam(ModFile0) when is_atom(ModFile0); + is_list(ModFile0), is_integer(hd(ModFile0)) -> + case compile_beams([ModFile0]) of + [{error,{non_existing,_}}] -> + %% Backwards compatibility {error,non_existing}; - ?TAG -> - compile_beam(Module,?TAG); - File -> - compile_beam(Module,File) + [Result] -> + Result end; -compile_beam(File) when is_list(File) -> - {WithExt,WithoutExt} - = case filename:rootname(File,".beam") of - File -> - {File++".beam",File}; - Rootname -> - {File,Rootname} - end, - AbsFile = filename:absname(WithExt), - Module = list_to_atom(filename:basename(WithoutExt)), - compile_beam(Module,AbsFile). - -compile_beam(Module,File) -> - call({compile_beam,Module,File}). - +compile_beam(ModFiles) when is_list(ModFiles) -> + compile_beams(ModFiles). %% compile_beam_directory(Dir) -> [Result] | {error,Reason} @@ -312,19 +314,52 @@ compile_beam_directory(Dir) when is_list(Dir) -> Error end. -compile_beams(Files) -> - compile_beams(Files,[]). -compile_beams([File|Files],Result) -> - R = compile_beam(File), - compile_beams(Files,[R|Result]); -compile_beams([],Result) -> - lists:reverse(Result). +compile_beams(ModFiles0) -> + ModFiles = get_mods_and_beams(ModFiles0,[]), + call({compile_beams,ModFiles}). - -%% analyse(Module) -> -%% analyse(Module, Analysis) -> -%% analyse(Module, Level) -> -%% analyse(Module, Analysis, Level) -> {ok,Answer} | {error,Error} +get_mods_and_beams([Module|ModFiles],Acc) when is_atom(Module) -> + case code:which(Module) of + non_existing -> + get_mods_and_beams(ModFiles,[{error,{non_existing,Module}}|Acc]); + File -> + get_mods_and_beams([{Module,File}|ModFiles],Acc) + end; +get_mods_and_beams([File|ModFiles],Acc) when is_list(File) -> + {WithExt,WithoutExt} + = case filename:rootname(File,".beam") of + File -> + {File++".beam",File}; + Rootname -> + {File,Rootname} + end, + AbsFile = filename:absname(WithExt), + Module = list_to_atom(filename:basename(WithoutExt)), + get_mods_and_beams([{Module,AbsFile}|ModFiles],Acc); +get_mods_and_beams([{Module,File}|ModFiles],Acc) -> + %% Check for duplicates + case lists:keyfind(Module,2,Acc) of + {ok,Module,File} -> + %% Duplicate, but same file so ignore + get_mods_and_beams(ModFiles,Acc); + {ok,Module,_OtherFile} -> + %% Duplicate and differnet file - error + get_mods_and_beams(ModFiles,[{error,{duplicate,Module}}|Acc]); + _ -> + get_mods_and_beams(ModFiles,[{ok,Module,File}|Acc]) + end; +get_mods_and_beams([],Acc) -> + lists:reverse(Acc). 
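get_mods_and_beams/2 is what lets compile_beam/1 take module names, .beam paths or ready-made {Module,File} pairs in one list, reporting non-existing modules and conflicting duplicates per entry; only the old single-atom call still collapses that into the bare {error,non_existing} for backwards compatibility. A sketch with hypothetical names, assuming the beams were compiled with debug_info:

demo_compile_beam() ->
    Results = cover:compile_beam([m1, "ebin/m2.beam", nosuch_mod]),
    %% Each entry yields {ok,Module} or a tagged error such as
    %% {error,{non_existing,nosuch_mod}} or {error,{no_abstract_code,File}}.
    lists:partition(fun({ok, _}) -> true;
                       (_)       -> false
                    end,
                    Results).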
+ + +%% analyse(Modules) -> +%% analyse(Analysis) -> +%% analyse(Level) -> +%% analyse(Modules, Analysis) -> +%% analyse(Modules, Level) -> +%% analyse(Analysis, Level) +%% analyse(Modules, Analysis, Level) -> {ok,Answer} | {error,Error} +%% Modules = Module | [Module] %% Module = atom() %% Analysis = coverage | calls %% Level = line | clause | function | module @@ -337,48 +372,74 @@ compile_beams([],Result) -> %% N = A = C = integer() %% Value = {Cov,NotCov} | Calls %% Cov = NotCov = Calls = integer() -%% Error = {not_cover_compiled,Module} +%% Error = {not_cover_compiled,Module} | not_main_node +-define(is_analysis(__A__), + (__A__=:=coverage orelse __A__=:=calls)). +-define(is_level(__L__), + (__L__=:=line orelse __L__=:=clause orelse + __L__=:=function orelse __L__=:=module)). +analyse() -> + analyse('_'). + +analyse(Analysis) when ?is_analysis(Analysis) -> + analyse('_', Analysis); +analyse(Level) when ?is_level(Level) -> + analyse('_', Level); analyse(Module) -> analyse(Module, coverage). -analyse(Module, Analysis) when Analysis=:=coverage; Analysis=:=calls -> + +analyse(Analysis, Level) when ?is_analysis(Analysis) andalso + ?is_level(Level) -> + analyse('_', Analysis, Level); +analyse(Module, Analysis) when ?is_analysis(Analysis) -> analyse(Module, Analysis, function); -analyse(Module, Level) when Level=:=line; Level=:=clause; Level=:=function; - Level=:=module -> +analyse(Module, Level) when ?is_level(Level) -> analyse(Module, coverage, Level). -analyse(Module, Analysis, Level) when is_atom(Module), - Analysis=:=coverage; Analysis=:=calls, - Level=:=line; Level=:=clause; - Level=:=function; Level=:=module -> + +analyse(Module, Analysis, Level) when ?is_analysis(Analysis), + ?is_level(Level) -> call({{analyse, Analysis, Level}, Module}). +analyze() -> analyse( ). analyze(Module) -> analyse(Module). analyze(Module, Analysis) -> analyse(Module, Analysis). analyze(Module, Analysis, Level) -> analyse(Module, Analysis, Level). -%% analyse_to_file(Module) -> -%% analyse_to_file(Module, Options) -> -%% analyse_to_file(Module, OutFile) -> -%% analyse_to_file(Module, OutFile, Options) -> {ok,OutFile} | {error,Error} +%% analyse_to_file() -> +%% analyse_to_file(Modules) -> +%% analyse_to_file(Modules, Options) -> +%% Modules = Module | [Module] %% Module = atom() %% OutFile = string() %% Options = [Option] -%% Option = html +%% Option = html | {outfile,filename()} | {outdir,dirname()} %% Error = {not_cover_compiled,Module} | no_source_code_found | %% {file,File,Reason} %% File = string() %% Reason = term() -analyse_to_file(Module) when is_atom(Module) -> - analyse_to_file(Module, outfilename(Module,[]), []). -analyse_to_file(Module, []) when is_atom(Module) -> - analyse_to_file(Module, outfilename(Module,[]), []); -analyse_to_file(Module, Options) when is_atom(Module), - is_list(Options), is_atom(hd(Options)) -> - analyse_to_file(Module, outfilename(Module,Options), Options); -analyse_to_file(Module, OutFile) when is_atom(Module), is_list(OutFile) -> - analyse_to_file(Module, OutFile, []). -analyse_to_file(Module, OutFile, Options) when is_atom(Module), is_list(OutFile) -> - call({{analyse_to_file, OutFile, Options}, Module}). - +%% +%% Kept for backwards compatibility: +%% analyse_to_file(Modules, OutFile) -> +%% analyse_to_file(Modules, OutFile, Options) -> {ok,OutFile} | {error,Error} +analyse_to_file() -> + analyse_to_file('_'). +analyse_to_file(Arg) -> + case is_options(Arg) of + true -> + analyse_to_file('_',Arg); + false -> + analyse_to_file(Arg,[]) + end. 
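With the ?is_analysis/?is_level guard macros, analyse/1,2 can tell an analysis or level apart from a module name, and '_' stands for every module with data in the cover table. A sketch of the resulting call shapes, assuming a cover-compiled module m1 (the name is made up); single-module calls keep returning {ok,Answer}, whereas the wildcard forms return {result,Ok,Fail}:

demo_analyse() ->
    %% One module, explicit analysis and level: {ok,Answer}, as before.
    {ok, _LineCalls} = cover:analyse(m1, calls, line),
    %% Only an analysis or a level given: all analysed modules are included.
    {result, _PerFunction, _Fail1} = cover:analyse(calls),
    {result, _PerModule, _Fail2} = cover:analyse(coverage, module),
    %% No arguments: defaults to coverage per function, for all modules.
    {result, _All, _Fail3} = cover:analyse(),
    ok.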
+analyse_to_file(Module, OutFile) when is_list(OutFile), is_integer(hd(OutFile)) -> + %% Kept for backwards compatibility + analyse_to_file(Module, [{outfile,OutFile}]); +analyse_to_file(Module, Options) when is_list(Options) -> + call({{analyse_to_file, Options}, Module}). +analyse_to_file(Module, OutFile, Options) when is_list(OutFile) -> + %% Kept for backwards compatibility + analyse_to_file(Module,[{outfile,OutFile}|Options]). + +analyze_to_file() -> analyse_to_file(). analyze_to_file(Module) -> analyse_to_file(Module). analyze_to_file(Module, OptOrOut) -> analyse_to_file(Module, OptOrOut). analyze_to_file(Module, OutFile, Options) -> @@ -391,6 +452,15 @@ async_analyse_to_file(Module, OutFileOrOpts) -> async_analyse_to_file(Module, OutFile, Options) -> do_spawn(?MODULE, analyse_to_file, [Module, OutFile, Options]). +is_options([html]) -> + true; % this is not 100% safe - could be a module named html... +is_options([html|Opts]) -> + is_options(Opts); +is_options([{Opt,_}|_]) when Opt==outfile; Opt==outdir -> + true; +is_options(_) -> + false. + do_spawn(M,F,A) -> spawn_link(fun() -> case apply(M,F,A) of @@ -408,13 +478,16 @@ async_analyze_to_file(Module, OutFileOrOpts) -> async_analyze_to_file(Module, OutFile, Options) -> async_analyse_to_file(Module, OutFile, Options). -outfilename(Module,Opts) -> - case lists:member(html,Opts) of - true -> - atom_to_list(Module)++".COVER.html"; - false -> - atom_to_list(Module)++".COVER.out" - end. +outfilename(undefined, Module, HTML) -> + outfilename(Module, HTML); +outfilename(OutDir, Module, HTML) -> + filename:join(OutDir, outfilename(Module, HTML)). + +outfilename(Module, true) -> + atom_to_list(Module)++".COVER.html"; +outfilename(Module, false) -> + atom_to_list(Module)++".COVER.out". + %% export(File) %% export(File,Module) -> ok | {error,Reason} @@ -559,7 +632,7 @@ init_main(Starter) -> ,{write_concurrency, true} ]), ets:new(?COVER_CLAUSE_TABLE, [set, public, named_table]), - ets:new(?BINARY_TABLE, [set, named_table]), + ets:new(?BINARY_TABLE, [set, public, named_table]), ets:new(?COLLECTION_TABLE, [set, public, named_table]), ets:new(?COLLECTION_CLAUSE_TABLE, [set, public, named_table]), net_kernel:monitor_nodes(true), @@ -573,55 +646,19 @@ main_process_loop(State) -> reply(From, {ok,StartedNodes}), main_process_loop(State1); - {From, {compile, File, Options}} -> - case do_compile(File, Options) of - {ok, Module} -> - remote_load_compiled(State#main_state.nodes,[{Module,File}]), - reply(From, {ok, Module}), - Compiled = add_compiled(Module, File, - State#main_state.compiled), - Imported = remove_imported(Module,State#main_state.imported), - NewState = State#main_state{compiled = Compiled, - imported = Imported}, - %% This module (cover) could have been reloaded. Make - %% sure we run the new code. - ?MODULE:main_process_loop(NewState); - error -> - reply(From, {error, File}), - main_process_loop(State) - end; + {From, {compile, Files, Options}} -> + {R,S} = do_compile(Files, Options, State), + reply(From,R), + %% This module (cover) could have been reloaded. Make + %% sure we run the new code. 
+ ?MODULE:main_process_loop(S); - {From, {compile_beam, Module, BeamFile0}} -> - Compiled0 = State#main_state.compiled, - case get_beam_file(Module,BeamFile0,Compiled0) of - {ok,BeamFile} -> - UserOptions = get_compile_options(Module,BeamFile), - {Reply,Compiled} = - case do_compile_beam(Module,BeamFile,UserOptions) of - {ok, Module} -> - remote_load_compiled(State#main_state.nodes, - [{Module,BeamFile}]), - C = add_compiled(Module,BeamFile,Compiled0), - {{ok,Module},C}; - error -> - {{error, BeamFile}, Compiled0}; - {error,Reason} -> % no abstract code - {{error, {Reason, BeamFile}}, Compiled0} - end, - reply(From,Reply), - Imported = remove_imported(Module,State#main_state.imported), - NewState = State#main_state{compiled = Compiled, - imported = Imported}, - %% This module (cover) could have been reloaded. Make - %% sure we run the new code. - ?MODULE:main_process_loop(NewState); - {error,no_beam} -> - %% The module has first been compiled from .erl, and now - %% someone tries to compile it from .beam - reply(From, - {error,{already_cover_compiled,no_beam_found,Module}}), - main_process_loop(State) - end; + {From, {compile_beams, ModsAndFiles}} -> + {R,S} = do_compile_beams(ModsAndFiles,State), + reply(From,R), + %% This module (cover) could have been reloaded. Make + %% sure we run the new code. + ?MODULE:main_process_loop(S); {From, {export,OutFile,Module}} -> spawn(fun() -> @@ -706,6 +743,16 @@ main_process_loop(State) -> unregister(?SERVER), reply(From, ok); + {From, {{analyse, Analysis, Level}, '_'}} -> + R = analyse_all(Analysis, Level, State), + reply(From, R), + main_process_loop(State); + + {From, {{analyse, Analysis, Level}, Modules}} when is_list(Modules) -> + R = analyse_list(Modules, Analysis, Level, State), + reply(From, R), + main_process_loop(State); + {From, {{analyse, Analysis, Level}, Module}} -> S = try Loaded = is_loaded(Module, State), @@ -722,15 +769,23 @@ main_process_loop(State) -> end, main_process_loop(S); - {From, {{analyse_to_file, OutFile, Opts},Module}} -> + {From, {{analyse_to_file, Opts},'_'}} -> + R = analyse_all_to_file(Opts, State), + reply(From,R), + main_process_loop(State); + + {From, {{analyse_to_file, Opts},Modules}} when is_list(Modules) -> + R = analyse_list_to_file(Modules, Opts, State), + reply(From,R), + main_process_loop(State); + + {From, {{analyse_to_file, Opts},Module}} -> S = try Loaded = is_loaded(Module, State), spawn(fun() -> - ?SPAWN_DBG(analyse_to_file, - {Module,OutFile, Opts}), + ?SPAWN_DBG(analyse_to_file,{Module,Opts}), do_parallel_analysis_to_file( - Module, OutFile, Opts, - Loaded, From, State) + Module, Opts, Loaded, From, State) end), State catch throw:Reason -> @@ -848,11 +903,15 @@ remote_process_loop(State) -> {remote,collect,Module,CollectorPid} -> self() ! {remote,collect,Module,CollectorPid, ?SERVER}; - {remote,collect,Module,CollectorPid,From} -> + {remote,collect,Modules0,CollectorPid,From} -> + Modules = case Modules0 of + '_' -> [M || {M,_} <- State#remote_state.compiled]; + _ -> Modules0 + end, spawn(fun() -> ?SPAWN_DBG(remote_collect, - {Module, CollectorPid, From}), - do_collect(Module, CollectorPid, From) + {Modules, CollectorPid, From}), + do_collect(Modules, CollectorPid, From) end), remote_process_loop(State); @@ -893,39 +952,51 @@ remote_process_loop(State) -> end. 
-do_collect(Module, CollectorPid, From) -> - AllMods = - case Module of - '_' -> ets:tab2list(?COVER_CLAUSE_TABLE); - _ -> ets:lookup(?COVER_CLAUSE_TABLE, Module) - end, - - %% Sending clause by clause in order to avoid large lists +do_collect(Modules, CollectorPid, From) -> pmap( - fun({_Mod,Clauses}) -> - lists:map(fun(Clause) -> - send_collected_data(Clause, CollectorPid) - end,Clauses) - end,AllMods), + fun(Module) -> + Pattern = {#bump{module=Module, _='_'}, '$1'}, + MatchSpec = [{Pattern,[{'=/=','$1',0}],['$_']}], + Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE), + send_chunks(Match, CollectorPid, []) + end,Modules), CollectorPid ! done, remote_reply(From, ok). -send_collected_data({M,F,A,C,_L}, CollectorPid) -> - Pattern = - {#bump{module=M, function=F, arity=A, clause=C}, '_'}, - Bumps = ets:match_object(?COVER_TABLE, Pattern), - %% Reset - lists:foreach(fun({Bump,_N}) -> - ets:insert(?COVER_TABLE, {Bump,0}) - end, - Bumps), - CollectorPid ! {chunk,Bumps}. +send_chunks('$end_of_table', _CollectorPid, Mons) -> + get_downs(Mons); +send_chunks({Chunk,Continuation}, CollectorPid, Mons) -> + Mon = spawn_monitor( + fun() -> + lists:foreach(fun({Bump,_N}) -> + ets:insert(?COVER_TABLE, {Bump,0}) + end, + Chunk) end), + send_chunk(CollectorPid,Chunk), + send_chunks(ets:select(Continuation), CollectorPid, [Mon|Mons]). + +send_chunk(CollectorPid,Chunk) -> + CollectorPid ! {chunk,Chunk,self()}, + receive continue -> ok end. + +get_downs([]) -> + ok; +get_downs(Mons) -> + receive + {'DOWN', Ref, _Type, Pid, _Reason} = Down -> + case lists:member({Pid,Ref},Mons) of + true -> + get_downs(lists:delete({Pid,Ref},Mons)); + false -> + %% This should be handled somewhere else + self() ! Down, + get_downs(Mons) + end + end. -reload_originals([{Module,_File}|Compiled]) -> - do_reload_original(Module), - reload_originals(Compiled); -reload_originals([]) -> - ok. +reload_originals(Compiled) -> + Modules = [M || {M,_} <- Compiled], + pmap(fun do_reload_original/1, Modules). do_reload_original(Module) -> case code:which(Module) of @@ -1068,15 +1139,40 @@ remote_load_compiled(_Nodes, [], [], _ModNum) -> ok; remote_load_compiled(Nodes, Compiled, Acc, ModNum) when Compiled == []; ModNum == ?MAX_MODS -> + RemoteLoadData = get_downs_r(Acc), lists:foreach( fun(Node) -> - remote_call(Node,{remote,load_compiled,Acc}) + remote_call(Node,{remote,load_compiled,RemoteLoadData}) end, Nodes), remote_load_compiled(Nodes, Compiled, [], 0); remote_load_compiled(Nodes, [MF | Rest], Acc, ModNum) -> remote_load_compiled( - Nodes, Rest, [get_data_for_remote_loading(MF) | Acc], ModNum + 1). + Nodes, Rest, + [spawn_job_r(fun() -> get_data_for_remote_loading(MF) end) | Acc], + ModNum + 1). + +spawn_job_r(Fun) -> + spawn_monitor(fun() -> exit(Fun()) end). + +get_downs_r([]) -> + []; +get_downs_r(Mons) -> + receive + {'DOWN', Ref, _Type, Pid, R={_,_,_,_}} -> + [R|get_downs_r(lists:delete({Pid,Ref},Mons))]; + {'DOWN', Ref, _Type, Pid, Reason} = Down -> + case lists:member({Pid,Ref},Mons) of + true -> + %% Something went really wrong - don't hang! + exit(Reason); + false -> + %% This should be handled somewhere else + self() ! Down, + get_downs_r(Mons) + end + end. + %% Read all data needed for loading a cover compiled module on a remote node %% Binary is the beam code for the module and InitialTable is the initial @@ -1113,11 +1209,11 @@ remote_reset(Module,Nodes) -> Nodes). 
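do_collect/3 now walks ?COVER_TABLE in ?CHUNK_SIZE slices using ets:select/3 and its continuation instead of matching bumps clause by clause. The traversal pattern on its own, as a small sketch with hypothetical arguments:

chunked_fold(Tab, MatchSpec, ChunkSize, Fun, Acc0) ->
    chunked_fold1(ets:select(Tab, MatchSpec, ChunkSize), Fun, Acc0).

chunked_fold1('$end_of_table', _Fun, Acc) ->
    Acc;
chunked_fold1({Chunk, Cont}, Fun, Acc) ->
    %% Handle one chunk fully before fetching the next, keeping memory
    %% bounded even for very large modules.
    chunked_fold1(ets:select(Cont), Fun, Fun(Chunk, Acc)).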
%% Collect data from remote nodes - used for analyse or stop(Node) -remote_collect(Module,Nodes,Stop) -> +remote_collect(Modules,Nodes,Stop) -> pmap(fun(Node) -> ?SPAWN_DBG(remote_collect, - {Module, Nodes, Stop}), - do_collection(Node, Module, Stop) + {Modules, Nodes, Stop}), + do_collection(Node, Modules, Stop) end, Nodes). @@ -1138,8 +1234,9 @@ do_collection(Node, Module, Stop) -> collector_proc() -> ?SPAWN_DBG(collector_proc, []), receive - {chunk,Chunk} -> + {chunk,Chunk,From} -> insert_in_collection_table(Chunk), + From ! continue, collector_proc(); done -> ok @@ -1259,6 +1356,19 @@ add_compiled(Module, File, [H|Compiled]) -> add_compiled(Module, File, []) -> [{Module,File}]. +are_loaded([Module|Modules], State, Loaded, Imported, Error) -> + try is_loaded(Module,State) of + {loaded,File} -> + are_loaded(Modules, State, [{Module,File}|Loaded], Imported, Error); + {imported,File,_} -> + are_loaded(Modules, State, Loaded, [{Module,File}|Imported], Error) + catch throw:_ -> + are_loaded(Modules, State, Loaded, Imported, + [{not_cover_compiled,Module}|Error]) + end; +are_loaded([], _State, Loaded, Imported, Error) -> + {Loaded, Imported, Error}. + is_loaded(Module, State) -> case get_file(Module, State#main_state.compiled) of {ok, File} -> @@ -1333,18 +1443,75 @@ get_compiled_still_loaded(Nodes,Compiled0) -> %%%--Compilation--------------------------------------------------------- -%% do_compile(File, Options) -> {ok,Module} | {error,Error} -do_compile(File, UserOptions) -> +do_compile_beams(ModsAndFiles, State) -> + Result0 = pmap(fun({ok,Module,File}) -> + do_compile_beam(Module,File,State); + (Error) -> + Error + end, + ModsAndFiles), + Compiled = [{M,F} || {ok,M,F} <- Result0], + remote_load_compiled(State#main_state.nodes,Compiled), + fix_state_and_result(Result0,State,[]). + +do_compile_beam(Module,BeamFile0,State) -> + case get_beam_file(Module,BeamFile0,State#main_state.compiled) of + {ok,BeamFile} -> + UserOptions = get_compile_options(Module,BeamFile), + case do_compile_beam1(Module,BeamFile,UserOptions) of + {ok, Module} -> + {ok,Module,BeamFile}; + error -> + {error, BeamFile}; + {error,Reason} -> % no abstract code + {error, {Reason, BeamFile}} + end; + {error,no_beam} -> + %% The module has first been compiled from .erl, and now + %% someone tries to compile it from .beam + {error,{already_cover_compiled,no_beam_found,Module}} + end. + +fix_state_and_result([{ok,Module,BeamFile}|Rest],State,Acc) -> + Compiled = add_compiled(Module,BeamFile,State#main_state.compiled), + Imported = remove_imported(Module,State#main_state.imported), + NewState = State#main_state{compiled=Compiled,imported=Imported}, + fix_state_and_result(Rest,NewState,[{ok,Module}|Acc]); +fix_state_and_result([Error|Rest],State,Acc) -> + fix_state_and_result(Rest,State,[Error|Acc]); +fix_state_and_result([],State,Acc) -> + {lists:reverse(Acc),State}. + + +do_compile(Files, Options, State) -> + Result0 = pmap(fun(File) -> + do_compile(File, Options) + end, + Files), + Compiled = [{M,F} || {ok,M,F} <- Result0], + remote_load_compiled(State#main_state.nodes,Compiled), + fix_state_and_result(Result0,State,[]). + +do_compile(File, Options) -> + case do_compile1(File, Options) of + {ok, Module} -> + {ok,Module,File}; + error -> + {error,File} + end. 
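The {chunk,Chunk,From} / continue handshake between send_chunk/2 on the remote node and collector_proc/0 above allows at most one outstanding chunk per sender, so the collector's mailbox cannot grow without bound. The same protocol stripped to its core, with illustrative names:

producer(Collector, Chunks) ->
    lists:foreach(fun(Chunk) ->
                          Collector ! {chunk, Chunk, self()},
                          %% Wait for the acknowledgement before sending more.
                          receive continue -> ok end
                  end,
                  Chunks),
    Collector ! done.

consumer(HandleChunk) ->
    receive
        {chunk, Chunk, From} ->
            HandleChunk(Chunk),
            From ! continue,
            consumer(HandleChunk);
        done ->
            ok
    end.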
+ +%% do_compile1(File, Options) -> {ok,Module} | error +do_compile1(File, UserOptions) -> Options = [debug_info,binary,report_errors,report_warnings] ++ UserOptions, case compile:file(File, Options) of {ok, Module, Binary} -> - do_compile_beam(Module,Binary,UserOptions); + do_compile_beam1(Module,Binary,UserOptions); error -> error end. %% Beam is a binary or a .beam file name -do_compile_beam(Module,Beam,UserOptions) -> +do_compile_beam1(Module,Beam,UserOptions) -> %% Clear database do_clear(Module), @@ -1915,10 +2082,21 @@ common_elems(L1, L2) -> collect(Nodes) -> %% local node AllClauses = ets:tab2list(?COVER_CLAUSE_TABLE), - pmap(fun move_modules/1,AllClauses), - + Mon1 = spawn_monitor(fun() -> pmap(fun move_modules/1,AllClauses) end), + + %% remote nodes + Mon2 = spawn_monitor(fun() -> remote_collect('_',Nodes,false) end), + get_downs([Mon1,Mon2]). + +%% Collect data for a list of modules +collect(Modules,Nodes) -> + MS = [{{'$1','_'},[{'==','$1',M}],['$_']} || M <- Modules], + Clauses = ets:select(?COVER_CLAUSE_TABLE,MS), + Mon1 = spawn_monitor(fun() -> pmap(fun move_modules/1,Clauses) end), + %% remote nodes - remote_collect('_',Nodes,false). + Mon2 = spawn_monitor(fun() -> remote_collect('_',Nodes,false) end), + get_downs([Mon1,Mon2]). %% Collect data for one module collect(Module,Clauses,Nodes) -> @@ -1926,25 +2104,26 @@ collect(Module,Clauses,Nodes) -> move_modules({Module,Clauses}), %% remote nodes - remote_collect(Module,Nodes,false). + remote_collect([Module],Nodes,false). %% When analysing, the data from the local ?COVER_TABLE is moved to the %% ?COLLECTION_TABLE. Resetting data in ?COVER_TABLE move_modules({Module,Clauses}) -> ets:insert(?COLLECTION_CLAUSE_TABLE,{Module,Clauses}), - move_clauses(Clauses). + Pattern = {#bump{module=Module, _='_'}, '_'}, + MatchSpec = [{Pattern,[],['$_']}], + Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE), + do_move_module(Match). -move_clauses([{M,F,A,C,_L}|Clauses]) -> - Pattern = {#bump{module=M, function=F, arity=A, clause=C}, '_'}, - Bumps = ets:match_object(?COVER_TABLE,Pattern), +do_move_module({Bumps,Continuation}) -> lists:foreach(fun({Key,Val}) -> ets:insert(?COVER_TABLE, {Key,0}), insert_in_collection_table(Key,Val) end, Bumps), - move_clauses(Clauses); -move_clauses([]) -> + do_move_module(ets:select(Continuation)); +do_move_module('$end_of_table') -> ok. %% Given a .beam file, find the .erl file. Look first in same directory as @@ -2002,6 +2181,26 @@ splice(BeamDir, SrcFile) -> revsplit(Path) -> lists:reverse(filename:split(Path)). +analyse_list(Modules, Analysis, Level, State) -> + {LoadedMF, ImportedMF, Error} = are_loaded(Modules, State, [], [], []), + Loaded = [M || {M,_} <- LoadedMF], + Imported = [M || {M,_} <- ImportedMF], + collect(Loaded, State#main_state.nodes), + MS = [{{'$1','_'},[{'==','$1',M}],['$_']} || M <- Loaded ++ Imported], + AllClauses = ets:select(?COLLECTION_CLAUSE_TABLE,MS), + Fun = fun({Module,Clauses}) -> + do_analyse(Module, Analysis, Level, Clauses) + end, + {result, lists:flatten(pmap(Fun, AllClauses)), Error}. + +analyse_all(Analysis, Level, State) -> + collect(State#main_state.nodes), + AllClauses = ets:tab2list(?COLLECTION_CLAUSE_TABLE), + Fun = fun({Module,Clauses}) -> + do_analyse(Module, Analysis, Level, Clauses) + end, + {result, lists:flatten(pmap(Fun, AllClauses)), []}. 
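analyse_list/4 and collect/2 fetch the clause entries for a whole list of modules with one ets:select/2 call by generating one match-spec alternative per wanted key. The same idea as a standalone sketch (table and key list hypothetical):

select_keys(Tab, Keys) ->
    %% One {Head,Guard,Body} alternative per key; ets tries them all.
    MatchSpec = [{{'$1', '_'}, [{'==', '$1', Key}], ['$_']} || Key <- Keys],
    ets:select(Tab, MatchSpec).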
+ do_parallel_analysis(Module, Analysis, Level, Loaded, From, State) -> analyse_info(Module,State#main_state.imported), C = case Loaded of @@ -2016,7 +2215,7 @@ do_parallel_analysis(Module, Analysis, Level, Loaded, From, State) -> Clauses end, R = do_analyse(Module, Analysis, Level, C), - reply(From, R). + reply(From, {ok,R}). %% do_analyse(Module, Analysis, Level, Clauses)-> {ok,Answer} | {error,Error} %% Clauses = [{Module,Function,Arity,Clause,Lines}] @@ -2035,37 +2234,44 @@ do_analyse(Module, Analysis, line, _Clauses) -> {{Module,L}, N} end end, - Answer = lists:keysort(1, lists:map(Fun, Bumps)), - {ok, Answer}; -do_analyse(_Module, Analysis, clause, Clauses) -> - Fun = case Analysis of - coverage -> - fun({M,F,A,C,Ls}) -> - Pattern = {#bump{module=M,function=F,arity=A, - clause=C},0}, - Bumps = ets:match_object(?COLLECTION_TABLE, Pattern), - NotCov = length(Bumps), - {{M,F,A,C}, {Ls-NotCov, NotCov}} - end; - calls -> - fun({M,F,A,C,_Ls}) -> - Pattern = {#bump{module=M,function=F,arity=A, - clause=C},'_'}, - Bumps = ets:match_object(?COLLECTION_TABLE, Pattern), - {_Bump, Calls} = hd(lists:keysort(1, Bumps)), - {{M,F,A,C}, Calls} - end - end, - Answer = lists:map(Fun, Clauses), - {ok, Answer}; + lists:keysort(1, lists:map(Fun, Bumps)); +do_analyse(Module, Analysis, clause, _Clauses) -> + Pattern = {#bump{module=Module},'_'}, + Bumps = lists:keysort(1,ets:match_object(?COLLECTION_TABLE, Pattern)), + analyse_clause(Analysis,Bumps); do_analyse(Module, Analysis, function, Clauses) -> - {ok, ClauseResult} = do_analyse(Module, Analysis, clause, Clauses), - Result = merge_clauses(ClauseResult, merge_fun(Analysis)), - {ok, Result}; + ClauseResult = do_analyse(Module, Analysis, clause, Clauses), + merge_clauses(ClauseResult, merge_fun(Analysis)); do_analyse(Module, Analysis, module, Clauses) -> - {ok, FunctionResult} = do_analyse(Module, Analysis, function, Clauses), + FunctionResult = do_analyse(Module, Analysis, function, Clauses), Result = merge_functions(FunctionResult, merge_fun(Analysis)), - {ok, {Module,Result}}. + {Module,Result}. + +analyse_clause(_,[]) -> + []; +analyse_clause(coverage, + [{#bump{module=M,function=F,arity=A,clause=C},_}|_]=Bumps) -> + analyse_clause_cov(Bumps,{M,F,A,C},0,0,[]); +analyse_clause(calls,Bumps) -> + analyse_clause_calls(Bumps,{x,x,x,x},[]). + +analyse_clause_cov([{#bump{module=M,function=F,arity=A,clause=C},N}|Bumps], + {M,F,A,C}=Clause,Ls,NotCov,Acc) -> + analyse_clause_cov(Bumps,Clause,Ls+1,if N==0->NotCov+1; true->NotCov end,Acc); +analyse_clause_cov([{#bump{module=M1,function=F1,arity=A1,clause=C1},_}|_]=Bumps, + Clause,Ls,NotCov,Acc) -> + analyse_clause_cov(Bumps,{M1,F1,A1,C1},0,0,[{Clause,{Ls-NotCov,NotCov}}|Acc]); +analyse_clause_cov([],Clause,Ls,NotCov,Acc) -> + lists:reverse(Acc,[{Clause,{Ls-NotCov,NotCov}}]). + +analyse_clause_calls([{#bump{module=M,function=F,arity=A,clause=C},_}|Bumps], + {M,F,A,C}=Clause,Acc) -> + analyse_clause_calls(Bumps,Clause,Acc); +analyse_clause_calls([{#bump{module=M1,function=F1,arity=A1,clause=C1},N}|Bumps], + _Clause,Acc) -> + analyse_clause_calls(Bumps,{M1,F1,A1,C1},[{{M1,F1,A1,C1},N}|Acc]); +analyse_clause_calls([],_Clause,Acc) -> + lists:reverse(Acc). merge_fun(coverage) -> fun({Cov1,NotCov1}, {Cov2,NotCov2}) -> @@ -2094,7 +2300,50 @@ merge_functions([{_MFA,R}|Functions], MFun, Result) -> merge_functions([], _MFun, Result) -> Result. 
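analyse_clause/2 derives clause-level results from one pass over the keysorted bumps instead of one ets:match_object/2 call per clause. A worked micro-example with invented data:

%% Sorted bumps for a hypothetical module m, function f/1 with two clauses:
%%   [{#bump{module=m,function=f,arity=1,clause=1,line=3}, 2},
%%    {#bump{module=m,function=f,arity=1,clause=1,line=4}, 0},
%%    {#bump{module=m,function=f,arity=1,clause=2,line=6}, 1}]
%%
%% analyse_clause(coverage, Bumps) groups consecutive bumps sharing
%% {M,F,A,Clause} and counts covered/uncovered lines per group:
%%   [{{m,f,1,1},{1,1}}, {{m,f,1,2},{1,0}}]
%%
%% analyse_clause(calls, Bumps) keeps the first bump of each group, i.e.
%% the execution count of the clause's first line:
%%   [{{m,f,1,1},2}, {{m,f,1,2},1}]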
-do_parallel_analysis_to_file(Module, OutFile, Opts, Loaded, From, State) -> +analyse_list_to_file(Modules, Opts, State) -> + {LoadedMF, ImportedMF, Error} = are_loaded(Modules, State, [], [], []), + collect([M || {M,_} <- LoadedMF], State#main_state.nodes), + OutDir = proplists:get_value(outdir,Opts), + HTML = lists:member(html,Opts), + Fun = fun({Module,File}) -> + OutFile = outfilename(OutDir,Module,HTML), + do_analyse_to_file(Module,File,OutFile,HTML,State) + end, + {Ok,Error1} = split_ok_error(pmap(Fun, LoadedMF++ImportedMF),[],[]), + {result,Ok,Error ++ Error1}. + +analyse_all_to_file(Opts, State) -> + collect(State#main_state.nodes), + AllModules = get_all_modules(State), + OutDir = proplists:get_value(outdir,Opts), + HTML = lists:member(html,Opts), + Fun = fun({Module,File}) -> + OutFile = outfilename(OutDir,Module,HTML), + do_analyse_to_file(Module,File,OutFile,HTML,State) + end, + {Ok,Error} = split_ok_error(pmap(Fun, AllModules),[],[]), + {result,Ok,Error}. + +get_all_modules(State) -> + get_all_modules(State#main_state.compiled ++ State#main_state.imported,[]). +get_all_modules([{Module,File}|Rest],Acc) -> + get_all_modules(Rest,[{Module,File}|Acc]); +get_all_modules([{Module,File,_}|Rest],Acc) -> + case lists:keymember(Module,1,Acc) of + true -> get_all_modules(Rest,Acc); + false -> get_all_modules(Rest,[{Module,File}|Acc]) + end; +get_all_modules([],Acc) -> + Acc. + +split_ok_error([{ok,R}|Result],Ok,Error) -> + split_ok_error(Result,[R|Ok],Error); +split_ok_error([{error,R}|Result],Ok,Error) -> + split_ok_error(Result,Ok,[R|Error]); +split_ok_error([],Ok,Error) -> + {Ok,Error}. + +do_parallel_analysis_to_file(Module, Opts, Loaded, From, State) -> File = case Loaded of {loaded, File0} -> [{Module,Clauses}] = @@ -2105,24 +2354,32 @@ do_parallel_analysis_to_file(Module, OutFile, Opts, Loaded, From, State) -> {imported, File0, _} -> File0 end, + HTML = lists:member(html,Opts), + OutFile = + case proplists:get_value(outfile,Opts) of + undefined -> + outfilename(proplists:get_value(outdir,Opts),Module,HTML); + F -> + F + end, + reply(From, do_analyse_to_file(Module,File,OutFile,HTML,State)). + +do_analyse_to_file(Module,File,OutFile,HTML,State) -> case find_source(Module, File) of {beam,_BeamFile} -> - reply(From, {error,no_source_code_found}); + {error,{no_source_code_found,Module}}; ErlFile -> analyse_info(Module,State#main_state.imported), - HTML = lists:member(html,Opts), - R = do_analyse_to_file(Module,OutFile, - ErlFile,HTML), - reply(From, R) + do_analyse_to_file1(Module,OutFile,ErlFile,HTML) end. 
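Together with the {outdir,Dir} and html options documented at the start of this change, analyse_all_to_file/2 and split_ok_error/3 turn the no-module call into one annotated file per analysed module plus a list of per-module failures. A usage sketch with a hypothetical output directory, assuming some modules are already cover-compiled:

write_cover_reports(OutDir) ->
    %% ensure_dir/1 expects a path ending in a (dummy) file name component.
    ok = filelib:ensure_dir(filename:join(OutDir, "dummy")),
    {result, Written, Failed} = cover:analyse_to_file([html, {outdir, OutDir}]),
    io:format("~w file(s) written, ~w failure(s)~n",
              [length(Written), length(Failed)]),
    Failed.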
-%% do_analyse_to_file(Module,OutFile,ErlFile) -> {ok,OutFile} | {error,Error} +%% do_analyse_to_file1(Module,OutFile,ErlFile) -> {ok,OutFile} | {error,Error} %% Module = atom() %% OutFile = ErlFile = string() -do_analyse_to_file(Module, OutFile, ErlFile, HTML) -> - case file:open(ErlFile, [read]) of +do_analyse_to_file1(Module, OutFile, ErlFile, HTML) -> + case file:open(ErlFile, [read,raw,read_ahead]) of {ok, InFd} -> - case file:open(OutFile, [write]) of + case file:open(OutFile, [write,raw,delayed_write]) of {ok, OutFd} -> if HTML -> Encoding = encoding(ErlFile), @@ -2160,9 +2417,14 @@ do_analyse_to_file(Module, OutFile, ErlFile, HTML) -> "**************************************" "\n\n"]), - print_lines(Module, InFd, OutFd, 1, HTML), + Pattern = {#bump{module=Module,line='$1',_='_'},'$2'}, + MS = [{Pattern,[],[{{'$1','$2'}}]}], + CovLines = lists:keysort(1,ets:select(?COLLECTION_TABLE, MS)), + print_lines(Module, CovLines, InFd, OutFd, 1, HTML), - if HTML -> io:format(OutFd,"</pre>\n</body>\n</html>\n",[]); + if + HTML -> + file:write(OutFd, "</pre>\n</body>\n</html>\n"); true -> ok end, @@ -2179,21 +2441,19 @@ do_analyse_to_file(Module, OutFile, ErlFile, HTML) -> {error, {file, ErlFile, Reason}} end. -print_lines(Module, InFd, OutFd, L, HTML) -> - case io:get_line(InFd, '') of + +print_lines(Module, CovLines, InFd, OutFd, L, HTML) -> + case file:read_line(InFd) of eof -> ignore; - "%"++_=Line -> %Comment line - not executed. - io:put_chars(OutFd, [tab(),escape_lt_and_gt(Line, HTML)]), - print_lines(Module, InFd, OutFd, L+1, HTML); - RawLine -> + {ok,"%"++_=Line} -> %Comment line - not executed. + file:write(OutFd, [tab(),escape_lt_and_gt(Line, HTML)]), + print_lines(Module, CovLines, InFd, OutFd, L+1, HTML); + {ok,RawLine} -> Line = escape_lt_and_gt(RawLine,HTML), - Pattern = {#bump{module=Module,line=L},'$1'}, - case ets:match(?COLLECTION_TABLE, Pattern) of - [] -> - io:put_chars(OutFd, [tab(),Line]); - Ns -> - N = lists:foldl(fun([Ni], Nacc) -> Nacc+Ni end, 0, Ns), + case CovLines of + [{L,N}|CovLines1] -> + %% N = lists:foldl(fun([Ni], Nacc) -> Nacc+Ni end, 0, Ns), if N=:=0, HTML=:=true -> LineNoNL = Line -- "\n", @@ -2201,19 +2461,22 @@ print_lines(Module, InFd, OutFd, L, HTML) -> %%Str = string:right("0", 6, 32), RedLine = ["<font color=red>",Str,fill1(), LineNoNL,"</font>\n"], - io:put_chars(OutFd, RedLine); + file:write(OutFd, RedLine); N<1000000 -> Str = string:right(integer_to_list(N), 6, 32), - io:put_chars(OutFd, [Str,fill1(),Line]); + file:write(OutFd, [Str,fill1(),Line]); N<10000000 -> Str = integer_to_list(N), - io:put_chars(OutFd, [Str,fill2(),Line]); + file:write(OutFd, [Str,fill2(),Line]); true -> Str = integer_to_list(N), - io:put_chars(OutFd, [Str,fill3(),Line]) - end - end, - print_lines(Module, InFd, OutFd, L+1, HTML) + file:write(OutFd, [Str,fill3(),Line]) + end, + print_lines(Module, CovLines1, InFd, OutFd, L+1, HTML); + _ -> + file:write(OutFd, [tab(),Line]), + print_lines(Module, CovLines, InFd, OutFd, L+1, HTML) + end end. tab() -> " | ". @@ -2223,7 +2486,7 @@ fill3() -> "| ". %%%--Export-------------------------------------------------------------- do_export(Module, OutFile, From, State) -> - case file:open(OutFile,[write,binary,raw]) of + case file:open(OutFile,[write,binary,raw,delayed_write]) of {ok,Fd} -> Reply = case Module of @@ -2362,21 +2625,21 @@ do_reset_collection_table(Module) -> ets:match_delete(?COLLECTION_TABLE, {#bump{module=Module},'_'}). 
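do_analyse_to_file1/4 now opens both files in raw mode, with read_ahead on the source and delayed_write on the annotated output, so each line no longer makes a round trip through an io server. The same option combination shown as a bare line-by-line copy, with hypothetical file names:

copy_lines(InFile, OutFile) ->
    {ok, In} = file:open(InFile, [read, raw, read_ahead]),
    {ok, Out} = file:open(OutFile, [write, raw, delayed_write]),
    try
        copy_loop(In, Out)
    after
        file:close(In),
        file:close(Out)
    end.

copy_loop(In, Out) ->
    case file:read_line(In) of
        {ok, Line} ->
            ok = file:write(Out, Line),
            copy_loop(In, Out);
        eof ->
            ok;
        {error, _} = Error ->
            Error
    end.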
%% do_reset(Module) -> ok -%% The reset is done on a per-clause basis to avoid building +%% The reset is done on ?CHUNK_SIZE number of bumps to avoid building %% long lists in the case of very large modules do_reset(Module) -> - [{Module,Clauses}] = ets:lookup(?COVER_CLAUSE_TABLE, Module), - do_reset2(Clauses). + Pattern = {#bump{module=Module, _='_'}, '$1'}, + MatchSpec = [{Pattern,[{'=/=','$1',0}],['$_']}], + Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE), + do_reset2(Match). -do_reset2([{M,F,A,C,_L}|Clauses]) -> - Pattern = {#bump{module=M, function=F, arity=A, clause=C}, '_'}, - Bumps = ets:match_object(?COVER_TABLE, Pattern), +do_reset2({Bumps,Continuation}) -> lists:foreach(fun({Bump,_N}) -> ets:insert(?COVER_TABLE, {Bump,0}) end, Bumps), - do_reset2(Clauses); -do_reset2([]) -> + do_reset2(ets:select(Continuation)); +do_reset2('$end_of_table') -> ok. do_clear(Module) -> @@ -2419,31 +2682,43 @@ escape_lt_and_gt1([],Acc) -> escape_lt_and_gt1([H|T],Acc) -> escape_lt_and_gt1(T,[H|Acc]). -pmap(Fun, List) -> - pmap(Fun, List, 20). -pmap(Fun, List, Limit) -> - pmap(Fun, List, [], Limit, 0, []). -pmap(Fun, [E | Rest], Pids, Limit, Cnt, Acc) when Cnt < Limit -> - Collector = self(), - Pid = spawn_link(fun() -> - ?SPAWN_DBG(pmap,E), - Collector ! {res,self(),Fun(E)} - end), - erlang:monitor(process, Pid), - pmap(Fun, Rest, Pids ++ [Pid], Limit, Cnt + 1, Acc); -pmap(Fun, List, [Pid | Pids], Limit, Cnt, Acc) -> - receive - {'DOWN', _Ref, process, X, _} when is_pid(X) -> - pmap(Fun, List, [Pid | Pids], Limit, Cnt - 1, Acc); - {res, Pid, Res} -> - pmap(Fun, List, Pids, Limit, Cnt, [Res | Acc]) - end; -pmap(_Fun, [], [], _Limit, 0, Acc) -> - lists:reverse(Acc); -pmap(Fun, [], [], Limit, Cnt, Acc) -> +%%%--Internal functions for parallelization------------------------------ +pmap(Fun,List) -> + NTot = length(List), + NProcs = erlang:system_info(schedulers) * 2, + NPerProc = (NTot div NProcs) + 1, + Mons = pmap_spawn(Fun,NPerProc,List,[]), + pmap_collect(Mons,[]). + +pmap_spawn(_,_,[],Mons) -> + Mons; +pmap_spawn(Fun,NPerProc,List,Mons) -> + {L1,L2} = if length(List)>=NPerProc -> lists:split(NPerProc,List); + true -> {List,[]} % last chunk + end, + Mon = + spawn_monitor( + fun() -> + exit({pmap_done,lists:map(Fun,L1)}) + end), + pmap_spawn(Fun,NPerProc,L2,[Mon|Mons]). + +pmap_collect([],Acc) -> + lists:append(Acc); +pmap_collect(Mons,Acc) -> receive - {'DOWN', _Ref, process, X, _} when is_pid(X) -> - pmap(Fun, [], [], Limit, Cnt - 1, Acc) + {'DOWN', Ref, process, Pid, {pmap_done,Result}} -> + pmap_collect(lists:delete({Pid,Ref},Mons),[Result|Acc]); + {'DOWN', Ref, process, Pid, Reason} = Down -> + case lists:member({Pid,Ref},Mons) of + true -> + %% Something went really wrong - don't hang! + exit(Reason); + false -> + %% This should be handled somewhere else + self() ! Down, + pmap_collect(Mons,Acc) + end end. %%%----------------------------------------------------------------- diff --git a/lib/tools/test/cover_SUITE.erl b/lib/tools/test/cover_SUITE.erl index 80807b1d38..368fa6c3d1 100644 --- a/lib/tools/test/cover_SUITE.erl +++ b/lib/tools/test/cover_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2001-2013. All Rights Reserved. +%% Copyright Ericsson AB 2001-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -33,6 +33,8 @@ -export([do_coverage/1]). +-export([distribution_performance/1]). + -include_lib("test_server/include/test_server.hrl"). 
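The rewritten pmap/2 above splits the work list over twice the number of schedulers and lets each worker hand its result back in its exit reason, so one 'DOWN' message both delivers the result and reaps the process. The idiom in isolation, as a sketch:

run_one(Fun) ->
    {Pid, Ref} = spawn_monitor(fun() -> exit({done, Fun()}) end),
    receive
        {'DOWN', Ref, process, Pid, {done, Result}} ->
            {ok, Result};
        {'DOWN', Ref, process, Pid, Reason} ->
            {error, Reason}
    end.

run_one(fun() -> lists:sum([1,2,3]) end) gives {ok,6}; a fun that crashes comes back as {error,Reason} rather than hanging the caller.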
%%---------------------------------------------------------------------- @@ -170,10 +172,15 @@ compile(Config) when is_list(Config) -> ?line {ok, CWD} = file:get_cwd(), ?line Result2 = cover:compile_directory(CWD), ?line SortedResult = lists:sort(Result2), - ?line [{error,_DFile},{ok,a},{ok,b},{ok,cc},{ok,f}] = SortedResult, + ?line [{error,DFile},{ok,a},{ok,b},{ok,cc},{ok,f}] = SortedResult, ?line [{ok,e}] = cover:compile_directory("d1"), ?line {error,enoent} = cover:compile_directory("d2"), + [] = cover:compile([]), + Result21 = cover:compile([a,b,"cc.erl",d,"f"]), + SortedResult21 = lists:sort(Result21), + [{error,DFile},{ok,a},{ok,b},{ok,cc},{ok,f}] = SortedResult21, + ?line {ok,a} = cover:compile(a), ?line {ok,b} = compile:file(b), ?line code:purge(b), @@ -213,8 +220,14 @@ compile(Config) when is_list(Config) -> ?line {error,non_existing} = cover:compile_beam(z), ?line [{ok,y}] = cover:compile_beam_directory("d"), ?line Result3 = lists:sort(cover:compile_beam_directory()), - ?line [{error,{no_abstract_code,_XBeam}},{ok,crypt},{ok,v},{ok,w}] = Result3, + ?line [{error,{no_abstract_code,XBeam}},{ok,crypt},{ok,v},{ok,w}] = Result3, ?line {error,enoent} = cover:compile_beam_directory("d2"), + + [] = cover:compile_beam([]), + Result31 = cover:compile_beam([crypt,"v.beam",w,"x"]), + SortedResult31 = lists:sort(Result31), + [{error,{no_abstract_code,XBeam}},{ok,crypt},{ok,v},{ok,w}] = SortedResult31, + ?line decompile([v,w,y]), ?line Files = lsfiles(), ?line remove(files(Files, ".beam")). @@ -239,20 +252,22 @@ analyse(Config) when is_list(Config) -> ?line done = a:start(5), - ?line {ok, {a,{17,2}}} = cover:analyse(a, coverage, module), - ?line {ok, [{{a,start,1},{6,0}}, - {{a,stop,1},{0,1}}, - {{a,pong,1},{1,0}}, - {{a,loop,3},{5,1}}, - {{a,trycatch,1},{4,0}}, - {{a,exit_kalle,0},{1,0}}]} = cover:analyse(a, coverage, function), - ?line {ok, [{{a,start,1,1},{6,0}}, - {{a,stop,1,1},{0,1}}, - {{a,pong,1,1},{1,0}}, + {ok, {a,{17,2}}=ACovMod} = cover:analyse(a, coverage, module), + {ok, [{{a,exit_kalle,0},{1,0}}, + {{a,loop,3},{5,1}}, + {{a,pong,1},{1,0}}, + {{a,start,1},{6,0}}, + {{a,stop,1},{0,1}}, + {{a,trycatch,1},{4,0}}]=ACovFunc} = + cover:analyse(a, coverage, function), + {ok, [{{a,exit_kalle,0,1},{1,0}}, {{a,loop,3,1},{3,1}}, {{a,loop,3,2},{2,0}}, - {{a,trycatch,1,1},{4,0}}, - {{a,exit_kalle,0,1},{1,0}}]} = cover:analyse(a, coverage, clause), + {{a,pong,1,1},{1,0}}, + {{a,start,1,1},{6,0}}, + {{a,stop,1,1},{0,1}}, + {{a,trycatch,1,1},{4,0}}]=ACovClause} = + cover:analyse(a, coverage, clause), ?line {ok, [{{a,9},{1,0}}, {{a,10},{1,0}}, {{a,11},{1,0}}, @@ -271,22 +286,22 @@ analyse(Config) when is_list(Config) -> {{a,47},{1,0}}, {{a,49},{1,0}}, {{a,51},{1,0}}, - {{a,55},{1,0}}]} = cover:analyse(a, coverage, line), - - ?line {ok, {a,15}} = cover:analyse(a, calls, module), - ?line {ok, [{{a,start,1},1}, - {{a,stop,1},0}, - {{a,pong,1},5}, - {{a,loop,3},6}, - {{a,trycatch,1},2}, - {{a,exit_kalle,0},1}]} = cover:analyse(a, calls, function), - ?line {ok, [{{a,start,1,1},1}, - {{a,stop,1,1},0}, - {{a,pong,1,1},5}, - {{a,loop,3,1},5}, - {{a,loop,3,2},1}, - {{a,trycatch,1,1},2}, - {{a,exit_kalle,0,1},1}]} = cover:analyse(a, calls, clause), + {{a,55},{1,0}}]=ACovLine} = cover:analyse(a, coverage, line), + + {ok, {a,15}=ACallsMod} = cover:analyse(a, calls, module), + {ok, [{{a,exit_kalle,0},1}, + {{a,loop,3},6}, + {{a,pong,1},5}, + {{a,start,1},1}, + {{a,stop,1},0}, + {{a,trycatch,1},2}]=ACallsFunc} = cover:analyse(a, calls, function), + {ok, [{{a,exit_kalle,0,1},1}, + {{a,loop,3,1},5}, + 
{{a,loop,3,2},1}, + {{a,pong,1,1},5}, + {{a,start,1,1},1}, + {{a,stop,1,1},0}, + {{a,trycatch,1,1},2}]=ACallsClause} = cover:analyse(a, calls, clause), ?line {ok, [{{a,9},1}, {{a,10},1}, {{a,11},1}, @@ -305,27 +320,85 @@ analyse(Config) when is_list(Config) -> {{a,47},1}, {{a,49},1}, {{a,51},2}, - {{a,55},1}]} = cover:analyse(a, calls, line), - - ?line {ok, [{{a,start,1},{6,0}}, - {{a,stop,1},{0,1}}, - {{a,pong,1},{1,0}}, - {{a,loop,3},{5,1}}, - {{a,trycatch,1},{4,0}}, - {{a,exit_kalle,0},{1,0}}]} = cover:analyse(a), - ?line {ok, {a,{17,2}}} = cover:analyse(a, module), - ?line {ok, [{{a,start,1},1}, - {{a,stop,1},0}, - {{a,pong,1},5}, - {{a,loop,3},6}, - {{a,trycatch,1},2}, - {{a,exit_kalle,0},1}]} = cover:analyse(a, calls), + {{a,55},1}]=ACallsLine} = cover:analyse(a, calls, line), + + {ok,ACovFunc} = cover:analyse(a), + {ok,ACovMod} = cover:analyse(a, module), + {ok,ACallsFunc} = cover:analyse(a, calls), ?line {ok, "a.COVER.out"} = cover:analyse_to_file(a), ?line {ok, "e.COVER.out"} = cover:analyse_to_file(e), ?line {ok, "a.COVER.html"} = cover:analyse_to_file(a,[html]), ?line {ok, "e.COVER.html"} = cover:analyse_to_file(e,[html]), + %% Analyse all modules + Modules = cover:modules(), + N = length(Modules), + + {result,CovFunc,[]} = cover:analyse(), % default = coverage, function + ACovFunc = [A || {{a,_,_},_}=A<-CovFunc], + + {result,CovMod,[]} = cover:analyse(coverage,module), + ACovMod = lists:keyfind(a,1,CovMod), + + {result,CovClause,[]} = cover:analyse(coverage,clause), + ACovClause = [A || {{a,_,_,_},_}=A<-CovClause], + + {result,CovLine,[]} = cover:analyse(coverage,line), + ACovLine = [A || {{a,_},_}=A<-CovLine], + + {result,CallsFunc,[]} = cover:analyse(calls,function), + ACallsFunc = [A || {{a,_,_},_}=A<-CallsFunc], + + {result,CallsMod,[]} = cover:analyse(calls,module), + ACallsMod = lists:keyfind(a,1,CallsMod), + + {result,CallsClause,[]} = cover:analyse(calls,clause), + ACallsClause = [A || {{a,_,_,_},_}=A<-CallsClause], + + {result,CallsLine,[]} = cover:analyse(calls,line), + ACallsLine = [A || {{a,_},_}=A<-CallsLine], + + {result,AllToFile,[]} = cover:analyse_to_file(), + N = length(AllToFile), + true = lists:member("a.COVER.out",AllToFile), + {result,AllToFileHtml,[]} = cover:analyse_to_file([html]), + N = length(AllToFileHtml), + true = lists:member("a.COVER.html",AllToFileHtml), + + %% Analyse list of modules + %% Listing all modules so we can compare result with above result + %% from analysing all. 
+ + {result,CovFunc1,[]} = cover:analyse(Modules), % default = coverage, function + true = lists:sort(CovFunc) == lists:sort(CovFunc1), + + {result,CovMod1,[]} = cover:analyse(Modules,coverage,module), + true = lists:sort(CovMod) == lists:sort(CovMod1), + + {result,CovClause1,[]} = cover:analyse(Modules,coverage,clause), + true = lists:sort(CovClause) == lists:sort(CovClause1), + + {result,CovLine1,[]} = cover:analyse(Modules,coverage,line), + true = lists:sort(CovLine) == lists:sort(CovLine1), + + {result,CallsFunc1,[]} = cover:analyse(Modules,calls,function), + true = lists:sort(CallsFunc1) == lists:sort(CallsFunc1), + + {result,CallsMod1,[]} = cover:analyse(Modules,calls,module), + true = lists:sort(CallsMod) == lists:sort(CallsMod1), + + {result,CallsClause1,[]} = cover:analyse(Modules,calls,clause), + true = lists:sort(CallsClause) == lists:sort(CallsClause1), + + {result,CallsLine1,[]} = cover:analyse(Modules,calls,line), + true = lists:sort(CallsLine) == lists:sort(CallsLine1), + + {result,AllToFile1,[]} = cover:analyse_to_file(Modules), + true = lists:sort(AllToFile) == lists:sort(AllToFile1), + {result,AllToFileHtml1,[]} = cover:analyse_to_file(Modules,[html]), + true = lists:sort(AllToFileHtml) == lists:sort(AllToFileHtml1), + %% analyse_to_file of file which is compiled from beam ?line {ok,f} = compile:file(f,[debug_info]), ?line code:purge(f), @@ -348,14 +421,17 @@ analyse(Config) when is_list(Config) -> {module,z} = code:load_file(z), {ok,z} = cover:compile_beam(z), ok = file:delete("z.erl"), - {error,no_source_code_found} = cover:analyse_to_file(z), + {error,{no_source_code_found,z}} = cover:analyse_to_file(z), + {result,[],[{no_source_code_found,z}]} = cover:analyse_to_file([z]), code:purge(z), code:delete(z), ?line {error,{not_cover_compiled,b}} = cover:analyse(b), ?line {error,{not_cover_compiled,g}} = cover:analyse(g), + {result,[],[{not_cover_compiled,b}]} = cover:analyse([b]), ?line {error,{not_cover_compiled,b}} = cover:analyse_to_file(b), - ?line {error,{not_cover_compiled,g}} = cover:analyse_to_file(g). + {error,{not_cover_compiled,g}} = cover:analyse_to_file(g), + {result,[],[{not_cover_compiled,g}]} = cover:analyse_to_file([g]). misc(suite) -> []; misc(Config) when is_list(Config) -> @@ -680,6 +756,119 @@ stop_node_after_disconnect(Config) -> ?t:stop_node(N1), ok. 
+distribution_performance(Config) -> + PrivDir = ?config(priv_dir,Config), + Dir = filename:join(PrivDir,"distribution_performance"), + AllFiles = filename:join(Dir,"*"), + ok = filelib:ensure_dir(AllFiles), + code:add_patha(Dir), + M = 9, % Generate M modules + F = 210, % with F functions + C = 10, % and each function of C clauses + Mods = generate_modules(M,F,C,Dir), + +% ?t:break(""), + + NodeName = cover_SUITE_distribution_performance, + {ok,N1} = ?t:start_node(NodeName,peer,[{start_cover,false}]), + %% CFun = fun() -> + %% [{ok,_} = cover:compile_beam(Mod) || Mod <- Mods] + %% end, + CFun = fun() -> cover:compile_beam(Mods) end, + {CT,CA} = timer:tc(CFun), +% erlang:display(CA), + erlang:display({compile,CT}), + + {SNT,_} = timer:tc(fun() -> {ok,[N1]} = cover:start(nodes()) end), + erlang:display({start_node,SNT}), + + [1 = rpc:call(N1,Mod,f1,[1]) || Mod <- Mods], + +% Fun = fun() -> [cover:analyse(Mod,calls,function) || Mod<-Mods] end, +% Fun = fun() -> analyse_all(Mods,calls,function) end, +% Fun = fun() -> cover:analyse('_',calls,function) end, + Fun = fun() -> cover:analyse(Mods,calls,function) end, + +% Fun = fun() -> [begin cover:analyse_to_file(Mod,[html]) end || Mod<-Mods] end, +% Fun = fun() -> analyse_all_to_file(Mods,[html]) end, +% Fun = fun() -> cover:analyse_to_file(Mods,[html]) end, +% Fun = fun() -> cover:analyse_to_file([html]) end, + +% Fun = fun() -> cover:reset() end, + + {AT,A} = timer:tc(Fun), + erlang:display({analyse,AT}), +% erlang:display(lists:sort([X || X={_MFA,N} <- lists:append([L || {ok,L}<-A]), N=/=0])), + + %% fprof:apply(Fun, [],[{procs,[whereis(cover_server)]}]), + %% fprof:profile(), + %% fprof:analyse(dest,[]), + + {SNT2,_} = timer:tc(fun() -> ?t:stop_node(N1) end), + erlang:display({stop_node,SNT2}), + + code:del_path(Dir), + Files = filelib:wildcard(AllFiles), + [ok = file:delete(File) || File <- Files], + ok = file:del_dir(Dir), + ok. + +%% Run analysis in parallel +analyse_all(Mods,Analysis,Level) -> + Pids = [begin + Pid = spawn(fun() -> + {ok,A} = cover:analyse(Mod,Analysis,Level), + exit(A) + end), + erlang:monitor(process,Pid), + Pid + end || Mod <- Mods], + get_downs(Pids,[]). + +analyse_all_to_file(Mods,Opts) -> + Pids = [begin + Pid = cover:async_analyse_to_file(Mod,Opts), + erlang:monitor(process,Pid), + Pid + end || Mod <- Mods], + get_downs(Pids,[]). + +get_downs([],Acc) -> + Acc; +get_downs(Pids,Acc) -> + receive + {'DOWN', _Ref, _Type, Pid, A} -> + get_downs(lists:delete(Pid,Pids),[A|Acc]) + end. + +generate_modules(0,_,_,_) -> + []; +generate_modules(M,F,C,Dir) -> + ModStr = "m" ++ integer_to_list(M), + Mod = list_to_atom(ModStr), + Src = ["-module(",ModStr,").\n" + "-compile(export_all).\n" | + generate_functions(F,C)], + Erl = filename:join(Dir,ModStr++".erl"), + ok = file:write_file(Erl,Src), + {ok,Mod} = compile:file(Erl,[{outdir,Dir},debug_info,report]), + [Mod | generate_modules(M-1,F,C,Dir)]. + +generate_functions(0,_) -> + []; +generate_functions(F,C) -> + Func = "f" ++ integer_to_list(F), + [generate_clauses(C,Func) | generate_functions(F-1,C)]. + +generate_clauses(0,_) -> + []; +generate_clauses(C,Func) -> + CStr = integer_to_list(C), + Sep = if C==1 -> "."; true -> ";" end, + [Func,"(",CStr,") -> ",CStr,Sep,"\n" | + generate_clauses(C-1,Func)]. 
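For orientation, what the generator above emits: generate_modules(1,1,2,Dir) would write a single file m1.erl containing

    -module(m1).
    -compile(export_all).
    f1(2) -> 2;
    f1(1) -> 1.

so every generated module answers rpc:call(Node, Mod, f1, [1]) with 1, which is what the timing loop above relies on.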
+ + export_import(suite) -> []; export_import(Config) when is_list(Config) -> ?line DataDir = ?config(data_dir, Config), @@ -788,10 +977,11 @@ otp_5031(Config) when is_list(Config) -> Dog = ?t:timetrap(?t:seconds(10)), - ?line {ok,N1} = ?t:start_node(cover_SUITE_distribution1,slave,[]), + {ok,N1} = ?t:start_node(cover_SUITE_otp_5031,slave,[]), ?line {ok,[N1]} = cover:start(N1), ?line {error,not_main_node} = rpc:call(N1,cover,modules,[]), ?line cover:stop(), + ?t:stop_node(N1), ?t:timetrap_cancel(Dog), ok. @@ -1005,6 +1195,7 @@ otp_7095(Config) when is_list(Config) -> ok. + otp_8270(doc) -> ["OTP-8270. Bug."]; otp_8270(suite) -> []; @@ -1020,7 +1211,7 @@ otp_8270(Config) when is_list(Config) -> ?line {ok,N3} = ?t:start_node(cover_n3,slave,As), timer:sleep(500), - cover:start(nodes()), + {ok,[_,_,_]} = cover:start(nodes()), Test = << "-module(m).\n" @@ -1058,6 +1249,7 @@ otp_8270(Config) when is_list(Config) -> ?line {N2,true} = {N2,is_list(N2_info)}, ?line {N3,true} = {N3,is_list(N3_info)}, + exit(Pid1,kill), ?line ?t:stop_node(N1), ?line ?t:stop_node(N2), ?line ?t:stop_node(N3), @@ -1572,7 +1764,9 @@ is_unloaded(What) -> end. check_f_calls(F1,F2) -> - {ok,[{{f,f1,0},F1},{{f,f2,0},F2}|_]} = cover:analyse(f,calls,function). + {ok,A} = cover:analyse(f,calls,function), + {_,F1} = lists:keyfind({f,f1,0},1,A), + {_,F2} = lists:keyfind({f,f2,0},1,A). cover_which_nodes(Expected) -> case cover:which_nodes() of |