Diffstat (limited to 'lib'): 552 files changed, 17961 insertions, 10441 deletions
diff --git a/lib/Makefile b/lib/Makefile index 4740e6eb59..ae466ed518 100644 --- a/lib/Makefile +++ b/lib/Makefile @@ -35,7 +35,7 @@ ALL_ERLANG_APPLICATIONS = xmerl edoc erl_docgen snmp otp_mibs erl_interface \ public_key ssl observer odbc diameter \ cosTransactions cosEvent cosTime cosNotification \ cosProperty cosFileTransfer cosEventDomain et megaco \ - eunit ssh typer eldap dialyzer hipe + eunit ssh eldap dialyzer hipe ifdef BUILD_ALL ERLANG_APPLICATIONS += $(ALL_ERLANG_APPLICATIONS) diff --git a/lib/asn1/doc/src/asn1_getting_started.xml b/lib/asn1/doc/src/asn1_getting_started.xml index d40b294c39..c036d289fc 100644 --- a/lib/asn1/doc/src/asn1_getting_started.xml +++ b/lib/asn1/doc/src/asn1_getting_started.xml @@ -187,6 +187,14 @@ erlc -o ../asnfiles -I ../asnfiles -I /usr/local/standards/asn1 Person.asn</pre> <item> <p>DER encoding rule. Only when using option <c>-ber</c>.</p> </item> + <tag><c>+maps</c></tag> + <item> + <p>Use maps instead of records to represent the <c>SEQUENCE</c> and + <c>SET</c> types. No <c>.hrl</c> files will be generated. + See the Section <seealso marker="asn1_getting_started#MAP_SEQ_SET"> + Map representation for SEQUENCE and SET</seealso> + for more information.</p> + </item> <tag><c>+asn1config</c></tag> <item> <p>This functionality works together with option @@ -258,6 +266,10 @@ asn1ct:compile("H323-MESSAGES.asn1",[per]). </pre> <c>{error, {asn1, Description}}</c> where <c>Description</c> is an Erlang term describing the error.</p> + <p>Currently, <c>Description</c> looks like this: + <c>{ErrorDescription, StackTrace}</c>. Applications should + not depend on the exact contents of <c>Description</c> as it + could change in the future.</p> </section> </section> @@ -766,8 +778,11 @@ Pdu ::= SEQUENCE { b REAL, c OBJECT IDENTIFIER, d NULL } </pre> - <p>This is a 4-component structure called <c>Pdu</c>. The record format - is the major format for representation of <c>SEQUENCE</c> in Erlang. + <p>This is a 4-component structure called <c>Pdu</c>. By default, + a <c>SEQUENCE</c> is represented by a record in Erlang. + It can also be represented as a map; see + <seealso marker="asn1_getting_started#MAP_SEQ_SET"> + Map representation for SEQUENCE and SET</seealso>. For each <c>SEQUENCE</c> and <c>SET</c> in an ASN.1 module an Erlang record declaration is generated. 
For <c>Pdu</c>, a record like the following is defined:</p> @@ -878,6 +893,48 @@ SExt ::= SEQUENCE { </section> <section> + <marker id="MAP_SEQ_SET"></marker> + <title>Map representation for SEQUENCE and SET</title> + <p>If the ASN.1 module has been compiled with option <c>maps</c>, + the types <c>SEQUENCE</c> and <c>SET</c> are represented as maps.</p> + <p>In the following example, this ASN.1 specification is used:</p> + <pre> +File DEFINITIONS AUTOMATIC TAGS ::= +BEGIN +Seq1 ::= SEQUENCE { + a INTEGER DEFAULT 42, + b BOOLEAN OPTIONAL, + c IA5String +} +END </pre> + + <p>Optional fields are to be omitted from the map if they have + no value:</p> + + <pre> +1> <input>asn1ct:compile('File', [per,maps]).</input> +ok +2> <input>{ok,E} = 'File':encode('Seq1', #{a=>0,c=>"string"}).</input> +{ok,<<128,1,0,6,115,116,114,105,110,103>>} </pre> + + <p>When decoding, optional fields will be omitted from the map:</p> + + <pre> +3> <input>'File':decode('Seq1', E).</input> +{ok,#{a => 0,c => "string"}} </pre> + + <p>Default values can be omitted from the map:</p> + <pre> +4> <input>{ok,E2} = 'File':encode('Seq1', #{c=>"string"}).</input> +{ok,<<0,6,115,116,114,105,110,103>>} +5> <input>'File':decode('Seq1', E2).</input> +{ok,#{a => 42,c => "string"}} </pre> + + <note><p>It is not allowed to use the atoms <c>asn1_VALUE</c> and + <c>asn1_DEFAULT</c> with maps.</p></note> + </section> + + <section> <marker id="CHOICE"></marker> <title>CHOICE</title> <p>The type <c>CHOICE</c> is a space saver and is similar to the @@ -1004,11 +1061,16 @@ T ::= CHOICE { <section> <title>Naming of Records in .hrl Files</title> + <p>When the option <c>maps</c> is given, no <c>.hrl</c> files + will be generated. The rest of this section describes the behavior + of the compiler when <c>maps</c> is not used.</p> + <p>When an ASN.1 specification is compiled, all defined types of type - <c>SET</c> or <c>SEQUENCE</c> result in a corresponding record in the - generated <c>.hrl</c> file. This is because the values for - <c>SET</c> and <c>SEQUENCE</c> are represented as records as - mentioned earlier.</p> + <c>SET</c> or <c>SEQUENCE</c> result in a corresponding record in the + generated <c>.hrl</c> file. This is because the values for + <c>SET</c> and <c>SEQUENCE</c> are represented as records + by default.</p> + <p>Some special cases of this functionality are presented in the next section.</p> @@ -1144,9 +1206,10 @@ SS ::= SET { <p>This example shows that a function is generated by the compiler that returns a valid Erlang representation of the value, although the value is of a complex type.</p> - <p>Furthermore, a macro is generated for each value in the <c>.hrl</c> - file. So, the defined value <c>tt</c> can also be extracted by - <c>?tt</c> in application code.</p> + <p>Furthermore, if the option <c>maps</c> is not used, + a macro is generated for each value in the <c>.hrl</c> + file. So, the defined value <c>tt</c> can also be extracted by + <c>?tt</c> in application code.</p> </section> <section> diff --git a/lib/asn1/doc/src/asn1ct.xml b/lib/asn1/doc/src/asn1ct.xml index ebe1ce44dc..859d6a50bb 100644 --- a/lib/asn1/doc/src/asn1ct.xml +++ b/lib/asn1/doc/src/asn1ct.xml @@ -170,11 +170,24 @@ File3.asn</pre> as for <c>ber</c>. </p> </item> + <tag><c>maps</c></tag> + <item> + <p>This option changes the representation of the types + <c>SEQUENCE</c> and <c>SET</c> to use maps (instead of + records). 
This option also suppresses the generation of + <c>.hrl</c> files.</p> + <p>For details, see Section + <seealso marker="asn1_getting_started#MAP_SEQ_SET"> + Map representation for SEQUENCE and SET</seealso> + in the User's Guide. + </p> + </item> <tag><c>compact_bit_string</c></tag> <item> <p> The <c>BIT STRING</c> type is decoded to "compact notation". <em>This option is not recommended for new code.</em> + This option cannot be combined with the option <c>maps</c>. </p> <p>For details, see Section <seealso marker="asn1_getting_started#BIT STRING"> @@ -188,6 +201,7 @@ File3.asn</pre> The <c>BIT STRING</c> type is decoded to the legacy format, that is, a list of zeroes and ones. <em>This option is not recommended for new code.</em> + This option cannot be combined with the option <c>maps</c>. </p> <p>For details, see Section <seealso marker="asn1_getting_started#BIT STRING">BIT STRING</seealso> @@ -202,7 +216,8 @@ File3.asn</pre> marker="asn1_getting_started#BIT STRING">BIT STRING</seealso> and Section <seealso marker="asn1_getting_started#OCTET STRING">OCTET STRING</seealso> in the User's Guide.</p> - <p><em>This option is not recommended for new code.</em></p> + <p><em>This option is not recommended for new code.</em> + This option cannot be combined with the option <c>maps</c>.</p> </item> <tag><c>{n2n, EnumTypeName}</c></tag> <item> diff --git a/lib/asn1/examples/recordnames.txt b/lib/asn1/examples/recordnames.txt index 78e30ab510..9b890b4aa7 100644 --- a/lib/asn1/examples/recordnames.txt +++ b/lib/asn1/examples/recordnames.txt @@ -1,6 +1,6 @@ For each ASN1 types SET and SEQUENCE a record is generated in the .hrl file with the same name as the corresponding type. -A decoded value is also returned as a record with the apropriate name. +A decoded value is also returned as a record with the appropriate name. An internally defined type as the type in component 'a' in the following example will result in a record with name 'Seq_a': diff --git a/lib/asn1/src/asn1_db.erl b/lib/asn1/src/asn1_db.erl index 869ea310aa..a3e45ca915 100644 --- a/lib/asn1/src/asn1_db.erl +++ b/lib/asn1/src/asn1_db.erl @@ -20,7 +20,7 @@ %% -module(asn1_db). --export([dbstart/1,dbnew/2,dbload/1,dbload/3,dbsave/2,dbput/2, +-export([dbstart/1,dbnew/3,dbload/1,dbload/4,dbsave/2,dbput/2, dbput/3,dbget/2]). -export([dbstop/0]). @@ -37,13 +37,13 @@ dbstart(Includes0) -> put(?MODULE, spawn_link(fun() -> init(Parent, Includes) end)), ok. -dbload(Module, Erule, Mtime) -> - req({load, Module, Erule, Mtime}). +dbload(Module, Erule, Maps, Mtime) -> + req({load, Module, {Erule,Maps}, Mtime}). dbload(Module) -> req({load, Module, any, {{0,0,0},{0,0,0}}}). -dbnew(Module, Erule) -> req({new, Module, Erule}). +dbnew(Module, Erule, Maps) -> req({new, Module, {Erule,Maps}}). dbsave(OutFile, Module) -> cast({save, OutFile, Module}). dbput(Module, K, V) -> cast({set, Module, K, V}). dbput(Module, Kvs) -> cast({set, Module, Kvs}). @@ -110,19 +110,19 @@ loop(#state{parent = Parent, monitor = MRef, table = Table, ok = ets:tab2file(Mtab, TempFile), ok = file:rename(TempFile, OutFile), loop(State); - {From, {new, Mod, Erule}} -> + {From, {new, Mod, EruleMaps}} -> [] = ets:lookup(Table, Mod), %Assertion. 
ModTableId = ets:new(list_to_atom(lists:concat(["asn1_",Mod])), []), ets:insert(Table, {Mod, ModTableId}), - ets:insert(ModTableId, {?MAGIC_KEY, info(Erule)}), + ets:insert(ModTableId, {?MAGIC_KEY, info(EruleMaps)}), reply(From, ok), loop(State); - {From, {load, Mod, Erule, Mtime}} -> + {From, {load, Mod, EruleMaps, Mtime}} -> case ets:member(Table, Mod) of true -> reply(From, ok); false -> - case load_table(Mod, Erule, Mtime, Includes) of + case load_table(Mod, EruleMaps, Mtime, Includes) of {ok, ModTableId} -> ets:insert(Table, {Mod, ModTableId}), reply(From, ok); @@ -151,20 +151,20 @@ lookup(Tab, K) -> [{K,V}] -> V end. -info(Erule) -> - {asn1ct:vsn(),Erule}. +info(EruleMaps) -> + {asn1ct:vsn(),EruleMaps}. -load_table(Mod, Erule, Mtime, Includes) -> +load_table(Mod, EruleMaps, Mtime, Includes) -> Base = lists:concat([Mod, ".asn1db"]), case path_find(Includes, Mtime, Base) of error -> error; - {ok,ModTab} when Erule =:= any -> + {ok,ModTab} when EruleMaps =:= any -> {ok,ModTab}; {ok,ModTab} -> Vsn = asn1ct:vsn(), case ets:lookup(ModTab, ?MAGIC_KEY) of - [{_,{Vsn,Erule}}] -> + [{_,{Vsn,EruleMaps}}] -> %% Correct version and encoding rule. {ok,ModTab}; _ -> diff --git a/lib/asn1/src/asn1_records.hrl b/lib/asn1/src/asn1_records.hrl index af10c1771c..06a9e3ab03 100644 --- a/lib/asn1/src/asn1_records.hrl +++ b/lib/asn1/src/asn1_records.hrl @@ -28,6 +28,7 @@ -define('COMPLETE_ENCODE',1). -define('TLV_DECODE',2). +-define(MISSING_IN_MAP, asn1__MISSING_IN_MAP). -record(module,{pos,name,defid,tagdefault='EXPLICIT',exports={exports,[]},imports={imports,[]}, extensiondefault=empty,typeorval}). @@ -96,6 +97,28 @@ error_context %Top-level thingie (contains line numbers) }). +%% Code generation parameters and options. +-record(gen, + {erule=ber :: 'ber' | 'per', + der=false :: boolean(), + aligned=false :: boolean(), + rec_prefix="" :: string(), + macro_prefix="" :: string(), + pack=record :: 'record' | 'map', + options=[] :: [any()] + }). + +%% Abstract intermediate representation. +-record(abst, + {name :: module(), %Name of module. + types, %Types. + values, %Values. + ptypes, %Parameterized types. + classes, %Classes. + objects, %Objects. + objsets %Object sets. + }). + %% state record used by back-end at partial decode %% active is set to 'yes' when a partial decode function is generated. %% prefix is set to 'dec-inc-' or 'dec-partial-' is for diff --git a/lib/asn1/src/asn1ct.erl b/lib/asn1/src/asn1ct.erl index 4e030861f5..9f77a557e5 100644 --- a/lib/asn1/src/asn1ct.erl +++ b/lib/asn1/src/asn1ct.erl @@ -193,7 +193,7 @@ check_pass(#st{code=M,file=File,includes=Includes, erule=Erule,dbfile=DbFile,opts=Opts, inputmodules=InputModules}=St) -> start(Includes), - case asn1ct_check:storeindb(#state{erule=Erule}, M) of + case asn1ct_check:storeindb(#state{erule=Erule,options=Opts}, M) of ok -> Module = asn1_db:dbget(M#module.name, 'MODULE'), State = #state{mname=Module#module.name, @@ -216,8 +216,8 @@ check_pass(#st{code=M,file=File,includes=Includes, {error,St#st{error=Reason}} end. -save_pass(#st{code=M,erule=Erule}=St) -> - ok = asn1ct_check:storeindb(#state{erule=Erule}, M), +save_pass(#st{code=M,erule=Erule,opts=Opts}=St) -> + ok = asn1ct_check:storeindb(#state{erule=Erule,options=Opts}, M), {ok,St}. 
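The new #gen{} record in asn1_records.hrl collects the code-generation parameters that the back ends previously fetched from the process dictionary via get(encoding_options). As a rough sketch of what init_gen_record/2 (further down in this diff) produces, a compilation with the options [uper,maps] corresponds to a record along these lines:

    %% Illustration only; the record is built by init_gen_record/2 in asn1ct.erl below.
    #gen{erule=per,            %uper is normalized to per
         der=false,
         aligned=false,        %uper is the unaligned variant
         rec_prefix="",
         macro_prefix="",
         pack=map,             %because of the 'maps' option
         options=[uper,maps]}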
parse_listing(#st{code=Code,outfile=OutFile0}=St) -> @@ -236,12 +236,8 @@ abs_listing(#st{code={M,_},outfile=OutFile}) -> generate_pass(#st{code=Code,outfile=OutFile,erule=Erule,opts=Opts}=St0) -> St = St0#st{code=undefined}, %Reclaim heap space - case generate(Code, OutFile, Erule, Opts) of - {error,Reason} -> - {error,St#st{error=Reason}}; - ok -> - {ok,St} - end. + generate(Code, OutFile, Erule, Opts), + {ok,St}. compile_pass(#st{outfile=OutFile,opts=Opts0}=St) -> asn1_db:dbstop(), %Reclaim memory. @@ -834,37 +830,55 @@ delete_double_of_symbol1([],Acc) -> %%*********************************** -generate({M,GenTOrV}, OutFile, EncodingRule, Options) -> +generate({M,CodeTuple}, OutFile, EncodingRule, Options) -> + {Types,Values,Ptypes,Classes,Objects,ObjectSets} = CodeTuple, + Code = #abst{name=M#module.name, + types=Types,values=Values,ptypes=Ptypes, + classes=Classes,objects=Objects,objsets=ObjectSets}, debug_on(Options), setup_bit_string_format(Options), setup_legacy_erlang_types(Options), - put(encoding_options,Options), asn1ct_table:new(check_functions), + Gen = init_gen_record(EncodingRule, Options), + + check_maps_option(Gen), + %% create decoding function names and taglists for partial decode - case (catch specialized_decode_prepare(EncodingRule,M,GenTOrV,Options)) of - {error, Reason} -> warning("Error in configuration file: ~n~p~n", - [Reason], Options, - "Error in configuration file"); - _ -> ok + try + specialized_decode_prepare(Gen, M) + catch + throw:{error, Reason} -> + warning("Error in configuration file: ~n~p~n", + [Reason], Options, + "Error in configuration file") end, - Result = - case (catch asn1ct_gen:pgen(OutFile,EncodingRule, - M#module.name,GenTOrV,Options)) of - {'EXIT',Reason2} -> - error("~p~n",[Reason2],Options), - {error,Reason2}; - _ -> - ok - end, + asn1ct_gen:pgen(OutFile, Gen, Code), debug_off(Options), - erase(encoding_options), cleanup_bit_string_format(), erase(tlv_format), % used in ber erase(class_default_type),% used in ber asn1ct_table:delete(check_functions), - Result. + ok. + +init_gen_record(EncodingRule, Options) -> + Erule = case EncodingRule of + uper -> per; + _ -> EncodingRule + end, + Der = proplists:get_bool(der, Options), + Aligned = EncodingRule =:= per, + RecPrefix = proplists:get_value(record_name_prefix, Options, ""), + MacroPrefix = proplists:get_value(macro_name_prefix, Options, ""), + Pack = case proplists:get_value(maps, Options, false) of + true -> map; + false -> record + end, + #gen{erule=Erule,der=Der,aligned=Aligned, + rec_prefix=RecPrefix,macro_prefix=MacroPrefix, + pack=Pack,options=Options}. + setup_legacy_erlang_types(Opts) -> F = case lists:member(legacy_erlang_types, Opts) of @@ -910,6 +924,26 @@ cleanup_bit_string_format() -> get_bit_string_format() -> get(bit_string_format). +check_maps_option(#gen{pack=map}) -> + case get_bit_string_format() of + bitstring -> + ok; + _ -> + Message1 = "The 'maps' option must not be combined with " + "'compact_bit_string' or 'legacy_bit_string'", + exit({error,{asn1,Message1}}) + end, + case use_legacy_types() of + false -> + ok; + true -> + Message2 = "The 'maps' option must not be combined with " + "'legacy_erlang_types'", + exit({error,{asn1,Message2}}) + end; +check_maps_option(#gen{}) -> + ok. + %% parse_and_save parses an asn1 spec and saves the unchecked parse %% tree in a data base file. 
@@ -919,22 +953,27 @@ parse_and_save(Module,S) -> SourceDir = S#state.sourcedir, Includes = [I || {i,I} <- Options], Erule = S#state.erule, + Maps = lists:member(maps, Options), case get_input_file(Module, [SourceDir|Includes]) of %% search for asn1 source {file,SuffixedASN1source} -> Mtime = filelib:last_modified(SuffixedASN1source), - case asn1_db:dbload(Module, Erule, Mtime) of + case asn1_db:dbload(Module, Erule, Maps, Mtime) of ok -> ok; error -> parse_and_save1(S, SuffixedASN1source, Options) end; - Err -> + Err when not Maps -> case asn1_db:dbload(Module) of ok -> + %% FIXME: This should be an error. warning("could not do a consistency check of the ~p file: no asn1 source file was found.~n", [lists:concat([Module,".asn1db"])],Options); error -> ok end, + {error,{asn1,input_file_error,Err}}; + Err -> + %% Always fail directly when the 'maps' option is used. {error,{asn1,input_file_error,Err}} end. @@ -997,9 +1036,8 @@ input_file_type(File) -> end end; ".asn1config" -> - case read_config_file(File,asn1_module) of + case read_config_file_info(File, asn1_module) of {ok,Asn1Module} -> -% put(asn1_config_file,File), input_file_type(Asn1Module); Error -> Error @@ -1092,16 +1130,27 @@ translate_options([H|T]) -> translate_options([]) -> []. remove_asn_flags(Options) -> - [X || X <- Options, - X /= get_rule(Options), - X /= optimize, - X /= compact_bit_string, - X /= legacy_bit_string, - X /= legacy_erlang_types, - X /= debug, - X /= asn1config, - X /= record_name_prefix]. - + [X || X <- Options, not is_asn1_flag(X)]. + +is_asn1_flag(asn1config) -> true; +is_asn1_flag(ber) -> true; +is_asn1_flag(compact_bit_string) -> true; +is_asn1_flag(debug) -> true; +is_asn1_flag(der) -> true; +is_asn1_flag(legacy_bit_string) -> true; +is_asn1_flag({macro_name_prefix,_}) -> true; +is_asn1_flag({n2n,_}) -> true; +is_asn1_flag(noobj) -> true; +is_asn1_flag(no_ok_wrapper) -> true; +is_asn1_flag(optimize) -> true; +is_asn1_flag(per) -> true; +is_asn1_flag({record_name_prefix,_}) -> true; +is_asn1_flag(undec_rec) -> true; +is_asn1_flag(uper) -> true; +is_asn1_flag(verbose) -> true; +%% 'warnings_as_errors' is intentionally passed through to the compiler. +is_asn1_flag(_) -> false. + debug_on(Options) -> case lists:member(debug,Options) of true -> @@ -1370,25 +1419,26 @@ prepare_bytes(Bytes) -> list_to_binary(Bytes). vsn() -> ?vsn. -specialized_decode_prepare(Erule,M,TsAndVs,Options) -> - case lists:member(asn1config,Options) of +specialized_decode_prepare(#gen{erule=ber,options=Options}=Gen, M) -> + case lists:member(asn1config, Options) of true -> - partial_decode_prepare(Erule,M,TsAndVs,Options); - _ -> + special_decode_prepare_1(Gen, M); + false -> ok - end. + end; +specialized_decode_prepare(_, _) -> + ok. 
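The rewritten remove_asn_flags/1 keeps exactly those options that is_asn1_flag/1 does not recognize, so genuine compiler options fall through to the Erlang compiler. An illustrative evaluation (hypothetical option list):

    %% Only asn1-specific flags are dropped; everything else passes through.
    Options = [ber, der, verbose, warnings_as_errors, {i,"include"}, {outdir,"gen"}],
    [X || X <- Options, not is_asn1_flag(X)].
    %% -> [warnings_as_errors,{i,"include"},{outdir,"gen"}]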
+ %% Reads the configuration file if it exists and stores information %% about partial decode and incomplete decode -partial_decode_prepare(ber,M,TsAndVs,Options) when is_tuple(TsAndVs) -> +special_decode_prepare_1(#gen{options=Options}=Gen, M) -> %% read configure file - - ModName = - case lists:keysearch(asn1config,1,Options) of - {value,{_,MName}} -> MName; - _ -> M#module.name - end, + ModName = case lists:keyfind(asn1config, 1, Options) of + {_,MName} -> MName; + false -> M#module.name + end, %% io:format("ModName: ~p~nM#module.name: ~p~n~n",[ModName,M#module.name]), - case read_config_file(ModName) of + case read_config_file(Gen, ModName) of no_config_file -> ok; CfgList -> @@ -1407,11 +1457,7 @@ partial_decode_prepare(ber,M,TsAndVs,Options) when is_tuple(TsAndVs) -> Part_inc_tlv_tags = tlv_tags(CommandList2), save_config(partial_incomplete_decode,Part_inc_tlv_tags), save_gen_state(exclusive_decode,ExclusiveDecode,Part_inc_tlv_tags) - end; -partial_decode_prepare(_,_,_,_) -> - ok. - - + end. %% create_partial_inc_decode_gen_info/2 %% @@ -1863,46 +1909,38 @@ tlv_tag1(<<0:1,PartialTag:7>>,Acc) -> tlv_tag1(<<1:1,PartialTag:7,Buffer/binary>>,Acc) -> tlv_tag1(Buffer,(Acc bsl 7) bor PartialTag). -%% reads the content from the configuration file and returns the -%% selected part choosen by InfoType. Assumes that the config file +%% Reads the content from the configuration file and returns the +%% selected part chosen by InfoType. Assumes that the config file %% content is an Erlang term. -read_config_file(ModuleName,InfoType) when is_atom(InfoType) -> - CfgList = read_config_file(ModuleName), - get_config_info(CfgList,InfoType). +read_config_file_info(ModuleName, InfoType) when is_atom(InfoType) -> + Name = ensure_ext(ModuleName, ".asn1config"), + CfgList = read_config_file0(Name, []), + get_config_info(CfgList, InfoType). +read_config_file(#gen{options=Options}, ModuleName) -> + Name = ensure_ext(ModuleName, ".asn1config"), + Includes = [I || {i,I} <- Options], + read_config_file0(Name, ["."|Includes]). -read_config_file(ModuleName) -> - case file:consult(lists:concat([ModuleName,'.asn1config'])) of +read_config_file0(Name, [D|Dirs]) -> + case file:consult(filename:join(D, Name)) of {ok,CfgList} -> CfgList; {error,enoent} -> - Options = get(encoding_options), - Includes = [I || {i,I} <- Options], - read_config_file1(ModuleName,Includes); + read_config_file0(Name, Dirs); {error,Reason} -> Error = "error reading asn1 config file: " ++ file:format_error(Reason), throw({error,Error}) - end. -read_config_file1(ModuleName,[]) -> - case filename:extension(ModuleName) of - ".asn1config" -> - no_config_file; - _ -> - read_config_file(lists:concat([ModuleName,".asn1config"])) end; -read_config_file1(ModuleName,[H|T]) -> -% File = filename:join([H,lists:concat([ModuleName,'.asn1config'])]), - File = filename:join([H,ModuleName]), - case file:consult(File) of - {ok,CfgList} -> - CfgList; - {error,enoent} -> - read_config_file1(ModuleName,T); - {error,Reason} -> - Error = "error reading asn1 config file: " ++ - file:format_error(Reason), - throw({error,Error}) +read_config_file0(_, []) -> + no_config_file. + +ensure_ext(ModuleName, Ext) -> + Name = filename:join([ModuleName]), + case filename:extension(Name) of + Ext -> Name; + _ -> Name ++ Ext end. get_config_info(CfgList,InfoType) -> @@ -2382,8 +2420,10 @@ format_error({write_error,File,Reason}) -> io_lib:format("writing output file ~s failed: ~s", [File,file:format_error(Reason)]). 
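read_config_file0/2 now walks an explicit directory list (for the compiler, "." plus every include directory) instead of consulting the process dictionary, and ensure_ext/2 lets callers pass either a module name or a complete file name. A small illustration of ensure_ext/2 (the module name is hypothetical):

    ensure_ext('MYMODULE', ".asn1config")              %-> "MYMODULE.asn1config"
    ensure_ext("MYMODULE.asn1config", ".asn1config")   %-> "MYMODULE.asn1config"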
-is_error(S) when is_record(S, state) -> - is_error(S#state.options); +is_error(#state{options=Opts}) -> + is_error(Opts); +is_error(#gen{options=Opts}) -> + is_error(Opts); is_error(O) -> lists:member(errors, O) orelse is_verbose(O). @@ -2392,8 +2432,10 @@ is_warning(S) when is_record(S, state) -> is_warning(O) -> lists:member(warnings, O) orelse is_verbose(O). -is_verbose(S) when is_record(S, state) -> - is_verbose(S#state.options); +is_verbose(#state{options=Opts}) -> + is_verbose(Opts); +is_verbose(#gen{options=Opts}) -> + is_verbose(Opts); is_verbose(O) -> lists:member(verbose, O). diff --git a/lib/asn1/src/asn1ct_check.erl b/lib/asn1/src/asn1ct_check.erl index f2c895bfaa..4f04b78241 100644 --- a/lib/asn1/src/asn1ct_check.erl +++ b/lib/asn1/src/asn1ct_check.erl @@ -2239,12 +2239,18 @@ normalized_record(SorS,S,Value,Components,NameList) -> case is_record_normalized(S,NewName,Value,length(Components)) of true -> Value; - _ -> + false -> NoComps = length(Components), ListOfVals = normalize_seq_or_set(SorS,S,Value,Components,NameList,[]), - NoComps = length(ListOfVals), %% Assert - list_to_tuple([NewName|ListOfVals]) + NoComps = length(ListOfVals), %Assertion. + case use_maps(S) of + false -> + list_to_tuple([NewName|ListOfVals]); + true -> + create_map_value(Components, ListOfVals) + end end. + is_record_normalized(S,Name,V = #'Externalvaluereference'{},NumComps) -> case get_referenced_type(S,V) of {_M,#valuedef{type=_T1,value=V2}} -> @@ -2253,9 +2259,20 @@ is_record_normalized(S,Name,V = #'Externalvaluereference'{},NumComps) -> end; is_record_normalized(_S,Name,Value,NumComps) when is_tuple(Value) -> (tuple_size(Value) =:= (NumComps + 1)) andalso (element(1, Value) =:= Name); +is_record_normalized(_S, _Name, Value, _NumComps) when is_map(Value) -> + true; is_record_normalized(_,_,_,_) -> false. +use_maps(#state{options=Opts}) -> + lists:member(maps, Opts). + +create_map_value(Components, ListOfVals) -> + Zipped = lists:zip(Components, ListOfVals), + L = [{Name,V} || {#'ComponentType'{name=Name},V} <- Zipped, + V =/= asn1_NOVALUE], + maps:from_list(L). + normalize_seq_or_set(SorS, S, [{#seqtag{val=Cname},V}|Vs], [#'ComponentType'{name=Cname,typespec=TS}|Cs], @@ -4192,7 +4209,7 @@ iof_associated_type1(S,C) -> %% fieldname=[{typefieldreference,'Type'}], fieldname={'Type',[]}, type=Typefield_type}, - IOFComponents = + IOFComponents0 = [#'ComponentType'{name='type-id', typespec=#type{tag=C1TypeTag, def=ObjectIdentifier, @@ -4209,6 +4226,7 @@ iof_associated_type1(S,C) -> tablecinf=Comp2tablecinf}, prop=mandatory, tags=[{'CONTEXT',0}]}], + IOFComponents = textual_order(IOFComponents0), #'SEQUENCE'{tablecinf=TableCInf, components=simplify_comps(IOFComponents)}. 
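When the module is compiled with the maps option, normalized DEFAULT values for SEQUENCE and SET are now built by create_map_value/2, which pairs each component with its normalized value and drops those that have no value. A standalone sketch of the same transformation (component names are hypothetical):

    %% Components a, b, c normalized to [1, asn1_NOVALUE, true]:
    maps:from_list([{Name,V} || {Name,V} <- [{a,1},{b,asn1_NOVALUE},{c,true}],
                                V =/= asn1_NOVALUE]).
    %% -> #{a => 1, c => true}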
@@ -4930,7 +4948,7 @@ componentrelation_leadingattr(S,CompList) -> %%FIXME expand_ExtAddGroups([C#'ExtensionAdditionGroup'{components=ExtAdds}|T], %% CurrPos,PosAcc,CompAcc) -> -%% expand_ExtAddGroups(T,CurrPos+ L = lenght(ExtAdds),[{CurrPos,L}|PosAcc],ExtAdds++CompAcc); +%% expand_ExtAddGroups(T,CurrPos+ L = length(ExtAdds),[{CurrPos,L}|PosAcc],ExtAdds++CompAcc); %% expand_ExtAddGroups([C|T],CurrPos,PosAcc,CompAcc) -> %% expand_ExtAddGroups(T,CurrPos+ 1,PosAcc,[C|CompAcc]); %% expand_ExtAddGroups([],_CurrPos,PosAcc,CompAcc) -> @@ -5673,7 +5691,8 @@ storeindb(S0, #module{name=ModName,typeorval=TVlist0}=M) -> storeindb_1(S, #module{name=ModName}=M, TVlist0, TVlist) -> NewM = M#module{typeorval=findtypes_and_values(TVlist0)}, - asn1_db:dbnew(ModName, S#state.erule), + Maps = lists:member(maps, S#state.options), + asn1_db:dbnew(ModName, S#state.erule, Maps), asn1_db:dbput(ModName, 'MODULE', NewM), asn1_db:dbput(ModName, TVlist), include_default_class(S, NewM#module.name), diff --git a/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl b/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl index 325bea5879..16af09bca9 100644 --- a/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl +++ b/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl @@ -32,7 +32,7 @@ -include("asn1_records.hrl"). --import(asn1ct_gen, [emit/1,demit/1,get_record_name_prefix/0]). +-import(asn1ct_gen, [emit/1,demit/1,get_record_name_prefix/1]). -define(ASN1CT_GEN_BER,asn1ct_gen_ber_bin_v2). @@ -57,7 +57,7 @@ %%=============================================================================== %%=============================================================================== -gen_encode_sequence(Erules,Typename,D) when is_record(D,type) -> +gen_encode_sequence(Gen, Typename, #type{}=D) -> asn1ct_name:start(), asn1ct_name:new(term), asn1ct_name:new(bytes), @@ -67,8 +67,12 @@ gen_encode_sequence(Erules,Typename,D) when is_record(D,type) -> ValName = case Typename of ['EXTERNAL'] -> + Tr = case Gen of + #gen{pack=record} -> transform_to_EXTERNAL1990; + #gen{pack=map} -> transform_to_EXTERNAL1990_maps + end, emit([indent(4),"NewVal = ", - {call,ext,transform_to_EXTERNAL1990,["Val"]}, + {call,ext,Tr,["Val"]}, com,nl]), "NewVal"; _ -> @@ -90,18 +94,9 @@ gen_encode_sequence(Erules,Typename,D) when is_record(D,type) -> {Rl,El} -> Rl ++ El; _ -> CompList end, - -%% don't match recordname for now, because of compatibility reasons -%% emit(["{'",asn1ct_gen:list2rname(Typename),"'"]), - emit(["{_"]), - case length(CompList1) of - 0 -> - true; - CompListLen -> - emit([","]), - mkcindexlist([Tc || Tc <- lists:seq(1,CompListLen)]) - end, - emit(["} = ",ValName,",",nl]), + + enc_match_input(Gen, ValName, CompList1), + EncObj = case TableConsInfo of #simpletableattributes{usedclassfield=Used, @@ -125,7 +120,7 @@ gen_encode_sequence(Erules,Typename,D) when is_record(D,type) -> emit([ObjectEncode," = ",nl, " ",{asis,ObjSetMod},":'getenc_",ObjSetName, "'("]), - ValueMatch = value_match(ValueIndex, + ValueMatch = value_match(Gen, ValueIndex, lists:concat(["Cindex",N])), emit([indent(35),ValueMatch,"),",nl]), {AttrN,ObjectEncode}; @@ -144,7 +139,7 @@ gen_encode_sequence(Erules,Typename,D) when is_record(D,type) -> end end, - gen_enc_sequence_call(Erules,Typename,CompList1,1,Ext,EncObj), + gen_enc_sequence_call(Gen, Typename, CompList1, 1, Ext, EncObj), emit([nl," BytesSoFar = "]), case SeqOrSet of @@ -168,7 +163,36 @@ gen_encode_sequence(Erules,Typename,D) when is_record(D,type) -> call(encode_tags, ["TagIn","BytesSoFar","LenSoFar"]), emit([".",nl]). 
-gen_decode_sequence(Erules,Typename,D) when is_record(D,type) -> +enc_match_input(#gen{pack=record}, ValName, CompList) -> + Len = length(CompList), + Vars = [lists:concat(["Cindex",N]) || N <- lists:seq(1, Len)], + RecordName = "_", + emit(["{",lists:join(",", [RecordName|Vars]),"} = ",ValName,com,nl]); +enc_match_input(#gen{pack=map}, ValName, CompList) -> + Len = length(CompList), + Vars = [lists:concat(["Cindex",N]) || N <- lists:seq(1, Len)], + Zipped = lists:zip(CompList, Vars), + M = [[{asis,Name},":=",Var] || + {#'ComponentType'{prop=mandatory,name=Name},Var} <- Zipped], + case M of + [] -> + ok; + [_|_] -> + emit(["#{",lists:join(",", M),"} = ",ValName,com,nl]) + end, + Os0 = [{Name,Var} || + {#'ComponentType'{prop=Prop,name=Name},Var} <- Zipped, + Prop =/= mandatory], + F = fun({Name,Var}) -> + [Var," = case ",ValName," of\n" + " #{",{asis,Name},":=",Var,"_0} -> ", + Var,"_0;\n" + " _ -> ",atom_to_list(?MISSING_IN_MAP),"\n" + "end"] + end, + emit(lists:join(",\n", [F(E) || E <- Os0]++[[]])). + +gen_decode_sequence(Gen, Typename, #type{}=D) -> asn1ct_name:start(), asn1ct_name:new(tag), #'SEQUENCE'{tablecinf=TableConsInfo,components=CList0} = D#type.def, @@ -225,15 +249,20 @@ gen_decode_sequence(Erules,Typename,D) when is_record(D,type) -> _ -> {false,false} end, - RecordName = lists:concat([get_record_name_prefix(), - asn1ct_gen:list2rname(Typename)]), - case gen_dec_sequence_call(Erules,Typename,CompList2,Ext,DecObjInf) of - no_terms -> % an empty sequence - emit([nl,nl]), - demit(["Result = "]), %dbg - %% return value as record + RecordName0 = lists:concat([get_record_name_prefix(Gen), + asn1ct_gen:list2rname(Typename)]), + RecordName = list_to_atom(RecordName0), + case gen_dec_sequence_call(Gen, Typename, CompList2, Ext, DecObjInf) of + no_terms -> % an empty sequence asn1ct_name:new(rb), - emit([" {'",RecordName,"'}.",nl,nl]); + case Gen of + #gen{pack=record} -> + emit([nl,nl, + " {'",RecordName,"'}.",nl,nl]); + #gen{pack=map} -> + emit([nl,nl, + " #{}.",nl,nl]) + end; {LeadingAttrTerm,PostponedDecArgs} -> emit([nl]), case {LeadingAttrTerm,PostponedDecArgs} of @@ -243,7 +272,7 @@ gen_decode_sequence(Erules,Typename,D) when is_record(D,type) -> ok; {[{ObjSetRef,LeadingAttr,Term}],PostponedDecArgs} -> DecObj = asn1ct_gen:un_hyphen_var(lists:concat(['DecObj',LeadingAttr,Term])), - ValueMatch = value_match(ValueIndex,Term), + ValueMatch = value_match(Gen, ValueIndex,Term), {ObjSetMod,ObjSetName} = ObjSetRef, emit([DecObj," =",nl, " ",{asis,ObjSetMod},":'getdec_",ObjSetName,"'(", @@ -263,22 +292,64 @@ gen_decode_sequence(Erules,Typename,D) when is_record(D,type) -> "end,",nl]) end, asn1ct_name:new(rb), - case Typename of - ['EXTERNAL'] -> - emit([" OldFormat={'",RecordName, - "', "]), - mkvlist(asn1ct_name:all(term)), - emit(["},",nl]), - emit([" ", - {call,ext,transform_to_EXTERNAL1994, - ["OldFormat"]},".",nl]); - _ -> - emit([" {'",RecordName,"', "]), - mkvlist(asn1ct_name:all(term)), - emit(["}.",nl,nl]) - end + gen_dec_pack(Gen, RecordName, Typename, CompList), + emit([".",nl]) end. +gen_dec_pack(Gen, RecordName, Typename, CompList) -> + case Typename of + ['EXTERNAL'] -> + dec_external(Gen, RecordName); + _ -> + asn1ct_name:new(res), + gen_dec_do_pack(Gen, RecordName, CompList), + emit([com,nl, + {curr,res}]) + end. 
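For pack=map, enc_match_input/3 no longer destructures a record; it emits one map pattern for the mandatory components and one case expression per optional component, falling back to the asn1__MISSING_IN_MAP marker. For a hypothetical SEQUENCE with a mandatory component a and an optional component b, the emitted encoder prologue looks roughly like:

    %% Sketch of generated code (variable names follow the CindexN convention):
    #{a := Cindex1} = Val,
    Cindex2 = case Val of
                  #{b := Cindex2_0} -> Cindex2_0;
                  _ -> asn1__MISSING_IN_MAP
              end,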
+ +dec_external(#gen{pack=record}, RecordName) -> + All = [{var,Term} || Term <- asn1ct_name:all(term)], + Record = [{asis,RecordName}|All], + emit(["OldFormat={",lists:join(",", Record),"},",nl, + {call,ext,transform_to_EXTERNAL1994, + ["OldFormat"]}]); +dec_external(#gen{pack=map}, _RecordName) -> + Vars = asn1ct_name:all(term), + Names = ['direct-reference','indirect-reference', + 'data-value-descriptor',encoding], + Zipped = lists:zip(Names, Vars), + MapInit = lists:join(",", [["'",N,"'=>",{var,V}] || {N,V} <- Zipped]), + emit(["OldFormat = #{",MapInit,"}",com,nl, + "ASN11994Format =",nl, + {call,ext,transform_to_EXTERNAL1994_maps, + ["OldFormat"]}]). + +gen_dec_do_pack(#gen{pack=record}, RecordName, _CompList) -> + All = asn1ct_name:all(term), + L = [{asis,RecordName}|[{var,Var} || Var <- All]], + emit([{curr,res}," = {",lists:join(",", L),"}"]); +gen_dec_do_pack(#gen{pack=map}, _, CompList) -> + Zipped = lists:zip(CompList, asn1ct_name:all(term)), + PF = fun({#'ComponentType'{prop='OPTIONAL'},_}) -> false; + ({_,_}) -> true + end, + {Mandatory,Optional} = lists:partition(PF, Zipped), + L = [[{asis,Name},"=>",{var,Var}] || + {#'ComponentType'{name=Name},Var} <- Mandatory], + emit([{curr,res}," = #{",lists:join(",", L),"}"]), + gen_dec_map_optional(Optional). + +gen_dec_map_optional([{#'ComponentType'{name=Name},Var}|T]) -> + asn1ct_name:new(res), + emit([com,nl, + {curr,res}," = case ",{var,Var}," of",nl, + " asn1_NOVALUE -> ",{prev,res},";",nl, + " _ -> ",{prev,res},"#{",{asis,Name},"=>",{var,Var},"}",nl, + "end"]), + gen_dec_map_optional(T); +gen_dec_map_optional([]) -> + ok. + gen_dec_postponed_decs(_,[]) -> emit(nl); gen_dec_postponed_decs(DecObj,[{_Cname,{FirstPFN,PFNList},Term, @@ -327,7 +398,7 @@ emit_opt_or_mand_check(Value,TmpTerm) -> gen_encode_set(Erules,Typename,D) when is_record(D,type) -> gen_encode_sequence(Erules,Typename,D). 
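On the decoding side, gen_dec_do_pack/3 with pack=map first builds a map from the mandatory components, and gen_dec_map_optional/1 then adds each optional component only when its decoded term is not asn1_NOVALUE. For the same hypothetical SEQUENCE {a, b OPTIONAL}, the emitted code is roughly:

    %% Sketch of generated code (Res/Term names are illustrative):
    Res1 = #{a => Term1},
    Res2 = case Term2 of
               asn1_NOVALUE -> Res1;
               _ -> Res1#{b => Term2}
           end,
    %% Res2 becomes the value returned by the generated 'dec_...' function.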
-gen_decode_set(Erules,Typename,D) when is_record(D,type) -> +gen_decode_set(Gen, Typename, #type{}=D) -> asn1ct_name:start(), %% asn1ct_name:new(term), asn1ct_name:new(tag), @@ -393,7 +464,7 @@ gen_decode_set(Erules,Typename,D) when is_record(D,type) -> _ -> emit(["SetFun = fun(FunTlv) ->", nl]), emit(["case FunTlv of ",nl]), - NextNum = gen_dec_set_cases(Erules,Typename,CompList,1), + NextNum = gen_dec_set_cases(Gen, Typename, CompList, 1), emit([indent(6), {curr,else}," -> ",nl, indent(9),"{",NextNum,", ",{curr,else},"}",nl]), emit([indent(3),"end",nl]), @@ -405,14 +476,17 @@ gen_decode_set(Erules,Typename,D) when is_record(D,type) -> asn1ct_name:new(tlv) end, - RecordName = lists:concat([get_record_name_prefix(), - asn1ct_gen:list2rname(Typename)]), - case gen_dec_sequence_call(Erules,Typename,CompList,Ext,DecObjInf) of - no_terms -> % an empty sequence - emit([nl,nl]), - demit(["Result = "]), %dbg - %% return value as record - emit([" {'",RecordName,"'}.",nl]); + RecordName0 = lists:concat([get_record_name_prefix(Gen), + asn1ct_gen:list2rname(Typename)]), + RecordName = list_to_atom(RecordName0), + case gen_dec_sequence_call(Gen, Typename, CompList, Ext, DecObjInf) of + no_terms -> % an empty SET + case Gen of + #gen{pack=record} -> + emit([nl,nl," {'",RecordName,"'}.",nl,nl]); + #gen{pack=map} -> + emit([nl,nl," #{}.",nl,nl]) + end; {LeadingAttrTerm,PostponedDecArgs} -> emit([nl]), case {LeadingAttrTerm,PostponedDecArgs} of @@ -422,7 +496,7 @@ gen_decode_set(Erules,Typename,D) when is_record(D,type) -> ok; {[{ObjSetRef,LeadingAttr,Term}],PostponedDecArgs} -> DecObj = asn1ct_gen:un_hyphen_var(lists:concat(['DecObj',LeadingAttr,Term])), - ValueMatch = value_match(ValueIndex,Term), + ValueMatch = value_match(Gen, ValueIndex, Term), {ObjSetMod,ObjSetName} = ObjSetRef, emit([DecObj," =",nl, " ",{asis,ObjSetMod},":'getdec_",ObjSetName,"'(", @@ -441,9 +515,8 @@ gen_decode_set(Erules,Typename,D) when is_record(D,type) -> "}}}) % extra fields not allowed",nl, "end,",nl]) end, - emit([" {'",RecordName,"', "]), - mkvlist(asn1ct_name:all(term)), - emit(["}.",nl]) + gen_dec_pack(Gen, RecordName, Typename, CompList), + emit([".",nl]) end. @@ -504,10 +577,8 @@ gen_decode_sof(Erules,TypeName,_InnerTypeName,D) when is_record(D,type) -> emit([" || ",{curr,v}," <- ",{curr,tlv},"].",nl,nl,nl]). 
-gen_encode_sof_components(Erules,Typename,SeqOrSetOf,Cont) - when is_record(Cont,type)-> - - {Objfun,Objfun_novar,EncObj} = +gen_encode_sof_components(Gen, Typename, SeqOrSetOf, #type{}=Cont) -> + {Objfun,Objfun_novar,EncObj} = case Cont#type.tablecinf of [{objfun,_}|_R] -> {", ObjFun",", _",{no_attr,"ObjFun"}}; @@ -517,20 +588,19 @@ gen_encode_sof_components(Erules,Typename,SeqOrSetOf,Cont) emit(["'enc_",asn1ct_gen:list2name(Typename), "_components'([]",Objfun_novar,", AccBytes, AccLen) -> ",nl]), - case catch lists:member(der,get(encoding_options)) of - true when SeqOrSetOf=='SET OF'-> + case {Gen,SeqOrSetOf} of + {#gen{der=true},'SET OF'} -> asn1ct_func:need({ber,dynamicsort_SETOF,1}), emit([indent(3), "{dynamicsort_SETOF(AccBytes),AccLen};",nl,nl]); - _ -> + {_,_} -> emit([indent(3),"{lists:reverse(AccBytes),AccLen};",nl,nl]) end, emit(["'enc_",asn1ct_gen:list2name(Typename), "_components'([H|T]",Objfun,",AccBytes, AccLen) ->",nl]), TypeNameSuffix = asn1ct_gen:constructed_suffix(SeqOrSetOf,Cont#type.def), - gen_enc_line(Erules,Typename,TypeNameSuffix,Cont,"H",3, -% mandatory,"{EncBytes,EncLen} = ",EncObj), - mandatory,EncObj), + gen_enc_line(Gen, Typename, TypeNameSuffix, Cont, "H", 3, + mandatory, EncObj), emit([",",nl]), emit([indent(3),"'enc_",asn1ct_gen:list2name(Typename), "_components'(T",Objfun,","]), @@ -1028,35 +1098,44 @@ gen_enc_line(Erules,TopType,Cname,Type,Element,Indent,OptOrMand,Assign,EncObj) emit([nl,indent(7),"end"]) end. -gen_optormand_case(mandatory, _Erules, _TopType, _Cname, _Type, _Element) -> +gen_optormand_case(mandatory, _Gen, _TopType, _Cname, _Type, _Element) -> ok; -gen_optormand_case('OPTIONAL', Erules, _TopType, _Cname, _Type, Element) -> +gen_optormand_case('OPTIONAL', Gen, _TopType, _Cname, _Type, Element) -> emit([" case ",Element," of",nl]), - emit([indent(9),"asn1_NOVALUE -> {", - empty_lb(Erules),",0};",nl]), + Missing = case Gen of + #gen{pack=record} -> asn1_NOVALUE; + #gen{pack=map} -> ?MISSING_IN_MAP + end, + emit([indent(9),Missing," -> {", + empty_lb(Gen),",0};",nl]), emit([indent(9),"_ ->",nl,indent(12)]); -gen_optormand_case({'DEFAULT',DefaultValue}, Erules, _TopType, +gen_optormand_case({'DEFAULT',DefaultValue}, Gen, _TopType, _Cname, Type, Element) -> CurrMod = get(currmod), - case catch lists:member(der,get(encoding_options)) of - true -> - asn1ct_gen_check:emit(Type, DefaultValue, Element); - _ -> - emit([" case ",Element," of",nl]), - emit([indent(9),"asn1_DEFAULT -> {", - empty_lb(Erules), - ",0};",nl]), - case DefaultValue of - #'Externalvaluereference'{module=CurrMod, - value=V} -> - emit([indent(9),"?",{asis,V}," -> {", - empty_lb(Erules),",0};",nl]); - _ -> - emit([indent(9),{asis, - DefaultValue}," -> {", - empty_lb(Erules),",0};",nl]) - end, - emit([indent(9),"_ ->",nl,indent(12)]) + case Gen of + #gen{erule=ber,der=true} -> + asn1ct_gen_check:emit(Gen, Type, DefaultValue, Element); + #gen{erule=ber,der=false,pack=Pack} -> + Ind9 = indent(9), + DefMarker = case Pack of + record -> asn1_DEFAULT; + map -> ?MISSING_IN_MAP + end, + emit([" case ",Element," of",nl, + Ind9,{asis,DefMarker}," ->",nl, + Ind9,indent(3),"{",empty_lb(Gen),",0};",nl, + Ind9,"_ when ",Element," =:= "]), + Dv = case DefaultValue of + #'Externalvaluereference'{module=CurrMod, + value=V} -> + ["?",{asis,V}]; + _ -> + [{asis,DefaultValue}] + end, + emit(Dv++[" ->",nl, + Ind9,indent(3),"{",empty_lb(Gen),",0};",nl, + Ind9,"_ ->",nl, + indent(12)]) end. %% Use for SEQUENCE OF and CHOICE. 
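With the maps representation there is no asn1_DEFAULT placeholder; an absent DEFAULT component is instead signalled by the asn1__MISSING_IN_MAP marker, so gen_optormand_case/6 (BER without DER) emits a case that skips the component both when it is missing from the map and when it equals its default. For a hypothetical component "a INTEGER DEFAULT 42" the generated code looks roughly like:

    %% Sketch of generated code:
    case Cindex1 of
        asn1__MISSING_IN_MAP ->
            {<<>>,0};
        _ when Cindex1 =:= 42 ->
            {<<>>,0};
        _ ->
            encode_a(Cindex1)   %hypothetical: the component encoder is emitted here
    end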
@@ -1207,7 +1286,7 @@ gen_dec_call({typefield,_},_,_,Cname,Type,BytesVar,Tag,_,_,_DecObjInf,OptOrMandC (Type#type.def)#'ObjectClassFieldType'.fieldname, [{Cname,RefedFieldName,asn1ct_gen:mk_var(asn1ct_name:curr(term)), asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),Tag,OptOrMandComp}]; -gen_dec_call(InnerType, _Erules, TopType, Cname, Type, BytesVar, +gen_dec_call(InnerType, Gen, TopType, Cname, Type, BytesVar, Tag, _PrimOptOrMand, _OptOrMand, DecObjInf,_) -> WhatKind = asn1ct_gen:type(InnerType), gen_dec_call1(WhatKind, InnerType, TopType, Cname, @@ -1215,7 +1294,7 @@ gen_dec_call(InnerType, _Erules, TopType, Cname, Type, BytesVar, case DecObjInf of {Cname,{_,OSet,_UniqueFName,ValIndex}} -> Term = asn1ct_gen:mk_var(asn1ct_name:curr(term)), - ValueMatch = value_match(ValIndex,Term), + ValueMatch = value_match(Gen, ValIndex, Term), {ObjSetMod,ObjSetName} = OSet, emit([",",nl,"ObjFun = ",{asis,ObjSetMod},":'getdec_",ObjSetName, "'(",ValueMatch,")"]); @@ -1340,19 +1419,6 @@ gen_dec_call1(WhatKind, _, TopType, Cname, Type, BytesVar, Tag) -> indent(N) -> lists:duplicate(N,32). % 32 = space -mkcindexlist([H,T1|T], Sep) -> % Sep is a string e.g ", " or "+ " - emit(["Cindex",H,Sep]), - mkcindexlist([T1|T], Sep); -mkcindexlist([H|T], Sep) -> - emit(["Cindex",H]), - mkcindexlist(T, Sep); -mkcindexlist([], _) -> - true. - -mkcindexlist(L) -> - mkcindexlist(L,", "). - - mkvlist([H,T1|T], Sep) -> % Sep is a string e.g ", " or "+ " emit([{var,H},Sep]), mkvlist([T1|T], Sep); @@ -1429,19 +1495,25 @@ mkfuncname(TopType,Cname,WhatKind,Prefix,Suffix) -> {F, "?MODULE", F} end. -empty_lb(ber) -> +empty_lb(#gen{erule=ber}) -> "<<>>". -value_match(Index,Value) when is_atom(Value) -> - value_match(Index,atom_to_list(Value)); -value_match([],Value) -> +value_match(#gen{pack=record}, VIs, Value) -> + value_match_rec(VIs, Value); +value_match(#gen{pack=map}, VIs, Value) -> + value_match_map(VIs, Value). + +value_match_rec([], Value) -> + Value; +value_match_rec([{VI,_}|VIs], Value0) -> + Value = value_match_rec(VIs, Value0), + lists:concat(["element(",VI,", ",Value,")"]). + +value_match_map([], Value) -> Value; -value_match([{VI,_}|VIs],Value) -> - value_match1(Value,VIs,lists:concat(["element(",VI,","]),1). -value_match1(Value,[],Acc,Depth) -> - Acc ++ Value ++ lists:concat(lists:duplicate(Depth,")")); -value_match1(Value,[{VI,_}|VIs],Acc,Depth) -> - value_match1(Value,VIs,Acc++lists:concat(["element(",VI,","]),Depth+1). +value_match_map([{_,Name}|VIs], Value0) -> + Value = value_match_map(VIs, Value0), + lists:concat(["maps:get(",Name,", ",Value,")"]). call(F, Args) -> asn1ct_func:call(ber, F, Args). diff --git a/lib/asn1/src/asn1ct_constructed_per.erl b/lib/asn1/src/asn1ct_constructed_per.erl index a34b25182c..9cd9864b80 100644 --- a/lib/asn1/src/asn1ct_constructed_per.erl +++ b/lib/asn1/src/asn1ct_constructed_per.erl @@ -32,17 +32,27 @@ -include("asn1_records.hrl"). %-compile(export_all). --import(asn1ct_gen, [emit/1,demit/1,get_record_name_prefix/0]). --import(asn1ct_func, [call/3]). +-import(asn1ct_gen, [emit/1,demit/1,get_record_name_prefix/1]). + +-type type_name() :: any(). + %% ENCODE GENERATOR FOR SEQUENCE TYPE ** ********** -gen_encode_set(Erules,TypeName,D) -> - gen_encode_constructed(Erules,TypeName,D). +-spec gen_encode_set(Gen, TypeName, #type{}) -> 'ok' when + Gen :: #gen{}, + TypeName :: type_name(). + +gen_encode_set(Gen, TypeName, D) -> + gen_encode_constructed(Gen, TypeName, D). + +-spec gen_encode_sequence(Gen, TypeName, #type{}) -> 'ok' when + Gen :: #gen{}, + TypeName :: type_name(). 
-gen_encode_sequence(Erules,TypeName,D) -> - gen_encode_constructed(Erules,TypeName,D). +gen_encode_sequence(Gen, TypeName, D) -> + gen_encode_constructed(Gen, TypeName, D). gen_encode_constructed(Erule, Typename, #type{}=D) -> asn1ct_name:start(), @@ -50,88 +60,23 @@ gen_encode_constructed(Erule, Typename, #type{}=D) -> asn1ct_imm:enc_cg(Imm, is_aligned(Erule)), emit([".",nl]). -gen_encode_constructed_imm(Erule, Typename, #type{}=D) -> - {ExtAddGroup,TmpCompList,TableConsInfo} = - case D#type.def of - #'SEQUENCE'{tablecinf=TCI,components=CL,extaddgroup=ExtAddGroup0} -> - {ExtAddGroup0,CL,TCI}; - #'SET'{tablecinf=TCI,components=CL} -> - {undefined,CL,TCI} - end, - - CompList = case ExtAddGroup of - undefined -> - TmpCompList; - _ when is_integer(ExtAddGroup) -> - %% This is a fake SEQUENCE representing an ExtensionAdditionGroup - %% Reset the textual order so we get the right - %% index of the components - [Comp#'ComponentType'{textual_order=undefined}|| - Comp<-TmpCompList] - end, - ExternalImm = - case Typename of - ['EXTERNAL'] -> - Next = asn1ct_gen:mk_var(asn1ct_name:next(val)), - Curr = asn1ct_gen:mk_var(asn1ct_name:curr(val)), - asn1ct_name:new(val), - [{call,ext,transform_to_EXTERNAL1990,[{var,Curr}],{var,Next}}]; - _ -> - [] - end, - Aligned = is_aligned(Erule), - Value0 = make_var(val), +gen_encode_constructed_imm(Gen, Typename, #type{}=D) -> + {CompList,TableConsInfo} = enc_complist(D), + ExternalImm = external_imm(Gen, Typename), Optionals = optionals(to_textual_order(CompList)), - ImmOptionals = [asn1ct_imm:per_enc_optional(Value0, Opt, Aligned) || - Opt <- Optionals], + ImmOptionals = enc_optionals(Gen, Optionals), Ext = extensible_enc(CompList), + Aligned = is_aligned(Gen), ExtImm = case Ext of {ext,ExtPos,NumExt} when NumExt > 0 -> - gen_encode_extaddgroup(CompList), + gen_encode_extaddgroup(Gen, CompList), Value = make_var(val), - asn1ct_imm:per_enc_extensions(Value, ExtPos, - NumExt, Aligned); + enc_extensions(Gen, Value, ExtPos, NumExt, Aligned); _ -> [] end, - {EncObj,ObjSetImm} = - case TableConsInfo of - #simpletableattributes{usedclassfield=Used, - uniqueclassfield=Unique} when Used /= Unique -> - {false,[]}; - %% ObjectSet, name of the object set in constraints - %% - %%{ObjectSet,AttrN,N,UniqueFieldName} -> %% N is index of attribute that determines constraint - #simpletableattributes{objectsetname=ObjectSet, - c_name=AttrN, - c_index=N, - usedclassfield=UniqueFieldName, - uniqueclassfield=UniqueFieldName, - valueindex=ValueIndex0 - } -> %% N is index of attribute that determines constraint - {Module,ObjSetName} = ObjectSet, - #typedef{typespec=#'ObjectSet'{gen=Gen}} = - asn1_db:dbget(Module, ObjSetName), - case Gen of - true -> - ValueIndex = ValueIndex0 ++ [{N+1,top}], - Val = make_var(val), - {ObjSetImm0,Dst} = enc_dig_out_value(ValueIndex, Val), - {{AttrN,Dst},ObjSetImm0}; - false -> - {false,[]} - end; - _ -> - case D#type.tablecinf of - [{objfun,_}|_] -> - %% when the simpletableattributes was at an outer - %% level and the objfun has been passed through the - %% function call - {{"got objfun through args",{var,"ObjFun"}},[]}; - _ -> - {false,[]} - end - end, + MatchImm = enc_map_match(Gen, CompList), + {EncObj,ObjSetImm} = enc_table(Gen, TableConsInfo, D), ImmSetExt = case Ext of {ext,_Pos,NumExt2} when NumExt2 > 0 -> @@ -141,38 +86,195 @@ gen_encode_constructed_imm(Erule, Typename, #type{}=D) -> _ -> [] end, - ImmBody = gen_enc_components_call(Erule, Typename, CompList, EncObj, Ext), - ExternalImm ++ ExtImm ++ ObjSetImm ++ + ImmBody = 
gen_enc_components_call(Gen, Typename, CompList, EncObj, Ext), + ExternalImm ++ MatchImm ++ ExtImm ++ ObjSetImm ++ asn1ct_imm:enc_append([ImmSetExt] ++ ImmOptionals ++ ImmBody). -gen_encode_extaddgroup(CompList) -> +external_imm(Gen, ['EXTERNAL']) -> + Next = asn1ct_gen:mk_var(asn1ct_name:next(val)), + Curr = asn1ct_gen:mk_var(asn1ct_name:curr(val)), + asn1ct_name:new(val), + F = case Gen of + #gen{pack=record} -> transform_to_EXTERNAL1990; + #gen{pack=map} -> transform_to_EXTERNAL1990_maps + end, + [{call,ext,F,[{var,Curr}],{var,Next}}]; +external_imm(_, _) -> + []. + +enc_extensions(#gen{pack=record}, Value, ExtPos, NumExt, Aligned) -> + asn1ct_imm:per_enc_extensions(Value, ExtPos, NumExt, Aligned); +enc_extensions(#gen{pack=map}, Value, ExtPos, NumExt, Aligned) -> + Vars = [{var,lists:concat(["Input@",Pos])} || + Pos <- lists:seq(ExtPos, ExtPos+NumExt-1)], + Undefined = atom_to_list(?MISSING_IN_MAP), + asn1ct_imm:per_enc_extensions_map(Value, Vars, Undefined, Aligned). + +enc_complist(#type{def=Def}) -> + case Def of + #'SEQUENCE'{tablecinf=TCI,components=CL0,extaddgroup=ExtAddGroup} -> + case ExtAddGroup of + undefined -> + {CL0,TCI}; + _ when is_integer(ExtAddGroup) -> + %% This is a fake SEQUENCE representing an + %% ExtensionAdditionGroup. Renumber the textual + %% order so we get the right index of the + %% components. + CL = add_textual_order(CL0), + {CL,TCI} + end; + #'SET'{tablecinf=TCI,components=CL} -> + {CL,TCI} + end. + +enc_table(Gen, #simpletableattributes{objectsetname=ObjectSet, + c_name=AttrN, + c_index=N, + usedclassfield=UniqueFieldName, + uniqueclassfield=UniqueFieldName, + valueindex=ValueIndex0}, _) -> + {Module,ObjSetName} = ObjectSet, + #typedef{typespec=#'ObjectSet'{gen=MustGen}} = + asn1_db:dbget(Module, ObjSetName), + case MustGen of + true -> + ValueIndex = ValueIndex0 ++ [{N+1,'ASN1_top'}], + Val = make_var(val), + {ObjSetImm,Dst} = enc_dig_out_value(Gen, ValueIndex, Val), + {{AttrN,Dst},ObjSetImm}; + false -> + {false,[]} + end; +enc_table(_Gen, #simpletableattributes{}, _) -> + {false,[]}; +enc_table(_Gen, _, #type{tablecinf=TCInf}) -> + case TCInf of + [{objfun,_}|_] -> + %% The simpletableattributes was at an outer + %% level and the objfun has been passed through the + %% function call. + {{"got objfun through args",{var,"ObjFun"}},[]}; + _ -> + {false,[]} + end. + +enc_optionals(Gen, Optionals) -> + Var = make_var(val), + enc_optionals_1(Gen, Optionals, Var). + +enc_optionals_1(#gen{pack=record}=Gen, [{Pos,DefVals}|T], Var) -> + {Imm0,Element} = asn1ct_imm:enc_element(Pos+1, Var), + Imm = asn1ct_imm:per_enc_optional(Element, DefVals), + [Imm0++Imm|enc_optionals_1(Gen, T, Var)]; +enc_optionals_1(#gen{pack=map}=Gen, [{Pos,DefVals0}|T], V) -> + Var = {var,lists:concat(["Input@",Pos])}, + DefVals = translate_missing_value(Gen, DefVals0), + Imm = asn1ct_imm:per_enc_optional(Var, DefVals), + [Imm|enc_optionals_1(Gen, T, V)]; +enc_optionals_1(_, [], _) -> + []. 
+ +enc_map_match(#gen{pack=record}, _Cs) -> + []; +enc_map_match(#gen{pack=map}, Cs0) -> + Var0 = "Input", + Cs = enc_flatten_components(Cs0), + M = [[quote_atom(Name),":=",lists:concat([Var0,"@",Order])] || + #'ComponentType'{prop=mandatory,name=Name, + textual_order=Order} <- Cs], + Mand = case M of + [] -> + []; + [_|_] -> + Patt = {expr,lists:flatten(["#{",lists:join(",", M),"}"])}, + [{assign,Patt,{var,asn1ct_name:curr(val)}}] + end, + + Os0 = [{Name,Order} || + #'ComponentType'{prop=Prop,name=Name, + textual_order=Order} <- Cs, + Prop =/= mandatory], + {var,Val} = make_var(val), + F = fun({Name,Order}) -> + Var = lists:concat([Var0,"@",Order]), + P0 = ["case ",Val," of\n" + " #{",quote_atom(Name),":=",Var,"_0} -> ", + Var,"_0;\n" + " _ -> ",atom_to_list(?MISSING_IN_MAP),"\n" + "end"], + P = lists:flatten(P0), + {assign,{var,Var},P} + end, + Os = [F(O) || O <- Os0], + Mand ++ Os. + +enc_flatten_components({Root1,Ext0,Root2}=CL) -> + {_,Gs} = extgroup_pos_and_length(CL), + Ext = wrap_extensionAdditionGroups(Ext0, Gs), + Root1 ++ Root2 ++ [mark_optional(C) || C <- Ext]; +enc_flatten_components({Root,Ext}) -> + enc_flatten_components({Root,Ext,[]}); +enc_flatten_components(Cs) -> + Cs. + +gen_encode_extaddgroup(#gen{pack=record}, CompList) -> case extgroup_pos_and_length(CompList) of {extgrouppos,[]} -> ok; {extgrouppos,ExtGroupPosLenList} -> - _ = [do_gen_encode_extaddgroup(G) || G <- ExtGroupPosLenList], + _ = [gen_encode_eag_record(G) || + G <- ExtGroupPosLenList], ok - end. + end; +gen_encode_extaddgroup(#gen{pack=map}, Cs0) -> + Cs = enc_flatten_components(Cs0), + gen_encode_eag_map(Cs). + +gen_encode_eag_map([#'ComponentType'{name=Group,typespec=Type}|Cs]) -> + case Type of + #type{def=#'SEQUENCE'{extaddgroup=G,components=GCs0}} + when is_integer(G) -> + Ns = [N || #'ComponentType'{name=N,prop=mandatory} <- GCs0], + test_for_mandatory(Ns, Group), + gen_encode_eag_map(Cs); + _ -> + gen_encode_eag_map(Cs) + end; +gen_encode_eag_map([]) -> + ok. + +test_for_mandatory([Mand|_], Group) -> + emit([{next,val}," = case ",{curr,val}," of",nl, + "#{",quote_atom(Mand),":=_} -> ", + {curr,val},"#{",{asis,Group},"=>",{curr,val},"};",nl, + "#{} -> ",{curr,val},nl, + "end,",nl]), + asn1ct_name:new(val); +test_for_mandatory([], _) -> + ok. -do_gen_encode_extaddgroup({ActualGroupPos,GroupVirtualPos,GroupLen}) -> +gen_encode_eag_record({ActualPos,VirtualPos,Len}) -> Val = asn1ct_gen:mk_var(asn1ct_name:curr(val)), - Elements = make_elements(GroupVirtualPos+1, - Val, - lists:seq(1, GroupLen)), - Expr = any_non_value(GroupVirtualPos+1, Val, GroupLen, ""), + Elements = get_input_vars(Val, VirtualPos, Len), + Expr = any_non_value(Val, VirtualPos, Len), emit([{next,val}," = case ",Expr," of",nl, - "false -> setelement(",{asis,ActualGroupPos+1},", ", + "false -> setelement(",{asis,ActualPos+1},", ", {curr,val},", asn1_NOVALUE);",nl, - "true -> setelement(",{asis,ActualGroupPos+1},", ", + "true -> setelement(",{asis,ActualPos+1},", ", {curr,val},", {extaddgroup,", Elements,"})",nl, "end,",nl]), asn1ct_name:new(val). -any_non_value(_, _, 0, _) -> +any_non_value(Val, Pos, N) -> + L = any_non_value_1(Val, Pos, N), + lists:join(" orelse ", L). + +any_non_value_1(_, _, 0) -> []; -any_non_value(Pos, Val, N, Sep) -> - Sep ++ [make_element(Pos, Val)," =/= asn1_NOVALUE"] ++ - any_non_value(Pos+1, Val, N-1, [" orelse",nl]). +any_non_value_1(Val, Pos, N) -> + Var = get_input_var(Val, Pos), + [Var ++ " =/= asn1_NOVALUE"|any_non_value_1(Val, Pos+1, N-1)]. 
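enc_map_match/2 is the PER counterpart of enc_match_input/3 in the BER back end: mandatory components are bound with a single map pattern and each optional or defaulted component gets its own case, but the variables are named after the textual order ("Input@N"). For the same hypothetical SEQUENCE {a, b OPTIONAL} the generated prologue is roughly:

    %% Sketch of generated code:
    #{a := Input@1} = Val,
    Input@2 = case Val of
                  #{b := Input@2_0} -> Input@2_0;
                  _ -> asn1__MISSING_IN_MAP
              end,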
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% generate decode function for SEQUENCE and SET @@ -306,55 +408,105 @@ gen_dec_constructed_imm(Erule, Typename, #type{}=D) -> {DecObjInf,_,_} = ObjSetInfo, EmitComp = gen_dec_components_call(Erule, Typename, CompList, DecObjInf, Ext, length(Optionals)), - EmitRest = fun({AccTerm,AccBytes}) -> - gen_dec_constructed_imm_2(Erule, Typename, - CompList, - ObjSetInfo, - AccTerm, AccBytes) - end, - [EmitExt,EmitOpt|EmitComp++[{safe,EmitRest}]]. + EmitObjSets = gen_dec_objsets_fun(Erule, ObjSetInfo), + EmitPack = fun(_) -> + gen_dec_pack(Erule, Typename, CompList) + end, + RestGroup = {group,[{safe,EmitObjSets},{safe,EmitPack}]}, + [EmitExt,EmitOpt|EmitComp++[RestGroup]]. + +gen_dec_objsets_fun(Gen, ObjSetInfo) -> + fun({AccTerm,AccBytes}) -> + {_,_UniqueFName,ValueIndex} = ObjSetInfo, + case {AccTerm,AccBytes} of + {[],[]} -> + ok; + {_,[]} -> + ok; + {[{ObjSet,LeadingAttr,Term}],ListOfOpenTypes} -> + ValueMatch = value_match(Gen, ValueIndex, Term), + _ = [begin + gen_dec_open_type(Gen, ValueMatch, ObjSet, + LeadingAttr, T), + emit([com,nl]) + end || T <- ListOfOpenTypes], + ok + end + end. -gen_dec_constructed_imm_2(Erule, Typename, CompList, - ObjSetInfo, AccTerm, AccBytes) -> - {_,_UniqueFName,ValueIndex} = ObjSetInfo, - case {AccTerm,AccBytes} of - {[],[]} -> - ok; - {_,[]} -> - ok; - {[{ObjSet,LeadingAttr,Term}],ListOfOpenTypes} -> - ValueMatch = value_match(ValueIndex, Term), - _ = [begin - gen_dec_open_type(Erule, ValueMatch, ObjSet, - LeadingAttr, T), - emit([com,nl]) - end || T <- ListOfOpenTypes], - ok - end, - %% we don't return named lists any more Cnames = mkcnamelist(CompList), - demit({"Result = "}), %dbg - %% return value as record - RecordName = record_name(Typename), +gen_dec_pack(Gen, Typename, CompList) -> case Typename of ['EXTERNAL'] -> - emit({" OldFormat={'",RecordName, - "'"}), - mkvlist(asn1ct_name:all(term)), - emit({"},",nl}), - emit([" ASN11994Format =",nl, - " ", - {call,ext,transform_to_EXTERNAL1994, - ["OldFormat"]},com,nl]), - emit(" {ASN11994Format,"); + dec_external(Gen, Typename); _ -> - emit(["{{'",RecordName,"'"]), - %% CompList is used here because we don't want - %% ExtensionAdditionGroups to be wrapped in SEQUENCES when - %% we are ordering the fields according to textual order - mkvlist(textual_order(to_encoding_order(CompList),asn1ct_name:all(term))), - emit("},") - end, - emit({{curr,bytes},"}"}). + asn1ct_name:new(res), + gen_dec_do_pack(Gen, Typename, CompList), + emit([com,nl, + "{",{curr,res},",",{curr,bytes},"}"]) + end. + +dec_external(#gen{pack=record}=Gen, Typename) -> + RecordName = list_to_atom(record_name(Gen, Typename)), + All = [{var,Term} || Term <- asn1ct_name:all(term)], + Record = [{asis,RecordName}|All], + emit(["OldFormat={",lists:join(",", Record),"},",nl, + "ASN11994Format =",nl, + {call,ext,transform_to_EXTERNAL1994, + ["OldFormat"]},com,nl, + "{ASN11994Format,",{curr,bytes},"}"]); +dec_external(#gen{pack=map}, _Typename) -> + Vars = asn1ct_name:all(term), + Names = ['direct-reference','indirect-reference', + 'data-value-descriptor',encoding], + Zipped = lists:zip(Names, Vars), + MapInit = lists:join(",", [["'",N,"'=>",{var,V}] || {N,V} <- Zipped]), + emit(["OldFormat = #{",MapInit,"}",com,nl, + "ASN11994Format =",nl, + {call,ext,transform_to_EXTERNAL1994_maps, + ["OldFormat"]},com,nl, + "{ASN11994Format,",{curr,bytes},"}"]). 
+ +gen_dec_do_pack(#gen{pack=record}=Gen, TypeName, CompList) -> + Zipped0 = zip_components(CompList, asn1ct_name:all(term)), + Zipped = textual_order(Zipped0), + RecordName = ["'",record_name(Gen, TypeName),"'"], + L = [RecordName|[{var,Var} || {_,Var} <- Zipped]], + emit([{curr,res}," = {",lists:join(",", L),"}"]); +gen_dec_do_pack(#gen{pack=map}, _, CompList0) -> + CompList = enc_flatten_components(CompList0), + Zipped0 = zip_components(CompList, asn1ct_name:all(term)), + Zipped = textual_order(Zipped0), + PF = fun({#'ComponentType'{prop='OPTIONAL'},_}) -> false; + ({_,_}) -> true + end, + {Mandatory,Optional} = lists:partition(PF, Zipped), + L = [[{asis,Name},"=>",{var,Var}] || + {#'ComponentType'{name=Name},Var} <- Mandatory], + emit([{curr,res}," = #{",lists:join(",", L),"}"]), + gen_dec_map_optional(Optional), + gen_dec_merge_maps(asn1ct_name:all(map)). + +gen_dec_map_optional([{#'ComponentType'{name=Name},Var}|T]) -> + asn1ct_name:new(res), + emit([com,nl, + {curr,res}," = case ",{var,Var}," of",nl, + " asn1_NOVALUE -> ",{prev,res},";",nl, + " _ -> ",{prev,res},"#{",{asis,Name},"=>",{var,Var},"}",nl, + "end"]), + gen_dec_map_optional(T); +gen_dec_map_optional([]) -> + ok. + +gen_dec_merge_maps([M|Ms]) -> + asn1ct_name:new(res), + emit([com,nl, + {curr,res}," = maps:merge(",{prev,res},", ",{var,M},")"]), + gen_dec_merge_maps(Ms); +gen_dec_merge_maps([]) -> + ok. + +quote_atom(A) when is_atom(A) -> + io_lib:format("~p", [A]). %% record_name([TypeName]) -> RecordNameString %% Construct a record name for the constructed type, ignoring any @@ -362,10 +514,10 @@ gen_dec_constructed_imm_2(Erule, Typename, CompList, %% group. Such fake sequences never appear as a top type, and their %% name always start with "ExtAddGroup". -record_name(Typename0) -> +record_name(Gen, Typename0) -> [TopType|Typename1] = lists:reverse(Typename0), Typename = filter_ext_add_groups(Typename1, [TopType]), - lists:concat([get_record_name_prefix(), + lists:concat([get_record_name_prefix(Gen), asn1ct_gen:list2rname(Typename)]). filter_ext_add_groups([H|T], Acc) when is_atom(H) -> @@ -379,17 +531,26 @@ filter_ext_add_groups([H|T], Acc) -> filter_ext_add_groups(T, [H|Acc]); filter_ext_add_groups([], Acc) -> Acc. -textual_order([#'ComponentType'{textual_order=undefined}|_],TermList) -> - TermList; -textual_order(CompList,TermList) when is_list(CompList) -> - OrderList = [Ix||#'ComponentType'{textual_order=Ix} <- CompList], - [Term||{_,Term}<- - lists:sort(lists:zip(OrderList, - lists:sublist(TermList,length(OrderList))))]; - %% sublist is just because Termlist can sometimes be longer than - %% OrderList, which it really shouldn't -textual_order({Root,Ext},TermList) -> - textual_order(Root ++ Ext,TermList). +zip_components({Root,Ext}, Vars) -> + zip_components({Root,Ext,[]}, Vars); +zip_components({R1,Ext0,R2}, Vars) -> + Ext = [mark_optional(C) || C <- Ext0], + zip_components(R1++R2++Ext, Vars); +zip_components(Cs, Vars) when is_list(Cs) -> + zip_components_1(Cs, Vars). + +zip_components_1([#'ComponentType'{}=C|Cs], [V|Vs]) -> + [{C,V}|zip_components_1(Cs, Vs)]; +zip_components_1([_|Cs], Vs) -> + zip_components_1(Cs, Vs); +zip_components_1([], []) -> + []. + +textual_order([{#'ComponentType'{textual_order=undefined},_}|_]=L) -> + L; +textual_order(L0) -> + L = [{Ix,P} || {#'ComponentType'{textual_order=Ix},_}=P <- L0], + [C || {_,C} <- lists:sort(L)]. to_textual_order({Root,Ext}) -> {to_textual_order(Root),Ext}; @@ -458,7 +619,7 @@ dec_objset_default(N, _, _, true) -> end]). 
dec_objset_1(Erule, N, {Id,Obj}, RestFields, Typename) -> - emit([{asis,N},"(Bytes, ",{asis,Id},") ->",nl]), + emit([{asis,N},"(Bytes, Id) when Id =:= ",{asis,Id}," ->",nl]), dec_objset_2(Erule, Obj, RestFields, Typename). dec_objset_2(Erule, Obj, RestFields0, Typename) -> @@ -595,8 +756,7 @@ do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D) -> emit([",",nl, {asis,F},"(",Num,", ",Buf,ObjFun,", [])"]). -is_aligned(per) -> true; -is_aligned(uper) -> false. +is_aligned(#gen{erule=per,aligned=Aligned}) -> Aligned. gen_decode_length(Constraint, Erule) -> emit(["%% Length with constraint ",{asis,Constraint},nl]), @@ -640,22 +800,7 @@ gen_decode_sof_components(Erule, Name, Typename, SeqOrSetOf, Cont) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% General and special help functions (not exported) - -mkvlist([H|T]) -> - emit(","), - mkvlist2([H|T]); -mkvlist([]) -> - true. -mkvlist2([H,T1|T]) -> - emit({{var,H},","}), - mkvlist2([T1|T]); -mkvlist2([H|T]) -> - emit({{var,H}}), - mkvlist2(T); -mkvlist2([]) -> - true. - +%% General and special help functions (not exported) extensible_dec(CompList) when is_list(CompList) -> noext; @@ -728,28 +873,26 @@ gen_dec_optionals(Optionals) -> {imm,Imm0,E}. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%% Produce a list with positions (in the Value record) where -%% there are optional components, start with 2 because first element -%% is the record name - -optionals({L1,Ext,L2}) -> - Opt1 = optionals(L1,[],2), - ExtComps = length([C||C = #'ComponentType'{}<-Ext]), - Opt2 = optionals(L2,[],2+length(L1)+ExtComps), - Opt1 ++ Opt2; -optionals({L,_Ext}) -> optionals(L,[],2); -optionals(L) -> optionals(L,[],2). -optionals([#'ComponentType'{prop='OPTIONAL'}|Rest], Acc, Pos) -> - optionals(Rest, [Pos|Acc], Pos+1); -optionals([#'ComponentType'{typespec=T,prop={'DEFAULT',Val}}|Rest], - Acc, Pos) -> +optionals({Root1,Ext,Root2}) -> + Opt1 = optionals(Root1, 1), + ExtComps = length([C || C = #'ComponentType'{} <- Ext]), + Opt2 = optionals(Root2, 1 + length(Root1) + ExtComps), + Opt1 ++ Opt2; +optionals({L,_Ext}) -> + optionals(L, 1); +optionals(L) -> + optionals(L, 1). + +optionals([#'ComponentType'{prop='OPTIONAL'}|Rest], Pos) -> + [{Pos,[asn1_NOVALUE]}|optionals(Rest, Pos+1)]; +optionals([#'ComponentType'{typespec=T,prop={'DEFAULT',Val}}|Cs], Pos) -> Vals = def_values(T, Val), - optionals(Rest, [{Pos,Vals}|Acc], Pos+1); -optionals([#'ComponentType'{}|Rest], Acc, Pos) -> - optionals(Rest, Acc, Pos+1); -optionals([], Acc, _) -> - lists:reverse(Acc). + [{Pos,Vals}|optionals(Cs, Pos+1)]; +optionals([#'ComponentType'{}|Rest], Pos) -> + optionals(Rest, Pos+1); +optionals([], _) -> + []. %%%%%%%%%%%%%%%%%%%%%% %% create_optionality_table(Cs=[#'ComponentType'{textual_order=undefined}|_]) -> @@ -779,13 +922,6 @@ get_optionality_pos(TextPos,OptTable) -> no_num end. -to_encoding_order(Cs) when is_list(Cs) -> - Cs; -to_encoding_order(Cs = {_Root,_Ext}) -> - Cs; -to_encoding_order({R1,Ext,R2}) -> - {R1++R2,Ext}. - add_textual_order(Cs) when is_list(Cs) -> {NewCs,_} = add_textual_order1(Cs,1), NewCs; @@ -810,69 +946,81 @@ add_textual_order1(Cs,NumIn) -> end, NumIn,Cs). 
-gen_enc_components_call(Erule,TopType,{Root,ExtList}, DynamicEnc,Ext) -> - gen_enc_components_call(Erule,TopType,{Root,ExtList,[]}, DynamicEnc,Ext); -gen_enc_components_call(Erule,TopType,CL={Root,ExtList,Root2}, DynamicEnc,Ext) -> - %% The type has extensionmarker - {Imm0,Rpos} = gen_enc_components_call1(Erule,TopType,Root++Root2,1, DynamicEnc,noext,[]), +gen_enc_components_call(Erule, TopType, {Root,ExtList}, DynamicEnc, Ext) -> + gen_enc_components_call(Erule, TopType, {Root,ExtList,[]}, DynamicEnc, Ext); +gen_enc_components_call(Erule, TopType, {R1,ExtList0,R2}=CL, DynamicEnc, Ext) -> + Root = R1 ++ R2, + Imm0 = gen_enc_components_call1(Erule, TopType, Root, DynamicEnc, noext), ExtImm = case Ext of {ext,_,ExtNum} when ExtNum > 0 -> [{var,"Extensions"}]; _ -> [] end, - %handle extensions {extgrouppos,ExtGroupPosLen} = extgroup_pos_and_length(CL), - NewExtList = wrap_extensionAdditionGroups(ExtList,ExtGroupPosLen), - {Imm1,_} = gen_enc_components_call1(Erule,TopType,NewExtList,Rpos,DynamicEnc,Ext,[]), + ExtList1 = wrap_extensionAdditionGroups(ExtList0, ExtGroupPosLen), + ExtList = [mark_optional(C) || C <- ExtList1], + Imm1 = gen_enc_components_call1(Erule, TopType, ExtList, DynamicEnc, Ext), Imm0 ++ [ExtImm|Imm1]; -gen_enc_components_call(Erule,TopType, CompList, DynamicEnc, Ext) -> - %% The type has no extensionmarker - {Imm,_} = gen_enc_components_call1(Erule,TopType,CompList,1,DynamicEnc,Ext,[]), - Imm. +gen_enc_components_call(Erule, TopType, CompList, DynamicEnc, Ext) -> + %% No extension marker. + gen_enc_components_call1(Erule, TopType, CompList, DynamicEnc, Ext). + +mark_optional(#'ComponentType'{prop=Prop0}=C) -> + Prop = case Prop0 of + mandatory -> 'OPTIONAL'; + 'OPTIONAL'=Keep -> Keep; + {'DEFAULT',_}=Keep -> Keep + end, + C#'ComponentType'{prop=Prop}; +mark_optional(Other) -> + Other. 
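mark_optional/1 above is why extension additions are encoded like OPTIONAL components even when declared mandatory: a value of the extended type may simply not carry them. A hedged usage sketch, assuming a module 'File' generated with the per and maps options from Seq ::= SEQUENCE { a INTEGER, ..., b INTEGER } (module, type and field names invented):

%% b is an extension addition, so it may be left out of the input map
%% even though it is not marked OPTIONAL in the ASN.1 source.
{ok,_E1} = 'File':encode('Seq', #{a => 1, b => 2}),
{ok,_E2} = 'File':encode('Seq', #{a => 1}).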
+ +gen_enc_components_call1(Gen, TopType, [C|Rest], DynamicEnc, Ext) -> + #'ComponentType'{name=Cname,typespec=Type, + prop=Prop,textual_order=Num} = C, + InnerType = asn1ct_gen:get_inner(Type#type.def), + CommentString = attribute_comment(InnerType, Num, Cname), + ImmComment = asn1ct_imm:enc_comment(CommentString), -gen_enc_components_call1(Erule,TopType, - [C=#'ComponentType'{name=Cname,typespec=Type,prop=Prop}|Rest], - Tpos, - DynamicEnc, Ext, Acc) -> - - TermNo = - case C#'ComponentType'.textual_order of - undefined -> - Tpos; - CanonicalNum -> - CanonicalNum - end, - Val = make_var(val), - {Imm0,Element} = asn1ct_imm:enc_element(TermNo+1, Val), - Imm1 = gen_enc_line_imm(Erule, TopType, Cname, Type, Element, DynamicEnc, Ext), - Category = case {Prop,Ext} of - {'OPTIONAL',_} -> - optional; - {{'DEFAULT',DefVal},_} -> - {default,DefVal}; - {_,{ext,ExtPos,_}} when Tpos >= ExtPos -> - optional; - {_,_} -> - mandatory - end, - Imm2 = case Category of + {Imm0,Element} = enc_fetch_field(Gen, Num, Prop), + Imm1 = gen_enc_line_imm(Gen, TopType, Cname, Type, + Element, DynamicEnc, Ext), + Imm2 = case Prop of mandatory -> Imm1; - optional -> - asn1ct_imm:enc_absent(Element, [asn1_NOVALUE], Imm1); - {default,Def} -> + 'OPTIONAL' -> + enc_absent(Gen, Element, [asn1_NOVALUE], Imm1); + {'DEFAULT',Def} -> DefValues = def_values(Type, Def), - asn1ct_imm:enc_absent(Element, DefValues, Imm1) + enc_absent(Gen, Element, DefValues, Imm1) end, Imm = case Imm2 of [] -> []; - _ -> Imm0 ++ Imm2 + _ -> [ImmComment|Imm0 ++ Imm2] end, - gen_enc_components_call1(Erule, TopType, Rest, Tpos+1, DynamicEnc, Ext, [Imm|Acc]); -gen_enc_components_call1(_Erule,_TopType,[],Pos,_,_, Acc) -> - ImmList = lists:reverse(Acc), - {ImmList,Pos}. + [Imm|gen_enc_components_call1(Gen, TopType, Rest, DynamicEnc, Ext)]; +gen_enc_components_call1(_Gen, _TopType, [], _, _) -> + []. + +enc_absent(Gen, Var, Absent0, Imm) -> + Absent = translate_missing_value(Gen, Absent0), + asn1ct_imm:enc_absent(Var, Absent, Imm). + +translate_missing_value(#gen{pack=record}, Optionals) -> + Optionals; +translate_missing_value(#gen{pack=map}, Optionals) -> + case Optionals of + [asn1_NOVALUE|T] -> [?MISSING_IN_MAP|T]; + [asn1_DEFAULT|T] -> [?MISSING_IN_MAP|T]; + {call,_,_,_} -> Optionals + end. + +enc_fetch_field(#gen{pack=record}, Num, _Prop) -> + Val = make_var(val), + asn1ct_imm:enc_element(Num+1, Val); +enc_fetch_field(#gen{pack=map}, Num, _) -> + {[],{var,lists:concat(["Input@",Num])}}. 
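enc_fetch_field/3 only decides where the encoder picks up a component value. For records it is an element/2 access at position Num+1 (slot 1 holds the record tag); for maps it is a variable named "Input@"++Num, assumed to be bound from the input map earlier in the generated clause (that part of the generator is not visible in this hunk). Roughly, for the component with textual order 1:

%% #gen{pack=record}: fetch from the record tuple.
Comp = element(2, Val),
%% #gen{pack=map}: Input@1 is a pre-bound variable in the generated code.
Comp = Input@1,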
def_values(#type{def=#'Externaltypereference'{module=Mod,type=Type}}, Def) -> #typedef{typespec=T} = asn1_db:dbget(Mod, Type), @@ -1115,27 +1263,31 @@ gen_dec_components_call(Erule, TopType, {Root,ExtList}, DecInfObj, Ext, NumberOfOptionals) -> gen_dec_components_call(Erule,TopType,{Root,ExtList,[]}, DecInfObj,Ext,NumberOfOptionals); -gen_dec_components_call(Erule,TopType,CL={Root1,ExtList,Root2}, - DecInfObj,Ext,NumberOfOptionals) -> +gen_dec_components_call(Gen, TopType, {Root1,ExtList,Root2}=CL, + DecInfObj, Ext, NumberOfOptionals) -> %% The type has extensionmarker OptTable = create_optionality_table(Root1++Root2), Init = {ignore,fun(_) -> {[],[]} end}, {EmitRoot,Tpos} = - gen_dec_comp_calls(Root1++Root2, Erule, TopType, OptTable, + gen_dec_comp_calls(Root1++Root2, Gen, TopType, OptTable, DecInfObj, noext, NumberOfOptionals, 1, []), - EmitGetExt = gen_dec_get_extension(Erule), + EmitGetExt = gen_dec_get_extension(Gen), {extgrouppos,ExtGroupPosLen} = extgroup_pos_and_length(CL), NewExtList = wrap_extensionAdditionGroups(ExtList, ExtGroupPosLen), - {EmitExts,_} = gen_dec_comp_calls(NewExtList, Erule, TopType, OptTable, + {EmitExts,_} = gen_dec_comp_calls(NewExtList, Gen, TopType, OptTable, DecInfObj, Ext, NumberOfOptionals, Tpos, []), NumExtsToSkip = ext_length(ExtList), Finish = fun(St) -> emit([{next,bytes},"= "]), - call(Erule, skipextensions, - [{curr,bytes},NumExtsToSkip+1,"Extensions"]), + Mod = case Gen of + #gen{erule=per,aligned=false} -> uper; + #gen{erule=per,aligned=true} -> per + end, + asn1ct_func:call(Mod, skipextensions, + [{curr,bytes},NumExtsToSkip+1,"Extensions"]), asn1ct_name:new(bytes), St end, @@ -1178,29 +1330,19 @@ gen_dec_comp_calls([C|Cs], Erule, TopType, OptTable, DecInfObj, gen_dec_comp_calls([], _, _, _, _, _, _, Tpos, Acc) -> {lists:append(lists:reverse(Acc)),Tpos}. 
-gen_dec_comp_call(Comp, Erule, TopType, Tpos, OptTable, DecInfObj, +gen_dec_comp_call(Comp, Gen, TopType, Tpos, OptTable, DecInfObj, Ext, NumberOfOptionals) -> - #'ComponentType'{typespec=Type,prop=Prop,textual_order=TextPos} = Comp, + #'ComponentType'{name=Cname,typespec=Type, + prop=Prop,textual_order=TextPos} = Comp, Pos = case Ext of noext -> Tpos; {ext,Epos,_Enum} -> Tpos - Epos + 1 end, - InnerType = - case Type#type.def of - #'ObjectClassFieldType'{type=InType} -> - InType; - Def -> - asn1ct_gen:get_inner(Def) - end, + InnerType = asn1ct_gen:get_inner(Type#type.def), - DispType = case InnerType of - #'Externaltypereference'{type=T} -> T; - IT when is_tuple(IT) -> element(2,IT); - _ -> InnerType - end, + CommentString = attribute_comment(InnerType, TextPos, Cname), Comment = fun(St) -> - emit([nl,"%% attribute number ",TextPos, - " with type ",DispType,nl]), + emit([nl,"%% ",CommentString,nl]), St end, @@ -1219,15 +1361,9 @@ gen_dec_comp_call(Comp, Erule, TopType, Tpos, OptTable, DecInfObj, _ -> case Type of #type{def=#'SEQUENCE'{ - extaddgroup=Number1, - components=ExtGroupCompList1}} when is_integer(Number1)-> - fun(St) -> - emit(["{{_,"]), - emit_extaddgroupTerms(term,ExtGroupCompList1), - emit(["}"]), - emit([",",{next,bytes},"} = "]), - St - end; + extaddgroup=GroupNum, + components=CompList}} when is_integer(GroupNum)-> + dec_match_extadd_fun(Gen, CompList); _ -> fun(St) -> asn1ct_name:new(term), @@ -1237,9 +1373,9 @@ gen_dec_comp_call(Comp, Erule, TopType, Tpos, OptTable, DecInfObj, end end end, - {Pre,Post} = comp_call_pre_post(Ext, Prop, Pos, Type, TextPos, + {Pre,Post} = comp_call_pre_post(Gen, Ext, Prop, Pos, Type, TextPos, OptTable, NumberOfOptionals, Ext), - Lines = gen_dec_seq_line_imm(Erule, TopType, Comp, Tpos, DecInfObj, Ext), + Lines = gen_dec_seq_line_imm(Gen, TopType, Comp, Tpos, DecInfObj, Ext), AdvBuffer = {ignore,fun(St) -> asn1ct_name:new(bytes), St @@ -1247,9 +1383,24 @@ gen_dec_comp_call(Comp, Erule, TopType, Tpos, OptTable, DecInfObj, [{group,[{safe,Comment},{safe,Preamble}] ++ Pre ++ Lines ++ Post ++ [{safe,AdvBuffer}]}]. -comp_call_pre_post(noext, mandatory, _, _, _, _, _, _) -> +dec_match_extadd_fun(#gen{pack=record}, CompList) -> + fun(St) -> + emit(["{{_,"]), + emit_extaddgroupTerms(term, CompList), + emit(["}"]), + emit([",",{next,bytes},"} = "]), + St + end; +dec_match_extadd_fun(#gen{pack=map}, _CompList) -> + fun(St) -> + asn1ct_name:new(map), + emit(["{",{curr,map},",",{next,bytes},"} = "]), + St + end. + +comp_call_pre_post(_Gen, noext, mandatory, _, _, _, _, _, _) -> {[],[]}; -comp_call_pre_post(noext, Prop, _, Type, TextPos, +comp_call_pre_post(_Gen, noext, Prop, _, Type, TextPos, OptTable, NumOptionals, Ext) -> %% OPTIONAL or DEFAULT OptPos = get_optionality_pos(TextPos, OptTable), @@ -1273,32 +1424,53 @@ comp_call_pre_post(noext, Prop, _, Type, TextPos, "end"]), St end]}; -comp_call_pre_post({ext,_,_}, Prop, Pos, Type, _, _, _, Ext) -> +comp_call_pre_post(Gen, {ext,_,_}, Prop, Pos, Type, _, _, _, Ext) -> %% Extension {[fun(St) -> emit(["case Extensions of",nl, " <<_:",Pos-1,",1:1,_/bitstring>> ->",nl]), St end], - [fun(St) -> - emit([";",nl, - "_ ->",nl, - "{"]), - case Type of - #type{def=#'SEQUENCE'{ - extaddgroup=Number2, - components=ExtGroupCompList2}} - when is_integer(Number2)-> - emit("{extAddGroup,"), - gen_dec_extaddGroup_no_val(Ext, Type, ExtGroupCompList2), - emit("}"); - _ -> - gen_dec_component_no_val(Ext, Type, Prop) - end, - emit([",",{curr,bytes},"}",nl, - "end"]), - St - end]}. + [extadd_group_fun(Gen, Prop, Type, Ext)]}. 
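The {ext,_,_} clause of comp_call_pre_post above wraps the decoding of each extension addition in a single-bit test on the Extensions bitmap read earlier. For the addition at position 3 the emitted code has roughly this shape (dec_b/1 stands for whatever decode call gen_dec_seq_line_imm produces; names invented):

{Term, Bytes2} =
    case Extensions of
        <<_:2,1:1,_/bitstring>> ->
            %% bit 3 is set: the extension addition is present
            dec_b(Bytes1);
        _ ->
            %% absent: OPTIONAL additions decode to asn1_NOVALUE
            {asn1_NOVALUE, Bytes1}
    end,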
+ +extadd_group_fun(#gen{pack=record}, Prop, Type, Ext) -> + fun(St) -> + emit([";",nl, + "_ ->",nl, + "{"]), + case Type of + #type{def=#'SEQUENCE'{ + extaddgroup=Number2, + components=ExtGroupCompList2}} + when is_integer(Number2)-> + emit("{extAddGroup,"), + gen_dec_extaddGroup_no_val(Ext, Type, ExtGroupCompList2), + emit("}"); + _ -> + gen_dec_component_no_val(Ext, Type, Prop) + end, + emit([",",{curr,bytes},"}",nl, + "end"]), + St + end; +extadd_group_fun(#gen{pack=map}, Prop, Type, Ext) -> + fun(St) -> + emit([";",nl, + "_ ->",nl, + "{"]), + case Type of + #type{def=#'SEQUENCE'{ + extaddgroup=Number2, + components=Comp}} + when is_integer(Number2)-> + dec_map_extaddgroup_no_val(Ext, Type, Comp); + _ -> + gen_dec_component_no_val(Ext, Type, Prop) + end, + emit([",",{curr,bytes},"}",nl, + "end"]), + St + end. is_mandatory_predef_tab_c(noext, mandatory, {"got objfun through args","ObjFun"}) -> @@ -1325,7 +1497,20 @@ gen_dec_component_no_val(_, _, 'OPTIONAL') -> emit({"asn1_NOVALUE"}); gen_dec_component_no_val({ext,_,_}, _, mandatory) -> emit({"asn1_NOVALUE"}). - + +dec_map_extaddgroup_no_val(Ext, Type, Comp) -> + L0 = [dec_map_extaddgroup_no_val_1(N, P, Ext, Type) || + #'ComponentType'{name=N,prop=P} <- Comp], + L = [E || E <- L0, E =/= []], + emit(["#{",lists:join(",", L),"}"]). + +dec_map_extaddgroup_no_val_1(Name, {'DEFAULT',DefVal0}, _Ext, Type) -> + DefVal = asn1ct_gen:conform_value(Type, DefVal0), + [Name,"=>",{asis,DefVal}]; +dec_map_extaddgroup_no_val_1(_Name, 'OPTIONAL', _, _) -> + []; +dec_map_extaddgroup_no_val_1(_Name, mandatory, {ext,_,_}, _) -> + []. gen_dec_choice_line(Erule, TopType, Comp, Pre) -> Imm0 = gen_dec_line_imm(Erule, TopType, Comp, false, Pre), @@ -1461,29 +1646,29 @@ gen_dec_line_special(Erule, {typefield,_}, _TopType, Comp, Prop}],PrevSt} end end; -gen_dec_line_special(Erule, Atype, TopType, Comp, DecInfObj) -> - case gen_dec_line_other(Erule, Atype, TopType, Comp) of +gen_dec_line_special(Gen, Atype, TopType, Comp, DecInfObj) -> + case gen_dec_line_other(Gen, Atype, TopType, Comp) of Fun when is_function(Fun, 1) -> fun({BytesVar,PrevSt}) -> Fun(BytesVar), - gen_dec_line_dec_inf(Comp, DecInfObj), + gen_dec_line_dec_inf(Gen,Comp, DecInfObj), {[],PrevSt} end; Imm0 -> {imm,Imm0, fun(Imm, {BytesVar,PrevSt}) -> asn1ct_imm:dec_code_gen(Imm, BytesVar), - gen_dec_line_dec_inf(Comp, DecInfObj), + gen_dec_line_dec_inf(Gen, Comp, DecInfObj), {[],PrevSt} end} end. -gen_dec_line_dec_inf(Comp, DecInfObj) -> +gen_dec_line_dec_inf(Gen, Comp, DecInfObj) -> #'ComponentType'{name=Cname} = Comp, case DecInfObj of {Cname,{_,_OSet,_UniqueFName,ValIndex}} -> Term = asn1ct_gen:mk_var(asn1ct_name:curr(term)), - ValueMatch = value_match(ValIndex,Term), + ValueMatch = value_match(Gen, ValIndex,Term), emit([",",nl, "ObjFun = ",ValueMatch]); _ -> @@ -1705,20 +1890,17 @@ gen_dec_choice2(Erule, TopType, [H0|T], Pos, Sep0, Pre) -> gen_dec_choice2(Erule, TopType, T, Pos+1, Sep, Pre); gen_dec_choice2(_, _, [], _, _, _) -> ok. -make_elements(I,Val,ExtCnames) -> - make_elements(I,Val,ExtCnames,[]). +get_input_vars(Val, I, N) -> + L = get_input_vars_1(Val, I, N), + lists:join(",", L). -make_elements(I,Val,[_ExtCname],Acc)-> % the last one, no comma needed - Element = make_element(I, Val), - make_elements(I+1,Val,[],[Element|Acc]); -make_elements(I,Val,[_ExtCname|Rest],Acc)-> - Element = make_element(I, Val), - make_elements(I+1,Val,Rest,[", ",Element|Acc]); -make_elements(_I,_,[],Acc) -> - lists:reverse(Acc). 
+get_input_vars_1(_Val, _I, 0) -> + []; +get_input_vars_1(Val, I, N) -> + [get_input_var(Val, I)|get_input_vars_1(Val, I+1, N-1)]. -make_element(I, Val) -> - lists:flatten(io_lib:format("element(~w, ~s)", [I,Val])). +get_input_var(Val, I) -> + lists:flatten(io_lib:format("element(~w, ~s)", [I+1,Val])). emit_extaddgroupTerms(VarSeries,[_]) -> asn1ct_name:new(VarSeries), @@ -1735,63 +1917,76 @@ flat_complist({Rl1,El,Rl2}) -> Rl1 ++ El ++ Rl2; flat_complist({Rl,El}) -> Rl ++ El; flat_complist(CompList) -> CompList. -%%wrap_compList({Root1,Ext,Root2}) -> -%% {Root1,wrap_extensionAdditionGroups(Ext),Root2}; -%%wrap_compList({Root1,Ext}) -> -%% {Root1,wrap_extensionAdditionGroups(Ext)}; -%%wrap_compList(CompList) -> -%% CompList. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%% Will convert all componentTypes following 'ExtensionAdditionGroup' +%% Convert all componentTypes following 'ExtensionAdditionGroup' %% up to the matching 'ExtensionAdditionGroupEnd' into one componentType -%% of type SEQUENCE with the componentTypes as components +%% of type SEQUENCE with the componentTypes as components. %% -wrap_extensionAdditionGroups(ExtCompList,ExtGroupPosLen) -> - wrap_extensionAdditionGroups(ExtCompList,ExtGroupPosLen,[],0,0). +wrap_extensionAdditionGroups(ExtCompList, ExtGroupPosLen) -> + wrap_eags(ExtCompList, ExtGroupPosLen, 0, 0). -wrap_extensionAdditionGroups([{'ExtensionAdditionGroup',_Number}|Rest], - [{ActualPos,_,_}|ExtGroupPosLenRest],Acc,_ExtAddGroupDiff,ExtGroupNum) -> - {ExtGroupCompList,['ExtensionAdditionGroupEnd'|Rest2]} = +wrap_eags([{'ExtensionAdditionGroup',_Number}|T0], + [{ActualPos,_,_}|Gs], _ExtAddGroupDiff, ExtGroupNum) -> + {ExtGroupCompList,['ExtensionAdditionGroupEnd'|T]} = lists:splitwith(fun(#'ComponentType'{}) -> true; (_) -> false - end, - Rest), - wrap_extensionAdditionGroups(Rest2,ExtGroupPosLenRest, - [#'ComponentType'{ - name=list_to_atom("ExtAddGroup"++ - integer_to_list(ExtGroupNum+1)), - typespec=#type{def=#'SEQUENCE'{ - extaddgroup=ExtGroupNum+1, - components=ExtGroupCompList}}, - textual_order = ActualPos, - prop='OPTIONAL'}|Acc],length(ExtGroupCompList)-1, - ExtGroupNum+1); -wrap_extensionAdditionGroups([H=#'ComponentType'{textual_order=Tord}|T], - ExtAddGrpLenPos,Acc,ExtAddGroupDiff,ExtGroupNum) when is_integer(Tord) -> - wrap_extensionAdditionGroups(T,ExtAddGrpLenPos,[H#'ComponentType'{ - textual_order=Tord - ExtAddGroupDiff}|Acc],ExtAddGroupDiff,ExtGroupNum); -wrap_extensionAdditionGroups([H|T],ExtAddGrpLenPos,Acc,ExtAddGroupDiff,ExtGroupNum) -> - wrap_extensionAdditionGroups(T,ExtAddGrpLenPos,[H|Acc],ExtAddGroupDiff,ExtGroupNum); -wrap_extensionAdditionGroups([],_,Acc,_,_) -> - lists:reverse(Acc). 
- -value_match(Index,Value) when is_atom(Value) -> - value_match(Index,atom_to_list(Value)); -value_match([],Value) -> + end, T0), + Name = list_to_atom(lists:concat(["ExtAddGroup",ExtGroupNum+1])), + Seq = #type{def=#'SEQUENCE'{extaddgroup=ExtGroupNum+1, + components=ExtGroupCompList}}, + Comp = #'ComponentType'{name=Name, + typespec=Seq, + textual_order=ActualPos, + prop='OPTIONAL'}, + [Comp|wrap_eags(T, Gs, length(ExtGroupCompList)-1, ExtGroupNum+1)]; +wrap_eags([#'ComponentType'{textual_order=Tord}=H|T], + ExtAddGrpLenPos, ExtAddGroupDiff, ExtGroupNum) + when is_integer(Tord) -> + Comp = H#'ComponentType'{textual_order=Tord - ExtAddGroupDiff}, + [Comp|wrap_eags(T, ExtAddGrpLenPos, ExtAddGroupDiff, ExtGroupNum)]; +wrap_eags([H|T], ExtAddGrpLenPos, ExtAddGroupDiff, ExtGroupNum) -> + [H|wrap_eags(T, ExtAddGrpLenPos, ExtAddGroupDiff, ExtGroupNum)]; +wrap_eags([], _, _, _) -> + []. + +value_match(#gen{pack=record}, VIs, Value) -> + value_match_rec(VIs, Value); +value_match(#gen{pack=map}, VIs, Value) -> + value_match_map(VIs, Value). + +value_match_rec([], Value) -> + Value; +value_match_rec([{VI,_}|VIs], Value0) -> + Value = value_match_rec(VIs, Value0), + lists:concat(["element(",VI,", ",Value,")"]). + +value_match_map([], Value) -> Value; -value_match([{VI,_}|VIs],Value) -> - value_match1(Value,VIs,lists:concat(["element(",VI,","]),1). -value_match1(Value,[],Acc,Depth) -> - Acc ++ Value ++ lists:concat(lists:duplicate(Depth,")")); -value_match1(Value,[{VI,_}|VIs],Acc,Depth) -> - value_match1(Value,VIs,Acc++lists:concat(["element(",VI,","]),Depth+1). - -enc_dig_out_value([], Value) -> +value_match_map([{_,Name}|VIs], Value0) -> + Value = value_match_map(VIs, Value0), + lists:concat(["maps:get(",Name,", ",Value,")"]). + +enc_dig_out_value(_Gen, [], Value) -> {[],Value}; -enc_dig_out_value([{N,_}|T], Value) -> - {Imm0,Dst0} = enc_dig_out_value(T, Value), +enc_dig_out_value(#gen{pack=record}=Gen, [{N,_}|T], Value) -> + {Imm0,Dst0} = enc_dig_out_value(Gen, T, Value), {Imm,Dst} = asn1ct_imm:enc_element(N, Dst0), + {Imm0++Imm,Dst}; +enc_dig_out_value(#gen{pack=map}, [{N,'ASN1_top'}], _Value) -> + {[],{var,lists:concat(["Input@",N-1])}}; +enc_dig_out_value(#gen{pack=map}=Gen, [{_,Name}|T], Value) -> + {Imm0,Dst0} = enc_dig_out_value(Gen, T, Value), + {Imm,Dst} = asn1ct_imm:enc_maps_get(Name, Dst0), {Imm0++Imm,Dst}. make_var(Base) -> {var,atom_to_list(asn1ct_gen:mk_var(asn1ct_name:curr(Base)))}. + +attribute_comment(InnerType, TextPos, Cname) -> + DispType = case InnerType of + #'Externaltypereference'{type=T} -> T; + IT when is_tuple(IT) -> element(2,IT); + _ -> InnerType + end, + Comment = ["attribute ",Cname,"(",TextPos,") with type ",DispType], + lists:concat(Comment). diff --git a/lib/asn1/src/asn1ct_eval_ext.funcs b/lib/asn1/src/asn1ct_eval_ext.funcs index 5761901f89..01c67e7b5a 100644 --- a/lib/asn1/src/asn1ct_eval_ext.funcs +++ b/lib/asn1/src/asn1ct_eval_ext.funcs @@ -1 +1,2 @@ {ext,transform_to_EXTERNAL1994,1}. +{ext,transform_to_EXTERNAL1994_maps,1}. diff --git a/lib/asn1/src/asn1ct_gen.erl b/lib/asn1/src/asn1ct_gen.erl index bfaffa13bf..9f628c7b04 100644 --- a/lib/asn1/src/asn1ct_gen.erl +++ b/lib/asn1/src/asn1ct_gen.erl @@ -34,10 +34,10 @@ insert_once/2, ct_gen_module/1, index2suffix/1, - get_record_name_prefix/0, + get_record_name_prefix/1, conform_value/2, named_bitstring_value/2]). --export([pgen/5, +-export([pgen/3, mk_var/1, un_hyphen_var/1]). -export([gen_encode_constructed/4, @@ -45,23 +45,19 @@ -define(SUPPRESSION_FUNC, 'dialyzer-suppressions'). 
+ %% pgen(Outfile, Erules, Module, TypeOrVal, Options) -%% Generate Erlang module (.erl) and (.hrl) file corresponding to an ASN.1 module -%% .hrl file is only generated if necessary -%% Erules = per | ber -%% Module = atom() -%% TypeOrVal = {TypeList,ValueList} -%% TypeList = ValueList = [atom()] -%% Options = [Options] from asn1ct:compile() - -pgen(OutFile,Erules,Module,TypeOrVal,Options) -> - pgen_module(OutFile,Erules,Module,TypeOrVal,Options,true). - - -pgen_module(OutFile,Erules,Module, - TypeOrVal = {Types,_Values,_Ptypes,_Classes,_Objects,_ObjectSets}, - Options,Indent) -> - N2nConvEnums = [CName|| {n2n,CName} <- get(encoding_options)], +%% Generate Erlang module (.erl) and (.hrl) file corresponding to +%% an ASN.1 module. The .hrl file is only generated if necessary. + +-spec pgen(Outfile, Gen, Code) -> 'ok' when + Outfile :: any(), + Gen :: #gen{}, + Code :: #abst{}. + +pgen(OutFile, #gen{options=Options}=Gen, Code) -> + #abst{name=Module,types=Types} = Code, + N2nConvEnums = [CName|| {n2n,CName} <- Options], case N2nConvEnums -- Types of [] -> ok; @@ -69,30 +65,30 @@ pgen_module(OutFile,Erules,Module, exit({"Non existing ENUMERATION types used in n2n option", UnmatchedTypes}) end, - put(outfile,OutFile), - HrlGenerated = pgen_hrl(Erules,Module,TypeOrVal,Options,Indent), + put(outfile, OutFile), + put(currmod, Module), + HrlGenerated = pgen_hrl(Gen, Code), asn1ct_name:start(), ErlFile = lists:concat([OutFile,".erl"]), _ = open_output_file(ErlFile), asn1ct_func:start_link(), - gen_head(Erules,Module,HrlGenerated), - pgen_exports(Erules,Module,TypeOrVal), - pgen_dispatcher(Erules,Module,TypeOrVal), + gen_head(Gen, Module, HrlGenerated), + pgen_exports(Gen, Code), + pgen_dispatcher(Gen, Types), pgen_info(), - pgen_typeorval(Erules,Module,N2nConvEnums,TypeOrVal), - pgen_partial_incomplete_decode(Erules), -% gen_vars(asn1_db:mod_to_vars(Module)), -% gen_tag_table(AllTypes), + pgen_typeorval(Gen, N2nConvEnums, Code), + pgen_partial_incomplete_decode(Gen), emit([nl, "%%%",nl, "%%% Run-time functions.",nl, "%%%",nl]), - dialyzer_suppressions(Erules), + dialyzer_suppressions(Gen), Fd = get(gen_file_out), asn1ct_func:generate(Fd), close_output_file(), _ = erase(outfile), - asn1ct:verbose("--~p--~n",[{generated,ErlFile}],Options). + asn1ct:verbose("--~p--~n", [{generated,ErlFile}], Gen), + ok. dialyzer_suppressions(Erules) -> emit([nl, @@ -100,20 +96,27 @@ dialyzer_suppressions(Erules) -> Rtmod = ct_gen_module(Erules), Rtmod:dialyzer_suppressions(Erules). -pgen_typeorval(Erules,Module,N2nConvEnums,{Types,Values,_Ptypes,_Classes,Objects,ObjectSets}) -> +pgen_typeorval(Erules, N2nConvEnums, Code) -> + #abst{name=Module,types=Types,values=Values, + objects=Objects,objsets=ObjectSets} = Code, Rtmod = ct_gen_module(Erules), pgen_types(Rtmod,Erules,N2nConvEnums,Module,Types), - pgen_values(Erules,Module,Values), + pgen_values(Values, Module), pgen_objects(Rtmod,Erules,Module,Objects), pgen_objectsets(Rtmod,Erules,Module,ObjectSets), pgen_partial_decode(Rtmod,Erules,Module). -pgen_values(_,_,[]) -> - true; -pgen_values(Erules,Module,[H|T]) -> - Valuedef = asn1_db:dbget(Module,H), - gen_value(Valuedef), - pgen_values(Erules,Module,T). +%% Generate a function 'V'/0 for each Value V defined in the ASN.1 module. +%% The function returns the value in an Erlang representation which can be +%% used as input to the runtime encode functions. 
+ +pgen_values([H|T], Module) -> + #valuedef{name=Name,value=Value} = asn1_db:dbget(Module, H), + emit([{asis,Name},"() ->",nl, + {asis,Value},".",nl,nl]), + pgen_values(T, Module); +pgen_values([], _) -> + ok. pgen_types(_, _, _, _, []) -> true; @@ -181,10 +184,10 @@ pgen_objectsets(Rtmod,Erules,Module,[H|T]) -> Rtmod:gen_objectset_code(Erules,TypeDef), pgen_objectsets(Rtmod,Erules,Module,T). -pgen_partial_decode(Rtmod,Erule,Module) when Erule == ber -> - pgen_partial_inc_dec(Rtmod,Erule,Module), - pgen_partial_dec(Rtmod,Erule,Module); -pgen_partial_decode(_,_,_) -> +pgen_partial_decode(Rtmod, #gen{erule=ber}=Gen, Module) -> + pgen_partial_inc_dec(Rtmod, Gen, Module), + pgen_partial_dec(Rtmod, Gen, Module); +pgen_partial_decode(_, _, _) -> ok. pgen_partial_inc_dec(Rtmod,Erules,Module) -> @@ -225,7 +228,7 @@ pgen_partial_inc_dec1(Rtmod,Erules,Module,[P|Ps]) -> pgen_partial_inc_dec1(_,_,_,[]) -> ok. -gen_partial_inc_dec_refed_funcs(Rtmod,Erule) when Erule == ber -> +gen_partial_inc_dec_refed_funcs(Rtmod, #gen{erule=ber}=Gen) -> case asn1ct:next_refed_func() of [] -> ok; @@ -233,19 +236,17 @@ gen_partial_inc_dec_refed_funcs(Rtmod,Erule) when Erule == ber -> TypeDef = asn1_db:dbget(M,Name), asn1ct:update_gen_state(namelist,Pattern), asn1ct:set_current_sindex(Sindex), - Rtmod:gen_inc_decode(Erule,TypeDef), - gen_dec_part_inner_constr(Rtmod,Erule,TypeDef,[Name]), - gen_partial_inc_dec_refed_funcs(Rtmod,Erule); + Rtmod:gen_inc_decode(Gen, TypeDef), + gen_dec_part_inner_constr(Rtmod, Gen, TypeDef, [Name]), + gen_partial_inc_dec_refed_funcs(Rtmod, Gen); {Name,Sindex,Pattern,Type} -> TypeDef=#typedef{name=asn1ct_gen:list2name(Name),typespec=Type}, asn1ct:update_gen_state(namelist,Pattern), asn1ct:set_current_sindex(Sindex), - Rtmod:gen_inc_decode(Erule,TypeDef), - gen_dec_part_inner_constr(Rtmod,Erule,TypeDef,Name), - gen_partial_inc_dec_refed_funcs(Rtmod,Erule) - end; -gen_partial_inc_dec_refed_funcs(_,_) -> - ok. + Rtmod:gen_inc_decode(Gen, TypeDef), + gen_dec_part_inner_constr(Rtmod, Gen, TypeDef, Name), + gen_partial_inc_dec_refed_funcs(Rtmod, Gen) + end. pgen_partial_dec(_Rtmod,Erules,_Module) -> Type_pattern = asn1ct:get_gen_state_field(type_pattern), @@ -254,16 +255,16 @@ pgen_partial_dec(_Rtmod,Erules,_Module) -> pgen_partial_types(Erules,Type_pattern), ok. -pgen_partial_types(Erules,Type_pattern) -> - % until this functionality works on all back-ends - Options = get(encoding_options), - case lists:member(asn1config,Options) of +pgen_partial_types(#gen{options=Options}=Gen, TypePattern) -> + %% until this functionality works on all back-ends + case lists:member(asn1config, Options) of true -> - pgen_partial_types1(Erules,Type_pattern); - _ -> ok + pgen_partial_types1(Gen, TypePattern); + false -> + ok end. - + pgen_partial_types1(Erules,[{FuncName,[TopType|RestTypes]}|Rest]) -> % emit([FuncName,"(Bytes) ->",nl]), CurrMod = get(currmod), @@ -441,7 +442,8 @@ pgen_partial_incomplete_decode(Erule) -> _ -> ok end. -pgen_partial_incomplete_decode1(ber) -> + +pgen_partial_incomplete_decode1(#gen{erule=ber}) -> case asn1ct:read_config_data(partial_incomplete_decode) of undefined -> ok; @@ -451,7 +453,7 @@ pgen_partial_incomplete_decode1(ber) -> GeneratedFs= asn1ct:get_gen_state_field(gen_refed_funcs), % io:format("GeneratedFs :~n~p~n",[GeneratedFs]), gen_part_decode_funcs(GeneratedFs,0); -pgen_partial_incomplete_decode1(_) -> ok. +pgen_partial_incomplete_decode1(#gen{}) -> ok. 
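So every ASN.1 value definition now becomes a plain zero-arity function in the generated module; for an invented definition the output of pgen_values/2 looks like:

%% ASN.1 source:  maxRetries INTEGER ::= 5
maxRetries() ->
    5.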
emit_partial_incomplete_decode({FuncName,TopType,Pattern}) -> TypePattern = asn1ct:get_gen_state_field(inc_type_pattern), @@ -578,18 +580,6 @@ un_hyphen_var([H|T]) -> un_hyphen_var([]) -> []. -%% Generate value functions *************** -%% **************************************** -%% Generates a function 'V'/0 for each Value V defined in the ASN.1 module -%% the function returns the value in an Erlang representation which can be -%% used as input to the runtime encode functions - -gen_value(Value) when is_record(Value,valuedef) -> -%% io:format(" ~w ",[Value#valuedef.name]), - emit({"'",Value#valuedef.name,"'() ->",nl}), - V = Value#valuedef.value, - emit([{asis,V},".",nl,nl]). - gen_encode_constructed(Erules,Typename,InnerType,D) when is_record(D,type) -> Rtmod = ct_constructed_module(Erules), case InnerType of @@ -654,78 +644,32 @@ gen_decode_constructed(Erules,Typename,InnerType,D) when is_record(D,typedef) -> gen_decode_constructed(Erules,Typename,InnerType,D#typedef.typespec). -pgen_exports(Erules,_Module,{Types,Values,_,_,Objects,ObjectSets}) -> - emit(["-export([encoding_rule/0,bit_string_format/0,",nl, +pgen_exports(#gen{options=Options}=Gen, Code) -> + #abst{types=Types,values=Values,objects=Objects,objsets=ObjectSets} = Code, + emit(["-export([encoding_rule/0,maps/0,bit_string_format/0,",nl, " legacy_erlang_types/0]).",nl]), emit(["-export([",{asis,?SUPPRESSION_FUNC},"/1]).",nl]), - case Types of - [] -> ok; - _ -> - emit({"-export([",nl}), - case Erules of - ber -> - gen_exports1(Types,"enc_",2); - _ -> - gen_exports1(Types,"enc_",1) - end, - emit({"-export([",nl}), - case Erules of - ber -> - gen_exports1(Types, "dec_", 2); - _ -> - gen_exports1(Types, "dec_", 1) - end - end, - case [X || {n2n,X} <- get(encoding_options)] of - [] -> ok; - A2nNames -> - emit({"-export([",nl}), - gen_exports1(A2nNames,"name2num_",1), - emit({"-export([",nl}), - gen_exports1(A2nNames,"num2name_",1) - end, - case Values of - [] -> ok; - _ -> - emit({"-export([",nl}), - gen_exports1(Values,"",0) + case Gen of + #gen{erule=ber} -> + gen_exports(Types, "enc_", 2), + gen_exports(Types, "dec_", 2), + gen_exports(Objects, "enc_", 3), + gen_exports(Objects, "dec_", 3), + gen_exports(ObjectSets, "getenc_", 1), + gen_exports(ObjectSets, "getdec_", 1); + #gen{erule=per} -> + gen_exports(Types, "enc_", 1), + gen_exports(Types, "dec_", 1) end, - case Objects of - [] -> ok; - _ -> - case erule(Erules) of - per -> - ok; - ber -> - emit({"-export([",nl}), - gen_exports1(Objects,"enc_",3), - emit({"-export([",nl}), - gen_exports1(Objects,"dec_",3) - end - end, - case ObjectSets of - [] -> ok; - _ -> - case erule(Erules) of - per -> - ok; - ber -> - emit({"-export([",nl}), - gen_exports1(ObjectSets, "getenc_",1), - emit({"-export([",nl}), - gen_exports1(ObjectSets, "getdec_",1) - end - end, - emit({"-export([info/0]).",nl}), - gen_partial_inc_decode_exports(), - gen_selected_decode_exports(), - emit({nl,nl}). -gen_exports1([F1,F2|T],Prefix,Arity) -> - emit({"'",Prefix,F1,"'/",Arity,com,nl}), - gen_exports1([F2|T],Prefix,Arity); -gen_exports1([Flast|_T],Prefix,Arity) -> - emit({"'",Prefix,Flast,"'/",Arity,nl,"]).",nl,nl}). + A2nNames = [X || {n2n,X} <- Options], + gen_exports(A2nNames, "name2num_", 1), + gen_exports(A2nNames, "num2name_", 1), + + gen_exports(Values, "", 0), + emit(["-export([info/0]).",nl,nl]), + gen_partial_inc_decode_exports(), + gen_selected_decode_exports(). 
gen_partial_inc_decode_exports() -> case {asn1ct:read_config_data(partial_incomplete_decode), @@ -734,66 +678,54 @@ gen_partial_inc_decode_exports() -> ok; {_,undefined} -> ok; - {Data,_} -> - gen_partial_inc_decode_exports(Data), - emit(["-export([decode_part/2]).",nl]) + {Data0,_} -> + Data = [Name || {Name,_,_} <- Data0], + gen_exports(Data, "", 1), + emit(["-export([decode_part/2]).",nl,nl]) end. -gen_partial_inc_decode_exports([]) -> - ok; -gen_partial_inc_decode_exports([{Name,_,_}|Rest]) -> - emit(["-export([",Name,"/1"]), - gen_partial_inc_decode_exports1(Rest); -gen_partial_inc_decode_exports([_|Rest]) -> - gen_partial_inc_decode_exports(Rest). - -gen_partial_inc_decode_exports1([]) -> - emit(["]).",nl]); -gen_partial_inc_decode_exports1([{Name,_,_}|Rest]) -> - emit([", ",Name,"/1"]), - gen_partial_inc_decode_exports1(Rest); -gen_partial_inc_decode_exports1([_|Rest]) -> - gen_partial_inc_decode_exports1(Rest). gen_selected_decode_exports() -> case asn1ct:get_gen_state_field(type_pattern) of undefined -> ok; - L -> - gen_selected_decode_exports(L) + Data0 -> + Data = [Name || {Name,_} <- Data0], + gen_exports(Data, "", 1) end. -gen_selected_decode_exports([]) -> +gen_exports([], _Prefix, _Arity) -> ok; -gen_selected_decode_exports([{FuncName,_}|Rest]) -> - emit(["-export([",FuncName,"/1"]), - gen_selected_decode_exports1(Rest). -gen_selected_decode_exports1([]) -> - emit(["]).",nl,nl]); -gen_selected_decode_exports1([{FuncName,_}|Rest]) -> - emit([",",nl," ",FuncName,"/1"]), - gen_selected_decode_exports1(Rest). - -pgen_dispatcher(Erules,_Module,{[],_Values,_,_,_Objects,_ObjectSets}) -> +gen_exports([_|_]=L0, Prefix, Arity) -> + FF = fun(F0) -> + F = list_to_atom(lists:concat([Prefix,F0])), + [{asis,F},"/",Arity] + end, + L = lists:join(",\n", [FF(F) || F <- L0]), + emit(["-export([",nl, + L,nl, + "]).",nl,nl]). 
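With the consolidated gen_exports/3 above, a BER module containing the types Pdu and PersonnelRecord (invented names) gets its export attributes emitted as:

-export([
'enc_Pdu'/2,
'enc_PersonnelRecord'/2
]).

-export([
'dec_Pdu'/2,
'dec_PersonnelRecord'/2
]).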
+ +pgen_dispatcher(Erules, []) -> gen_info_functions(Erules); -pgen_dispatcher(Erules,_Module,{Types,_Values,_,_,_Objects,_ObjectSets}) -> +pgen_dispatcher(Gen, Types) -> emit(["-export([encode/2,decode/2]).",nl,nl]), - gen_info_functions(Erules), + gen_info_functions(Gen), - Options = get(encoding_options), + Options = Gen#gen.options, NoFinalPadding = lists:member(no_final_padding, Options), NoOkWrapper = proplists:get_bool(no_ok_wrapper, Options), - Call = case Erules of - per -> - asn1ct_func:need({Erules,complete,1}), + Call = case Gen of + #gen{erule=per,aligned=true} -> + asn1ct_func:need({per,complete,1}), "complete(encode_disp(Type, Data))"; - ber -> + #gen{erule=ber} -> "iolist_to_binary(element(1, encode_disp(Type, Data)))"; - uper when NoFinalPadding == true -> - asn1ct_func:need({Erules,complete_NFP,1}), + #gen{erule=per,aligned=false} when NoFinalPadding -> + asn1ct_func:need({uper,complete_NFP,1}), "complete_NFP(encode_disp(Type, Data))"; - uper -> - asn1ct_func:need({Erules,complete,1}), + #gen{erule=per,aligned=false} -> + asn1ct_func:need({uper,complete,1}), "complete(encode_disp(Type, Data))" end, @@ -809,36 +741,36 @@ pgen_dispatcher(Erules,_Module,{Types,_Values,_,_,_Objects,_ObjectSets}) -> end, emit([nl,nl]), - Return_rest = proplists:get_bool(undec_rest, Options), - Data = case {Erules,Return_rest} of - {ber,true} -> "Data0"; - _ -> "Data" + ReturnRest = proplists:get_bool(undec_rest, Gen#gen.options), + Data = case Gen#gen.erule =:= ber andalso ReturnRest of + true -> "Data0"; + false -> "Data" end, - emit(["decode(Type,",Data,") ->",nl]), + emit(["decode(Type, ",Data,") ->",nl]), DecWrap = - case {Erules,Return_rest} of - {ber,false} -> + case {Gen,ReturnRest} of + {#gen{erule=ber},false} -> asn1ct_func:need({ber,ber_decode_nif,1}), "element(1, ber_decode_nif(Data))"; - {ber,true} -> + {#gen{erule=ber},true} -> asn1ct_func:need({ber,ber_decode_nif,1}), emit(["{Data,Rest} = ber_decode_nif(Data0),",nl]), "Data"; - _ -> + {_,_} -> "Data" end, emit([case NoOkWrapper of false -> "try"; true -> "case" end, " decode_disp(Type, ",DecWrap,") of",nl]), - case erule(Erules) of - ber -> + case Gen of + #gen{erule=ber} -> emit([" Result ->",nl]); - per -> + #gen{erule=per} -> emit([" {Result,Rest} ->",nl]) end, - case Return_rest of + case ReturnRest of false -> result_line(NoOkWrapper, ["Result"]); true -> result_line(NoOkWrapper, ["Result","Rest"]) end, @@ -849,18 +781,11 @@ pgen_dispatcher(Erules,_Module,{Types,_Values,_,_,_Objects,_ObjectSets}) -> emit([nl,"end.",nl,nl]) end, - gen_decode_partial_incomplete(Erules), + gen_decode_partial_incomplete(Gen), + gen_partial_inc_dispatcher(Gen), - case Erules of - ber -> - gen_dispatcher(Types,"encode_disp","enc_",""), - gen_dispatcher(Types,"decode_disp","dec_",""), - gen_partial_inc_dispatcher(); - _PerOrPer_bin -> - gen_dispatcher(Types,"encode_disp","enc_",""), - gen_dispatcher(Types,"decode_disp","dec_","") - end, - emit([nl,nl]). + gen_dispatcher(Types, "encode_disp", "enc_"), + gen_dispatcher(Types, "decode_disp", "dec_"). result_line(NoOkWrapper, Items) -> S = [" "|case NoOkWrapper of @@ -877,23 +802,35 @@ result_line_1(Items) -> try_catch() -> [" catch",nl, " Class:Exception when Class =:= error; Class =:= exit ->",nl, + " Stk = erlang:get_stacktrace(),",nl, " case Exception of",nl, - " {error,Reason}=Error ->",nl, - " Error;",nl, + " {error,{asn1,Reason}} ->",nl, + " {error,{asn1,{Reason,Stk}}};",nl, " Reason ->",nl, - " {error,{asn1,Reason}}",nl, + " {error,{asn1,{Reason,Stk}}}",nl, " end",nl, "end."]. 
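Assembling pgen_dispatcher/2, result_line/2 and try_catch/0 by hand, the top-level decoder generated for a BER module (compiled without the no_ok_wrapper and undec_rest options) comes out roughly as:

decode(Type, Data) ->
    try decode_disp(Type, element(1, ber_decode_nif(Data))) of
        Result ->
            {ok, Result}
    catch
        Class:Exception when Class =:= error; Class =:= exit ->
            Stk = erlang:get_stacktrace(),
            case Exception of
                {error,{asn1,Reason}} ->
                    {error,{asn1,{Reason,Stk}}};
                Reason ->
                    {error,{asn1,{Reason,Stk}}}
            end
    end.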
-gen_info_functions(Erules) -> +gen_info_functions(Gen) -> + Erule = case Gen of + #gen{erule=ber} -> ber; + #gen{erule=per,aligned=false} -> uper; + #gen{erule=per,aligned=true} -> per + end, + Maps = case Gen of + #gen{pack=record} -> false; + #gen{pack=map} -> true + end, emit(["encoding_rule() -> ", - {asis,Erules},".",nl,nl, + {asis,Erule},".",nl,nl, + "maps() -> ", + {asis,Maps},".",nl,nl, "bit_string_format() -> ", {asis,asn1ct:get_bit_string_format()},".",nl,nl, "legacy_erlang_types() -> ", {asis,asn1ct:use_legacy_types()},".",nl,nl]). -gen_decode_partial_incomplete(ber) -> +gen_decode_partial_incomplete(#gen{erule=ber}) -> case {asn1ct:read_config_data(partial_incomplete_decode), asn1ct:get_gen_state_field(inc_type_pattern)} of {undefined,_} -> @@ -931,10 +868,10 @@ gen_decode_partial_incomplete(ber) -> EmitCaseClauses(), emit([".",nl,nl]) end; -gen_decode_partial_incomplete(_Erule) -> +gen_decode_partial_incomplete(#gen{}) -> ok. -gen_partial_inc_dispatcher() -> +gen_partial_inc_dispatcher(#gen{erule=ber}) -> case {asn1ct:read_config_data(partial_incomplete_decode), asn1ct:get_gen_state_field(inc_type_pattern)} of {undefined,_} -> @@ -944,7 +881,9 @@ gen_partial_inc_dispatcher() -> {Data1,Data2} -> % io:format("partial_incomplete_decode: ~p~ninc_type_pattern: ~p~n",[Data,Data2]), gen_partial_inc_dispatcher(Data1, Data2, "") - end. + end; +gen_partial_inc_dispatcher(#gen{}) -> + ok. gen_partial_inc_dispatcher([{FuncName,TopType,_Pattern}|Rest], TypePattern, Sep) -> TPattern = @@ -968,12 +907,18 @@ gen_partial_inc_dispatcher([{FuncName,TopType,_Pattern}|Rest], TypePattern, Sep) gen_partial_inc_dispatcher([], _, _) -> emit([".",nl]). -gen_dispatcher([F1,F2|T],FuncName,Prefix,ExtraArg) -> - emit([FuncName,"('",F1,"',Data) -> '",Prefix,F1,"'(Data",ExtraArg,")",";",nl]), - gen_dispatcher([F2|T],FuncName,Prefix,ExtraArg); -gen_dispatcher([Flast|_T],FuncName,Prefix,ExtraArg) -> - emit([FuncName,"('",Flast,"',Data) -> '",Prefix,Flast,"'(Data",ExtraArg,")",";",nl]), - emit([FuncName,"(","Type",",_Data) -> exit({error,{asn1,{undefined_type,Type}}}).",nl,nl,nl]). +gen_dispatcher(L, DispFunc, Prefix) -> + gen_dispatcher_1(L, DispFunc, Prefix), + emit([DispFunc,"(","Type",", _Data) ->" + " exit({error,{asn1,{undefined_type,Type}}}).",nl,nl]). + +gen_dispatcher_1([F|T], FuncName, Prefix) -> + Func = list_to_atom(lists:concat([Prefix,F])), + emit([FuncName,"(",{asis,F},", Data) -> ", + {asis,Func},"(Data)",";",nl]), + gen_dispatcher_1(T, FuncName, Prefix); +gen_dispatcher_1([], _, _) -> + ok. pgen_info() -> emit(["info() ->",nl, @@ -1092,22 +1037,21 @@ open_output_file(F) -> close_output_file() -> ok = file:close(erase(gen_file_out)). -pgen_hrl(Erules,Module,TypeOrVal,Options,_Indent) -> - put(currmod,Module), - {Types,Values,Ptypes,_,_,_} = TypeOrVal, +pgen_hrl(#gen{pack=record}=Gen, Code) -> + #abst{name=Module,types=Types,values=Values,ptypes=Ptypes} = Code, Ret = - case pgen_hrltypes(Erules,Module,Ptypes++Types,0) of + case pgen_hrltypes(Gen, Module, Ptypes++Types, 0) of 0 -> case Values of [] -> 0; _ -> - open_hrl(get(outfile),get(currmod)), - pgen_macros(Erules,Module,Values), + open_hrl(get(outfile), Module), + pgen_macros(Gen, Module, Values), 1 end; X -> - pgen_macros(Erules,Module,Values), + pgen_macros(Gen, Module, Values), X end, case Ret of @@ -1119,62 +1063,61 @@ pgen_hrl(Erules,Module,TypeOrVal,Options,_Indent) -> close_output_file(), asn1ct:verbose("--~p--~n", [{generated,lists:concat([get(outfile),".hrl"])}], - Options), + Gen), Y - end. 
+ end; +pgen_hrl(#gen{pack=map}, _) -> + 0. pgen_macros(_,_,[]) -> true; -pgen_macros(Erules,Module,[H|T]) -> - Valuedef = asn1_db:dbget(Module,H), - gen_macro(Valuedef), - pgen_macros(Erules,Module,T). +pgen_macros(Gen, Module, [H|T]) -> + Valuedef = asn1_db:dbget(Module, H), + gen_macro(Gen, Valuedef), + pgen_macros(Gen, Module, T). pgen_hrltypes(_,_,[],NumRecords) -> NumRecords; -pgen_hrltypes(Erules,Module,[H|T],NumRecords) -> -% io:format("records = ~p~n",NumRecords), - Typedef = asn1_db:dbget(Module,H), - AddNumRecords = gen_record(Typedef,NumRecords), - pgen_hrltypes(Erules,Module,T,NumRecords+AddNumRecords). +pgen_hrltypes(Gen, Module, [H|T], NumRecords) -> + Typedef = asn1_db:dbget(Module, H), + AddNumRecords = gen_record(Gen, Typedef, NumRecords), + pgen_hrltypes(Gen, Module, T, NumRecords+AddNumRecords). %% Generates a macro for value Value defined in the ASN.1 module -gen_macro(Value) when is_record(Value,valuedef) -> - Prefix = get_macro_name_prefix(), - emit({"-define('",Prefix,Value#valuedef.name,"', ", - {asis,Value#valuedef.value},").",nl}). +gen_macro(Gen, #valuedef{name=Name,value=Value}) -> + Prefix = get_macro_name_prefix(Gen), + emit(["-define('",Prefix,Name,"', ",{asis,Value},").",nl]). %% Generate record functions ************** %% Generates an Erlang record for each named and unnamed SEQUENCE and SET in the ASN.1 %% module. If no SEQUENCE or SET is found there is no .hrl file generated -gen_record(Tdef,NumRecords) when is_record(Tdef,typedef) -> +gen_record(Gen, #typedef{}=Tdef, NumRecords) -> Name = [Tdef#typedef.name], Type = Tdef#typedef.typespec, - gen_record(type,Name,Type,NumRecords); - -gen_record(Tdef,NumRecords) when is_record(Tdef,ptypedef) -> + gen_record(Gen, type, Name, Type, NumRecords); +gen_record(Gen, #ptypedef{}=Tdef, NumRecords) -> Name = [Tdef#ptypedef.name], Type = Tdef#ptypedef.typespec, - gen_record(ptype,Name,Type,NumRecords). - -gen_record(TorPtype,Name,[#'ComponentType'{name=Cname,typespec=Type}|T],Num) -> - Num2 = gen_record(TorPtype,[Cname|Name],Type,Num), - gen_record(TorPtype,Name,T,Num2); -gen_record(TorPtype,Name,{Clist1,Clist2},Num) + gen_record(Gen, ptype, Name, Type, NumRecords). 
+ +gen_record(Gen, TorPtype, Name, + [#'ComponentType'{name=Cname,typespec=Type}|T], Num) -> + Num2 = gen_record(Gen, TorPtype, [Cname|Name], Type, Num), + gen_record(Gen, TorPtype, Name, T, Num2); +gen_record(Gen, TorPtype, Name, {Clist1,Clist2}, Num) when is_list(Clist1), is_list(Clist2) -> - gen_record(TorPtype,Name,Clist1++Clist2,Num); -gen_record(TorPtype,Name,{Clist1,EClist,Clist2},Num) + gen_record(Gen, TorPtype, Name, Clist1++Clist2, Num); +gen_record(Gen, TorPtype, Name, {Clist1,EClist,Clist2}, Num) when is_list(Clist1), is_list(EClist), is_list(Clist2) -> - gen_record(TorPtype,Name,Clist1++EClist++Clist2,Num); -gen_record(TorPtype,Name,[_|T],Num) -> % skip EXTENSIONMARK - gen_record(TorPtype,Name,T,Num); -gen_record(_TorPtype,_Name,[],Num) -> + gen_record(Gen, TorPtype, Name, Clist1++EClist++Clist2, Num); +gen_record(Gen, TorPtype, Name, [_|T], Num) -> % skip EXTENSIONMARK + gen_record(Gen, TorPtype, Name, T, Num); +gen_record(_Gen, _TorPtype, _Name, [], Num) -> Num; - -gen_record(TorPtype,Name,Type,Num) when is_record(Type,type) -> +gen_record(Gen, TorPtype, Name, #type{}=Type, Num) -> Def = Type#type.def, Rec = case Def of Seq when is_record(Seq,'SEQUENCE') -> @@ -1209,127 +1152,103 @@ gen_record(TorPtype,Name,Type,Num) when is_record(Type,type) -> 0 -> open_hrl(get(outfile),get(currmod)); _ -> true end, - Prefix = get_record_name_prefix(), - emit({"-record('",Prefix,list2name(Name),"',{",nl}), - RootList = case CompList of - _ when is_list(CompList) -> - CompList; - {Rl,_} -> Rl; - {Rl1,_Ext,_Rl2} -> Rl1 - end, - gen_record2(Name,'SEQUENCE',RootList), - NewCompList = + do_gen_record(Gen, Name, CompList), + NewCompList = case CompList of {CompList1,[]} -> - emit({"}). % with extension mark",nl,nl}), CompList1; {Tr,ExtensionList2} -> - case Tr of - [] -> true; - _ -> emit({",",nl}) - end, - emit({"%% with extensions",nl}), - gen_record2(Name, 'SEQUENCE', ExtensionList2, - "", ext), - emit({"}).",nl,nl}), Tr ++ ExtensionList2; {Rootl1,Extl,Rootl2} -> - case Rootl1 =/= [] andalso Extl++Rootl2 =/= [] of - true -> emit([com]); - false -> ok - end, - case Rootl1 of - [_|_] -> emit([nl]); - [] -> ok - end, - emit(["%% with extensions",nl]), - gen_record2(Name,'SEQUENCE',Extl,"",ext), - case Extl =/= [] andalso Rootl2 =/= [] of - true -> emit([com]); - false -> ok - end, - case Extl of - [_|_] -> emit([nl]); - [] -> ok - end, - emit(["%% end of extensions",nl]), - gen_record2(Name,'SEQUENCE',Rootl2,"",noext), - emit(["}).",nl,nl]), Rootl1++Extl++Rootl2; - _ -> - emit({"}).",nl,nl}), + _ -> CompList end, - gen_record(TorPtype,Name,NewCompList,Num+1); + gen_record(Gen, TorPtype, Name, NewCompList, Num+1); {inner,{'CHOICE', CompList}} -> - gen_record(TorPtype,Name,CompList,Num); + gen_record(Gen, TorPtype, Name, CompList, Num); {NewName,{_, CompList}} -> - gen_record(TorPtype,NewName,CompList,Num) + gen_record(Gen, TorPtype, NewName, CompList, Num) end; -gen_record(_,_,_,NumRecords) -> % skip CLASS etc for now. +gen_record(_, _, _, _, NumRecords) -> % skip CLASS etc for now. NumRecords. 
- -gen_head(Erules,Mod,Hrl) -> - Options = get(encoding_options), - case Erules of - per -> - emit(["%% Generated by the Erlang ASN.1 PER-" - "compiler version, utilizing bit-syntax:", - asn1ct:vsn(),nl]); - ber -> - emit(["%% Generated by the Erlang ASN.1 BER_V2-" - "compiler version, utilizing bit-syntax:", - asn1ct:vsn(),nl]); - uper -> - emit(["%% Generated by the Erlang ASN.1 UNALIGNED" - " PER-compiler version, utilizing bit-syntax:", - asn1ct:vsn(),nl]) + +do_gen_record(Gen, Name, CL0) -> + CL = case CL0 of + {Root,[]} -> + Root ++ [{comment,"with extension mark"}]; + {Root,Ext} -> + Root ++ [{comment,"with exensions"}] ++ + only_components(Ext); + {Root1,Ext,Root2} -> + Root1 ++ [{comment,"with exensions"}] ++ + only_components(Ext) ++ + [{comment,"end of extensions"}] ++ Root2; + _ when is_list(CL0) -> + CL0 + end, + Prefix = get_record_name_prefix(Gen), + emit(["-record('",Prefix,list2name(Name),"', {"] ++ + do_gen_record_1(CL) ++ + [nl,"}).",nl,nl]). + +only_components(CL) -> + [C || #'ComponentType'{}=C <- CL]. + +do_gen_record_1([#'ComponentType'{name=Name,prop=Prop}|T]) -> + Val = case Prop of + 'OPTIONAL' -> + " = asn1_NOVALUE"; + {'DEFAULT',_} -> + " = asn1_DEFAULT"; + _ -> + [] + end, + Com = case needs_trailing_comma(T) of + true -> [com]; + false -> [] end, - emit({"%% Purpose: encoder and decoder to the types in mod ",Mod,nl,nl}), - emit({"-module('",Mod,"').",nl}), - put(currmod,Mod), - emit({"-compile(nowarn_unused_vars).",nl}), - emit({"-dialyzer(no_improper_lists).",nl}), + [nl," ",{asis,Name},Val,Com|do_gen_record_1(T)]; +do_gen_record_1([{comment,Text}|T]) -> + [nl," %% ",Text|do_gen_record_1(T)]; +do_gen_record_1([]) -> + []. + +needs_trailing_comma([#'ComponentType'{}|_]) -> true; +needs_trailing_comma([_|T]) -> needs_trailing_comma(T); +needs_trailing_comma([]) -> false. + +gen_head(#gen{options=Options}=Gen, Mod, Hrl) -> + Name = case Gen of + #gen{erule=per,aligned=false} -> + "PER (unaligned)"; + #gen{erule=per,aligned=true} -> + "PER (aligned)"; + #gen{erule=ber} -> + "BER" + end, + emit(["%% Generated by the Erlang ASN.1 ",Name, + " compiler. Version: ",asn1ct:vsn(),nl, + "%% Purpose: Encoding and decoding of the types in ", + Mod,".",nl,nl, + "-module('",Mod,"').",nl, + "-compile(nowarn_unused_vars).",nl, + "-dialyzer(no_improper_lists).",nl]), case Hrl of 0 -> ok; - _ -> emit({"-include(\"",Mod,".hrl\").",nl}) + _ -> emit(["-include(\"",Mod,".hrl\").",nl]) end, emit(["-asn1_info([{vsn,'",asn1ct:vsn(),"'},",nl, " {module,'",Mod,"'},",nl, " {options,",io_lib:format("~p",[Options]),"}]).",nl,nl]). - - -gen_hrlhead(Mod) -> - emit({"%% Generated by the Erlang ASN.1 compiler version:",asn1ct:vsn(),nl}), - emit({"%% Purpose: Erlang record definitions for each named and unnamed",nl}), - emit({"%% SEQUENCE and SET, and macro definitions for each value",nl}), - emit({"%% definition,in module ",Mod,nl,nl}), - emit({nl,nl}). -gen_record2(Name,SeqOrSet,Comps) -> - gen_record2(Name,SeqOrSet,Comps,"",noext). 
-gen_record2(_Name,_SeqOrSet,[],_Com,_Extension) -> - true; -gen_record2(_Name,_SeqOrSet,[H = #'ComponentType'{name=Cname}],Com,Extension) -> - emit(Com), - emit({asis,Cname}), - gen_record_default(H, Extension); -gen_record2(Name,SeqOrSet,[H = #'ComponentType'{name=Cname}|T],Com, Extension) -> - emit(Com), - emit({asis,Cname}), - gen_record_default(H, Extension), - gen_record2(Name,SeqOrSet,T,", ", Extension); -gen_record2(Name,SeqOrSet,[_|T],Com,Extension) -> - %% skip EXTENSIONMARK, ExtensionAdditionGroup and other markers - gen_record2(Name,SeqOrSet,T,Com,Extension). - -gen_record_default(#'ComponentType'{prop='OPTIONAL'}, _)-> - emit(" = asn1_NOVALUE"); -gen_record_default(#'ComponentType'{prop={'DEFAULT',_}}, _)-> - emit(" = asn1_DEFAULT"); -gen_record_default(_, _) -> - true. +gen_hrlhead(Mod) -> + emit(["%% Generated by the Erlang ASN.1 compiler. Version: ", + asn1ct:vsn(),nl, + "%% Purpose: Erlang record definitions for each named and unnamed",nl, + "%% SEQUENCE and SET, and macro definitions for each value",nl, + "%% definition in module ",Mod,".",nl,nl]). %% May only be a list or a two-tuple. to_textual_order({Root,Ext}) -> @@ -1585,27 +1504,19 @@ constructed_suffix('SEQUENCE OF',_) -> constructed_suffix('SET OF',_) -> 'SETOF'. -erule(ber) -> ber; -erule(per) -> per; -erule(uper) -> per. - index2suffix(0) -> ""; index2suffix(N) -> lists:concat(["_",N]). -ct_gen_module(ber) -> +ct_gen_module(#gen{erule=ber}) -> asn1ct_gen_ber_bin_v2; -ct_gen_module(per) -> - asn1ct_gen_per; -ct_gen_module(uper) -> +ct_gen_module(#gen{erule=per}) -> asn1ct_gen_per. -ct_constructed_module(ber) -> +ct_constructed_module(#gen{erule=ber}) -> asn1ct_constructed_ber_bin_v2; -ct_constructed_module(per) -> - asn1ct_constructed_per; -ct_constructed_module(uper) -> +ct_constructed_module(#gen{erule=per}) -> asn1ct_constructed_per. get_constraint(C,Key) -> @@ -1617,19 +1528,9 @@ get_constraint(C,Key) -> {value,Cnstr} -> Cnstr end. - -get_record_name_prefix() -> - case lists:keysearch(record_name_prefix,1,get(encoding_options)) of - false -> - ""; - {value,{_,Prefix}} -> - Prefix - end. -get_macro_name_prefix() -> - case lists:keysearch(macro_name_prefix,1,get(encoding_options)) of - false -> - ""; - {value,{_,Prefix}} -> - Prefix - end. +get_record_name_prefix(#gen{rec_prefix=Prefix}) -> + Prefix. + +get_macro_name_prefix(#gen{macro_prefix=Prefix}) -> + Prefix. 
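For reference, do_gen_record/3 and do_gen_record_1/1 earlier in this file produce .hrl entries of the following shape for a SEQUENCE with a DEFAULT, an OPTIONAL and a mandatory component (record and field names invented):

-record('Rec', {
 a = asn1_DEFAULT,
 b = asn1_NOVALUE,
 c
}).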
diff --git a/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl b/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl index b884d14b0d..6c6d4193f3 100644 --- a/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl +++ b/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl @@ -1200,11 +1200,13 @@ gen_objset_enc(Erules, ObjSetName, UniqueName, {no_mod,no_name} -> gen_inlined_enc_funs(Fields, ClFields, ObjSetName, Val, NthObj); {CurrMod,Name} -> - emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl, + emit(["'getenc_",ObjSetName,"'(Id) when Id =:= ", + {asis,Val}," ->",nl, " fun 'enc_",Name,"'/3;",nl]), {[],NthObj}; {ModuleName,Name} -> - emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl]), + emit(["'getenc_",ObjSetName,"'(Id) when Id =:= ", + {asis,Val}," ->",nl]), emit_ext_fun(enc,ModuleName,Name), emit([";",nl]), {[],NthObj}; @@ -1382,11 +1384,13 @@ gen_objset_dec(Erules, ObjSName, UniqueName, [{ObjName,Val,Fields}|T], {no_mod,no_name} -> gen_inlined_dec_funs(Fields,ClFields,ObjSName,Val,NthObj); {CurrMod,Name} -> - emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl, + emit(["'getdec_",ObjSName,"'(Id) when Id =:= ", + {asis,Val}," ->",nl, " fun 'dec_",Name,"'/3;", nl]), NthObj; {ModuleName,Name} -> - emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl]), + emit(["'getdec_",ObjSName,"'(Id) when Id =:= ", + {asis,Val}," ->",nl]), emit_ext_fun(dec,ModuleName,Name), emit([";",nl]), NthObj; diff --git a/lib/asn1/src/asn1ct_gen_check.erl b/lib/asn1/src/asn1ct_gen_check.erl index abe77dd0cb..ccc62a3ce3 100644 --- a/lib/asn1/src/asn1ct_gen_check.erl +++ b/lib/asn1/src/asn1ct_gen_check.erl @@ -21,45 +21,51 @@ %% -module(asn1ct_gen_check). --export([emit/3]). +-export([emit/4]). -import(asn1ct_gen, [emit/1]). -include("asn1_records.hrl"). -emit(Type, Default, Value) -> +emit(Gen, Type, Default, Value) -> Key = {Type,Default}, - Gen = fun(Fd, Name) -> - file:write(Fd, gen(Name, Type, Default)) - end, + DoGen = fun(Fd, Name) -> + file:write(Fd, gen(Gen, Name, Type, Default)) + end, emit(" case "), - asn1ct_func:call_gen("is_default_", Key, Gen, [Value]), + asn1ct_func:call_gen("is_default_", Key, DoGen, [Value]), emit([" of",nl, "true -> {[],0};",nl, "false ->",nl]). -gen(Name, #type{def=T}, Default) -> +gen(#gen{pack=Pack}=Gen, Name, #type{def=T}, Default) -> + DefMarker = case Pack of + record -> "asn1_DEFAULT"; + map -> atom_to_list(?MISSING_IN_MAP) + end, NameStr = atom_to_list(Name), - [NameStr,"(asn1_DEFAULT) ->\n", - "true;\n"|case do_gen(T, Default) of - {literal,Literal} -> - [NameStr,"(",term2str(Literal),") ->\n","true;\n", - NameStr,"(_) ->\n","false.\n\n"]; - {exception,Func,Args} -> - [NameStr,"(Value) ->\n", - "try ",Func,"(Value",arg2str(Args),") of\n", - "_ -> true\n" - "catch throw:false -> false\n" - "end.\n\n"] - end]. + [NameStr,"(",DefMarker,") ->\n", + "true;\n"| + case do_gen(Gen, T, Default) of + {literal,Literal} -> + [NameStr,"(Def) when Def =:= ",term2str(Literal)," ->\n", + "true;\n", + NameStr,"(_) ->\n","false.\n\n"]; + {exception,Func,Args} -> + [NameStr,"(Value) ->\n", + "try ",Func,"(Value",arg2str(Args),") of\n", + "_ -> true\n" + "catch throw:false -> false\n" + "end.\n\n"] + end]. 
-do_gen(_, asn1_NOVALUE) -> +do_gen(_Gen, _, asn1_NOVALUE) -> {literal,asn1_NOVALUE}; -do_gen(#'Externaltypereference'{module=M,type=T}, Default) -> +do_gen(Gen, #'Externaltypereference'{module=M,type=T}, Default) -> #typedef{typespec=#type{def=Td}} = asn1_db:dbget(M, T), - do_gen(Td, Default); -do_gen('BOOLEAN', Default) -> + do_gen(Gen, Td, Default); +do_gen(_Gen, 'BOOLEAN', Default) -> {literal,Default}; -do_gen({'BIT STRING',[]}, Default) -> +do_gen(_Gen, {'BIT STRING',[]}, Default) -> true = is_bitstring(Default), %Assertion. case asn1ct:use_legacy_types() of false -> @@ -67,17 +73,17 @@ do_gen({'BIT STRING',[]}, Default) -> true -> {exception,need(check_legacy_bitstring, 2),[Default]} end; -do_gen({'BIT STRING',[_|_]=NBL}, Default) -> +do_gen(_Gen, {'BIT STRING',[_|_]=NBL}, Default) -> do_named_bitstring(NBL, Default); -do_gen({'ENUMERATED',_}, Default) -> +do_gen(_Gen, {'ENUMERATED',_}, Default) -> {literal,Default}; -do_gen('INTEGER', Default) -> +do_gen(_Gen, 'INTEGER', Default) -> {literal,Default}; -do_gen({'INTEGER',NNL}, Default) -> +do_gen(_Gen, {'INTEGER',NNL}, Default) -> {exception,need(check_int, 3),[Default,NNL]}; -do_gen('NULL', Default) -> +do_gen(_Gen, 'NULL', Default) -> {literal,Default}; -do_gen('OCTET STRING', Default) -> +do_gen(_Gen, 'OCTET STRING', Default) -> true = is_binary(Default), %Assertion. case asn1ct:use_legacy_types() of false -> @@ -85,34 +91,34 @@ do_gen('OCTET STRING', Default) -> true -> {exception,need(check_octetstring, 2),[Default]} end; -do_gen('OBJECT IDENTIFIER', Default0) -> +do_gen(_Gen, 'OBJECT IDENTIFIER', Default0) -> Default = pre_process_oid(Default0), {exception,need(check_objectidentifier, 2),[Default]}; -do_gen({'CHOICE',Cs}, Default) -> +do_gen(Gen, {'CHOICE',Cs}, Default) -> {Tag,Value} = Default, [Type] = [Type || #'ComponentType'{name=T,typespec=Type} <- Cs, T =:= Tag], - case do_gen(Type#type.def, Value) of + case do_gen(Gen, Type#type.def, Value) of {literal,Lit} -> {literal,{Tag,Lit}}; {exception,Func0,Args} -> Key = {Tag,Func0,Args}, - Gen = fun(Fd, Name) -> - S = gen_choice(Name, Tag, Func0, Args), - ok = file:write(Fd, S) + DoGen = fun(Fd, Name) -> + S = gen_choice(Name, Tag, Func0, Args), + ok = file:write(Fd, S) end, - Func = asn1ct_func:call_gen("is_default_choice", Key, Gen), + Func = asn1ct_func:call_gen("is_default_choice", Key, DoGen), {exception,atom_to_list(Func),[]} end; -do_gen(#'SEQUENCE'{components=Cs}, Default) -> - do_seq_set(Cs, Default); -do_gen({'SEQUENCE OF',Type}, Default) -> - do_sof(Type, Default); -do_gen(#'SET'{components=Cs}, Default) -> - do_seq_set(Cs, Default); -do_gen({'SET OF',Type}, Default) -> - do_sof(Type, Default); -do_gen(Type, Default) -> +do_gen(Gen, #'SEQUENCE'{components=Cs}, Default) -> + do_seq_set(Gen, Cs, Default); +do_gen(Gen, {'SEQUENCE OF',Type}, Default) -> + do_sof(Gen, Type, Default); +do_gen(Gen, #'SET'{components=Cs}, Default) -> + do_seq_set(Gen, Cs, Default); +do_gen(Gen, {'SET OF',Type}, Default) -> + do_sof(Gen, Type, Default); +do_gen(_Gen, Type, Default) -> case asn1ct_gen:unify_if_string(Type) of restrictedstring -> {exception,need(check_restrictedstring, 2),[Default]}; @@ -136,39 +142,58 @@ do_named_bitstring(_, Default) when is_bitstring(Default) -> end, {exception,need(Func, 3),[Default,bit_size(Default)]}. 
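Taken together, emit/4 and do_gen/3 produce a predicate that accepts both the explicit default marker and the literal default value. For a BOOLEAN component with DEFAULT TRUE under the record backend the generated function looks roughly like this (the actual name is chosen by asn1ct_func:call_gen/3; is_default_001 is invented):

is_default_001(asn1_DEFAULT) ->
    true;
is_default_001(Def) when Def =:= true ->
    true;
is_default_001(_) ->
    false.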
-do_seq_set(Cs0, Default) -> +do_seq_set(#gen{pack=record}=Gen, Cs0, Default) -> Tag = element(1, Default), Cs1 = [T || #'ComponentType'{typespec=T} <- Cs0], - Cs = components(Cs1, tl(tuple_to_list(Default))), + Cs = components(Gen, Cs1, tl(tuple_to_list(Default))), case are_all_literals(Cs) of true -> Literal = list_to_tuple([Tag|[L || {literal,L} <- Cs]]), {literal,Literal}; false -> Key = {Cs,Default}, - Gen = fun(Fd, Name) -> - S = gen_components(Name, Tag, Cs), - ok = file:write(Fd, S) - end, - Func = asn1ct_func:call_gen("is_default_cs_", Key, Gen), + DoGen = fun(Fd, Name) -> + S = gen_components(Name, Tag, Cs), + ok = file:write(Fd, S) + end, + Func = asn1ct_func:call_gen("is_default_cs_", Key, DoGen), + {exception,atom_to_list(Func),[]} + end; +do_seq_set(#gen{pack=map}=Gen, Cs0, Default) -> + Cs1 = [{N,T} || #'ComponentType'{name=N,typespec=T} <- Cs0], + Cs = map_components(Gen, Cs1, Default), + AllLiterals = lists:all(fun({_,{literal,_}}) -> true; + ({_,_}) -> false + end, Cs), + case AllLiterals of + true -> + L = [{Name,Lit} || {Name,{literal,Lit}} <- Cs], + {literal,maps:from_list(L)}; + false -> + Key = {Cs,Default}, + DoGen = fun(Fd, Name) -> + S = gen_map_components(Name, Cs), + ok = file:write(Fd, S) + end, + Func = asn1ct_func:call_gen("is_default_cs_", Key, DoGen), {exception,atom_to_list(Func),[]} end. -do_sof(Type, Default0) -> +do_sof(Gen, Type, Default0) -> Default = lists:sort(Default0), Cs0 = lists:duplicate(length(Default), Type), - Cs = components(Cs0, Default), + Cs = components(Gen, Cs0, Default), case are_all_literals(Cs) of true -> Literal = [Lit || {literal,Lit} <- Cs], {exception,need(check_literal_sof, 2),[Literal]}; false -> Key = Cs, - Gen = fun(Fd, Name) -> - S = gen_sof(Name, Cs), - ok = file:write(Fd, S) + DoGen = fun(Fd, Name) -> + S = gen_sof(Name, Cs), + ok = file:write(Fd, S) end, - Func = asn1ct_func:call_gen("is_default_sof", Key, Gen), + Func = asn1ct_func:call_gen("is_default_sof", Key, DoGen), {exception,atom_to_list(Func),[]} end. @@ -199,6 +224,39 @@ gen_cs_2([], _) -> "throw(false)\n" "end.\n"]. +gen_map_components(Name, Cs) -> + [atom_to_list(Name),"(Value) ->\n", + "case Value of\n", + "#{"|gen_map_cs_1(Cs, 1, "", [])]. + +gen_map_cs_1([{Name,{literal,Lit}}|T], I, Sep, Acc) -> + Var = "E"++integer_to_list(I), + G = Var ++ " =:= " ++ term2str(Lit), + [Sep,term2str(Name),":=",Var| + gen_map_cs_1(T, I+1, ",\n", [{guard,G}|Acc])]; +gen_map_cs_1([{Name,Exc}|T], I, Sep, Acc) -> + Var = "E"++integer_to_list(I), + [Sep,term2str(Name),":=",Var| + gen_map_cs_1(T, I+1, ",\n", [{exc,{Var,Exc}}|Acc])]; +gen_map_cs_1([], _, _, Acc) -> + G = lists:join(", ", [S || {guard,S} <- Acc]), + Exc = [E || {exc,E} <- Acc], + Body = gen_map_cs_2(Exc, ""), + case G of + [] -> + ["} ->\n"|Body]; + [_|_] -> + ["} when ",G," ->\n"|Body] + end. + +gen_map_cs_2([{Var,{exception,Func,Args}}|T], Sep) -> + [Sep,Func,"(",Var,arg2str(Args),")"|gen_map_cs_2(T, ",\n")]; +gen_map_cs_2([], _) -> + [";\n", + "_ ->\n" + "throw(false)\n" + "end.\n"]. + gen_sof(Name, Cs) -> [atom_to_list(Name),"(Value) ->\n", "case length(Value) of\n", @@ -221,9 +279,18 @@ gen_sof_1([{exception,Func,Args}|Cs], I) -> gen_sof_1([], _) -> ".\n". -components([#type{def=Def}|Ts], [V|Vs]) -> - [do_gen(Def, V)|components(Ts, Vs)]; -components([], []) -> []. +components(Gen, [#type{def=Def}|Ts], [V|Vs]) -> + [do_gen(Gen, Def, V)|components(Gen, Ts, Vs)]; +components(_Gen, [], []) -> []. 
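gen_map_components/2 and its helpers above emit an is_default_cs_* checker that matches the candidate value against a map pattern, turns literal defaults into guards and delegates the remaining components to other generated checkers. A hand-written sketch of that shape (the component names, the literal default and the helper are made up; this is not actual generated output):

    is_default_cs_example(Value) ->
        case Value of
            #{a := E1, b := E2} when E1 =:= 0 ->
                is_default_b_example(E2);
            _ ->
                throw(false)
        end.

    %% Stand-in for a generated per-component checker: returns true for
    %% the default value and throws false otherwise.
    is_default_b_example(<<1,2,3>>) -> true;
    is_default_b_example(_) -> throw(false).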
+ +map_components(Gen, [{Name,#type{def=Def}}|Ts], Value) -> + case maps:find(Name, Value) of + {ok,V} -> + [{Name,do_gen(Gen, Def, V)}|map_components(Gen, Ts, Value)]; + error -> + map_components(Gen, Ts, Value) + end; +map_components(_Gen, [], _Value) -> []. gen_choice(Name, Tag, Func, Args) -> NameStr = atom_to_list(Name), diff --git a/lib/asn1/src/asn1ct_gen_per.erl b/lib/asn1/src/asn1ct_gen_per.erl index aa7223904e..9671a566bf 100644 --- a/lib/asn1/src/asn1ct_gen_per.erl +++ b/lib/asn1/src/asn1ct_gen_per.erl @@ -113,11 +113,7 @@ gen_encode_prim(Erules, D) -> Value = {var,atom_to_list(asn1ct_gen:mk_var(asn1ct_name:curr(val)))}, gen_encode_prim(Erules, D, Value). -gen_encode_prim(Erules, #type{}=D, Value) -> - Aligned = case Erules of - uper -> false; - per -> true - end, +gen_encode_prim(#gen{erule=per,aligned=Aligned}, #type{}=D, Value) -> Imm = gen_encode_prim_imm(Value, D, Aligned), asn1ct_imm:enc_cg(Imm, Aligned). @@ -284,11 +280,7 @@ gen_dec_external(Ext, BytesVar) -> _ -> [{asis,Mod},":"] end,{asis,dec_func(Type)},"(",BytesVar,")"]). -gen_dec_imm(Erule, #type{def=Name,constraint=C}) -> - Aligned = case Erule of - uper -> false; - per -> true - end, +gen_dec_imm(#gen{erule=per,aligned=Aligned}, #type{def=Name,constraint=C}) -> gen_dec_imm_1(Name, C, Aligned). gen_dec_imm_1('ASN1_OPEN_TYPE', Constraint, Aligned) -> diff --git a/lib/asn1/src/asn1ct_imm.erl b/lib/asn1/src/asn1ct_imm.erl index 8b96242c56..130f68c21d 100644 --- a/lib/asn1/src/asn1ct_imm.erl +++ b/lib/asn1/src/asn1ct_imm.erl @@ -37,9 +37,12 @@ per_enc_open_type/2, per_enc_restricted_string/3, per_enc_small_number/2]). --export([per_enc_extension_bit/2,per_enc_extensions/4,per_enc_optional/3]). +-export([per_enc_extension_bit/2,per_enc_extensions/4, + per_enc_extensions_map/4, + per_enc_optional/2]). -export([per_enc_sof/5]). --export([enc_absent/3,enc_append/1,enc_element/2]). +-export([enc_absent/3,enc_append/1,enc_element/2,enc_maps_get/2, + enc_comment/1]). -export([enc_cg/2]). -export([optimize_alignment/1,optimize_alignment/2, dec_slim_cg/2,dec_code_gen/2]). @@ -214,7 +217,8 @@ per_enc_legacy_bit_string(Val0, NNL0, Constraint0, Aligned) -> per_enc_boolean(Val0, _Aligned) -> {B,[Val]} = mk_vars(Val0, []), B++build_cond([[{eq,Val,false},{put_bits,0,1,[1]}], - [{eq,Val,true},{put_bits,1,1,[1]}]]). + [{eq,Val,true},{put_bits,1,1,[1]}], + ['_',{error,{illegal_boolean,Val}}]]). per_enc_choice(Val0, Cs0, _Aligned) -> {B,[Val]} = mk_vars(Val0, []), @@ -235,7 +239,7 @@ per_enc_enumerated(Val0, Root, Aligned) -> B++[{'cond',Cs++enumerated_error(Val)}]. enumerated_error(Val) -> - [['_',{error,Val}]]. + [['_',{error,{illegal_enumerated,Val}}]]. per_enc_integer(Val0, Constraint0, Aligned) -> {B,[Val]} = mk_vars(Val0, []), @@ -349,27 +353,32 @@ per_enc_extensions(Val0, Pos0, NumBits, Aligned) when NumBits > 0 -> ['_'|Length ++ PutBits]]}], {var,"Extensions"}}]. -per_enc_optional(Val0, {Pos,DefVals}, _Aligned) when is_integer(Pos), - is_list(DefVals) -> - {B,Val} = enc_element(Pos, Val0), +per_enc_extensions_map(Val0, Vars, Undefined, Aligned) -> + NumBits = length(Vars), + {B,[_Val,Bitmap]} = mk_vars(Val0, [bitmap]), + Length = per_enc_small_length(NumBits, Aligned), + PutBits = case NumBits of + 1 -> [{put_bits,1,1,[1]}]; + _ -> [{put_bits,Bitmap,NumBits,[1]}] + end, + BitmapExpr = extensions_bitmap(Vars, Undefined), + B++[{assign,Bitmap,BitmapExpr}, + {list,[{'cond',[[{eq,Bitmap,0}], + ['_'|Length ++ PutBits]]}], + {var,"Extensions"}}]. 
+ +per_enc_optional(Val, DefVals) when is_list(DefVals) -> Zero = {put_bits,0,1,[1]}, One = {put_bits,1,1,[1]}, - B++[{'cond', - [[{eq,Val,DefVal},Zero] || DefVal <- DefVals] ++ [['_',One]]}]; -per_enc_optional(Val0, {Pos,{call,M,F,A}}, _Aligned) when is_integer(Pos) -> - {B,Val} = enc_element(Pos, Val0), + [{'cond', + [[{eq,Val,DefVal},Zero] || DefVal <- DefVals] ++ [['_',One]]}]; +per_enc_optional(Val, {call,M,F,A}) -> {[],[[],Tmp]} = mk_vars([], [tmp]), Zero = {put_bits,0,1,[1]}, One = {put_bits,1,1,[1]}, - B++[{call,M,F,[Val|A],Tmp}, - {'cond', - [[{eq,Tmp,true},Zero],['_',One]]}]; -per_enc_optional(Val0, Pos, _Aligned) when is_integer(Pos) -> - {B,Val} = enc_element(Pos, Val0), - Zero = {put_bits,0,1,[1]}, - One = {put_bits,1,1,[1]}, - B++[{'cond',[[{eq,Val,asn1_NOVALUE},Zero], - ['_',One]]}]. + [{call,M,F,[Val|A],Tmp}, + {'cond', + [[{eq,Tmp,true},Zero],['_',One]]}]. per_enc_sof(Val0, Constraint, ElementVar, ElementImm, Aligned) -> {B,[Val,Len]} = mk_vars(Val0, [len]), @@ -423,6 +432,16 @@ enc_element(N, Val0) -> {[],[Val,Dst]} = mk_vars(Val0, [element]), {[{call,erlang,element,[N,Val],Dst}],Dst}. +enc_maps_get(N, Val0) -> + {[],[Val,Dst0]} = mk_vars(Val0, [element]), + {var,Dst} = Dst0, + DstExpr = {expr,lists:concat(["#{",N,":=",Dst,"}"])}, + {var,SrcVar} = Val, + {[{assign,DstExpr,SrcVar}],Dst0}. + +enc_comment(Comment) -> + {comment,Comment}. + enc_cg(Imm0, false) -> Imm1 = enc_cse(Imm0), Imm2 = enc_pre_cg(Imm1), @@ -860,10 +879,8 @@ flatten_map_cs_1([integer_default], {Int,_}) -> [{'_',Int}]; flatten_map_cs_1([enum_default], {Int,_}) -> [{'_',["{asn1_enum,",Int,"}"]}]; -flatten_map_cs_1([enum_error], {Var,Cs}) -> - Vs = [V || {_,V} <- Cs], - [{'_',["exit({error,{asn1,{decode_enumerated,{",Var,",", - {asis,Vs},"}}}})"]}]; +flatten_map_cs_1([enum_error], {Var,_}) -> + [{'_',["exit({error,{asn1,{decode_enumerated,",Var,"}}})"]}]; flatten_map_cs_1([], _) -> []. flatten_hoist_align([[{align_bits,_,_}=Ab|T]|Cs]) -> @@ -1037,6 +1054,7 @@ split_off_nonbuilding(Imm) -> is_nonbuilding({assign,_,_}) -> true; is_nonbuilding({call,_,_,_,_}) -> true; +is_nonbuilding({comment,_}) -> true; is_nonbuilding({lc,_,_,_,_}) -> true; is_nonbuilding({set,_,_}) -> true; is_nonbuilding({list,_,_}) -> true; @@ -1093,7 +1111,7 @@ per_enc_integer_1(Val0, [{{_,_}=Constr,[]}], Aligned) -> per_enc_integer_1(Val0, [Constr], Aligned) -> {Prefix,Check,Action} = per_enc_integer_2(Val0, Constr, Aligned), Prefix++build_cond([[Check|Action], - ['_',{error,Val0}]]). + ['_',{error,{illegal_integer,Val0}}]]). per_enc_integer_2(Val, {'SingleValue',Sv}, Aligned) when is_integer(Sv) -> per_enc_constrained(Val, Sv, Sv, Aligned); @@ -1240,6 +1258,20 @@ enc_length(Len, {Lb,Ub}, Aligned) when is_integer(Lb) -> enc_length(Len, Sv, _Aligned) when is_integer(Sv) -> [{'cond',[[{eq,Len,Sv}]]}]. +extensions_bitmap(Vs, Undefined) -> + Highest = 1 bsl (length(Vs)-1), + Cs = extensions_bitmap_1(Vs, Undefined, Highest), + lists:flatten(lists:join(" bor ", Cs)). + +extensions_bitmap_1([{var,V}|Vs], Undefined, Power) -> + S = ["case ",V," of\n", + " ",Undefined," -> 0;\n" + " _ -> ",integer_to_list(Power),"\n" + "end"], + [S|extensions_bitmap_1(Vs, Undefined, Power bsr 1)]; +extensions_bitmap_1([], _, _) -> + []. 
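per_enc_extensions_map/4 relies on extensions_bitmap/2 above to build a 'bor' chain with one small case expression per extension addition, so the presence bitmap can be computed straight from the map fields without first packing the value into a tuple. A simplified, hand-written equivalent of the computation the generated code performs (Undefined stands for the marker used for an absent component):

    ext_bitmap(Values, Undefined) ->
        Highest = 1 bsl (length(Values) - 1),
        ext_bitmap(Values, Undefined, Highest, 0).

    ext_bitmap([V|Vs], Undefined, Power, Acc) ->
        Bit = case V of
                  Undefined -> 0;      %% absent: contributes nothing
                  _ -> Power           %% present: set this bit
              end,
        ext_bitmap(Vs, Undefined, Power bsr 1, Acc bor Bit);
    ext_bitmap([], _, _, Acc) ->
        Acc.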
+ put_bits_binary(Bin, _Unit, Aligned) when is_binary(Bin) -> Sz = byte_size(Bin), <<Int:Sz/unit:8>> = Bin, @@ -1903,6 +1935,8 @@ enc_opt({'cond',Cs0}, St0) -> {Cs,Type} = enc_opt_cond_1(Cs1, Type0, [{Cond,Imm}]), {{'cond',Cs},St0#ost{t=Type}} end; +enc_opt({comment,_}=Imm, St) -> + {Imm,St#ost{t=undefined}}; enc_opt({cons,H0,T0}, St0) -> {H,#ost{t=TypeH}=St1} = enc_opt(H0, St0), {T,#ost{t=TypeT}=St} = enc_opt(T0, St1), @@ -2292,6 +2326,9 @@ enc_cg({block,Imm}) -> enc_cg(Imm), emit([nl, "end"]); +enc_cg({seq,{comment,Comment},Then}) -> + emit(["%% ",Comment,nl]), + enc_cg(Then); enc_cg({seq,First,Then}) -> enc_cg(First), emit([com,nl]), @@ -2325,9 +2362,9 @@ enc_cg({'cond',Cs}) -> enc_cg_cond(Cs); enc_cg({error,Error}) when is_function(Error, 0) -> Error(); -enc_cg({error,Var0}) -> +enc_cg({error,{Tag,Var0}}) -> Var = mk_val(Var0), - emit(["exit({error,{asn1,{illegal_value,",Var,"}}})"]); + emit(["exit({error,{asn1,{",Tag,",",Var,"}}})"]); enc_cg({integer,Int}) -> emit(mk_val(Int)); enc_cg({lc,Body,Var,List}) -> @@ -2590,6 +2627,8 @@ enc_opt_al({call,per_common,encode_unconstrained_number,[_]}=Call, _) -> {[Call],0}; enc_opt_al({call,_,_,_,_}=Call, Al) -> {[Call],Al}; +enc_opt_al({comment,_}=Imm, Al) -> + {[Imm],Al}; enc_opt_al({'cond',Cs0}, Al0) -> {Cs,Al} = enc_opt_al_cond(Cs0, Al0), {[{'cond',Cs}],Al}; @@ -2686,6 +2725,8 @@ per_fixup([{block,Block}|T]) -> [{block,per_fixup(Block)}|per_fixup(T)]; per_fixup([{'assign',_,_}=H|T]) -> [H|per_fixup(T)]; +per_fixup([{comment,_}=H|T]) -> + [H|per_fixup(T)]; per_fixup([{'cond',Cs0}|T]) -> Cs = [[C|per_fixup(Act)] || [C|Act] <- Cs0], [{'cond',Cs}|per_fixup(T)]; diff --git a/lib/asn1/src/asn1ct_value.erl b/lib/asn1/src/asn1ct_value.erl index b3d41dd9f3..8bd99d995b 100644 --- a/lib/asn1/src/asn1ct_value.erl +++ b/lib/asn1/src/asn1ct_value.erl @@ -64,7 +64,11 @@ from_type(M,Typename,Type) when is_record(Type,type) -> end; {constructed,bif} when Typename == ['EXTERNAL'] -> Val=from_type_constructed(M,Typename,InnerType,Type), - asn1ct_eval_ext:transform_to_EXTERNAL1994(Val); + T = case M:maps() of + false -> transform_to_EXTERNAL1994; + true -> transform_to_EXTERNAL1994_maps + end, + asn1ct_eval_ext:T(Val); {constructed,bif} -> from_type_constructed(M,Typename,InnerType,Type) end; @@ -118,11 +122,13 @@ get_sequence(M,Typename,Type) -> #'SEQUENCE'{components=Cl} -> {'SEQUENCE',Cl}; #'SET'{components=Cl} -> {'SET',to_textual_order(Cl)} end, - case get_components(M,Typename,CompList) of - [] -> - {list_to_atom(asn1ct_gen:list2rname(Typename))}; - C -> - list_to_tuple([list_to_atom(asn1ct_gen:list2rname(Typename))|C]) + Cs = get_components(M, Typename, CompList), + case M:maps() of + false -> + RecordTag = list_to_atom(asn1ct_gen:list2rname(Typename)), + list_to_tuple([RecordTag|[Val || {_,Val} <- Cs]]); + true -> + maps:from_list(Cs) end. get_components(M,Typename,{Root,Ext}) -> @@ -130,9 +136,9 @@ get_components(M,Typename,{Root,Ext}) -> %% Should enhance this *** HERE *** with proper handling of extensions -get_components(M,Typename,[H|T]) -> - [from_type(M,Typename,H)| - get_components(M,Typename,T)]; +get_components(M, Typename, [H|T]) -> + #'ComponentType'{name=Name} = H, + [{Name,from_type(M, Typename, H)}|get_components(M, Typename, T)]; get_components(_,_,[]) -> []. diff --git a/lib/asn1/src/asn1rtt_ext.erl b/lib/asn1/src/asn1rtt_ext.erl index 3bf01823db..161b2db691 100644 --- a/lib/asn1/src/asn1rtt_ext.erl +++ b/lib/asn1/src/asn1rtt_ext.erl @@ -19,7 +19,8 @@ %% -module(asn1rtt_ext). 
--export([transform_to_EXTERNAL1990/1,transform_to_EXTERNAL1994/1]). +-export([transform_to_EXTERNAL1990/1,transform_to_EXTERNAL1990_maps/1, + transform_to_EXTERNAL1994/1,transform_to_EXTERNAL1994_maps/1]). transform_to_EXTERNAL1990({_,_,_,_}=Val) -> transform_to_EXTERNAL1990(tuple_to_list(Val), []); @@ -51,6 +52,30 @@ transform_to_EXTERNAL1990([Data_value], Acc) list_to_tuple(lists:reverse([{'octet-aligned',Data_value}|Acc])). +transform_to_EXTERNAL1990_maps(#{identification:=Id,'data-value':=Value}=V) -> + M0 = case Id of + {syntax,DRef} -> + #{'direct-reference'=>DRef}; + {'presentation-context-id',IndRef} -> + #{'indirect-reference'=>IndRef}; + {'context-negotiation', + #{'presentation-context-id':=IndRef, + 'transfer-syntax':=DRef}} -> + #{'direct-reference'=>DRef, + 'indirect-reference'=>IndRef} + end, + M = case V of + #{'data-value-descriptor':=Dvd} -> + M0#{'data-value-descriptor'=>Dvd}; + #{} -> + M0 + end, + M#{encoding=>{'octet-aligned',Value}}; +transform_to_EXTERNAL1990_maps(#{encoding:=_}=V) -> + %% Already in the EXTERNAL 1990 format. + V. + + transform_to_EXTERNAL1994({'EXTERNAL',DRef,IndRef,Data_v_desc,Encoding}=V) -> Identification = case {DRef,IndRef} of @@ -71,3 +96,38 @@ transform_to_EXTERNAL1994({'EXTERNAL',DRef,IndRef,Data_v_desc,Encoding}=V) -> %% information. V end. + +transform_to_EXTERNAL1994_maps(V0) -> + Identification = + case V0 of + #{'direct-reference':=DRef, + 'indirect-reference':=asn1_NOVALUE} -> + {syntax,DRef}; + #{'direct-reference':=asn1_NOVALUE, + 'indirect-reference':=IndRef} -> + {'presentation-context-id',IndRef}; + #{'direct-reference':=DRef, + 'indirect-reference':=IndRef} -> + {'context-negotiation', + #{'transfer-syntax'=>DRef, + 'presentation-context-id'=>IndRef}} + end, + case V0 of + #{encoding:={'octet-aligned',Val}} + when is_list(Val); is_binary(Val) -> + %% Transform to the EXTERNAL 1994 definition. + V = #{identification=>Identification, + 'data-value'=>Val}, + case V0 of + #{'data-value-descriptor':=asn1_NOVALUE} -> + V; + #{'data-value-descriptor':=Dvd} -> + V#{'data-value-descriptor'=>Dvd} + end; + _ -> + %% Keep the EXTERNAL 1990 definition to avoid losing + %% information. + V = [{K,V} || {K,V} <- maps:to_list(V0), + V =/= asn1_NOVALUE], + maps:from_list(V) + end. diff --git a/lib/asn1/src/asn1rtt_per_common.erl b/lib/asn1/src/asn1rtt_per_common.erl index 3896cb7fa5..e7edfb1ee0 100644 --- a/lib/asn1/src/asn1rtt_per_common.erl +++ b/lib/asn1/src/asn1rtt_per_common.erl @@ -140,6 +140,8 @@ encode_relative_oid(Val) when is_tuple(Val) -> encode_relative_oid(Val) when is_list(Val) -> list_to_binary([e_object_element(X)||X <- Val]). +encode_unconstrained_number(Val) when not is_integer(Val) -> + exit({error,{asn1,{illegal_integer,Val}}}); encode_unconstrained_number(Val) when Val >= 0 -> if Val < 16#80 -> diff --git a/lib/asn1/test/Makefile b/lib/asn1/test/Makefile index 40575e8a2f..afd063aa8e 100644 --- a/lib/asn1/test/Makefile +++ b/lib/asn1/test/Makefile @@ -82,6 +82,7 @@ MODULES= \ testInfObjExtract \ testParameterizedInfObj \ testFragmented \ + testMaps \ testMergeCompile \ testMultipleLevels \ testDeepTConstr \ @@ -114,8 +115,7 @@ MODULES= \ testImporting \ testExtensibilityImplied \ asn1_test_lib \ - asn1_app_test \ - asn1_appup_test \ + asn1_app_SUITE \ asn1_SUITE \ error_SUITE \ syntax_SUITE diff --git a/lib/asn1/test/asn1_SUITE.erl b/lib/asn1/test/asn1_SUITE.erl index b6430134ab..580c919b9d 100644 --- a/lib/asn1/test/asn1_SUITE.erl +++ b/lib/asn1/test/asn1_SUITE.erl @@ -21,6 +21,9 @@ -module(asn1_SUITE). 
+%% Suppress compilation of an addititional module compiled for maps. +-define(NO_MAPS_MODULE, asn1_test_lib_no_maps). + -define(only_ber(Func), if Rule =:= ber -> Func; true -> ok @@ -39,10 +42,11 @@ suite() -> {timetrap,{minutes,60}}]. all() -> - [{group, compile}, + [xref, + xref_export_all, + + {group, compile}, {group, parallel}, - {group, app_test}, - {group, appup_test}, % TODO: Investigate parallel running of these: testComment, @@ -64,13 +68,8 @@ groups() -> ber_optional, tagdefault_automatic]}, - {app_test, [], [{asn1_app_test, all}]}, - - {appup_test, [], [{asn1_appup_test, all}]}, - {parallel, Parallel, [cover, - xref, {group, ber}, % Uses 'P-Record', 'Constraints', 'MEDIA-GATEWAY-CONTROL'... {group, [], [parse, @@ -102,6 +101,7 @@ groups() -> testMultipleLevels, testOpt, testSeqDefault, + testMaps, % Uses 'External' {group, [], [testExternal, testSeqExtension]}, @@ -176,8 +176,11 @@ groups() -> {performance, [], [testTimer_ber, + testTimer_ber_maps, testTimer_per, - testTimer_uper]}]. + testTimer_per_maps, + testTimer_uper, + testTimer_uper_maps]}]. %%------------------------------------------------------------------------------ %% Init/end @@ -441,6 +444,16 @@ testDEFAULT(Config, Rule, Opts) -> testDef:main(Rule), testSeqSetDefaultVal:main(Rule, Opts). +testMaps(Config) -> + test(Config, fun testMaps/3, + [{ber,[maps,no_ok_wrapper]}, + {ber,[maps,der,no_ok_wrapper]}, + {per,[maps,no_ok_wrapper]}, + {uper,[maps,no_ok_wrapper]}]). +testMaps(Config, Rule, Opts) -> + asn1_test_lib:compile_all(['Maps'], Config, [Rule|Opts]), + testMaps:main(Rule). + testOpt(Config) -> test(Config, fun testOpt/3). testOpt(Config, Rule, Opts) -> asn1_test_lib:compile("Opt", Config, [Rule|Opts]), @@ -614,12 +627,12 @@ parse(Config) -> [asn1_test_lib:compile(M, Config, [abs]) || M <- test_modules()]. per(Config) -> - test(Config, fun per/3, [per,uper]). + test(Config, fun per/3, [per,uper,{per,[maps]},{uper,[maps]}]). per(Config, Rule, Opts) -> [module_test(M, Config, Rule, Opts) || M <- per_modules()]. ber_other(Config) -> - test(Config, fun ber_other/3, [ber]). + test(Config, fun ber_other/3, [ber,{ber,[maps]}]). ber_other(Config, Rule, Opts) -> [module_test(M, Config, Rule, Opts) || M <- ber_modules()]. @@ -628,7 +641,7 @@ der(Config) -> asn1_test_lib:compile_all(ber_modules(), Config, [der]). module_test(M0, Config, Rule, Opts) -> - asn1_test_lib:compile(M0, Config, [Rule|Opts]), + asn1_test_lib:compile(M0, Config, [Rule,?NO_MAPS_MODULE|Opts]), case list_to_atom(M0) of 'LDAP' -> %% Because of the recursive definition of 'Filter' in @@ -995,7 +1008,9 @@ testS1AP(Config, Rule, Opts) -> testRfcs() -> [{timetrap,{minutes,90}}]. -testRfcs(Config) -> test(Config, fun testRfcs/3, [{ber,[der]}]). +testRfcs(Config) -> test(Config, fun testRfcs/3, + [{ber,[der,?NO_MAPS_MODULE]}, + {ber,[der,maps]}]). testRfcs(Config, Rule, Opts) -> case erlang:system_info(system_architecture) of "sparc-sun-solaris2.10" -> @@ -1010,7 +1025,8 @@ test_compile_options(Config) -> ok = test_compile_options:path(Config), ok = test_compile_options:noobj(Config), ok = test_compile_options:record_name_prefix(Config), - ok = test_compile_options:verbose(Config). + ok = test_compile_options:verbose(Config), + ok = test_compile_options:maps(Config). testDoubleEllipses(Config) -> test(Config, fun testDoubleEllipses/3). testDoubleEllipses(Config, Rule, Opts) -> @@ -1027,18 +1043,6 @@ test_modified_x420(Config, Rule, Opts) -> test_modified_x420:test(Config). -testX420() -> - [{timetrap,{minutes,90}}]. 
-testX420(Config) -> - case erlang:system_info(system_architecture) of - "sparc-sun-solaris2.10" -> - {skip,"Too slow for an old Sparc"}; - _ -> - Rule = ber, - testX420:compile(Rule, [der], Config), - ok = testX420:ticket7759(Rule, Config) - end. - test_x691(Config) -> test(Config, fun test_x691/3, [per, uper]). test_x691(Config, Rule, Opts) -> @@ -1069,7 +1073,7 @@ test_x691(Config, Rule, Opts) -> ok. ticket_6143(Config) -> - ok = test_compile_options:ticket_6143(Config). + asn1_test_lib:compile("AA1", Config, [?NO_MAPS_MODULE]). testExtensionAdditionGroup(Config) -> test(Config, fun testExtensionAdditionGroup/3). @@ -1157,20 +1161,33 @@ END ok = asn1ct:compile(File, [{outdir, PrivDir}]). -timer_compile(Config, Rule) -> - asn1_test_lib:compile_all(["H235-SECURITY-MESSAGES", "H323-MESSAGES"], - Config, [no_ok_wrapper,Rule]). +timer_compile(Config, Opts0) -> + Files = ["H235-SECURITY-MESSAGES", "H323-MESSAGES"], + Opts = [no_ok_wrapper,?NO_MAPS_MODULE|Opts0], + asn1_test_lib:compile_all(Files, Config, Opts). testTimer_ber(Config) -> - timer_compile(Config, ber), + timer_compile(Config, [ber]), testTimer:go(). testTimer_per(Config) -> - timer_compile(Config, per), + timer_compile(Config, [per]), testTimer:go(). testTimer_uper(Config) -> - timer_compile(Config, uper), + timer_compile(Config, [uper]), + testTimer:go(). + +testTimer_ber_maps(Config) -> + timer_compile(Config, [ber,maps]), + testTimer:go(). + +testTimer_per_maps(Config) -> + timer_compile(Config, [per,maps]), + testTimer:go(). + +testTimer_uper_maps(Config) -> + timer_compile(Config, [uper,maps]), testTimer:go(). %% Test of multiple-line comment, OTP-8043 @@ -1179,9 +1196,11 @@ testComment(Config) -> asn1_test_lib:roundtrip('Comment', 'Seq', {'Seq',12,true}). testName2Number(Config) -> - N2NOptions = [{n2n,Type} || Type <- ['CauseMisc', 'CauseProtocol', - 'CauseRadioNetwork', - 'CauseTransport','CauseNas']], + N2NOptions0 = [{n2n,Type} || + Type <- ['CauseMisc', 'CauseProtocol', + 'CauseRadioNetwork', + 'CauseTransport','CauseNas']], + N2NOptions = [?NO_MAPS_MODULE|N2NOptions0], asn1_test_lib:compile("S1AP-IEs", Config, N2NOptions), 0 = 'S1AP-IEs':name2num_CauseMisc('control-processing-overload'), @@ -1191,8 +1210,9 @@ testName2Number(Config) -> %% Test that n2n option generates name2num and num2name functions supporting %% values not within the extension root if the enumeration type has an %% extension marker. - N2NOptionsExt = [{n2n, 'NoExt'}, {n2n, 'Ext'}, {n2n, 'Ext2'}], + N2NOptionsExt = [?NO_MAPS_MODULE,{n2n,'NoExt'},{n2n,'Ext'},{n2n,'Ext2'}], asn1_test_lib:compile("EnumN2N", Config, N2NOptionsExt), + %% Previously, name2num and num2name was not generated if the type didn't %% have an extension marker: 0 = 'EnumN2N':name2num_NoExt('blue'), @@ -1210,9 +1230,11 @@ testName2Number(Config) -> ok. ticket_7407(Config) -> - asn1_test_lib:compile("EUTRA-extract-7407", Config, [uper]), + Opts = [uper,?NO_MAPS_MODULE], + asn1_test_lib:compile("EUTRA-extract-7407", Config, Opts), ticket_7407_code(true), - asn1_test_lib:compile("EUTRA-extract-7407", Config, [uper,no_final_padding]), + asn1_test_lib:compile("EUTRA-extract-7407", Config, + [no_final_padding|Opts]), ticket_7407_code(false). ticket_7407_code(FinalPadding) -> @@ -1287,16 +1309,72 @@ ticket7904(Config) -> {ok,_} = 'RANAPextract1':encode('InitiatingMessage', Val1), {ok,_} = 'RANAPextract1':encode('InitiatingMessage', Val1). + +%% Make sure that functions exported from other modules are +%% actually used. 
+ xref(_Config) -> - xref:start(s), - xref:set_default(s, [{verbose,false},{warnings,false},{builtins,true}]), + S = ?FUNCTION_NAME, + xref:start(S), + xref:set_default(S, [{verbose,false},{warnings,false},{builtins,true}]), Test = filename:dirname(code:which(?MODULE)), - {ok,_PMs} = xref:add_directory(s, Test), - UnusedExports = "X - XU - asn1_appup_test - asn1_app_test - \".*_SUITE\" : Mod", - case xref:q(s, UnusedExports) of + {ok,_PMs} = xref:add_directory(S, Test), + Q = "X - XU - \".*_SUITE\" : Mod", + UnusedExports = xref:q(S, Q), + xref:stop(S), + case UnusedExports of {ok,[]} -> ok; {ok,[_|_]=Res} -> io:format("Exported, but unused: ~p\n", [Res]), ?t:fail() end. + +%% Ensure that all functions that are implicitly exported by +%% 'export_all' in this module are actually used. + +xref_export_all(_Config) -> + S = ?FUNCTION_NAME, + xref:start(S), + xref:set_default(S, [{verbose,false},{warnings,false},{builtins,true}]), + {ok,_PMs} = xref:add_module(S, code:which(?MODULE)), + AllCalled = all_called(), + Def = "Called := " ++ lists:flatten(io_lib:format("~p", [AllCalled])), + {ok,_} = xref:q(S, Def), + {ok,Unused} = xref:q(S, "X - Called - range (closure E | Called)"), + xref:stop(S), + case Unused of + [] -> + ok; + [_|_] -> + S = [io_lib:format("~p:~p/~p\n", [M,F,A]) || {M,F,A} <- Unused], + io:format("There are unused functions:\n\n~s\n", [S]), + ?t:fail(unused_functions) + end. + +%% Collect all functions that common_test will call in this module. + +all_called() -> + [{?MODULE,end_per_group,2}, + {?MODULE,end_per_suite,1}, + {?MODULE,end_per_testcase,2}, + {?MODULE,init_per_group,2}, + {?MODULE,init_per_suite,1}, + {?MODULE,init_per_testcase,2}, + {?MODULE,suite,0}] ++ + all_called_1(all() ++ groups()). + +all_called_1([{_,_}|T]) -> + all_called_1(T); +all_called_1([{_Name,_Flags,Fs}|T]) -> + all_called_1(Fs ++ T); +all_called_1([F|T]) when is_atom(F) -> + L = case erlang:function_exported(?MODULE, F, 0) of + false -> + [{?MODULE,F,1}]; + true -> + [{?MODULE,F,0},{?MODULE,F,1}] + end, + L ++ all_called_1(T); +all_called_1([]) -> + []. diff --git a/lib/asn1/test/asn1_SUITE_data/Maps.asn1 b/lib/asn1/test/asn1_SUITE_data/Maps.asn1 new file mode 100644 index 0000000000..fd5f373e45 --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/Maps.asn1 @@ -0,0 +1,17 @@ +Maps DEFINITIONS AUTOMATIC TAGS ::= +BEGIN + +XY ::= SEQUENCE { x INTEGER DEFAULT 0, y INTEGER DEFAULT 0 } + +xy1 XY ::= { x 42, y 17 } +xy2 XY ::= { } +xy3 XY ::= { y 999 } + +S ::= SEQUENCE { + xy XY DEFAULT { x 100, y 100 }, + os OCTET STRING OPTIONAL +} + +s1 S ::= {} + +END diff --git a/lib/asn1/test/asn1_SUITE_data/Prim.asn1 b/lib/asn1/test/asn1_SUITE_data/Prim.asn1 index 4fe0901683..91c8696e61 100644 --- a/lib/asn1/test/asn1_SUITE_data/Prim.asn1 +++ b/lib/asn1/test/asn1_SUITE_data/Prim.asn1 @@ -18,6 +18,8 @@ BEGIN IntExpPri ::= [PRIVATE 51] EXPLICIT INTEGER IntExpApp ::= [APPLICATION 52] EXPLICIT INTEGER + IntConstrained ::= INTEGER (0..255) + IntEnum ::= INTEGER {first(1),last(31)} Enum ::= ENUMERATED {monday(1),tuesday(2),wednesday(3),thursday(4), diff --git a/lib/asn1/test/asn1_SUITE_data/SeqExtension.asn1 b/lib/asn1/test/asn1_SUITE_data/SeqExtension.asn1 index 5fda19303a..e866ef2f4f 100644 --- a/lib/asn1/test/asn1_SUITE_data/SeqExtension.asn1 +++ b/lib/asn1/test/asn1_SUITE_data/SeqExtension.asn1 @@ -48,6 +48,17 @@ SeqExt6 ::= SEQUENCE [[ i6 [106] INTEGER, i7 [107] INTEGER ]] } +SeqExt7 ::= SEQUENCE +{ + -- The spaces between the ellipsis and the comma will prevent them + -- from being removed. + ... 
, + [[ a INTEGER (0..65535) OPTIONAL, + b OCTET STRING OPTIONAL, + c BOOLEAN + ]] +} + SeqExt1X ::= XSeqExt1 SeqExt2X ::= XSeqExt2 diff --git a/lib/asn1/test/asn1_SUITE_data/nbapsystem/NBAP-PDU-Discriptions.asn b/lib/asn1/test/asn1_SUITE_data/nbapsystem/NBAP-PDU-Descriptions.asn index b9be9934e4..12a4475422 100644 --- a/lib/asn1/test/asn1_SUITE_data/nbapsystem/NBAP-PDU-Discriptions.asn +++ b/lib/asn1/test/asn1_SUITE_data/nbapsystem/NBAP-PDU-Descriptions.asn @@ -4,7 +4,7 @@ --
-- **************************************************************
-NBAP-PDU-Discriptions {
+NBAP-PDU-Descriptions {
itu-t (0) identified-organization (4) etsi (0) mobileDomain (0)
umts-Access (20) modules (3) nbap (2) version1 (1) nbap-PDU-Descriptions (0) }
diff --git a/lib/asn1/test/asn1_SUITE_data/test_records.erl b/lib/asn1/test/asn1_SUITE_data/test_records.erl index 9fd07c1449..afb1c8c80b 100644 --- a/lib/asn1/test/asn1_SUITE_data/test_records.erl +++ b/lib/asn1/test/asn1_SUITE_data/test_records.erl @@ -25,7 +25,7 @@ -define(line,put(test_server_loc,{?MODULE,?LINE}),). --include("NBAP-PDU-Discriptions.hrl"). +-include("NBAP-PDU-Descriptions.hrl"). -include("NBAP-PDU-Contents.hrl"). -include("NBAP-Containers.hrl"). -include("NBAP-CommonDataTypes.hrl"). diff --git a/lib/asn1/test/asn1_SUITE_data/testobj.erl b/lib/asn1/test/asn1_SUITE_data/testobj.erl index e547ea4572..66f4a92188 100644 --- a/lib/asn1/test/asn1_SUITE_data/testobj.erl +++ b/lib/asn1/test/asn1_SUITE_data/testobj.erl @@ -967,7 +967,7 @@ pdu_pdp() -> 116,101,115,116, % lable1 = test 4, % length lable2 116,101,115,116, % lable2 = test - 4, % lenght lable3 + 4, % length lable3 116,101,115,116, % lable3 = test 4, % length lable3 116,101,115,116, % lable4 = test diff --git a/lib/asn1/test/asn1_app_test.erl b/lib/asn1/test/asn1_app_SUITE.erl index 028322f555..c089a7267c 100644 --- a/lib/asn1/test/asn1_app_test.erl +++ b/lib/asn1/test/asn1_app_SUITE.erl @@ -21,23 +21,24 @@ %%---------------------------------------------------------------------- %% Purpose: Verify the application specifics of the asn1 application %%---------------------------------------------------------------------- --module(asn1_app_test). - --compile(export_all). +-module(asn1_app_SUITE). +-export([all/0,groups/0,init_per_group/2,end_per_group/2, + init_per_suite/1,end_per_suite/1, + appup/1,fields/1,modules/1,export_all/1,app_depend/1]). %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -all() -> - [fields, modules, exportall, app_depend]. +all() -> + [appup, fields, modules, export_all, app_depend]. -groups() -> +groups() -> []. init_per_group(_GroupName, Config) -> - Config. + Config. end_per_group(_GroupName, Config) -> - Config. + Config. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% @@ -65,12 +66,15 @@ is_app(App) -> end_per_suite(Config) when is_list(Config) -> Config. +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +appup(Config) when is_list(Config) -> + ok = test_server:appup_test(asn1). %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%% . fields(Config) when is_list(Config) -> - AppFile = key1search(app_file, Config), + AppFile = key1find(app_file, Config), Fields = [vsn, description, modules, registered, applications], case check_fields(Fields, AppFile, []) of [] -> @@ -96,10 +100,9 @@ check_field(Name, AppFile, Missing) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%% . modules(Config) when is_list(Config) -> - AppFile = key1search(app_file, Config), - Mods = key1search(modules, AppFile), + AppFile = key1find(app_file, Config), + Mods = key1find(modules, AppFile), EbinList = get_ebin_mods(asn1), case missing_modules(Mods, EbinList, []) of [] -> @@ -112,10 +115,9 @@ modules(Config) when is_list(Config) -> ok; Extra -> check_asn1ct_modules(Extra) -% throw({error, {extra_modules, Extra}}) end, {ok, Mods}. - + get_ebin_mods(App) -> LibDir = code:lib_dir(App), EbinDir = filename:join([LibDir,"ebin"]), @@ -166,10 +168,9 @@ extra_modules(Mods, [Mod|Ebins], Extra) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%% . 
-exportall(Config) when is_list(Config) -> - AppFile = key1search(app_file, Config), - Mods = key1search(modules, AppFile), +export_all(Config) when is_list(Config) -> + AppFile = key1find(app_file, Config), + Mods = key1find(modules, AppFile), check_export_all(Mods). @@ -180,10 +181,10 @@ check_export_all([Mod|Mods]) -> {'EXIT', {undef, _}} -> check_export_all(Mods); O -> - case lists:keysearch(options, 1, O) of + case lists:keyfind(options, 1, O) of false -> check_export_all(Mods); - {value, {options, List}} -> + {options, List} -> case lists:member(export_all, List) of true -> throw({error, {export_all, Mod}}); @@ -193,13 +194,12 @@ check_export_all([Mod|Mods]) -> end end. - + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%% . app_depend(Config) when is_list(Config) -> - AppFile = key1search(app_file, Config), - Apps = key1search(applications, AppFile), + AppFile = key1find(app_file, Config), + Apps = key1find(applications, AppFile), check_apps(Apps). @@ -220,10 +220,10 @@ check_apps([App|Apps]) -> fail(Reason) -> exit({suite_failed, Reason}). -key1search(Key, L) -> - case lists:keysearch(Key, 1, L) of - undefined -> +key1find(Key, L) -> + case lists:keyfind(Key, 1, L) of + false -> fail({not_found, Key, L}); - {value, {Key, Value}} -> + {Key, Value} -> Value end. diff --git a/lib/asn1/test/asn1_appup_test.erl b/lib/asn1/test/asn1_appup_test.erl deleted file mode 100644 index 54540e53cc..0000000000 --- a/lib/asn1/test/asn1_appup_test.erl +++ /dev/null @@ -1,58 +0,0 @@ -%% -%% %CopyrightBegin% -%% -%% Copyright Ericsson AB 2005-2016. All Rights Reserved. -%% -%% Licensed under the Apache License, Version 2.0 (the "License"); -%% you may not use this file except in compliance with the License. -%% You may obtain a copy of the License at -%% -%% http://www.apache.org/licenses/LICENSE-2.0 -%% -%% Unless required by applicable law or agreed to in writing, software -%% distributed under the License is distributed on an "AS IS" BASIS, -%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -%% See the License for the specific language governing permissions and -%% limitations under the License. -%% -%% %CopyrightEnd% -%% -%% -%%---------------------------------------------------------------------- -%% Purpose: Verify the application specifics of the asn1 application -%%---------------------------------------------------------------------- --module(asn1_appup_test). --compile(export_all). --include_lib("common_test/include/ct.hrl"). - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -all() -> - [appup]. - -groups() -> - []. - -init_per_group(_GroupName, Config) -> - Config. - -end_per_group(_GroupName, Config) -> - Config. - - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -init_per_suite(Config) when is_list(Config) -> - Config. - - -end_per_suite(Config) when is_list(Config) -> - Config. - - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -appup() -> - [{doc, "perform a simple check of the asn1 appup file"}]. -appup(Config) when is_list(Config) -> - ok = ?t:appup_test(asn1). diff --git a/lib/asn1/test/asn1_test_lib.erl b/lib/asn1/test/asn1_test_lib.erl index dc614db4f2..a79958d229 100644 --- a/lib/asn1/test/asn1_test_lib.erl +++ b/lib/asn1/test/asn1_test_lib.erl @@ -25,7 +25,8 @@ hex_to_bin/1, match_value/2, parallel/0, - roundtrip/3,roundtrip/4,roundtrip_enc/3,roundtrip_enc/4]). + roundtrip/3,roundtrip/4,roundtrip_enc/3,roundtrip_enc/4, + map_roundtrip/3]). 
-include_lib("common_test/include/ct.hrl"). @@ -94,15 +95,58 @@ module(F0) -> list_to_atom(F). %% filename:join(CaseDir, F ++ ".beam"). -compile_file(File, Options) -> +compile_file(File, Options0) -> + Options = [warnings_as_errors|Options0], try - ok = asn1ct:compile(File, [warnings_as_errors|Options]) + ok = asn1ct:compile(File, Options), + ok = compile_maps(File, Options) catch _:Reason -> ct:print("Failed to compile ~s\n~p", [File,Reason]), error end. +compile_maps(File, Options) -> + unload_map_mod(File), + Incompat = [abs,compact_bit_string,legacy_bit_string, + legacy_erlang_types,maps,asn1_test_lib_no_maps], + case lists:any(fun(E) -> lists:member(E, Incompat) end, Options) of + true -> + ok; + false -> + compile_maps_1(File, Options) + end. + +compile_maps_1(File, Options) -> + ok = asn1ct:compile(File, [maps,no_ok_wrapper,noobj|Options]), + OutDir = proplists:get_value(outdir, Options), + Base0 = filename:rootname(filename:basename(File)), + Base = case filename:extension(Base0) of + ".set" -> + filename:rootname(Base0); + _ -> + Base0 + end, + ErlBase = Base ++ ".erl", + ErlFile = filename:join(OutDir, ErlBase), + {ok,Erl0} = file:read_file(ErlFile), + Erl = re:replace(Erl0, <<"-module\\('">>, "&maps_"), + MapsErlFile = filename:join(OutDir, "maps_" ++ ErlBase), + ok = file:write_file(MapsErlFile, Erl), + {ok,_} = compile:file(MapsErlFile, [report,{outdir,OutDir},{i,OutDir}]), + ok. + +unload_map_mod(File0) -> + File1 = filename:basename(File0), + File2 = filename:rootname(File1, ".asn"), + File3 = filename:rootname(File2, ".asn1"), + File4 = filename:rootname(File3, ".py"), + File = filename:rootname(File4, ".set"), + MapMod = list_to_atom("maps_"++File), + code:delete(MapMod), + code:purge(MapMod), + ok. + compile_erlang(Mod, Config, Options) -> DataDir = proplists:get_value(data_dir, Config), CaseDir = proplists:get_value(case_dir, Config), @@ -147,24 +191,60 @@ roundtrip(Mod, Type, Value) -> roundtrip(Mod, Type, Value, Value). roundtrip(Mod, Type, Value, ExpectedValue) -> - {ok,Encoded} = Mod:encode(Type, Value), - {ok,ExpectedValue} = Mod:decode(Type, Encoded), - test_ber_indefinite(Mod, Type, Encoded, ExpectedValue), - ok. + roundtrip_enc(Mod, Type, Value, ExpectedValue). roundtrip_enc(Mod, Type, Value) -> roundtrip_enc(Mod, Type, Value, Value). roundtrip_enc(Mod, Type, Value, ExpectedValue) -> - {ok,Encoded} = Mod:encode(Type, Value), - {ok,ExpectedValue} = Mod:decode(Type, Encoded), + case Mod:encode(Type, Value) of + {ok,Encoded} -> + {ok,ExpectedValue} = Mod:decode(Type, Encoded); + Encoded when is_binary(Encoded) -> + ExpectedValue = Mod:decode(Type, Encoded) + end, + map_roundtrip(Mod, Type, Encoded), test_ber_indefinite(Mod, Type, Encoded, ExpectedValue), Encoded. +map_roundtrip(Mod, Type, Encoded) -> + MapMod = list_to_atom("maps_"++atom_to_list(Mod)), + try MapMod:maps() of + true -> + map_roundtrip_1(MapMod, Type, Encoded) + catch + error:undef -> + ok + end. + %%% %%% Internal functions. %%% +map_roundtrip_1(Mod, Type, Encoded) -> + Decoded = Mod:decode(Type, Encoded), + case Mod:encode(Type, Decoded) of + Encoded -> + ok; + OtherEncoding -> + case is_named_bitstring(Decoded) of + true -> + %% In BER, named BIT STRINGs with different number of + %% trailing zeroes decode to the same value. + ok; + false -> + error({encode_mismatch,Decoded,Encoded,OtherEncoding}) + end + end, + ok. + +is_named_bitstring([H|T]) -> + is_atom(H) andalso is_named_bitstring(T); +is_named_bitstring([]) -> + true; +is_named_bitstring(_) -> + false. 
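With compile_maps/2 above, every specification compiled through the test library (unless an incompatible option such as maps itself is given) also gets a second, maps-based module whose name is prefixed with maps_, and map_roundtrip/3 cross-checks an existing encoding against it. A hedged usage sketch; the module, type and value below are illustrative:

    %% After asn1_test_lib:compile("File", Config, [per]), both 'File'
    %% (records) and 'maps_File' (maps) are assumed to be available.
    {ok,Enc} = 'File':encode('Seq', {'Seq',1,true}),
    %% Decode and re-encode Enc with the maps module; returns ok, or
    %% raises an error if the re-encoding differs.
    ok = asn1_test_lib:map_roundtrip('File', 'Seq', Enc).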
+ hex2num(C) when $0 =< C, C =< $9 -> C - $0; hex2num(C) when $A =< C, C =< $F -> C - $A + 10; hex2num(C) when $a =< C, C =< $f -> C - $a + 10. @@ -179,7 +259,12 @@ test_ber_indefinite(Mod, Type, Encoded, ExpectedValue) -> case Mod:encoding_rule() of ber -> Indefinite = iolist_to_binary(ber_indefinite(Encoded)), - {ok,ExpectedValue} = Mod:decode(Type, Indefinite); + case Mod:decode(Type, Indefinite) of + {ok,ExpectedValue} -> + ok; + ExpectedValue -> + ok + end; _ -> ok end. diff --git a/lib/asn1/test/ber_decode_error.erl b/lib/asn1/test/ber_decode_error.erl index c0840e02d7..c45d130ff4 100644 --- a/lib/asn1/test/ber_decode_error.erl +++ b/lib/asn1/test/ber_decode_error.erl @@ -26,48 +26,41 @@ run([]) -> {ok,B} = 'Constructed':encode('S3', {'S3',17}), [T,L|V] = binary_to_list(B), Bytes = list_to_binary([T,L+3|V] ++ [2,1,3]), - case 'Constructed':decode('S3', Bytes) of - {error,{asn1,{unexpected,_}}} -> ok - end, + {unexpected,_} = dec_error('S3', Bytes), %% Unexpected bytes must be accepted if there is an extensionmark {ok,{'S3ext',17}} = 'Constructed':decode('S3ext', Bytes), %% Truncated tag. - {error,{asn1,{invalid_tag,_}}} = - (catch 'Constructed':decode('I', <<31,255,255>>)), + {invalid_tag,_} = dec_error('I', <<31,255,255>>), %% Overlong tag. - {error,{asn1,{invalid_tag,_}}} = - (catch 'Constructed':decode('I', <<31,255,255,255,127>>)), + {invalid_tag,_} = dec_error('I', <<31,255,255,255,127>>), %% Invalid length. - {error,{asn1,{invalid_length,_}}} = - (catch 'Constructed':decode('I', <<8,255>>)), + {invalid_length,_} = dec_error('I', <<8,255>>), %% Other errors. - {error,{asn1,{invalid_value,_}}} = - (catch 'Constructed':decode('I', <<>>)), + {invalid_value,_} = dec_error('I', <<>>), - {error,{asn1,{invalid_value,_}}} = - (catch 'Constructed':decode('I', <<8,7>>)), + {invalid_value,_} = dec_error('I', <<8,7>>), %% Short indefinite length. Make sure that the decoder doesn't look %% beyond the end of binary when looking for a 0,0 terminator. - {error,{asn1,{invalid_length,_}}} = - (catch 'Constructed':decode('S', sub(<<8,16#80,0,0>>, 3))), - {error,{asn1,{invalid_length,_}}} = - (catch 'Constructed':decode('S', sub(<<8,16#80,0,0>>, 2))), - {error,{asn1,{invalid_length,_}}} = - (catch 'Constructed':decode('S', sub(<<40,16#80,1,1,255,0,0>>, 6))), - {error,{asn1,{invalid_length,_}}} = - (catch 'Constructed':decode('S', sub(<<40,16#80,1,1,255,0,0>>, 5))), + {invalid_length,_} = dec_error('S', sub(<<8,16#80,0,0>>, 3)), + {invalid_length,_} = dec_error('S', sub(<<8,16#80,0,0>>, 2)), + {invalid_length,_} = dec_error('S', sub(<<40,16#80,1,1,255,0,0>>, 6)), + {invalid_length,_} = dec_error('S', sub(<<40,16#80,1,1,255,0,0>>, 5)), %% A primitive must not be encoded with an indefinite length. - {error,{asn1,{invalid_length,_}}} = - (catch 'Constructed':decode('OS', <<4,128,4,3,97,98,99,0,0>>)), + {invalid_length,_} = dec_error('OS', <<4,128,4,3,97,98,99,0,0>>), ok. +dec_error(T, Bin) -> + {error,{asn1,{Reason,Stk}}} = 'Constructed':decode(T, Bin), + [{_,_,_,_}|_] = Stk, + Reason. + sub(Bin, Bytes) -> <<B:Bytes/binary,_/binary>> = Bin, B. diff --git a/lib/asn1/test/h323test.erl b/lib/asn1/test/h323test.erl index 935af0ba09..41a9159335 100644 --- a/lib/asn1/test/h323test.erl +++ b/lib/asn1/test/h323test.erl @@ -27,6 +27,8 @@ run(per) -> run(); run(_Rules) -> ok. 
run() -> + roundtrip('EndpointType', endpoint()), + roundtrip('Alerting-UUIE', alerting_uuie()), roundtrip('H323-UserInformation', alerting_val(), alerting_enc()), roundtrip('H323-UserInformation', connect_val(), connect_enc()), general_string(), @@ -36,18 +38,24 @@ alerting_val() -> {'H323-UserInformation', {'H323-UU-PDU', {alerting, - {'Alerting-UUIE', - {0,0,8,2250,0,2}, - {'EndpointType',asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE, - asn1_NOVALUE,asn1_NOVALUE, - {'TerminalInfo',asn1_NOVALUE}, - false,false}, - asn1_NOVALUE, - {'CallIdentifier',<<0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0>>}, - asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE}}, + alerting_uuie()}, asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE}, asn1_NOVALUE}. +endpoint() -> + {'EndpointType',asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE, + asn1_NOVALUE,asn1_NOVALUE, + {'TerminalInfo',asn1_NOVALUE}, + false,false}. + +alerting_uuie() -> + {'Alerting-UUIE', + {0,0,8,2250,0,2}, + endpoint(), + asn1_NOVALUE, + {'CallIdentifier',<<0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0>>}, + asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE}. + alerting_enc() -> "0380060008914a0002020120110000000000000000000000000000000000". @@ -82,6 +90,9 @@ general_string() -> UI = <<109,64,1,57>>, {ok, _V} = 'MULTIMEDIA-SYSTEM-CONTROL':decode(Type, UI). +roundtrip(T, V) -> + asn1_test_lib:roundtrip('H323-MESSAGES', T, V). + roundtrip(T, V, HexString) -> Enc = asn1_test_lib:hex_to_bin(HexString), Enc = asn1_test_lib:roundtrip_enc('H323-MESSAGES', T, V), diff --git a/lib/asn1/test/testChoPrim.erl b/lib/asn1/test/testChoPrim.erl index 573c482f2b..61b6ab2d05 100644 --- a/lib/asn1/test/testChoPrim.erl +++ b/lib/asn1/test/testChoPrim.erl @@ -31,10 +31,10 @@ bool(Rules) -> roundtrip('ChoCon', {int2,233}), case Rules of ber -> - {error,{asn1,{invalid_choice_type,wrong}}} = - (catch 'ChoPrim':encode('ChoCon', {wrong,233})), - {error,{asn1,{invalid_choice_tag,_WrongTag}}} = - (catch 'ChoPrim':decode('ChoCon', <<131,2,0,233>>)); + {error,{asn1,{{invalid_choice_type,wrong},[_|_]}}} = + (catch 'ChoPrim':encode('ChoCon', {wrong,233})), + {error,{asn1,{{invalid_choice_tag,_WrongTag},[_|_]}}} = + (catch 'ChoPrim':decode('ChoCon', <<131,2,0,233>>)); per -> ok; uper -> diff --git a/lib/asn1/test/testContextSwitchingTypes.erl b/lib/asn1/test/testContextSwitchingTypes.erl index 10012908a9..5688d8afd6 100644 --- a/lib/asn1/test/testContextSwitchingTypes.erl +++ b/lib/asn1/test/testContextSwitchingTypes.erl @@ -90,5 +90,6 @@ check_object_identifier(Tuple) when is_tuple(Tuple) -> enc_dec(T, V0) -> M = 'ContextSwitchingTypes', {ok,Enc} = M:encode(T, V0), + asn1_test_lib:map_roundtrip(M, T, Enc), {ok,V} = M:decode(T, Enc), V. diff --git a/lib/asn1/test/testInfObj.erl b/lib/asn1/test/testInfObj.erl index 5a9f47d865..c519c70cdf 100644 --- a/lib/asn1/test/testInfObj.erl +++ b/lib/asn1/test/testInfObj.erl @@ -197,5 +197,6 @@ roundtrip(M, T, V) -> enc_dec(M, T, V0) -> {ok,Enc} = M:encode(T, V0), + asn1_test_lib:map_roundtrip(M, T, Enc), {ok,V} = M:decode(T, Enc), V. 
diff --git a/lib/asn1/test/testInfObjectClass.erl b/lib/asn1/test/testInfObjectClass.erl index 560986fac9..540407fa51 100644 --- a/lib/asn1/test/testInfObjectClass.erl +++ b/lib/asn1/test/testInfObjectClass.erl @@ -33,19 +33,29 @@ main(Rule) -> roundtrip('Seq', Val), %% OTP-5783 - {error,{asn1,{'Type not compatible with table constraint', - {component,'ArgumentType'}, - {value,_},_}}} = 'InfClass':encode('Seq', {'Seq',12,13,1}), + {'Type not compatible with table constraint', + {component,'ArgumentType'}, + {value,_},_} = enc_error('Seq', {'Seq',12,13,1}), Bytes2 = case Rule of ber -> <<48,9,2,1,12,2,1,11,2,1,1>>; _ -> <<1,12,1,11,1,1>> end, - {error,{asn1,{'Type not compatible with table constraint', - {{component,_}, - {value,_B},_}}}} = 'InfClass':decode('Seq', Bytes2), + {'Type not compatible with table constraint', + {{component,_}, + {value,_B},_}} = dec_error('Seq', Bytes2), ok. roundtrip(T, V) -> asn1_test_lib:roundtrip('InfClass', T, V). + +enc_error(T, V) -> + {error,{asn1,{Reason,Stk}}} = 'InfClass':encode(T, V), + [{_,_,_,_}|_] = Stk, + Reason. + +dec_error(T, Bin) -> + {error,{asn1,{Reason,Stk}}} = 'InfClass':decode(T, Bin), + [{_,_,_,_}|_] = Stk, + Reason. diff --git a/lib/asn1/test/testMaps.erl b/lib/asn1/test/testMaps.erl new file mode 100644 index 0000000000..45dd2255ba --- /dev/null +++ b/lib/asn1/test/testMaps.erl @@ -0,0 +1,50 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2017. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% %CopyrightEnd% +%% +%% +-module(testMaps). + +-export([main/1]). + +main(_) -> + M = 'Maps', + true = M:maps(), + + true = M:xy1() =:= #{x=>42,y=>17}, + true = M:xy2() =:= #{x=>0,y=>0}, + true = M:xy3() =:= #{x=>0,y=>999}, + true = M:s1() =:= #{xy=>#{x=>100,y=>100}}, + + roundtrip('XY', M:xy1()), + roundtrip('XY', M:xy2()), + roundtrip('XY', M:xy3()), + roundtrip('XY', #{}, #{x=>0,y=>0}), + + roundtrip('S', M:s1()), + roundtrip('S', #{}, #{xy=>#{x=>100,y=>100}}), + roundtrip('S', #{os=><<1,2,3>>}, #{xy=>#{x=>100,y=>100}, + os=><<1,2,3>>}), + + ok. + +roundtrip(Type, Value) -> + roundtrip(Type, Value, Value). + +roundtrip(Type, Value, Expected) -> + asn1_test_lib:roundtrip('Maps', Type, Value, Expected). diff --git a/lib/asn1/test/testMultipleLevels.erl b/lib/asn1/test/testMultipleLevels.erl index c610e59f3d..e9d83665aa 100644 --- a/lib/asn1/test/testMultipleLevels.erl +++ b/lib/asn1/test/testMultipleLevels.erl @@ -24,5 +24,7 @@ main(_) -> Data = {'Top',{short,"abc"},{long,"a long string follows here"}}, - {ok,B} = 'MultipleLevels':encode('Top', Data), - {ok,Data} = 'MultipleLevels':decode('Top', iolist_to_binary(B)). + roundtrip('Top', Data). + +roundtrip(T, V) -> + asn1_test_lib:roundtrip('MultipleLevels', T, V). 
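testMaps:main/1 above exercises the Maps specification added earlier in this patch end to end. A rough shell equivalent of one of those roundtrips, assuming Maps.asn1 is in the current directory and is compiled as the suite does with [per,maps,no_ok_wrapper] (so encode/decode return bare terms):

    1> asn1ct:compile('Maps', [per,maps,no_ok_wrapper]).
    ok
    2> Enc = 'Maps':encode('XY', #{x => 42,y => 17}), is_binary(Enc).
    true
    3> 'Maps':decode('XY', Enc).
    #{x => 42,y => 17}
    4> 'Maps':decode('XY', 'Maps':encode('XY', #{})).
    #{x => 0,y => 0}                 %% defaults are filled in on decode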
diff --git a/lib/asn1/test/testNBAPsystem.erl b/lib/asn1/test/testNBAPsystem.erl index 1af283af42..8d61ca18ce 100644 --- a/lib/asn1/test/testNBAPsystem.erl +++ b/lib/asn1/test/testNBAPsystem.erl @@ -84,7 +84,7 @@ compile(Config, Options) -> M <- ["NBAP-CommonDataTypes.asn", "NBAP-IEs.asn", "NBAP-PDU-Contents.asn", - "NBAP-PDU-Discriptions.asn", + "NBAP-PDU-Descriptions.asn", "NBAP-Constants.asn", "NBAP-Containers.asn"]], asn1_test_lib:compile_all(Fs, Config, Options), @@ -98,16 +98,16 @@ test(_Erule,Config) -> ticket_5812(Config) -> Msg = v_5812(), - {ok,B2} = 'NBAP-PDU-Discriptions':encode('NBAP-PDU', Msg), + {ok,B2} = 'NBAP-PDU-Descriptions':encode('NBAP-PDU', Msg), V = <<0,28,74,0,3,48,0,0,1,0,123,64,41,0,0,0,126,64,35,95,208,2,89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,145,0,1,205,0,0,0,0,2,98,64,1,128>>, ok = compare(V,B2), - {ok,Msg2} = 'NBAP-PDU-Discriptions':decode('NBAP-PDU', B2), + {ok,Msg2} = 'NBAP-PDU-Descriptions':decode('NBAP-PDU', B2), ok = check_record_names(Msg2,Config). enc_audit_req_msg() -> Msg = {initiatingMessage, audit_req_msg()}, - {ok,B} = 'NBAP-PDU-Discriptions':encode('NBAP-PDU', Msg), - {ok,_Msg} = 'NBAP-PDU-Discriptions':decode('NBAP-PDU', B), + {ok,B} = 'NBAP-PDU-Descriptions':encode('NBAP-PDU', Msg), + {ok,_Msg} = 'NBAP-PDU-Descriptions':decode('NBAP-PDU', B), {initiatingMessage, #'InitiatingMessage'{value=#'AuditRequest'{protocolIEs=[{_,114,ignore,_}], protocolExtensions = asn1_NOVALUE}}} = _Msg, @@ -116,8 +116,8 @@ enc_audit_req_msg() -> cell_setup_req_msg_test() -> Msg = {initiatingMessage, cell_setup_req_msg()}, - {ok,B} = 'NBAP-PDU-Discriptions':encode('NBAP-PDU', Msg), - {ok,_Msg} = 'NBAP-PDU-Discriptions':decode('NBAP-PDU', B), + {ok,B} = 'NBAP-PDU-Descriptions':encode('NBAP-PDU', Msg), + {ok,_Msg} = 'NBAP-PDU-Descriptions':decode('NBAP-PDU', B), io:format("Msg: ~P~n~n_Msg: ~P~n",[Msg,15,_Msg,15]), ok. diff --git a/lib/asn1/test/testPrim.erl b/lib/asn1/test/testPrim.erl index 96a2dd6c79..b2933dfabc 100644 --- a/lib/asn1/test/testPrim.erl +++ b/lib/asn1/test/testPrim.erl @@ -34,15 +34,12 @@ bool(Rules) -> Types = ['Bool','BoolCon','BoolPri','BoolApp', 'BoolExpCon','BoolExpPri','BoolExpApp'], [roundtrip(T, V) || T <- Types, V <- [true,false]], - case Rules of - ber -> - [begin - {error,{asn1,{encode_boolean,517}}} = enc_error(T, 517) - end || T <- Types], - ok; - _ -> - ok - end. + Tag = case Rules of + ber -> encode_boolean; + _ -> illegal_boolean + end, + [{Tag,517} = enc_error(T, 517) || T <- Types], + ok. 
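The updated bool/1 and int/1 cases above also show that encode failures now carry rule-dependent reason tags (encode_boolean/encode_integer for ber, illegal_boolean/illegal_integer for per and uper) in the same {Reason,StackTrace} shape. For a per-compiled Prim module with the default ok-wrapper this looks roughly like:

    {error,{asn1,{{illegal_boolean,517},_}}} = 'Prim':encode('Bool', 517),
    {error,{asn1,{{illegal_integer,atom},_}}} = 'Prim':encode('Int', atom).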
int(Rules) -> @@ -60,10 +57,22 @@ int(Rules) -> 123456789,12345678901234567890, -1,-2,-3,-4,-100,-127,-255,-256,-257, -1234567890,-2147483648], - [roundtrip(T, V) || - T <- ['Int','IntCon','IntPri','IntApp', - 'IntExpCon','IntExpPri','IntExpApp'], - V <- [1|Values]], + Types = ['Int','IntCon','IntPri','IntApp', + 'IntExpCon','IntExpPri','IntExpApp'], + _ = [roundtrip(T, V) || T <- Types, V <- [1|Values]], + Tag = case Rules of + ber -> encode_integer; + _ -> illegal_integer + end, + _ = [{Tag,V} = enc_error(T, V) || + T <- Types, V <- [atom,42.0,{a,b,c}]], + case Rules of + ber -> + ok; + _ -> + _ = [{Tag,V} = enc_error('IntConstrained', V) || + V <- [atom,-1,256,42.0]] + end, %%========================================================== %% IntEnum ::= INTEGER {first(1),last(31)} @@ -119,7 +128,11 @@ enum(Rules) -> roundtrip('Enum', monday), roundtrip('Enum', thursday), - {error,{asn1,{_,4}}} = enc_error('Enum', 4), + Tag = case Rules of + ber -> enumerated_not_in_range; + _ -> illegal_enumerated + end, + {Tag,4} = enc_error('Enum', 4), case Rules of Per when Per =:= per; Per =:= uper -> @@ -182,13 +195,15 @@ roundtrip(Type, Value, ExpectedValue) -> enc_error(T, V) -> case get(no_ok_wrapper) of false -> - 'Prim':encode(T, V); + {error,{asn1,{Reason,Stk}}} = 'Prim':encode(T, V), + [{_,_,_,_}|_] = Stk, + Reason; true -> try 'Prim':encode(T, V) of _ -> ?t:fail() catch - _:Reason -> + _:{error,{asn1,Reason}} -> Reason end end. diff --git a/lib/asn1/test/testRfcs.erl b/lib/asn1/test/testRfcs.erl index da7333ef98..20176e35eb 100644 --- a/lib/asn1/test/testRfcs.erl +++ b/lib/asn1/test/testRfcs.erl @@ -35,22 +35,27 @@ compile(Config, Erules, Options0) -> asn1_test_lib:compile_all(Specs, Config, [Erules,{i,CaseDir}|Options]). test() -> - {1,3,6,1,5,5,7,48,1,2} = - IdPkixOcspNonce = - 'OCSP-2009':'id-pkix-ocsp-nonce'(), - roundtrip('OCSP-2009', 'OCSPRequest', - {'OCSPRequest', - {'TBSRequest', - 0, - {rfc822Name,"name string"}, - [{'Request', - {'CertID',{'_',{2,9,3,4,5},asn1_NOVALUE}, - <<"POTATOHASH">>,<<"HASHBROWN">>,42}, - [{'_',IdPkixOcspNonce,true,<<34,159,16,57,199>>}]}], - asn1_NOVALUE}, - asn1_NOVALUE}), - otp_7759(), - ok. + M = 'OCSP-2009', + case M:maps() of + false -> + {1,3,6,1,5,5,7,48,1,2} = + IdPkixOcspNonce = + 'OCSP-2009':'id-pkix-ocsp-nonce'(), + roundtrip('OCSP-2009', 'OCSPRequest', + {'OCSPRequest', + {'TBSRequest', + 0, + {rfc822Name,"name string"}, + [{'Request', + {'CertID',{'_',{2,9,3,4,5},asn1_NOVALUE}, + <<"POTATOHASH">>,<<"HASHBROWN">>,42}, + [{'_',IdPkixOcspNonce,true,<<34,159,16,57,199>>}]}], + asn1_NOVALUE}, + asn1_NOVALUE}), + otp_7759(records); + true -> + otp_7759(maps) + end. roundtrip(Module, Type, Value0) -> Enc = Module:encode(Type, Value0), @@ -58,7 +63,7 @@ roundtrip(Module, Type, Value0) -> asn1_test_lib:match_value(Value0, Value1), ok. -otp_7759() -> +otp_7759(Pack) -> %% The release note for asn-1.6.6 says: %% Decode of an open_type when the value was empty tagged %% type encoded with indefinite length failed. @@ -66,10 +71,15 @@ otp_7759() -> Encoded = encoded_msg(), ContentInfo = Mod:decode('ContentInfo', Encoded), io:format("~p\n", [ContentInfo]), - {'ContentInfo',_Id,PKCS7_content} = ContentInfo, - X = Mod:decode('SignedData', PKCS7_content), + Content = case ContentInfo of + {'ContentInfo',_Id,Content0} when Pack =:= records -> + Content0; + #{'content-type':=_,'pkcs7-content':=Content0} + when Pack =:= maps -> + Content0 + end, + X = Mod:decode('SignedData', Content), io:format("~p\n", [X]), - io:nl(), ok. 
encoded_msg() -> diff --git a/lib/asn1/test/testSeqExtension.erl b/lib/asn1/test/testSeqExtension.erl index f7885cb002..be1d1c2490 100644 --- a/lib/asn1/test/testSeqExtension.erl +++ b/lib/asn1/test/testSeqExtension.erl @@ -31,6 +31,7 @@ -record('SeqExt4',{bool, int}). -record('SeqExt5',{name, shoesize}). -record('SeqExt6',{i1,i2,i3,i4,i5,i6,i7}). +-record('SeqExt7',{a=asn1_NOVALUE,b=asn1_NOVALUE,c}). -record('SuperSeq',{s1,s2,s3,s4,s5,s6,i}). main(Erule, DataDir, Opts) -> @@ -45,8 +46,35 @@ main(Erule, DataDir, Opts) -> roundtrip('SeqExt4', #'SeqExt4'{bool=true,int=12345}), roundtrip('SeqExt4', #'SeqExt4'{bool=false,int=123456}), + case Erule of + ber -> + %% BER currently does not handle Extension Addition Groups + %% correctly. + ok; + _ -> + v_roundtrip3('SeqExt5', #'SeqExt5'{name=asn1_NOVALUE, + shoesize=asn1_NOVALUE}, + Erule, #{per=>"00", + uper=>"00"}), + v_roundtrip3('SeqExt7', #'SeqExt7'{c=asn1_NOVALUE}, + Erule, #{per=>"00", + uper=>"00"}) + end, roundtrip('SeqExt5', #'SeqExt5'{name = <<"Arne">>,shoesize=47}), + v_roundtrip3('SeqExt7', #'SeqExt7'{c=false}, + Erule, #{per=>"80800100", + uper=>"80808000"}), + v_roundtrip3('SeqExt7', #'SeqExt7'{c=true}, + Erule, #{per=>"80800120", + uper=>"80809000"}), + v_roundtrip3('SeqExt7', #'SeqExt7'{a=777,b = <<16#AA>>,c=false}, + Erule, #{per=>"808006C0 030901AA 00", + uper=>"8082E061 20354000"}), + v_roundtrip3('SeqExt7', #'SeqExt7'{a=8888,c=false}, + Erule, #{per=>"80800480 22B800", + uper=>"8081C457 0000"}), + %% Encode a value with this version of the specification. BigInt = 128638468966, SuperSeq = #'SuperSeq'{s1=#'SeqExt1'{}, @@ -106,6 +134,7 @@ main(Erule, DataDir, Opts) -> v_roundtrip2(Erule, 'SeqExt130', list_to_tuple(['SeqExt130'| lists:duplicate(129, asn1_NOVALUE)++[199]])), + ok. roundtrip(Type, Value) -> @@ -118,6 +147,15 @@ v_roundtrip2(Erule, Type, Value) -> roundtrip2(Type, Value) -> asn1_test_lib:roundtrip_enc('SeqExtension2', Type, Value). +v_roundtrip3(Type, Value, Erule, Map) -> + case maps:find(Erule, Map) of + {ok,Hex} -> + Encoded = asn1_test_lib:hex_to_bin(Hex), + Encoded = asn1_test_lib:roundtrip_enc('SeqExtension', Type, Value); + error -> + asn1_test_lib:roundtrip('SeqExtension', Type, Value) + end. + v(ber, 'SeqExt66') -> "30049F41 017D"; v(per, 'SeqExt66') -> "C0420000 00000000 00004001 FA"; v(uper, 'SeqExt66') -> "D0800000 00000000 00101FA0"; diff --git a/lib/asn1/test/testTCAP.erl b/lib/asn1/test/testTCAP.erl index 422ae1f0fc..a6f0f9fad7 100644 --- a/lib/asn1/test/testTCAP.erl +++ b/lib/asn1/test/testTCAP.erl @@ -92,5 +92,6 @@ test_asn1config() -> enc_dec(T, V0) -> M = 'TCAPPackage', {ok,Enc} = M:encode(T, V0), + asn1_test_lib:map_roundtrip(M, T, Enc), {ok,V} = M:decode(T, Enc), V. diff --git a/lib/asn1/test/testTimer.erl b/lib/asn1/test/testTimer.erl index bd8da85735..3edeb1b712 100644 --- a/lib/asn1/test/testTimer.erl +++ b/lib/asn1/test/testTimer.erl @@ -25,7 +25,42 @@ -define(times, 5000). 
-val() -> +go() -> + Module = 'H323-MESSAGES', + Type = 'H323-UserInformation', + Value = case Module:maps() of + false -> val_records(); + true -> val_maps() + end, + Bytes = Module:encode(Type, Value), + Value = Module:decode(Type, Bytes), + + {ValWr,done} = timer:tc(fun() -> encode(?times, Module, Type, Value) end), + io:format("ASN.1 encoding: ~p micro~n", [ValWr / ?times]), + + done = decode(2, Module, Type, Bytes), + + {ValRead,done} = timer:tc(fun() -> decode(?times, Module, Type, Bytes) end), + io:format("ASN.1 decoding: ~p micro~n", [ValRead /?times]), + + Comment = "encode: "++integer_to_list(round(ValWr/?times)) ++ + " micro, decode: "++integer_to_list(round(ValRead /?times)) ++ + " micro. [" ++ atom_to_list(Module:encoding_rule()) ++ "]", + {comment,Comment}. + +encode(0, _Module,_Type,_Value) -> + done; +encode(N, Module,Type,Value) -> + Module:encode(Type, Value), + encode(N-1, Module, Type, Value). + +decode(0, _Module, _Type, _Value) -> + done; +decode(N, Module, Type, Value) -> + Module:decode(Type, Value), + decode(N-1, Module, Type, Value). + +val_records() -> {'H323-UserInformation',{'H323-UU-PDU', {callProceeding, {'CallProceeding-UUIE', @@ -126,34 +161,66 @@ val() -> {'H323-UserInformation_user-data',24,<<"O">>}}. -go() -> - Module = 'H323-MESSAGES', - Type = 'H323-UserInformation', - Value = val(), - Bytes = Module:encode(Type, Value), - Value = Module:decode(Type, Bytes), - - {ValWr,done} = timer:tc(fun() -> encode(?times, Module, Type, Value) end), - io:format("ASN.1 encoding: ~p micro~n", [ValWr / ?times]), - - done = decode(2, Module, Type, Bytes), - - {ValRead,done} = timer:tc(fun() -> decode(?times, Module, Type, Bytes) end), - io:format("ASN.1 decoding: ~p micro~n", [ValRead /?times]), - - Comment = "encode: "++integer_to_list(round(ValWr/?times)) ++ - " micro, decode: "++integer_to_list(round(ValRead /?times)) ++ - " micro. [" ++ atom_to_list(Module:encoding_rule()) ++ "]", - {comment,Comment}. - -encode(0, _Module,_Type,_Value) -> - done; -encode(N, Module,Type,Value) -> - Module:encode(Type, Value), - encode(N-1, Module, Type, Value). - -decode(0, _Module, _Type, _Value) -> - done; -decode(N, Module, Type, Value) -> - Module:decode(Type, Value), - decode(N-1, Module, Type, Value). 
+val_maps() -> +#{'h323-uu-pdu' => #{h245Control => [], + h245Tunneling => true, + 'h323-message-body' => {callProceeding,#{callIdentifier => #{guid => <<"OCTET STRINGOCTE">>}, + cryptoTokens => [{cryptoGKPwdEncr,#{algorithmOID => {1,18,467,467}, + encryptedData => <<"OC">>, + paramS => #{iv8 => <<"OCTET ST">>, + ranInt => -7477016}}}, + {cryptoGKPwdEncr,#{algorithmOID => {1,19,486,486}, + encryptedData => <<>>, + paramS => #{iv8 => <<"OCTET ST">>, + ranInt => -2404513}}}], + destinationInfo => #{gatekeeper => #{nonStandardData => #{data => <<"O">>, + nonStandardIdentifier => {object,{0,10,260}}}}, + gateway => #{nonStandardData => #{data => <<"O">>, + nonStandardIdentifier => {object,{0,13,326}}}, + protocol => [{h320,#{dataRatesSupported => [#{channelMultiplier => 78, + channelRate => 1290470518, + nonStandardData => #{data => <<"O">>, + nonStandardIdentifier => {object,{0,11,295}}}}], + nonStandardData => #{data => <<"O">>, + nonStandardIdentifier => {object,{0,11,282}}}, + supportedPrefixes => [#{nonStandardData => #{data => <<"O">>, + nonStandardIdentifier => {object,{0,12,312}}}, + prefix => {'h323-ID',"BM"}}]}}]}, + mc => true, + mcu => #{nonStandardData => #{data => <<"OC">>, + nonStandardIdentifier => {object,{1,13,340,340}}}}, + nonStandardData => #{data => <<"O">>,nonStandardIdentifier => {object,{0,9,237}}}, + terminal => #{nonStandardData => #{data => <<"OC">>, + nonStandardIdentifier => {object,{1,14,353,354}}}}, + undefinedNode => true, + vendor => #{productId => <<"OC">>, + vendor => #{manufacturerCode => 16282, + t35CountryCode => 62, + t35Extension => 63}, + versionId => <<"OC">>}}, + fastStart => [], + h245Address => {ipxAddress,#{netnum => <<"OCTE">>, + node => <<"OCTET ">>, + port => <<"OC">>}}, + h245SecurityMode => {noSecurity,'NULL'}, + protocolIdentifier => {0,8,222}, + tokens => [#{certificate => #{certificate => <<"OC">>,type => {1,16,405,406}}, + challenge => <<"OCTET STR">>, + dhkey => #{generator => <<1:1>>,halfkey => <<1:1>>,modSize => <<1:1>>}, + generalID => "BMP", + nonStandard => #{data => <<"OC">>,nonStandardIdentifier => {1,16,414,415}}, + password => "BM", + random => -26430296, + timeStamp => 1667517741}, + #{certificate => #{certificate => <<"OC">>,type => {1,17,442,443}}, + challenge => <<"OCTET STRI">>, + dhkey => #{generator => <<1:1>>,halfkey => <<1:1>>,modSize => <<1:1>>}, + generalID => "BMP", + nonStandard => #{data => <<"OC">>,nonStandardIdentifier => {1,18,452,452}}, + password => "BMP", + random => -16356110, + timeStamp => 1817656756}]}}, + h4501SupplementaryService => [], + nonStandardControl => [], + nonStandardData => #{data => <<>>,nonStandardIdentifier => {object,{0,3,84}}}}, + 'user-data' => #{'protocol-discriminator' => 24,'user-information' => <<"O">>}}. diff --git a/lib/asn1/test/testUniqueObjectSets.erl b/lib/asn1/test/testUniqueObjectSets.erl index 4d3ec94391..30cbceb577 100644 --- a/lib/asn1/test/testUniqueObjectSets.erl +++ b/lib/asn1/test/testUniqueObjectSets.erl @@ -27,6 +27,7 @@ seq_roundtrip(I, D0) -> M = 'UniqueObjectSets', try {ok,Enc} = M:encode('Seq', {'Seq',I,D0}), + asn1_test_lib:map_roundtrip(M, 'Seq', Enc), {ok,{'Seq',I,D}} = M:decode('Seq', Enc), D catch C:E -> diff --git a/lib/asn1/test/test_compile_options.erl b/lib/asn1/test/test_compile_options.erl index ac74470537..c15e61550c 100644 --- a/lib/asn1/test/test_compile_options.erl +++ b/lib/asn1/test/test_compile_options.erl @@ -24,8 +24,8 @@ -include_lib("common_test/include/ct.hrl"). 
--export([wrong_path/1,comp/2,path/1,ticket_6143/1,noobj/1, - record_name_prefix/1,verbose/1]). +-export([wrong_path/1,comp/2,path/1,noobj/1, + record_name_prefix/1,verbose/1,maps/1]). %% OTP-5689 wrong_path(Config) -> @@ -64,8 +64,6 @@ path(Config) -> file:set_cwd(CWD), ok. -ticket_6143(Config) -> asn1_test_lib:compile("AA1", Config, []). - noobj(Config) -> DataDir = proplists:get_value(data_dir,Config), OutDir = proplists:get_value(priv_dir,Config), @@ -130,6 +128,28 @@ verbose(Config) when is_list(Config) -> [] = test_server:capture_get(), ok. +maps(Config) -> + DataDir = proplists:get_value(data_dir, Config), + OutDir = proplists:get_value(case_dir, Config), + InFile = filename:join(DataDir, "P-Record"), + + do_maps(ber, InFile, OutDir), + do_maps(per, InFile, OutDir), + do_maps(uper, InFile, OutDir). + +do_maps(Erule, InFile, OutDir) -> + Opts = [Erule,maps,{outdir,OutDir}], + ok = asn1ct:compile(InFile, Opts), + + %% Make sure that no .hrl files are generated. + [] = filelib:wildcard(filename:join(OutDir, "*.hrl")), + + %% Remove all generated files. + All = filelib:wildcard(filename:join(OutDir, "*")), + _ = [file:delete(N) || N <- All], + + ok. + outfiles_check(OutDir) -> outfiles_check(OutDir,outfiles1()). diff --git a/lib/common_test/doc/src/Makefile b/lib/common_test/doc/src/Makefile index e495f587a3..152ece5d25 100644 --- a/lib/common_test/doc/src/Makefile +++ b/lib/common_test/doc/src/Makefile @@ -53,7 +53,8 @@ XML_REF3_FILES = ct.xml \ ct_slave.xml \ ct_property_test.xml \ ct_netconfc.xml \ - ct_hooks.xml + ct_hooks.xml \ + ct_testspec.xml XML_REF6_FILES = common_test_app.xml XML_PART_FILES = part.xml diff --git a/lib/common_test/doc/src/common_test_app.xml b/lib/common_test/doc/src/common_test_app.xml index 48ffe653e4..d407a0a53f 100644 --- a/lib/common_test/doc/src/common_test_app.xml +++ b/lib/common_test/doc/src/common_test_app.xml @@ -224,7 +224,9 @@ </type> <desc> - <p>OPTIONAL</p> + <p>OPTIONAL; if this function is defined, then <seealso + marker="#Module:end_per_suite-1"><c>end_per_suite/1</c></seealso> + must also be defined.</p> <p>This configuration function is called as the first function in the suite. It typically contains initializations that are common for @@ -256,7 +258,9 @@ </type> <desc> - <p>OPTIONAL</p> + <p>OPTIONAL; if this function is defined, then <seealso + marker="#Module:init_per_suite-1"><c>init_per_suite/1</c></seealso> + must also be defined.</p> <p>This function is called as the last test case in the suite. It is meant to be used for cleaning up after @@ -360,7 +364,9 @@ </type> <desc> - <p>OPTIONAL</p> + <p>OPTIONAL; if this function is defined, then <seealso + marker="#Module:end_per_group-2"><c>end_per_group/2</c></seealso> + must also be defined.</p> <p>This configuration function is called before execution of a test case group. It typically contains initializations that are @@ -396,7 +402,9 @@ </type> <desc> - <p>OPTIONAL</p> + <p>OPTIONAL; if this function is defined, then <seealso + marker="#Module:init_per_group-2"><c>init_per_group/2</c></seealso> + must also be defined.</p> <p>This function is called after the execution of a test case group is finished. It is meant to be used for cleaning up after @@ -427,7 +435,10 @@ </type> <desc> - <p>OPTIONAL</p> + <p>OPTIONAL; if this function is defined, + then <seealso marker="#Module:end_per_testcase-2"> + <c>end_per_testcase/2</c></seealso> must also be + defined.</p> <p>This function is called before each test case. 
Argument <c>TestCase</c> is the test case name, and @@ -454,7 +465,10 @@ </type> <desc> - <p>OPTIONAL</p> + <p>OPTIONAL; if this function is defined, + then <seealso marker="#Module:init_per_testcase-2"> + <c>init_per_testcase/2</c></seealso> must also be + defined.</p> <p>This function is called after each test case, and can be used to clean up after diff --git a/lib/common_test/doc/src/ct_hooks.xml b/lib/common_test/doc/src/ct_hooks.xml index c2cf29c530..a085f30262 100644 --- a/lib/common_test/doc/src/ct_hooks.xml +++ b/lib/common_test/doc/src/ct_hooks.xml @@ -208,9 +208,10 @@ </func> <func> - <name>Module:pre_init_per_group(GroupName, InitData, CTHState) -> Result</name> + <name>Module:pre_init_per_group(SuiteName, GroupName, InitData, CTHState) -> Result</name> <fsummary>Called before init_per_group.</fsummary> <type> + <v>SuiteName = atom()</v> <v>GroupName = atom()</v> <v>InitData = Config | SkipOrFail</v> <v>Config = NewConfig = [{Key,Value}]</v> @@ -231,13 +232,19 @@ but for function <seealso marker="common_test#Module:init_per_group-2"><c>init_per_group</c></seealso> instead.</p> + + <p>If <c>Module:pre_init_per_group/4</c> is not exported, common_test + will attempt to call <c>Module:pre_init_per_group(GroupName, + InitData, CTHState)</c> instead. This is for backwards + compatibility.</p> </desc> </func> <func> - <name>Module:post_init_per_group(GroupName, Config, Return, CTHState) -> Result</name> + <name>Module:post_init_per_group(SuiteName, GroupName, Config, Return, CTHState) -> Result</name> <fsummary>Called after init_per_group.</fsummary> <type> + <v>SuiteName = atom()</v> <v>GroupName = atom()</v> <v>Config = [{Key,Value}]</v> <v>Return = NewReturn = Config | SkipOrFail | term()</v> @@ -258,13 +265,19 @@ but for function <seealso marker="common_test#Module:init_per_group-2"><c>init_per_group</c></seealso> instead.</p> + + <p>If <c>Module:post_init_per_group/5</c> is not exported, common_test + will attempt to call <c>Module:post_init_per_group(GroupName, + Config, Return, CTHState)</c> instead. This is for backwards + compatibility.</p> </desc> </func> <func> - <name>Module:pre_init_per_testcase(TestcaseName, InitData, CTHState) -> Result</name> + <name>Module:pre_init_per_testcase(SuiteName, TestcaseName, InitData, CTHState) -> Result</name> <fsummary>Called before init_per_testcase.</fsummary> <type> + <v>SuiteName = atom()</v> <v>TestcaseName = atom()</v> <v>InitData = Config | SkipOrFail</v> <v>Config = NewConfig = [{Key,Value}]</v> @@ -286,6 +299,11 @@ <seealso marker="common_test#Module:init_per_testcase-2"><c>init_per_testcase</c></seealso> instead.</p> + <p>If <c>Module:pre_init_per_testcase/4</c> is not exported, common_test + will attempt to call <c>Module:pre_init_per_testcase(TestcaseName, + InitData, CTHState)</c> instead. This is for backwards + compatibility.</p> + <p>CTHs cannot be added here right now. 
That feature may be added in a later release, but it would right now break backwards compatibility.</p> @@ -293,9 +311,10 @@ </func> <func> - <name>Module:post_init_per_testcase(TestcaseName, Config, Return, CTHState) -> Result</name> + <name>Module:post_init_per_testcase(SuiteName, TestcaseName, Config, Return, CTHState) -> Result</name> <fsummary>Called after init_per_testcase.</fsummary> <type> + <v>SuiteName = atom()</v> <v>TestcaseName = atom()</v> <v>Config = [{Key,Value}]</v> <v>Return = NewReturn = Config | SkipOrFail | term()</v> @@ -316,15 +335,21 @@ but for function <seealso marker="common_test#Module:init_per_testcase-2"><c>init_per_testcase</c></seealso> instead.</p> + + <p>If <c>Module:post_init_per_testcase/5</c> is not exported, common_test + will attempt to call <c>Module:post_init_per_testcase(TestcaseName, + Config, Return, CTHState)</c> instead. This is for backwards + compatibility.</p> </desc> </func> <func> - <name>Module:pre_end_per_testcase(TestcaseName, InitData, CTHState) -> Result</name> + <name>Module:pre_end_per_testcase(SuiteName, TestcaseName, EndData, CTHState) -> Result</name> <fsummary>Called before end_per_testcase.</fsummary> <type> + <v>SuiteName = atom()</v> <v>TestcaseName = atom()</v> - <v>InitData = Config</v> + <v>EndData = Config</v> <v>Config = NewConfig = [{Key,Value}]</v> <v>CTHState = NewCTHState = term()</v> <v>Result = {NewConfig, NewCTHState}</v> @@ -345,14 +370,20 @@ <p>This function can not change the result of the test case by returning skip or fail tuples, but it may insert items in <c>Config</c> that can be read in - <c>end_per_testcase/2</c> or in <c>post_end_per_testcase/4</c>.</p> + <c>end_per_testcase/2</c> or in <c>post_end_per_testcase/5</c>.</p> + + <p>If <c>Module:pre_end_per_testcase/4</c> is not exported, common_test + will attempt to call <c>Module:pre_end_per_testcase(TestcaseName, + EndData, CTHState)</c> instead. This is for backwards + compatibility.</p> </desc> </func> <func> - <name>Module:post_end_per_testcase(TestcaseName, Config, Return, CTHState) -> Result</name> + <name>Module:post_end_per_testcase(SuiteName, TestcaseName, Config, Return, CTHState) -> Result</name> <fsummary>Called after end_per_testcase.</fsummary> <type> + <v>SuiteName = atom()</v> <v>TestcaseName = atom()</v> <v>Config = [{Key,Value}]</v> <v>Return = NewReturn = Config | SkipOrFail | term()</v> @@ -373,13 +404,19 @@ but for function <seealso marker="common_test#Module:end_per_testcase-2"><c>end_per_testcase</c></seealso> instead.</p> + + <p>If <c>Module:post_end_per_testcase/5</c> is not exported, common_test + will attempt to call <c>Module:post_end_per_testcase(TestcaseName, + Config, Return, CTHState)</c> instead. This is for backwards + compatibility.</p> </desc> </func> <func> - <name>Module:pre_end_per_group(GroupName, EndData, CTHState) -> Result</name> + <name>Module:pre_end_per_group(SuiteName, GroupName, EndData, CTHState) -> Result</name> <fsummary>Called before end_per_group.</fsummary> <type> + <v>SuiteName = atom()</v> <v>GroupName = atom()</v> <v>EndData = Config | SkipOrFail</v> <v>Config = NewConfig = [{Key,Value}]</v> @@ -400,13 +437,19 @@ but for function <seealso marker="common_test#Module:end_per_group-2"><c>end_per_group</c></seealso> instead.</p> + + <p>If <c>Module:pre_end_per_group/4</c> is not exported, common_test + will attempt to call <c>Module:pre_end_per_group(GroupName, + EndData, CTHState)</c> instead. 
This is for backwards + compatibility.</p> </desc> </func> <func> - <name>Module:post_end_per_group(GroupName, Config, Return, CTHState) -> Result</name> + <name>Module:post_end_per_group(SuiteName, GroupName, Config, Return, CTHState) -> Result</name> <fsummary>Called after end_per_group.</fsummary> <type> + <v>SuiteName = atom()</v> <v>GroupName = atom()</v> <v>Config = [{Key,Value}]</v> <v>Return = NewReturn = Config | SkipOrFail | term()</v> @@ -427,6 +470,11 @@ but for function <seealso marker="common_test#Module:end_per_group-2">end_per_group</seealso> instead.</p> + + <p>If <c>Module:post_end_per_group/5</c> is not exported, common_test + will attempt to call <c>Module:post_end_per_group(GroupName, + Config, Return, CTHState)</c> instead. This is for backwards + compatibility.</p> </desc> </func> @@ -485,9 +533,10 @@ </func> <func> - <name>Module:on_tc_fail(TestName, Reason, CTHState) -> NewCTHState</name> + <name>Module:on_tc_fail(SuiteName, TestName, Reason, CTHState) -> NewCTHState</name> <fsummary>Called after the CTH scope ends.</fsummary> <type> + <v>SuiteName = atom()</v> <v>TestName = init_per_suite | end_per_suite | {init_per_group,GroupName} | {end_per_group,GroupName} | {FuncName,GroupName} | FuncName</v> <v>FuncName = atom()</v> <v>GroupName = atom()</v> @@ -505,7 +554,7 @@ <item><p>If <c>init_per_suite</c> fails, this function is called after <seealso marker="#Module:post_init_per_suite-4"><c>post_init_per_suite</c></seealso>.</p></item> <item><p>If a test case fails, this function is called after - <seealso marker="#Module:post_end_per_testcase-4"><c>post_end_per_testcase</c></seealso>.</p></item> + <seealso marker="#Module:post_end_per_testcase-5"><c>post_end_per_testcase</c></seealso>.</p></item> </list> <p>If the failed test case belongs to a test case group, the first @@ -519,13 +568,19 @@ For details, see section <seealso marker="event_handler_chapter#events">Event Handling</seealso> in the User's Guide.</p> + + <p>If <c>Module:on_tc_fail/4</c> is not exported, common_test + will attempt to call <c>Module:on_tc_fail(TestName, Reason, + CTHState)</c> instead. 
This is for backwards + compatibility.</p> </desc> </func> <func> - <name>Module:on_tc_skip(TestName, Reason, CTHState) -> NewCTHState</name> + <name>Module:on_tc_skip(SuiteName, TestName, Reason, CTHState) -> NewCTHState</name> <fsummary>Called after the CTH scope ends.</fsummary> <type> + <v>SuiteName = atom()</v> <v>TestName = init_per_suite | end_per_suite | {init_per_group,GroupName} | {end_per_group,GroupName} | {FuncName,GroupName} | FuncName</v> <v>FuncName = atom()</v> <v>GroupName = atom()</v> @@ -542,9 +597,9 @@ <list type="bulleted"> <item><p>If <c>init_per_group</c> is skipped, this function is called after - <seealso marker="#Module:post_init_per_group-4"><c>post_init_per_group</c></seealso>.</p></item> + <seealso marker="#Module:post_init_per_group-5"><c>post_init_per_group</c></seealso>.</p></item> <item><p>If a test case is skipped, this function is called after - <seealso marker="#Module:post_end_per_testcase-4"><c>post_end_per_testcase</c></seealso>.</p></item> + <seealso marker="#Module:post_end_per_testcase-5"><c>post_end_per_testcase</c></seealso>.</p></item> </list> <p>If the skipped test case belongs to a test case group, the first @@ -559,6 +614,11 @@ For details, see section <seealso marker="event_handler_chapter#events">Event Handling</seealso> in the User's Guide.</p> + + <p>If <c>Module:on_tc_skip/4</c> is not exported, common_test + will attempt to call <c>Module:on_tc_skip(TestName, Reason, + CTHState)</c> instead. This is for backwards + compatibility.</p> </desc> </func> diff --git a/lib/common_test/doc/src/ct_hooks_chapter.xml b/lib/common_test/doc/src/ct_hooks_chapter.xml index 0e4c35e11f..bfad96e489 100644 --- a/lib/common_test/doc/src/ct_hooks_chapter.xml +++ b/lib/common_test/doc/src/ct_hooks_chapter.xml @@ -38,7 +38,7 @@ extensions of the default behavior of <c>Common Test</c> using hooks before and after all test suite calls. CTHs allow advanced <c>Common Test</c> users to abstract out behavior that is common to multiple test suites - without littering all test suites with library calls. this can be used + without littering all test suites with library calls. This can be used for logging, starting, and monitoring external systems, building C files needed by the tests, and so on.</p> @@ -175,10 +175,10 @@ <row> <cell><seealso marker="common_test#Module:init_per_group-2"> init_per_group/2</seealso></cell> - <cell><seealso marker="ct_hooks#Module:post_init_per_group-4"> - post_init_per_group/4</seealso> is called</cell> - <cell><seealso marker="ct_hooks#Module:post_end_per_suite-4"> - post_end_per_group/4</seealso> has been called for that group</cell> + <cell><seealso marker="ct_hooks#Module:post_init_per_group-5"> + post_init_per_group/5</seealso> is called</cell> + <cell><seealso marker="ct_hooks#Module:post_end_per_group-5"> + post_end_per_group/5</seealso> has been called for that group</cell> </row> <tcaption>Scope of a CTH</tcaption> </table> @@ -245,16 +245,18 @@ </list> <p> - This is done in the CTH functions called pre_<name of function>. - These functions take the same three arguments, <c>Name</c>, + This is done in the CTH functions called <c>pre_<name of function></c>. + These functions take the arguments <c>SuiteName</c>, <c>Name</c> (group or test case name, if applicable), <c>Config</c>, and <c>CTHState</c>. 
The return value of the CTH function is always a combination of a result for the suite/group/test and an updated <c>CTHState</c>.</p> <p>To let the test suite continue on executing, return the configuration - list that you want the test to use as the result. To skip or - fail the test, return a tuple with <c>skip</c> or <c>fail</c>, and a reason - as the result.</p> + list that you want the test to use as the result.</p> + + <p>All pre hooks, except <c>pre_end_per_testcase/4</c>, can + skip or fail the test by returning a tuple with <c>skip</c> or + <c>fail</c>, and a reason as the result.</p> <p><em>Example:</em></p> <code> @@ -290,7 +292,7 @@ <p> This is done in the CTH functions called <c>post_<name of function></c>. - These functions take the same four arguments, <c>Name</c>, + These functions take the arguments <c>SuiteName</c>, <c>Name</c> (group or test case name, if applicable), <c>Config</c>, <c>Return</c>, and <c>CTHState</c>. <c>Config</c> in this case is the same <c>Config</c> as the testcase is called with. <c>Return</c> is the value returned by the testcase. If the testcase @@ -308,7 +310,7 @@ <p><em>Example:</em></p> <code> - post_end_per_testcase(_TC, Config, {'EXIT',{_,_}}, CTHState) -> + post_end_per_testcase(_Suite, _TC, Config, {'EXIT',{_,_}}, CTHState) -> case db:check_consistency() of true -> %% DB is good, pass the test. @@ -317,7 +319,7 @@ %% DB is not good, mark as skipped instead of failing {{skip, "DB is inconsistent!"}, CTHState} end; - post_end_per_testcase(_TC, Config, Return, CTHState) -> + post_end_per_testcase(_Suite, _TC, Config, Return, CTHState) -> %% Do nothing if tc does not crash. {Return, CTHState}.</code> @@ -331,8 +333,8 @@ <title>Skip and Fail Hooks</title> <p> After any post hook has been executed for all installed CTHs, - <seealso marker="ct_hooks#Module:on_tc_fail-3">on_tc_fail</seealso> - or <seealso marker="ct_hooks#Module:on_tc_skip-3">on_tc_skip</seealso> + <seealso marker="ct_hooks#Module:on_tc_fail-4">on_tc_fail</seealso> + or <seealso marker="ct_hooks#Module:on_tc_skip-4">on_tc_skip</seealso> is called if the testcase failed or was skipped, respectively. You cannot affect the outcome of the tests any further at this point. </p> @@ -389,18 +391,18 @@ -export([pre_end_per_suite/3]). -export([post_end_per_suite/4]). - -export([pre_init_per_group/3]). - -export([post_init_per_group/4]). - -export([pre_end_per_group/3]). - -export([post_end_per_group/4]). + -export([pre_init_per_group/4]). + -export([post_init_per_group/5]). + -export([pre_end_per_group/4]). + -export([post_end_per_group/5]). - -export([pre_init_per_testcase/3]). - -export([post_init_per_testcase/4]). - -export([pre_end_per_testcase/3]). - -export([post_end_per_testcase/4]). + -export([pre_init_per_testcase/4]). + -export([post_init_per_testcase/5]). + -export([pre_end_per_testcase/4]). + -export([post_end_per_testcase/5]). - -export([on_tc_fail/3]). - -export([on_tc_skip/3]). + -export([on_tc_fail/4]). + -export([on_tc_skip/4]). -export([terminate/1]). @@ -435,46 +437,46 @@ total = State#state.total + State#state.suite_total } }. %% @doc Called before each init_per_group. - pre_init_per_group(Group,Config,State) -> + pre_init_per_group(Suite,Group,Config,State) -> {Config, State}. %% @doc Called after each init_per_group. - post_init_per_group(Group,Config,Return,State) -> + post_init_per_group(Suite,Group,Config,Return,State) -> {Return, State}. %% @doc Called before each end_per_group. 
- pre_end_per_group(Group,Config,State) -> + pre_end_per_group(Suite,Group,Config,State) -> {Config, State}. %% @doc Called after each end_per_group. - post_end_per_group(Group,Config,Return,State) -> + post_end_per_group(Suite,Group,Config,Return,State) -> {Return, State}. %% @doc Called before each init_per_testcase. - pre_init_per_testcase(TC,Config,State) -> + pre_init_per_testcase(Suite,TC,Config,State) -> {Config, State#state{ ts = now(), total = State#state.suite_total + 1 } }. %% Called after each init_per_testcase (immediately before the test case). - post_init_per_testcase(TC,Config,Return,State) -> + post_init_per_testcase(Suite,TC,Config,Return,State) -> {Return, State} %% @doc Called before each end_per_testcase (immediately after the test case). - pre_end_per_testcase(TC,Config,State) -> + pre_end_per_testcase(Suite,TC,Config,State) -> {Config, State}. %% @doc Called after each end_per_testcase. - post_end_per_testcase(TC,Config,Return,State) -> - TCInfo = {testcase, TC, Return, timer:now_diff(now(), State#state.ts)}, + post_end_per_testcase(Suite,TC,Config,Return,State) -> + TCInfo = {testcase, Suite, TC, Return, timer:now_diff(now(), State#state.ts)}, {Return, State#state{ ts = undefined, tcs = [TCInfo | State#state.tcs] } }. %% @doc Called after post_init_per_suite, post_end_per_suite, post_init_per_group, %% post_end_per_group and post_end_per_testcase if the suite, group or test case failed. - on_tc_fail(TC, Reason, State) -> + on_tc_fail(Suite, TC, Reason, State) -> State. %% @doc Called when a test case is skipped by either user action %% or due to an init function failing. - on_tc_skip(TC, Reason, State) -> + on_tc_skip(Suite, TC, Reason, State) -> State. %% @doc Called when the scope of the CTH is done diff --git a/lib/common_test/doc/src/ct_testspec.xml b/lib/common_test/doc/src/ct_testspec.xml new file mode 100644 index 0000000000..36893f66cf --- /dev/null +++ b/lib/common_test/doc/src/ct_testspec.xml @@ -0,0 +1,84 @@ +<?xml version="1.0" encoding="utf-8" ?> +<!DOCTYPE erlref SYSTEM "erlref.dtd"> + +<erlref> + <header> + <copyright> + <year>2016</year> + <holder>Ericsson AB. All Rights Reserved.</holder> + </copyright> + <legalnotice> + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + </legalnotice> + + <title>ct_testspec</title> + <prepared></prepared> + <responsible></responsible> + <docno></docno> + <approved></approved> + <checked></checked> + <date></date> + <rev>A</rev> + <file>ct_testspec.xml</file> + </header> + <module>ct_testspec</module> + <modulesummary>Parsing of test specifications for Common Test. 
+ </modulesummary> + +<description> + + <p>Parsing of test specifications for <c>Common Test</c>.</p> + + <p>This module exports help functions for parsing of test specifications.</p> + +</description> + + <funcs> + <func> + <name>get_tests(SpecsIn) -> {ok, [{Specs,Tests}]} | {error, Reason}</name> + <fsummary>Parse the given test specification files and return the tests to run and skip.</fsummary> + <type> + <v>SpecsIn = [string()] | [[string()]]</v> + <v>Specs = [string()]</v> + <v>Tests = [{Node,Run,Skip}]</v> + <v>Node = atom()</v> + <v>Run = {Dir,Suites,Cases}</v> + <v>Skip = {Dir,Suites,Comment} | {Dir,Suites,Cases,Comment}</v> + <v>Dir = string()</v> + <v>Suites = atom | [atom()] | all</v> + <v>Cases = atom | [atom()] | all</v> + <v>Comment = string()</v> + <v>Reason = term()</v> + </type> + <desc><marker id="add_nodes-1"/> + <p>Parse the given test specification files and return the + tests to run and skip.</p> + + <p>If <c>SpecsIn=[Spec1,Spec2,...]</c>, separate tests will be + created per specification. If + <c>SpecsIn=[[Spec1,Spec2,...]]</c>, all specifications will be + merged into one test.</p> + + <p>For each test, a <c>{Specs,Tests}</c> element is returned, + where <c>Specs</c> is a list of all included test + specifications, and <c>Tests</c> specifies actual tests to + run/skip per node.</p> + </desc> + </func> + + </funcs> + +</erlref> + + diff --git a/lib/common_test/doc/src/notes.xml b/lib/common_test/doc/src/notes.xml index 83e6511c04..efeacd4a72 100644 --- a/lib/common_test/doc/src/notes.xml +++ b/lib/common_test/doc/src/notes.xml @@ -33,6 +33,123 @@ <file>notes.xml</file> </header> +<section><title>Common_Test 1.14</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p>The following corrections and improvements are done in + the common_test hook handling:</p> <list> <item> <p>An + extra argument, <c>Suite</c>, is added as the first + argument to each of the following hook callback + functions:</p> <list> + <item><c>pre_init_per_group</c></item> + <item><c>post_init_per_group</c></item> + <item><c>pre_end_per_group</c></item> + <item><c>post_end_per_group</c></item> + <item><c>pre_init_per_testcase</c></item> + <item><c>post_init_per_testcase</c></item> + <item><c>pre_end_per_testcase</c></item> + <item><c>post_end_per_testcase</c></item> + <item><c>on_tc_fail</c></item> + <item><c>on_tc_skip</c></item> </list> <p>For backwards + compatibility, if the new function is not exported from a + hook callback module, <c>common_test</c> will fall back + to the old interface and call the function without the + <c>Suite</c> argument.</p> </item> <item> <p>If either + <c>init_per_suite</c> or <c>end_per_suite</c> exists, but + not the other, then the non-existing function will be + reported as failed with reason <c>undef</c> in the test + log. The same goes for <c>init/end_per_group</c>. This + has always been a requirement according to the user's + guide, but now <c>common_test</c> is more explicit in the + report.</p> </item> <item> <p>If <c>init_per_suite</c> + was exported from a test suite, but not + <c>end_per_suite</c>, then <c>pre/post_end_per_suite</c> + was called with <c>Suite=ct_framework</c> instead of the + correct suite name. This is now corrected.</p> </item> + <item> <p>If <c>end_per_group</c> was exported from a + suite, but not <c>init_per_group</c>, then + <c>end_per_group</c> was never called. 
This is now + corrected.</p> </item> <item> <p>Tests that were skipped + before calling <c>pre_init_per_*</c> got faulty calls to + the corresponding <c>post_init_per_*</c>. E.g. if a test + was skipped because <c>suite/0</c> failed, then + <c>post_init_per_suite</c> would be called even though + <c>pre_init_per_suite</c> and <c>init_per_suite</c> were + not called. This is now corrected so a <c>post_*</c> + callback will never be called unless the corresponding + <c>pre_*</c> callback has been called first.</p> </item> + <item> <p>Tests that were skipped before or in + <c>init_per_testcase</c> got faulty calls to + <c>pre_end_per_testcase</c> and + <c>post_end_per_testcase</c>. This is now corrected so + <c>pre/post_end_per_testcase</c> are not called when + <c>end_per_testcase</c> is not called.</p> </item> <item> + <p>If an exit signal causes the test case process to die + while running <c>init_per_testcase</c>, the case was + earlier reported as failed with reason <c>{skip,...}</c>. + This is now corrected so the case will be marked as + skipped.</p> </item> <item> <p>If an exit signal causes + the test case process to die while running + <c>end_per_testcase</c>, the case was earlier marked as + failed. This is now corrected so the status of the test + case is not changed - there is only a warning added to + the comment field.</p> </item> <item> <p>If a test case + was skipped because of option + <c>{force_stop,skip_rest}</c> or because of a failed + sequence, then no <c>tc_start</c> event would be sent, + only <c>tc_done</c>. This is now corrected so both events + are sent.</p> </item> <item> <p>When skipping or failing + in a configuration function, the configuration function + itself would get <c>{auto_skipped,Reason}</c>, + <c>{skipped,Reason}</c> or <c>{failed,Reason}</c> in the + hook callbacks <c>on_tc_skip</c> or <c>on_tc_fail</c>. + The other test cases that were skipped as a result of + this would only get <c>Reason</c> in <c>on_tc_skip</c>. + This is now corrected so even the configuration function + that caused the skip/fail will only get <c>Reason</c> in + the hook callback.</p> </item> </list> + <p> + Own Id: OTP-10599 Aux Id: kunagi-344 [255] </p> + </item> + <item> + <p> + When a test case was skipped by a <c>skip_cases</c> + statement in a test spec, then <c>cth_surefire</c> would + erroneously mark the previous test case as skipped in the + xml report. The actually skipped test case would not be + present in the xml report at all. This is now corrected.</p> + <p> + Own Id: OTP-14129 Aux Id: seq13244 </p> + </item> + <item> + <p>The <c>multiply_timetraps</c> and + <c>scale_timetraps</c> options did not work with test + specifications, which has been corrected.</p> + <p> + Own Id: OTP-14210</p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + ct_testspec:get_tests/1 is added. 
This is used by rebar3 + to get all directories that must be compiled when running + tests from testspec - instead of implementing testspec + parsing in rebar3.</p> + <p> + Own Id: OTP-14132</p> + </item> + </list> + </section> + +</section> + <section><title>Common_Test 1.13</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/common_test/doc/src/ref_man.xml b/lib/common_test/doc/src/ref_man.xml index d1567e2d3c..1ac20db5c2 100644 --- a/lib/common_test/doc/src/ref_man.xml +++ b/lib/common_test/doc/src/ref_man.xml @@ -47,6 +47,7 @@ <xi:include href="ct_slave.xml"/> <xi:include href="ct_hooks.xml"/> <xi:include href="ct_property_test.xml"/> + <xi:include href="ct_testspec.xml"/> </application> diff --git a/lib/common_test/doc/src/write_test_chapter.xml b/lib/common_test/doc/src/write_test_chapter.xml index f70bdb16c5..6a0d87bcaf 100644 --- a/lib/common_test/doc/src/write_test_chapter.xml +++ b/lib/common_test/doc/src/write_test_chapter.xml @@ -566,7 +566,7 @@ for the test cases in the group. After execution of the group is finished, function <seealso marker="common_test#Module:end_per_group-2"><c>end_per_group(GroupName, Config)</c></seealso> is called. This function is meant to be used for cleaning up after - <c>init_per_group/2</c>.</p> + <c>init_per_group/2</c>. If the init function is defined, so must the end function be.</p> <p>Whenever a group is executed, if <c>init_per_group</c> and <c>end_per_group</c> do not exist in the suite, <c>Common Test</c> calls diff --git a/lib/common_test/src/ct_framework.erl b/lib/common_test/src/ct_framework.erl index 291a4d716c..43f1c9de0f 100644 --- a/lib/common_test/src/ct_framework.erl +++ b/lib/common_test/src/ct_framework.erl @@ -52,6 +52,10 @@ %%% %%% @doc Test server framework callback, called by the test_server %%% when a new test case is started. +init_tc(_,{end_per_testcase_not_run,_},[Config]) -> + %% Testcase is completed (skipped or failed), but end_per_testcase + %% is not run - don't call pre-hook. + {ok,[Config]}; init_tc(Mod,EPTC={end_per_testcase,_},[Config]) -> %% in case Mod == ct_framework, lookup the suite name Suite = get_suite_name(Mod, Config), @@ -62,7 +66,7 @@ init_tc(Mod,EPTC={end_per_testcase,_},[Config]) -> Other end; -init_tc(Mod,Func0,Args) -> +init_tc(Mod,Func0,Args) -> %% in case Mod == ct_framework, lookup the suite name Suite = get_suite_name(Mod, Args), {Func,HookFunc} = case Func0 of @@ -84,12 +88,15 @@ init_tc(Mod,Func0,Args) -> andalso Func=/=end_per_group andalso ct_util:get_testdata(skip_rest) of true -> + initialize(false,Mod,Func,Args), {auto_skip,"Repeated test stopped by force_stop option"}; _ -> case ct_util:get_testdata(curr_tc) of {Suite,{suite0_failed,{require,Reason}}} -> + initialize(false,Mod,Func,Args), {auto_skip,{require_failed_in_suite0,Reason}}; {Suite,{suite0_failed,_}=Failure} -> + initialize(false,Mod,Func,Args), {fail,Failure}; _ -> ct_util:update_testdata(curr_tc, @@ -118,16 +125,14 @@ init_tc(Mod,Func0,Args) -> end, init_tc1(Mod,Suite,Func,HookFunc,Args); {failed,Seq,BadFunc} -> - {auto_skip,{sequence_failed,Seq,BadFunc}} + initialize(false,Mod,Func,Args), + {auto_skip,{sequence_failed,Seq,BadFunc}} end end end. 
init_tc1(?MODULE,_,error_in_suite,_,[Config0]) when is_list(Config0) -> - ct_logs:init_tc(false), - ct_event:notify(#event{name=tc_start, - node=node(), - data={?MODULE,error_in_suite}}), + initialize(false,?MODULE,error_in_suite), _ = ct_suite_init(?MODULE,error_in_suite,[],Config0), case ?val(error,Config0) of undefined -> @@ -177,27 +182,21 @@ init_tc1(Mod,Suite,Func,HookFunc,[Config0]) when is_list(Config0) -> ct_config:delete_default_config(testcase), HookFunc end, - Initialize = fun() -> - ct_logs:init_tc(false), - ct_event:notify(#event{name=tc_start, - node=node(), - data={Mod,FuncSpec}}) - end, case add_defaults(Mod,Func,AllGroups) of Error = {suite0_failed,_} -> - Initialize(), + initialize(false,Mod,FuncSpec), ct_util:set_testdata({curr_tc,{Suite,Error}}), {error,Error}; Error = {group0_failed,_} -> - Initialize(), + initialize(false,Mod,FuncSpec), {auto_skip,Error}; Error = {testcase0_failed,_} -> - Initialize(), + initialize(false,Mod,FuncSpec), {auto_skip,Error}; {SuiteInfo,MergeResult} -> case MergeResult of {error,Reason} -> - Initialize(), + initialize(false,Mod,FuncSpec), {fail,Reason}; _ -> init_tc2(Mod,Suite,Func,HookFunc1, @@ -236,11 +235,8 @@ init_tc2(Mod,Suite,Func,HookFunc,SuiteInfo,MergeResult,Config) -> Conns -> ct_util:silence_connections(Conns) end, - ct_logs:init_tc(Func == init_per_suite), FuncSpec = group_or_func(Func,Config), - ct_event:notify(#event{name=tc_start, - node=node(), - data={Mod,FuncSpec}}), + initialize((Func==init_per_suite),Mod,FuncSpec), case catch configure(MergedInfo,MergedInfo,SuiteInfo, FuncSpec,[],Config) of @@ -268,6 +264,18 @@ init_tc2(Mod,Suite,Func,HookFunc,SuiteInfo,MergeResult,Config) -> end end. +initialize(RefreshLogs,Mod,Func,[Config]) when is_list(Config) -> + initialize(RefreshLogs,Mod,group_or_func(Func,Config)); +initialize(RefreshLogs,Mod,Func,_) -> + initialize(RefreshLogs,Mod,Func). + +initialize(RefreshLogs,Mod,FuncSpec) -> + ct_logs:init_tc(RefreshLogs), + ct_event:notify(#event{name=tc_start, + node=node(), + data={Mod,FuncSpec}}). + + ct_suite_init(Suite,HookFunc,PostInitHook,Config) when is_list(Config) -> case ct_hooks:init_tc(Suite,HookFunc,Config) of NewConfig when is_list(NewConfig) -> @@ -675,22 +683,35 @@ end_tc(Mod,Func,{Result,[Args]}, Return) -> end_tc(Mod,Func,self(),Result,Args,Return). end_tc(Mod,IPTC={init_per_testcase,_Func},_TCPid,Result,Args,Return) -> - %% in case Mod == ct_framework, lookup the suite name - Suite = get_suite_name(Mod, Args), - case ct_hooks:end_tc(Suite,IPTC,Args,Result,Return) of - '$ct_no_change' -> - ok; - HookResult -> - HookResult + case end_hook_func(IPTC,Return,IPTC) of + undefined -> ok; + _ -> + %% in case Mod == ct_framework, lookup the suite name + Suite = get_suite_name(Mod, Args), + case ct_hooks:end_tc(Suite,IPTC,Args,Result,Return) of + '$ct_no_change' -> + ok; + HookResult -> + HookResult + end end; end_tc(Mod,Func0,TCPid,Result,Args,Return) -> %% in case Mod == ct_framework, lookup the suite name Suite = get_suite_name(Mod, Args), - {EPTC,Func} = case Func0 of - {end_per_testcase,F} -> {true,F}; - _ -> {false,Func0} - end, + {Func,FuncSpec,HookFunc} = + case Func0 of + {end_per_testcase_not_run,F} -> + %% Testcase is completed (skipped or failed), but + %% end_per_testcase is not run - don't call post-hook. 
+ {F,F,undefined}; + {end_per_testcase,F} -> + {F,F,Func0}; + _ -> + FS = group_or_func(Func0,Args), + HF = end_hook_func(Func0,Return,FS), + {Func0,FS,HF} + end, test_server:timetrap_cancel(), @@ -717,20 +738,18 @@ end_tc(Mod,Func0,TCPid,Result,Args,Return) -> end, ct_util:delete_suite_data(last_saved_config), - {FuncSpec,HookFunc} = - if not EPTC -> - FS = group_or_func(Func,Args), - {FS,FS}; - true -> - {Func,Func0} - end, {Result1,FinalNotify} = - case ct_hooks:end_tc(Suite,HookFunc,Args,Result,Return) of - '$ct_no_change' -> - {ok,Result}; - HookResult -> - {HookResult,HookResult} - end, + case HookFunc of + undefined -> + {ok,Result}; + _ -> + case ct_hooks:end_tc(Suite,HookFunc,Args,Result,Return) of + '$ct_no_change' -> + {ok,Result}; + HookResult -> + {HookResult,HookResult} + end + end, FinalResult = case get('$test_server_framework_test') of undefined -> @@ -821,6 +840,34 @@ end_tc(Mod,Func0,TCPid,Result,Args,Return) -> end, FinalResult. +%% This is to make sure that no post_init_per_* is ever called if the +%% corresponding pre_init_per_* was not called. +%% The skip or fail reasons are those that can be returned from +%% init_tc above in situations where we never came to call +%% ct_hooks:init_tc/3, e.g. if suite/0 fails, then we never call +%% ct_hooks:init_tc for init_per_suite, and thus we must not call +%% ct_hooks:end_tc for init_per_suite either. +end_hook_func({init_per_testcase,_},{auto_skip,{sequence_failed,_,_}},_) -> + undefined; +end_hook_func({init_per_testcase,_},{auto_skip,"Repeated test stopped by force_stop option"},_) -> + undefined; +end_hook_func({init_per_testcase,_},{fail,{config_name_already_in_use,_}},_) -> + undefined; +end_hook_func({init_per_testcase,_},{auto_skip,{InfoFuncError,_}},_) + when InfoFuncError==testcase0_failed; + InfoFuncError==require_failed -> + undefined; +end_hook_func(init_per_group,{auto_skip,{InfoFuncError,_}},_) + when InfoFuncError==group0_failed; + InfoFuncError==require_failed -> + undefined; +end_hook_func(init_per_suite,{auto_skip,{require_failed_in_suite0,_}},_) -> + undefined; +end_hook_func(init_per_suite,{auto_skip,{failed,{error,{suite0_failed,_}}}},_) -> + undefined; +end_hook_func(_,_,Default) -> + Default. 
+ %% {error,Reason} | {skip,Reason} | {timetrap_timeout,TVal} | %% {testcase_aborted,Reason} | testcase_aborted_or_killed | %% {'EXIT',Reason} | {fail,Reason} | {failed,Reason} | @@ -1339,25 +1386,25 @@ report(What,Data) -> ok; tc_done -> {Suite,{Func,GrName},Result} = Data, - Data1 = if GrName == undefined -> {Suite,Func,Result}; - true -> Data - end, + FuncSpec = if GrName == undefined -> Func; + true -> {Func,GrName} + end, %% Register the group leader for the process calling the report %% function, making it possible for a hook function to print %% in the test case log file ReportingPid = self(), ct_logs:register_groupleader(ReportingPid, group_leader()), case Result of - {failed, _} -> - ct_hooks:on_tc_fail(What, Data1); - {skipped,{failed,{_,init_per_testcase,_}}} -> - ct_hooks:on_tc_skip(tc_auto_skip, Data1); - {skipped,{require_failed,_}} -> - ct_hooks:on_tc_skip(tc_auto_skip, Data1); - {skipped,_} -> - ct_hooks:on_tc_skip(tc_user_skip, Data1); - {auto_skipped,_} -> - ct_hooks:on_tc_skip(tc_auto_skip, Data1); + {failed, Reason} -> + ct_hooks:on_tc_fail(What, {Suite,FuncSpec,Reason}); + {skipped,{failed,{_,init_per_testcase,_}}=Reason} -> + ct_hooks:on_tc_skip(tc_auto_skip, {Suite,FuncSpec,Reason}); + {skipped,{require_failed,_}=Reason} -> + ct_hooks:on_tc_skip(tc_auto_skip, {Suite,FuncSpec,Reason}); + {skipped,Reason} -> + ct_hooks:on_tc_skip(tc_user_skip, {Suite,FuncSpec,Reason}); + {auto_skipped,Reason} -> + ct_hooks:on_tc_skip(tc_auto_skip, {Suite,FuncSpec,Reason}); _Else -> ok end, diff --git a/lib/common_test/src/ct_groups.erl b/lib/common_test/src/ct_groups.erl index 1375e7dcc7..1c9faf6a70 100644 --- a/lib/common_test/src/ct_groups.erl +++ b/lib/common_test/src/ct_groups.erl @@ -442,17 +442,21 @@ make_conf(Mod, Name, Props, TestSpec) -> ok end, {InitConf,EndConf,ExtraProps} = - case erlang:function_exported(Mod,init_per_group,2) of - true -> - {{Mod,init_per_group},{Mod,end_per_group},[]}; - false -> + case {erlang:function_exported(Mod,init_per_group,2), + erlang:function_exported(Mod,end_per_group,2)} of + {false,false} -> ct_logs:log("TEST INFO", "init_per_group/2 and " "end_per_group/2 missing for group " "~w in ~w, using default.", [Name,Mod]), {{ct_framework,init_per_group}, {ct_framework,end_per_group}, - [{suite,Mod}]} + [{suite,Mod}]}; + _ -> + %% If any of these exist, the other should too + %% (required and documented). If it isn't, it will fail + %% with reason 'undef'. + {{Mod,init_per_group},{Mod,end_per_group},[]} end, {conf,[{name,Name}|Props++ExtraProps],InitConf,TestSpec,EndConf}. diff --git a/lib/common_test/src/ct_hooks.erl b/lib/common_test/src/ct_hooks.erl index c9a4abb5ee..60d1ea2b1c 100644 --- a/lib/common_test/src/ct_hooks.erl +++ b/lib/common_test/src/ct_hooks.erl @@ -92,15 +92,17 @@ init_tc(Mod, end_per_suite, Config) -> call(fun call_generic/3, Config, [pre_end_per_suite, Mod]); init_tc(Mod, {init_per_group, GroupName, Properties}, Config) -> maybe_start_locker(Mod, GroupName, Properties), - call(fun call_generic/3, Config, [pre_init_per_group, GroupName]); -init_tc(_Mod, {end_per_group, GroupName, _}, Config) -> - call(fun call_generic/3, Config, [pre_end_per_group, GroupName]); -init_tc(_Mod, {init_per_testcase,TC}, Config) -> - call(fun call_generic/3, Config, [pre_init_per_testcase, TC]); -init_tc(_Mod, {end_per_testcase,TC}, Config) -> - call(fun call_generic/3, Config, [pre_end_per_testcase, TC]); -init_tc(_Mod, TC = error_in_suite, Config) -> - call(fun call_generic/3, Config, [pre_init_per_testcase, TC]). 
+ call(fun call_generic_fallback/3, Config, + [pre_init_per_group, Mod, GroupName]); +init_tc(Mod, {end_per_group, GroupName, _}, Config) -> + call(fun call_generic_fallback/3, Config, + [pre_end_per_group, Mod, GroupName]); +init_tc(Mod, {init_per_testcase,TC}, Config) -> + call(fun call_generic_fallback/3, Config, [pre_init_per_testcase, Mod, TC]); +init_tc(Mod, {end_per_testcase,TC}, Config) -> + call(fun call_generic_fallback/3, Config, [pre_end_per_testcase, Mod, TC]); +init_tc(Mod, TC = error_in_suite, Config) -> + call(fun call_generic_fallback/3, Config, [pre_init_per_testcase, Mod, TC]). %% @doc Called as each test case is completed. This includes all configuration %% tests. @@ -126,23 +128,23 @@ end_tc(Mod, init_per_suite, Config, _Result, Return) -> end_tc(Mod, end_per_suite, Config, Result, _Return) -> call(fun call_generic/3, Result, [post_end_per_suite, Mod, Config], '$ct_no_change'); -end_tc(_Mod, {init_per_group, GroupName, _}, Config, _Result, Return) -> - call(fun call_generic/3, Return, [post_init_per_group, GroupName, Config], - '$ct_no_change'); +end_tc(Mod, {init_per_group, GroupName, _}, Config, _Result, Return) -> + call(fun call_generic_fallback/3, Return, + [post_init_per_group, Mod, GroupName, Config], '$ct_no_change'); end_tc(Mod, {end_per_group, GroupName, Properties}, Config, Result, _Return) -> - Res = call(fun call_generic/3, Result, - [post_end_per_group, GroupName, Config], '$ct_no_change'), + Res = call(fun call_generic_fallback/3, Result, + [post_end_per_group, Mod, GroupName, Config], '$ct_no_change'), maybe_stop_locker(Mod, GroupName, Properties), Res; -end_tc(_Mod, {init_per_testcase,TC}, Config, Result, _Return) -> - call(fun call_generic/3, Result, [post_init_per_testcase, TC, Config], - '$ct_no_change'); -end_tc(_Mod, {end_per_testcase,TC}, Config, Result, _Return) -> - call(fun call_generic/3, Result, [post_end_per_testcase, TC, Config], - '$ct_no_change'); -end_tc(_Mod, TC = error_in_suite, Config, Result, _Return) -> - call(fun call_generic/3, Result, [post_end_per_testcase, TC, Config], - '$ct_no_change'). +end_tc(Mod, {init_per_testcase,TC}, Config, Result, _Return) -> + call(fun call_generic_fallback/3, Result, + [post_init_per_testcase, Mod, TC, Config], '$ct_no_change'); +end_tc(Mod, {end_per_testcase,TC}, Config, Result, _Return) -> + call(fun call_generic_fallback/3, Result, + [post_end_per_testcase, Mod, TC, Config], '$ct_no_change'); +end_tc(Mod, TC = error_in_suite, Config, Result, _Return) -> + call(fun call_generic_fallback/3, Result, + [post_end_per_testcase, Mod, TC, Config], '$ct_no_change'). %% Case = TestCase | {TestCase,GroupName} @@ -181,15 +183,21 @@ call_terminate(#ct_hook_config{ module = Mod, state = State} = Hook, _, _) -> {[],Hook}. call_cleanup(#ct_hook_config{ module = Mod, state = State} = Hook, - Reason, [Function, _Suite | Args]) -> + Reason, [Function | Args]) -> NewState = catch_apply(Mod,Function, Args ++ [Reason, State], - State), + State, true), {Reason, Hook#ct_hook_config{ state = NewState } }. -call_generic(#ct_hook_config{ module = Mod, state = State} = Hook, - Value, [Function | Args]) -> +call_generic(Hook, Value, Meta) -> + do_call_generic(Hook, Value, Meta, false). + +call_generic_fallback(Hook, Value, Meta) -> + do_call_generic(Hook, Value, Meta, true). 
+ +do_call_generic(#ct_hook_config{ module = Mod, state = State} = Hook, + Value, [Function | Args], Fallback) -> {NewValue, NewState} = catch_apply(Mod, Function, Args ++ [Value, State], - {Value,State}), + {Value,State}, Fallback), {NewValue, Hook#ct_hook_config{ state = NewState } }. %% Generic call function @@ -257,15 +265,15 @@ remove(Key,List) when is_list(List) -> remove(_, Else) -> Else. %% Translate scopes, i.e. init_per_group,group1 -> end_per_group,group1 etc -scope([pre_init_per_testcase, TC|_]) -> - [post_init_per_testcase, TC]; -scope([pre_end_per_testcase, TC|_]) -> - [post_end_per_testcase, TC]; -scope([pre_init_per_group, GroupName|_]) -> - [post_end_per_group, GroupName]; -scope([post_init_per_group, GroupName|_]) -> - [post_end_per_group, GroupName]; +scope([pre_init_per_testcase, SuiteName, TC|_]) -> + [post_init_per_testcase, SuiteName, TC]; +scope([pre_end_per_testcase, SuiteName, TC|_]) -> + [post_end_per_testcase, SuiteName, TC]; +scope([pre_init_per_group, SuiteName, GroupName|_]) -> + [post_end_per_group, SuiteName, GroupName]; +scope([post_init_per_group, SuiteName, GroupName|_]) -> + [post_end_per_group, SuiteName, GroupName]; scope([pre_init_per_suite, SuiteName|_]) -> [post_end_per_suite, SuiteName]; scope([post_init_per_suite, SuiteName|_]) -> @@ -273,14 +281,29 @@ scope([post_init_per_suite, SuiteName|_]) -> scope(init) -> none. -terminate_if_scope_ends(HookId, [on_tc_skip,_Suite,{end_per_group,Name}], +strip_config([post_init_per_testcase, SuiteName, TC|_]) -> + [post_init_per_testcase, SuiteName, TC]; +strip_config([post_end_per_testcase, SuiteName, TC|_]) -> + [post_end_per_testcase, SuiteName, TC]; +strip_config([post_init_per_group, SuiteName, GroupName|_]) -> + [post_init_per_group, SuiteName, GroupName]; +strip_config([post_end_per_group, SuiteName, GroupName|_]) -> + [post_end_per_group, SuiteName, GroupName]; +strip_config([post_init_per_suite, SuiteName|_]) -> + [post_init_per_suite, SuiteName]; +strip_config([post_end_per_suite, SuiteName|_]) -> + [post_end_per_suite, SuiteName]; +strip_config(Other) -> + Other. + + +terminate_if_scope_ends(HookId, [on_tc_skip,Suite,{end_per_group,Name}], Hooks) -> - terminate_if_scope_ends(HookId, [post_end_per_group, Name], Hooks); + terminate_if_scope_ends(HookId, [post_end_per_group, Suite, Name], Hooks); terminate_if_scope_ends(HookId, [on_tc_skip,Suite,end_per_suite], Hooks) -> terminate_if_scope_ends(HookId, [post_end_per_suite, Suite], Hooks); -terminate_if_scope_ends(HookId, [Function,Tag|T], Hooks) when T =/= [] -> - terminate_if_scope_ends(HookId,[Function,Tag],Hooks); -terminate_if_scope_ends(HookId, Function, Hooks) -> +terminate_if_scope_ends(HookId, Function0, Hooks) -> + Function = strip_config(Function0), case lists:keyfind(HookId, #ct_hook_config.id, Hooks) of #ct_hook_config{ id = HookId, scope = Function} = Hook -> terminate([Hook]), @@ -384,21 +407,29 @@ pos(Id,[_|Rest],Num) -> catch_apply(M,F,A, Default) -> + catch_apply(M,F,A,Default,false). +catch_apply(M,F,A, Default, Fallback) -> + not erlang:module_loaded(M) andalso (catch M:module_info()), + case erlang:function_exported(M,F,length(A)) of + false when Fallback -> + catch_apply(M,F,tl(A),Default,false); + false -> + Default; + true -> + catch_apply(M,F,A) + end. 
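The catch_apply/5 clauses above implement the fallback described in the ct_hooks documentation: when a hook module does not export the new callback arity that takes the suite name first, the call is retried with tl(A), i.e. without the SuiteName argument. A minimal sketch of a hook written against the new interface follows; the module name is hypothetical and the ct:log/2 calls are only there to show where the extra Suite argument becomes useful.

%% Hypothetical hook module using the new suite-aware callbacks.
-module(example_suite_aware_cth).
-export([init/2, pre_init_per_testcase/4, on_tc_skip/4]).

init(_Id, Opts) ->
    {ok, Opts}.

%% New-style callback: the suite name is passed as the first argument.
pre_init_per_testcase(Suite, TC, Config, State) ->
    ct:log("~w:~w is about to run", [Suite, TC]),
    {Config, State}.

%% Also suite-aware; an old hook exporting only on_tc_skip/3 would still
%% be called, thanks to the arity fallback shown above.
on_tc_skip(Suite, TC, Reason, State) ->
    ct:log("~w:~w skipped: ~p", [Suite, TC, Reason]),
    State.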
+ +catch_apply(M,F,A) -> try - erlang:apply(M,F,A) + erlang:apply(M,F,A) catch _:Reason -> - case erlang:get_stacktrace() of - %% Return the default if it was the CTH module which did not have the function. - [{M,F,A,_}|_] when Reason == undef -> - Default; - Trace -> - ct_logs:log("Suite Hook","Call to CTH failed: ~w:~p", - [error,{Reason,Trace}]), - throw({error_in_cth_call, - lists:flatten( - io_lib:format("~w:~w/~w CTH call failed", - [M,F,length(A)]))}) - end + Trace = erlang:get_stacktrace(), + ct_logs:log("Suite Hook","Call to CTH failed: ~w:~p", + [error,{Reason,Trace}]), + throw({error_in_cth_call, + lists:flatten( + io_lib:format("~w:~w/~w CTH call failed", + [M,F,length(A)]))}) end. diff --git a/lib/common_test/src/ct_release_test.erl b/lib/common_test/src/ct_release_test.erl index d783f8d04e..c53e72ee88 100644 --- a/lib/common_test/src/ct_release_test.erl +++ b/lib/common_test/src/ct_release_test.erl @@ -132,7 +132,7 @@ %%----------------------------------------------------------------- -define(testnode, 'ct_release_test-upgrade'). --define(exclude_apps, [hipe, typer, dialyzer]). % never include these apps +-define(exclude_apps, [hipe, dialyzer]). % never include these apps %%----------------------------------------------------------------- -record(ct_data, {from,to}). diff --git a/lib/common_test/src/ct_run.erl b/lib/common_test/src/ct_run.erl index a049ef5695..cac176de3a 100644 --- a/lib/common_test/src/ct_run.erl +++ b/lib/common_test/src/ct_run.erl @@ -76,8 +76,8 @@ abort_if_missing_suites, silent_connections = [], stylesheet, - multiply_timetraps = 1, - scale_timetraps = false, + multiply_timetraps, + scale_timetraps, create_priv_dir, testspec_files = [], current_testspec, @@ -264,11 +264,11 @@ script_start1(Parent, Args) -> [], Args), Verbosity = verbosity_args2opts(Args), MultTT = get_start_opt(multiply_timetraps, - fun([MT]) -> list_to_integer(MT) end, 1, Args), + fun([MT]) -> list_to_integer(MT) end, Args), ScaleTT = get_start_opt(scale_timetraps, fun([CT]) -> list_to_atom(CT); ([]) -> true - end, false, Args), + end, Args), CreatePrivDir = get_start_opt(create_priv_dir, fun([PD]) -> list_to_atom(PD); ([]) -> auto_per_tc @@ -1055,8 +1055,8 @@ run_test2(StartOpts) -> CoverStop = get_start_opt(cover_stop, value, StartOpts), %% timetrap manipulation - MultiplyTT = get_start_opt(multiply_timetraps, value, 1, StartOpts), - ScaleTT = get_start_opt(scale_timetraps, value, false, StartOpts), + MultiplyTT = get_start_opt(multiply_timetraps, value, StartOpts), + ScaleTT = get_start_opt(scale_timetraps, value, StartOpts), %% create unique priv dir names CreatePrivDir = get_start_opt(create_priv_dir, value, StartOpts), @@ -2280,8 +2280,19 @@ do_run_test(Tests, Skip, Opts0) -> _Lower -> ok end, - test_server_ctrl:multiply_timetraps(Opts0#opts.multiply_timetraps), - test_server_ctrl:scale_timetraps(Opts0#opts.scale_timetraps), + + case Opts0#opts.multiply_timetraps of + undefined -> MultTT = 1; + MultTT -> MultTT + end, + case Opts0#opts.scale_timetraps of + undefined -> ScaleTT = false; + ScaleTT -> ScaleTT + end, + ct_logs:log("TEST INFO","Timetrap time multiplier = ~w~n" + "Timetrap scaling enabled = ~w", [MultTT,ScaleTT]), + test_server_ctrl:multiply_timetraps(MultTT), + test_server_ctrl:scale_timetraps(ScaleTT), test_server_ctrl:create_priv_dir(choose_val( Opts0#opts.create_priv_dir, diff --git a/lib/common_test/src/ct_testspec.erl b/lib/common_test/src/ct_testspec.erl index 991abb0666..466a2c7658 100644 --- a/lib/common_test/src/ct_testspec.erl +++ 
b/lib/common_test/src/ct_testspec.erl @@ -26,7 +26,8 @@ -export([prepare_tests/1, prepare_tests/2, collect_tests_from_list/2, collect_tests_from_list/3, - collect_tests_from_file/2, collect_tests_from_file/3]). + collect_tests_from_file/2, collect_tests_from_file/3, + get_tests/1]). -export([testspec_rec2list/1, testspec_rec2list/2]). @@ -803,6 +804,31 @@ list_nodes(#testspec{nodes=NodeRefs}) -> lists:map(fun({_Ref,Node}) -> Node end, NodeRefs). +%%%----------------------------------------------------------------- +%%% Parse the given test specs and return the complete set of specs +%%% and tests to run/skip. +%%% [Spec1,Spec2,...] means create separate tests per spec +%%% [[Spec1,Spec2,...]] means merge all specs into one +-spec get_tests(Specs) -> {ok,[{Specs,Tests}]} | {error,Reason} when + Specs :: [string()] | [[string()]], + Tests :: {Node,Run,Skip}, + Node :: atom(), + Run :: {Dir,Suites,Cases}, + Skip :: {Dir,Suites,Comment} | {Dir,Suites,Cases,Comment}, + Dir :: string(), + Suites :: atom | [atom()] | all, + Cases :: atom | [atom()] | all, + Comment :: string(), + Reason :: term(). + +get_tests(Specs) -> + case collect_tests_from_file(Specs,true) of + Tests when is_list(Tests) -> + {ok,[{S,prepare_tests(R)} || {S,R} <- Tests]}; + Error -> + Error + end. + %% ----------------------------------------------------- %% / \ %% | When adding test/config terms, remember to update | @@ -1132,6 +1158,11 @@ handle_data(verbosity,Node,VLvls,_Spec) when is_list(VLvls) -> VLvls1 = lists:map(fun(VLvl = {_Cat,_Lvl}) -> VLvl; (Lvl) -> {'$unspecified',Lvl} end, VLvls), [{Node,VLvls1}]; +handle_data(multiply_timetraps,Node,Mult,_Spec) when is_integer(Mult) -> + [{Node,Mult}]; +handle_data(scale_timetraps,Node,Scale,_Spec) when Scale == true; + Scale == false -> + [{Node,Scale}]; handle_data(silent_connections,Node,all,_Spec) -> [{Node,[all]}]; handle_data(silent_connections,Node,Conn,_Spec) when is_atom(Conn) -> @@ -1150,6 +1181,8 @@ should_be_added(Tag,Node,_Data,Spec) -> Tag == label; Tag == auto_compile; Tag == abort_if_missing_suites; Tag == stylesheet; Tag == verbosity; + Tag == multiply_timetraps; + Tag == scale_timetraps; Tag == silent_connections -> lists:keymember(ref2node(Node,Spec#testspec.nodes),1, read_field(Spec,Tag)) == false; diff --git a/lib/common_test/src/cth_conn_log.erl b/lib/common_test/src/cth_conn_log.erl index 883da0da0a..ce8852b3ea 100644 --- a/lib/common_test/src/cth_conn_log.erl +++ b/lib/common_test/src/cth_conn_log.erl @@ -54,8 +54,8 @@ -include_lib("common_test/include/ct.hrl"). -export([init/2, - pre_init_per_testcase/3, - post_end_per_testcase/4]). + pre_init_per_testcase/4, + post_end_per_testcase/5]). %%---------------------------------------------------------------------- %% Exported types @@ -104,7 +104,7 @@ get_log_opts(Mod,Opts) -> Hosts = proplists:get_value(hosts,Opts,[]), {LogType,Hosts}. -pre_init_per_testcase(TestCase,Config,CthState) -> +pre_init_per_testcase(_Suite,TestCase,Config,CthState) -> Logs = lists:map( fun({ConnMod,{LogType,Hosts}}) -> @@ -158,7 +158,7 @@ pre_init_per_testcase(TestCase,Config,CthState) -> ct_util:update_testdata(?MODULE, Update, [create]), {Config,CthState}. 
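A hedged usage sketch of the new ct_testspec:get_tests/1 exported above: the spec file names are hypothetical, and wrapping them in one inner list merges them into a single test, as described in the documentation.

%% Hypothetical call: merge two test specifications into one test and
%% inspect the resulting run/skip terms per node.
example() ->
    {ok, [{Specs, Tests}]} = ct_testspec:get_tests([["a.spec", "b.spec"]]),
    %% Specs lists all included specification files; Tests holds the
    %% per-node run/skip information that e.g. rebar3 uses to find the
    %% directories that need to be compiled.
    {Specs, Tests}.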
-post_end_per_testcase(TestCase,_Config,Return,CthState) -> +post_end_per_testcase(_Suite,TestCase,_Config,Return,CthState) -> Update = fun(PrevUsers) -> case lists:delete(TestCase, PrevUsers) of diff --git a/lib/common_test/src/cth_log_redirect.erl b/lib/common_test/src/cth_log_redirect.erl index 6d77d7ee9e..eda090d4f5 100644 --- a/lib/common_test/src/cth_log_redirect.erl +++ b/lib/common_test/src/cth_log_redirect.erl @@ -28,10 +28,10 @@ %% CTH Callbacks -export([id/1, init/2, pre_init_per_suite/3, pre_end_per_suite/3, post_end_per_suite/4, - pre_init_per_group/3, post_init_per_group/4, - pre_end_per_group/3, post_end_per_group/4, - pre_init_per_testcase/3, post_init_per_testcase/4, - pre_end_per_testcase/3, post_end_per_testcase/4]). + pre_init_per_group/4, post_init_per_group/5, + pre_end_per_group/4, post_end_per_group/5, + pre_init_per_testcase/4, post_init_per_testcase/5, + pre_end_per_testcase/4, post_end_per_testcase/5]). %% Event handler Callbacks -export([init/1, @@ -71,11 +71,11 @@ post_end_per_suite(_Suite, Config, Return, State) -> set_curr_func(undefined, Config), {Return, State}. -pre_init_per_group(Group, Config, State) -> +pre_init_per_group(_Suite, Group, Config, State) -> set_curr_func({group,Group,init_per_group}, Config), {Config, State}. -post_init_per_group(Group, Config, Result, tc_log_async) when is_list(Config) -> +post_init_per_group(_Suite, Group, Config, Result, tc_log_async) when is_list(Config) -> case lists:member(parallel,proplists:get_value( tc_group_properties,Config,[])) of true -> @@ -83,33 +83,33 @@ post_init_per_group(Group, Config, Result, tc_log_async) when is_list(Config) -> false -> {Result, tc_log_async} end; -post_init_per_group(_Group, _Config, Result, State) -> +post_init_per_group(_Suite, _Group, _Config, Result, State) -> {Result, State}. -pre_init_per_testcase(TC, Config, State) -> +pre_init_per_testcase(_Suite, TC, Config, State) -> set_curr_func(TC, Config), {Config, State}. -post_init_per_testcase(_TC, _Config, Return, State) -> +post_init_per_testcase(_Suite, _TC, _Config, Return, State) -> {Return, State}. -pre_end_per_testcase(_TC, Config, State) -> +pre_end_per_testcase(_Suite, _TC, Config, State) -> {Config, State}. -post_end_per_testcase(_TC, _Config, Result, State) -> +post_end_per_testcase(_Suite, _TC, _Config, Result, State) -> %% Make sure that the event queue is flushed %% before ending this test case. gen_event:call(error_logger, ?MODULE, flush, 300000), {Result, State}. -pre_end_per_group(Group, Config, {tc_log, Group}) -> +pre_end_per_group(_Suite, Group, Config, {tc_log, Group}) -> set_curr_func({group,Group,end_per_group}, Config), {Config, set_log_func(tc_log_async)}; -pre_end_per_group(Group, Config, State) -> +pre_end_per_group(_Suite, Group, Config, State) -> set_curr_func({group,Group,end_per_group}, Config), {Config, State}. -post_end_per_group(_Group, Config, Return, State) -> +post_end_per_group(_Suite, _Group, Config, Return, State) -> set_curr_func({group,undefined}, Config), {Return, State}. diff --git a/lib/common_test/src/cth_surefire.erl b/lib/common_test/src/cth_surefire.erl index 59b916851e..c4941948cc 100644 --- a/lib/common_test/src/cth_surefire.erl +++ b/lib/common_test/src/cth_surefire.erl @@ -33,16 +33,16 @@ -export([pre_end_per_suite/3]). -export([post_end_per_suite/4]). --export([pre_init_per_group/3]). --export([post_init_per_group/4]). --export([pre_end_per_group/3]). --export([post_end_per_group/4]). +-export([pre_init_per_group/4]). +-export([post_init_per_group/5]). 
+-export([pre_end_per_group/4]). +-export([post_end_per_group/5]). --export([pre_init_per_testcase/3]). --export([post_end_per_testcase/4]). +-export([pre_init_per_testcase/4]). +-export([post_end_per_testcase/5]). --export([on_tc_fail/3]). --export([on_tc_skip/3]). +-export([on_tc_fail/4]). +-export([on_tc_skip/4]). -export([terminate/1]). @@ -116,29 +116,29 @@ pre_end_per_suite(_Suite,Config,State) -> post_end_per_suite(_Suite,Config,Result,State) -> {Result, end_tc(end_per_suite,Config,Result,State)}. -pre_init_per_group(Group,Config,State) -> +pre_init_per_group(_Suite,Group,Config,State) -> {Config, init_tc(State#state{ curr_group = [Group|State#state.curr_group]}, Config)}. -post_init_per_group(_Group,Config,Result,State) -> +post_init_per_group(_Suite,_Group,Config,Result,State) -> {Result, end_tc(init_per_group,Config,Result,State)}. -pre_end_per_group(_Group,Config,State) -> +pre_end_per_group(_Suite,_Group,Config,State) -> {Config, init_tc(State, Config)}. -post_end_per_group(_Group,Config,Result,State) -> +post_end_per_group(_Suite,_Group,Config,Result,State) -> NewState = end_tc(end_per_group, Config, Result, State), {Result, NewState#state{ curr_group = tl(NewState#state.curr_group)}}. -pre_init_per_testcase(_TC,Config,State) -> +pre_init_per_testcase(_Suite,_TC,Config,State) -> {Config, init_tc(State, Config)}. -post_end_per_testcase(TC,Config,Result,State) -> +post_end_per_testcase(_Suite,TC,Config,Result,State) -> {Result, end_tc(TC,Config, Result,State)}. -on_tc_fail(_TC, _Res, State = #state{test_cases = []}) -> +on_tc_fail(_Suite,_TC, _Res, State = #state{test_cases = []}) -> State; -on_tc_fail(_TC, Res, State) -> +on_tc_fail(_Suite,_TC, Res, State) -> TCs = State#state.test_cases, TC = hd(TCs), NewTC = TC#testcase{ @@ -146,10 +146,9 @@ on_tc_fail(_TC, Res, State) -> {fail,lists:flatten(io_lib:format("~p",[Res]))} }, State#state{ test_cases = [NewTC | tl(TCs)]}. -on_tc_skip({ConfigFunc,_GrName},{Type,_Reason} = Res, State0) - when Type == tc_auto_skip; Type == tc_user_skip -> - on_tc_skip(ConfigFunc, Res, State0); -on_tc_skip(Tc,{Type,_Reason} = Res, State0) when Type == tc_auto_skip -> +on_tc_skip(Suite,{ConfigFunc,_GrName}, Res, State) -> + on_tc_skip(Suite,ConfigFunc, Res, State); +on_tc_skip(Suite,Tc, Res, State0) -> TcStr = atom_to_list(Tc), State = case State0#state.test_cases of @@ -158,11 +157,7 @@ on_tc_skip(Tc,{Type,_Reason} = Res, State0) when Type == tc_auto_skip -> _ -> State0 end, - do_tc_skip(Res, end_tc(Tc,[],Res,init_tc(State,[]))); -on_tc_skip(_Tc, _Res, State = #state{test_cases = []}) -> - State; -on_tc_skip(_Tc, Res, State) -> - do_tc_skip(Res, State). + do_tc_skip(Res, end_tc(Tc,[],Res,init_tc(set_suite(Suite,State),[]))). do_tc_skip(Res, State) -> TCs = State#state.test_cases, @@ -209,6 +204,12 @@ end_tc(Name, _Config, _Res, State = #state{ curr_suite = Suite, result = passed }| State#state.test_cases], tc_log = ""}. % so old tc_log is not set if next is on_tc_skip + +set_suite(Suite,#state{curr_suite=undefined}=State) -> + State#state{curr_suite=Suite, curr_suite_ts=?now}; +set_suite(_,State) -> + State. + close_suite(#state{ test_cases = [] } = State) -> State; close_suite(#state{ test_cases = TCs, url_base = UrlBase } = State) -> @@ -228,7 +229,8 @@ close_suite(#state{ test_cases = TCs, url_base = UrlBase } = State) -> testcases = lists:reverse(TCs), log = SuiteLog, url = SuiteUrl}, - State#state{ test_cases = [], + State#state{ curr_suite = undefined, + test_cases = [], test_suites = [Suite | State#state.test_suites]}. 
terminate(State = #state{ test_cases = [] }) -> diff --git a/lib/common_test/src/test_server.erl b/lib/common_test/src/test_server.erl index 924086f2bd..be49191f2e 100644 --- a/lib/common_test/src/test_server.erl +++ b/lib/common_test/src/test_server.erl @@ -778,9 +778,9 @@ spawn_fw_call(Mod,IPTC={init_per_testcase,Func},CurrConf,Pid, %% if init_per_testcase fails, the test case %% should be skipped try begin do_end_tc_call(Mod,IPTC, {Pid,Skip,[CurrConf]}, Why), - do_init_tc_call(Mod,{end_per_testcase,Func}, + do_init_tc_call(Mod,{end_per_testcase_not_run,Func}, [CurrConf],{ok,[CurrConf]}), - do_end_tc_call(Mod,{end_per_testcase,Func}, + do_end_tc_call(Mod,{end_per_testcase_not_run,Func}, {Pid,Skip,[CurrConf]}, Why) end of _ -> ok catch @@ -1151,14 +1151,14 @@ do_end_tc_call(Mod, IPTC={init_per_testcase,Func}, Res, Return) -> Args end, EPTCInitRes = - case do_init_tc_call(Mod,{end_per_testcase,Func}, + case do_init_tc_call(Mod,{end_per_testcase_not_run,Func}, IPTCEndRes,Return) of {ok,EPTCInitConfig} when is_list(EPTCInitConfig) -> {Return,EPTCInitConfig}; _ -> - Return + {Return,IPTCEndRes} end, - do_end_tc_call1(Mod, {end_per_testcase,Func}, + do_end_tc_call1(Mod, {end_per_testcase_not_run,Func}, EPTCInitRes, Return); _Ok -> do_end_tc_call1(Mod, IPTC, Res, Return) diff --git a/lib/common_test/src/test_server_ctrl.erl b/lib/common_test/src/test_server_ctrl.erl index b52e4bef9b..39c523f8b3 100644 --- a/lib/common_test/src/test_server_ctrl.erl +++ b/lib/common_test/src/test_server_ctrl.erl @@ -2051,17 +2051,21 @@ add_init_and_end_per_suite([], _LastMod, skipped_suite, _FwMod) -> add_init_and_end_per_suite([], LastMod, LastRef, FwMod) -> %% we'll add end_per_suite here even if it's not exported %% (and simply let the call fail if it's missing) - case erlang:function_exported(LastMod, end_per_suite, 1) of - true -> - [{conf,LastRef,[],{LastMod,end_per_suite}}]; - false -> + case {erlang:function_exported(LastMod, end_per_suite, 1), + erlang:function_exported(LastMod, init_per_suite, 1)} of + {false,false} -> %% let's call a "fake" end_per_suite if it exists case erlang:function_exported(FwMod, end_per_suite, 1) of true -> [{conf,LastRef,[{suite,LastMod}],{FwMod,end_per_suite}}]; false -> [{conf,LastRef,[],{LastMod,end_per_suite}}] - end + end; + _ -> + %% If any of these exist, the other should too + %% (required and documented). If it isn't, it will fail + %% with reason 'undef'. + [{conf,LastRef,[],{LastMod,end_per_suite}}] end. do_add_init_and_end_per_suite(LastMod, LastRef, Mod, FwMod) -> @@ -2070,11 +2074,9 @@ do_add_init_and_end_per_suite(LastMod, LastRef, Mod, FwMod) -> _ -> ok end, {Init,NextMod,NextRef} = - case erlang:function_exported(Mod, init_per_suite, 1) of - true -> - Ref = make_ref(), - {[{conf,Ref,[],{Mod,init_per_suite}}],Mod,Ref}; - false -> + case {erlang:function_exported(Mod, init_per_suite, 1), + erlang:function_exported(Mod, end_per_suite, 1)} of + {false,false} -> %% let's call a "fake" init_per_suite if it exists case erlang:function_exported(FwMod, init_per_suite, 1) of true -> @@ -2083,8 +2085,13 @@ do_add_init_and_end_per_suite(LastMod, LastRef, Mod, FwMod) -> {FwMod,init_per_suite}}],Mod,Ref}; false -> {[],Mod,undefined} - end - + end; + _ -> + %% If any of these exist, the other should too + %% (required and documented). If it isn't, it will fail + %% with reason 'undef'. 
+ Ref = make_ref(), + {[{conf,Ref,[],{Mod,init_per_suite}}],Mod,Ref} end, Cases = if LastRef==undefined -> @@ -2094,10 +2101,9 @@ do_add_init_and_end_per_suite(LastMod, LastRef, Mod, FwMod) -> true -> %% we'll add end_per_suite here even if it's not exported %% (and simply let the call fail if it's missing) - case erlang:function_exported(LastMod, end_per_suite, 1) of - true -> - [{conf,LastRef,[],{LastMod,end_per_suite}}|Init]; - false -> + case {erlang:function_exported(LastMod, end_per_suite, 1), + erlang:function_exported(LastMod, init_per_suite, 1)} of + {false,false} -> %% let's call a "fake" end_per_suite if it exists case erlang:function_exported(FwMod, end_per_suite, 1) of true -> @@ -2105,8 +2111,13 @@ do_add_init_and_end_per_suite(LastMod, LastRef, Mod, FwMod) -> {FwMod,end_per_suite}}|Init]; false -> [{conf,LastRef,[],{LastMod,end_per_suite}}|Init] - end - end + end; + _ -> + %% If any of these exist, the other should too + %% (required and documented). If it isn't, it will fail + %% with reason 'undef'. + [{conf,LastRef,[],{LastMod,end_per_suite}}|Init] + end end, {Cases,NextMod,NextRef}. @@ -2115,11 +2126,9 @@ do_add_end_per_suite_and_skip(LastMod, LastRef, Mod, FwMod) -> No when No==undefined ; No==skipped_suite -> {[],Mod,skipped_suite}; _Ref -> - case erlang:function_exported(LastMod, end_per_suite, 1) of - true -> - {[{conf,LastRef,[],{LastMod,end_per_suite}}], - Mod,skipped_suite}; - false -> + case {erlang:function_exported(LastMod, end_per_suite, 1), + erlang:function_exported(LastMod, init_per_suite, 1)} of + {false,false} -> case erlang:function_exported(FwMod, end_per_suite, 1) of true -> %% let's call "fake" end_per_suite if it exists @@ -2128,7 +2137,13 @@ do_add_end_per_suite_and_skip(LastMod, LastRef, Mod, FwMod) -> false -> {[{conf,LastRef,[],{LastMod,end_per_suite}}], Mod,skipped_suite} - end + end; + _ -> + %% If any of these exist, the other should too + %% (required and documented). If it isn't, it will fail + %% with reason 'undef'. + {[{conf,LastRef,[],{LastMod,end_per_suite}}], + Mod,skipped_suite} end end. 
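Several of the hunks above replace a check on a single suite callback with a check on the pair of suite configuration callbacks. A condensed sketch of that decision, using a hypothetical helper name: the suite's own end_per_suite is scheduled whenever the suite exports either init_per_suite/1 or end_per_suite/1 (a missing counterpart then simply fails with reason 'undef'), and the framework's "fake" callback is only considered when the suite exports neither.

%% Hypothetical helper illustrating the updated selection logic:
choose_end_per_suite(SuiteMod, FwMod) ->
    case {erlang:function_exported(SuiteMod, end_per_suite, 1),
          erlang:function_exported(SuiteMod, init_per_suite, 1)} of
        {false,false} ->
            %% The suite exports neither callback; use the framework's
            %% end_per_suite if it exists, otherwise fall back to the
            %% suite anyway (the call is allowed to fail).
            case erlang:function_exported(FwMod, end_per_suite, 1) of
                true  -> {FwMod,end_per_suite};
                false -> {SuiteMod,end_per_suite}
            end;
        _ ->
            %% The suite exports at least one of the two; always call its
            %% own end_per_suite so a missing one fails with 'undef'.
            {SuiteMod,end_per_suite}
    end.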
@@ -2924,22 +2939,21 @@ run_test_cases_loop([{Mod,Func,Args}|Cases], Config, TimetrapData, Mode, Status) exit(framework_error); %% sequential execution of test case finished {Time,RetVal,_} -> + RetTag = + if is_tuple(RetVal) -> element(1,RetVal); + true -> undefined + end, {Failed,Status1} = - case Time of - died -> - {true,update_status(failed, Mod, Func, Status)}; - _ when is_tuple(RetVal) -> - case element(1, RetVal) of - R when R=='EXIT'; R==failed -> - {true,update_status(failed, Mod, Func, Status)}; - R when R==skip; R==skipped -> - {false,update_status(skipped, Mod, Func, Status)}; - _ -> - {false,update_status(ok, Mod, Func, Status)} - end; - _ -> - {false,update_status(ok, Mod, Func, Status)} - end, + case RetTag of + Skip when Skip==skip; Skip==skipped -> + {false,update_status(skipped, Mod, Func, Status)}; + Fail when Fail=='EXIT'; Fail==failed -> + {true,update_status(failed, Mod, Func, Status)}; + _ when Time==died, RetVal=/=ok -> + {true,update_status(failed, Mod, Func, Status)}; + _ -> + {false,update_status(ok, Mod, Func, Status)} + end, case check_prop(sequence, Mode) of false -> stop_minor_log_file(), @@ -3794,7 +3808,15 @@ run_test_case1(Ref, Num, Mod, Func, Args, RunInit, {died,{timetrap_timeout,TimetrapTimeout}} -> progress(failed, Num, Mod, Func, GrName, Loc, timetrap_timeout, TimetrapTimeout, Comment, Style); - {died,Reason} -> + {died,{Skip,Reason}} when Skip==skip; Skip==skipped -> + %% died in init_per_testcase + progress(skip, Num, Mod, Func, GrName, Loc, Reason, + Time, Comment, Style); + {died,Reason} when Reason=/=ok -> + %% (If Reason==ok it means that process died in + %% end_per_testcase after successfully completing the + %% test case itself - then we shall not fail, but a + %% warning will be issued in the comment field.) 
progress(failed, Num, Mod, Func, GrName, Loc, Reason, Time, Comment, Style); {_,{'EXIT',{Skip,Reason}}} when Skip==skip; Skip==skipped; @@ -3943,6 +3965,9 @@ progress(skip, CaseNum, Mod, Func, GrName, Loc, Reason, Time, [get_info_str(Mod,Func, CaseNum, get(test_server_cases))]), test_server_sup:framework_call(report, [tc_done,{Mod,{Func,GrName}, {ReportTag,Reason1}}]), + TimeStr = io_lib:format(if is_float(Time) -> "~.3fs"; + true -> "~w" + end, [Time]), ReasonStr = escape_chars(reason_to_string(Reason1)), ReasonStr1 = lists:flatten([string:strip(S,left) || S <- string:tokens(ReasonStr,[$\n])]), @@ -3957,10 +3982,10 @@ progress(skip, CaseNum, Mod, Func, GrName, Loc, Reason, Time, _ -> xhtml("<br>(","<br />(") ++ to_string(Comment) ++ ")" end, print(html, - "<td>" ++ St0 ++ "~.3fs" ++ St1 ++ "</td>" + "<td>" ++ St0 ++ "~ts" ++ St1 ++ "</td>" "<td><font color=\"~ts\">SKIPPED</font></td>" "<td>~ts~ts</td></tr>\n", - [Time,Color,ReasonStr2,Comment1]), + [TimeStr,Color,ReasonStr2,Comment1]), FormatLoc = test_server_sup:format_loc(Loc), print(minor, "=== Location: ~ts", [FormatLoc]), print(minor, "=== Reason: ~ts", [ReasonStr1]), @@ -4098,6 +4123,9 @@ progress(ok, _CaseNum, Mod, Func, GrName, _Loc, RetVal, Time, Comment0, {St0,St1}) -> print(minor, "successfully completed test case", []), test_server_sup:framework_call(report, [tc_done,{Mod,{Func,GrName},ok}]), + TimeStr = io_lib:format(if is_float(Time) -> "~.3fs"; + true -> "~w" + end, [Time]), Comment = case RetVal of {comment,RetComment} -> @@ -4116,10 +4144,10 @@ progress(ok, _CaseNum, Mod, Func, GrName, _Loc, RetVal, Time, end, print(major, "=elapsed ~p", [Time]), print(html, - "<td>" ++ St0 ++ "~.3fs" ++ St1 ++ "</td>" + "<td>" ++ St0 ++ "~ts" ++ St1 ++ "</td>" "<td><font color=\"green\">Ok</font></td>" "~ts</tr>\n", - [Time,Comment]), + [TimeStr,Comment]), print(minor, escape_chars(io_lib:format("=== Returned value: ~tp", [RetVal])), []), diff --git a/lib/common_test/test/ct_error_SUITE.erl b/lib/common_test/test/ct_error_SUITE.erl index fae23484e6..621f3b6d2d 100644 --- a/lib/common_test/test/ct_error_SUITE.erl +++ b/lib/common_test/test/ct_error_SUITE.erl @@ -1531,17 +1531,17 @@ test_events(config_func_errors) -> {?eh,tc_start,{config_func_error_1_SUITE,exit_in_iptc}}, {?eh,tc_done,{config_func_error_1_SUITE,exit_in_iptc,'_'}}, - {?eh,test_stats,{0,1,{0,0}}}, + {?eh,test_stats,{0,0,{0,1}}}, {?eh,tc_start,{config_func_error_1_SUITE,exit_in_eptc}}, {?eh,tc_done,{config_func_error_1_SUITE,exit_in_eptc,'_'}}, - {?eh,test_stats,{0,2,{0,0}}}, + {?eh,test_stats,{1,0,{0,1}}}, [{?eh,tc_start,{config_func_error_1_SUITE,{init_per_group,g1,[]}}}, {?eh,tc_done,{config_func_error_1_SUITE,{init_per_group,g1,[]},ok}}, {?eh,tc_start,{config_func_error_1_SUITE,exit_in_iptc}}, {?eh,tc_done,{config_func_error_1_SUITE,exit_in_iptc,'_'}}, - {?eh,test_stats,{0,3,{0,0}}}, + {?eh,test_stats,{1,0,{0,2}}}, {?eh,tc_start,{config_func_error_1_SUITE,{end_per_group,g1,[]}}}, {?eh,tc_done,{config_func_error_1_SUITE,{end_per_group,g1,[]},ok}}], @@ -1549,7 +1549,7 @@ test_events(config_func_errors) -> {?eh,tc_done,{config_func_error_1_SUITE,{init_per_group,g2,[]},ok}}, {?eh,tc_start,{config_func_error_1_SUITE,exit_in_eptc}}, {?eh,tc_done,{config_func_error_1_SUITE,exit_in_eptc,'_'}}, - {?eh,test_stats,{0,4,{0,0}}}, + {?eh,test_stats,{2,0,{0,2}}}, {?eh,tc_start,{config_func_error_1_SUITE,{end_per_group,g2,[]}}}, {?eh,tc_done,{config_func_error_1_SUITE,{end_per_group,g2,[]},ok}}], diff --git a/lib/common_test/test/ct_hooks_SUITE.erl 
b/lib/common_test/test/ct_hooks_SUITE.erl index bc716fb5e3..93bcb8fe52 100644 --- a/lib/common_test/test/ct_hooks_SUITE.erl +++ b/lib/common_test/test/ct_hooks_SUITE.erl @@ -82,10 +82,13 @@ all(suite) -> scope_suite_state_cth, fail_pre_suite_cth, double_fail_pre_suite_cth, fail_post_suite_cth, skip_pre_suite_cth, skip_pre_end_cth, + skip_pre_init_tc_cth, skip_post_suite_cth, recover_post_suite_cth, update_config_cth, state_update_cth, options_cth, same_id_cth, fail_n_skip_with_minimal_cth, prio_cth, no_config, - data_dir, cth_log + no_init_suite_config, no_init_config, no_end_config, + failed_sequence, repeat_force_stop, config_clash, + callbacks_on_skip, fallback, data_dir, cth_log ] ). @@ -190,6 +193,10 @@ skip_post_suite_cth(Config) when is_list(Config) -> do_test(skip_post_suite_cth, "ct_cth_empty_SUITE.erl", [skip_post_suite_cth],Config). +skip_pre_init_tc_cth(Config) -> + do_test(skip_pre_init_tc_cth, "ct_cth_empty_SUITE.erl", + [skip_pre_init_tc_cth],Config). + recover_post_suite_cth(Config) when is_list(Config) -> do_test(recover_post_suite_cth, "ct_cth_fail_per_suite_SUITE.erl", [recover_post_suite_cth],Config). @@ -223,6 +230,16 @@ no_config(Config) when is_list(Config) -> do_test(no_config, "ct_no_config_SUITE.erl", [verify_config_cth],Config). +no_init_suite_config(Config) when is_list(Config) -> + do_test(no_init_suite_config, "ct_no_init_suite_config_SUITE.erl", + [empty_cth],Config). + +no_init_config(Config) when is_list(Config) -> + do_test(no_init_config, "ct_no_init_config_SUITE.erl",[empty_cth],Config). + +no_end_config(Config) when is_list(Config) -> + do_test(no_end_config, "ct_no_end_config_SUITE.erl",[empty_cth],Config). + data_dir(Config) when is_list(Config) -> do_test(data_dir, "ct_data_dir_SUITE.erl", [verify_data_dir_cth],Config). @@ -254,24 +271,53 @@ cth_log(Config) when is_list(Config) -> end, UnexpIoLogs), ok. +%% OTP-10599 adds the Suite argument as first argument to all hook +%% callbacks that did not have a Suite argument from before. This test +%% checks that ct_hooks will fall back to old versions of callbacks if +%% new versions are not exported. +fallback(Config) -> + do_test(fallback, "all_hook_callbacks_SUITE.erl",[fallback_cth], Config). + +%% Test that expected callbacks, and only those, are called when tests +%% are skipped in different ways +callbacks_on_skip(Config) -> + do_test(callbacks_on_skip, {spec,"skip.spec"},[skip_cth], Config). + +%% Test that expected callbacks, and only those, are called when tests +%% are skipped due to failed sequence +failed_sequence(Config) -> + do_test(failed_sequence, "seq_SUITE.erl", [skip_cth], Config). + +%% Test that expected callbacks, and only those, are called when tests +%% are skipped due to {force_stop,skip_rest} option +repeat_force_stop(Config) -> + do_test(repeat_force_stop, "repeat_SUITE.erl", [skip_cth], Config, ok, 2, + [{force_stop,skip_rest},{duration,"000009"}]). + +%% Test that expected callbacks, and only those, are called when a test +%% are fails due to clash in config alias names +config_clash(Config) -> + do_test(config_clash, "config_clash_SUITE.erl", [skip_cth], Config). %%%----------------------------------------------------------------- %%% HELP FUNCTIONS %%%----------------------------------------------------------------- -do_test(Tag, SWC, CTHs, Config) -> - do_test(Tag, SWC, CTHs, Config, ok). -do_test(Tag, SWC, CTHs, Config, {error,_} = Res) -> - do_test(Tag, SWC, CTHs, Config, Res, 1); -do_test(Tag, SWC, CTHs, Config, Res) -> - do_test(Tag, SWC, CTHs, Config, Res, 2). 
+do_test(Tag, WTT, CTHs, Config) -> + do_test(Tag, WTT, CTHs, Config, ok). +do_test(Tag, WTT, CTHs, Config, {error,_} = Res) -> + do_test(Tag, WTT, CTHs, Config, Res, 1,[]); +do_test(Tag, WTT, CTHs, Config, Res) -> + do_test(Tag, WTT, CTHs, Config, Res, 2,[]). -do_test(Tag, SuiteWildCard, CTHs, Config, Res, EC) -> +do_test(Tag, WhatToTest, CTHs, Config, Res, EC, ExtraOpts) when is_list(WhatToTest) -> + do_test(Tag, {suite,WhatToTest}, CTHs, Config, Res, EC, ExtraOpts); +do_test(Tag, {WhatTag,Wildcard}, CTHs, Config, Res, EC, ExtraOpts) -> DataDir = ?config(data_dir, Config), - Suites = filelib:wildcard( - filename:join([DataDir,"cth/tests",SuiteWildCard])), - {Opts,ERPid} = setup([{suite,Suites}, - {ct_hooks,CTHs},{label,Tag}], Config), + Files = filelib:wildcard( + filename:join([DataDir,"cth/tests",Wildcard])), + {Opts,ERPid} = + setup([{WhatTag,Files},{ct_hooks,CTHs},{label,Tag}|ExtraOpts], Config), Res = ct_test_support:run(Opts, Config), Events = ct_test_support:get_events(ERPid, Config), @@ -323,10 +369,10 @@ test_events(one_empty_cth) -> {?eh,tc_done,{ct_cth_empty_SUITE,init_per_suite,ok}}, {?eh,tc_start,{ct_cth_empty_SUITE,test_case}}, - {?eh,cth,{empty_cth,pre_init_per_testcase,[test_case,'$proplist',[]]}}, - {?eh,cth,{empty_cth,post_init_per_testcase,[test_case,'$proplist','_',[]]}}, - {?eh,cth,{empty_cth,pre_end_per_testcase,[test_case,'$proplist',[]]}}, - {?eh,cth,{empty_cth,post_end_per_testcase,[test_case,'$proplist','_',[]]}}, + {?eh,cth,{empty_cth,pre_init_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_init_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist','_',[]]}}, + {?eh,cth,{empty_cth,pre_end_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_end_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist','_',[]]}}, {?eh,tc_done,{ct_cth_empty_SUITE,test_case,ok}}, {?eh,tc_start,{ct_cth_empty_SUITE,end_per_suite}}, @@ -355,10 +401,10 @@ test_events(two_empty_cth) -> {?eh,tc_done,{ct_cth_empty_SUITE,init_per_suite,ok}}, {?eh,tc_start,{ct_cth_empty_SUITE,test_case}}, - {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}}, - {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}}, - {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}}, - {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}}, + {?eh,cth,{'_',pre_init_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',[]]}}, + {?eh,cth,{'_',pre_init_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',[]]}}, + {?eh,cth,{'_',post_end_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',ok,[]]}}, + {?eh,cth,{'_',post_end_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',ok,[]]}}, {?eh,tc_done,{ct_cth_empty_SUITE,test_case,ok}}, {?eh,tc_start,{ct_cth_empty_SUITE,end_per_suite}}, @@ -423,8 +469,8 @@ test_events(minimal_and_maximal_cth) -> {?eh,tc_done,{ct_cth_empty_SUITE,init_per_suite,ok}}, {?eh,tc_start,{ct_cth_empty_SUITE,test_case}}, - {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}}, - {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}}, + {?eh,cth,{'_',pre_init_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',[]]}}, + {?eh,cth,{'_',post_end_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',ok,[]]}}, {?eh,tc_done,{ct_cth_empty_SUITE,test_case,ok}}, {?eh,tc_start,{ct_cth_empty_SUITE,end_per_suite}}, @@ -512,8 +558,8 @@ test_events(scope_per_suite_cth) -> {?eh,tc_done,{ct_scope_per_suite_cth_SUITE,init_per_suite,ok}}, 
{?eh,tc_start,{ct_scope_per_suite_cth_SUITE,test_case}}, - {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}}, - {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}}, + {?eh,cth,{'_',pre_init_per_testcase,[ct_scope_per_suite_cth_SUITE,test_case,'$proplist',[]]}}, + {?eh,cth,{'_',post_end_per_testcase,[ct_scope_per_suite_cth_SUITE,test_case,'$proplist',ok,[]]}}, {?eh,tc_done,{ct_scope_per_suite_cth_SUITE,test_case,ok}}, {?eh,tc_start,{ct_scope_per_suite_cth_SUITE,end_per_suite}}, @@ -538,8 +584,8 @@ test_events(scope_suite_cth) -> {?eh,tc_done,{ct_scope_suite_cth_SUITE,init_per_suite,ok}}, {?eh,tc_start,{ct_scope_suite_cth_SUITE,test_case}}, - {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}}, - {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}}, + {?eh,cth,{'_',pre_init_per_testcase,[ct_scope_suite_cth_SUITE,test_case,'$proplist',[]]}}, + {?eh,cth,{'_',post_end_per_testcase,[ct_scope_suite_cth_SUITE,test_case,'$proplist',ok,[]]}}, {?eh,tc_done,{ct_scope_suite_cth_SUITE,test_case,ok}}, {?eh,tc_start,{ct_scope_suite_cth_SUITE,end_per_suite}}, @@ -561,17 +607,17 @@ test_events(scope_per_group_cth) -> [{?eh,tc_start,{ct_scope_per_group_cth_SUITE,{init_per_group,group1,[]}}}, {?eh,cth,{'_',id,[[]]}}, {?eh,cth,{'_',init,['_',[]]}}, - {?eh,cth,{'_',post_init_per_group,[group1,'$proplist','$proplist',[]]}}, + {?eh,cth,{'_',post_init_per_group,[ct_scope_per_group_cth_SUITE,group1, '$proplist','$proplist',[]]}}, {?eh,tc_done,{ct_scope_per_group_cth_SUITE,{init_per_group,group1,[]},ok}}, {?eh,tc_start,{ct_scope_per_group_cth_SUITE,test_case}}, - {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}}, - {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}}, + {?eh,cth,{'_',pre_init_per_testcase,[ct_scope_per_group_cth_SUITE,test_case,'$proplist',[]]}}, + {?eh,cth,{'_',post_end_per_testcase,[ct_scope_per_group_cth_SUITE,test_case,'$proplist',ok,[]]}}, {?eh,tc_done,{ct_scope_per_group_cth_SUITE,test_case,ok}}, {?eh,tc_start,{ct_scope_per_group_cth_SUITE,{end_per_group,group1,[]}}}, - {?eh,cth,{'_',pre_end_per_group,[group1,'$proplist',[]]}}, - {?eh,cth,{'_',post_end_per_group,[group1,'$proplist','_',[]]}}, + {?eh,cth,{'_',pre_end_per_group,[ct_scope_per_group_cth_SUITE,group1,'$proplist',[]]}}, + {?eh,cth,{'_',post_end_per_group,[ct_scope_per_group_cth_SUITE,group1,'$proplist','_',[]]}}, {?eh,cth,{'_',terminate,[[]]}}, {?eh,tc_done,{ct_scope_per_group_cth_SUITE,{end_per_group,group1,[]},ok}}], @@ -592,8 +638,8 @@ test_events(scope_per_suite_state_cth) -> {?eh,tc_done,{ct_scope_per_suite_state_cth_SUITE,init_per_suite,ok}}, {?eh,tc_start,{ct_scope_per_suite_state_cth_SUITE,test_case}}, - {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[test]]}}, - {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[test]]}}, + {?eh,cth,{'_',pre_init_per_testcase,[ct_scope_per_suite_state_cth_SUITE,test_case,'$proplist',[test]]}}, + {?eh,cth,{'_',post_end_per_testcase,[ct_scope_per_suite_state_cth_SUITE,test_case,'$proplist',ok,[test]]}}, {?eh,tc_done,{ct_scope_per_suite_state_cth_SUITE,test_case,ok}}, {?eh,tc_start,{ct_scope_per_suite_state_cth_SUITE,end_per_suite}}, @@ -618,8 +664,8 @@ test_events(scope_suite_state_cth) -> {?eh,tc_done,{ct_scope_suite_state_cth_SUITE,init_per_suite,ok}}, {?eh,tc_start,{ct_scope_suite_state_cth_SUITE,test_case}}, - {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[test]]}}, - {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[test]]}}, + 
{?eh,cth,{'_',pre_init_per_testcase,[ct_scope_suite_state_cth_SUITE,test_case,'$proplist',[test]]}}, + {?eh,cth,{'_',post_end_per_testcase,[ct_scope_suite_state_cth_SUITE,test_case,'$proplist',ok,[test]]}}, {?eh,tc_done,{ct_scope_suite_state_cth_SUITE,test_case,ok}}, {?eh,tc_start,{ct_scope_suite_state_cth_SUITE,end_per_suite}}, @@ -641,17 +687,17 @@ test_events(scope_per_group_state_cth) -> [{?eh,tc_start,{ct_scope_per_group_state_cth_SUITE,{init_per_group,group1,[]}}}, {?eh,cth,{'_',id,[[test]]}}, {?eh,cth,{'_',init,['_',[test]]}}, - {?eh,cth,{'_',post_init_per_group,[group1,'$proplist','$proplist',[test]]}}, + {?eh,cth,{'_',post_init_per_group,[ct_scope_per_group_state_cth_SUITE,group1,'$proplist','$proplist',[test]]}}, {?eh,tc_done,{ct_scope_per_group_state_cth_SUITE,{init_per_group,group1,[]},ok}}, {?eh,tc_start,{ct_scope_per_group_state_cth_SUITE,test_case}}, - {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[test]]}}, - {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[test]]}}, + {?eh,cth,{'_',pre_init_per_testcase,[ct_scope_per_group_state_cth_SUITE,test_case,'$proplist',[test]]}}, + {?eh,cth,{'_',post_end_per_testcase,[ct_scope_per_group_state_cth_SUITE,test_case,'$proplist',ok,[test]]}}, {?eh,tc_done,{ct_scope_per_group_state_cth_SUITE,test_case,ok}}, {?eh,tc_start,{ct_scope_per_group_state_cth_SUITE,{end_per_group,group1,[]}}}, - {?eh,cth,{'_',pre_end_per_group,[group1,'$proplist',[test]]}}, - {?eh,cth,{'_',post_end_per_group,[group1,'$proplist','_',[test]]}}, + {?eh,cth,{'_',pre_end_per_group,[ct_scope_per_group_state_cth_SUITE,group1,'$proplist',[test]]}}, + {?eh,cth,{'_',post_end_per_group,[ct_scope_per_group_state_cth_SUITE,group1,'$proplist','_',[test]]}}, {?eh,cth,{'_',terminate,[[test]]}}, {?eh,tc_done,{ct_scope_per_group_state_cth_SUITE,{end_per_group,group1,[]},ok}}], @@ -674,14 +720,14 @@ test_events(fail_pre_suite_cth) -> {?eh,tc_done,{ct_cth_empty_SUITE,init_per_suite, {failed, {error,"Test failure"}}}}, {?eh,cth,{'_',on_tc_fail, - [init_per_suite,{failed,"Test failure"},[]]}}, + [ct_cth_empty_SUITE,init_per_suite,"Test failure",[]]}}, {?eh,tc_auto_skip,{ct_cth_empty_SUITE,test_case, {failed,{ct_cth_empty_SUITE,init_per_suite, {failed,"Test failure"}}}}}, {?eh,cth,{'_',on_tc_skip, - [test_case, {tc_auto_skip, + [ct_cth_empty_SUITE,test_case, {tc_auto_skip, {failed, {ct_cth_empty_SUITE, init_per_suite, {failed, "Test failure"}}}},[]]}}, @@ -690,7 +736,7 @@ test_events(fail_pre_suite_cth) -> {failed, {ct_cth_empty_SUITE, init_per_suite, {failed, "Test failure"}}}}}, {?eh,cth,{'_',on_tc_skip, - [end_per_suite, {tc_auto_skip, + [ct_cth_empty_SUITE,end_per_suite, {tc_auto_skip, {failed, {ct_cth_empty_SUITE, init_per_suite, {failed, "Test failure"}}}},[]]}}, @@ -727,17 +773,17 @@ test_events(fail_post_suite_cth) -> {?eh,cth,{'_',post_init_per_suite,[ct_cth_empty_SUITE,'$proplist','$proplist',[]]}}, {?eh,tc_done,{ct_cth_empty_SUITE,init_per_suite, {failed,{error,"Test failure"}}}}, - {?eh,cth,{'_',on_tc_fail,[init_per_suite, {failed,"Test failure"}, []]}}, + {?eh,cth,{'_',on_tc_fail,[ct_cth_empty_SUITE,init_per_suite, "Test failure", []]}}, {?eh,tc_auto_skip,{ct_cth_empty_SUITE,test_case, {failed,{ct_cth_empty_SUITE,init_per_suite, {failed,"Test failure"}}}}}, - {?eh,cth,{'_',on_tc_skip,[test_case,{tc_auto_skip,'_'},[]]}}, + {?eh,cth,{'_',on_tc_skip,[ct_cth_empty_SUITE,test_case,{tc_auto_skip,'_'},[]]}}, {?eh,tc_auto_skip, {ct_cth_empty_SUITE, end_per_suite, {failed, {ct_cth_empty_SUITE, init_per_suite, {failed, "Test failure"}}}}}, - 
{?eh,cth,{'_',on_tc_skip,[end_per_suite,{tc_auto_skip,'_'},[]]}}, + {?eh,cth,{'_',on_tc_skip,[ct_cth_empty_SUITE,end_per_suite,{tc_auto_skip,'_'},[]]}}, {?eh,test_done,{'DEF','STOP_TIME'}}, {?eh,cth, {'_',terminate,[[]]}}, @@ -754,10 +800,10 @@ test_events(skip_pre_suite_cth) -> {?eh,cth,{'_',post_init_per_suite,[ct_cth_empty_SUITE,'$proplist',{skip,"Test skip"},[]]}}, {?eh,tc_done,{ct_cth_empty_SUITE,init_per_suite,{skipped,"Test skip"}}}, {?eh,cth,{'_',on_tc_skip, - [init_per_suite,{tc_user_skip,{skipped,"Test skip"}},[]]}}, + [ct_cth_empty_SUITE,init_per_suite,{tc_user_skip,"Test skip"},[]]}}, {?eh,tc_user_skip,{ct_cth_empty_SUITE,test_case,"Test skip"}}, - {?eh,cth,{'_',on_tc_skip,[test_case,{tc_user_skip,"Test skip"},[]]}}, + {?eh,cth,{'_',on_tc_skip,[ct_cth_empty_SUITE,test_case,{tc_user_skip,"Test skip"},[]]}}, {?eh,tc_user_skip, {ct_cth_empty_SUITE, end_per_suite,"Test skip"}}, @@ -776,27 +822,29 @@ test_events(skip_pre_end_cth) -> [{?eh,tc_start,{ct_scope_per_group_cth_SUITE,{init_per_group,group1,[]}}}, {?eh,cth,{'_',id,[[]]}}, {?eh,cth,{'_',init,['_',[]]}}, - {?eh,cth,{'_',post_init_per_group,[group1,'$proplist','$proplist',[]]}}, + {?eh,cth,{'_',post_init_per_group,[ct_scope_per_group_cth_SUITE,group1,'$proplist','$proplist',[]]}}, {?eh,tc_done,{ct_scope_per_group_cth_SUITE,{init_per_group,group1,[]},ok}}, {?eh,tc_start,{ct_scope_per_group_cth_SUITE,test_case}}, - {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}}, - {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}}, + {?eh,cth,{'_',pre_init_per_testcase,[ct_scope_per_group_cth_SUITE,test_case,'$proplist',[]]}}, + {?eh,cth,{'_',post_end_per_testcase,[ct_scope_per_group_cth_SUITE,test_case,'$proplist',ok,[]]}}, {?eh,tc_done,{ct_scope_per_group_cth_SUITE,test_case,ok}}, {?eh,tc_start,{ct_scope_per_group_cth_SUITE,{end_per_group,group1,[]}}}, - {?eh,cth,{'_',pre_end_per_group,[group1,'$proplist',[]]}}, - {?eh,cth,{'_',post_end_per_group,[group1,'$proplist','_',[]]}}, + {?eh,cth,{'_',pre_end_per_group,[ct_scope_per_group_cth_SUITE,group1,'$proplist',[]]}}, + {?eh,cth,{'_',post_end_per_group,[ct_scope_per_group_cth_SUITE,group1,'$proplist','_',[]]}}, {?eh,tc_done,{ct_scope_per_group_cth_SUITE,{end_per_group,group1,[]}, {skipped,"Test skip"}}}], - {?eh,cth,{'_',on_tc_skip,[{end_per_group,group1}, - {tc_user_skip,{skipped,"Test skip"}}, + {?eh,cth,{'_',on_tc_skip,[ct_scope_per_group_cth_SUITE, + {end_per_group,group1}, + {tc_user_skip,"Test skip"}, []]}}, {?eh,tc_start,{ct_scope_per_group_cth_SUITE,end_per_suite}}, {?eh,tc_done,{ct_scope_per_group_cth_SUITE,end_per_suite, {skipped,"Test skip"}}}, - {?eh,cth,{'_',on_tc_skip,[end_per_suite, - {tc_user_skip,{skipped,"Test skip"}}, + {?eh,cth,{'_',on_tc_skip,[ct_scope_per_group_cth_SUITE, + end_per_suite, + {tc_user_skip,"Test skip"}, []]}}, {?eh,test_done,{'DEF','STOP_TIME'}}, {?eh,cth,{'_',terminate,[[]]}}, @@ -814,10 +862,10 @@ test_events(skip_post_suite_cth) -> {?eh,cth,{'_',post_init_per_suite,[ct_cth_empty_SUITE,'$proplist','$proplist',[]]}}, {?eh,tc_done,{ct_cth_empty_SUITE,init_per_suite,{skipped,"Test skip"}}}, {?eh,cth,{'_',on_tc_skip, - [init_per_suite,{tc_user_skip,{skipped,"Test skip"}},[]]}}, + [ct_cth_empty_SUITE,init_per_suite,{tc_user_skip,"Test skip"},[]]}}, {?eh,tc_user_skip,{ct_cth_empty_SUITE,test_case,"Test skip"}}, - {?eh,cth,{'_',on_tc_skip,[test_case,{tc_user_skip,"Test skip"},[]]}}, + {?eh,cth,{'_',on_tc_skip,[ct_cth_empty_SUITE,test_case,{tc_user_skip,"Test skip"},[]]}}, {?eh,tc_user_skip, {ct_cth_empty_SUITE, end_per_suite,"Test 
skip"}}, @@ -826,6 +874,41 @@ test_events(skip_post_suite_cth) -> {?eh,stop_logging,[]} ]; +test_events(skip_pre_init_tc_cth) -> + [ + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,cth,{empty_cth,init,['_',[]]}}, + {?eh,start_info,{1,1,1}}, + {?eh,tc_start,{ct_cth_empty_SUITE,init_per_suite}}, + {?eh,cth,{empty_cth,pre_init_per_suite,[ct_cth_empty_SUITE,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_init_per_suite, + [ct_cth_empty_SUITE,'$proplist','$proplist',[]]}}, + {?eh,tc_done,{ct_cth_empty_SUITE,init_per_suite,ok}}, + {?eh,tc_start,{ct_cth_empty_SUITE,test_case}}, + {?eh,cth,{empty_cth,pre_init_per_testcase, + [ct_cth_empty_SUITE,test_case,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_init_per_testcase, + [ct_cth_empty_SUITE,test_case,'$proplist', + {skip,"Skipped in pre_init_per_testcase"}, + []]}}, + {?eh,tc_done,{ct_cth_empty_SUITE,test_case, + {skipped,"Skipped in pre_init_per_testcase"}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [ct_cth_empty_SUITE,test_case, + {tc_user_skip,"Skipped in pre_init_per_testcase"}, + []]}}, + {?eh,test_stats,{0,0,{1,0}}}, + {?eh,tc_start,{ct_cth_empty_SUITE,end_per_suite}}, + {?eh,cth,{empty_cth,pre_end_per_suite,[ct_cth_empty_SUITE,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_end_per_suite, + [ct_cth_empty_SUITE,'$proplist',ok,[]]}}, + {?eh,tc_done,{ct_cth_empty_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,cth,{empty_cth,terminate,[[]]}}, + {?eh,stop_logging,[]} + ]; + test_events(recover_post_suite_cth) -> Suite = ct_cth_fail_per_suite_SUITE, [ @@ -840,9 +923,9 @@ test_events(recover_post_suite_cth) -> {?eh,tc_start,{Suite,test_case}}, {?eh,cth,{'_',pre_init_per_testcase, - [test_case, not_contains([tc_status]),[]]}}, + [Suite,test_case, not_contains([tc_status]),[]]}}, {?eh,cth,{'_',post_end_per_testcase, - [test_case, contains([tc_status]),'_',[]]}}, + [Suite,test_case, contains([tc_status]),'_',[]]}}, {?eh,tc_done,{Suite,test_case,ok}}, {?eh,tc_start,{Suite,end_per_suite}}, @@ -876,13 +959,15 @@ test_events(update_config_cth) -> {?eh,tc_start,{ct_update_config_SUITE, {init_per_group,group1,[]}}}, {?eh,cth,{'_',pre_init_per_group, - [group1,contains( + [ct_update_config_SUITE, + group1,contains( [post_init_per_suite, init_per_suite, pre_init_per_suite]), []]}}, {?eh,cth,{'_',post_init_per_group, - [group1, + [ct_update_config_SUITE, + group1, contains( [post_init_per_suite, init_per_suite, @@ -898,7 +983,8 @@ test_events(update_config_cth) -> {?eh,tc_start,{ct_update_config_SUITE,test_case}}, {?eh,cth,{'_',pre_init_per_testcase, - [test_case,contains( + [ct_update_config_SUITE, + test_case,contains( [post_init_per_group, init_per_group, pre_init_per_group, @@ -907,7 +993,8 @@ test_events(update_config_cth) -> pre_init_per_suite]), []]}}, {?eh,cth,{'_',post_end_per_testcase, - [test_case,contains( + [ct_update_config_SUITE, + test_case,contains( [init_per_testcase, pre_init_per_testcase, post_init_per_group, @@ -921,7 +1008,8 @@ test_events(update_config_cth) -> {?eh,tc_start,{ct_update_config_SUITE, {end_per_group,group1,[]}}}, {?eh,cth,{'_',pre_end_per_group, - [group1,contains( + [ct_update_config_SUITE, + group1,contains( [post_init_per_group, init_per_group, pre_init_per_group, @@ -930,7 +1018,8 @@ test_events(update_config_cth) -> pre_init_per_suite]), []]}}, {?eh,cth,{'_',post_end_per_group, - [group1, + [ct_update_config_SUITE, + group1, contains( [pre_end_per_group, post_init_per_group, @@ -1018,8 +1107,8 @@ test_events(options_cth) -> 
{?eh,tc_done,{ct_cth_empty_SUITE,init_per_suite,ok}}, {?eh,tc_start,{ct_cth_empty_SUITE,test_case}}, - {?eh,cth,{empty_cth,pre_init_per_testcase,[test_case,'$proplist',[test]]}}, - {?eh,cth,{empty_cth,post_end_per_testcase,[test_case,'$proplist','_',[test]]}}, + {?eh,cth,{empty_cth,pre_init_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',[test]]}}, + {?eh,cth,{empty_cth,post_end_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist','_',[test]]}}, {?eh,tc_done,{ct_cth_empty_SUITE,test_case,ok}}, {?eh,tc_start,{ct_cth_empty_SUITE,end_per_suite}}, @@ -1051,12 +1140,12 @@ test_events(same_id_cth) -> {?eh,tc_done,{ct_cth_empty_SUITE,init_per_suite,ok}}}, {?eh,tc_start,{ct_cth_empty_SUITE,test_case}}, - {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}}, + {?eh,cth,{'_',pre_init_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',[]]}}, {negative, - {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}}, - {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}}}, + {?eh,cth,{'_',pre_init_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',[]]}}, + {?eh,cth,{'_',post_end_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',ok,[]]}}}, {negative, - {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}}, + {?eh,cth,{'_',post_end_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',ok,[]]}}, {?eh,tc_done,{ct_cth_empty_SUITE,test_case,ok}}}, {?eh,tc_start,{ct_cth_empty_SUITE,end_per_suite}}, @@ -1094,11 +1183,13 @@ test_events(fail_n_skip_with_minimal_cth) -> {?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,test_case2,{skipped,"skip it"}}}, {?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,test_case3}}, {?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,test_case3,{skipped,"skip it"}}}, - {?eh,cth,{empty_cth,on_tc_skip,[{test_case2,group2}, - {tc_user_skip,{skipped,"skip it"}}, + {?eh,cth,{empty_cth,on_tc_skip,[ct_cth_fail_one_skip_one_SUITE, + {test_case2,group2}, + {tc_user_skip,"skip it"}, []]}}, - {?eh,cth,{empty_cth,on_tc_skip,[{test_case3,group2}, - {tc_user_skip,{skipped,"skip it"}}, + {?eh,cth,{empty_cth,on_tc_skip,[ct_cth_fail_one_skip_one_SUITE, + {test_case3,group2}, + {tc_user_skip,"skip it"}, []]}}, {?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,{end_per_group, group2,[parallel]}}}, @@ -1115,13 +1206,24 @@ test_events(fail_n_skip_with_minimal_cth) -> ]; test_events(prio_cth) -> - GenPre = fun(Func,States) -> - [{?eh,cth,{'_',Func,['_','_',State]}} || State <- States] + GenPre = fun(Func,States) when Func==pre_init_per_suite; + Func==pre_end_per_suite -> + [{?eh,cth,{'_',Func,['_','_',State]}} || + State <- States]; + (Func,States) -> + [{?eh,cth,{'_',Func,['_','_','_',State]}} || + State <- States] end, - GenPost = fun(Func,States) -> - [{?eh,cth,{'_',Func,['_','_','_',State]}} || State <- States] - end, + GenPost = fun(Func,States) when Func==post_init_per_suite; + Func==post_end_per_suite -> + [{?eh,cth,{'_',Func,['_','_','_',State]}} || + State <- States]; + (Func,States) -> + [{?eh,cth,{'_',Func,['_','_','_','_',State]}} || + State <- States] + + end, [{?eh,start_logging,{'DEF','RUNDIR'}}, {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}] ++ @@ -1197,30 +1299,30 @@ test_events(no_config) -> {?eh,tc_done,{ct_framework,init_per_suite,ok}}, {?eh,tc_start,{ct_no_config_SUITE,test_case_1}}, {?eh,cth,{empty_cth,pre_init_per_testcase, - [test_case_1,'$proplist',[]]}}, + [ct_no_config_SUITE,test_case_1,'$proplist',[]]}}, {?eh,cth,{empty_cth,post_end_per_testcase, - [test_case_1,'$proplist',ok,[]]}}, + 
[ct_no_config_SUITE,test_case_1,'$proplist',ok,[]]}}, {?eh,tc_done,{ct_no_config_SUITE,test_case_1,ok}}, {?eh,test_stats,{1,0,{0,0}}}, [{?eh,tc_start,{ct_framework,{init_per_group,test_group,'$proplist'}}}, {?eh,cth,{empty_cth,pre_init_per_group, - [test_group,'$proplist',[]]}}, + [ct_no_config_SUITE,test_group,'$proplist',[]]}}, {?eh,cth,{empty_cth,post_init_per_group, - [test_group,'$proplist','$proplist',[]]}}, + [ct_no_config_SUITE,test_group,'$proplist','$proplist',[]]}}, {?eh,tc_done,{ct_framework, {init_per_group,test_group,'$proplist'},ok}}, {?eh,tc_start,{ct_no_config_SUITE,test_case_2}}, {?eh,cth,{empty_cth,pre_init_per_testcase, - [test_case_2,'$proplist',[]]}}, + [ct_no_config_SUITE,test_case_2,'$proplist',[]]}}, {?eh,cth,{empty_cth,post_end_per_testcase, - [test_case_2,'$proplist',ok,[]]}}, + [ct_no_config_SUITE,test_case_2,'$proplist',ok,[]]}}, {?eh,tc_done,{ct_no_config_SUITE,test_case_2,ok}}, {?eh,test_stats,{2,0,{0,0}}}, {?eh,tc_start,{ct_framework,{end_per_group,test_group,'$proplist'}}}, {?eh,cth,{empty_cth,pre_end_per_group, - [test_group,'$proplist',[]]}}, + [ct_no_config_SUITE,test_group,'$proplist',[]]}}, {?eh,cth,{empty_cth,post_end_per_group, - [test_group,'$proplist',ok,[]]}}, + [ct_no_config_SUITE,test_group,'$proplist',ok,[]]}}, {?eh,tc_done,{ct_framework,{end_per_group,test_group,'$proplist'},ok}}], {?eh,tc_start,{ct_framework,end_per_suite}}, {?eh,cth,{empty_cth,pre_end_per_suite, @@ -1233,6 +1335,166 @@ test_events(no_config) -> {?eh,stop_logging,[]} ]; +test_events(no_init_suite_config) -> + [ + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,cth,{empty_cth,init,[{'_','_','_'},[]]}}, + {?eh,start_info,{1,1,1}}, + {?eh,tc_start,{ct_no_init_suite_config_SUITE,init_per_suite}}, + {?eh,cth,{empty_cth,pre_init_per_suite, + [ct_no_init_suite_config_SUITE,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_init_per_suite, + [ct_no_init_suite_config_SUITE,'$proplist','_',[]]}}, + {?eh,tc_done,{ct_no_init_suite_config_SUITE,init_per_suite, + {failed,{error,{undef,'_'}}}}}, + {?eh,cth,{empty_cth,on_tc_fail,[ct_no_init_suite_config_SUITE, + init_per_suite, + {undef,'_'},[]]}}, + {?eh,tc_auto_skip,{ct_no_init_suite_config_SUITE,test_case, + {failed,{ct_no_init_suite_config_SUITE,init_per_suite, + {'EXIT',{undef,'_'}}}}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [ct_no_init_suite_config_SUITE, + test_case, + {tc_auto_skip, + {failed,{ct_no_init_suite_config_SUITE,init_per_suite, + {'EXIT',{undef,'_'}}}}}, + []]}}, + {?eh,test_stats,{0,0,{0,1}}}, + {?eh,tc_auto_skip,{ct_no_init_suite_config_SUITE,end_per_suite, + {failed,{ct_no_init_suite_config_SUITE,init_per_suite, + {'EXIT',{undef,'_'}}}}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [ct_no_init_suite_config_SUITE, + end_per_suite, + {tc_auto_skip, + {failed,{ct_no_init_suite_config_SUITE,init_per_suite, + {'EXIT',{undef,'_'}}}}}, + []]}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,cth,{empty_cth,terminate,[[]]}}, + {?eh,stop_logging,[]} + ]; + +test_events(no_init_config) -> + [ + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,cth,{empty_cth,init,[{'_','_','_'},[]]}}, + {?eh,start_info,{1,1,2}}, + {?eh,tc_start,{ct_no_init_config_SUITE,init_per_suite}}, + {?eh,cth,{empty_cth,pre_init_per_suite, + [ct_no_init_config_SUITE,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_init_per_suite, + [ct_no_init_config_SUITE,'$proplist','$proplist',[]]}}, + {?eh,tc_done,{ct_no_init_config_SUITE,init_per_suite,ok}}, + 
{?eh,tc_start,{ct_no_init_config_SUITE,test_case_1}}, + {?eh,cth,{empty_cth,pre_init_per_testcase, + [ct_no_init_config_SUITE,test_case_1,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_end_per_testcase, + [ct_no_init_config_SUITE,test_case_1,'$proplist',ok,[]]}}, + {?eh,tc_done,{ct_no_init_config_SUITE,test_case_1,ok}}, + {?eh,test_stats,{1,0,{0,0}}}, + [{?eh,tc_start,{ct_no_init_config_SUITE,{init_per_group,test_group,[]}}}, + {?eh,cth,{empty_cth,pre_init_per_group, + [ct_no_init_config_SUITE,test_group,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_init_per_group, + [ct_no_init_config_SUITE,test_group,'$proplist','_',[]]}}, + {?eh,tc_done,{ct_no_init_config_SUITE,{init_per_group,test_group,[]}, + {failed,{error,{undef,'_'}}}}}, + {?eh,cth,{empty_cth,on_tc_fail,[ct_no_init_config_SUITE, + {init_per_group,test_group}, + {undef,'_'},[]]}}, + {?eh,tc_auto_skip,{ct_no_init_config_SUITE,{test_case_2,test_group}, + {failed,{ct_no_init_config_SUITE,init_per_group, + {'EXIT',{undef,'_'}}}}}}, + {?eh,cth,{empty_cth,on_tc_skip,[ct_no_init_config_SUITE, + {test_case_2,test_group}, + {tc_auto_skip, + {failed, + {ct_no_init_config_SUITE,init_per_group, + {'EXIT',{undef,'_'}}}}}, + []]}}, + {?eh,test_stats,{1,0,{0,1}}}, + {?eh,tc_auto_skip,{ct_no_init_config_SUITE,{end_per_group,test_group}, + {failed,{ct_no_init_config_SUITE,init_per_group, + {'EXIT',{undef,'_'}}}}}}, + {?eh,cth,{empty_cth,on_tc_skip,[ct_no_init_config_SUITE, + {end_per_group,test_group}, + {tc_auto_skip, + {failed, + {ct_no_init_config_SUITE,init_per_group, + {'EXIT',{undef,'_'}}}}}, + []]}}], + {?eh,tc_start,{ct_no_init_config_SUITE,end_per_suite}}, + {?eh,cth,{empty_cth,pre_end_per_suite, + [ct_no_init_config_SUITE,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_end_per_suite, + [ct_no_init_config_SUITE,'$proplist',ok,[]]}}, + {?eh,tc_done,{ct_no_init_config_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,cth,{empty_cth,terminate,[[]]}}, + {?eh,stop_logging,[]} + ]; + +test_events(no_end_config) -> + [ + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,cth,{empty_cth,init,[{'_','_','_'},[]]}}, + {?eh,start_info,{1,1,2}}, + {?eh,tc_start,{ct_no_end_config_SUITE,init_per_suite}}, + {?eh,cth,{empty_cth,pre_init_per_suite, + [ct_no_end_config_SUITE,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_init_per_suite, + [ct_no_end_config_SUITE,'$proplist','$proplist',[]]}}, + {?eh,tc_done,{ct_no_end_config_SUITE,init_per_suite,ok}}, + {?eh,tc_start,{ct_no_end_config_SUITE,test_case_1}}, + {?eh,cth,{empty_cth,pre_init_per_testcase, + [ct_no_end_config_SUITE,test_case_1,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_end_per_testcase, + [ct_no_end_config_SUITE,test_case_1,'$proplist',ok,[]]}}, + {?eh,tc_done,{ct_no_end_config_SUITE,test_case_1,ok}}, + {?eh,test_stats,{1,0,{0,0}}}, + [{?eh,tc_start,{ct_no_end_config_SUITE, + {init_per_group,test_group,'$proplist'}}}, + {?eh,cth,{empty_cth,pre_init_per_group, + [ct_no_end_config_SUITE,test_group,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_init_per_group, + [ct_no_end_config_SUITE,test_group,'$proplist','$proplist',[]]}}, + {?eh,tc_done,{ct_no_end_config_SUITE, + {init_per_group,test_group,'$proplist'},ok}}, + {?eh,tc_start,{ct_no_end_config_SUITE,test_case_2}}, + {?eh,cth,{empty_cth,pre_init_per_testcase, + [ct_no_end_config_SUITE,test_case_2,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_end_per_testcase, + [ct_no_end_config_SUITE,test_case_2,'$proplist',ok,[]]}}, + {?eh,tc_done,{ct_no_end_config_SUITE,test_case_2,ok}}, + 
{?eh,test_stats,{2,0,{0,0}}}, + {?eh,tc_start,{ct_no_end_config_SUITE, + {end_per_group,test_group,'$proplist'}}}, + {?eh,cth,{empty_cth,pre_end_per_group, + [ct_no_end_config_SUITE,test_group,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_end_per_group, + [ct_no_end_config_SUITE,test_group,'$proplist','_',[]]}}, + {?eh,tc_done,{ct_no_end_config_SUITE,{end_per_group,test_group,[]}, + {failed,{error,{undef,'_'}}}}}, + {?eh,cth,{empty_cth,on_tc_fail,[ct_no_end_config_SUITE, + {end_per_group,test_group}, + {undef,'_'},[]]}}], + {?eh,tc_start,{ct_no_end_config_SUITE,end_per_suite}}, + {?eh,cth,{empty_cth,pre_end_per_suite, + [ct_no_end_config_SUITE,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_end_per_suite, + [ct_no_end_config_SUITE,'$proplist','_',[]]}}, + {?eh,tc_done,{ct_no_end_config_SUITE,end_per_suite, + {failed,{error,{undef,'_'}}}}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,cth,{empty_cth,terminate,[[]]}}, + {?eh,stop_logging,[]} + ]; + test_events(data_dir) -> [ {?eh,start_logging,{'DEF','RUNDIR'}}, @@ -1247,30 +1509,30 @@ test_events(data_dir) -> {?eh,tc_done,{ct_framework,init_per_suite,ok}}, {?eh,tc_start,{ct_data_dir_SUITE,test_case_1}}, {?eh,cth,{empty_cth,pre_init_per_testcase, - [test_case_1,'$proplist',[{data_dir_name,"ct_data_dir_SUITE_data"}]]}}, + [ct_data_dir_SUITE,test_case_1,'$proplist',[{data_dir_name,"ct_data_dir_SUITE_data"}]]}}, {?eh,cth,{empty_cth,post_end_per_testcase, - [test_case_1,'$proplist',ok,[{data_dir_name,"ct_data_dir_SUITE_data"}]]}}, + [ct_data_dir_SUITE,test_case_1,'$proplist',ok,[{data_dir_name,"ct_data_dir_SUITE_data"}]]}}, {?eh,tc_done,{ct_data_dir_SUITE,test_case_1,ok}}, {?eh,test_stats,{1,0,{0,0}}}, [{?eh,tc_start,{ct_framework,{init_per_group,test_group,'$proplist'}}}, {?eh,cth,{empty_cth,pre_init_per_group, - [test_group,'$proplist',[{data_dir_name,"ct_data_dir_SUITE_data"}]]}}, + [ct_data_dir_SUITE,test_group,'$proplist',[{data_dir_name,"ct_data_dir_SUITE_data"}]]}}, {?eh,cth,{empty_cth,post_init_per_group, - [test_group,'$proplist','$proplist',[{data_dir_name,"ct_data_dir_SUITE_data"}]]}}, + [ct_data_dir_SUITE,test_group,'$proplist','$proplist',[{data_dir_name,"ct_data_dir_SUITE_data"}]]}}, {?eh,tc_done,{ct_framework, {init_per_group,test_group,'$proplist'},ok}}, {?eh,tc_start,{ct_data_dir_SUITE,test_case_2}}, {?eh,cth,{empty_cth,pre_init_per_testcase, - [test_case_2,'$proplist',[{data_dir_name,"ct_data_dir_SUITE_data"}]]}}, + [ct_data_dir_SUITE,test_case_2,'$proplist',[{data_dir_name,"ct_data_dir_SUITE_data"}]]}}, {?eh,cth,{empty_cth,post_end_per_testcase, - [test_case_2,'$proplist',ok,[{data_dir_name,"ct_data_dir_SUITE_data"}]]}}, + [ct_data_dir_SUITE,test_case_2,'$proplist',ok,[{data_dir_name,"ct_data_dir_SUITE_data"}]]}}, {?eh,tc_done,{ct_data_dir_SUITE,test_case_2,ok}}, {?eh,test_stats,{2,0,{0,0}}}, {?eh,tc_start,{ct_framework,{end_per_group,test_group,'$proplist'}}}, {?eh,cth,{empty_cth,pre_end_per_group, - [test_group,'$proplist',[{data_dir_name,"ct_data_dir_SUITE_data"}]]}}, + [ct_data_dir_SUITE,test_group,'$proplist',[{data_dir_name,"ct_data_dir_SUITE_data"}]]}}, {?eh,cth,{empty_cth,post_end_per_group, - [test_group,'$proplist',ok,[{data_dir_name,"ct_data_dir_SUITE_data"}]]}}, + [ct_data_dir_SUITE,test_group,'$proplist',ok,[{data_dir_name,"ct_data_dir_SUITE_data"}]]}}, {?eh,tc_done,{ct_framework,{end_per_group,test_group,'$proplist'},ok}}], {?eh,tc_start,{ct_framework,end_per_suite}}, {?eh,cth,{empty_cth,pre_end_per_suite, @@ -1303,6 +1565,645 @@ test_events(cth_log) -> {?eh,stop_logging,[]} ]; +test_events(fallback) -> + [ + 
{?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,cth,{empty_cth,id,[[]]}}, + {?eh,cth,{empty_cth,init,[{'_','_','_'},[]]}}, + {?eh,tc_start,{all_hook_callbacks_SUITE,init_per_suite}}, + {?eh,cth,{empty_cth,pre_init_per_suite, + [all_hook_callbacks_SUITE,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_init_per_suite, + [all_hook_callbacks_SUITE,'$proplist','$proplist',[]]}}, + {?eh,tc_done,{all_hook_callbacks_SUITE,init_per_suite,ok}}, + + [{?eh,tc_start,{ct_framework,{init_per_group,test_group,'$proplist'}}}, + {?eh,cth,{empty_cth,pre_init_per_group, + [fallback_nosuite,test_group,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_init_per_group, + [fallback_nosuite,test_group,'$proplist','$proplist',[]]}}, + {?eh,tc_done,{ct_framework, + {init_per_group,test_group,'$proplist'},ok}}, + {?eh,tc_start,{all_hook_callbacks_SUITE,test_case}}, + {?eh,cth,{empty_cth,pre_init_per_testcase, + [fallback_nosuite,test_case,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_end_per_testcase, + [fallback_nosuite,test_case,'$proplist',ok,[]]}}, + {?eh,tc_done,{all_hook_callbacks_SUITE,test_case,ok}}, + {?eh,test_stats,{1,0,{0,0}}}, + {?eh,tc_start,{ct_framework,{end_per_group,test_group,'$proplist'}}}, + {?eh,cth,{empty_cth,pre_end_per_group, + [fallback_nosuite,test_group,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_end_per_group, + [fallback_nosuite,test_group,'$proplist',ok,[]]}}, + {?eh,tc_done,{ct_framework,{end_per_group,test_group,'$proplist'},ok}}], + {?eh,tc_start,{all_hook_callbacks_SUITE,test_case}}, + {?eh,cth,{empty_cth,pre_init_per_testcase, + [fallback_nosuite,test_case,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_init_per_testcase, + [fallback_nosuite,test_case,'$proplist','_',[]]}}, + {?eh,cth,{empty_cth,pre_end_per_testcase, + [fallback_nosuite,test_case,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_end_per_testcase, + [fallback_nosuite,test_case,'$proplist','_',[]]}}, + {?eh,tc_done,{all_hook_callbacks_SUITE,test_case,ok}}, + {?eh,test_stats,{2,0,{0,0}}}, + {?eh,tc_start,{all_hook_callbacks_SUITE,skip_case}}, + {?eh,cth,{empty_cth,pre_init_per_testcase, + [fallback_nosuite,skip_case,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_init_per_testcase, + [fallback_nosuite,skip_case,'$proplist', + {skip,"Skipped in init_per_testcase/2"},[]]}}, + {?eh,tc_done,{all_hook_callbacks_SUITE,skip_case, + {skipped,"Skipped in init_per_testcase/2"}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [fallback_nosuite,skip_case, + {tc_user_skip,"Skipped in init_per_testcase/2"}, + []]}}, + {?eh,test_stats,{2,0,{1,0}}}, + {?eh,tc_start,{all_hook_callbacks_SUITE,end_per_suite}}, + {?eh,cth,{empty_cth,pre_end_per_suite, + [all_hook_callbacks_SUITE,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_end_per_suite, + [all_hook_callbacks_SUITE,'$proplist','_',[]]}}, + {?eh,tc_done,{all_hook_callbacks_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,cth,{empty_cth,terminate,[[]]}}, + {?eh,stop_logging,[]} + ]; + +test_events(callbacks_on_skip) -> + %% skip_cth.erl will send a 'cth_error' event if a hook is + %% erroneously called. Therefore, all Events are changed to + %% {negative,{?eh,cth_error,'_'},Event} + %% at the end of this function. 
+ Events = + [ + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,cth,{empty_cth,id,[[]]}}, + {?eh,cth,{empty_cth,init,[{'_','_','_'},[]]}}, + {?eh,start_info,{6,6,15}}, + + %% all_hook_callbacks_SUITE is skipped in spec + %% Only the on_tc_skip callback shall be called + {?eh,tc_user_skip,{all_hook_callbacks_SUITE,all,"Skipped in spec"}}, + {?eh,cth,{empty_cth,on_tc_skip, + [all_hook_callbacks_SUITE,all, + {tc_user_skip,"Skipped in spec"}, + []]}}, + {?eh,test_stats,{0,0,{1,0}}}, + + %% skip_init_SUITE is skipped in its init_per_suite function + %% No group- or testcase-functions shall be called. + {?eh,tc_start,{skip_init_SUITE,init_per_suite}}, + {?eh,cth,{empty_cth,pre_init_per_suite, + [skip_init_SUITE, + '$proplist', + []]}}, + {?eh,cth,{empty_cth,post_init_per_suite, + [skip_init_SUITE, + '$proplist', + {skip,"Skipped in init_per_suite/1"}, + []]}}, + {?eh,tc_done,{skip_init_SUITE,init_per_suite, + {skipped,"Skipped in init_per_suite/1"}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_init_SUITE,init_per_suite, + {tc_user_skip,"Skipped in init_per_suite/1"}, + []]}}, + {?eh,tc_user_skip,{skip_init_SUITE,test_case,"Skipped in init_per_suite/1"}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_init_SUITE,test_case, + {tc_user_skip,"Skipped in init_per_suite/1"}, + []]}}, + {?eh,test_stats,{0,0,{2,0}}}, + {?eh,tc_user_skip,{skip_init_SUITE,end_per_suite, + "Skipped in init_per_suite/1"}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_init_SUITE,end_per_suite, + {tc_user_skip,"Skipped in init_per_suite/1"}, + []]}}, + + %% skip_req_SUITE is auto-skipped since a 'require' statement + %% returned by suite/0 is not fulfilled. + %% No group- or testcase-functions shall be called. + {?eh,tc_start,{skip_req_SUITE,init_per_suite}}, + {?eh,tc_done,{skip_req_SUITE,init_per_suite, + {auto_skipped,{require_failed_in_suite0, + {not_available,whatever}}}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_req_SUITE,init_per_suite, + {tc_auto_skip,{require_failed_in_suite0, + {not_available,whatever}}}, + []]}}, + {?eh,tc_auto_skip,{skip_req_SUITE,test_case,{require_failed_in_suite0, + {not_available,whatever}}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_req_SUITE,test_case, + {tc_auto_skip,{require_failed_in_suite0, + {not_available,whatever}}}, + []]}}, + {?eh,test_stats,{0,0,{2,1}}}, + {?eh,tc_auto_skip,{skip_req_SUITE,end_per_suite, + {require_failed_in_suite0, + {not_available,whatever}}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_req_SUITE,end_per_suite, + {tc_auto_skip,{require_failed_in_suite0, + {not_available,whatever}}}, + []]}}, + + %% skip_fail_SUITE is auto-skipped since the suite/0 function + %% retuns a faluty format. + %% No group- or testcase-functions shall be called. 
+ {?eh,tc_start,{skip_fail_SUITE,init_per_suite}}, + {?eh,tc_done,{skip_fail_SUITE,init_per_suite, + {failed,{error,{suite0_failed,bad_return_value}}}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_fail_SUITE,init_per_suite, + {tc_auto_skip, + {failed,{error,{suite0_failed,bad_return_value}}}}, + []]}}, + {?eh,tc_auto_skip,{skip_fail_SUITE,test_case, + {failed,{error,{suite0_failed,bad_return_value}}}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_fail_SUITE,test_case, + {tc_auto_skip, + {failed,{error,{suite0_failed,bad_return_value}}}}, + []]}}, + {?eh,test_stats,{0,0,{2,2}}}, + {?eh,tc_auto_skip,{skip_fail_SUITE,end_per_suite, + {failed,{error,{suite0_failed,bad_return_value}}}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_fail_SUITE,end_per_suite, + {tc_auto_skip, + {failed,{error,{suite0_failed,bad_return_value}}}}, + []]}}, + + %% skip_group_SUITE + {?eh,tc_start,{skip_group_SUITE,init_per_suite}}, + {?eh,cth,{empty_cth,pre_init_per_suite, + [skip_group_SUITE, + '$proplist', + []]}}, + {?eh,cth,{empty_cth,post_init_per_suite, + [skip_group_SUITE, + '$proplist', + '_', + []]}}, + {?eh,tc_done,{skip_group_SUITE,init_per_suite,ok}}, + + %% test_group_1 - auto_skip due to require failed + [{?eh,tc_start,{skip_group_SUITE,{init_per_group,test_group_1,[]}}}, + {?eh,tc_done, + {skip_group_SUITE,{init_per_group,test_group_1,[]}, + {auto_skipped,{require_failed,{not_available,whatever}}}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_group_SUITE, + {init_per_group,test_group_1}, + {tc_auto_skip,{require_failed,{not_available,whatever}}}, + []]}}, + {?eh,tc_auto_skip,{skip_group_SUITE,{test_case,test_group_1}, + {require_failed,{not_available,whatever}}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_group_SUITE, + {test_case,test_group_1}, + {tc_auto_skip,{require_failed,{not_available,whatever}}}, + []]}}, + {?eh,test_stats,{0,0,{2,3}}}, + {?eh,tc_auto_skip,{skip_group_SUITE,{end_per_group,test_group_1}, + {require_failed,{not_available,whatever}}}}], + %% The following appears to be outside of the group, but + %% that's only an implementation detail in + %% ct_test_support.erl - it does not know about events from + %% test suite specific hooks and regards the group ended with + %% the above tc_auto_skip-event for end_per_group. 
+ {?eh,cth,{empty_cth,on_tc_skip, + [skip_group_SUITE, + {end_per_group,test_group_1}, + {tc_auto_skip,{require_failed,{not_available,whatever}}}, + []]}}, + + %% test_group_2 - auto_skip due to failed return from group/1 + [{?eh,tc_start,{skip_group_SUITE,{init_per_group,test_group_2,[]}}}, + {?eh,tc_done, + {skip_group_SUITE,{init_per_group,test_group_2,[]}, + {auto_skipped,{group0_failed,bad_return_value}}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_group_SUITE, + {init_per_group,test_group_2}, + {tc_auto_skip,{group0_failed,bad_return_value}}, + []]}}, + {?eh,tc_auto_skip,{skip_group_SUITE,{test_case,test_group_2}, + {group0_failed,bad_return_value}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_group_SUITE, + {test_case,test_group_2}, + {tc_auto_skip,{group0_failed,bad_return_value}}, + []]}}, + {?eh,test_stats,{0,0,{2,4}}}, + {?eh,tc_auto_skip,{skip_group_SUITE,{end_per_group,test_group_2}, + {group0_failed,bad_return_value}}}], + {?eh,cth,{empty_cth,on_tc_skip, + [skip_group_SUITE, + {end_per_group,test_group_2}, + {tc_auto_skip,{group0_failed,bad_return_value}}, + []]}}, + %% test_group_3 - user_skip in init_per_group/2 + [{?eh,tc_start, + {skip_group_SUITE,{init_per_group,test_group_3,[]}}}, + {?eh,cth,{empty_cth,pre_init_per_group, + [skip_group_SUITE,test_group_3,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_init_per_group, + [skip_group_SUITE,test_group_3,'$proplist', + {skip,"Skipped in init_per_group/2"}, + []]}}, + {?eh,tc_done,{skip_group_SUITE, + {init_per_group,test_group_3,[]}, + {skipped,"Skipped in init_per_group/2"}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_group_SUITE, + {init_per_group,test_group_3}, + {tc_user_skip,"Skipped in init_per_group/2"}, + []]}}, + {?eh,tc_user_skip,{skip_group_SUITE, + {test_case,test_group_3}, + "Skipped in init_per_group/2"}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_group_SUITE, + {test_case,test_group_3}, + {tc_user_skip,"Skipped in init_per_group/2"}, + []]}}, + {?eh,test_stats,{0,0,{3,4}}}, + {?eh,tc_user_skip,{skip_group_SUITE, + {end_per_group,test_group_3}, + "Skipped in init_per_group/2"}}], + {?eh,cth,{empty_cth,on_tc_skip, + [skip_group_SUITE, + {end_per_group,test_group_3}, + {tc_user_skip,"Skipped in init_per_group/2"}, + []]}}, + + {?eh,tc_start,{skip_group_SUITE,end_per_suite}}, + {?eh,cth,{empty_cth,pre_end_per_suite, + [skip_group_SUITE, + '$proplist', + []]}}, + {?eh,cth,{empty_cth,post_end_per_suite, + [skip_group_SUITE, + '$proplist', + ok,[]]}}, + {?eh,tc_done,{skip_group_SUITE,end_per_suite,ok}}, + + + %% skip_case_SUITE has 4 test cases which are all skipped in + %% different ways + {?eh,tc_start,{skip_case_SUITE,init_per_suite}}, + {?eh,cth,{empty_cth,pre_init_per_suite, + [skip_case_SUITE, + '$proplist', + []]}}, + {?eh,cth,{empty_cth,post_init_per_suite, + [skip_case_SUITE, + '$proplist', + '_', + []]}}, + {?eh,tc_done,{skip_case_SUITE,init_per_suite,ok}}, + + %% Skip in spec -> only on_tc_skip shall be called + {?eh,tc_user_skip,{skip_case_SUITE,skip_in_spec,"Skipped in spec"}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_case_SUITE,skip_in_spec, + {tc_user_skip,"Skipped in spec"}, + []]}}, + {?eh,test_stats,{0,0,{4,4}}}, + + %% Skip in init_per_testcase -> pre/post_end_per_testcase + %% shall not be called + {?eh,tc_start,{skip_case_SUITE,skip_in_init}}, + {?eh,cth,{empty_cth,pre_init_per_testcase, + [skip_case_SUITE,skip_in_init, + '$proplist', + []]}}, + {?eh,cth,{empty_cth,post_init_per_testcase, + [skip_case_SUITE,skip_in_init, + '$proplist', + {skip,"Skipped in init_per_testcase/2"}, + []]}}, + 
{?eh,tc_done,{skip_case_SUITE,skip_in_init, + {skipped,"Skipped in init_per_testcase/2"}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_case_SUITE,skip_in_init, + {tc_user_skip,"Skipped in init_per_testcase/2"}, + []]}}, + {?eh,test_stats,{0,0,{5,4}}}, + + %% Fail in init_per_testcase -> pre/post_end_per_testcase + %% shall not be called + {?eh,tc_start,{skip_case_SUITE,fail_in_init}}, + {?eh,cth,{empty_cth,pre_init_per_testcase, + [skip_case_SUITE,fail_in_init, + '$proplist', + []]}}, + {?eh,cth,{empty_cth,post_init_per_testcase, + [skip_case_SUITE,fail_in_init, + '$proplist', + {skip,{failed,'_'}}, + []]}}, + {?eh,tc_done,{skip_case_SUITE,fail_in_init, + {auto_skipped,{failed,'_'}}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_case_SUITE,fail_in_init, + {tc_auto_skip,{failed,'_'}}, + []]}}, + {?eh,test_stats,{0,0,{5,5}}}, + + %% Exit in init_per_testcase -> pre/post_end_per_testcase + %% shall not be called + {?eh,tc_start,{skip_case_SUITE,exit_in_init}}, + {?eh,cth,{empty_cth,pre_init_per_testcase, + [skip_case_SUITE,exit_in_init, + '$proplist', + []]}}, + {?eh,cth,{empty_cth,post_init_per_testcase, + [skip_case_SUITE,exit_in_init, + '$proplist', + {skip,{failed,'_'}}, + []]}}, + {?eh,tc_done,{skip_case_SUITE,exit_in_init, + {auto_skipped,{failed,'_'}}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_case_SUITE,exit_in_init, + {tc_auto_skip,{failed,'_'}}, + []]}}, + {?eh,test_stats,{0,0,{5,6}}}, + + %% Fail in end_per_testcase -> all hooks shall be called and + %% test shall succeed. + {?eh,tc_start,{skip_case_SUITE,fail_in_end}}, + {?eh,cth,{empty_cth,pre_init_per_testcase, + [skip_case_SUITE,fail_in_end, + '$proplist', + []]}}, + {?eh,cth,{empty_cth,post_init_per_testcase, + [skip_case_SUITE,fail_in_end, + '$proplist', + ok, + []]}}, + {?eh,cth,{empty_cth,pre_end_per_testcase, + [skip_case_SUITE,fail_in_end, + '$proplist', + []]}}, + {?eh,cth,{empty_cth,post_end_per_testcase, + [skip_case_SUITE,fail_in_end, + '$proplist', + {failed, + {skip_case_SUITE,end_per_testcase, + {'EXIT', + {test_case_failed,"Failed in end_per_testcase/2"}}}}, + []]}}, + {?eh,tc_done,{skip_case_SUITE,fail_in_end, + {failed, + {skip_case_SUITE,end_per_testcase, + {'EXIT', + {test_case_failed,"Failed in end_per_testcase/2"}}}}}}, + {?eh,test_stats,{1,0,{5,6}}}, + + %% Exit in end_per_testcase -> all hooks shall be called and + %% test shall succeed. 
+ {?eh,tc_start,{skip_case_SUITE,exit_in_end}}, + {?eh,cth,{empty_cth,pre_init_per_testcase, + [skip_case_SUITE,exit_in_end, + '$proplist', + []]}}, + {?eh,cth,{empty_cth,post_init_per_testcase, + [skip_case_SUITE,exit_in_end, + '$proplist', + ok, + []]}}, + {?eh,cth,{empty_cth,pre_end_per_testcase, + [skip_case_SUITE,exit_in_end, + '$proplist', + []]}}, + {?eh,cth,{empty_cth,post_end_per_testcase, + [skip_case_SUITE,exit_in_end, + '$proplist', + {failed, + {skip_case_SUITE,end_per_testcase, + {'EXIT',"Exit in end_per_testcase/2"}}}, + []]}}, + {?eh,tc_done,{skip_case_SUITE,exit_in_end, + {failed, + {skip_case_SUITE,end_per_testcase, + {'EXIT',"Exit in end_per_testcase/2"}}}}}, + {?eh,test_stats,{2,0,{5,6}}}, + + %% Skip in testcase function -> all callbacks shall be called + {?eh,tc_start,{skip_case_SUITE,skip_in_case}}, + {?eh,cth,{empty_cth,pre_init_per_testcase, + [skip_case_SUITE,skip_in_case, + '$proplist', + []]}}, + {?eh,cth,{empty_cth,post_init_per_testcase, + [skip_case_SUITE,skip_in_case, + '$proplist', + ok,[]]}}, + {?eh,cth,{empty_cth,pre_end_per_testcase, + [skip_case_SUITE,skip_in_case, + '$proplist', + []]}}, + {?eh,cth,{empty_cth,post_end_per_testcase, + [skip_case_SUITE,skip_in_case, + '$proplist', + {skip,"Skipped in test case function"}, + []]}}, + {?eh,tc_done,{skip_case_SUITE,skip_in_case, + {skipped,"Skipped in test case function"}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_case_SUITE,skip_in_case, + {tc_user_skip,"Skipped in test case function"}, + []]}}, + {?eh,test_stats,{2,0,{6,6}}}, + + %% Auto skip due to failed 'require' -> only the on_tc_skip + %% callback shall be called + {?eh,tc_start,{skip_case_SUITE,req_auto_skip}}, + {?eh,tc_done,{skip_case_SUITE,req_auto_skip, + {auto_skipped,{require_failed,{not_available,whatever}}}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_case_SUITE,req_auto_skip, + {tc_auto_skip,{require_failed,{not_available,whatever}}}, + []]}}, + {?eh,test_stats,{2,0,{6,7}}}, + + %% Auto skip due to failed testcase/0 function -> only the + %% on_tc_skip callback shall be called + {?eh,tc_start,{skip_case_SUITE,fail_auto_skip}}, + {?eh,tc_done,{skip_case_SUITE,fail_auto_skip, + {auto_skipped,{testcase0_failed,bad_return_value}}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [skip_case_SUITE,fail_auto_skip, + {tc_auto_skip,{testcase0_failed,bad_return_value}}, + []]}}, + {?eh,test_stats,{2,0,{6,8}}}, + + {?eh,tc_start,{skip_case_SUITE,end_per_suite}}, + {?eh,cth,{empty_cth,pre_end_per_suite, + [skip_case_SUITE, + '$proplist', + []]}}, + {?eh,cth,{empty_cth,post_end_per_suite, + [skip_case_SUITE, + '$proplist', + ok,[]]}}, + {?eh,tc_done,{skip_case_SUITE,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,cth,{empty_cth,terminate,[[]]}}, + {?eh,stop_logging,[]} + ], + %% Make sure no 'cth_error' events are received! + [{negative,{?eh,cth_error,'_'},E} || E <- Events]; + +test_events(failed_sequence) -> + %% skip_cth.erl will send a 'cth_error' event if a hook is + %% erroneously called. Therefore, all Events are changed to + %% {negative,{?eh,cth_error,'_'},Event} + %% at the end of this function. 
+ Events = + [ + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,cth,{empty_cth,id,[[]]}}, + {?eh,cth,{empty_cth,init,[{'_','_','_'},[]]}}, + {?eh,start_info,{1,1,2}}, + {?eh,tc_start,{ct_framework,init_per_suite}}, + {?eh,cth,{empty_cth,pre_init_per_suite,[seq_SUITE,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_init_per_suite, + [seq_SUITE,'$proplist','$proplist',[]]}}, + {?eh,tc_done,{ct_framework,init_per_suite,ok}}, + {?eh,tc_start,{seq_SUITE,test_case_1}}, + {?eh,cth,{empty_cth,pre_init_per_testcase, + [seq_SUITE,test_case_1,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_init_per_testcase, + [seq_SUITE,test_case_1,'$proplist',ok,[]]}}, + {?eh,cth,{empty_cth,pre_end_per_testcase, + [seq_SUITE,test_case_1,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_end_per_testcase, + [seq_SUITE,test_case_1,'$proplist', + {error,failed_on_purpose},[]]}}, + {?eh,tc_done,{seq_SUITE,test_case_1,{failed,{error,failed_on_purpose}}}}, + {?eh,cth,{empty_cth,on_tc_fail, + [seq_SUITE,test_case_1,failed_on_purpose,[]]}}, + {?eh,test_stats,{0,1,{0,0}}}, + {?eh,tc_start,{seq_SUITE,test_case_2}}, + {?eh,tc_done,{seq_SUITE,test_case_2, + {auto_skipped,{sequence_failed,seq1,test_case_1}}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [seq_SUITE,test_case_2, + {tc_auto_skip,{sequence_failed,seq1,test_case_1}}, + []]}}, + {?eh,test_stats,{0,1,{0,1}}}, + {?eh,tc_start,{ct_framework,end_per_suite}}, + {?eh,cth,{empty_cth,pre_end_per_suite,[seq_SUITE,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_end_per_suite,[seq_SUITE,'$proplist',ok,[]]}}, + {?eh,tc_done,{ct_framework,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,cth,{empty_cth,terminate,[[]]}}, + {?eh,stop_logging,[]} + ], + %% Make sure no 'cth_error' events are received! + [{negative,{?eh,cth_error,'_'},E} || E <- Events]; + +test_events(repeat_force_stop) -> + %% skip_cth.erl will send a 'cth_error' event if a hook is + %% erroneously called. Therefore, all Events are changed to + %% {negative,{?eh,cth_error,'_'},Event} + %% at the end of this function. 
+ Events= + [ + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,cth,{empty_cth,id,[[]]}}, + {?eh,cth,{empty_cth,init,[{'_','_','_'},[]]}}, + {?eh,start_info,{1,1,2}}, + {?eh,tc_start,{ct_framework,init_per_suite}}, + {?eh,cth,{empty_cth,pre_init_per_suite,[repeat_SUITE,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_init_per_suite, + [repeat_SUITE,'$proplist','$proplist',[]]}}, + {?eh,tc_done,{ct_framework,init_per_suite,ok}}, + {?eh,tc_start,{repeat_SUITE,test_case_1}}, + {?eh,cth,{empty_cth,pre_init_per_testcase, + [repeat_SUITE,test_case_1,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_init_per_testcase, + [repeat_SUITE,test_case_1,'$proplist',ok,[]]}}, + {?eh,cth,{empty_cth,pre_end_per_testcase, + [repeat_SUITE,test_case_1,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_end_per_testcase, + [repeat_SUITE,test_case_1,'$proplist',ok,[]]}}, + {?eh,tc_done,{repeat_SUITE,test_case_1,ok}}, + {?eh,test_stats,{1,0,{0,0}}}, + {?eh,tc_start,{repeat_SUITE,test_case_2}}, + {?eh,tc_done,{repeat_SUITE,test_case_2, + {auto_skipped, + "Repeated test stopped by force_stop option"}}}, + {?eh,cth,{empty_cth,on_tc_skip, + [repeat_SUITE,test_case_2, + {tc_auto_skip,"Repeated test stopped by force_stop option"}, + []]}}, + {?eh,test_stats,{1,0,{0,1}}}, + {?eh,tc_start,{ct_framework,end_per_suite}}, + {?eh,cth,{empty_cth,pre_end_per_suite,[repeat_SUITE,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_end_per_suite, + [repeat_SUITE,'$proplist',ok,[]]}}, + {?eh,tc_done,{ct_framework,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,cth,{empty_cth,terminate,[[]]}}, + {?eh,stop_logging,[]} + ], + %% Make sure no 'cth_error' events are received! + [{negative,{?eh,cth_error,'_'},E} || E <- Events]; + +test_events(config_clash) -> + %% skip_cth.erl will send a 'cth_error' event if a hook is + %% erroneously called. Therefore, all Events are changed to + %% {negative,{?eh,cth_error,'_'},Event} + %% at the end of this function. + Events = + [ + {?eh,start_logging,{'DEF','RUNDIR'}}, + {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}, + {?eh,cth,{empty_cth,id,[[]]}}, + {?eh,cth,{empty_cth,init,[{'_','_','_'},[]]}}, + {?eh,start_info,{1,1,1}}, + {?eh,tc_start,{ct_framework,init_per_suite}}, + {?eh,cth,{empty_cth,pre_init_per_suite, + [config_clash_SUITE,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_init_per_suite, + [config_clash_SUITE,'$proplist','$proplist',[]]}}, + {?eh,tc_done,{ct_framework,init_per_suite,ok}}, + {?eh,tc_start,{config_clash_SUITE,test_case_1}}, + {?eh,tc_done,{config_clash_SUITE,test_case_1, + {failed,{error,{config_name_already_in_use,[aa]}}}}}, + {?eh,cth,{empty_cth,on_tc_fail, + [config_clash_SUITE,test_case_1, + {config_name_already_in_use,[aa]}, + []]}}, + {?eh,test_stats,{0,1,{0,0}}}, + {?eh,tc_start,{ct_framework,end_per_suite}}, + {?eh,cth,{empty_cth,pre_end_per_suite, + [config_clash_SUITE,'$proplist',[]]}}, + {?eh,cth,{empty_cth,post_end_per_suite, + [config_clash_SUITE,'$proplist',ok,[]]}}, + {?eh,tc_done,{ct_framework,end_per_suite,ok}}, + {?eh,test_done,{'DEF','STOP_TIME'}}, + {?eh,cth,{empty_cth,terminate,[[]]}}, + {?eh,stop_logging,[]} + ], + %% Make sure no 'cth_error' events are received! + [{negative,{?eh,cth_error,'_'},E} || E <- Events]; + test_events(ok) -> ok. 
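Each of the test_events clauses above ends by wrapping every expected event in a {negative, Pattern, Event} tuple, so that ct_test_support fails the run if skip_cth.erl reports an erroneously called hook via a cth_error event. A minimal sketch of that wrapping pattern follows; the helper module and the value that ?eh expands to are assumptions for illustration, not part of the patch.

%% Hypothetical helper, for illustration only.
-module(negative_events_sketch).
-export([forbid_cth_error/1]).

-define(eh, ct_test_support_eh).   %% assumed event-handler reference

%% Pair every expected event E with a pattern that must NOT occur:
%% here, any cth_error event sent by skip_cth.erl.
forbid_cth_error(Events) ->
    [{negative, {?eh, cth_error, '_'}, E} || E <- Events].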
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/all_hook_callbacks_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/all_hook_callbacks_SUITE.erl new file mode 100644 index 0000000000..5b50548694 --- /dev/null +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/all_hook_callbacks_SUITE.erl @@ -0,0 +1,62 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% %CopyrightEnd% +%% + +-module(all_hook_callbacks_SUITE). + +-suite_defaults([{timetrap, {minutes, 10}}]). + +%% Note: This directive should only be used in test suites. +-compile(export_all). + +-include("ct.hrl"). + +%% Test server callback functions +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + ok. + +init_per_group(Config) -> + Config. + +end_per_group(_Config) -> + ok. + +init_per_testcase(skip_case, Config) -> + {skip,"Skipped in init_per_testcase/2"}; +init_per_testcase(_TestCase, Config) -> + Config. + +end_per_testcase(_TestCase, _Config) -> + ok. + +all() -> + [{group,test_group},test_case,skip_case]. + +groups() -> + [{test_group,[test_case]}]. + +%% Test cases starts here. +test_case(Config) -> + ok. + +skip_case(Config) -> + ok. diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/config_clash_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/config_clash_SUITE.erl new file mode 100644 index 0000000000..f74c757cc1 --- /dev/null +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/config_clash_SUITE.erl @@ -0,0 +1,43 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% %CopyrightEnd% +%% + +-module(config_clash_SUITE). + +-compile(export_all). + +-include("ct.hrl"). + +suite() -> + [{require,aa,yy},{default_config,yy,"this is a default value"}]. + +init_per_testcase(_,Config) -> + Config. + +end_per_testcase(_,_) -> + ok. + +all() -> + [test_case_1]. + +%% Test cases starts here. +test_case_1() -> + [{require,aa,xx}]. +test_case_1(_Config) -> + ok. diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_no_end_config_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_no_end_config_SUITE.erl new file mode 100644 index 0000000000..7cdaf2024b --- /dev/null +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_no_end_config_SUITE.erl @@ -0,0 +1,51 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2010-2016. 
All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% %CopyrightEnd% +%% + +-module(ct_no_end_config_SUITE). + +-compile(export_all). + +-include("ct.hrl"). + +%%% This suite is used to verify that all pre/post_end_per_* callbacks +%%% are called with correct SuiteName even if no end_per_* config +%%% function exist in the suite, and that the non-exported config +%%% functions fail with 'undef'. + +init_per_suite(Config) -> + Config. + +init_per_group(_Group,Config) -> + Config. + +init_per_testcase(_TC,Config) -> + Config. + +all() -> + [test_case_1, {group,test_group}]. + +groups() -> + [{test_group,[],[test_case_2]}]. + +test_case_1(Config) -> + ok. + +test_case_2(Config) -> + ok. diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_no_init_config_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_no_init_config_SUITE.erl new file mode 100644 index 0000000000..43c062d66f --- /dev/null +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_no_init_config_SUITE.erl @@ -0,0 +1,54 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% %CopyrightEnd% +%% + +-module(ct_no_init_config_SUITE). + +-compile(export_all). + +-include("ct.hrl"). + +%%% This suite is used to verify that all +%%% pre/post_init_per_group/testcase callbacks are called with correct +%%% SuiteName even if no init_per_group/testcase function exist in the +%%% suite, and that the non-exported config functions fail with 'undef'. + +init_per_suite(Config) -> + Config. + +end_per_suite(Config) -> + Config. + +end_per_group(_Group,Config) -> + Config. + +end_per_testcase(_TC,Config) -> + Config. + +all() -> + [test_case_1, {group,test_group}]. + +groups() -> + [{test_group,[],[test_case_2]}]. + +test_case_1(Config) -> + ok. + +test_case_2(Config) -> + ok. diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_no_init_suite_config_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_no_init_suite_config_SUITE.erl new file mode 100644 index 0000000000..85dfe8ca4b --- /dev/null +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_no_init_suite_config_SUITE.erl @@ -0,0 +1,39 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. 
+%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% %CopyrightEnd% +%% + +-module(ct_no_init_suite_config_SUITE). + +-compile(export_all). + +-include("ct.hrl"). + +%%% This suite is used to verify that pre/post_init_per_suite +%%% callbacks are called with correct SuiteName even if no +%%% init_per_suite function exist in the suite, and that the +%%% non-exported config function fails with 'undef'. + +end_per_suite(Config) -> + Config. + +all() -> + [test_case]. + +test_case(Config) -> + ok. diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl index c00eb5cf93..37742f0d20 100644 --- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl @@ -44,18 +44,18 @@ -export([pre_end_per_suite/3]). -export([post_end_per_suite/4]). --export([pre_init_per_group/3]). --export([post_init_per_group/4]). --export([pre_end_per_group/3]). --export([post_end_per_group/4]). +-export([pre_init_per_group/4]). +-export([post_init_per_group/5]). +-export([pre_end_per_group/4]). +-export([post_end_per_group/5]). --export([pre_init_per_testcase/3]). --export([post_init_per_testcase/4]). --export([pre_end_per_testcase/3]). --export([post_end_per_testcase/4]). +-export([pre_init_per_testcase/4]). +-export([post_init_per_testcase/5]). +-export([pre_end_per_testcase/4]). +-export([post_end_per_testcase/5]). --export([on_tc_fail/3]). --export([on_tc_skip/3]). +-export([on_tc_fail/4]). +-export([on_tc_skip/4]). -export([terminate/1]). @@ -154,150 +154,160 @@ post_end_per_suite(Suite,Config,Return,State) -> %% @doc Called before each init_per_group. %% You can change the config in this function. --spec pre_init_per_group(Group :: atom(), - Config :: config(), - State :: #state{}) -> +-spec pre_init_per_group(Suite :: atom(), + Group :: atom(), + Config :: config(), + State :: #state{}) -> {config() | skip_or_fail(), NewState :: #state{}}. -pre_init_per_group(Group,Config,State) -> +pre_init_per_group(Suite,Group,Config,State) -> gen_event:notify( ?CT_EVMGR_REF, #event{ name = cth, node = node(), data = {?MODULE, pre_init_per_group, - [Group,Config,State]}}), - ct:log("~w:pre_init_per_group(~w) called", [?MODULE,Group]), + [Suite,Group,Config,State]}}), + ct:log("~w:pre_init_per_group(~w,~w) called", [?MODULE,Suite,Group]), {Config, State}. %% @doc Called after each init_per_group. %% You can change the return value in this function. --spec post_init_per_group(Group :: atom(), +-spec post_init_per_group(Suite :: atom(), + Group :: atom(), Config :: config(), Return :: config() | skip_or_fail(), State :: #state{}) -> {config() | skip_or_fail(), NewState :: #state{}}. 
-post_init_per_group(Group,Config,Return,State) -> +post_init_per_group(Suite,Group,Config,Return,State) -> gen_event:notify( ?CT_EVMGR_REF, #event{ name = cth, node = node(), data = {?MODULE, post_init_per_group, - [Group,Config,Return,State]}}), - ct:log("~w:post_init_per_group(~w) called", [?MODULE,Group]), + [Suite,Group,Config,Return,State]}}), + ct:log("~w:post_init_per_group(~w,~w) called", [?MODULE,Suite,Group]), {Return, State}. %% @doc Called after each end_per_group. The config/state can be changed here, %% though it will only affect the *end_per_group functions. --spec pre_end_per_group(Group :: atom(), +-spec pre_end_per_group(Suite :: atom(), + Group :: atom(), Config :: config() | skip_or_fail(), State :: #state{}) -> {ok | skip_or_fail(), NewState :: #state{}}. -pre_end_per_group(Group,Config,State) -> +pre_end_per_group(Suite,Group,Config,State) -> gen_event:notify( ?CT_EVMGR_REF, #event{ name = cth, node = node(), data = {?MODULE, pre_end_per_group, - [Group,Config,State]}}), - ct:log("~w:pre_end_per_group(~w) called", [?MODULE,Group]), + [Suite,Group,Config,State]}}), + ct:log("~w:pre_end_per_group(~w~w) called", [?MODULE,Suite,Group]), {Config, State}. %% @doc Called after each end_per_group. Note that the config cannot be %% changed here, only the status of the group. --spec post_end_per_group(Group :: atom(), +-spec post_end_per_group(Suite :: atom(), + Group :: atom(), Config :: config(), Return :: term(), State :: #state{}) -> {ok | skip_or_fail(), NewState :: #state{}}. -post_end_per_group(Group,Config,Return,State) -> +post_end_per_group(Suite,Group,Config,Return,State) -> gen_event:notify( ?CT_EVMGR_REF, #event{ name = cth, node = node(), data = {?MODULE, post_end_per_group, - [Group,Config,Return,State]}}), - ct:log("~w:post_end_per_group(~w) called", [?MODULE,Group]), + [Suite,Group,Config,Return,State]}}), + ct:log("~w:post_end_per_group(~w,~w) called", [?MODULE,Suite,Group]), {Return, State}. %% @doc Called before init_per_testcase/2 for each test case. %% You can change the config in this function. --spec pre_init_per_testcase(TC :: atom(), - Config :: config(), - State :: #state{}) -> +-spec pre_init_per_testcase(Suite :: atom(), + TC :: atom(), + Config :: config(), + State :: #state{}) -> {config() | skip_or_fail(), NewState :: #state{}}. -pre_init_per_testcase(TC,Config,State) -> +pre_init_per_testcase(Suite,TC,Config,State) -> gen_event:notify( ?CT_EVMGR_REF, #event{ name = cth, node = node(), data = {?MODULE, pre_init_per_testcase, - [TC,Config,State]}}), - ct:log("~w:pre_init_per_testcase(~w) called", [?MODULE,TC]), + [Suite,TC,Config,State]}}), + ct:log("~w:pre_init_per_testcase(~w,~w) called", [?MODULE,Suite,TC]), {Config, State}. %% @doc Called after init_per_testcase/2, and before the test case. --spec post_init_per_testcase(TC :: atom(), +-spec post_init_per_testcase(Suite :: atom(), + TC :: atom(), Config :: config(), Return :: config() | skip_or_fail(), State :: #state{}) -> {config() | skip_or_fail(), NewState :: #state{}}. -post_init_per_testcase(TC,Config,Return,State) -> +post_init_per_testcase(Suite,TC,Config,Return,State) -> gen_event:notify( ?CT_EVMGR_REF, #event{ name = cth, node = node(), data = {?MODULE, post_init_per_testcase, - [TC,Config,Return,State]}}), - ct:log("~w:post_init_per_testcase(~w) called", [?MODULE,TC]), + [Suite,TC,Config,Return,State]}}), + ct:log("~w:post_init_per_testcase(~w,~w) called", [?MODULE,Suite,TC]), {Return, State}. %% @doc Called before end_per_testacse/2. 
No skip or fail allowed here, %% only config additions. --spec pre_end_per_testcase(TC :: atom(), - Config :: config(), - State :: #state{}) -> +-spec pre_end_per_testcase(Suite :: atom(), + TC :: atom(), + Config :: config(), + State :: #state{}) -> {config(), NewState :: #state{}}. -pre_end_per_testcase(TC,Config,State) -> +pre_end_per_testcase(Suite,TC,Config,State) -> gen_event:notify( ?CT_EVMGR_REF, #event{ name = cth, node = node(), data = {?MODULE, pre_end_per_testcase, - [TC,Config,State]}}), - ct:log("~w:pre_end_per_testcase(~w) called", [?MODULE,TC]), + [Suite,TC,Config,State]}}), + ct:log("~w:pre_end_per_testcase(~w,~w) called", [?MODULE,Suite,TC]), {Config, State}. %% @doc Called after end_per_testcase/2 for each test case. Note that %% the config cannot be changed here, only the status of the test case. --spec post_end_per_testcase(TC :: atom(), +-spec post_end_per_testcase(Suite :: atom(), + TC :: atom(), Config :: config(), Return :: term(), State :: #state{}) -> {ok | skip_or_fail(), NewState :: #state{}}. -post_end_per_testcase(TC,Config,Return,State) -> +post_end_per_testcase(Suite,TC,Config,Return,State) -> gen_event:notify( ?CT_EVMGR_REF, #event{ name = cth, node = node(), data = {?MODULE, post_end_per_testcase, - [TC,Config,Return,State]}}), - ct:log("~w:post_end_per_testcase(~w) called", [?MODULE,TC]), + [Suite,TC,Config,Return,State]}}), + ct:log("~w:post_end_per_testcase(~w,~w) called", [?MODULE,Suite,TC]), {Return, State}. %% @doc Called after post_init_per_suite, post_end_per_suite, post_init_per_group, %% post_end_per_group and post_end_per_tc if the suite, group or test case failed. %% This function should be used for extra cleanup which might be needed. %% It is not possible to modify the config or the status of the test run. --spec on_tc_fail(TC :: init_per_suite | end_per_suite | +-spec on_tc_fail(Suite :: atom(), + TC :: init_per_suite | end_per_suite | init_per_group | end_per_group | atom() | {Function :: atom(), GroupName :: atom()}, Reason :: term(), State :: #state{}) -> NewState :: #state{}. -on_tc_fail(TC, Reason, State) -> +on_tc_fail(Suite, TC, Reason, State) -> gen_event:notify( ?CT_EVMGR_REF, #event{ name = cth, node = node(), data = {?MODULE, on_tc_fail, - [TC,Reason,State]}}), - ct:log("~w:on_tc_fail(~w) called", [?MODULE,TC]), + [Suite,TC,Reason,State]}}), + ct:log("~w:on_tc_fail(~w,~w) called", [?MODULE,Suite,TC]), State. %% @doc Called when a test case is skipped by either user action %% or due to an init function failing. Test case can be %% end_per_suite, init_per_group, end_per_group and the actual test cases. --spec on_tc_skip(TC :: end_per_suite | +-spec on_tc_skip(Suite :: atom(), + TC :: end_per_suite | init_per_group | end_per_group | atom() | {Function :: atom(), GroupName :: atom()}, {tc_auto_skip, {failed, {Mod :: atom(), Function :: atom(), Reason :: term()}}} | {tc_user_skip, {skipped, Reason :: term()}}, State :: #state{}) -> NewState :: #state{}. -on_tc_skip(TC, Reason, State) -> +on_tc_skip(Suite, TC, Reason, State) -> gen_event:notify( ?CT_EVMGR_REF, #event{ name = cth, node = node(), data = {?MODULE, on_tc_skip, - [TC,Reason,State]}}), - ct:log("~w:on_tc_skip(~w) called", [?MODULE,TC]), + [Suite,TC,Reason,State]}}), + ct:log("~w:on_tc_skip(~w,~w) called", [?MODULE,Suite,TC]), State. 
%% @doc Called when the scope of the CTH is done, this depends on diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fail_post_suite_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fail_post_suite_cth.erl index 559b22bc9f..141b933697 100644 --- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fail_post_suite_cth.erl +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fail_post_suite_cth.erl @@ -45,29 +45,29 @@ pre_end_per_suite(Suite,Config,State) -> post_end_per_suite(Suite,Config,Return,State) ->
empty_cth:post_end_per_suite(Suite,Config,Return,State).
-pre_init_per_group(Group,Config,State) ->
- empty_cth:pre_init_per_group(Group,Config,State).
+pre_init_per_group(Suite,Group,Config,State) -> + empty_cth:pre_init_per_group(Suite,Group,Config,State). -post_init_per_group(Group,Config,Return,State) ->
- empty_cth:post_init_per_group(Group,Config,Return,State).
+post_init_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_init_per_group(Suite,Group,Config,Return,State). -pre_end_per_group(Group,Config,State) ->
- empty_cth:pre_end_per_group(Group,Config,State).
+pre_end_per_group(Suite,Group,Config,State) -> + empty_cth:pre_end_per_group(Suite,Group,Config,State). -post_end_per_group(Group,Config,Return,State) ->
- empty_cth:post_end_per_group(Group,Config,Return,State).
+post_end_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_end_per_group(Suite,Group,Config,Return,State). -pre_init_per_testcase(TC,Config,State) ->
- empty_cth:pre_init_per_testcase(TC,Config,State).
+pre_init_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_init_per_testcase(Suite,TC,Config,State). -post_end_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_end_per_testcase(TC,Config,Return,State).
+post_end_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State). -on_tc_fail(TC, Reason, State) ->
- empty_cth:on_tc_fail(TC,Reason,State).
+on_tc_fail(Suite,TC, Reason, State) -> + empty_cth:on_tc_fail(Suite,TC,Reason,State). -on_tc_skip(TC, Reason, State) ->
- empty_cth:on_tc_skip(TC,Reason,State).
+on_tc_skip(Suite,TC, Reason, State) -> + empty_cth:on_tc_skip(Suite,TC,Reason,State). terminate(State) ->
empty_cth:terminate(State).
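The hook modules in this test directory are all updated in the same way throughout the patch: every group- and testcase-level callback, as well as on_tc_fail and on_tc_skip, now takes the suite name as an extra first argument and delegates to empty_cth. For reference, here is a minimal sketch of a hook written directly against the new arities; the module name and log texts are illustrative only, not part of this change.

-module(minimal_suite_cth).
-export([init/2,
         pre_init_per_group/4, post_init_per_group/5,
         pre_init_per_testcase/4, post_end_per_testcase/5,
         on_tc_fail/4, on_tc_skip/4]).

%% Keep the hook options as state.
init(_Id, Opts) ->
    {ok, Opts}.

pre_init_per_group(Suite, Group, Config, State) ->
    ct:log("~w: entering group ~w", [Suite, Group]),
    {Config, State}.

post_init_per_group(_Suite, _Group, _Config, Return, State) ->
    {Return, State}.

pre_init_per_testcase(Suite, TC, Config, State) ->
    ct:log("~w:~w starting", [Suite, TC]),
    {Config, State}.

post_end_per_testcase(_Suite, _TC, _Config, Return, State) ->
    {Return, State}.

on_tc_fail(Suite, TC, Reason, State) ->
    ct:log("~w:~w failed: ~p", [Suite, TC, Reason]),
    State.

on_tc_skip(Suite, TC, Reason, State) ->
    ct:log("~w:~w skipped: ~p", [Suite, TC, Reason]),
    State.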
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fail_pre_suite_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fail_pre_suite_cth.erl index 51202443bf..07d7c84ed5 100644 --- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fail_pre_suite_cth.erl +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fail_pre_suite_cth.erl @@ -45,35 +45,35 @@ pre_end_per_suite(Suite,Config,State) -> post_end_per_suite(Suite,Config,Return,State) -> empty_cth:post_end_per_suite(Suite,Config,Return,State). -pre_init_per_group(Group,Config,State) -> - empty_cth:pre_init_per_group(Group,Config,State). +pre_init_per_group(Suite,Group,Config,State) -> + empty_cth:pre_init_per_group(Suite,Group,Config,State). -post_init_per_group(Group,Config,Return,State) -> - empty_cth:post_init_per_group(Group,Config,Return,State). +post_init_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_init_per_group(Suite,Group,Config,Return,State). -pre_end_per_group(Group,Config,State) -> - empty_cth:pre_end_per_group(Group,Config,State). +pre_end_per_group(Suite,Group,Config,State) -> + empty_cth:pre_end_per_group(Suite,Group,Config,State). -post_end_per_group(Group,Config,Return,State) -> - empty_cth:post_end_per_group(Group,Config,Return,State). +post_end_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_end_per_group(Suite,Group,Config,Return,State). -pre_init_per_testcase(TC,Config,State) -> - empty_cth:pre_init_per_testcase(TC,Config,State). +pre_init_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_init_per_testcase(Suite,TC,Config,State). -post_init_per_testcase(TC,Config,Return,State) -> - empty_cth:post_init_per_testcase(TC,Config,Return,State). +post_init_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State). -pre_end_per_testcase(TC,Config,State) -> - empty_cth:pre_end_per_testcase(TC,Config,State). +pre_end_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_end_per_testcase(Suite,TC,Config,State). -post_end_per_testcase(TC,Config,Return,State) -> - empty_cth:post_end_per_testcase(TC,Config,Return,State). +post_end_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State). -on_tc_fail(TC, Reason, State) -> - empty_cth:on_tc_fail(TC,Reason,State). +on_tc_fail(Suite,TC, Reason, State) -> + empty_cth:on_tc_fail(Suite,TC,Reason,State). -on_tc_skip(TC, Reason, State) -> - empty_cth:on_tc_skip(TC,Reason,State). +on_tc_skip(Suite,TC, Reason, State) -> + empty_cth:on_tc_skip(Suite,TC,Reason,State). terminate(State) -> empty_cth:terminate(State). diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fallback_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fallback_cth.erl new file mode 100644 index 0000000000..59a3d5cbf9 --- /dev/null +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fallback_cth.erl @@ -0,0 +1,81 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% %CopyrightEnd% +%% + + +-module(fallback_cth). + + +-include_lib("common_test/src/ct_util.hrl"). +-include_lib("common_test/include/ct_event.hrl"). + + +%% CT Hooks +-compile(export_all). + +id(Opts) -> + empty_cth:id(Opts). + +init(Id, Opts) -> + empty_cth:init(Id, Opts). + +pre_init_per_suite(Suite, Config, State) -> + empty_cth:pre_init_per_suite(Suite,Config,State). + +post_init_per_suite(Suite,Config,Return,State) -> + empty_cth:post_init_per_suite(Suite,Config,Return,State). + +pre_end_per_suite(Suite,Config,State) -> + empty_cth:pre_end_per_suite(Suite,Config,State). + +post_end_per_suite(Suite,Config,Return,State) -> + empty_cth:post_end_per_suite(Suite,Config,Return,State). + +pre_init_per_group(Group,Config,State) -> + empty_cth:pre_init_per_group(fallback_nosuite,Group,Config,State). + +post_init_per_group(Group,Config,Return,State) -> + empty_cth:post_init_per_group(fallback_nosuite,Group,Config,Return,State). + +pre_end_per_group(Group,Config,State) -> + empty_cth:pre_end_per_group(fallback_nosuite,Group,Config,State). + +post_end_per_group(Group,Config,Return,State) -> + empty_cth:post_end_per_group(fallback_nosuite,Group,Config,Return,State). + +pre_init_per_testcase(TC,Config,State) -> + empty_cth:pre_init_per_testcase(fallback_nosuite,TC,Config,State). + +post_init_per_testcase(TC,Config,Return,State) -> + empty_cth:post_init_per_testcase(fallback_nosuite,TC,Config,Return,State). + +pre_end_per_testcase(TC,Config,State) -> + empty_cth:pre_end_per_testcase(fallback_nosuite,TC,Config,State). + +post_end_per_testcase(TC,Config,Return,State) -> + empty_cth:post_end_per_testcase(fallback_nosuite,TC,Config,Return,State). + +on_tc_fail(TC, Reason, State) -> + empty_cth:on_tc_fail(fallback_nosuite,TC,Reason,State). + +on_tc_skip(TC, Reason, State) -> + empty_cth:on_tc_skip(fallback_nosuite,TC,Reason,State). + +terminate(State) -> + empty_cth:terminate(State). diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl index b49cbe7fb4..679f076f3a 100644 --- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl @@ -29,13 +29,13 @@ %% CT Hooks -export([init/2]). -export([terminate/1]). --export([on_tc_skip/3]). +-export([on_tc_skip/4]). init(Id, Opts) -> empty_cth:init(Id, Opts). -on_tc_skip(TC, Reason, State) -> - empty_cth:on_tc_skip(TC,Reason,State). +on_tc_skip(Suite, TC, Reason, State) -> + empty_cth:on_tc_skip(Suite,TC,Reason,State). terminate(State) -> empty_cth:terminate(State). diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/prio_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/prio_cth.erl index a687743641..95bb76b4c1 100644 --- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/prio_cth.erl +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/prio_cth.erl @@ -47,35 +47,35 @@ pre_end_per_suite(Suite,Config,State) -> post_end_per_suite(Suite,Config,Return,State) -> empty_cth:post_end_per_suite(Suite,Config,Return,State). -pre_init_per_group(Group,Config,State) -> - empty_cth:pre_init_per_group(Group,Config,State). +pre_init_per_group(Suite,Group,Config,State) -> + empty_cth:pre_init_per_group(Suite,Group,Config,State). 
-post_init_per_group(Group,Config,Return,State) -> - empty_cth:post_init_per_group(Group,Config,Return,State). +post_init_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_init_per_group(Suite,Group,Config,Return,State). -pre_end_per_group(Group,Config,State) -> - empty_cth:pre_end_per_group(Group,Config,State). +pre_end_per_group(Suite,Group,Config,State) -> + empty_cth:pre_end_per_group(Suite,Group,Config,State). -post_end_per_group(Group,Config,Return,State) -> - empty_cth:post_end_per_group(Group,Config,Return,State). +post_end_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_end_per_group(Suite,Group,Config,Return,State). -pre_init_per_testcase(TC,Config,State) -> - empty_cth:pre_init_per_testcase(TC,Config,State). +pre_init_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_init_per_testcase(Suite,TC,Config,State). -post_init_per_testcase(TC,Config,Return,State) -> - empty_cth:post_init_per_testcase(TC,Config,Return,State). +post_init_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State). -pre_end_per_testcase(TC,Config,State) -> - empty_cth:pre_end_per_testcase(TC,Config,State). +pre_end_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_end_per_testcase(Suite,TC,Config,State). -post_end_per_testcase(TC,Config,Return,State) -> - empty_cth:post_end_per_testcase(TC,Config,Return,State). +post_end_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State). -on_tc_fail(TC, Reason, State) -> - empty_cth:on_tc_fail(TC,Reason,State). +on_tc_fail(Suite,TC, Reason, State) -> + empty_cth:on_tc_fail(Suite,TC,Reason,State). -on_tc_skip(TC, Reason, State) -> - empty_cth:on_tc_skip(TC,Reason,State). +on_tc_skip(Suite,TC, Reason, State) -> + empty_cth:on_tc_skip(Suite,TC,Reason,State). terminate(State) -> empty_cth:terminate(State). diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/recover_post_suite_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/recover_post_suite_cth.erl index 4d9c60f1ca..3562d39967 100644 --- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/recover_post_suite_cth.erl +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/recover_post_suite_cth.erl @@ -47,35 +47,35 @@ pre_end_per_suite(Suite,Config,State) -> post_end_per_suite(Suite,Config,Return,State) -> empty_cth:post_end_per_suite(Suite,Config,Return,State). -pre_init_per_group(Group,Config,State) -> - empty_cth:pre_init_per_group(Group,Config,State). +pre_init_per_group(Suite,Group,Config,State) -> + empty_cth:pre_init_per_group(Suite,Group,Config,State). -post_init_per_group(Group,Config,Return,State) -> - empty_cth:post_init_per_group(Group,Config,Return,State). +post_init_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_init_per_group(Suite,Group,Config,Return,State). -pre_end_per_group(Group,Config,State) -> - empty_cth:pre_end_per_group(Group,Config,State). +pre_end_per_group(Suite,Group,Config,State) -> + empty_cth:pre_end_per_group(Suite,Group,Config,State). -post_end_per_group(Group,Config,Return,State) -> - empty_cth:post_end_per_group(Group,Config,Return,State). +post_end_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_end_per_group(Suite,Group,Config,Return,State). -pre_init_per_testcase(TC,Config,State) -> - empty_cth:pre_init_per_testcase(TC,Config,State). +pre_init_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_init_per_testcase(Suite,TC,Config,State). 
-post_init_per_testcase(TC,Config,Return,State) -> - empty_cth:post_init_per_testcase(TC,Config,Return,State). +post_init_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State). -pre_end_per_testcase(TC,Config,State) -> - empty_cth:pre_end_per_testcase(TC,Config,State). +pre_end_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_end_per_testcase(Suite,TC,Config,State). -post_end_per_testcase(TC,Config,Return,State) -> - empty_cth:post_end_per_testcase(TC,Config,Return,State). +post_end_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State). -on_tc_fail(TC, Reason, State) -> - empty_cth:on_tc_fail(TC,Reason,State). +on_tc_fail(Suite,TC, Reason, State) -> + empty_cth:on_tc_fail(Suite,TC,Reason,State). -on_tc_skip(TC, Reason, State) -> - empty_cth:on_tc_skip(TC,Reason,State). +on_tc_skip(Suite,TC, Reason, State) -> + empty_cth:on_tc_skip(Suite,TC,Reason,State). terminate(State) -> empty_cth:terminate(State). diff --git a/lib/typer/src/typer.appup.src b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/repeat_SUITE.erl index 3b7464a97c..fded4c02ab 100644 --- a/lib/typer/src/typer.appup.src +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/repeat_SUITE.erl @@ -1,7 +1,7 @@ -%% -*- erlang -*- +%% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2014-2016. All Rights Reserved. +%% Copyright Ericsson AB 2010-2016. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -16,7 +16,27 @@ %% limitations under the License. %% %% %CopyrightEnd% -{"%VSN%", - [{<<".*">>,[{restart_application, typer}]}], - [{<<".*">>,[{restart_application, typer}]}] -}. +%% + +-module(repeat_SUITE). + +-compile(export_all). + +-include("ct.hrl"). + +init_per_testcase(_,Config) -> + Config. + +end_per_testcase(_,_) -> + ok. + +all() -> + [test_case_1, test_case_2]. + +%% Test cases starts here. +test_case_1(_Config) -> + timer:sleep(10000), + ok. + +test_case_2(_Config) -> + ok. diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/same_id_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/same_id_cth.erl index 494f398fc1..b9d9d4cec1 100644 --- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/same_id_cth.erl +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/same_id_cth.erl @@ -48,35 +48,35 @@ pre_end_per_suite(Suite,Config,State) -> post_end_per_suite(Suite,Config,Return,State) -> empty_cth:post_end_per_suite(Suite,Config,Return,State). -pre_init_per_group(Group,Config,State) -> - empty_cth:pre_init_per_group(Group,Config,State). +pre_init_per_group(Suite,Group,Config,State) -> + empty_cth:pre_init_per_group(Suite,Group,Config,State). -post_init_per_group(Group,Config,Return,State) -> - empty_cth:post_init_per_group(Group,Config,Return,State). +post_init_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_init_per_group(Suite,Group,Config,Return,State). -pre_end_per_group(Group,Config,State) -> - empty_cth:pre_end_per_group(Group,Config,State). +pre_end_per_group(Suite,Group,Config,State) -> + empty_cth:pre_end_per_group(Suite,Group,Config,State). -post_end_per_group(Group,Config,Return,State) -> - empty_cth:post_end_per_group(Group,Config,Return,State). +post_end_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_end_per_group(Suite,Group,Config,Return,State). 
-pre_init_per_testcase(TC,Config,State) -> - empty_cth:pre_init_per_testcase(TC,Config,State). +pre_init_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_init_per_testcase(Suite,TC,Config,State). -post_init_per_testcase(TC,Config,Return,State) -> - empty_cth:post_init_per_testcase(TC,Config,Return,State). +post_init_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State). -pre_end_per_testcase(TC,Config,State) -> - empty_cth:pre_end_per_testcase(TC,Config,State). +pre_end_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_end_per_testcase(Suite,TC,Config,State). -post_end_per_testcase(TC,Config,Return,State) -> - empty_cth:post_end_per_testcase(TC,Config,Return,State). +post_end_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State). -on_tc_fail(TC, Reason, State) -> - empty_cth:on_tc_fail(TC,Reason,State). +on_tc_fail(Suite,TC, Reason, State) -> + empty_cth:on_tc_fail(Suite,TC,Reason,State). -on_tc_skip(TC, Reason, State) -> - empty_cth:on_tc_skip(TC,Reason,State). +on_tc_skip(Suite,TC, Reason, State) -> + empty_cth:on_tc_skip(Suite,TC,Reason,State). terminate(State) -> empty_cth:terminate(State). diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/seq_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/seq_SUITE.erl new file mode 100644 index 0000000000..6d1302fd35 --- /dev/null +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/seq_SUITE.erl @@ -0,0 +1,45 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% %CopyrightEnd% +%% + +-module(seq_SUITE). + +-compile(export_all). + +-include("ct.hrl"). + +init_per_testcase(_,Config) -> + Config. + +end_per_testcase(_,_) -> + ok. + +all() -> + [{sequence,seq1}]. + +sequences() -> + [{seq1,[test_case_1,test_case_2]}]. + +%% Test cases starts here. +test_case_1(_Config) -> + exit(failed_on_purpose). + +test_case_2(_Config) -> + ct:fail("This test shall never be run since test_case_1 fails " + "and they are run in sequence"). diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip.spec b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip.spec new file mode 100644 index 0000000000..a271c5e8b2 --- /dev/null +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip.spec @@ -0,0 +1,8 @@ +{suites,".",[all_hook_callbacks_SUITE, + skip_init_SUITE, + skip_req_SUITE, + skip_fail_SUITE, + skip_group_SUITE, + skip_case_SUITE]}. +{skip_suites,".",all_hook_callbacks_SUITE,"Skipped in spec"}. +{skip_cases,".",skip_case_SUITE,skip_in_spec,"Skipped in spec"}. 
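The skip.spec file above selects the suites used by the callbacks_on_skip case and marks all_hook_callbacks_SUITE and the skip_in_spec test case as user-skipped with the reason "Skipped in spec", which is exactly what the expected tc_user_skip events check for. As a rough illustration of how such a specification is executed (the paths and log directory below are assumptions, not taken from the test):

%% Illustration only; the spec path and logdir are assumed.
run_skip_spec() ->
    ct:run_test([{spec, "cth/tests/skip.spec"},
                 {logdir, "logs"}]).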
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_case_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_case_SUITE.erl new file mode 100644 index 0000000000..dad80ae914 --- /dev/null +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_case_SUITE.erl @@ -0,0 +1,106 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% %CopyrightEnd% +%% + +-module(skip_case_SUITE). + +-compile(export_all). + +-include("ct.hrl"). + +suite() -> + []. + +init_per_suite(Config) -> + Config. + +end_per_suite(Config) -> + ok. + +init_per_group(_,Config) -> + Config. + +end_per_group(_,_) -> + ok. + +init_per_testcase(skip_in_init,Config) -> + {skip,"Skipped in init_per_testcase/2"}; +init_per_testcase(fail_in_init,Config) -> + ct:fail("Failed in init_per_testcase/2"); +init_per_testcase(exit_in_init,Config) -> + exit(self(),"Exit in init_per_testcase/2"); +init_per_testcase(_,Config) -> + Config. + +end_per_testcase(fail_in_end,_) -> + ct:fail("Failed in end_per_testcase/2"); +end_per_testcase(exit_in_end,_) -> + exit(self(),"Exit in end_per_testcase/2"); +end_per_testcase(_,_) -> + ok. + +all() -> + [skip_in_spec, + skip_in_init, + fail_in_init, + exit_in_init, + fail_in_end, + exit_in_end, + skip_in_case, + req_auto_skip, + fail_auto_skip + ]. + +%% Test cases starts here. +skip_in_spec(Config) -> + ct:fail("This test shall never be run. " + "It shall be skipped in the test spec."). + +skip_in_init(Config) -> + ct:fail("This test shall never be run. " + "It shall be skipped in init_per_testcase/2."). + +fail_in_init(Config) -> + ct:fail("This test shall never be run. " + "It shall fail in init_per_testcase/2."). + +exit_in_init(Config) -> + ct:fail("This test shall never be run. " + "It shall exit in init_per_testcase/2."). + +fail_in_end(Config) -> + ok. + +exit_in_end(Config) -> + ok. + +skip_in_case(Config) -> + {skip,"Skipped in test case function"}. + +req_auto_skip() -> + [{require,whatever}]. +req_auto_skip(Config) -> + ct:fail("This test shall never be run due to " + "failed require"). + +fail_auto_skip() -> + faulty_return_value. +fail_auto_skip(Config) -> + ct:fail("This test shall never be run due to " + "faulty return from info function"). diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_cth.erl new file mode 100644 index 0000000000..16f015fe7a --- /dev/null +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_cth.erl @@ -0,0 +1,182 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. 
+%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% %CopyrightEnd% +%% + + +-module(skip_cth). + + +-include_lib("common_test/src/ct_util.hrl"). +-include_lib("common_test/include/ct_event.hrl"). + +%% Send a cth_error event if a callback is called with unexpected arguments +-define(fail(Info), + gen_event:notify( + ?CT_EVMGR_REF, + #event{ name = cth_error, + node = node(), + data = {illegal_hook_callback,{?MODULE,?FUNCTION_NAME,Info}}})). + +%% CT Hooks +-compile(export_all). + +id(Opts) -> + empty_cth:id(Opts). + +init(Id, Opts) -> + empty_cth:init(Id, Opts). + +pre_init_per_suite(Suite, Config, State) -> + Suite==skip_init_SUITE + orelse Suite==skip_group_SUITE + orelse Suite==skip_case_SUITE + orelse Suite==seq_SUITE + orelse Suite==repeat_SUITE + orelse Suite==config_clash_SUITE + orelse ?fail(Suite), + empty_cth:pre_init_per_suite(Suite,Config,State). + +post_init_per_suite(Suite,Config,Return,State) -> + Suite==skip_init_SUITE + orelse Suite==skip_group_SUITE + orelse Suite==skip_case_SUITE + orelse Suite==seq_SUITE + orelse Suite==repeat_SUITE + orelse Suite==config_clash_SUITE + orelse ?fail(Suite), + empty_cth:post_init_per_suite(Suite,Config,Return,State). + +pre_end_per_suite(Suite,Config,State) -> + Suite==skip_case_SUITE + orelse Suite==skip_group_SUITE + orelse Suite==seq_SUITE + orelse Suite==repeat_SUITE + orelse Suite==config_clash_SUITE + orelse ?fail(Suite), + empty_cth:pre_end_per_suite(Suite,Config,State). + +post_end_per_suite(Suite,Config,Return,State) -> + Suite==skip_case_SUITE + orelse Suite==skip_group_SUITE + orelse Suite==seq_SUITE + orelse Suite==repeat_SUITE + orelse Suite==config_clash_SUITE + orelse ?fail(Suite), + empty_cth:post_end_per_suite(Suite,Config,Return,State). + +pre_init_per_group(Suite,Group,Config,State) -> + (Suite==skip_group_SUITE andalso Group==test_group_3) + orelse ?fail({Suite,Group}), + empty_cth:pre_init_per_group(Suite,Group,Config,State). + +post_init_per_group(Suite,Group,Config,Return,State) -> + (Suite==skip_group_SUITE andalso Group==test_group_3) + orelse ?fail({Suite,Group}), + empty_cth:post_init_per_group(Suite,Group,Config,Return,State). + +pre_end_per_group(Suite,Group,Config,State) -> + ?fail({Suite,Group}), + empty_cth:pre_end_per_group(Suite,Group,Config,State). + +post_end_per_group(Suite,Group,Config,Return,State) -> + ?fail({Suite,Group}), + empty_cth:post_end_per_group(Suite,Group,Config,Return,State). + +pre_init_per_testcase(Suite,TC,Config,State) -> + (Suite==skip_case_SUITE andalso (TC==skip_in_init + orelse TC==fail_in_init + orelse TC==exit_in_init + orelse TC==fail_in_end + orelse TC==exit_in_end + orelse TC==skip_in_case)) + orelse (Suite==seq_SUITE andalso TC==test_case_1) + orelse (Suite==repeat_SUITE andalso TC==test_case_1) + orelse ?fail({Suite,TC}), + empty_cth:pre_init_per_testcase(Suite,TC,Config,State). 
+ +post_init_per_testcase(Suite,TC,Config,Return,State) -> + (Suite==skip_case_SUITE andalso (TC==skip_in_init + orelse TC==fail_in_init + orelse TC==exit_in_init + orelse TC==fail_in_end + orelse TC==exit_in_end + orelse TC==skip_in_case)) + orelse (Suite==seq_SUITE andalso TC==test_case_1) + orelse (Suite==repeat_SUITE andalso TC==test_case_1) + orelse ?fail({Suite,TC}), + empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State). + +pre_end_per_testcase(Suite,TC,Config,State) -> + (Suite==skip_case_SUITE andalso (TC==skip_in_case + orelse TC==fail_in_end + orelse TC==exit_in_end)) + orelse (Suite==seq_SUITE andalso TC==test_case_1) + orelse (Suite==repeat_SUITE andalso TC==test_case_1) + orelse ?fail({Suite,TC}), + empty_cth:pre_end_per_testcase(Suite,TC,Config,State). + +post_end_per_testcase(Suite,TC,Config,Return,State) -> + (Suite==skip_case_SUITE andalso (TC==skip_in_case + orelse TC==fail_in_end + orelse TC==exit_in_end)) + orelse (Suite==seq_SUITE andalso TC==test_case_1) + orelse (Suite==repeat_SUITE andalso TC==test_case_1) + orelse ?fail({Suite,TC}), + empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State). + +on_tc_fail(Suite,TC,Reason,State) -> + (Suite==seq_SUITE andalso TC==test_case_1) + orelse (Suite==config_clash_SUITE andalso TC==test_case_1) + orelse ?fail({Suite,TC}), + empty_cth:on_tc_fail(Suite,TC,Reason,State). + +on_tc_skip(all_hook_callbacks_SUITE=Suite,all=TC, Reason, State) -> + empty_cth:on_tc_skip(Suite,TC,Reason,State); +on_tc_skip(Suite,TC,Reason,State) + when (Suite==skip_init_SUITE + orelse Suite==skip_req_SUITE + orelse Suite==skip_fail_SUITE) + andalso + (TC==init_per_suite + orelse TC==test_case + orelse TC==end_per_suite) -> + empty_cth:on_tc_skip(Suite,TC,Reason,State); +on_tc_skip(skip_group_SUITE=Suite,TC={C,G},Reason,State) + when (C==init_per_group orelse C==test_case orelse C==end_per_group) andalso + (G==test_group_1 orelse G==test_group_2 orelse G==test_group_3) -> + empty_cth:on_tc_skip(Suite,TC,Reason,State); +on_tc_skip(skip_case_SUITE=Suite,TC,Reason,State) + when TC==skip_in_spec; + TC==skip_in_init; + TC==fail_in_init; + TC==exit_in_init; + TC==skip_in_case; + TC==req_auto_skip; + TC==fail_auto_skip -> + empty_cth:on_tc_skip(Suite,TC,Reason,State); +on_tc_skip(Suite,TC,Reason,State) + when (Suite==seq_SUITE andalso TC==test_case_2) + orelse (Suite==repeat_SUITE andalso TC==test_case_2) -> + empty_cth:on_tc_skip(Suite,TC,Reason,State); +on_tc_skip(Suite,TC,Reason,State) -> + ?fail({Suite,TC}), + empty_cth:on_tc_skip(Suite,TC,Reason,State). + +terminate(State) -> + empty_cth:terminate(State). diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_fail_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_fail_SUITE.erl new file mode 100644 index 0000000000..9f5dfee6b9 --- /dev/null +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_fail_SUITE.erl @@ -0,0 +1,53 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% %CopyrightEnd% +%% + +-module(skip_fail_SUITE). + +-compile(export_all). + +-include("ct.hrl"). + +suite() -> + faulty_return_value. + +init_per_suite(Config) -> + Config. + +end_per_suite(Config) -> + ok. + +init_per_group(_,Config) -> + Config. + +end_per_group(_,_) -> + ok. + +init_per_testcase(_,Config) -> + Config. + +end_per_testcase(_,_) -> + ok. + +all() -> + [test_case]. + +%% Test cases starts here. +test_case(Config) -> + ok. diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_group_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_group_SUITE.erl new file mode 100644 index 0000000000..d3b848bfbd --- /dev/null +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_group_SUITE.erl @@ -0,0 +1,64 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% %CopyrightEnd% +%% + +-module(skip_group_SUITE). + +-compile(export_all). + +-include("ct.hrl"). + +suite() -> + []. + +init_per_suite(Config) -> + Config. + +end_per_suite(Config) -> + ok. + +group(test_group_1) -> + [{require,whatever}]; +group(test_group_2) -> + faulty_return_value; +group(_) -> + []. + +init_per_group(test_group_3,Config) -> + {skip,"Skipped in init_per_group/2"}; +init_per_group(_,Config) -> + ct:fail("This shall never be run due to auto_skip from group/1"). + +end_per_group(_,_) -> + ct:fail("This shall never be run"). + +all() -> + [{group,test_group_1}, + {group,test_group_2}, + {group,test_group_3}]. + +groups() -> + [{test_group_1,[test_case]}, + {test_group_2,[test_case]}, + {test_group_3,[test_case]}]. + +%% Test cases starts here. +test_case(_Config) -> + ct:fail("This test case shall never be run due to skip on group level"). + diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_init_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_init_SUITE.erl new file mode 100644 index 0000000000..70305421ac --- /dev/null +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_init_SUITE.erl @@ -0,0 +1,53 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% %CopyrightEnd% +%% + +-module(skip_init_SUITE). + +-compile(export_all). + +-include("ct.hrl"). + +suite() -> + []. 
+ +init_per_suite(Config) -> + {skip,"Skipped in init_per_suite/1"}. + +end_per_suite(Config) -> + ok. + +init_per_group(_,Config) -> + Config. + +end_per_group(_,_) -> + ok. + +init_per_testcase(_,Config) -> + Config. + +end_per_testcase(_,_) -> + ok. + +all() -> + [test_case]. + +%% Test cases starts here. +test_case(Config) -> + ok. diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_post_suite_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_post_suite_cth.erl index d5b347e723..48a2d70e22 100644 --- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_post_suite_cth.erl +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_post_suite_cth.erl @@ -45,35 +45,35 @@ pre_end_per_suite(Suite,Config,State) -> post_end_per_suite(Suite,Config,Return,State) -> empty_cth:post_end_per_suite(Suite,Config,Return,State). -pre_init_per_group(Group,Config,State) -> - empty_cth:pre_init_per_group(Group,Config,State). +pre_init_per_group(Suite,Group,Config,State) -> + empty_cth:pre_init_per_group(Suite,Group,Config,State). -post_init_per_group(Group,Config,Return,State) -> - empty_cth:post_init_per_group(Group,Config,Return,State). +post_init_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_init_per_group(Suite,Group,Config,Return,State). -pre_end_per_group(Group,Config,State) -> - empty_cth:pre_end_per_group(Group,Config,State). +pre_end_per_group(Suite,Group,Config,State) -> + empty_cth:pre_end_per_group(Suite,Group,Config,State). -post_end_per_group(Group,Config,Return,State) -> - empty_cth:post_end_per_group(Group,Config,Return,State). +post_end_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_end_per_group(Suite,Group,Config,Return,State). -pre_init_per_testcase(TC,Config,State) -> - empty_cth:pre_init_per_testcase(TC,Config,State). +pre_init_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_init_per_testcase(Suite,TC,Config,State). -post_init_per_testcase(TC,Config,Return,State) -> - empty_cth:post_init_per_testcase(TC,Config,Return,State). +post_init_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State). -pre_end_per_testcase(TC,Config,State) -> - empty_cth:pre_end_per_testcase(TC,Config,State). +pre_end_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_end_per_testcase(Suite,TC,Config,State). -post_end_per_testcase(TC,Config,Return,State) -> - empty_cth:post_end_per_testcase(TC,Config,Return,State). +post_end_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State). -on_tc_fail(TC, Reason, State) -> - empty_cth:on_tc_fail(TC,Reason,State). +on_tc_fail(Suite,TC, Reason, State) -> + empty_cth:on_tc_fail(Suite,TC,Reason,State). -on_tc_skip(TC, Reason, State) -> - empty_cth:on_tc_skip(TC,Reason,State). +on_tc_skip(Suite,TC, Reason, State) -> + empty_cth:on_tc_skip(Suite,TC,Reason,State). terminate(State) -> empty_cth:terminate(State). diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_end_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_end_cth.erl index 36abac0bf8..d638954d3c 100644 --- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_end_cth.erl +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_end_cth.erl @@ -46,36 +46,36 @@ pre_end_per_suite(Suite,Config,State) -> post_end_per_suite(Suite,Config,Return,State) -> empty_cth:post_end_per_suite(Suite,Config,Return,State). 
-pre_init_per_group(Group,Config,State) -> - empty_cth:pre_init_per_group(Group,Config,State). +pre_init_per_group(Suite,Group,Config,State) -> + empty_cth:pre_init_per_group(Suite,Group,Config,State). -post_init_per_group(Group,Config,Return,State) -> - empty_cth:post_init_per_group(Group,Config,Return,State). +post_init_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_init_per_group(Suite,Group,Config,Return,State). -pre_end_per_group(Group,Config,State) -> - empty_cth:pre_end_per_group(Group,Config,State), +pre_end_per_group(Suite,Group,Config,State) -> + empty_cth:pre_end_per_group(Suite,Group,Config,State), {{skip, "Test skip"}, State}. -post_end_per_group(Group,Config,Return,State) -> - empty_cth:post_end_per_group(Group,Config,Return,State). +post_end_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_end_per_group(Suite,Group,Config,Return,State). -pre_init_per_testcase(TC,Config,State) -> - empty_cth:pre_init_per_testcase(TC,Config,State). +pre_init_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_init_per_testcase(Suite,TC,Config,State). -post_init_per_testcase(TC,Config,Return,State) -> - empty_cth:post_init_per_testcase(TC,Config,Return,State). +post_init_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State). -pre_end_per_testcase(TC,Config,State) -> - empty_cth:pre_end_per_testcase(TC,Config,State). +pre_end_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_end_per_testcase(Suite,TC,Config,State). -post_end_per_testcase(TC,Config,Return,State) -> - empty_cth:post_end_per_testcase(TC,Config,Return,State). +post_end_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State). -on_tc_fail(TC, Reason, State) -> - empty_cth:on_tc_fail(TC,Reason,State). +on_tc_fail(Suite,TC, Reason, State) -> + empty_cth:on_tc_fail(Suite,TC,Reason,State). -on_tc_skip(TC, Reason, State) -> - empty_cth:on_tc_skip(TC,Reason,State). +on_tc_skip(Suite,TC, Reason, State) -> + empty_cth:on_tc_skip(Suite,TC,Reason,State). terminate(State) -> empty_cth:terminate(State). diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_init_tc_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_init_tc_cth.erl new file mode 100644 index 0000000000..e1d261d59a --- /dev/null +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_init_tc_cth.erl @@ -0,0 +1,79 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% %CopyrightEnd% +%% + + +-module(skip_pre_init_tc_cth). + + +-include_lib("common_test/src/ct_util.hrl"). +-include_lib("common_test/include/ct_event.hrl"). + + +%% CT Hooks +-compile(export_all). + +init(Id, Opts) -> + empty_cth:init(Id, Opts). + +pre_init_per_suite(Suite, Config, State) -> + empty_cth:pre_init_per_suite(Suite,Config,State). 
+ +post_init_per_suite(Suite,Config,Return,State) -> + empty_cth:post_init_per_suite(Suite,Config,Return,State). + +pre_end_per_suite(Suite,Config,State) -> + empty_cth:pre_end_per_suite(Suite,Config,State). + +post_end_per_suite(Suite,Config,Return,State) -> + empty_cth:post_end_per_suite(Suite,Config,Return,State). + +pre_init_per_group(Suite,Group,Config,State) -> + empty_cth:pre_init_per_group(Suite,Group,Config,State). + +post_init_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_init_per_group(Suite,Group,Config,Return,State). + +pre_end_per_group(Suite,Group,Config,State) -> + empty_cth:pre_end_per_group(Suite,Group,Config,State). + +post_end_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_end_per_group(Suite,Group,Config,Return,State). + +pre_init_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_init_per_testcase(Suite,TC,Config,State), + {{skip, "Skipped in pre_init_per_testcase"}, State}. + +post_init_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State). + +pre_end_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_end_per_testcase(Suite,TC,Config,State). + +post_end_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State). + +on_tc_fail(Suite,TC, Reason, State) -> + empty_cth:on_tc_fail(Suite,TC,Reason,State). + +on_tc_skip(Suite,TC, Reason, State) -> + empty_cth:on_tc_skip(Suite,TC,Reason,State). + +terminate(State) -> + empty_cth:terminate(State). diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_suite_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_suite_cth.erl index fa510b2d54..d7b07ee33c 100644 --- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_suite_cth.erl +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_suite_cth.erl @@ -46,35 +46,35 @@ pre_end_per_suite(Suite,Config,State) -> post_end_per_suite(Suite,Config,Return,State) -> empty_cth:post_end_per_suite(Suite,Config,Return,State). -pre_init_per_group(Group,Config,State) -> - empty_cth:pre_init_per_group(Group,Config,State). +pre_init_per_group(Suite,Group,Config,State) -> + empty_cth:pre_init_per_group(Suite,Group,Config,State). -post_init_per_group(Group,Config,Return,State) -> - empty_cth:post_init_per_group(Group,Config,Return,State). +post_init_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_init_per_group(Suite,Group,Config,Return,State). -pre_end_per_group(Group,Config,State) -> - empty_cth:pre_end_per_group(Group,Config,State). +pre_end_per_group(Suite,Group,Config,State) -> + empty_cth:pre_end_per_group(Suite,Group,Config,State). -post_end_per_group(Group,Config,Return,State) -> - empty_cth:post_end_per_group(Group,Config,Return,State). +post_end_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_end_per_group(Suite,Group,Config,Return,State). -pre_init_per_testcase(TC,Config,State) -> - empty_cth:pre_init_per_testcase(TC,Config,State). +pre_init_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_init_per_testcase(Suite,TC,Config,State). -post_init_per_testcase(TC,Config,Return,State) -> - empty_cth:post_init_per_testcase(TC,Config,Return,State). +post_init_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State). -pre_end_per_testcase(TC,Config,State) -> - empty_cth:pre_end_per_testcase(TC,Config,State). 
+pre_end_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_end_per_testcase(Suite,TC,Config,State). -post_end_per_testcase(TC,Config,Return,State) -> - empty_cth:post_end_per_testcase(TC,Config,Return,State). +post_end_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State). -on_tc_fail(TC, Reason, State) -> - empty_cth:on_tc_fail(TC,Reason,State). +on_tc_fail(Suite,TC, Reason, State) -> + empty_cth:on_tc_fail(Suite,TC,Reason,State). -on_tc_skip(TC, Reason, State) -> - empty_cth:on_tc_skip(TC,Reason,State). +on_tc_skip(Suite,TC, Reason, State) -> + empty_cth:on_tc_skip(Suite,TC,Reason,State). terminate(State) -> empty_cth:terminate(State). diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_req_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_req_SUITE.erl new file mode 100644 index 0000000000..bc69dd5ea4 --- /dev/null +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_req_SUITE.erl @@ -0,0 +1,53 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% %CopyrightEnd% +%% + +-module(skip_req_SUITE). + +-compile(export_all). + +-include("ct.hrl"). + +suite() -> + [{require,whatever}]. + +init_per_suite(Config) -> + Config. + +end_per_suite(Config) -> + ok. + +init_per_group(_,Config) -> + Config. + +end_per_group(_,_) -> + ok. + +init_per_testcase(_,Config) -> + Config. + +end_per_testcase(_,_) -> + ok. + +all() -> + [test_case]. + +%% Test cases starts here. +test_case(Config) -> + ok. diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/state_update_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/state_update_cth.erl index 7ec0d458b6..c6e0419c50 100644 --- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/state_update_cth.erl +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/state_update_cth.erl @@ -48,44 +48,44 @@ post_end_per_suite(Suite,Config,Return,State) -> empty_cth:post_end_per_suite(Suite,Config,Return,State), {Return, [post_end_per_suite|State]}. -pre_init_per_group(Group,Config,State) -> - empty_cth:pre_init_per_group(Group,Config,State), +pre_init_per_group(Suite,Group,Config,State) -> + empty_cth:pre_init_per_group(Suite,Group,Config,State), {Config, [pre_init_per_group|State]}. -post_init_per_group(Group,Config,Return,State) -> - empty_cth:post_init_per_group(Group,Config,Return,State), +post_init_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_init_per_group(Suite,Group,Config,Return,State), {Return, [post_init_per_group|State]}. -pre_end_per_group(Group,Config,State) -> - empty_cth:pre_end_per_group(Group,Config,State), +pre_end_per_group(Suite,Group,Config,State) -> + empty_cth:pre_end_per_group(Suite,Group,Config,State), {Config, [pre_end_per_group|State]}. 
-post_end_per_group(Group,Config,Return,State) -> - empty_cth:post_end_per_group(Group,Config,Return,State), +post_end_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_end_per_group(Suite,Group,Config,Return,State), {Return, [post_end_per_group|State]}. -pre_init_per_testcase(TC,Config,State) -> - empty_cth:pre_init_per_testcase(TC,Config,State), +pre_init_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_init_per_testcase(Suite,TC,Config,State), {Config, [pre_init_per_testcase|State]}. -post_init_per_testcase(TC,Config,Return,State) -> - empty_cth:post_init_per_testcase(TC,Config,Return,State), +post_init_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State), {Return, [post_init_per_testcase|State]}. -pre_end_per_testcase(TC,Config,State) -> - empty_cth:pre_end_per_testcase(TC,Config,State), +pre_end_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_end_per_testcase(Suite,TC,Config,State), {Config, [pre_end_per_testcase|State]}. -post_end_per_testcase(TC,Config,Return,State) -> - empty_cth:post_end_per_testcase(TC,Config,Return,State), +post_end_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State), {Return, [post_end_per_testcase|State]}. -on_tc_fail(TC, Reason, State) -> - empty_cth:on_tc_fail(TC,Reason,State), +on_tc_fail(Suite,TC, Reason, State) -> + empty_cth:on_tc_fail(Suite,TC,Reason,State), [on_tc_fail|State]. -on_tc_skip(TC, Reason, State) -> - empty_cth:on_tc_skip(TC,Reason,State), +on_tc_skip(Suite,TC, Reason, State) -> + empty_cth:on_tc_skip(Suite,TC,Reason,State), [on_tc_skip|State]. terminate(State) -> diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/undef_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/undef_cth.erl index 2b9e726819..10a7047899 100644 --- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/undef_cth.erl +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/undef_cth.erl @@ -44,35 +44,35 @@ pre_end_per_suite(Suite,Config,State) -> post_end_per_suite(Suite,Config,Return,State) -> empty_cth:post_end_per_suite(Suite,Config,Return,State). -pre_init_per_group(Group,Config,State) -> - empty_cth:pre_init_per_group(Group,Config,State). +pre_init_per_group(Suite,Group,Config,State) -> + empty_cth:pre_init_per_group(Suite,Group,Config,State). -post_init_per_group(Group,Config,Return,State) -> - empty_cth:post_init_per_group(Group,Config,Return,State). +post_init_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_init_per_group(Suite,Group,Config,Return,State). -pre_end_per_group(Group,Config,State) -> - empty_cth:pre_end_per_group(Group,Config,State). +pre_end_per_group(Suite,Group,Config,State) -> + empty_cth:pre_end_per_group(Suite,Group,Config,State). -post_end_per_group(Group,Config,Return,State) -> - empty_cth:post_end_per_group(Group,Config,Return,State). +post_end_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_end_per_group(Suite,Group,Config,Return,State). -pre_init_per_testcase(TC,Config,State) -> - empty_cth:pre_init_per_testcase(TC,Config,State). +pre_init_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_init_per_testcase(Suite,TC,Config,State). -post_init_per_testcase(TC,Config,Return,State) -> - empty_cth:post_init_per_testcase(TC,Config,Return,State). +post_init_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State). 
-pre_end_per_testcase(TC,Config,State) -> - empty_cth:pre_end_per_testcase(TC,Config,State). +pre_end_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_end_per_testcase(Suite,TC,Config,State). -post_end_per_testcase(TC,Config,Return,State) -> - empty_cth:post_end_per_testcase(TC,Config,Return,State). +post_end_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State). -on_tc_fail(TC, Reason, State) -> - empty_cth:on_tc_fail(TC,Reason,State). +on_tc_fail(Suite,TC, Reason, State) -> + empty_cth:on_tc_fail(Suite,TC,Reason,State). -on_tc_skip(TC, Reason, State) -> - empty_cth:on_tc_skip(TC,Reason,State). +on_tc_skip(Suite,TC, Reason, State) -> + empty_cth:on_tc_skip(Suite,TC,Reason,State). terminate(State) -> empty_cth:terminate(State). diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/update_config_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/update_config_cth.erl index d48981f667..f933c7702e 100644 --- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/update_config_cth.erl +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/update_config_cth.erl @@ -50,43 +50,43 @@ post_end_per_suite(Suite,Config,Return,State) -> NewConfig = [{post_end_per_suite,?now}|Config], {NewConfig,NewConfig}. -pre_init_per_group(Group,Config,State) -> - empty_cth:pre_init_per_group(Group,Config,State), +pre_init_per_group(Suite, Group,Config,State) -> + empty_cth:pre_init_per_group(Suite,Group,Config,State), {[{pre_init_per_group,?now}|Config],State}. -post_init_per_group(Group,Config,Return,State) -> - empty_cth:post_init_per_group(Group,Config,Return,State), +post_init_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_init_per_group(Suite,Group,Config,Return,State), {[{post_init_per_group,?now}|Return],State}. -pre_end_per_group(Group,Config,State) -> - empty_cth:pre_end_per_group(Group,Config,State), +pre_end_per_group(Suite,Group,Config,State) -> + empty_cth:pre_end_per_group(Suite,Group,Config,State), {[{pre_end_per_group,?now}|Config],State}. -post_end_per_group(Group,Config,Return,State) -> - empty_cth:post_end_per_group(Group,Config,Return,State), +post_end_per_group(Suite,Group,Config,Return,State) -> + empty_cth:post_end_per_group(Suite,Group,Config,Return,State), {[{post_end_per_group,?now}|Config],State}. -pre_init_per_testcase(TC,Config,State) -> - empty_cth:pre_init_per_testcase(TC,Config,State), +pre_init_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_init_per_testcase(Suite,TC,Config,State), {[{pre_init_per_testcase,?now}|Config],State}. -post_init_per_testcase(TC,Config,Return,State) -> - empty_cth:post_init_per_testcase(TC,Config,Return,State), +post_init_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State), {[{post_init_per_testcase,?now}|Config],State}. -pre_end_per_testcase(TC,Config,State) -> - empty_cth:pre_end_per_testcase(TC,Config,State), +pre_end_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_end_per_testcase(Suite,TC,Config,State), {[{pre_end_per_testcase,?now}|Config],State}. -post_end_per_testcase(TC,Config,Return,State) -> - empty_cth:post_end_per_testcase(TC,Config,Return,State), +post_end_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State), {[{post_end_per_testcase,?now}|Config],State}. -on_tc_fail(TC, Reason, State) -> - empty_cth:on_tc_fail(TC,Reason,State). +on_tc_fail(Suite,TC, Reason, State) -> + empty_cth:on_tc_fail(Suite,TC,Reason,State). 
-on_tc_skip(TC, Reason, State) -> - empty_cth:on_tc_skip(TC,Reason,State). +on_tc_skip(Suite,TC, Reason, State) -> + empty_cth:on_tc_skip(Suite,TC,Reason,State). terminate(State) -> empty_cth:terminate(State). diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/verify_config_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/verify_config_cth.erl index 71d84781e0..b29256a77e 100644 --- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/verify_config_cth.erl +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/verify_config_cth.erl @@ -60,37 +60,37 @@ post_end_per_suite(Suite,Config,Return,State) -> ct_no_config_SUITE = ct:get_config(suite_cfg), empty_cth:post_end_per_suite(Suite,Config,Return,State). -pre_init_per_group(Group,Config,State) -> +pre_init_per_group(Suite,Group,Config,State) -> true = ?val(post_init_per_suite, Config), ct_no_config_SUITE = ct:get_config(suite_cfg), test_group = ct:get_config(group_cfg), - empty_cth:pre_init_per_group(Group, + empty_cth:pre_init_per_group(Suite,Group, [{pre_init_per_group,true} | Config], State). -post_init_per_group(Group,Config,Return,State) -> +post_init_per_group(Suite,Group,Config,Return,State) -> true = ?val(pre_init_per_group, Return), test_group = ct:get_config(group_cfg), - empty_cth:post_init_per_group(Group, + empty_cth:post_init_per_group(Suite,Group, Config, [{post_init_per_group,true} | Return], State). -pre_end_per_group(Group,Config,State) -> +pre_end_per_group(Suite,Group,Config,State) -> true = ?val(post_init_per_group, Config), ct_no_config_SUITE = ct:get_config(suite_cfg), test_group = ct:get_config(group_cfg), - empty_cth:pre_end_per_group(Group, + empty_cth:pre_end_per_group(Suite,Group, [{pre_end_per_group,true} | Config], State). -post_end_per_group(Group,Config,Return,State) -> +post_end_per_group(Suite,Group,Config,Return,State) -> true = ?val(pre_end_per_group, Config), ct_no_config_SUITE = ct:get_config(suite_cfg), test_group = ct:get_config(group_cfg), - empty_cth:post_end_per_group(Group,Config,Return,State). + empty_cth:post_end_per_group(Suite,Group,Config,Return,State). -pre_init_per_testcase(TC,Config,State) -> +pre_init_per_testcase(Suite,TC,Config,State) -> true = ?val(post_init_per_suite, Config), case ?val(name, ?val(tc_group_properties, Config)) of undefined -> @@ -102,19 +102,19 @@ pre_init_per_testcase(TC,Config,State) -> ct_no_config_SUITE = ct:get_config(suite_cfg), CfgKey = list_to_atom(atom_to_list(TC) ++ "_cfg"), TC = ct:get_config(CfgKey), - empty_cth:pre_init_per_testcase(TC, + empty_cth:pre_init_per_testcase(Suite,TC, [{pre_init_per_testcase,true} | Config], State). %%! TODO: Verify Config also in post_init and pre_end! -post_init_per_testcase(TC,Config,Return,State) -> - empty_cth:post_init_per_testcase(TC,Config,Return,State). +post_init_per_testcase(Suite,TC,Config,Return,State) -> + empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State). -pre_end_per_testcase(TC,Config,State) -> - empty_cth:pre_end_per_testcase(TC,Config,State). +pre_end_per_testcase(Suite,TC,Config,State) -> + empty_cth:pre_end_per_testcase(Suite,TC,Config,State). 
-post_end_per_testcase(TC,Config,Return,State) -> +post_end_per_testcase(Suite,TC,Config,Return,State) -> true = ?val(post_init_per_suite, Config), true = ?val(pre_init_per_testcase, Config), case ?val(name, ?val(tc_group_properties, Config)) of @@ -127,13 +127,13 @@ post_end_per_testcase(TC,Config,Return,State) -> ct_no_config_SUITE = ct:get_config(suite_cfg), CfgKey = list_to_atom(atom_to_list(TC) ++ "_cfg"), TC = ct:get_config(CfgKey), - empty_cth:post_end_per_testcase(TC,Config,Return,State). + empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State). -on_tc_fail(TC, Reason, State) -> - empty_cth:on_tc_fail(TC,Reason,State). +on_tc_fail(Suite,TC, Reason, State) -> + empty_cth:on_tc_fail(Suite,TC,Reason,State). -on_tc_skip(TC, Reason, State) -> - empty_cth:on_tc_skip(TC,Reason,State). +on_tc_skip(Suite,TC, Reason, State) -> + empty_cth:on_tc_skip(Suite,TC,Reason,State). terminate(State) -> empty_cth:terminate(State). diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/verify_data_dir_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/verify_data_dir_cth.erl index 9abd2e5e83..42e086b96e 100644 --- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/verify_data_dir_cth.erl +++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/verify_data_dir_cth.erl @@ -62,43 +62,43 @@ post_end_per_suite(Suite,Config,Return,State) -> check_dirs(State,Config), empty_cth:post_end_per_suite(Suite,Config,Return,State). -pre_init_per_group(Group,Config,State) -> +pre_init_per_group(Suite,Group,Config,State) -> check_dirs(State,Config), - empty_cth:pre_init_per_group(Group,Config,State). + empty_cth:pre_init_per_group(Suite,Group,Config,State). -post_init_per_group(Group,Config,Return,State) -> +post_init_per_group(Suite,Group,Config,Return,State) -> check_dirs(State,Return), - empty_cth:post_init_per_group(Group,Config,Return,State). + empty_cth:post_init_per_group(Suite,Group,Config,Return,State). -pre_end_per_group(Group,Config,State) -> +pre_end_per_group(Suite,Group,Config,State) -> check_dirs(State,Config), - empty_cth:pre_end_per_group(Group,Config,State). + empty_cth:pre_end_per_group(Suite,Group,Config,State). -post_end_per_group(Group,Config,Return,State) -> +post_end_per_group(Suite,Group,Config,Return,State) -> check_dirs(State,Config), - empty_cth:post_end_per_group(Group,Config,Return,State). + empty_cth:post_end_per_group(Suite,Group,Config,Return,State). -pre_init_per_testcase(TC,Config,State) -> +pre_init_per_testcase(Suite,TC,Config,State) -> check_dirs(State,Config), - empty_cth:pre_init_per_testcase(TC,Config,State). + empty_cth:pre_init_per_testcase(Suite,TC,Config,State). -post_init_per_testcase(TC,Config,Return,State) -> +post_init_per_testcase(Suite,TC,Config,Return,State) -> check_dirs(State,Config), - empty_cth:post_init_per_testcase(TC,Config,Return,State). + empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State). -pre_end_per_testcase(TC,Config,State) -> +pre_end_per_testcase(Suite,TC,Config,State) -> check_dirs(State,Config), - empty_cth:pre_end_per_testcase(TC,Config,State). + empty_cth:pre_end_per_testcase(Suite,TC,Config,State). -post_end_per_testcase(TC,Config,Return,State) -> +post_end_per_testcase(Suite,TC,Config,Return,State) -> check_dirs(State,Config), - empty_cth:post_end_per_testcase(TC,Config,Return,State). + empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State). -on_tc_fail(TC, Reason, State) -> - empty_cth:on_tc_fail(TC,Reason,State). +on_tc_fail(Suite,TC, Reason, State) -> + empty_cth:on_tc_fail(Suite,TC,Reason,State). 
-on_tc_skip(TC, Reason, State) -> - empty_cth:on_tc_skip(TC,Reason,State). +on_tc_skip(Suite,TC, Reason, State) -> + empty_cth:on_tc_skip(Suite,TC,Reason,State). terminate(State) -> empty_cth:terminate(State). diff --git a/lib/common_test/test/ct_repeat_testrun_SUITE.erl b/lib/common_test/test/ct_repeat_testrun_SUITE.erl index f8b6a379f6..76611a2db3 100644 --- a/lib/common_test/test/ct_repeat_testrun_SUITE.erl +++ b/lib/common_test/test/ct_repeat_testrun_SUITE.erl @@ -363,14 +363,17 @@ skip_first_tc1(Suite) -> {?eh,tc_start,{Suite,tc1}}, {?eh,tc_done,{Suite,tc1,ok}}, {?eh,test_stats,{'_',0,{0,0}}}, + {?eh,tc_start,{Suite,tc2}}, {?eh,tc_done,{Suite,tc2,?skipped}}, {?eh,test_stats,{'_',0,{0,1}}}, + {?eh,tc_start,{Suite,{init_per_group,g,[]}}}, {?eh,tc_done,{Suite,{init_per_group,g,[]},?skipped}}, {?eh,tc_auto_skip,{Suite,{tc1,g},?skip_reason}}, {?eh,test_stats,{'_',0,{0,2}}}, {?eh,tc_auto_skip,{Suite,{tc2,g},?skip_reason}}, {?eh,test_stats,{'_',0,{0,3}}}, {?eh,tc_auto_skip,{Suite,{end_per_group,g},?skip_reason}}, + {?eh,tc_start,{Suite,tc2}}, {?eh,tc_done,{Suite,tc2,?skipped}}, {?eh,test_stats,{'_',0,{0,4}}}, {?eh,tc_start,{Suite,end_per_suite}}, @@ -390,10 +393,12 @@ skip_tc1_in_group(Suite) -> {?eh,tc_start,{Suite,tc1}}, {?eh,tc_done,{Suite,tc1,ok}}, {?eh,test_stats,{'_',0,{0,0}}}, + {?eh,tc_start,{Suite,tc2}}, {?eh,tc_done,{Suite,tc2,?skipped}}, {?eh,test_stats,{'_',0,{0,1}}}, {?eh,tc_start,{Suite,{end_per_group,g,[]}}}, {?eh,tc_done,{Suite,{end_per_group,g,[]},ok}}], + {?eh,tc_start,{Suite,tc2}}, {?eh,tc_done,{Suite,tc2,?skipped}}, {?eh,test_stats,{'_',0,{0,2}}}, {?eh,tc_start,{Suite,end_per_suite}}, diff --git a/lib/common_test/test/ct_surefire_SUITE.erl b/lib/common_test/test/ct_surefire_SUITE.erl index 42ec685c16..884217afc2 100644 --- a/lib/common_test/test/ct_surefire_SUITE.erl +++ b/lib/common_test/test/ct_surefire_SUITE.erl @@ -73,7 +73,9 @@ all() -> relative_path, url, logdir, - fail_pre_init_per_suite + fail_pre_init_per_suite, + skip_case_in_spec, + skip_suite_in_spec ]. %%-------------------------------------------------------------------- @@ -119,6 +121,18 @@ fail_pre_init_per_suite(Config) when is_list(Config) -> run(fail_pre_init_per_suite,[fail_pre_init_per_suite, {cth_surefire,[{path,Path}]}],Path,Config,[],Suites). +skip_case_in_spec(Config) -> + DataDir = ?config(data_dir,Config), + Spec = filename:join(DataDir,"skip_one_case.spec"), + Path = "skip_case_in_spec.xml", + run_spec(skip_case_in_spec,[{cth_surefire,[{path,Path}]}],Path,Config,Spec). + +skip_suite_in_spec(Config) -> + DataDir = ?config(data_dir,Config), + Spec = filename:join(DataDir,"skip_one_suite.spec"), + Path = "skip_suite_in_spec.xml", + run_spec(skip_suite_in_spec,[{cth_surefire,[{path,Path}]}],Path,Config,Spec). + %%%----------------------------------------------------------------- %%% HELP FUNCTIONS %%%----------------------------------------------------------------- @@ -129,8 +143,15 @@ run(Case,CTHs,Report,Config,ExtraOpts) -> Suite = filename:join(DataDir, "surefire_SUITE"), run(Case,CTHs,Report,Config,ExtraOpts,Suite). run(Case,CTHs,Report,Config,ExtraOpts,Suite) -> - {Opts,ERPid} = setup([{suite,Suite},{ct_hooks,CTHs},{label,Case}|ExtraOpts], - Config), + Test = [{suite,Suite},{ct_hooks,CTHs},{label,Case}|ExtraOpts], + do_run(Case, Report, Test, Config). + +run_spec(Case,CTHs,Report,Config,Spec) -> + Test = [{spec,Spec},{ct_hooks,CTHs},{label,Case}], + do_run(Case, Report, Test, Config). 
+ +do_run(Case, Report, Test, Config) -> + {Opts,ERPid} = setup(Test, Config), ok = execute(Case, Opts, ERPid, Config), LogDir = case lists:keyfind(logdir,1,Opts) of @@ -201,7 +222,10 @@ test_suite_events(pass_SUITE) -> {?eh,test_stats,{1,0,{0,0}}}, {?eh,tc_start,{ct_framework,end_per_suite}}, {?eh,tc_done,{ct_framework,end_per_suite,ok}}]; -test_suite_events(_) -> +test_suite_events(skip_all_surefire_SUITE) -> + [{?eh,tc_user_skip,{skip_all_surefire_SUITE,all,"skipped in spec"}}, + {?eh,test_stats,{0,0,{1,0}}}]; +test_suite_events(Test) -> [{?eh,tc_start,{surefire_SUITE,init_per_suite}}, {?eh,tc_done,{surefire_SUITE,init_per_suite,ok}}, {?eh,tc_start,{surefire_SUITE,tc_ok}}, @@ -210,46 +234,55 @@ test_suite_events(_) -> {?eh,tc_start,{surefire_SUITE,tc_fail}}, {?eh,tc_done,{surefire_SUITE,tc_fail, {failed,{error,{test_case_failed,"this test should fail"}}}}}, - {?eh,test_stats,{1,1,{0,0}}}, - {?eh,tc_start,{surefire_SUITE,tc_skip}}, - {?eh,tc_done,{surefire_SUITE,tc_skip,{skipped,"this test is skipped"}}}, - {?eh,test_stats,{1,1,{1,0}}}, - {?eh,tc_start,{surefire_SUITE,tc_autoskip_require}}, - {?eh,tc_done,{surefire_SUITE,tc_autoskip_require, - {auto_skipped,{require_failed,'_'}}}}, - {?eh,test_stats,{1,1,{1,1}}}, - [{?eh,tc_start,{surefire_SUITE,{init_per_group,g,[]}}}, - {?eh,tc_done,{surefire_SUITE,{init_per_group,g,[]},ok}}, - {?eh,tc_start,{surefire_SUITE,tc_ok}}, - {?eh,tc_done,{surefire_SUITE,tc_ok,ok}}, - {?eh,test_stats,{2,1,{1,1}}}, - {?eh,tc_start,{surefire_SUITE,tc_fail}}, - {?eh,tc_done,{surefire_SUITE,tc_fail, - {failed,{error,{test_case_failed,"this test should fail"}}}}}, - {?eh,test_stats,{2,2,{1,1}}}, - {?eh,tc_start,{surefire_SUITE,tc_skip}}, - {?eh,tc_done,{surefire_SUITE,tc_skip,{skipped,"this test is skipped"}}}, - {?eh,test_stats,{2,2,{2,1}}}, - {?eh,tc_start,{surefire_SUITE,tc_autoskip_require}}, - {?eh,tc_done,{surefire_SUITE,tc_autoskip_require, - {auto_skipped,{require_failed,'_'}}}}, - {?eh,test_stats,{2,2,{2,2}}}, - {?eh,tc_start,{surefire_SUITE,{end_per_group,g,[]}}}, - {?eh,tc_done,{surefire_SUITE,{end_per_group,g,[]},ok}}], - [{?eh,tc_start,{surefire_SUITE,{init_per_group,g_fail,[]}}}, - {?eh,tc_done,{surefire_SUITE,{init_per_group,g_fail,[]}, - {failed,{error,all_cases_should_be_skipped}}}}, - {?eh,tc_auto_skip,{surefire_SUITE,{tc_ok,g_fail}, - {failed, - {surefire_SUITE,init_per_group, - {'EXIT',all_cases_should_be_skipped}}}}}, - {?eh,test_stats,{2,2,{2,3}}}, - {?eh,tc_auto_skip,{surefire_SUITE,{end_per_group,g_fail}, - {failed, - {surefire_SUITE,init_per_group, - {'EXIT',all_cases_should_be_skipped}}}}}], - {?eh,tc_start,{surefire_SUITE,end_per_suite}}, - {?eh,tc_done,{surefire_SUITE,end_per_suite,ok}}]. 
+ {?eh,test_stats,{1,1,{0,0}}}] ++ + tc_skip_events(Test,undefined) ++ + [{?eh,test_stats,{1,1,{1,0}}}, + {?eh,tc_start,{surefire_SUITE,tc_autoskip_require}}, + {?eh,tc_done,{surefire_SUITE,tc_autoskip_require, + {auto_skipped,{require_failed,'_'}}}}, + {?eh,test_stats,{1,1,{1,1}}}, + [{?eh,tc_start,{surefire_SUITE,{init_per_group,g,[]}}}, + {?eh,tc_done,{surefire_SUITE,{init_per_group,g,[]},ok}}, + {?eh,tc_start,{surefire_SUITE,tc_ok}}, + {?eh,tc_done,{surefire_SUITE,tc_ok,ok}}, + {?eh,test_stats,{2,1,{1,1}}}, + {?eh,tc_start,{surefire_SUITE,tc_fail}}, + {?eh,tc_done,{surefire_SUITE,tc_fail, + {failed,{error,{test_case_failed,"this test should fail"}}}}}, + {?eh,test_stats,{2,2,{1,1}}}] ++ + tc_skip_events(Test,g) ++ + [{?eh,test_stats,{2,2,{2,1}}}, + {?eh,tc_start,{surefire_SUITE,tc_autoskip_require}}, + {?eh,tc_done,{surefire_SUITE,tc_autoskip_require, + {auto_skipped,{require_failed,'_'}}}}, + {?eh,test_stats,{2,2,{2,2}}}, + {?eh,tc_start,{surefire_SUITE,{end_per_group,g,[]}}}, + {?eh,tc_done,{surefire_SUITE,{end_per_group,g,[]},ok}}], + [{?eh,tc_start,{surefire_SUITE,{init_per_group,g_fail,[]}}}, + {?eh,tc_done,{surefire_SUITE,{init_per_group,g_fail,[]}, + {failed,{error,all_cases_should_be_skipped}}}}, + {?eh,tc_auto_skip,{surefire_SUITE,{tc_ok,g_fail}, + {failed, + {surefire_SUITE,init_per_group, + {'EXIT',all_cases_should_be_skipped}}}}}, + {?eh,test_stats,{2,2,{2,3}}}, + {?eh,tc_auto_skip,{surefire_SUITE,{end_per_group,g_fail}, + {failed, + {surefire_SUITE,init_per_group, + {'EXIT',all_cases_should_be_skipped}}}}}], + {?eh,tc_start,{surefire_SUITE,end_per_suite}}, + {?eh,tc_done,{surefire_SUITE,end_per_suite,ok}}]. + +tc_skip_events(skip_case_in_spec,Group) -> + [{?eh,tc_user_skip,{surefire_SUITE,tc_skip_name(Group),"skipped in spec"}}]; +tc_skip_events(_Test,_Group) -> + [{?eh,tc_start,{surefire_SUITE,tc_skip}}, + {?eh,tc_done,{surefire_SUITE,tc_skip,{skipped,"this test is skipped"}}}]. + +tc_skip_name(undefined) -> + tc_skip; +tc_skip_name(Group) -> + {tc_skip,Group}. test_events(fail_pre_init_per_suite) -> [{?eh,start_logging,{'DEF','RUNDIR'}}, @@ -257,6 +290,10 @@ test_events(fail_pre_init_per_suite) -> test_suite_events(pass_SUITE) ++ test_suite_events(fail_SUITE, {1,0,{0,1}}) ++ [{?eh,stop_logging,[]}]; +test_events(skip_suite_in_spec) -> + [{?eh,start_logging,'_'},{?eh,start_info,{1,1,0}}] ++ + test_suite_events(skip_all_surefire_SUITE) ++ + [{?eh,stop_logging,[]}]; test_events(Test) -> [{?eh,start_logging,'_'}, {?eh,start_info,{1,1,9}}] ++ test_suite_events(Test) ++ @@ -364,6 +401,8 @@ failed_or_skipped([]) -> events_to_result(E) -> events_to_result(E, []). 
+events_to_result([{?eh,tc_user_skip,{_Suite,all,_}}|E], Result) -> + events_to_result(E, [[[s]]|Result]); events_to_result([{?eh,tc_auto_skip,{_Suite,init_per_suite,_}}|E], Result) -> {Suite,Rest} = events_to_result1(E), events_to_result(Rest, [[[s]|Suite]|Result]); @@ -382,7 +421,7 @@ events_to_result1([{?eh,tc_done,{_Suite, end_per_suite,R}}|E]) -> events_to_result1([{?eh,tc_done,{_Suite,_Case,R}}|E]) -> {Suite,Rest} = events_to_result1(E), {[result(R)|Suite],Rest}; -events_to_result1([{?eh,tc_auto_skip,_}|E]) -> +events_to_result1([{?eh,Skip,_}|E]) when Skip==tc_auto_skip; Skip==tc_user_skip -> {Suite,Rest} = events_to_result1(E), {[[s]|Suite],Rest}; events_to_result1([_|E]) -> diff --git a/lib/common_test/test/ct_surefire_SUITE_data/skip_one_case.spec b/lib/common_test/test/ct_surefire_SUITE_data/skip_one_case.spec new file mode 100644 index 0000000000..42df8a7d1a --- /dev/null +++ b/lib/common_test/test/ct_surefire_SUITE_data/skip_one_case.spec @@ -0,0 +1,2 @@ +{suites,".",surefire_SUITE}. +{skip_cases,".",surefire_SUITE,tc_skip,"skipped in spec"}. diff --git a/lib/common_test/test/ct_surefire_SUITE_data/skip_one_suite.spec b/lib/common_test/test/ct_surefire_SUITE_data/skip_one_suite.spec new file mode 100644 index 0000000000..57966328ab --- /dev/null +++ b/lib/common_test/test/ct_surefire_SUITE_data/skip_one_suite.spec @@ -0,0 +1,2 @@ +{suites,".",[skip_all_surefire_SUITE]}. +{skip_suites,".",skip_all_surefire_SUITE,"skipped in spec"}. diff --git a/lib/common_test/test/ct_test_server_if_1_SUITE.erl b/lib/common_test/test/ct_test_server_if_1_SUITE.erl index 228d900545..ea8a1a5662 100644 --- a/lib/common_test/test/ct_test_server_if_1_SUITE.erl +++ b/lib/common_test/test/ct_test_server_if_1_SUITE.erl @@ -161,6 +161,7 @@ test_events(ts_if_1) -> {?eh,tc_start,{ts_if_1_SUITE,tc4}}, {?eh,tc_done,{ts_if_1_SUITE,tc4,{failed,{error,failed_on_purpose}}}}, {?eh,test_stats,{1,2,{0,1}}}, + {?eh,tc_start,{ts_if_1_SUITE,tc5}}, {?eh,tc_done,{ts_if_1_SUITE,tc5,{auto_skipped,{sequence_failed,seq1,tc4}}}}, {?eh,test_stats,{1,2,{0,2}}}, diff --git a/lib/common_test/test/ct_test_support.erl b/lib/common_test/test/ct_test_support.erl index e926abd885..05a452b99d 100644 --- a/lib/common_test/test/ct_test_support.erl +++ b/lib/common_test/test/ct_test_support.erl @@ -765,23 +765,23 @@ locate({parallel,TEvs}, Node, Evs, Config) -> {Done,RemEvs2,length(RemEvs2)} end; %% end_per_group auto- or user skipped - (TEv={TEH,AutoOrUserSkip,{M,end_per_group,R}}, {Done,RemEvs,_RemSize}) + (TEv={TEH,AutoOrUserSkip,{M,{end_per_group,G},R}}, {Done,RemEvs,_RemSize}) when AutoOrUserSkip == tc_auto_skip; AutoOrUserSkip == tc_user_skip -> RemEvs1 = lists:dropwhile( fun({EH,#event{name=tc_auto_skip, node=EvNode, - data={Mod,end_per_group,Reason}}}) when - EH == TEH, EvNode == Node, Mod == M -> + data={Mod,{end_per_group,EvGroupName},Reason}}}) when + EH == TEH, EvNode == Node, Mod == M, EvGroupName == G -> case match_data(R, Reason) of match -> false; _ -> true end; ({EH,#event{name=tc_user_skip, node=EvNode, - data={Mod,end_per_group,Reason}}}) when - EH == TEH, EvNode == Node, Mod == M -> + data={Mod,{end_per_group,EvGroupName},Reason}}}) when + EH == TEH, EvNode == Node, Mod == M, EvGroupName == G -> case match_data(R, Reason) of match -> false; _ -> true @@ -1008,20 +1008,20 @@ locate({shuffle,TEvs}, Node, Evs, Config) -> {Done,RemEvs2,length(RemEvs2)} end; %% end_per_group auto-or user skipped - (TEv={TEH,AutoOrUserSkip,{M,end_per_group,R}}, {Done,RemEvs,_RemSize}) + (TEv={TEH,AutoOrUserSkip,{M,{end_per_group,G},R}}, 
{Done,RemEvs,_RemSize}) when AutoOrUserSkip == tc_auto_skip; AutoOrUserSkip == tc_user_skip -> RemEvs1 = lists:dropwhile( fun({EH,#event{name=tc_auto_skip, node=EvNode, - data={Mod,end_per_group,Reason}}}) when - EH == TEH, EvNode == Node, Mod == M, Reason == R -> + data={Mod,{end_per_group,EvGroupName},Reason}}}) when + EH == TEH, EvNode == Node, Mod == M, EvGroupName == G, Reason == R -> false; ({EH,#event{name=tc_user_skip, node=EvNode, - data={Mod,end_per_group,Reason}}}) when - EH == TEH, EvNode == Node, Mod == M, Reason == R -> + data={Mod,{end_per_group,EvGroupName},Reason}}}) when + EH == TEH, EvNode == Node, Mod == M, EvGroupName == G, Reason == R -> false; ({EH,#event{name=stop_logging, node=EvNode,data=_}}) when @@ -1264,10 +1264,10 @@ log_events1([E={_EH,tc_done,{_M,{end_per_group,_GrName,Props},_R}} | Evs], Dev, io:format(Dev, "~s~p]},~n", [Ind,E]), log_events1(Evs, Dev, Ind--" ") end; -log_events1([E={_EH,tc_auto_skip,{_M,end_per_group,_Reason}} | Evs], Dev, Ind) -> +log_events1([E={_EH,tc_auto_skip,{_M,{end_per_group,_GrName},_Reason}} | Evs], Dev, Ind) -> io:format(Dev, "~s~p],~n", [Ind,E]), log_events1(Evs, Dev, Ind--" "); -log_events1([E={_EH,tc_user_skip,{_M,end_per_group,_Reason}} | Evs], Dev, Ind) -> +log_events1([E={_EH,tc_user_skip,{_M,{end_per_group,_GrName},_Reason}} | Evs], Dev, Ind) -> io:format(Dev, "~s~p],~n", [Ind,E]), log_events1(Evs, Dev, Ind--" "); log_events1([E], Dev, Ind) -> diff --git a/lib/common_test/test/ct_testspec_2_SUITE.erl b/lib/common_test/test/ct_testspec_2_SUITE.erl index 1a941df185..1bab80942a 100644 --- a/lib/common_test/test/ct_testspec_2_SUITE.erl +++ b/lib/common_test/test/ct_testspec_2_SUITE.erl @@ -220,7 +220,24 @@ basic_compatible_no_nodes(_Config) -> {tc2,{skip,"skipped"}}]}]}], merge_tests = true}, - verify_result(Verify,ListResult,FileResult). + verify_result(Verify,ListResult,FileResult), + + {ok,Tests} = ct_testspec:get_tests([SpecFile]), + ct:pal("ct_testspec:get_tests/1:~n~p~n", [Tests]), + [{[SpecFile],[{Node,Run,Skip}]}] = Tests, + [{Alias1V,x_SUITE,all}, + {Alias1V,y_SUITE,[{g1,all},{g2,all},tc1,tc2]}, + {Alias1V,z_SUITE,all}, + {Alias2V,x_SUITE,all}, + {Alias2V,y_SUITE,all}] = lists:sort(Run), + [{Alias1V,z_SUITE,"skipped"}, + {Alias2V,x_SUITE,{g1,all},"skipped"}, + {Alias2V,x_SUITE,{g2,all},"skipped"}, + {Alias2V,y_SUITE,tc1,"skipped"}, + {Alias2V,y_SUITE,tc2,"skipped"}] = lists:sort(Skip), + + ok. + %%%----------------------------------------------------------------- %%% @@ -346,7 +363,25 @@ basic_compatible_nodes(_Config) -> {tc2,{skip,"skipped"}}]}]}], merge_tests = true}, - verify_result(Verify,ListResult,FileResult). + verify_result(Verify,ListResult,FileResult), + + {ok,Tests} = ct_testspec:get_tests([SpecFile]), + ct:pal("ct_testspec:get_tests/1:~n~p~n", [Tests]), + [{[SpecFile],[{Node,[],[]}, + {Node1,Run1,Skip1}, + {Node2,Run2,Skip2}]}] = Tests, + [{TO1V,x_SUITE,all}, + {TO1V,y_SUITE,[{g1,all},{g2,all},tc1,tc2]}, + {TO1V,z_SUITE,all}] = lists:sort(Run1), + [{TO2V,x_SUITE,all}, + {TO2V,y_SUITE,all}] = lists:sort(Run2), + [{TO1V,z_SUITE,"skipped"}] = lists:sort(Skip1), + [{TO2V,x_SUITE,{g1,all},"skipped"}, + {TO2V,x_SUITE,{g2,all},"skipped"}, + {TO2V,y_SUITE,tc1,"skipped"}, + {TO2V,y_SUITE,tc2,"skipped"}] = lists:sort(Skip2), + + ok. %%%----------------------------------------------------------------- %%% @@ -439,7 +474,28 @@ no_merging(_Config) -> [{y_SUITE,[{tc1,{skip,"skipped"}}, {tc2,{skip,"skipped"}}]}]}]}, - verify_result(Verify,ListResult,FileResult). 
+ verify_result(Verify,ListResult,FileResult), + + {ok,Tests} = ct_testspec:get_tests([SpecFile]), + ct:pal("ct_testspec:get_tests/1:~n~p~n", [Tests]), + [{[SpecFile],[{Node,[],[]}, + {Node1,Run1,Skip1}, + {Node2,Run2,Skip2}]}] = Tests, + [{TO1V,x_SUITE,all}, + {TO1V,y_SUITE,[tc1,tc2]}, + {TO1V,y_SUITE,[{g1,all},{g2,all}]}, + {TO1V,z_SUITE,all}] = lists:sort(Run1), + [{TO2V,x_SUITE,all}, + {TO2V,x_SUITE,[{skipped,g1,all},{skipped,g2,all}]}, + {TO2V,y_SUITE,all}, + {TO2V,y_SUITE,[{skipped,tc1},{skipped,tc2}]}] = lists:sort(Run2), + [{TO1V,z_SUITE,"skipped"}] = lists:sort(Skip1), + [{TO2V,x_SUITE,{g1,all},"skipped"}, + {TO2V,x_SUITE,{g2,all},"skipped"}, + {TO2V,y_SUITE,tc1,"skipped"}, + {TO2V,y_SUITE,tc2,"skipped"}] = lists:sort(Skip2), + + ok. %%%----------------------------------------------------------------- %%% @@ -510,7 +566,25 @@ multiple_specs(_Config) -> {y_SUITE,[all,{tc1,{skip,"skipped"}}, {tc2,{skip,"skipped"}}]}]}]}, - verify_result(Verify,FileResult,FileResult). + verify_result(Verify,FileResult,FileResult), + + {ok,Tests} = ct_testspec:get_tests([[SpecFile1,SpecFile2]]), + ct:pal("ct_testspec:get_tests/1:~n~p~n", [Tests]), + [{[SpecFile1,SpecFile2],[{Node,[],[]}, + {Node1,Run1,Skip1}, + {Node2,Run2,Skip2}]}] = Tests, + [{TO1V,x_SUITE,all}, + {TO1V,y_SUITE,[{g1,all},{g2,all},tc1,tc2]}, + {TO1V,z_SUITE,all}] = lists:sort(Run1), + [{TO2V,x_SUITE,all}, + {TO2V,y_SUITE,all}] = lists:sort(Run2), + [{TO1V,z_SUITE,"skipped"}] = lists:sort(Skip1), + [{TO2V,x_SUITE,{g1,all},"skipped"}, + {TO2V,x_SUITE,{g2,all},"skipped"}, + {TO2V,y_SUITE,tc1,"skipped"}, + {TO2V,y_SUITE,tc2,"skipped"}] = lists:sort(Skip2), + + ok. %%%----------------------------------------------------------------- %%% diff --git a/lib/common_test/vsn.mk b/lib/common_test/vsn.mk index 2fab4d3883..e6ae8b2e7a 100644 --- a/lib/common_test/vsn.mk +++ b/lib/common_test/vsn.mk @@ -1 +1 @@ -COMMON_TEST_VSN = 1.13 +COMMON_TEST_VSN = 1.14 diff --git a/lib/compiler/doc/src/compile.xml b/lib/compiler/doc/src/compile.xml index bd488a39a5..ed04dac1c0 100644 --- a/lib/compiler/doc/src/compile.xml +++ b/lib/compiler/doc/src/compile.xml @@ -418,7 +418,7 @@ module.beam: module.erl \ without module prefix to local or imported functions before trying with auto-imported BIFs. If the BIF is to be called, use the <c>erlang</c> module prefix in the call, not - <c>{ no_auto_import,[{F,A}, ...]}</c>.</p> + <c>{no_auto_import,[{F,A}, ...]}</c>.</p> </note> <p>If this option is written in the source code, as a <c>-compile</c> directive, the syntax <c>F/A</c> can be used instead @@ -439,6 +439,15 @@ module.beam: module.erl \ </p> </item> + <tag><c>{extra_chunks, [{binary(), binary()}]}</c></tag> + <item> + <p>Pass extra chunks to be stored in the <c>.beam</c> file. + The extra chunks must be a list of tuples with a four byte + binary as chunk name followed by a binary with the chunk contents. + See <seealso marker="stdlib:beam_lib">beam_lib</seealso> for + more information. + </p> + </item> </taglist> <p>If warnings are turned on (option <c>report_warnings</c> @@ -679,7 +688,7 @@ module.beam: module.erl \ <fsummary>Compiles a list of forms.</fsummary> <desc> <p>Is the same as - <c>forms(File, [verbose,report_errors,report_warnings])</c>. + <c>forms(Forms, [verbose,report_errors,report_warnings])</c>. 
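The extra_chunks option documented in the compile.xml hunk above can be exercised together with beam_lib to read the chunk back. A brief sketch, assuming a hypothetical module my_mod.erl and a made-up chunk name "ExCk" (chunk names must be four bytes):

    %% Compile to a binary and attach a custom chunk.
    {ok, my_mod, Beam} =
        compile:file("my_mod.erl",
                     [binary,
                      {extra_chunks, [{<<"ExCk">>, term_to_binary(#{vsn => 1})}]}]),

    %% Read the chunk back from the resulting BEAM binary.
    {ok, {my_mod, [{"ExCk", Chunk}]}} = beam_lib:chunks(Beam, ["ExCk"]),
    #{vsn := 1} = binary_to_term(Chunk).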
</p> </desc> </func> diff --git a/lib/compiler/doc/src/notes.xml b/lib/compiler/doc/src/notes.xml index 2e58b68bf0..449453bf88 100644 --- a/lib/compiler/doc/src/notes.xml +++ b/lib/compiler/doc/src/notes.xml @@ -32,6 +32,22 @@ <p>This document describes the changes made to the Compiler application.</p> +<section><title>Compiler 7.0.4</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Minor internal changes. A typo in the documentation was + also fixed.</p> + <p> + Own Id: OTP-14240</p> + </item> + </list> + </section> + +</section> + <section><title>Compiler 7.0.3</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/compiler/src/beam_asm.erl b/lib/compiler/src/beam_asm.erl index a2f5dc674c..1bda185acd 100644 --- a/lib/compiler/src/beam_asm.erl +++ b/lib/compiler/src/beam_asm.erl @@ -21,7 +21,7 @@ -module(beam_asm). --export([module/4]). +-export([module/5]). -export([encode/2]). -export_type([fail/0,label/0,reg/0,src/0,module_code/0,function_name/0]). @@ -49,28 +49,26 @@ -type function_name() :: atom(). --type exports() :: [{function_name(),arity()}]. - -type asm_function() :: {'function',function_name(),arity(),label(),[asm_instruction()]}. -type module_code() :: {module(),[_],[_],[asm_function()],pos_integer()}. --spec module(module_code(), exports(), [_], [compile:option()]) -> +-spec module(module_code(), [{binary(), binary()}], [_], [compile:option()], [compile:option()]) -> {'ok',binary()}. -module(Code, Abst, SourceFile, Opts) -> - {ok,assemble(Code, Abst, SourceFile, Opts)}. +module(Code, ExtraChunks, SourceFile, Opts, CompilerOpts) -> + {ok,assemble(Code, ExtraChunks, SourceFile, Opts, CompilerOpts)}. -assemble({Mod,Exp0,Attr0,Asm0,NumLabels}, Abst, SourceFile, Opts) -> +assemble({Mod,Exp0,Attr0,Asm0,NumLabels}, ExtraChunks, SourceFile, Opts, CompilerOpts) -> {1,Dict0} = beam_dict:atom(Mod, beam_dict:new()), {0,Dict1} = beam_dict:fname(atom_to_list(Mod) ++ ".erl", Dict0), NumFuncs = length(Asm0), {Asm,Attr} = on_load(Asm0, Attr0), Exp = cerl_sets:from_list(Exp0), {Code,Dict2} = assemble_1(Asm, Exp, Dict1, []), - build_file(Code, Attr, Dict2, NumLabels, NumFuncs, Abst, SourceFile, Opts). + build_file(Code, Attr, Dict2, NumLabels, NumFuncs, ExtraChunks, SourceFile, Opts, CompilerOpts). on_load(Fs0, Attr0) -> case proplists:get_value(on_load, Attr0) of @@ -113,7 +111,7 @@ assemble_function([H|T], Acc, Dict0) -> assemble_function([], Code, Dict) -> {Code, Dict}. -build_file(Code, Attr, Dict, NumLabels, NumFuncs, Abst, SourceFile, Opts) -> +build_file(Code, Attr, Dict, NumLabels, NumFuncs, ExtraChunks, SourceFile, Opts, CompilerOpts) -> %% Create the code chunk. CodeChunk = chunk(<<"Code">>, @@ -125,9 +123,9 @@ build_file(Code, Attr, Dict, NumLabels, NumFuncs, Abst, SourceFile, Opts) -> Code), %% Create the atom table chunk. - - {NumAtoms, AtomTab} = beam_dict:atom_table(Dict), - AtomChunk = chunk(<<"Atom">>, <<NumAtoms:32>>, AtomTab), + AtomEncoding = atom_encoding(CompilerOpts), + {NumAtoms, AtomTab} = beam_dict:atom_table(Dict, AtomEncoding), + AtomChunk = chunk(atom_chunk_name(AtomEncoding), <<NumAtoms:32>>, AtomTab), %% Create the import table chunk. @@ -188,21 +186,30 @@ build_file(Code, Attr, Dict, NumLabels, NumFuncs, Abst, SourceFile, Opts) -> AttrChunk = chunk(<<"Attr">>, Attributes), CompileChunk = chunk(<<"CInf">>, Compile), - %% Create the abstract code chunk. + %% Compile all extra chunks. 
- AbstChunk = chunk(<<"Abst">>, Abst), + CheckedChunks = [chunk(Key, Value) || {Key, Value} <- ExtraChunks], %% Create IFF chunk. Chunks = case member(slim, Opts) of true -> - [Essentials,AttrChunk,AbstChunk]; + [Essentials,AttrChunk,CheckedChunks]; false -> [Essentials,LocChunk,AttrChunk, - CompileChunk,AbstChunk,LineChunk] + CompileChunk,CheckedChunks,LineChunk] end, build_form(<<"BEAM">>, Chunks). +atom_encoding(Opts) -> + case proplists:get_bool(no_utf8_atoms, Opts) of + false -> utf8; + true -> latin1 + end. + +atom_chunk_name(utf8) -> <<"AtU8">>; +atom_chunk_name(latin1) -> <<"Atom">>. + %% finalize_fun_table(Essentials, MD5) -> FinalizedEssentials %% Update the 'old_uniq' field in the entry for each fun in the %% 'FunT' chunk. We'll use part of the MD5 for the module as a diff --git a/lib/compiler/src/beam_dict.erl b/lib/compiler/src/beam_dict.erl index 719d799fd7..990e86062a 100644 --- a/lib/compiler/src/beam_dict.erl +++ b/lib/compiler/src/beam_dict.erl @@ -24,7 +24,7 @@ -export([new/0,opcode/2,highest_opcode/1, atom/2,local/4,export/4,import/4, string/2,lambda/3,literal/2,line/2,fname/2, - atom_table/1,local_table/1,export_table/1,import_table/1, + atom_table/2,local_table/1,export_table/1,import_table/1, string_table/1,lambda_table/1,literal_table/1, line_table/1]). @@ -197,15 +197,15 @@ fname(Name, #asm{fnames=Fnames}=Dict) -> end. %% Returns the atom table. -%% atom_table(Dict) -> {LastIndex,[Length,AtomString...]} --spec atom_table(bdict()) -> {non_neg_integer(), [[non_neg_integer(),...]]}. +%% atom_table(Dict, Encoding) -> {LastIndex,[Length,AtomString...]} +-spec atom_table(bdict(), latin1 | utf8) -> {non_neg_integer(), [[non_neg_integer(),...]]}. -atom_table(#asm{atoms=Atoms}) -> +atom_table(#asm{atoms=Atoms}, Encoding) -> NumAtoms = maps:size(Atoms), Sorted = lists:keysort(2, maps:to_list(Atoms)), {NumAtoms,[begin - L = atom_to_list(A), - [length(L)|L] + L = atom_to_binary(A, Encoding), + [byte_size(L),L] end || {A,_} <- Sorted]}. %% Returns the table of local functions. diff --git a/lib/compiler/src/beam_type.erl b/lib/compiler/src/beam_type.erl index 050c599d6b..2b5d558ee4 100644 --- a/lib/compiler/src/beam_type.erl +++ b/lib/compiler/src/beam_type.erl @@ -683,6 +683,9 @@ op_type('bsr') -> integer; op_type('div') -> integer; op_type(_) -> unknown. 
+flush(Rs, [{set,[_],[_,_,_],{bif,is_record,_}}|_]=Is0, Acc0) -> + Acc = flush_all(Rs, Is0, Acc0), + {[],Acc}; flush(Rs, [{set,[_],[],{put_tuple,_}}|_]=Is0, Acc0) -> Acc = flush_all(Rs, Is0, Acc0), {[],Acc}; diff --git a/lib/compiler/src/compile.erl b/lib/compiler/src/compile.erl index 069add7890..c849306c0d 100644 --- a/lib/compiler/src/compile.erl +++ b/lib/compiler/src/compile.erl @@ -214,11 +214,21 @@ expand_opt(report, Os) -> expand_opt(return, Os) -> [return_errors,return_warnings|Os]; expand_opt(r12, Os) -> - [no_recv_opt,no_line_info|Os]; + [no_recv_opt,no_line_info,no_utf8_atoms|Os]; expand_opt(r13, Os) -> - [no_recv_opt,no_line_info|Os]; + [no_recv_opt,no_line_info,no_utf8_atoms|Os]; expand_opt(r14, Os) -> - [no_line_info|Os]; + [no_line_info,no_utf8_atoms|Os]; +expand_opt(r15, Os) -> + [no_utf8_atoms|Os]; +expand_opt(r16, Os) -> + [no_utf8_atoms|Os]; +expand_opt(r17, Os) -> + [no_utf8_atoms|Os]; +expand_opt(r18, Os) -> + [no_utf8_atoms|Os]; +expand_opt(r19, Os) -> + [no_utf8_atoms|Os]; expand_opt({debug_info_key,_}=O, Os) -> [encrypt_debug_info,O|Os]; expand_opt(no_float_opt, Os) -> @@ -305,19 +315,25 @@ format_error_reason(Reason) -> mod_options=[] :: [option()], %Options for module_info encoding=none :: none | epp:source_encoding(), errors=[] :: [err_warn_info()], - warnings=[] :: [err_warn_info()]}). + warnings=[] :: [err_warn_info()], + extra_chunks=[] :: [{binary(), binary()}]}). internal({forms,Forms}, Opts0) -> {_,Ps} = passes(forms, Opts0), Source = proplists:get_value(source, Opts0, ""), Opts1 = proplists:delete(source, Opts0), - Compile = #compile{options=Opts1,mod_options=Opts1}, + Compile = build_compile(Opts1), internal_comp(Ps, Forms, Source, "", Compile); internal({file,File}, Opts) -> {Ext,Ps} = passes(file, Opts), - Compile = #compile{options=Opts,mod_options=Opts}, + Compile = build_compile(Opts), internal_comp(Ps, none, File, Ext, Compile). +build_compile(Opts0) -> + ExtraChunks = proplists:get_value(extra_chunks, Opts0, []), + Opts1 = proplists:delete(extra_chunks, Opts0), + #compile{options=Opts1,mod_options=Opts1,extra_chunks=ExtraChunks}. + internal_comp(Passes, Code0, File, Suffix, St0) -> Dir = filename:dirname(File), Base = filename:basename(File, Suffix), @@ -1376,13 +1392,15 @@ encrypt({des3_cbc=Type,Key,IVec,BlockSize}, Bin0) -> save_core_code(Code, St) -> {ok,Code,St#compile{core_code=cerl:from_records(Code)}}. -beam_asm(Code0, #compile{ifile=File,abstract_code=Abst,mod_options=Opts0}=St) -> +beam_asm(Code0, #compile{ifile=File,abstract_code=Abst,extra_chunks=ExtraChunks, + options=CompilerOpts,mod_options=Opts0}=St) -> Source = paranoid_absname(File), Opts1 = lists:map(fun({debug_info_key,_}) -> {debug_info_key,'********'}; (Other) -> Other end, Opts0), Opts2 = [O || O <- Opts1, effects_code_generation(O)], - case beam_asm:module(Code0, Abst, Source, Opts2) of + Chunks = [{<<"Abst">>, Abst} | ExtraChunks], + case beam_asm:module(Code0, Chunks, Source, Opts2, CompilerOpts) of {ok,Code} -> {ok,Code,St#compile{abstract_code=[]}} end. diff --git a/lib/compiler/src/v3_core.erl b/lib/compiler/src/v3_core.erl index 14cd41ae27..8dea7ec03a 100644 --- a/lib/compiler/src/v3_core.erl +++ b/lib/compiler/src/v3_core.erl @@ -1059,13 +1059,30 @@ count_bits(Int) -> count_bits_1(0, Bits) -> Bits; count_bits_1(Int, Bits) -> count_bits_1(Int bsr 64, Bits+64). -bin_expand_strings(Es) -> - foldr(fun ({bin_element,Line,{string,_,S},Sz,Ts}, Es1) -> - foldr(fun (C, Es2) -> - [{bin_element,Line,{char,Line,C},Sz,Ts}|Es2] - end, Es1, S); - (E, Es1) -> [E|Es1] - end, [], Es). 
+bin_expand_strings(Es0) -> + foldr(fun ({bin_element,Line,{string,_,S},{integer,_,8},_}, Es) -> + bin_expand_string(S, Line, 0, 0) ++ Es; + ({bin_element,Line,{string,_,S},Sz,Ts}, Es1) -> + foldr( + fun (C, Es) -> + [{bin_element,Line,{char,Line,C},Sz,Ts}|Es] + end, Es1, S); + (E, Es) -> + [E|Es] + end, [], Es0). + +bin_expand_string(S, Line, Val, Size) when Size >= 2048 -> + Combined = make_combined(Line, Val, Size), + [Combined|bin_expand_string(S, Line, 0, 0)]; +bin_expand_string([H|T], Line, Val, Size) -> + bin_expand_string(T, Line, (Val bsl 8) bor H, Size+8); +bin_expand_string([], Line, Val, Size) -> + [make_combined(Line, Val, Size)]. + +make_combined(Line, Val, Size) -> + {bin_element,Line,{integer,Line,Val}, + {integer,Line,Size}, + [integer,{unit,1},unsigned,big]}. expr_bin_1(Es, St) -> foldr(fun (E, {Ces,Esp,St0}) -> diff --git a/lib/compiler/test/beam_type_SUITE.erl b/lib/compiler/test/beam_type_SUITE.erl index 492067ef00..7ca544a537 100644 --- a/lib/compiler/test/beam_type_SUITE.erl +++ b/lib/compiler/test/beam_type_SUITE.erl @@ -22,7 +22,7 @@ -export([all/0,suite/0,groups/0,init_per_suite/1,end_per_suite/1, init_per_group/2,end_per_group/2, integers/1,coverage/1,booleans/1,setelement/1,cons/1, - tuple/1]). + tuple/1,record_float/1]). suite() -> [{ct_hooks,[ts_install_cth]}]. @@ -37,7 +37,8 @@ groups() -> booleans, setelement, cons, - tuple + tuple, + record_float ]}]. init_per_suite(Config) -> @@ -126,5 +127,22 @@ tuple(_Config) -> do_tuple() -> {0, _} = {necessary}. +-record(x, {a}). + +record_float(_Config) -> + 17.0 = record_float(#x{a={0}}, 1700), + 23.0 = record_float(#x{a={0}}, 2300.0), + {'EXIT',{if_clause,_}} = (catch record_float(#x{a={1}}, 88)), + {'EXIT',{if_clause,_}} = (catch record_float(#x{a={}}, 88)), + {'EXIT',{if_clause,_}} = (catch record_float(#x{}, 88)), + ok. + +record_float(R, N0) -> + N = N0 / 100, + if element(1, R#x.a) =:= 0 -> + N + end. + + id(I) -> I. diff --git a/lib/compiler/test/compile_SUITE.erl b/lib/compiler/test/compile_SUITE.erl index 8c09414a52..10740ac2b0 100644 --- a/lib/compiler/test/compile_SUITE.erl +++ b/lib/compiler/test/compile_SUITE.erl @@ -30,7 +30,7 @@ file_1/1, forms_2/1, module_mismatch/1, big_file/1, outdir/1, binary/1, makedep/1, cond_and_ifdef/1, listings/1, listings_big/1, other_output/1, kernel_listing/1, encrypted_abstr/1, - strict_record/1, + strict_record/1, utf8_atoms/1, extra_chunks/1, cover/1, env/1, core/1, core_roundtrip/1, asm/1, optimized_guards/1, sys_pre_attributes/1, dialyzer/1, @@ -48,7 +48,7 @@ all() -> [app_test, appup_test, file_1, forms_2, module_mismatch, big_file, outdir, binary, makedep, cond_and_ifdef, listings, listings_big, other_output, kernel_listing, encrypted_abstr, - strict_record, + strict_record, utf8_atoms, extra_chunks, cover, env, core, core_roundtrip, asm, optimized_guards, sys_pre_attributes, dialyzer, warnings, pre_load_check, env_compiler_options]. @@ -450,8 +450,10 @@ do_kernel_listing({M,A}) -> try {ok,M,Kern} = compile:forms(A, [to_kernel]), IoList = v3_kernel_pp:format(Kern), - _ = iolist_size(IoList), - ok + case unicode:characters_to_binary(IoList) of + Bin when is_binary(Bin) -> + ok + end catch throw:{error,Error} -> io:format("*** compilation failure '~p' for module ~s\n", @@ -680,6 +682,32 @@ test_sloppy() -> {1,2} = record_access:test(Turtle), Turtle. 
+utf8_atoms(Config) when is_list(Config) -> + Anno = erl_anno:new(1), + Atom = binary_to_atom(<<"こんにちは"/utf8>>, utf8), + Forms = [{attribute,Anno,compile,[export_all]}, + {function,Anno,atom,0,[{clause,Anno,[],[],[{atom,Anno,Atom}]}]}], + + Utf8AtomForms = [{attribute,Anno,module,utf8_atom}|Forms], + {ok,utf8_atom,Utf8AtomBin} = + compile:forms(Utf8AtomForms, [binary]), + {ok,{utf8_atom,[{atoms,_}]}} = + beam_lib:chunks(Utf8AtomBin, [atoms]), + code:load_binary(utf8_atom, "compile_SUITE", Utf8AtomBin), + Atom = utf8_atom:atom(), + + NoUtf8AtomForms = [{attribute,Anno,module,no_utf8_atom}|Forms], + error = compile:forms(NoUtf8AtomForms, [binary, r19]). + +extra_chunks(Config) when is_list(Config) -> + Anno = erl_anno:new(1), + Forms = [{attribute,Anno,module,extra_chunks}], + + {ok,extra_chunks,ExtraChunksBinary} = + compile:forms(Forms, [binary, {extra_chunks, [{<<"ExCh">>, <<"Contents">>}]}]), + {ok,{extra_chunks,[{"ExCh",<<"Contents">>}]}} = + beam_lib:chunks(ExtraChunksBinary, ["ExCh"]). + env(Config) when is_list(Config) -> {Simple,Target} = get_files(Config, simple, env), {ok,Cwd} = file:get_cwd(), @@ -751,7 +779,7 @@ do_core_1(M, A, Outdir) -> {ok,M,Core0} = compile:forms(A, [to_core]), CoreFile = filename:join(Outdir, atom_to_list(M)++".core"), CorePP = core_pp:format(Core0), - ok = file:write_file(CoreFile, CorePP), + ok = file:write_file(CoreFile, unicode:characters_to_binary(CorePP)), %% Parse the .core file and return the result as Core Erlang Terms. Core = case compile:file(CoreFile, [report_errors,from_core,no_copt,to_core,binary]) of @@ -823,7 +851,7 @@ do_core_roundtrip_1(Mod, Abstr, Outdir) -> do_core_roundtrip_2(M, Core0, Outdir) -> CoreFile = filename:join(Outdir, atom_to_list(M)++".core"), CorePP = core_pp:format_all(Core0), - ok = file:write_file(CoreFile, CorePP), + ok = file:write_file(CoreFile, unicode:characters_to_binary(CorePP)), %% Parse the .core file and return the result as Core Erlang Terms. Core2 = case compile:file(CoreFile, [report_errors,from_core, diff --git a/lib/compiler/test/compile_SUITE_data/simple.erl b/lib/compiler/test/compile_SUITE_data/simple.erl index d8324dafaf..9385d101e0 100644 --- a/lib/compiler/test/compile_SUITE_data/simple.erl +++ b/lib/compiler/test/compile_SUITE_data/simple.erl @@ -19,7 +19,7 @@ %% -module(simple). --export([test/0]). +-export([test/0,unicode/0]). -ifdef(need_foo). -export([foo/0]). @@ -28,6 +28,9 @@ test() -> passed. +unicode() -> + {"это",'спутник'}. + %% Conditional inclusion. %% Compile with [{d, need_foo}, {d, foo_value, 42}]. diff --git a/lib/compiler/test/guard_SUITE.erl b/lib/compiler/test/guard_SUITE.erl index a662d85272..ccb9b58225 100644 --- a/lib/compiler/test/guard_SUITE.erl +++ b/lib/compiler/test/guard_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2001-2016. All Rights Reserved. +%% Copyright Ericsson AB 2001-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -1538,7 +1538,7 @@ literal_type_tests_1(Config) -> Func = {function, Anno, test, 0, [{clause,Anno,[],[],Tests}]}, Form = [{attribute,Anno,module,Mod}, {attribute,Anno,compile,export_all}, - Func, {eof,Anno}], + Func, {eof,999}], %% Print generated code for inspection. 
lists:foreach(fun (F) -> io:put_chars([erl_pp:form(F),"\n"]) end, Form), diff --git a/lib/compiler/test/lc_SUITE.erl b/lib/compiler/test/lc_SUITE.erl index adb96fb87d..76dfaee482 100644 --- a/lib/compiler/test/lc_SUITE.erl +++ b/lib/compiler/test/lc_SUITE.erl @@ -227,7 +227,7 @@ effect(Config) when is_list(Config) -> lc_SUITE -> _ = [{'EXIT',{badarg,_}} = (catch binary_to_atom(<<C/utf8>>, utf8)) || - C <- lists:seq(16#10000, 16#FFFFF)]; + C <- lists:seq(16#FF10000, 16#FFFFFFF)]; _ -> ok end, diff --git a/lib/compiler/vsn.mk b/lib/compiler/vsn.mk index 9c3cf1f34b..5c87304a01 100644 --- a/lib/compiler/vsn.mk +++ b/lib/compiler/vsn.mk @@ -1 +1 @@ -COMPILER_VSN = 7.0.3 +COMPILER_VSN = 7.0.4 diff --git a/lib/crypto/c_src/crypto.c b/lib/crypto/c_src/crypto.c index 44c3fc4f06..b2f31870b9 100644 --- a/lib/crypto/c_src/crypto.c +++ b/lib/crypto/c_src/crypto.c @@ -71,6 +71,46 @@ PACKED_OPENSSL_VERSION(MAJ,MIN,FIX,('a'-1)) +/* LibreSSL was cloned from OpenSSL 1.0.1g and claims to be API and BPI compatible + * with 1.0.1. + * + * LibreSSL has the same names on include files and symbols as OpenSSL, but defines + * the OPENSSL_VERSION_NUMBER to be >= 2.0.0 + * + * Therefor works tests like this as intendend: + * OPENSSL_VERSION_NUMBER >= PACKED_OPENSSL_VERSION_PLAIN(1,0,0) + * (The test is for example "2.4.2" >= "1.0.0" although the test + * with the cloned OpenSSL test would be "1.0.1" >= "1.0.0") + * + * But tests like this gives wrong result: + * OPENSSL_VERSION_NUMBER < PACKED_OPENSSL_VERSION_PLAIN(1,1,0) + * (The test is false since "2.4.2" < "1.1.0". It should have been + * true because the LibreSSL API version is "1.0.1") + * + */ + +#ifdef LIBRESSL_VERSION_NUMBER +/* A macro to test on in this file */ +#define HAS_LIBRESSL +#endif + +#ifdef HAS_LIBRESSL +/* LibreSSL dislikes FIPS */ +# ifdef FIPS_SUPPORT +# undef FIPS_SUPPORT +# endif + +/* LibreSSL wants the 1.0.1 API */ +# define NEED_EVP_COMPATIBILITY_FUNCTIONS +#endif + + +#if OPENSSL_VERSION_NUMBER < PACKED_OPENSSL_VERSION_PLAIN(1,1,0) +# define NEED_EVP_COMPATIBILITY_FUNCTIONS +#endif + + + #if OPENSSL_VERSION_NUMBER >= PACKED_OPENSSL_VERSION_PLAIN(1,0,0) #include <openssl/modes.h> #endif @@ -120,7 +160,9 @@ #endif #if OPENSSL_VERSION_NUMBER >= PACKED_OPENSSL_VERSION_PLAIN(1,1,0) -# define HAVE_CHACHA20_POLY1305 +# ifndef HAS_LIBRESSL +# define HAVE_CHACHA20_POLY1305 +# endif #endif #if OPENSSL_VERSION_NUMBER <= PACKED_OPENSSL_VERSION(0,9,8,'l') @@ -205,8 +247,8 @@ do { \ } \ } while (0) -#if OPENSSL_VERSION_NUMBER < PACKED_OPENSSL_VERSION_PLAIN(1,1,0) +#ifdef NEED_EVP_COMPATIBILITY_FUNCTIONS /* * In OpenSSL 1.1.0, most structs are opaque. 
That means that * the structs cannot be allocated as automatic variables on the @@ -237,9 +279,19 @@ static void HMAC_CTX_free(HMAC_CTX *ctx) #define EVP_MD_CTX_new() EVP_MD_CTX_create() #define EVP_MD_CTX_free(ctx) EVP_MD_CTX_destroy(ctx) +static INLINE void *BN_GENCB_get_arg(BN_GENCB *cb); + +static INLINE void *BN_GENCB_get_arg(BN_GENCB *cb) +{ + return cb->arg; +} + static INLINE int RSA_set0_key(RSA *r, BIGNUM *n, BIGNUM *e, BIGNUM *d); +static INLINE void RSA_get0_key(const RSA *r, const BIGNUM **n, const BIGNUM **e, const BIGNUM **d); static INLINE int RSA_set0_factors(RSA *r, BIGNUM *p, BIGNUM *q); +static INLINE void RSA_get0_factors(const RSA *r, const BIGNUM **p, const BIGNUM **q); static INLINE int RSA_set0_crt_params(RSA *r, BIGNUM *dmp1, BIGNUM *dmq1, BIGNUM *iqmp); +static INLINE void RSA_get0_crt_params(const RSA *r, const BIGNUM **dmp1, const BIGNUM **dmq1, const BIGNUM **iqmp); static INLINE int RSA_set0_key(RSA *r, BIGNUM *n, BIGNUM *e, BIGNUM *d) { @@ -249,6 +301,13 @@ static INLINE int RSA_set0_key(RSA *r, BIGNUM *n, BIGNUM *e, BIGNUM *d) return 1; } +static INLINE void RSA_get0_key(const RSA *r, const BIGNUM **n, const BIGNUM **e, const BIGNUM **d) +{ + *n = r->n; + *e = r->e; + *d = r->d; +} + static INLINE int RSA_set0_factors(RSA *r, BIGNUM *p, BIGNUM *q) { r->p = p; @@ -256,6 +315,12 @@ static INLINE int RSA_set0_factors(RSA *r, BIGNUM *p, BIGNUM *q) return 1; } +static INLINE void RSA_get0_factors(const RSA *r, const BIGNUM **p, const BIGNUM **q) +{ + *p = r->p; + *q = r->q; +} + static INLINE int RSA_set0_crt_params(RSA *r, BIGNUM *dmp1, BIGNUM *dmq1, BIGNUM *iqmp) { r->dmp1 = dmp1; @@ -264,6 +329,13 @@ static INLINE int RSA_set0_crt_params(RSA *r, BIGNUM *dmp1, BIGNUM *dmq1, BIGNUM return 1; } +static INLINE void RSA_get0_crt_params(const RSA *r, const BIGNUM **dmp1, const BIGNUM **dmq1, const BIGNUM **iqmp) +{ + *dmp1 = r->dmp1; + *dmq1 = r->dmq1; + *iqmp = r->iqmp; +} + static INLINE int DSA_set0_key(DSA *d, BIGNUM *pub_key, BIGNUM *priv_key); static INLINE int DSA_set0_pqg(DSA *d, BIGNUM *p, BIGNUM *q, BIGNUM *g); @@ -326,7 +398,11 @@ DH_get0_key(const DH *dh, const BIGNUM **pub_key, const BIGNUM **priv_key) *priv_key = dh->priv_key; } -#endif /* End of compatibility definitions. */ +#else /* End of compatibility definitions. 
*/ + +#define HAVE_OPAQUE_BN_GENCB + +#endif /* NIF interface declarations */ static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info); @@ -364,6 +440,7 @@ static ERL_NIF_TERM rsa_sign_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM ar static ERL_NIF_TERM dss_sign_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]); static ERL_NIF_TERM rsa_public_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]); static ERL_NIF_TERM rsa_private_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]); +static ERL_NIF_TERM rsa_generate_key_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]); static ERL_NIF_TERM dh_generate_parameters_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]); static ERL_NIF_TERM dh_check(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]); static ERL_NIF_TERM dh_generate_key_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]); @@ -397,6 +474,7 @@ static EC_KEY* ec_key_new(ErlNifEnv* env, ERL_NIF_TERM curve_arg); static int term2point(ErlNifEnv* env, ERL_NIF_TERM term, EC_GROUP *group, EC_POINT **pptr); #endif +static ERL_NIF_TERM bin_from_bn(ErlNifEnv* env, const BIGNUM *bn); static int library_refc = 0; /* number of users of this dynamic library */ @@ -434,6 +512,7 @@ static ErlNifFunc nif_funcs[] = { {"dss_sign_nif", 3, dss_sign_nif}, {"rsa_public_crypt", 4, rsa_public_crypt}, {"rsa_private_crypt", 4, rsa_private_crypt}, + {"rsa_generate_key_nif", 2, rsa_generate_key_nif}, {"dh_generate_parameters_nif", 2, dh_generate_parameters_nif}, {"dh_check", 1, dh_check}, {"dh_generate_key_nif", 4, dh_generate_key_nif}, @@ -883,6 +962,7 @@ static int initialize(ErlNifEnv* env, ERL_NIF_TERM load_info) CRYPTO_set_dynlock_destroy_callback(ccb->dyn_destroy_function); } #endif /* OPENSSL_THREADS */ + return 0; } @@ -2237,6 +2317,20 @@ static int get_bn_from_bin(ErlNifEnv* env, ERL_NIF_TERM term, BIGNUM** bnp) return 1; } +static ERL_NIF_TERM bin_from_bn(ErlNifEnv* env, const BIGNUM *bn) +{ + int bn_len; + unsigned char *bin_ptr; + ERL_NIF_TERM term; + + /* Copy the bignum into an erlang binary. */ + bn_len = BN_num_bytes(bn); + bin_ptr = enif_make_new_binary(env, bn_len, &term); + BN_bn2bin(bn, bin_ptr); + + return term; +} + static ERL_NIF_TERM rand_uniform_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) {/* (Lo,Hi) */ BIGNUM *bn_from = NULL, *bn_to, *bn_rand; @@ -2808,6 +2902,119 @@ static ERL_NIF_TERM rsa_private_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TE } } +/* Creates a term which can be parsed by get_rsa_private_key(). This is a list of plain integer binaries (not mpints). 
*/ +static ERL_NIF_TERM put_rsa_private_key(ErlNifEnv* env, const RSA *rsa) +{ + ERL_NIF_TERM result[8]; + const BIGNUM *n, *e, *d, *p, *q, *dmp1, *dmq1, *iqmp; + + /* Return at least [E,N,D] */ + n = NULL; e = NULL; d = NULL; + RSA_get0_key(rsa, &n, &e, &d); + + result[0] = bin_from_bn(env, e); // Exponent E + result[1] = bin_from_bn(env, n); // Modulus N = p*q + result[2] = bin_from_bn(env, d); // Exponent D + + /* Check whether the optional additional parameters are available */ + p = NULL; q = NULL; + RSA_get0_factors(rsa, &p, &q); + dmp1 = NULL; dmq1 = NULL; iqmp = NULL; + RSA_get0_crt_params(rsa, &dmp1, &dmq1, &iqmp); + + if (p && q && dmp1 && dmq1 && iqmp) { + result[3] = bin_from_bn(env, p); // Factor p + result[4] = bin_from_bn(env, q); // Factor q + result[5] = bin_from_bn(env, dmp1); // D mod (p-1) + result[6] = bin_from_bn(env, dmq1); // D mod (q-1) + result[7] = bin_from_bn(env, iqmp); // (1/q) mod p + + return enif_make_list_from_array(env, result, 8); + } else { + return enif_make_list_from_array(env, result, 3); + } +} + +static int check_erlang_interrupt(int maj, int min, BN_GENCB *ctxt) +{ + ErlNifEnv *env = BN_GENCB_get_arg(ctxt); + + if (!enif_is_current_process_alive(env)) { + return 0; + } else { + return 1; + } +} + +static ERL_NIF_TERM rsa_generate_key(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) +{/* (ModulusSize, PublicExponent) */ + int modulus_bits; + BIGNUM *pub_exp, *three; + RSA *rsa; + int success; + ERL_NIF_TERM result; + BN_GENCB *intr_cb; +#ifndef HAVE_OPAQUE_BN_GENCB + BN_GENCB intr_cb_buf; +#endif + + if (!enif_get_int(env, argv[0], &modulus_bits) || modulus_bits < 256) { + return enif_make_badarg(env); + } + + if (!get_bn_from_bin(env, argv[1], &pub_exp)) { + return enif_make_badarg(env); + } + + /* Make sure the public exponent is large enough (at least 3). + * Without this, RSA_generate_key_ex() can run forever. */ + three = BN_new(); + BN_set_word(three, 3); + success = BN_cmp(pub_exp, three); + BN_free(three); + if (success < 0) { + BN_free(pub_exp); + return enif_make_badarg(env); + } + + /* For large keys, prime generation can take many seconds. Set up + * the callback which we use to test whether the process has been + * interrupted. */ +#ifdef HAVE_OPAQUE_BN_GENCB + intr_cb = BN_GENCB_new(); +#else + intr_cb = &intr_cb_buf; +#endif + BN_GENCB_set(intr_cb, check_erlang_interrupt, env); + + rsa = RSA_new(); + success = RSA_generate_key_ex(rsa, modulus_bits, pub_exp, intr_cb); + BN_free(pub_exp); + +#ifdef HAVE_OPAQUE_BN_GENCB + BN_GENCB_free(intr_cb); +#endif + + if (!success) { + RSA_free(rsa); + return atom_error; + } + + result = put_rsa_private_key(env, rsa); + RSA_free(rsa); + + return result; +} + +static ERL_NIF_TERM rsa_generate_key_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) +{ + /* RSA key generation can take a long time (>1 sec for a large + * modulus), so schedule it as a CPU-bound operation. 
*/ + return enif_schedule_nif(env, "rsa_generate_key", + ERL_NIF_DIRTY_JOB_CPU_BOUND, + rsa_generate_key, argc, argv); +} + static ERL_NIF_TERM dh_generate_parameters_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) {/* (PrimeLen, Generator) */ int prime_len, generator; diff --git a/lib/crypto/c_src/crypto_callback.h b/lib/crypto/c_src/crypto_callback.h index 2641cc0c8b..489810116f 100644 --- a/lib/crypto/c_src/crypto_callback.h +++ b/lib/crypto/c_src/crypto_callback.h @@ -19,7 +19,7 @@ */ #include <openssl/crypto.h> -#if OPENSSL_VERSION_NUMBER < 0x10100000L +#ifdef NEED_EVP_COMPATIBILITY_FUNCTIONS # define CCB_FILE_LINE_ARGS #else # define CCB_FILE_LINE_ARGS , const char *file, int line diff --git a/lib/crypto/doc/src/crypto.xml b/lib/crypto/doc/src/crypto.xml index cbf141b3b0..d0deaceaaf 100644 --- a/lib/crypto/doc/src/crypto.xml +++ b/lib/crypto/doc/src/crypto.xml @@ -77,7 +77,7 @@ <code>rsa_private() = [key_value()] = [E, N, D] | [E, N, D, P1, P2, E1, E2, C] </code> <p>Where E is the public exponent, N is public modulus and D is - the private exponent.The longer key format contains redundant + the private exponent. The longer key format contains redundant information that will make the calculation faster. P1,P2 are first and second prime factors. E1,E2 are first and second exponents. C is the CRT coefficient. Terminology is taken from <url href="http://www.ietf.org/rfc/rfc3477.txt"> RFC 3447</url>.</p> @@ -103,7 +103,7 @@ <code>dh_private() = key_value() </code> - <code>dh_params() = [key_value()] = [P, G] </code> + <code>dh_params() = [key_value()] = [P, G] | [P, G, PrivateKeyBitLength]</code> <code>ecdh_public() = key_value() </code> @@ -298,22 +298,32 @@ <func> <name>generate_key(Type, Params) -> {PublicKey, PrivKeyOut} </name> <name>generate_key(Type, Params, PrivKeyIn) -> {PublicKey, PrivKeyOut} </name> - <fsummary>Generates a public keys of type <c>Type</c></fsummary> + <fsummary>Generates a public key of type <c>Type</c></fsummary> <type> - <v> Type = dh | ecdh | srp </v> - <v>Params = dh_params() | ecdh_params() | SrpUserParams | SrpHostParams </v> + <v> Type = dh | ecdh | rsa | srp </v> + <v>Params = dh_params() | ecdh_params() | RsaParams | SrpUserParams | SrpHostParams </v> + <v>RsaParams = {ModulusSizeInBits::integer(), PublicExponent::key_value()}</v> <v>SrpUserParams = {user, [Generator::binary(), Prime::binary(), Version::atom()]}</v> <v>SrpHostParams = {host, [Verifier::binary(), Generator::binary(), Prime::binary(), Version::atom()]}</v> - <v>PublicKey = dh_public() | ecdh_public() | srp_public() </v> + <v>PublicKey = dh_public() | ecdh_public() | rsa_public() | srp_public() </v> <v>PrivKeyIn = undefined | dh_private() | ecdh_private() | srp_private() </v> - <v>PrivKeyOut = dh_private() | ecdh_private() | srp_private() </v> - </type> - <desc> - <p>Generates public keys of type <c>Type</c>. - See also <seealso marker="public_key:public_key#generate_key-1">public_key:generate_key/1</seealso> - May throw exception <c>low_entropy</c> in case the random generator - failed due to lack of secure "randomness". - </p> + <v>PrivKeyOut = dh_private() | ecdh_private() | rsa_private() | srp_private() </v> + </type> + <desc> + <p>Generates a public key of type <c>Type</c>. + See also <seealso marker="public_key:public_key#generate_key-1">public_key:generate_key/1</seealso>. 
+ May throw exception an exception of class <c>error</c>: + </p> + <list type="bulleted"> + <item><c>badarg</c>: an argument is of wrong type or has an illegal value,</item> + <item><c>low_entropy</c>: the random generator failed due to lack of secure "randomness",</item> + <item><c>computation_failed</c>: the computation fails of another reason than <c>low_entropy</c>.</item> + </list> + <note> + <p>RSA key generation is only available if the runtime was + built with dirty scheduler support. Otherwise, attempting to + generate an RSA key will throw exception <c>error:notsup</c>.</p> + </note> </desc> </func> diff --git a/lib/crypto/doc/src/notes.xml b/lib/crypto/doc/src/notes.xml index 53ea6bb58b..37997b649b 100644 --- a/lib/crypto/doc/src/notes.xml +++ b/lib/crypto/doc/src/notes.xml @@ -31,6 +31,24 @@ </header> <p>This document describes the changes made to the Crypto application.</p> +<section><title>Crypto 3.7.3</title> + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + The implementation of the key exchange algorithms + diffie-hellman-group-exchange-sha* are optimized, up to a + factor of 11 for the slowest ( = biggest and safest) + group size.</p> + <p> + Own Id: OTP-14169 Aux Id: seq-13261 </p> + </item> + </list> + </section> + +</section> + <section><title>Crypto 3.7.2</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/crypto/src/crypto.app.src b/lib/crypto/src/crypto.app.src index 460894c012..3bf4279ae1 100644 --- a/lib/crypto/src/crypto.app.src +++ b/lib/crypto/src/crypto.app.src @@ -25,6 +25,6 @@ {registered, []}, {applications, [kernel, stdlib]}, {env, [{fips_mode, false}]}, - {runtime_dependencies, ["erts-6.0","stdlib-2.0","kernel-3.0"]}]}. + {runtime_dependencies, ["erts-9.0","stdlib-3.4","kernel-5.3"]}]}. diff --git a/lib/crypto/src/crypto.erl b/lib/crypto/src/crypto.erl index 5a915d4233..631af62615 100644 --- a/lib/crypto/src/crypto.erl +++ b/lib/crypto/src/crypto.erl @@ -452,6 +452,15 @@ generate_key(srp, {user, [Generator, Prime, Version]}, PrivateArg) end, user_srp_gen_key(Private, Generator, Prime); +generate_key(rsa, {ModulusSize, PublicExponent}, undefined) -> + case rsa_generate_key_nif(ModulusSize, ensure_int_as_bin(PublicExponent)) of + error -> + erlang:error(computation_failed, + [rsa,{ModulusSize,PublicExponent}]); + Private -> + {lists:sublist(Private, 2), Private} + end; + generate_key(ecdh, Curve, PrivKey) -> ec_key_generate(nif_curve_params(Curve), ensure_int_as_bin(PrivKey)). @@ -787,6 +796,11 @@ rsa_verify_nif(_Type, _Digest, _Signature, _Key) -> ?nif_stub. ecdsa_verify_nif(_Type, _Digest, _Signature, _Curve, _Key) -> ?nif_stub. %% Public Keys -------------------------------------------------------------------- +%% RSA Rivest-Shamir-Adleman functions +%% + +rsa_generate_key_nif(_Bits, _Exp) -> ?nif_stub. 
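%% An illustrative call to the new RSA key generation (a sketch only; 2048 and
%% 65537 are example parameters, and dirty scheduler support is required as
%% noted in the documentation above):
%%
%%   {PublicKey, PrivateKey} = crypto:generate_key(rsa, {2048, 65537}),
%%   [E, N]                 = PublicKey,   %% public exponent and modulus
%%   [E, N, D | _CrtParams] = PrivateKey.  %% adds the private exponent D and,
%%                                         %% when available, the CRT factors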
+ %% DH Diffie-Hellman functions %% diff --git a/lib/crypto/test/crypto_SUITE.erl b/lib/crypto/test/crypto_SUITE.erl index 31f4e89ffe..1d7037d003 100644 --- a/lib/crypto/test/crypto_SUITE.erl +++ b/lib/crypto/test/crypto_SUITE.erl @@ -119,7 +119,8 @@ groups() -> {sha384, [], [hash, hmac]}, {sha512, [], [hash, hmac]}, {rsa, [], [sign_verify, - public_encrypt + public_encrypt, + generate ]}, {dss, [], [sign_verify]}, {ecdsa, [], [sign_verify]}, @@ -247,6 +248,21 @@ init_per_testcase(cmac, Config) -> % The CMAC functionality was introduced in OpenSSL 1.0.1 {skip, "OpenSSL is too old"} end; +init_per_testcase(generate, Config) -> + case proplists:get_value(type, Config) of + rsa -> + % RSA key generation is a lengthy process, and is only available + % if dirty CPU scheduler support was enabled for this runtime. + case try erlang:system_info(dirty_cpu_schedulers) of + N -> N > 0 + catch + error:badarg -> false + end of + true -> Config; + false -> {skip, "RSA key generation requires dirty scheduler support."} + end; + _ -> Config + end; init_per_testcase(_Name,Config) -> Config. @@ -756,7 +772,10 @@ do_generate({ecdh = Type, Curve, Priv, Pub}) -> ok; {Other, _} -> ct:fail({{crypto, generate_key, [Type, Priv, Curve]}, {expected, Pub}, {got, Other}}) - end. + end; +do_generate({rsa = Type, Mod, Exp}) -> + {Pub,Priv} = crypto:generate_key(Type, {Mod,Exp}), + do_sign_verify({rsa, sha256, Pub, Priv, rsa_plain()}). notsup(Fun, Args) -> Result = @@ -1008,7 +1027,8 @@ group_config(rsa = Type, Config) -> rsa_oaep(), no_padding() ], - [{sign_verify, SignVerify}, {pub_priv_encrypt, PubPrivEnc} | Config]; + Generate = [{rsa, 2048, 3}, {rsa, 3072, 65537}], + [{sign_verify, SignVerify}, {pub_priv_encrypt, PubPrivEnc}, {generate, Generate} | Config]; group_config(dss = Type, Config) -> Msg = dss_plain(), Public = dss_params() ++ [dss_public()], diff --git a/lib/crypto/vsn.mk b/lib/crypto/vsn.mk index 38e2db9033..81cb2f8130 100644 --- a/lib/crypto/vsn.mk +++ b/lib/crypto/vsn.mk @@ -1 +1 @@ -CRYPTO_VSN = 3.7.2 +CRYPTO_VSN = 3.7.3 diff --git a/lib/debugger/src/dbg_wx_win.erl b/lib/debugger/src/dbg_wx_win.erl index d302423077..2c9d83ea74 100644 --- a/lib/debugger/src/dbg_wx_win.erl +++ b/lib/debugger/src/dbg_wx_win.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2008-2016. All Rights Reserved. +%% Copyright Ericsson AB 2008-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -276,7 +276,7 @@ verify(Type, Str) -> case erl_scan:string(Str) of {ok, Tokens, _EndLine} when Type==term -> - case erl_parse:parse_term(Tokens++[{dot, 1}]) of + case erl_parse:parse_term(Tokens++[{dot, erl_anno:new(1)}]) of {ok, Value} -> {edit, Value}; _Error -> ignore diff --git a/lib/debugger/test/int_SUITE.erl b/lib/debugger/test/int_SUITE.erl index f697ace4e5..cb1fcb83f3 100644 --- a/lib/debugger/test/int_SUITE.erl +++ b/lib/debugger/test/int_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1998-2016. All Rights Reserved. +%% Copyright Ericsson AB 1998-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. 
@@ -241,7 +241,8 @@ interpretable(Config) when is_list(Config) -> true = code:del_path(PrivDir), %% {error, no_src} - {ok, lists2, Binary} = compile:forms([{attribute,1,module,lists2}], []), + A1 = erl_anno:new(1), + {ok, lists2, Binary} = compile:forms([{attribute,A1,module,lists2}], []), code:load_binary(lists2, "unknown", Binary), {error, no_src} = int:interpretable(lists2), diff --git a/lib/dialyzer/RELEASE_NOTES b/lib/dialyzer/RELEASE_NOTES index 2457faa07a..299cc8642f 100644 --- a/lib/dialyzer/RELEASE_NOTES +++ b/lib/dialyzer/RELEASE_NOTES @@ -181,7 +181,7 @@ Version 1.8.0 (in Erlang/OTP R12B-2) - Dialyzer has a new warning option -Wunmatched_returns which warns for function calls that ignore the return value. This catches many common programming errors (e.g. calling file:close/1 - and not checking for the absense of errors), interface discrepancies + and not checking for the absence of errors), interface discrepancies (e.g. a function returning multiple values when in reality the function is void and only called for its side-effects), calling the wrong function (e.g. io_lib:format/1 instead of io:format/1), and even possible diff --git a/lib/dialyzer/doc/src/notes.xml b/lib/dialyzer/doc/src/notes.xml index 54abd09504..cd4ec4c068 100644 --- a/lib/dialyzer/doc/src/notes.xml +++ b/lib/dialyzer/doc/src/notes.xml @@ -32,6 +32,48 @@ <p>This document describes the changes made to the Dialyzer application.</p> +<section><title>Dialyzer 3.1</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> Fix a bug concerning parameterized opaque types. </p> + <p> + Own Id: OTP-14130</p> + </item> + <item> + <p> Improve a few warnings. One of them could cause a + crash. </p> + <p> + Own Id: OTP-14177</p> + </item> + <item> + <p>The dialyzer and observer applications will now use a + portable way to find the home directory. That means that + there is no longer any need to manually set the HOME + environment variable on Windows.</p> + <p> + Own Id: OTP-14249 Aux Id: ERL-161 </p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> The peak memory consumption is reduced. </p><p> The + evaluation of huge SCCs in <c>dialyzer_typesig</c> is + optimized. </p><p> Analyzing modules with binary + construction with huge strings is now much faster. </p> + <p> + Own Id: OTP-14126 Aux Id: ERL-308 </p> + </item> + </list> + </section> + +</section> + <section><title>Dialyzer 3.0.3</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/dialyzer/src/dialyzer.app.src b/lib/dialyzer/src/dialyzer.app.src index 5b28f7ae86..f517c51ec1 100644 --- a/lib/dialyzer/src/dialyzer.app.src +++ b/lib/dialyzer/src/dialyzer.app.src @@ -2,7 +2,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2006-2016. All Rights Reserved. +%% Copyright Ericsson AB 2006-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -48,5 +48,5 @@ {applications, [compiler, hipe, kernel, stdlib, wx]}, {env, []}, {runtime_dependencies, ["wx-1.2","syntax_tools-2.0","stdlib-3.0", - "kernel-5.0","hipe-3.15.1","erts-8.0", + "kernel-5.0","hipe-3.15.4","erts-8.0", "compiler-7.0"]}]}. 
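As a sketch of what the -Wunmatched_returns warning mentioned in the
RELEASE_NOTES above reports (the function names are made up for the example),
ignoring the ok | {error, Reason} result of file:close/1 is flagged, while
matching the result is not:

    close_quietly(Fd) ->
        file:close(Fd),        % return value ignored; -Wunmatched_returns warns
        ok.

    close_checked(Fd) ->
        ok = file:close(Fd).   % result matched; no warning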
diff --git a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl index ae1e4d8c38..aeeb895a0c 100644 --- a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl +++ b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl @@ -114,7 +114,6 @@ loop(#server_state{parent = Parent} = State, %% The Analysis %%-------------------------------------------------------------------- -%% Calls to erlang:garbage_collect() help to reduce the heap size. analysis_start(Parent, Analysis, LegalWarnings) -> CServer = dialyzer_codeserver:new(), Plt = Analysis#analysis.plt, @@ -136,11 +135,9 @@ analysis_start(Parent, Analysis, LegalWarnings) -> %% Remote type postprocessing NewCServer = try - NewRecords = dialyzer_codeserver:get_temp_records(TmpCServer0), + TmpCServer1 = dialyzer_utils:merge_types(TmpCServer0, Plt), NewExpTypes = dialyzer_codeserver:get_temp_exported_types(TmpCServer0), - OldRecords = dialyzer_plt:get_types(Plt), OldExpTypes0 = dialyzer_plt:get_exported_types(Plt), - MergedRecords = dialyzer_utils:merge_records(NewRecords, OldRecords), RemMods = [case Analysis#analysis.start_from of byte_code -> list_to_atom(filename:basename(F, ".beam")); @@ -148,25 +145,20 @@ analysis_start(Parent, Analysis, LegalWarnings) -> end || F <- Files], OldExpTypes1 = dialyzer_utils:sets_filter(RemMods, OldExpTypes0), MergedExpTypes = sets:union(NewExpTypes, OldExpTypes1), - TmpCServer1 = dialyzer_codeserver:set_temp_records(MergedRecords, TmpCServer0), TmpCServer2 = dialyzer_codeserver:finalize_exported_types(MergedExpTypes, TmpCServer1), - erlang:garbage_collect(), + erlang:garbage_collect(), % reduce heap size ?timing(State#analysis_state.timing_server, "remote", contracts_and_records(TmpCServer2)) catch throw:{error, _ErrorMsg} = Error -> exit(Error) end, - NewPlt0 = dialyzer_plt:insert_types(Plt, dialyzer_codeserver:get_records(NewCServer)), - ExpTypes = dialyzer_codeserver:get_exported_types(NewCServer), - NewPlt1 = dialyzer_plt:insert_exported_types(NewPlt0, ExpTypes), - State0 = State#analysis_state{plt = NewPlt1}, - dump_callgraph(Callgraph, State0, Analysis), + dump_callgraph(Callgraph, State, Analysis), %% Remove all old versions of the files being analyzed AllNodes = dialyzer_callgraph:all_nodes(Callgraph), - Plt1_a = dialyzer_plt:delete_list(NewPlt1, AllNodes), + Plt1_a = dialyzer_plt:delete_list(Plt, AllNodes), Plt1 = dialyzer_plt:insert_callbacks(Plt1_a, NewCServer), - State1 = State0#analysis_state{codeserver = NewCServer, plt = Plt1}, + State1 = State#analysis_state{codeserver = NewCServer, plt = Plt1}, Exports = dialyzer_codeserver:get_exports(NewCServer), NonExports = sets:subtract(sets:from_list(AllNodes), Exports), NonExportsList = sets:to_list(NonExports), @@ -176,14 +168,17 @@ analysis_start(Parent, Analysis, LegalWarnings) -> false -> Callgraph end, State2 = analyze_callgraph(NewCallgraph, State1), - #analysis_state{plt = MiniPlt2, doc_plt = DocPlt} = State2, + #analysis_state{plt = MiniPlt2, + doc_plt = DocPlt, + codeserver = Codeserver0} = State2, + {Codeserver, MiniPlt3} = move_data(Codeserver0, MiniPlt2), dialyzer_callgraph:dispose_race_server(NewCallgraph), rcv_and_send_ext_types(Parent), %% Since the PLT is never used, a dummy is sent: DummyPlt = dialyzer_plt:new(), - send_codeserver_plt(Parent, CServer, DummyPlt), - MiniPlt3 = dialyzer_plt:delete_list(MiniPlt2, NonExportsList), - send_analysis_done(Parent, MiniPlt3, DocPlt). 
+ send_codeserver_plt(Parent, Codeserver, DummyPlt), + MiniPlt4 = dialyzer_plt:delete_list(MiniPlt3, NonExportsList), + send_analysis_done(Parent, MiniPlt4, DocPlt). contracts_and_records(CodeServer) -> Fun = contrs_and_recs(CodeServer), @@ -200,15 +195,20 @@ contracts_and_records(CodeServer) -> contrs_and_recs(TmpCServer2) -> fun() -> Parent = receive {Pid, go} -> Pid end, - {TmpCServer3, RecordDict} = - dialyzer_utils:process_record_remote_types(TmpCServer2), + TmpCServer3 = dialyzer_utils:process_record_remote_types(TmpCServer2), TmpServer4 = - dialyzer_contracts:process_contract_remote_types(TmpCServer3, - RecordDict), + dialyzer_contracts:process_contract_remote_types(TmpCServer3), dialyzer_codeserver:give_away(TmpServer4, Parent), exit(TmpServer4) end. +move_data(CServer, MiniPlt) -> + {CServer1, Records} = dialyzer_codeserver:extract_records(CServer), + MiniPlt1 = dialyzer_plt:insert_types(MiniPlt, Records), + {NewCServer, ExpTypes} = dialyzer_codeserver:extract_exported_types(CServer1), + NewMiniPlt = dialyzer_plt:insert_exported_types(MiniPlt1, ExpTypes), + {NewCServer, NewMiniPlt}. + analyze_callgraph(Callgraph, #analysis_state{codeserver = Codeserver, doc_plt = DocPlt, plt = Plt, @@ -603,6 +603,7 @@ send_ext_types(Parent, ExtTypes) -> ok. send_codeserver_plt(Parent, CServer, Plt) -> + ok = dialyzer_codeserver:give_away(CServer, Parent), Parent ! {self(), cserver, CServer, Plt}, ok. diff --git a/lib/dialyzer/src/dialyzer_callgraph.erl b/lib/dialyzer/src/dialyzer_callgraph.erl index 68f3d7a240..6387f3d1e4 100644 --- a/lib/dialyzer/src/dialyzer_callgraph.erl +++ b/lib/dialyzer/src/dialyzer_callgraph.erl @@ -40,7 +40,7 @@ module_postorder_from_funs/2, new/0, get_depends_on/2, - get_required_by/2, + %% get_required_by/2, in_neighbours/2, renew_race_info/4, renew_race_code/2, @@ -250,12 +250,12 @@ get_depends_on(SCC, #callgraph{active_digraph = {'e', Out, _In, Maps}}) -> get_depends_on(SCC, #callgraph{active_digraph = {'d', DG}}) -> digraph:out_neighbours(DG, SCC). --spec get_required_by(scc() | module(), callgraph()) -> [scc()]. +%% -spec get_required_by(scc() | module(), callgraph()) -> [scc()]. -get_required_by(SCC, #callgraph{active_digraph = {'e', _Out, In, Maps}}) -> - lookup_scc(SCC, In, Maps); -get_required_by(SCC, #callgraph{active_digraph = {'d', DG}}) -> - digraph:in_neighbours(DG, SCC). +%% get_required_by(SCC, #callgraph{active_digraph = {'e', _Out, In, Maps}}) -> +%% lookup_scc(SCC, In, Maps); +%% get_required_by(SCC, #callgraph{active_digraph = {'d', DG}}) -> +%% digraph:in_neighbours(DG, SCC). lookup_scc(SCC, Table, Maps) -> case ets_lookup_dict({'scc', SCC}, Maps) of @@ -285,9 +285,11 @@ module_postorder(#callgraph{digraph = DG}) -> Nodes = sets:from_list([M || {M,_F,_A} <- digraph_vertices(DG)]), MDG = digraph:new([acyclic]), digraph_confirm_vertices(sets:to_list(Nodes), MDG), - Foreach = fun({M1,M2}) -> digraph:add_edge(MDG, M1, M2) end, + Foreach = fun({M1,M2}) -> _ = digraph:add_edge(MDG, M1, M2) end, lists:foreach(Foreach, sets:to_list(Edges)), - {digraph_utils:topsort(MDG), {'d', MDG}}. + %% The out-neighbors of a vertex are the vertices called directly. + %% The used vertices are to occur *before* the calling vertex: + {lists:reverse(digraph_utils:topsort(MDG)), {'d', MDG}}. 
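%% A small illustration of the ordering above (module names a and b are
%% hypothetical): if a calls b, edge_fold/2 produces the edge {a,b},
%% digraph_utils:topsort/1 then returns [a,b] (out-neighbours come later),
%% and lists:reverse/1 yields [b,a], so the callee b is handled before its
%% caller a.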
edge_fold({{M1,_,_},{M2,_,_}}, Set) -> case M1 =/= M2 of @@ -305,7 +307,7 @@ module_deps(#callgraph{digraph = DG}) -> Nodes = sets:from_list([M || {M,_F,_A} <- digraph_vertices(DG)]), MDG = digraph:new(), digraph_confirm_vertices(sets:to_list(Nodes), MDG), - Foreach = fun({M1,M2}) -> digraph:add_edge(MDG, M1, M2) end, + Foreach = fun({M1,M2}) -> check_add_edge(MDG, M1, M2) end, lists:foreach(Foreach, sets:to_list(Edges)), Deps = [{N, ordsets:from_list(digraph:in_neighbours(MDG, N))} || N <- sets:to_list(Nodes)], @@ -363,7 +365,7 @@ ets_lookup_set(Key, Table) -> %% The core tree must be labeled as by cerl_trees:label/1 (or /2). %% The set of labels in the tree must be disjoint from the set of -%% labels already occuring in the callgraph. +%% labels already occurring in the callgraph. -spec scan_core_tree(cerl:c_module(), callgraph()) -> {[mfa_or_funlbl()], [callgraph_edge()]}. @@ -552,9 +554,21 @@ digraph_add_edge(From, To, DG) -> false -> digraph:add_vertex(DG, To); {To, _} -> ok end, - digraph:add_edge(DG, {From, To}, From, To, []), + check_add_edge(DG, {From, To}, From, To, []), ok. +check_add_edge(G, V1, V2) -> + case digraph:add_edge(G, V1, V2) of + {error, Error} -> exit({add_edge, V1, V2, Error}); + _Edge -> ok + end. + +check_add_edge(G, E, V1, V2, L) -> + case digraph:add_edge(G, E, V1, V2, L) of + {error, Error} -> exit({add_edge, E, V1, V2, L, Error}); + _Edge -> ok + end. + digraph_confirm_vertices([MFA|Left], DG) -> digraph:add_vertex(DG, MFA, confirmed), digraph_confirm_vertices(Left, DG); @@ -762,28 +776,53 @@ to_ps(#callgraph{} = CG, File, Args) -> ok. condensation(G) -> - SCCs = digraph_utils:strong_components(G), - %% Assign unique numbers to SCCs: - Ints = lists:seq(1, length(SCCs)), - IntToSCC = lists:zip(Ints, SCCs), - IntScc = sofs:relation(IntToSCC, [{int, scc}]), - %% Subsitute strong components for vertices in edges using the - %% unique numbers: - C2V = sofs:relation([{SC, V} || SC <- SCCs, V <- SC], [{scc, v}]), - I2V = sofs:relative_product(IntScc, C2V), % [{v, int}] - Es = sofs:relation(digraph:edges(G), [{v, v}]), - R1 = sofs:relative_product(I2V, Es), - R2 = sofs:relative_product(I2V, sofs:converse(R1)), - %% Create in- and out-neighbours: - In = sofs:relation_to_family(sofs:strict_relation(R2)), - R3 = sofs:converse(R2), - Out = sofs:relation_to_family(sofs:strict_relation(R3)), - [OutETS, InETS, MapsETS] = - [ets:new(Name,[{read_concurrency, true}]) || - Name <- [callgraph_deps_out, callgraph_deps_in, callgraph_scc_map]], - ets:insert(OutETS, sofs:to_external(Out)), - ets:insert(InETS, sofs:to_external(In)), - %% Create mappings from SCCs to unique integers, and the inverse: - ets:insert(MapsETS, lists:zip([{'scc', SCC} || SCC<- SCCs], Ints)), - ets:insert(MapsETS, IntToSCC), - {{'e', OutETS, InETS, MapsETS}, SCCs}. + erlang:garbage_collect(), % reduce heap size + {Pid, Ref} = erlang:spawn_monitor(do_condensation(G, self())), + receive {'DOWN', Ref, process, Pid, Result} -> + {SCCInts, OutETS, InETS, MapsETS} = Result, + NewSCCs = [ets:lookup_element(MapsETS, SCCInt, 2) || SCCInt <- SCCInts], + {{'e', OutETS, InETS, MapsETS}, NewSCCs} + end. + +-spec do_condensation(digraph:graph(), pid()) -> fun(() -> no_return()). 
+ +do_condensation(G, Parent) -> + fun() -> + [OutETS, InETS, MapsETS] = + [ets:new(Name,[{read_concurrency, true}]) || + Name <- [callgraph_deps_out, callgraph_deps_in, callgraph_scc_map]], + SCCs = digraph_utils:strong_components(G), + %% Assign unique numbers to SCCs: + Ints = lists:seq(1, length(SCCs)), + IntToSCC = lists:zip(Ints, SCCs), + IntScc = sofs:relation(IntToSCC, [{int, scc}]), + %% Create mapping from unique integers to SCCs: + ets:insert(MapsETS, IntToSCC), + %% Subsitute strong components for vertices in edges using the + %% unique numbers: + C2V = sofs:relation([{SC, V} || SC <- SCCs, V <- SC], [{scc, v}]), + I2V = sofs:relative_product(IntScc, C2V), % [{v, int}] + Es = sofs:relation(digraph:edges(G), [{v, v}]), + R1 = sofs:relative_product(I2V, Es), + R2 = sofs:relative_product(I2V, sofs:converse(R1)), + R2Strict = sofs:strict_relation(R2), + %% Create out-neighbours: + Out = sofs:relation_to_family(sofs:converse(R2Strict)), + ets:insert(OutETS, sofs:to_external(Out)), + %% Sort the SCCs topologically: + DG = sofs:family_to_digraph(Out), + lists:foreach(fun(I) -> digraph:add_vertex(DG, I) end, Ints), + SCCInts0 = digraph_utils:topsort(DG), + digraph:delete(DG), + %% The out-neighbors of a vertex are the vertices called directly. + %% The used vertices are to occur *before* the calling vertex: + SCCInts = lists:reverse(SCCInts0), + %% Create in-neighbours: + In = sofs:relation_to_family(R2Strict), + ets:insert(InETS, sofs:to_external(In)), + %% Create mapping from SCCs to unique integers: + ets:insert(MapsETS, lists:zip([{'scc', SCC} || SCC<- SCCs], Ints)), + lists:foreach(fun(E) -> true = ets:give_away(E, Parent, any) + end, [OutETS, InETS, MapsETS]), + exit({SCCInts, OutETS, InETS, MapsETS}) + end. diff --git a/lib/dialyzer/src/dialyzer_cl.erl b/lib/dialyzer/src/dialyzer_cl.erl index 158ee761af..8500c59ebe 100644 --- a/lib/dialyzer/src/dialyzer_cl.erl +++ b/lib/dialyzer/src/dialyzer_cl.erl @@ -30,6 +30,8 @@ -record(cl_state, {backend_pid :: pid() | 'undefined', + code_server = none :: 'none' + | dialyzer_codeserver:codeserver(), erlang_mode = false :: boolean(), external_calls = [] :: [mfa()], external_types = [] :: [mfa()], @@ -630,6 +632,9 @@ cl_loop(State, LogCache) -> {BackendPid, warnings, Warnings} -> NewState = store_warnings(State, Warnings), cl_loop(NewState, LogCache); + {BackendPid, cserver, CodeServer, _Plt} -> % Plt is ignored + NewState = State#cl_state{code_server = CodeServer}, + cl_loop(NewState, LogCache); {BackendPid, done, NewMiniPlt, _NewDocPlt} -> return_value(State, NewMiniPlt); {BackendPid, ext_calls, ExtCalls} -> @@ -647,7 +652,6 @@ cl_loop(State, LogCache) -> cl_error(State, Msg); _Other -> %% io:format("Received ~p\n", [_Other]), - %% Note: {BackendPid, cserver, CodeServer, Plt} is ignored. cl_loop(State, LogCache) end. @@ -688,18 +692,34 @@ cl_error(State, Msg) -> maybe_close_output_file(State), throw({dialyzer_error, lists:flatten(Msg)}). 
-return_value(State = #cl_state{erlang_mode = ErlangMode, +return_value(State = #cl_state{code_server = CodeServer, + erlang_mode = ErlangMode, mod_deps = ModDeps, output_plt = OutputPlt, plt_info = PltInfo, stored_warnings = StoredWarnings}, MiniPlt) -> + %% Just for now: + case CodeServer =:= none of + true -> + ok; + false -> + dialyzer_codeserver:delete(CodeServer) + end, case OutputPlt =:= none of true -> dialyzer_plt:delete(MiniPlt); false -> - Plt = dialyzer_plt:restore_full_plt(MiniPlt), - dialyzer_plt:to_file(OutputPlt, Plt, ModDeps, PltInfo) + Fun = to_file_fun(OutputPlt, MiniPlt, ModDeps, PltInfo), + {Pid, Ref} = erlang:spawn_monitor(Fun), + dialyzer_plt:give_away(MiniPlt, Pid), + Pid ! go, + receive {'DOWN', Ref, process, Pid, Result} -> + case Result of + ok -> ok; + Thrown -> throw(Thrown) + end + end end, UnknownWarnings = unknown_warnings(State), RetValue = @@ -720,6 +740,16 @@ return_value(State = #cl_state{erlang_mode = ErlangMode, {RetValue, set_warning_id(AllWarnings)} end. +-spec to_file_fun(_, _, _, _) -> fun(() -> no_return()). + +to_file_fun(Filename, MiniPlt, ModDeps, PltInfo) -> + fun() -> + receive go -> ok end, + Plt = dialyzer_plt:restore_full_plt(MiniPlt), + dialyzer_plt:to_file(Filename, Plt, ModDeps, PltInfo), + exit(ok) + end. + unknown_warnings(State = #cl_state{legal_warnings = LegalWarnings}) -> Unknown = case ordsets:is_element(?WARN_UNKNOWN, LegalWarnings) of true -> diff --git a/lib/dialyzer/src/dialyzer_codeserver.erl b/lib/dialyzer/src/dialyzer_codeserver.erl index f53c713bfe..a1a7370eff 100644 --- a/lib/dialyzer/src/dialyzer_codeserver.erl +++ b/lib/dialyzer/src/dialyzer_codeserver.erl @@ -26,18 +26,21 @@ give_away/2, finalize_contracts/1, finalize_exported_types/2, - finalize_records/2, + finalize_records/1, get_contracts/1, get_callbacks/1, get_exported_types/1, + extract_exported_types/1, get_exports/1, - get_records/1, + get_records_table/1, + extract_records/1, get_next_core_label/1, get_temp_contracts/2, - contracts_modules/1, + all_temp_modules/1, store_contracts/4, get_temp_exported_types/1, - get_temp_records/1, + get_temp_records_table/1, + lookup_temp_mod_records/2, insert/3, insert_exports/2, insert_temp_exported_types/2, @@ -52,7 +55,6 @@ lookup_meta_info/2, new/0, set_next_core_label/2, - set_temp_records/2, store_temp_records/3, translate_fake_file/3]). @@ -67,10 +69,8 @@ -type set_ets() :: ets:tid(). -type types() :: erl_types:type_table(). --type mod_records() :: erl_types:mod_records(). -type contracts() :: #{mfa() => dialyzer_contracts:file_contract()}. --type mod_contracts() :: dict:dict(module(), contracts()). %% A property-list of data compiled from -compile and -dialyzer attributes. -type meta_info() :: [{{'nowarn_function' | dial_warn_tag()}, @@ -80,8 +80,8 @@ -record(codeserver, {next_core_label = 0 :: label(), code :: dict_ets(), - exported_types :: set_ets(), % set(mfa()) - records :: map_ets(), + exported_types :: 'clean' | set_ets(), % set(mfa()) + records :: 'clean' | map_ets(), contracts :: map_ets(), callbacks :: map_ets(), fun_meta_info :: dict_ets(), % {mfa(), meta_info()} @@ -107,9 +107,6 @@ ets_map_store(Key, Element, Table) -> true = ets:insert(Table, {Key, Element}), Table. -ets_dict_store_dict(Dict, Table) -> - true = ets:insert(Table, dict:to_list(Dict)). - ets_dict_to_dict(Table) -> Fold = fun({Key,Value}, Dict) -> dict:store(Key, Value, Dict) end, ets:foldl(Fold, dict:new(), Table). @@ -164,11 +161,8 @@ new() -> -spec delete(codeserver()) -> 'ok'. 
-delete(#codeserver{code = Code, exported_types = ExportedTypes, - records = Records, contracts = Contracts, - callbacks = Callbacks}) -> - lists:foreach(fun ets:delete/1, - [Code, ExportedTypes, Records, Contracts, Callbacks]). +delete(CServer) -> + lists:foreach(fun(Table) -> true = ets:delete(Table) end, tables(CServer)). -spec insert(atom(), cerl:c_module(), codeserver()) -> codeserver(). @@ -222,6 +216,11 @@ is_exported(MFA, #codeserver{exports = Exports}) -> get_exported_types(#codeserver{exported_types = ExpTypes}) -> ets_set_to_set(ExpTypes). +-spec extract_exported_types(codeserver()) -> {codeserver(), set_ets()}. + +extract_exported_types(#codeserver{exported_types = ExpTypes} = CS) -> + {CS#codeserver{exported_types = 'clean'}, ExpTypes}. + -spec get_exports(codeserver()) -> sets:set(mfa()). get_exports(#codeserver{exports = Exports}) -> @@ -269,10 +268,15 @@ lookup_mod_records(Mod, #codeserver{records = RecDict}) when is_atom(Mod) -> {ok, Map} -> Map end. --spec get_records(codeserver()) -> mod_records(). +-spec get_records_table(codeserver()) -> map_ets(). + +get_records_table(#codeserver{records = RecDict}) -> + RecDict. -get_records(#codeserver{records = RecDict}) -> - ets_dict_to_dict(RecDict). +-spec extract_records(codeserver()) -> {codeserver(), map_ets()}. + +extract_records(#codeserver{records = RecDict} = CS) -> + {CS#codeserver{records = clean}, RecDict}. -spec store_temp_records(module(), types(), codeserver()) -> codeserver(). @@ -283,26 +287,26 @@ store_temp_records(Mod, Map, #codeserver{temp_records = TempRecDict} = CS) false -> CS#codeserver{temp_records = ets_map_store(Mod, Map, TempRecDict)} end. --spec get_temp_records(codeserver()) -> mod_records(). +-spec get_temp_records_table(codeserver()) -> map_ets(). -get_temp_records(#codeserver{temp_records = TempRecDict}) -> - ets_dict_to_dict(TempRecDict). +get_temp_records_table(#codeserver{temp_records = TempRecDict}) -> + TempRecDict. --spec set_temp_records(mod_records(), codeserver()) -> codeserver(). +-spec lookup_temp_mod_records(module(), codeserver()) -> types(). -set_temp_records(Dict, CS) -> - true = ets:delete(CS#codeserver.temp_records), - TempRecords = ets:new(dialyzer_codeserver_temp_records,[]), - true = ets_dict_store_dict(Dict, TempRecords), - CS#codeserver{temp_records = TempRecords}. +lookup_temp_mod_records(Mod, #codeserver{temp_records = TempRecDict}) -> + case ets_dict_find(Mod, TempRecDict) of + error -> maps:new(); + {ok, Map} -> Map + end. --spec finalize_records(mod_records(), codeserver()) -> codeserver(). +-spec finalize_records(codeserver()) -> codeserver(). -finalize_records(Dict, #codeserver{temp_records = TmpRecords, - records = Records} = CS) -> - true = ets:delete(TmpRecords), - true = ets_dict_store_dict(Dict, Records), - CS#codeserver{temp_records = clean}. +finalize_records(#codeserver{temp_records = TmpRecords, + records = Records} = CS) -> + true = ets:delete(Records), + ets:rename(TmpRecords, dialyzer_codeserver_records), + CS#codeserver{temp_records = clean, records = TmpRecords}. -spec lookup_mod_contracts(atom(), codeserver()) -> contracts(). @@ -331,10 +335,13 @@ lookup_meta_info(MorMFA, #codeserver{fun_meta_info = FunMetaInfo}) -> {ok, PropList} -> PropList end. --spec get_contracts(codeserver()) -> mod_contracts(). +-spec get_contracts(codeserver()) -> + dict:dict(mfa(), dialyzer_contracts:file_contract()). get_contracts(#codeserver{contracts = ContDict}) -> - ets_dict_to_dict(ContDict). 
+ dict:filter(fun({_M, _F, _A}, _) -> true; + (_, _) -> false + end, ets_dict_to_dict(ContDict)). -spec get_callbacks(codeserver()) -> list(). @@ -348,12 +355,14 @@ store_temp_contracts(Mod, SpecMap, CallbackMap, #codeserver{temp_contracts = Cn, temp_callbacks = Cb} = CS) when is_atom(Mod) -> + %% Make sure Mod is stored even if there are not callbacks or + %% contracts. CS1 = CS#codeserver{temp_contracts = ets_map_store(Mod, SpecMap, Cn)}, CS1#codeserver{temp_callbacks = ets_map_store(Mod, CallbackMap, Cb)}. --spec contracts_modules(codeserver()) -> [module()]. +-spec all_temp_modules(codeserver()) -> [module()]. -contracts_modules(#codeserver{temp_contracts = TempContTable}) -> +all_temp_modules(#codeserver{temp_contracts = TempContTable}) -> ets:select(TempContTable, [{{'$1', '$2'}, [], ['$1']}]). -spec store_contracts(module(), contracts(), contracts(), codeserver()) -> @@ -380,17 +389,25 @@ get_temp_contracts(Mod, #codeserver{temp_contracts = TempContDict, -spec give_away(codeserver(), pid()) -> 'ok'. -give_away(#codeserver{temp_records = TempRecords, - temp_contracts = TempContracts, - temp_callbacks = TempCallbacks, - records = Records, - contracts = Contracts, - callbacks = Callbacks}, Pid) -> - _ = [true = ets:give_away(Table, Pid, any) || - Table <- [TempRecords, TempContracts, TempCallbacks, - Records, Contracts, Callbacks], - Table =/= clean], - ok. +give_away(CServer, Pid) -> + lists:foreach(fun(Table) -> true = ets:give_away(Table, Pid, any) + end, tables(CServer)). + +tables(#codeserver{code = Code, + fun_meta_info = FunMetaInfo, + exports = Exports, + temp_exported_types = TempExpTypes, + temp_records = TempRecords, + temp_contracts = TempContracts, + temp_callbacks = TempCallbacks, + exported_types = ExportedTypes, + records = Records, + contracts = Contracts, + callbacks = Callbacks}) -> + [Table || Table <- [Code, FunMetaInfo, Exports, TempExpTypes, + TempRecords, TempContracts, TempCallbacks, + ExportedTypes, Records, Contracts, Callbacks], + Table =/= clean]. -spec finalize_contracts(codeserver()) -> codeserver(). diff --git a/lib/dialyzer/src/dialyzer_contracts.erl b/lib/dialyzer/src/dialyzer_contracts.erl index 2078e58ce8..5f24b5a668 100644 --- a/lib/dialyzer/src/dialyzer_contracts.erl +++ b/lib/dialyzer/src/dialyzer_contracts.erl @@ -24,7 +24,7 @@ get_contract_return/2, %% get_contract_signature/1, is_overloaded/1, - process_contract_remote_types/2, + process_contract_remote_types/1, store_tmp_contract/5]). -export_type([file_contract/0, plt_contracts/0]). @@ -139,18 +139,18 @@ sequence([], _Delimiter) -> ""; sequence([H], _Delimiter) -> H; sequence([H|T], Delimiter) -> H ++ Delimiter ++ sequence(T, Delimiter). --spec process_contract_remote_types(dialyzer_codeserver:codeserver(), - erl_types:mod_records()) -> +-spec process_contract_remote_types(dialyzer_codeserver:codeserver()) -> dialyzer_codeserver:codeserver(). 
-process_contract_remote_types(CodeServer, RecordDict) -> - Mods = dialyzer_codeserver:contracts_modules(CodeServer), +process_contract_remote_types(CodeServer) -> + Mods = dialyzer_codeserver:all_temp_modules(CodeServer), + RecordTable = dialyzer_codeserver:get_records_table(CodeServer), ExpTypes = dialyzer_codeserver:get_exported_types(CodeServer), ContractFun = fun({{_M, _F, _A}=MFA, {File, TmpContract, Xtra}}, C0) -> #tmp_contract{contract_funs = CFuns, forms = Forms} = TmpContract, {NewCs, C2} = lists:mapfoldl(fun(CFun, C1) -> - CFun(ExpTypes, RecordDict, C1) + CFun(ExpTypes, RecordTable, C1) end, C0, CFuns), Args = general_domain(NewCs), Contract = #contract{contracts = NewCs, args = Args, forms = Forms}, @@ -177,7 +177,7 @@ process_contract_remote_types(CodeServer, RecordDict) -> -type fun_types() :: dict:dict(label(), erl_types:type_table()). --spec check_contracts([{mfa(), file_contract()}], +-spec check_contracts(orddict:orddict(mfa(), file_contract()), dialyzer_callgraph:callgraph(), fun_types(), opaques_fun()) -> plt_contracts(). @@ -206,7 +206,7 @@ check_contracts(Contracts, Callgraph, FunTypes, FindOpaques) -> error -> NewContracts end end, - dict:fold(FoldFun, [], FunTypes). + orddict:from_list(dict:fold(FoldFun, [], FunTypes)). %% Checks all components of a contract -spec check_contract(#contract{}, erl_types:erl_type()) -> 'ok' | {'error', term()}. @@ -451,10 +451,10 @@ contract_from_form(Forms, MFA, RecDict, FileLine) -> contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], MFA, RecDict, FileLine, TypeAcc, FormAcc) -> TypeFun = - fun(ExpTypes, AllRecords, Cache) -> + fun(ExpTypes, RecordTable, Cache) -> {NewType, NewCache} = try - from_form_with_check(Form, ExpTypes, MFA, AllRecords, Cache) + from_form_with_check(Form, ExpTypes, MFA, RecordTable, Cache) catch throw:{error, Msg} -> {File, Line} = FileLine, @@ -472,12 +472,12 @@ contract_from_form([{type, _L1, bounded_fun, [{type, _L2, 'fun', [_, _]} = Form, Constr]}| Left], MFA, RecDict, FileLine, TypeAcc, FormAcc) -> TypeFun = - fun(ExpTypes, AllRecords, Cache) -> + fun(ExpTypes, RecordTable, Cache) -> {Constr1, VarTable, Cache1} = - process_constraints(Constr, MFA, RecDict, ExpTypes, AllRecords, + process_constraints(Constr, MFA, RecDict, ExpTypes, RecordTable, Cache), {NewType, NewCache} = - from_form_with_check(Form, ExpTypes, MFA, AllRecords, + from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache1), NewTypeNoVars = erl_types:subst_all_vars_to_any(NewType), {{NewTypeNoVars, Constr1}, NewCache} @@ -488,28 +488,28 @@ contract_from_form([{type, _L1, bounded_fun, contract_from_form([], _MFA, _RecDict, _FileLine, TypeAcc, FormAcc) -> {lists:reverse(TypeAcc), lists:reverse(FormAcc)}. -process_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords, Cache) -> +process_constraints(Constrs, MFA, RecDict, ExpTypes, RecordTable, Cache) -> {Init0, NewCache} = initialize_constraints(Constrs, MFA, RecDict, ExpTypes, - AllRecords, Cache), + RecordTable, Cache), Init = remove_cycles(Init0), - constraints_fixpoint(Init, MFA, RecDict, ExpTypes, AllRecords, NewCache). + constraints_fixpoint(Init, MFA, RecDict, ExpTypes, RecordTable, NewCache). -initialize_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords, Cache) -> - initialize_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords, +initialize_constraints(Constrs, MFA, RecDict, ExpTypes, RecordTable, Cache) -> + initialize_constraints(Constrs, MFA, RecDict, ExpTypes, RecordTable, Cache, []). 
-initialize_constraints([], _MFA, _RecDict, _ExpTypes, _AllRecords, +initialize_constraints([], _MFA, _RecDict, _ExpTypes, _RecordTable, Cache, Acc) -> {Acc, Cache}; -initialize_constraints([Constr|Rest], MFA, RecDict, ExpTypes, AllRecords, +initialize_constraints([Constr|Rest], MFA, RecDict, ExpTypes, RecordTable, Cache, Acc) -> case Constr of {type, _, constraint, [{atom, _, is_subtype}, [Type1, Type2]]} -> VarTable = erl_types:var_table__new(), {T1, NewCache} = - final_form(Type1, ExpTypes, MFA, AllRecords, VarTable, Cache), + final_form(Type1, ExpTypes, MFA, RecordTable, VarTable, Cache), Entry = {T1, Type2}, - initialize_constraints(Rest, MFA, RecDict, ExpTypes, AllRecords, + initialize_constraints(Rest, MFA, RecDict, ExpTypes, RecordTable, NewCache, [Entry|Acc]); {type, _, constraint, [{atom,_,Name}, List]} -> N = length(List), @@ -517,18 +517,18 @@ initialize_constraints([Constr|Rest], MFA, RecDict, ExpTypes, AllRecords, io_lib:format("Unsupported type guard ~w/~w\n", [Name, N])}) end. -constraints_fixpoint(Constrs, MFA, RecDict, ExpTypes, AllRecords, Cache) -> +constraints_fixpoint(Constrs, MFA, RecDict, ExpTypes, RecordTable, Cache) -> VarTable = erl_types:var_table__new(), {VarTab, NewCache} = - constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords, + constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, RecordTable, VarTable, Cache), constraints_fixpoint(VarTab, MFA, Constrs, RecDict, ExpTypes, - AllRecords, NewCache). + RecordTable, NewCache). constraints_fixpoint(OldVarTab, MFA, Constrs, RecDict, ExpTypes, - AllRecords, Cache) -> + RecordTable, Cache) -> {NewVarTab, NewCache} = - constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords, + constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, RecordTable, OldVarTab, Cache), case NewVarTab of OldVarTab -> @@ -540,38 +540,38 @@ constraints_fixpoint(OldVarTab, MFA, Constrs, RecDict, ExpTypes, {FinalConstrs, NewVarTab, NewCache}; _Other -> constraints_fixpoint(NewVarTab, MFA, Constrs, RecDict, ExpTypes, - AllRecords, NewCache) + RecordTable, NewCache) end. -final_form(Form, ExpTypes, MFA, AllRecords, VarTable, Cache) -> - from_form_with_check(Form, ExpTypes, MFA, AllRecords, VarTable, Cache). +final_form(Form, ExpTypes, MFA, RecordTable, VarTable, Cache) -> + from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache). -from_form_with_check(Form, ExpTypes, MFA, AllRecords, Cache) -> +from_form_with_check(Form, ExpTypes, MFA, RecordTable, Cache) -> VarTable = erl_types:var_table__new(), - from_form_with_check(Form, ExpTypes, MFA, AllRecords, VarTable, Cache). + from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache). -from_form_with_check(Form, ExpTypes, MFA, AllRecords, VarTable, Cache) -> +from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache) -> Site = {spec, MFA}, - C1 = erl_types:t_check_record_fields(Form, ExpTypes, Site, AllRecords, + C1 = erl_types:t_check_record_fields(Form, ExpTypes, Site, RecordTable, VarTable, Cache), - erl_types:t_from_form(Form, ExpTypes, Site, AllRecords, VarTable, C1). + erl_types:t_from_form(Form, ExpTypes, Site, RecordTable, VarTable, C1). -constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords, +constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, RecordTable, VarTab, Cache) -> {Subtypes, NewCache} = - constraints_to_subs(Constrs, MFA, RecDict, ExpTypes, AllRecords, + constraints_to_subs(Constrs, MFA, RecDict, ExpTypes, RecordTable, VarTab, Cache, []), {insert_constraints(Subtypes), NewCache}. 
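Note (illustrative, not part of the commit): in the dialyzer_contracts.erl hunks above, the record information threaded through contract processing is no longer a dict snapshot (AllRecords) but the codeserver's own ETS table (RecordTable), handed straight to erl_types. A minimal sketch of how a per-module record map can be read from such a table; the helper and variable names are hypothetical, dialyzer's real lookup code lives in dialyzer_codeserver:

    %% Sketch: the table holds one {Module, RecordMap} object per module.
    lookup_records(Tab, Mod) ->
        case ets:lookup(Tab, Mod) of
            [{Mod, RecordMap}] -> RecordMap;  % map of record and type definitions
            [] -> maps:new()                  % unknown module: empty map
        end.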
-constraints_to_subs([], _MFA, _RecDict, _ExpTypes, _AllRecords, +constraints_to_subs([], _MFA, _RecDict, _ExpTypes, _RecordTable, _VarTab, Cache, Acc) -> {Acc, Cache}; -constraints_to_subs([{T1, Form2}|Rest], MFA, RecDict, ExpTypes, AllRecords, +constraints_to_subs([{T1, Form2}|Rest], MFA, RecDict, ExpTypes, RecordTable, VarTab, Cache, Acc) -> {T2, NewCache} = - final_form(Form2, ExpTypes, MFA, AllRecords, VarTab, Cache), + final_form(Form2, ExpTypes, MFA, RecordTable, VarTab, Cache), NewAcc = [{subtype, T1, T2}|Acc], - constraints_to_subs(Rest, MFA, RecDict, ExpTypes, AllRecords, + constraints_to_subs(Rest, MFA, RecDict, ExpTypes, RecordTable, VarTab, NewCache, NewAcc). %% Replaces variables with '_' when necessary to break up cycles among diff --git a/lib/dialyzer/src/dialyzer_coordinator.erl b/lib/dialyzer/src/dialyzer_coordinator.erl index 99f95a4dca..7c1bc1de5a 100644 --- a/lib/dialyzer/src/dialyzer_coordinator.erl +++ b/lib/dialyzer/src/dialyzer_coordinator.erl @@ -76,6 +76,8 @@ active = 0 :: integer(), result :: result(), next_label = 0 :: integer(), + jobs :: [job()], + job_fun :: fun(), init_data :: init_data(), regulator :: regulator(), scc_to_pid :: scc_to_pid() @@ -108,16 +110,18 @@ spawn_jobs(Mode, Jobs, InitData, Timing) -> false -> unused end, Coordinator = {Collector, Regulator, SCCtoPID}, - Fold = - fun(Job, Count) -> - Pid = dialyzer_worker:launch(Mode, Job, InitData, Coordinator), - case TypesigOrDataflow of - true -> true = ets:insert(SCCtoPID, {Job, Pid}), ok; - false -> ok - end, - Count + 1 + JobFun = + fun(Job) -> + Pid = dialyzer_worker:launch(Mode, Job, InitData, Coordinator), + case TypesigOrDataflow of + true -> true = ets:insert(SCCtoPID, {Job, Pid}); + false -> true + end end, - JobCount = lists:foldl(Fold, 0, Jobs), + JobCount = length(Jobs), + NumberOfInitJobs = min(JobCount, 20 * dialyzer_utils:parallelism()), + {InitJobs, RestJobs} = lists:split(NumberOfInitJobs, Jobs), + lists:foreach(JobFun, InitJobs), Unit = case Mode of 'typesig' -> "SCCs"; @@ -129,11 +133,13 @@ spawn_jobs(Mode, Jobs, InitData, Timing) -> 'compile' -> dialyzer_analysis_callgraph:compile_init_result(); _ -> [] end, - #state{mode = Mode, active = JobCount, result = InitResult, next_label = 0, - init_data = InitData, regulator = Regulator, scc_to_pid = SCCtoPID}. + #state{mode = Mode, active = JobCount, result = InitResult, + next_label = 0, job_fun = JobFun, jobs = RestJobs, + init_data = InitData, regulator = Regulator, scc_to_pid = SCCtoPID}. 
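Note (illustrative, not part of the commit): spawn_jobs/4 above now launches at most 20 * dialyzer_utils:parallelism() workers up front and, in collect_result/1, starts one queued job for every job that finishes. A self-contained sketch of that throttling pattern; the names run_throttled/3 and wait_for_jobs/3, and the assumption that StartJob(Job) spawns a worker which sends {done, WorkerPid} back to this process, are mine rather than the module's:

    %% Sketch: keep at most Limit jobs running at any one time.
    run_throttled(Jobs, Limit, StartJob) ->
        {Init, Queued} = lists:split(min(Limit, length(Jobs)), Jobs),
        lists:foreach(StartJob, Init),
        wait_for_jobs(Queued, length(Init), StartJob).

    wait_for_jobs(_Queued, 0, _StartJob) ->
        ok;
    wait_for_jobs(Queued, Active, StartJob) ->
        receive
            {done, _WorkerPid} ->
                case Queued of
                    [] -> wait_for_jobs([], Active - 1, StartJob);
                    [Next|Rest] ->
                        StartJob(Next),  % refill: one new job per finished job
                        wait_for_jobs(Rest, Active, StartJob)
                end
        end.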
collect_result(#state{mode = Mode, active = Active, result = Result, next_label = NextLabel, init_data = InitData, + jobs = JobsLeft, job_fun = JobFun, regulator = Regulator, scc_to_pid = SCCtoPID} = State) -> receive {next_label_request, Estimation, Pid} -> @@ -141,20 +147,35 @@ collect_result(#state{mode = Mode, active = Active, result = Result, collect_result(State#state{next_label = NextLabel + Estimation}); {done, Job, Data} -> NewResult = update_result(Mode, InitData, Job, Data, Result), + TypesigOrDataflow = (Mode =:= 'typesig') orelse (Mode =:= 'dataflow'), case Active of 1 -> kill_regulator(Regulator), case Mode of 'compile' -> {NewResult, NextLabel}; - X when X =:= 'typesig'; X =:= 'dataflow' -> + _ when TypesigOrDataflow -> ets:delete(SCCtoPID), NewResult; 'warnings' -> NewResult end; N -> - collect_result(State#state{result = NewResult, active = N - 1}) + case TypesigOrDataflow of + true -> true = ets:delete(SCCtoPID, Job); + false -> true + end, + NewJobsLeft = + case JobsLeft of + [] -> []; + [NewJob|JobsLeft1] -> + JobFun(NewJob), + JobsLeft1 + end, + NewState = State#state{result = NewResult, + jobs = NewJobsLeft, + active = N - 1}, + collect_result(NewState) end end. @@ -170,18 +191,20 @@ update_result(Mode, InitData, Job, Data, Result) -> end. -spec sccs_to_pids([scc() | module()], coordinator()) -> - {[dialyzer_worker:worker()], [scc() | module()]}. + [dialyzer_worker:worker()]. sccs_to_pids(SCCs, {_Collector, _Regulator, SCCtoPID}) -> Fold = - fun(SCC, {Pids, Unknown}) -> - try ets:lookup_element(SCCtoPID, SCC, 2) of - Result -> {[Result|Pids], Unknown} - catch - _:_ -> {Pids, [SCC|Unknown]} - end + fun(SCC, Pids) -> + %% The SCCs that SCC depends on have always been started. + try ets:lookup_element(SCCtoPID, SCC, 2) of + Pid when is_pid(Pid) -> + [Pid|Pids] + catch + _:_ -> Pids + end end, - lists:foldl(Fold, {[], []}, SCCs). + lists:foldl(Fold, [], SCCs). -spec job_done(job(), job_result(), coordinator()) -> ok. diff --git a/lib/dialyzer/src/dialyzer_dataflow.erl b/lib/dialyzer/src/dialyzer_dataflow.erl index f706ebfb02..4c29b4f1eb 100644 --- a/lib/dialyzer/src/dialyzer_dataflow.erl +++ b/lib/dialyzer/src/dialyzer_dataflow.erl @@ -1344,8 +1344,6 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map, State, Warns) -> {Msg, Force} = case t_is_none(ArgType0) of true -> - PatString = format_patterns(Pats), - PatTypes = [PatString, format_type(OrigArgType, State1)], %% See if this is covered by an earlier clause or if it %% simply cannot match OrigArgTypes = @@ -1353,17 +1351,27 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map, State, Warns) -> true -> Any = t_any(), [Any || _ <- Pats]; false -> t_to_tlist(OrigArgType) end, + PatString = format_patterns(Pats), + ArgTypeString = format_type(OrigArgType, State1), + BindResOrig = + bind_pat_vars(Pats, OrigArgTypes, [], Map1, State1), Tag = - case bind_pat_vars(Pats, OrigArgTypes, [], Map1, State1) of + case BindResOrig of {error, bind, _, _, _} -> pattern_match; {error, record, _, _, _} -> record_match; {error, opaque, _, _, _} -> opaque_match; {_, _} -> pattern_match_cov end, - {{Tag, PatTypes}, false}; + PatTypes = case BindResOrig of + {error, opaque, _, _, OpaqueType} -> + [PatString, ArgTypeString, + format_type(OpaqueType, State1)]; + _ -> [PatString, ArgTypeString] + end, + {{Tag, PatTypes}, false}; false -> %% Try to find out if this is a default clause in a list - %% comprehension and supress this. A real Hack(tm) + %% comprehension and suppress this. 
A real Hack(tm) Force0 = case is_compiler_generated(cerl:get_ann(C)) of true -> diff --git a/lib/dialyzer/src/dialyzer_plt.erl b/lib/dialyzer/src/dialyzer_plt.erl index 37c22fef48..bfd3f84fc5 100644 --- a/lib/dialyzer/src/dialyzer_plt.erl +++ b/lib/dialyzer/src/dialyzer_plt.erl @@ -31,9 +31,8 @@ included_files/1, from_file/1, get_default_plt/0, - get_types/1, + get_module_types/2, get_exported_types/1, - %% insert/3, insert_list/2, insert_contract_list/2, insert_callbacks/2, @@ -143,6 +142,10 @@ delete_list(#plt{info = Info, types = Types, -spec insert_contract_list(plt(), dialyzer_contracts:plt_contracts()) -> plt(). +insert_contract_list(#plt{contracts = Contracts} = PLT, List) -> + NewContracts = dict:merge(fun(_MFA, _Old, New) -> New end, + Contracts, dict:from_list(List)), + PLT#plt{contracts = NewContracts}; insert_contract_list(#mini_plt{contracts = Contracts} = PLT, List) -> true = ets:insert(Contracts, List), PLT. @@ -184,20 +187,23 @@ lookup(Plt, Label) when is_integer(Label) -> lookup_1(#mini_plt{info = Info}, MFAorLabel) -> ets_table_lookup(Info, MFAorLabel). --spec insert_types(plt(), erl_types:mod_records()) -> plt(). +-spec insert_types(plt(), ets:tid()) -> plt(). -insert_types(PLT, Rec) -> - PLT#plt{types = Rec}. +insert_types(MiniPLT, Records) -> + ets:rename(Records, plt_types), + MiniPLT#mini_plt{types = Records}. --spec insert_exported_types(plt(), sets:set()) -> plt(). +-spec insert_exported_types(plt(), ets:tid()) -> plt(). -insert_exported_types(PLT, Set) -> - PLT#plt{exported_types = Set}. +insert_exported_types(MiniPLT, ExpTypes) -> + ets:rename(ExpTypes, plt_exported_types), + MiniPLT#mini_plt{exported_types = ExpTypes}. --spec get_types(plt()) -> erl_types:mod_records(). +-spec get_module_types(plt(), atom()) -> + 'none' | {'value', erl_types:type_table()}. -get_types(#plt{types = Types}) -> - Types. +get_module_types(#plt{types = Types}, M) when is_atom(M) -> + table_lookup(Types, M). -spec get_exported_types(plt()) -> sets:set(). @@ -227,12 +233,8 @@ contains_mfa(#plt{info = Info, contracts = Contracts}, MFA) -> get_default_plt() -> case os:getenv("DIALYZER_PLT") of false -> - case os:getenv("HOME") of - false -> - plt_error("The HOME environment variable needs to be set " ++ - "so that Dialyzer knows where to find the default PLT"); - HomeDir -> filename:join(HomeDir, ".dialyzer_plt") - end; + {ok,[[HomeDir]]} = init:get_argument(home), + filename:join(HomeDir, ".dialyzer_plt"); UserSpecPlt -> UserSpecPlt end. 
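Note (illustrative, not part of the commit): get_default_plt/0 above switches from os:getenv("HOME") to init:get_argument(home), so the "HOME environment variable needs to be set" error path disappears; the home argument is filled in by the runtime at boot even when no HOME variable exists. A shell sketch with made-up output:

    1> init:get_argument(home).
    {ok,[["/home/someuser"]]}
    2> filename:join("/home/someuser", ".dialyzer_plt").
    "/home/someuser/.dialyzer_plt"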
@@ -520,10 +522,12 @@ get_mini_plt(#plt{info = Info, contracts = Contracts, callbacks = Callbacks, exported_types = ExpTypes}) -> - [ETSInfo, ETSTypes, ETSContracts, ETSCallbacks, ETSExpTypes] = + [ETSInfo, ETSContracts] = [ets:new(Name, [public]) || - Name <- [plt_info, plt_types, plt_contracts, plt_callbacks, - plt_exported_types]], + Name <- [plt_info, plt_contracts]], + [ETSTypes, ETSCallbacks, ETSExpTypes] = + [ets:new(Name, [compressed, public]) || + Name <- [plt_types, plt_callbacks, plt_exported_types]], CallbackList = dict:to_list(Callbacks), CallbacksByModule = [{M, [Cb || {{M1,_,_},_} = Cb <- CallbackList, M1 =:= M]} || diff --git a/lib/dialyzer/src/dialyzer_succ_typings.erl b/lib/dialyzer/src/dialyzer_succ_typings.erl index 3c90f46e95..be685baf22 100644 --- a/lib/dialyzer/src/dialyzer_succ_typings.erl +++ b/lib/dialyzer/src/dialyzer_succ_typings.erl @@ -29,7 +29,7 @@ -export([ find_succ_types_for_scc/2, refine_one_module/2, - find_required_by/2, + %% find_required_by/2, find_depends_on/2, collect_warnings/2, lookup_names/2 @@ -236,10 +236,10 @@ refine_succ_typings(Modules, #st{codeserver = Codeserver, find_depends_on(SCC, {_Codeserver, Callgraph, _Plt, _Solvers}) -> dialyzer_callgraph:get_depends_on(SCC, Callgraph). --spec find_required_by(scc() | module(), fixpoint_init_data()) -> [scc()]. +%% -spec find_required_by(scc() | module(), fixpoint_init_data()) -> [scc()]. -find_required_by(SCC, {_Codeserver, Callgraph, _Plt, _Solvers}) -> - dialyzer_callgraph:get_required_by(SCC, Callgraph). +%% find_required_by(SCC, {_Codeserver, Callgraph, _Plt, _Solvers}) -> +%% dialyzer_callgraph:get_required_by(SCC, Callgraph). -spec lookup_names([label()], fixpoint_init_data()) -> [mfa_or_funlbl()]. diff --git a/lib/dialyzer/src/dialyzer_typesig.erl b/lib/dialyzer/src/dialyzer_typesig.erl index b33484bda4..c3ba44fde7 100644 --- a/lib/dialyzer/src/dialyzer_typesig.erl +++ b/lib/dialyzer/src/dialyzer_typesig.erl @@ -81,7 +81,7 @@ -record(constraint_list, {type :: 'conj' | 'disj', list :: [constr()], deps :: deps(), - masks = maps:new() :: #{dep() => mask()}, + masks :: #{dep() => mask()} | 'undefined', id :: {'list', dep()} | 'undefined'}). -type constraint_list() :: #constraint_list{}. @@ -181,7 +181,6 @@ analyze_scc(SCC, NextLabel, CallGraph, CServer, Plt, PropTypes, Solvers0) -> M <- lists:usort([M || {M, _, _} <- SCC])], State2 = traverse_scc(SCC, CServer, DefSet, ModRecs, State1), State3 = state__finalize(State2), - erlang:garbage_collect(), Funs = state__scc(State3), pp_constrs_scc(Funs, State3), constraints_to_dot_scc(Funs, State3), @@ -202,7 +201,8 @@ traverse_scc([{M,_,_}=MFA|Left], Codeserver, DefSet, ModRecs, AccState) -> {M, Rec} = lists:keyfind(M, 1, ModRecs), TmpState1 = state__set_rec_dict(AccState, Rec), DummyLetrec = cerl:c_letrec([Def], cerl:c_atom(foo)), - {NewAccState, _} = traverse(DummyLetrec, DefSet, TmpState1), + TmpState2 = state__new_constraint_context(TmpState1), + {NewAccState, _} = traverse(DummyLetrec, DefSet, TmpState2), traverse_scc(Left, Codeserver, DefSet, ModRecs, NewAccState); traverse_scc([], _Codeserver, _DefSet, _ModRecs, AccState) -> AccState. 
@@ -2098,6 +2098,12 @@ v2_solve_disj([I|Is], [C|Cs], I, Map0, V2State0, UL, MapL, Eval, Uneval, end; v2_solve_disj([], [], _I, _Map, V2State, UL, MapL, Eval, Uneval, Failed) -> {ok, V2State, lists:reverse(Eval), UL, MapL, lists:reverse(Uneval), Failed}; +v2_solve_disj([every_i], Cs, I, Map, V2State, UL, MapL, Eval, Uneval, Failed) -> + NewIs = case Cs of + [] -> []; + _ -> [I, every_i] + end, + v2_solve_disj(NewIs, Cs, I, Map, V2State, UL, MapL, Eval, Uneval, Failed); v2_solve_disj(Is, [C|Cs], I, Map, V2State, UL, MapL, Eval, Uneval0, Failed) -> Uneval = [{I,C#constraint_list.id} || not is_failed_list(C, V2State)] ++ Uneval0, @@ -2169,7 +2175,7 @@ v2_solve_conj([I|Is], [Cs|Tail], I, Map0, Conj, IsFlat, V2State0, M = lists:keydelete(I, 1, vars_per_child(U, Masks)), {V2State2, NewF0} = save_updated_vars_list(AllCs, M, V2State1), {NewF, F} = lists:splitwith(fun(J) -> J < I end, NewF0), - Is1 = lists:umerge(Is, F), + Is1 = umerge_mask(Is, F), NewFs = [NewF|NewFs0], v2_solve_conj(Is1, Tail, I+1, Map, Conj, IsFlat, V2State2, [U|UL], NewFs, VarsUp, LastMap, LastFlags) @@ -2191,6 +2197,14 @@ v2_solve_conj([], _Cs, _I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp, v2_solve_conj(NewFlags, Cs, 1, Map, Conj, IsFlat, V2State, [], [], [U|VarsUp], Map, NewFlags) end; +v2_solve_conj([every_i], Cs, I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp, + LastMap, LastFlags) -> + NewIs = case Cs of + [] -> []; + _ -> [I, every_i] + end, + v2_solve_conj(NewIs, Cs, I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp, + LastMap, LastFlags); v2_solve_conj(Is, [_|Tail], I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp, LastMap, LastFlags) -> v2_solve_conj(Is, Tail, I+1, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp, @@ -2207,7 +2221,12 @@ report_detected_loop(_) -> add_mask_to_flags(Flags, [Im|M], I, L) when I > Im -> add_mask_to_flags(Flags, M, I, [Im|L]); add_mask_to_flags(Flags, [_|M], _I, L) -> - {lists:umerge(M, Flags), lists:reverse(L)}. + {umerge_mask(Flags, M), lists:reverse(L)}. + +umerge_mask([every_i]=Is, _F) -> + Is; +umerge_mask(Is, F) -> + lists:umerge(Is, F). get_mask(V, Masks) -> case maps:find(V, Masks) of @@ -2221,7 +2240,7 @@ get_flags(#v2_state{constr_data = ConData}=V2State0, C) -> error -> ?debug("get_flags Id=~w Flags=all ~w\n", [Id, length(Cs)]), V2State = V2State0#v2_state{constr_data = maps:put(Id, {[],[]}, ConData)}, - {V2State, lists:seq(1, length(Cs))}; + {V2State, [every_i]}; {ok, failed} -> {V2State0, failed_list}; {ok, {Part,U}} when U =/= [] -> @@ -2901,8 +2920,9 @@ state__get_rec_var(Fun, #state{fun_map = Map}) -> maps:find(Fun, Map). state__finalize(State) -> - State1 = enumerate_constraints(State), - order_fun_constraints(State1). + State1 = state__new_constraint_context(State), + State2 = enumerate_constraints(State1), + order_fun_constraints(State2). %% ============================================================================ %% @@ -2982,7 +3002,7 @@ find_constraint_deps([Type|Tail], Acc) -> NewAcc = [[t_var_name(D) || D <- t_collect_vars(Type)]|Acc], find_constraint_deps(Tail, NewAcc); find_constraint_deps([], Acc) -> - lists:flatten(Acc). + lists:append(Acc). mk_constraint_1(Lhs, eq, Rhs, Deps) when Lhs < Rhs -> #constraint{lhs = Lhs, op = eq, rhs = Rhs, deps = Deps}; @@ -3090,8 +3110,8 @@ expand_to_conjunctions(#constraint_list{type = disj, list = List}) -> List1 = [C || C <- List, is_simple_constraint(C)], %% Just an assert. 
[] = [C || #constraint{} = C <- List1], - Expanded = lists:flatten([expand_to_conjunctions(C) - || #constraint_list{} = C <- List]), + Expanded = lists:append([expand_to_conjunctions(C) + || #constraint_list{} = C <- List]), ReturnList = Expanded ++ List1, if length(ReturnList) > ?DISJ_NORM_FORM_LIMIT -> throw(too_many_disj); true -> ReturnList @@ -3116,8 +3136,10 @@ calculate_deps(List) -> calculate_deps([H|Tail], Acc) -> Deps = get_deps(H), calculate_deps(Tail, [Deps|Acc]); +calculate_deps([], []) -> []; +calculate_deps([], [L]) -> L; calculate_deps([], Acc) -> - ordsets:from_list(lists:flatten(Acc)). + lists:umerge(Acc). mk_conj_constraint_list(List) -> mk_constraint_list(conj, List). @@ -3185,7 +3207,8 @@ order_fun_constraints(State) -> order_fun_constraints([#constraint_ref{id = Id}|Tail], State) -> Cs = state__get_cs(Id, State), - {[NewCs], State1} = order_fun_constraints([Cs], [], [], State), + {[Cs1], State1} = order_fun_constraints([Cs], [], [], State), + NewCs = Cs1#constraint_list{deps = Cs#constraint_list.deps}, NewState = state__store_constrs(Id, NewCs, State1), order_fun_constraints(Tail, NewState); order_fun_constraints([], State) -> @@ -3193,23 +3216,31 @@ order_fun_constraints([], State) -> order_fun_constraints([#constraint_ref{} = C|Tail], Funs, Acc, State) -> order_fun_constraints(Tail, [C|Funs], Acc, State); -order_fun_constraints([#constraint_list{list = List, type = Type} = C|Tail], +order_fun_constraints([#constraint_list{list = List, + type = Type, + masks = OldMasks} = C|Tail], Funs, Acc, State) -> - {NewList, NewState} = - case Type of - conj -> order_fun_constraints(List, [], [], State); - disj -> - FoldFun = fun(X, AccState) -> - {[NewX], NewAccState} = - order_fun_constraints([X], [], [], AccState), - {NewX, NewAccState} - end, - lists:mapfoldl(FoldFun, State, List) - end, - C1 = update_constraint_list(C, NewList), - Masks = calculate_masks(NewList, 1, []), - NewAcc = [update_masks(C1, Masks)|Acc], - order_fun_constraints(Tail, Funs, NewAcc, NewState); + case OldMasks of + undefined -> + {NewList, NewState} = + case Type of + conj -> order_fun_constraints(List, [], [], State); + disj -> + FoldFun = fun(X, AccState) -> + {[NewX], NewAccState} = + order_fun_constraints([X], [], [], AccState), + {NewX, NewAccState} + end, + lists:mapfoldl(FoldFun, State, List) + end, + NewList2 = reset_deps(NewList, State), + C1 = update_constraint_list(C, NewList2), + Masks = calculate_masks(NewList, 1, []), + NewAcc = [update_masks(C1, Masks)|Acc], + order_fun_constraints(Tail, Funs, NewAcc, NewState); + M when is_map(M) -> + order_fun_constraints(Tail, Funs, [C|Acc], State) + end; order_fun_constraints([#constraint{} = C|Tail], Funs, Acc, State) -> order_fun_constraints(Tail, Funs, [C|Acc], State); order_fun_constraints([], Funs, Acc, State) -> @@ -3219,6 +3250,18 @@ order_fun_constraints([], Funs, Acc, State) -> update_masks(C, Masks) -> C#constraint_list{masks = Masks}. +reset_deps(ConstrList, #state{solvers = Solvers}) -> + case lists:member(v1, Solvers) of + true -> + ConstrList; + false -> + [reset_deps(Constr) || Constr <- ConstrList] + end. + +reset_deps(#constraint{}=C) -> C#constraint{deps = []}; +reset_deps(#constraint_list{}=C) -> C#constraint_list{deps = []}; +reset_deps(#constraint_ref{}=C) -> C#constraint_ref{deps = []}. 
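Note (illustrative, not part of the commit): the typesig hunks above stop materialising lists:seq(1, length(Cs)) when every constraint position is still flagged; get_flags/2 returns the sentinel [every_i] instead, v2_solve_disj/v2_solve_conj unfold it one index at a time, and umerge_mask/2 (added above, repeated standalone here) treats it as absorbing. The evaluations in the comments are illustrative:

    umerge_mask([every_i] = Is, _F) -> Is;      % 'every index' absorbs any mask
    umerge_mask(Is, F) -> lists:umerge(Is, F).  % otherwise: ordered-set union
    %% umerge_mask([every_i], [3,7]) -> [every_i]
    %% umerge_mask([1,4], [3,7])     -> [1,3,4,7]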
+ calculate_masks([C|Cs], I, L0) -> calculate_masks(Cs, I+1, [{V, I} || V <- get_deps(C)] ++ L0); calculate_masks([], _I, L) -> diff --git a/lib/dialyzer/src/dialyzer_utils.erl b/lib/dialyzer/src/dialyzer_utils.erl index 432d27571b..9eaf95c1a2 100644 --- a/lib/dialyzer/src/dialyzer_utils.erl +++ b/lib/dialyzer/src/dialyzer_utils.erl @@ -37,9 +37,9 @@ get_fun_meta_info/3, is_suppressed_fun/2, is_suppressed_tag/3, - merge_records/2, pp_hook/0, process_record_remote_types/1, + merge_types/2, sets_filter/2, src_compiler_opts/0, refold_pattern/1, @@ -188,7 +188,6 @@ get_core_from_abstract_code(AbstrCode, Opts) -> %% ============================================================================ -type type_table() :: erl_types:type_table(). --type mod_records() :: dict:dict(module(), type_table()). -spec get_record_and_type_info(abstract_code()) -> {'ok', type_table()} | {'error', string()}. @@ -289,18 +288,18 @@ get_record_fields([{record_field, _Line, Name, _Init}|Left], RecDict, Acc) -> get_record_fields([], _RecDict, Acc) -> lists:reverse(Acc). --spec process_record_remote_types(codeserver()) -> - {codeserver(), mod_records()}. +-spec process_record_remote_types(codeserver()) -> codeserver(). %% The field types are cached. Used during analysis when handling records. process_record_remote_types(CServer) -> - TempRecords = dialyzer_codeserver:get_temp_records(CServer), ExpTypes = dialyzer_codeserver:get_exported_types(CServer), - TempRecords1 = process_opaque_types0(TempRecords, ExpTypes), - %% A cache (not the field type cache) is used for speeding things up a bit. + Mods = dialyzer_codeserver:all_temp_modules(CServer), + process_opaque_types0(Mods, CServer, ExpTypes), VarTable = erl_types:var_table__new(), + RecordTable = dialyzer_codeserver:get_temp_records_table(CServer), ModuleFun = - fun({Module, Record}) -> + fun(Module) -> + RecordMap = dialyzer_codeserver:lookup_temp_mod_records(Module, CServer), RecordFun = fun({Key, Value}, C2) -> case Key of @@ -313,7 +312,7 @@ process_record_remote_types(CServer) -> {FieldT, C6} = erl_types:t_from_form (Field, ExpTypes, Site, - TempRecords1, VarTable, + RecordTable, VarTable, C5), {{FieldName, Field, FieldT}, C6} end, C4, Fields), @@ -328,30 +327,29 @@ process_record_remote_types(CServer) -> end, Cache = erl_types:cache__new(), {RecordList, _NewCache} = - lists:mapfoldl(RecordFun, Cache, maps:to_list(Record)), - {Module, maps:from_list(RecordList)} + lists:mapfoldl(RecordFun, Cache, maps:to_list(RecordMap)), + dialyzer_codeserver:store_temp_records(Module, + maps:from_list(RecordList), + CServer) end, - NewRecordsList = lists:map(ModuleFun, dict:to_list(TempRecords1)), - NewRecords = dict:from_list(NewRecordsList), - check_record_fields(NewRecords, ExpTypes), - {dialyzer_codeserver:finalize_records(NewRecords, CServer), NewRecords}. + lists:foreach(ModuleFun, Mods), + check_record_fields(Mods, CServer, ExpTypes), + dialyzer_codeserver:finalize_records(CServer). %% erl_types:t_from_form() substitutes the declaration of opaque types %% for the expanded type in some cases. To make sure the initial type, %% any(), is not used, the expansion is done twice. %% XXX: Recursive opaque types are not handled well. -process_opaque_types0(TempRecords0, TempExpTypes) -> - Cache = erl_types:cache__new(), - {TempRecords1, Cache1} = - process_opaque_types(TempRecords0, TempExpTypes, Cache), - {TempRecords, _NewCache} = - process_opaque_types(TempRecords1, TempExpTypes, Cache1), - TempRecords. 
- -process_opaque_types(TempRecords, TempExpTypes, Cache) -> +process_opaque_types0(AllModules, CServer, TempExpTypes) -> + process_opaque_types(AllModules, CServer, TempExpTypes), + process_opaque_types(AllModules, CServer, TempExpTypes). + +process_opaque_types(AllModules, CServer, TempExpTypes) -> VarTable = erl_types:var_table__new(), + RecordTable = dialyzer_codeserver:get_temp_records_table(CServer), ModuleFun = - fun({Module, Record}, C0) -> + fun(Module) -> + RecordMap = dialyzer_codeserver:lookup_temp_mod_records(Module, CServer), RecordFun = fun({Key, Value}, C2) -> case Key of @@ -360,32 +358,32 @@ process_opaque_types(TempRecords, TempExpTypes, Cache) -> Site = {type, {Module, Name, NArgs}}, {Type, C3} = erl_types:t_from_form(Form, TempExpTypes, Site, - TempRecords, VarTable, C2), + RecordTable, VarTable, C2), {{Key, {F, Type}}, C3}; _Other -> {{Key, Value}, C2} end end, - {RecordList, C1} = - lists:mapfoldl(RecordFun, C0, maps:to_list(Record)), - {{Module, maps:from_list(RecordList)}, C1} - %% dict:map(RecordFun, Record) + C0 = erl_types:cache__new(), + {RecordList, _NewCache} = + lists:mapfoldl(RecordFun, C0, maps:to_list(RecordMap)), + dialyzer_codeserver:store_temp_records(Module, + maps:from_list(RecordList), + CServer) end, - {TempRecordList, NewCache} = - lists:mapfoldl(ModuleFun, Cache, dict:to_list(TempRecords)), - {dict:from_list(TempRecordList), NewCache}. - %% dict:map(ModuleFun, TempRecords). + lists:foreach(ModuleFun, AllModules). -check_record_fields(Records, TempExpTypes) -> - Cache = erl_types:cache__new(), +check_record_fields(AllModules, CServer, TempExpTypes) -> VarTable = erl_types:var_table__new(), + RecordTable = dialyzer_codeserver:get_temp_records_table(CServer), CheckFun = - fun({Module, Element}, C0) -> + fun(Module) -> CheckForm = fun(Form, Site, C1) -> erl_types:t_check_record_fields(Form, TempExpTypes, - Site, Records, + Site, RecordTable, VarTable, C1) end, - ElemFun = + RecordMap = dialyzer_codeserver:lookup_temp_mod_records(Module, CServer), + RecordFun = fun({Key, Value}, C2) -> case Key of {record, Name} -> @@ -406,10 +404,10 @@ check_record_fields(Records, TempExpTypes) -> msg_with_position(Fun, FileLine) end end, - lists:foldl(ElemFun, C0, maps:to_list(Element)) + C0 = erl_types:cache__new(), + _ = lists:foldl(RecordFun, C0, maps:to_list(RecordMap)) end, - _NewCache = lists:foldl(CheckFun, Cache, dict:to_list(Records)), - ok. + lists:foreach(CheckFun, AllModules). msg_with_position(Fun, FileLine) -> try Fun() @@ -421,10 +419,37 @@ msg_with_position(Fun, FileLine) -> throw({error, NewMsg}) end. --spec merge_records(mod_records(), mod_records()) -> mod_records(). +-spec merge_types(codeserver(), dialyzer_plt:plt()) -> codeserver(). -merge_records(NewRecords, OldRecords) -> - dict:merge(fun(_Key, NewVal, _OldVal) -> NewVal end, NewRecords, OldRecords). 
+merge_types(CServer, Plt) -> + AllNewModules = dialyzer_codeserver:all_temp_modules(CServer), + AllNewModulesSet = sets:from_list(AllNewModules), + AllOldModulesSet = dialyzer_plt:all_modules(Plt), + AllModulesSet = sets:union(AllNewModulesSet, AllOldModulesSet), + ModuleFun = + fun(Module) -> + KeepOldFun = + fun() -> + case dialyzer_plt:get_module_types(Plt, Module) of + none -> no; + {value, OldRecords} -> + case sets:is_element(Module, AllNewModulesSet) of + true -> no; + false -> {yes, OldRecords} + end + end + end, + Records = + case KeepOldFun() of + no -> + dialyzer_codeserver:lookup_temp_mod_records(Module, CServer); + {yes, OldRecords} -> + OldRecords + end, + dialyzer_codeserver:store_temp_records(Module, Records, CServer) + end, + lists:foreach(ModuleFun, sets:to_list(AllModulesSet)), + CServer. %% ============================================================================ %% diff --git a/lib/dialyzer/src/dialyzer_worker.erl b/lib/dialyzer/src/dialyzer_worker.erl index 418c9798b3..af0f2e9e08 100644 --- a/lib/dialyzer/src/dialyzer_worker.erl +++ b/lib/dialyzer/src/dialyzer_worker.erl @@ -56,10 +56,14 @@ launch(Mode, Job, InitData, Coordinator) -> %%-------------------------------------------------------------------- -init(#state{job = SCC, mode = Mode, init_data = InitData} = State) when +init(#state{job = SCC, mode = Mode, init_data = InitData, + coordinator = Coordinator} = State) when Mode =:= 'typesig'; Mode =:= 'dataflow' -> - DependsOn = dialyzer_succ_typings:find_depends_on(SCC, InitData), - ?debug("Deps ~p: ~p\n",[SCC, DependsOn]), + DependsOnSCCs = dialyzer_succ_typings:find_depends_on(SCC, InitData), + ?debug("~w: Deps ~p: ~p\n", [self(), SCC, DependsOnSCCs]), + Pids = dialyzer_coordinator:sccs_to_pids(DependsOnSCCs, Coordinator), + ?debug("~w: PidsDeps ~p\n", [self(), Pids]), + DependsOn = [{Pid, erlang:monitor(process, Pid)} || Pid <- Pids], loop(updating, State#state{depends_on = DependsOn}); init(#state{mode = Mode} = State) when Mode =:= 'compile'; Mode =:= 'warnings' -> @@ -67,7 +71,7 @@ init(#state{mode = Mode} = State) when loop(updating, #state{mode = Mode} = State) when Mode =:= 'typesig'; Mode =:= 'dataflow' -> - ?debug("Update: ~p\n",[State#state.job]), + ?debug("~w: Update: ~p\n", [self(), State#state.job]), NextStatus = case waits_more_success_typings(State) of true -> waiting; @@ -76,11 +80,11 @@ loop(updating, #state{mode = Mode} = State) when loop(NextStatus, State); loop(waiting, #state{mode = Mode} = State) when Mode =:= 'typesig'; Mode =:= 'dataflow' -> - ?debug("Wait: ~p\n",[State#state.job]), + ?debug("~w: Wait: ~p\n", [self(), State#state.job]), NewState = wait_for_success_typings(State), loop(updating, NewState); loop(running, #state{mode = 'compile'} = State) -> - dialyzer_coordinator:request_activation(State#state.coordinator), + request_activation(State), ?debug("Compile: ~s\n",[State#state.job]), Result = case start_compilation(State) of @@ -92,51 +96,28 @@ loop(running, #state{mode = 'compile'} = State) -> end, report_to_coordinator(Result, State); loop(running, #state{mode = 'warnings'} = State) -> - dialyzer_coordinator:request_activation(State#state.coordinator), + request_activation(State), ?debug("Warning: ~s\n",[State#state.job]), Result = collect_warnings(State), report_to_coordinator(Result, State); loop(running, #state{mode = Mode} = State) when Mode =:= 'typesig'; Mode =:= 'dataflow' -> request_activation(State), - ?debug("Run: ~p\n",[State#state.job]), + ?debug("~w: Run: ~p\n", [self(), State#state.job]), NotFixpoint = 
do_work(State), - ok = broadcast_done(State), report_to_coordinator(NotFixpoint, State). waits_more_success_typings(#state{depends_on = Depends}) -> Depends =/= []. -broadcast_done(#state{job = SCC, init_data = InitData, - coordinator = Coordinator}) -> - RequiredBy = dialyzer_succ_typings:find_required_by(SCC, InitData), - {Callers, Unknown} = - dialyzer_coordinator:sccs_to_pids(RequiredBy, Coordinator), - send_done(Callers, SCC), - continue_broadcast_done(Unknown, SCC, Coordinator). - -send_done(Callers, SCC) -> - ?debug("Sending ~p: ~p\n",[SCC, Callers]), - SendSTFun = fun(PID) -> PID ! {done, SCC} end, - lists:foreach(SendSTFun, Callers). - -continue_broadcast_done([], _SCC, _Coordinator) -> ok; -continue_broadcast_done(Rest, SCC, Coordinator) -> - %% This time limit should be greater than the time required - %% by the coordinator to spawn all processes. - timer:sleep(500), - {Callers, Unknown} = dialyzer_coordinator:sccs_to_pids(Rest, Coordinator), - send_done(Callers, SCC), - continue_broadcast_done(Unknown, SCC, Coordinator). - wait_for_success_typings(#state{depends_on = DependsOn} = State) -> receive - {done, SCC} -> - ?debug("GOT ~p: ~p\n",[State#state.job, SCC]), - State#state{depends_on = DependsOn -- [SCC]} + {'DOWN', Ref, process, Pid, _Info} -> + ?debug("~w: ~p got DOWN: ~p\n", [self(), State#state.job, Pid]), + State#state{depends_on = DependsOn -- [{Pid, Ref}]} after 5000 -> - ?debug("Still Waiting ~p: ~p\n",[State#state.job, DependsOn]), + ?debug("~w: Still Waiting ~p:\n ~p\n", [self(), State#state.job, DependsOn]), State end. @@ -150,7 +131,7 @@ do_work(#state{mode = Mode, job = Job, init_data = InitData}) -> end. report_to_coordinator(Result, #state{job = Job, coordinator = Coordinator}) -> - ?debug("Done: ~p\n",[Job]), + ?debug("~w: Done: ~p\n",[self(), Job]), dialyzer_coordinator:job_done(Job, Result, Coordinator). start_compilation(#state{job = Job, init_data = InitData}) -> diff --git a/lib/dialyzer/test/abstract_SUITE.erl b/lib/dialyzer/test/abstract_SUITE.erl index 269db3e836..37fb39cf27 100644 --- a/lib/dialyzer/test/abstract_SUITE.erl +++ b/lib/dialyzer/test/abstract_SUITE.erl @@ -7,7 +7,7 @@ -include_lib("common_test/include/ct.hrl"). -include("dialyzer_test_constants.hrl"). --export([suite/0, all/0, init_per_suite/0, init_per_suite/1]). +-export([suite/0, all/0, init_per_suite/0, init_per_suite/1, end_per_suite/1]). -export([generated_case/1]). suite() -> @@ -24,6 +24,10 @@ init_per_suite(Config) -> ok -> [{dialyzer_options, []}|Config] end. +end_per_suite(_Config) -> + %% This function is required since init_per_suite/1 exists. + ok. + generated_case(Config) when is_list(Config) -> %% Equivalent to: %% @@ -79,7 +83,8 @@ generated_case(Config) when is_list(Config) -> Config, [], []), ok. -test(Prog, Config, COpts, DOpts) -> +test(Prog0, Config, COpts, DOpts) -> + Prog = erl_parse:anno_from_term(Prog0), {ok, BeamFile} = compile(Config, Prog, COpts), run_dialyzer(Config, succ_typings, [BeamFile], DOpts). diff --git a/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options b/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options index cb6a88786e..365b4798c5 100644 --- a/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options +++ b/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options @@ -1,2 +1,2 @@ {dialyzer_options, []}. -{time_limit, 2}. +{time_limit, 5}. 
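Note (illustrative, not part of the commit): in the dialyzer_worker.erl hunks above, a typesig/dataflow worker now resolves the SCCs it depends on to PIDs once, monitors them, and counts 'DOWN' messages, replacing the old {done, SCC} broadcast and its retry loop (broadcast_done/1 and friends are removed). A minimal sketch of the monitor pattern; the function names are hypothetical:

    %% Sketch: block until every dependency process has terminated.
    %% Monitoring an already-dead PID still delivers a 'DOWN' (reason noproc),
    %% so dependencies that finished earlier are handled as well.
    wait_for_deps(Pids) ->
        Refs = [{Pid, erlang:monitor(process, Pid)} || Pid <- Pids],
        wait_down(Refs).

    wait_down([]) -> ok;
    wait_down(Refs) ->
        receive
            {'DOWN', Ref, process, Pid, _Info} ->
                wait_down(Refs -- [{Pid, Ref}])
        end.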
diff --git a/lib/dialyzer/test/map_SUITE_data/dialyzer_options b/lib/dialyzer/test/map_SUITE_data/dialyzer_options index 50991c9bc5..02425c33f2 100644 --- a/lib/dialyzer/test/map_SUITE_data/dialyzer_options +++ b/lib/dialyzer/test/map_SUITE_data/dialyzer_options @@ -1 +1,2 @@ {dialyzer_options, []}. +{time_limit, 30}. diff --git a/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options b/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options index 06ed52043a..cb301ff6a1 100644 --- a/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options +++ b/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options @@ -1,2 +1,2 @@ {dialyzer_options, [{warnings, [no_unused, no_return]}]}. -{time_limit, 20}. +{time_limit, 40}. diff --git a/lib/dialyzer/test/opaque_SUITE_data/results/weird b/lib/dialyzer/test/opaque_SUITE_data/results/weird new file mode 100644 index 0000000000..d7f57cd152 --- /dev/null +++ b/lib/dialyzer/test/opaque_SUITE_data/results/weird @@ -0,0 +1,6 @@ + +weird_warning1.erl:15: Matching of pattern {'a', Dict} tagged with a record name violates the declared type of #b{q::queue:queue(_)} +weird_warning2.erl:13: Matching of pattern <{'b', Queue}, Key, Value> tagged with a record name violates the declared type of <#a{d::dict:dict(_,_)},'my_key','my_value'> +weird_warning3.erl:14: The call weird_warning3:add_element(#a{d::queue:queue(_)},'my_key','my_value') does not have a term of type #a{d::dict:dict(_,_)} | #b{q::queue:queue(_)} (with opaque subterms) as 1st argument +weird_warning3.erl:16: The attempt to match a term of type #a{d::queue:queue(_)} against the pattern {'a', Dict} breaks the opacity of queue:queue(_) +weird_warning3.erl:18: Matching of pattern {'b', Queue} tagged with a record name violates the declared type of #a{d::queue:queue(_)} diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl b/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl index 6a5b593db0..53b08cc5c9 100644 --- a/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl +++ b/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl @@ -1340,7 +1340,7 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map, State) -> {{Tag, PatTypes}, false}; false -> %% Try to find out if this is a default clause in a list - %% comprehension and supress this. A real Hack(tm) + %% comprehension and suppress this. A real Hack(tm) Force0 = case is_compiler_generated(cerl:get_ann(C)) of true -> diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning1.erl b/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning1.erl new file mode 100644 index 0000000000..094138e72b --- /dev/null +++ b/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning1.erl @@ -0,0 +1,18 @@ +-module(weird_warning1). +-export([public_func/0]). + +-record(a, { + d = dict:new() :: dict:dict() + }). + +-record(b, { + q = queue:new() :: queue:queue() + }). + +public_func() -> + add_element(#b{}, my_key, my_value). + +add_element(#a{d = Dict}, Key, Value) -> + dict:store(Key, Value, Dict); +add_element(#b{q = Queue}, Key, Value) -> + queue:in({Key, Value}, Queue). diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning2.erl b/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning2.erl new file mode 100644 index 0000000000..4e4512157b --- /dev/null +++ b/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning2.erl @@ -0,0 +1,14 @@ +-module(weird_warning2). +-export([public_func/0]). + +-record(a, {d = dict:new() :: dict:dict()}). 
+ +-record(b, {q = queue:new() :: queue:queue()}). + +public_func() -> + add_element(#a{}, my_key, my_value). + +add_element(#a{d = Dict}, Key, Value) -> + dict:store(Key, Value, Dict); +add_element(#b{q = Queue}, Key, Value) -> + queue:in({Key, Value}, Queue). diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning3.erl b/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning3.erl new file mode 100644 index 0000000000..b70ca645cb --- /dev/null +++ b/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning3.erl @@ -0,0 +1,19 @@ +-module(weird_warning3). +-export([public_func/0]). + +-record(a, { + d = dict:new() :: dict:dict() + }). + +-record(b, { + q = queue:new() :: queue:queue() + }). + +public_func() -> + %% Notice that t_to_string() will create "#a{d::queue:queue(_)}". + add_element({a, queue:new()}, my_key, my_value). + +add_element(#a{d = Dict}, Key, Value) -> + dict:store(Key, Value, Dict); +add_element(#b{q = Queue}, Key, Value) -> + queue:in({Key, Value}, Queue). diff --git a/lib/dialyzer/test/options1_SUITE_data/results/compiler b/lib/dialyzer/test/options1_SUITE_data/results/compiler index 30b6f4814a..cbb5115c91 100644 --- a/lib/dialyzer/test/options1_SUITE_data/results/compiler +++ b/lib/dialyzer/test/options1_SUITE_data/results/compiler @@ -31,6 +31,8 @@ cerl_inline.erl:2756: The pattern <{F, _L, D}, Vs> can never match the type <[1. compile.erl:788: The pattern {'error', Es} can never match the type {'ok',<<_:64,_:_*8>>} core_lint.erl:473: The pattern <{'c_atom', _, 'all'}, 'binary', _Def, St> can never match the type <_,#c_nil{} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple',_,_} | #c_cons{hd::#c_nil{} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple',_,_} | #c_cons{hd::{_,_} | {_,_,_} | {_,_,_,_},tl::{_,_} | {_,_,_} | {_,_,_,_}},tl::#c_nil{} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple',_,_} | #c_cons{hd::{_,_} | {_,_,_} | {_,_,_,_},tl::{_,_} | {_,_,_} | {_,_,_,_}}},[any()],_> core_lint.erl:505: The pattern <_Req, 'unknown', St> can never match the type <non_neg_integer(),non_neg_integer(),_> +sys_pre_expand.erl:625: Call to missing or unexported function erlang:hash/2 v3_codegen.erl:1569: The call v3_codegen:load_reg_1(V::any(),I::0,Rs::any(),pos_integer()) will never return since it differs in the 4th argument from the success typing arguments: (any(),0,maybe_improper_list(),0) v3_codegen.erl:1571: The call v3_codegen:load_reg_1(V::any(),I::0,[],pos_integer()) will never return since it differs in the 4th argument from the success typing arguments: (any(),0,maybe_improper_list(),0) v3_core.erl:646: Matching of pattern {'iprimop', _, _, _} tagged with a record name violates the declared type of #c_nil{anno::[any(),...]} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple' | 'c_var' | 'ibinary' | 'icatch' | 'ireceive1',[any(),...] 
| {_,_,_,_},_} | #c_cons{anno::[any(),...]} | #c_fname{anno::[any(),...]} | #iletrec{anno::{_,_,_,_},defs::[any(),...],body::[any(),...]} | #icase{anno::{_,_,_,_},args::[any()],clauses::[any()],fc::{_,_,_,_,_,_}} | #ireceive2{anno::{_,_,_,_},clauses::[any()],action::[any()]} | #ifun{anno::{_,_,_,_},id::[any(),...],vars::[any()],clauses::[any(),...],fc::{_,_,_,_,_,_}} | #imatch{anno::{_,_,_,_},guard::[],fc::{_,_,_,_,_,_}} | #itry{anno::{_,_,_,_},args::[any()],vars::[any(),...],body::[any(),...],evars::[any(),...],handler::[any(),...]} +v3_kernel.erl:1381: Call to missing or unexported function erlang:hash/2 diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl index 0108f91b7f..cf2cbe8e2b 100644 --- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl +++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl @@ -565,7 +565,7 @@ resolve_inst({make_fun2,Args},_,_,Lbls,Lambdas) -> [OldIndex] = resolve_args(Args), {value,{OldIndex,{F,A,_Lbl,_Index,NumFree,OldUniq}}} = lists:keysearch(OldIndex, 1, Lambdas), - [{_,{M,_,_}}|_] = Lbls, % Slighly kludgy. + [{_,{M,_,_}}|_] = Lbls, % Slightly kludgy. {make_fun2,{M,F,A},OldIndex,OldUniq,NumFree}; resolve_inst(Instr, Imports, Str, Lbls, _Lambdas) -> resolve_inst(Instr, Imports, Str, Lbls). diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl index 95d2076ccf..8fca202b8c 100644 --- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl +++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl @@ -951,7 +951,7 @@ i_letrec(Es, B, Xs, Ctxt, Ren, Env, S) -> %% Finally, we create new letrec-bindings for any and all %% residualised definitions. All referenced functions should have - %% been visited; the call to `visit' below is expected to retreive a + %% been visited; the call to `visit' below is expected to retrieve a %% cached expression. Rs1 = keep_referenced(Rs, S4), {Es1, S5} = mapfoldl(fun (R, S) -> @@ -997,7 +997,7 @@ i_apply(E, Ctxt, Ren, Env, S) -> %% location could be recycled after the flag has been tested, but %% there is no real advantage to that, because in practice, only %% 4-5% of all created store locations will ever be reused, while - %% there will be a noticable overhead for managing the free list.) + %% there will be a noticeable overhead for managing the free list.) case st__get_app_inlined(L, S3) of true -> %% The application was inlined, so we have the final @@ -2007,7 +2007,7 @@ residualize_operand(Opnd, E, S) -> case st__get_opnd_effect(Opnd#opnd.loc, S) of true -> %% The operand has not been visited, so we do that now, but - %% in `effect' context. (Waddell's algoritm does some stuff + %% in `effect' context. (Waddell's algorithm does some stuff %% here to account specially for the operand size, which %% appears unnecessary.) {E1, S1} = i(Opnd#opnd.expr, effect, Opnd#opnd.ren, diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl index 01c2512397..76ae871aee 100644 --- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl +++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl @@ -469,7 +469,7 @@ get(Key, Env) -> -define(MINIMUM_RANGE, 1000). -define(START_RANGE_FACTOR, 50). -define(MAX_RETRIES, 2). % retries before enlarging range --define(ENLARGE_FACTOR, 10). 
% range enlargment factor +-define(ENLARGE_FACTOR, 10). % range enlargement factor -ifdef(DEBUG). %% If you want to use these process dictionary counters, make sure to diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl index 49a95a95e5..69139cd568 100644 --- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl +++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl @@ -316,7 +316,7 @@ record_test_in_guard(Line, Term, Name, Vs, St) -> %% code bloat.) %% (4) Xref may be run on the abstract code, so the name in the %% abstract code must be erlang:is_record/3. - %% (5) To achive both (3) and (4) at the same time, set the name + %% (5) To achieve both (3) and (4) at the same time, set the name %% here to erlang:is_record/3, but mark it as compiler-generated. %% The v3_core pass will change the name to erlang:internal_is_record/3. Fs = record_fields(Name, St), diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl index 33a322b466..acb49b5faf 100644 --- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl +++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl @@ -1667,7 +1667,7 @@ bs_function({function,Name,Arity,CLabel,Asm0}=Func) -> %%% %%% Pass 1: Found out which bs_restore's that are needed. For now we assume -%%% that a bs_restore is needed unless it is directly preceeded by a bs_save. +%%% that a bs_restore is needed unless it is directly preceded by a bs_save. %%% bs_needed([{bs_save,Name},{bs_restore,Name}|T], N, _BsUsed, Dict) -> diff --git a/lib/dialyzer/test/plt_SUITE.erl b/lib/dialyzer/test/plt_SUITE.erl index 460d4e2240..ba153c1c27 100644 --- a/lib/dialyzer/test/plt_SUITE.erl +++ b/lib/dialyzer/test/plt_SUITE.erl @@ -26,6 +26,8 @@ build_plt(Config) -> end. beam_tests(Config) when is_list(Config) -> + PrivDir = ?config(priv_dir, Config), + Plt = filename:join(PrivDir, "beam_tests.plt"), Prog = <<" -module(no_auto_import). @@ -42,10 +44,12 @@ beam_tests(Config) when is_list(Config) -> ">>, Opts = [no_auto_import], {ok, BeamFile} = compile(Config, Prog, no_auto_import, Opts), - [] = run_dialyzer(plt_build, [BeamFile], []), + [] = run_dialyzer(plt_build, [BeamFile], [{output_plt, Plt}]), ok. run_plt_check(Config) when is_list(Config) -> + PrivDir = ?config(priv_dir, Config), + Plt = filename:join(PrivDir, "run_plt_check.plt"), Mod1 = <<" -module(run_plt_check1). ">>, @@ -56,7 +60,7 @@ run_plt_check(Config) when is_list(Config) -> {ok, BeamFile1} = compile(Config, Mod1, run_plt_check1, []), {ok, BeamFile2} = compile(Config, Mod2A, run_plt_check2, []), - [] = run_dialyzer(plt_build, [BeamFile1, BeamFile2], []), + [] = run_dialyzer(plt_build, [BeamFile1, BeamFile2], [{output_plt, Plt}]), Mod2B = <<" -module(run_plt_check2). @@ -70,11 +74,13 @@ run_plt_check(Config) when is_list(Config) -> % callgraph warning as run_plt_check2:call/1 makes a call to unexported % function run_plt_check1:call/1. - [_] = run_dialyzer(plt_check, [], []), + [_] = run_dialyzer(plt_check, [], [{init_plt, Plt}]), ok. run_succ_typings(Config) when is_list(Config) -> + PrivDir = ?config(priv_dir, Config), + Plt = filename:join(PrivDir, "run_succ_typings.plt"), Mod1A = <<" -module(run_succ_typings1). 
@@ -84,7 +90,7 @@ run_succ_typings(Config) when is_list(Config) -> ">>, {ok, BeamFile1} = compile(Config, Mod1A, run_succ_typings1, []), - [] = run_dialyzer(plt_build, [BeamFile1], []), + [] = run_dialyzer(plt_build, [BeamFile1], [{output_plt, Plt}]), Mod1B = <<" -module(run_succ_typings1). @@ -107,9 +113,11 @@ run_succ_typings(Config) when is_list(Config) -> {ok, BeamFile2} = compile(Config, Mod2, run_succ_typings2, []), % contract types warning as run_succ_typings2:call/0 makes a call to % run_succ_typings1:call/0, which returns a (not b) in the PLT. - [_] = run_dialyzer(succ_typings, [BeamFile2], [{check_plt, false}]), + [_] = run_dialyzer(succ_typings, [BeamFile2], + [{check_plt, false}, {init_plt, Plt}]), % warning not returned as run_succ_typings1 is updated in the PLT. - [] = run_dialyzer(succ_typings, [BeamFile2], [{check_plt, true}]), + [] = run_dialyzer(succ_typings, [BeamFile2], + [{check_plt, true}, {init_plt, Plt}]), ok. @@ -253,16 +261,15 @@ remove_plt(Config) -> bad_dialyzer_attr(Config) -> PrivDir = ?config(priv_dir, Config), - + Plt = filename:join(PrivDir, "plt_bad_dialyzer_attr.plt"), Prog1 = <<"-module(dial). -dialyzer({no_return, [undef/0]}).">>, {ok, Beam1} = compile(Config, Prog1, dial, []), - Plt = filename:join(PrivDir, "bad_attr.plt"), {dialyzer_error, "Analysis failed with error:\n" "Could not scan the following file(s):\n" " Unknown function undef/0 in line " ++ _} = - (catch run_dialyzer(plt_build, [Beam1], [])), + (catch run_dialyzer(plt_build, [Beam1], [{output_plt, Plt}])), Prog2 = <<"-module(dial). -dialyzer({no_return, [{undef,1,2}]}).">>, @@ -271,7 +278,7 @@ bad_dialyzer_attr(Config) -> "Analysis failed with error:\n" "Could not scan the following file(s):\n" " Bad function {undef,1,2} in line " ++ _} = - (catch run_dialyzer(plt_build, [Beam2], [])), + (catch run_dialyzer(plt_build, [Beam2], [{output_plt, Plt}])), ok. 
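Note (illustrative, not part of the commit): the plt_SUITE changes above give each test case a private PLT file through the output_plt and init_plt options instead of sharing the user's default PLT. A hedged example of the same pattern using dialyzer:run/1 directly; the .beam and .plt file names are made up:

    %% Build a PLT into a private file, then analyze other code against it.
    [] = dialyzer:run([{analysis_type, plt_build},
                       {files, ["m1.beam"]},
                       {output_plt, "private.plt"}]),
    Warnings = dialyzer:run([{analysis_type, succ_typings},
                             {files, ["m2.beam"]},
                             {init_plt, "private.plt"}]).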
diff --git a/lib/dialyzer/test/r9c_SUITE_data/results/mnesia b/lib/dialyzer/test/r9c_SUITE_data/results/mnesia index bf67447ee7..71acdd9c9e 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/results/mnesia +++ b/lib/dialyzer/test/r9c_SUITE_data/results/mnesia @@ -17,6 +17,7 @@ mnesia_frag.erl:294: The call mnesia_frag:remote_collect(Ref::reference(),{'erro mnesia_frag.erl:304: The call mnesia_frag:remote_collect(Ref::reference(),{'error',{'node_not_running',_}},[],OldSelectFun::fun(() -> [any()])) will never return since it differs in the 2nd argument from the success typing arguments: (reference(),'ok',[any()],fun(() -> [any()])) mnesia_frag.erl:312: The call mnesia_frag:remote_collect(Ref::reference(),LocalRes::{'error',_},[],OldSelectFun::fun(() -> [any()])) will never return since it differs in the 2nd argument from the success typing arguments: (reference(),'ok',[any()],fun(() -> [any()])) mnesia_frag_hash.erl:24: Callback info about the mnesia_frag_hash behaviour is not available +mnesia_frag_old_hash.erl:105: Call to missing or unexported function erlang:hash/2 mnesia_frag_old_hash.erl:23: Callback info about the mnesia_frag_hash behaviour is not available mnesia_index.erl:52: The call mnesia_lib:other_val(Var::{_,'commit_work' | 'index' | 'setorbag' | 'storage_type' | {'index',_}},_ReASoN_::any()) will never return since it differs in the 1st argument from the success typing arguments: ({_,'active_replicas' | 'where_to_read' | 'where_to_write'},any()) mnesia_lib.erl:1028: The pattern {'EXIT', Reason} can never match the type [any()] | {'error',_} diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl index ed38b2f915..3829479a94 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl @@ -520,7 +520,7 @@ save_automatic_tagged_types([_M|Ms]) -> %% remove_in_set_imports/3 : %% input: list with tuples of each module's imports and module name %% respectively. -%% output: one list with same format but each occured import from a +%% output: one list with same format but each occurred import from a %% module in the input set (IMNameL) is removed. remove_in_set_imports([{{imports,ImpL},_ModName}|Rest],InputMNameL,Acc) -> NewImpL = remove_in_set_imports1(ImpL,InputMNameL,[]), @@ -1628,7 +1628,7 @@ tlv_tag1(<<1:1,PartialTag:7,Buffer/binary>>,Acc) -> tlv_tag1(Buffer,(Acc bsl 7) bor PartialTag). %% reads the content from the configuration file and returns the -%% selected part choosen by InfoType. Assumes that the config file +%% selected part chosen by InfoType. Assumes that the config file %% content is an Erlang term. read_config_file(ModuleName,InfoType) when atom(InfoType) -> CfgList = read_config_file(ModuleName), diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl index c26b8f851b..a4f39bde74 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl @@ -4028,7 +4028,7 @@ check_sequence(S,Type,Comps) -> {CRelInf,NewComps2} = componentrelation_leadingattr(S,NewComps), % io:format("CRelInf: ~p~n",[CRelInf]), % io:format("NewComps2: ~p~n",[NewComps2]), - %% CompListWithTblInf has got a lot unecessary info about + %% CompListWithTblInf has got a lot unnecessary info about %% the involved class removed, as the class of the object %% set. 
CompListWithTblInf = get_tableconstraint_info(S,Type,NewComps2), @@ -4686,7 +4686,7 @@ any_component_relation(_,[],_,_,Acc) -> %% evaluate_atpath/4 finds out whether the at notation refers to the %% search level. The list of referenced names in the AtNot list shall %% begin with a name that exists on the level it refers to. If the -%% found AtPath is refering to the same sub-branch as the simple table +%% found AtPath is referring to the same sub-branch as the simple table %% has, then there shall not be any leading attribute info on this %% level. evaluate_atpath(_,[],Cnames,{innermost,AtPath=[Ref|_Refs]}) -> @@ -4857,7 +4857,7 @@ innertype_comprel1(S,T = #type{def=Def,constraint=Cons,tablecinf=TCI},Path) -> case Cons of [{componentrelation,{_,_,ObjectSet},AtList}|_Rest] -> %% This AtList must have an "outermost" at sign to be - %% relevent here. + %% relevant here. [{_,AL=[#'Externalvaluereference'{value=_Attr}|_R1]}|_R2] = AtList, %% #'ObjectClassFieldType'{class=ClassDef} = Def, diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl index 392896932a..0b5ea85681 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl @@ -1259,7 +1259,7 @@ gen_dec_line(Erules,TopType,Cname,CTags,Type,OptOrMand,DecObjInf) -> end, case DecObjInf of {Cname,ObjSet} -> % this must be the component were an object is - %% choosen from the object set according to the table + %% chosen from the object set according to the table %% constraint. {[{ObjSet,Cname,asn1ct_gen:mk_var(asn1ct_name:curr(term))}], PostpDec}; diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl index 9725da4d11..fb9ffb13db 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl @@ -1096,7 +1096,7 @@ gen_dec_line(Erules,TopType,Cname,CTags,Type,OptOrMand,DecObjInf) -> end, case DecObjInf of {Cname,ObjSet} -> % this must be the component were an object is - %% choosen from the object set according to the table + %% chosen from the object set according to the table %% constraint. 
{[{ObjSet,Cname,asn1ct_gen:mk_var(asn1ct_name:curr(term))}], PostpDec}; diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl index 5f8c7a0de8..32676b3448 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl @@ -2721,7 +2721,7 @@ prioritize_error(ErrList) -> end, NewErrList), case SplitErrs of - {[],UndefPosErrs} -> % if no error with Positon exists + {[],UndefPosErrs} -> % if no error with Position exists lists:last(UndefPosErrs); {IntPosErrs,_} -> IntPosReasons = lists:map(fun(X)->element(2,X) end,IntPosErrs), diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl index 5854f8edbd..8f4d189b5a 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl @@ -1036,7 +1036,7 @@ decode_real2(Buffer0, Form, Len, RemBytes1) -> %% %% bitstring NamedBitList %% Val can be of: -%% - [identifiers] where only named identifers are set to one, +%% - [identifiers] where only named identifiers are set to one, %% the Constraint must then have some information of the %% bitlength. %% - [list of ones and zeroes] all bits diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl index 0457425445..6e12d36579 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl @@ -1034,7 +1034,7 @@ decode_real_notag(_Buffer, _Form) -> %% %% bitstring NamedBitList %% Val can be of: -%% - [identifiers] where only named identifers are set to one, +%% - [identifiers] where only named identifiers are set to one, %% the Constraint must then have some information of the %% bitlength. %% - [list of ones and zeroes] all bits diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl index b163aa24ac..97c92a2dd1 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl @@ -823,7 +823,7 @@ decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% bitstring NamedBitList %% Val can be of: -%% - [identifiers] where only named identifers are set to one, +%% - [identifiers] where only named identifiers are set to one, %% the Constraint must then have some information of the %% bitlength. %% - [list of ones and zeroes] all bits diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl index 15986cc217..aa2cf5ba88 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl @@ -1000,7 +1000,7 @@ decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% bitstring NamedBitList %% Val can be of: -%% - [identifiers] where only named identifers are set to one, +%% - [identifiers] where only named identifiers are set to one, %% the Constraint must then have some information of the %% bitlength. 
%% - [list of ones and zeroes] all bits diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl index 43d9bef54e..24f7949c21 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl @@ -1059,7 +1059,7 @@ decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% bitstring NamedBitList %% Val can be of: -%% - [identifiers] where only named identifers are set to one, +%% - [identifiers] where only named identifiers are set to one, %% the Constraint must then have some information of the %% bitlength. %% - [list of ones and zeroes] all bits diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl index 4f0ca99cce..8be5b0cd6e 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl @@ -108,7 +108,7 @@ user(Pid, User, Pass) -> gen_server:call(Pid, {user, User, Pass}, infinity). %% user(Pid, User, Pass,Acc) -%% Purpose: Login whith a supplied account name +%% Purpose: Login with a supplied account name %% Args: Pid = pid(), User = Pass = Acc = string() %% Returns: ok | {error, euser} | {error, econn} | {error, eacct} user(Pid, User, Pass,Acc) -> diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl index cf05431f5a..039960dac7 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl @@ -24,7 +24,7 @@ %%% - RFC 3310 Authentication and Key Agreement (AKA) (not yet!) %%% - HTTP/1.1 Specification Errata found at %%% http://world.std.com/~lawrence/http_errata.html -%%% Additionaly follows the following recommendations: +%%% Additionally follows the following recommendations: %%% - RFC 3143 Known HTTP Proxy/Caching Problems (not yet!) %%% - draft-nottingham-hdrreg-http-00.txt (not yet!) %%% diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl index ebefcd7ad7..28ea42c685 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl @@ -697,7 +697,7 @@ lookup(Key,Val) -> %%% This code is for parsing trailer headers in chunked messages. %%% Will be deprecated whenever I have found an alternative working solution! %%% Note: -%%% - The header names are returned slighly different from what the what +%%% - The header names are returned slightly different from what the what %%% inet_drv returns read_headers_old(Scheme,Socket,Timeout) -> read_headers_old(<<>>,Scheme,Socket,Timeout,[],[]). diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl index 45beaa84f7..d2653184aa 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl @@ -95,7 +95,7 @@ abort_session(Addr,Sid,Msg) -> next_request(Addr,Sid) -> gen_server:call(?HMACALL,{next_request,Addr,Sid},infinity). 
-%%% Session handler has succeded to set up a new session, now register +%%% Session handler has succeed to set up a new session, now register %%% the socket register_socket(Addr,Sid,Socket) -> gen_server:cast(?HMACALL,{register_socket,Addr,Sid,Socket}). diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl index 85e06f43b6..3058ac3556 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl @@ -224,7 +224,7 @@ is_blocked(ServerRef) -> %% -%% Module API. Theese functions are intended for use from modules only. +%% Module API. These functions are intended for use from modules only. %% config_lookup(Port, Query) -> diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl index d7a698d65a..07f951d057 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl @@ -109,7 +109,7 @@ get_persistens(HTTPVersion,ParsedHeader,ConfigDB)-> %%If it is version prio to 1.1 kill the conneciton [$H, $T, $T, $P, $\/, $1, $.,N] -> case httpd_util:key1search(ParsedHeader,"connection","keep-alive")of - %%if the connection isnt ordered to go down let it live + %%if the connection isn't ordered to go down let it live %%The keep-alive value is the older http/1.1 might be older %%Clients that use it. "keep-alive" when N >= 49 -> diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_response.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_response.erl index 47c7fc1b8d..50e0e42786 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_response.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_response.erl @@ -34,7 +34,7 @@ -define(PROCEED_RESPONSE(StatusCode, Info), {proceed, [{response,{already_sent, StatusCode, - httpd_util:key1search(Info#mod.data,content_lenght)}}]}). + httpd_util:key1search(Info#mod.data,content_length)}}]}). -include("httpd.hrl"). 
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl index 6b872d7c95..73edcf6b92 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl @@ -60,7 +60,7 @@ % request_line, % string() Request Line headers, % #req_headers{} Parsed request headers entity_body= <<>>, % binary() Body of request - connection, % boolean() true if persistant connection + connection, % boolean() true if persistent connection status_code, % int() Status code logging % int() 0=No logging % 1=Only mod_log present diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl index e42494ff76..847d6e97c1 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl @@ -53,7 +53,7 @@ store_directory_data(Directory, DirData) -> %% API %% -%% Compability API +%% Compatibility API store_user(UserName, Password, Port, Dir, AccessPassword) -> diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl index 1203aeaa4c..a48f73274b 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl @@ -440,7 +440,7 @@ try_new_erl_scheme_method(Info,Env,Input,Mod,Func)-> %%---------------------------------------------------------------------- -%%The function recieves the data from the process that generates the page +%%The function receives the data from the process that generates the page %%and send the data to the client through the mod_cgi:send function %%---------------------------------------------------------------------- diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl index f600c65e92..d95c745b07 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl @@ -272,10 +272,10 @@ controlIfAllowed(AllowedNetworks,UserNetwork,IfAllowed,IfDenied)-> end. 
-%---------------------------------------------------------------------% -%The Denycontrol isn't neccessary to preform since the allow control % -%override the deny control % -%---------------------------------------------------------------------% +%--------------------------------------------------------------------% +%The Denycontrol isn't necessary to preform since the allow control % +%override the deny control % +%--------------------------------------------------------------------% controlDenyAllow(DeniedNetworks,AllowedNetworks,UserNetwork)-> case AllowedNetworks of [{allow,all}]-> @@ -657,7 +657,7 @@ getData2(HtAccessFileNames,SplittedPath,Info)-> %---------------------------------------------------------------------- %HtAccessFilenames is a list the names the accesssfiles can have -%Path is the shortest match agains all alias and documentroot +%Path is the shortest match against all alias and documentroot %rest of splitted path is a list of the parts of the path %Info is the mod recod from the server %---------------------------------------------------------------------- diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl index 4e6030d5e2..f2c45c4a3f 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl @@ -80,7 +80,7 @@ send_range_response(Path,Info,Ranges,FileInfo,LastModified)-> send_range_response(Path,Info,Start,Stop,FileInfo,LastModified) end. %%More than one range specified -%%Send a multipart reponse to the user +%%Send a multipart response to the user % %%An example of an multipart range response diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl index 76168f3890..a997db6880 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl @@ -48,8 +48,8 @@ do(Info) -> %%---------------------------------------------------------------------- -%%Control that the request header did not contians any limitations -%%wheather a response shall be createed or not +%%Control that the request header did not contains any limitations +%%whether a response shall be created or not %%---------------------------------------------------------------------- do_responsecontrol(Info) -> diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl index 19b571ac47..cc72a9b6fe 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl @@ -431,7 +431,7 @@ wrap_trans(State, Fun, Args, Retries, Mod, Kind) -> %% read lock is only set on the first node %% Nodes may either be a list of nodes or one node as an atom %% Mnesia on all Nodes must be connected to each other, but -%% it is not neccessary that they are up and running. +%% it is not necessary that they are up and running. 
lock(LockItem, LockKind) -> case get(mnesia_activity_state) of diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl index fdbf3e4481..a85a08e4f8 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl @@ -775,7 +775,7 @@ restore_tables([Rec | Recs], Header, Schema, State = {local, LocalTabs, L}) -> restore_tables([], _Header, _Schema, State) -> State. -%% Creates all neccessary dat files and inserts +%% Creates all necessary dat files and inserts %% the table definitions in the schema table %% %% Returns a list of local_tab tuples for all local tables diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl index 2b5c77b3ba..0403c7e978 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl @@ -332,7 +332,7 @@ really_retain(Name, Tab) -> %% %% {min, MinTabs} %% Minimize redundancy and only keep checkpoint info together with -%% one replica, preferrably at the local node. If any node involved +%% one replica, preferably at the local node. If any node involved %% the checkpoint goes down, the checkpoint is deactivated. %% %% {max, MaxTabs} @@ -345,7 +345,7 @@ really_retain(Name, Tab) -> %% {ram_overrides_dump, Tabs} %% Only applicable for ram_copies. Bool controls which versions of %% the records that should be included in the checkpoint state. -%% true means that the latest comitted records in ram (i.e. the +%% true means that the latest committed records in ram (i.e. the %% records that the application accesses) should be included %% in the checkpoint. 
false means that the records dumped to %% dat-files (the records that will be loaded at startup) should diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl index 70fee1741e..07667d73f5 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl @@ -61,7 +61,7 @@ do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == disc_copies -> Repair = mnesia_monitor:get_env(auto_repair), Args = [{keypos, 2}, public, named_table, Type], case Reason of - {dumper, _} -> %% Resources allready allocated + {dumper, _} -> %% Resources already allocated ignore; _ -> mnesia_monitor:mktab(Tab, Args), @@ -82,7 +82,7 @@ do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == disc_copies -> do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == ram_copies -> Args = [{keypos, 2}, public, named_table, Type], case Reason of - {dumper, _} -> %% Resources allready allocated + {dumper, _} -> %% Resources already allocated ignore; _ -> mnesia_monitor:mktab(Tab, Args), diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl index 701aa8f598..accb631f2a 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl @@ -170,14 +170,14 @@ loop(State) -> end; %% If test_set_sticky fails, we send this to all nodes - %% after aquiring a real write lock on Oid + %% after acquiring a real write lock on Oid {stick, {Tab, _}, N} -> ?ets_insert(mnesia_sticky_locks, {Tab, N}), loop(State); %% The caller which sends this message, must have first - %% aquired a write lock on the entire table + %% acquired a write lock on the entire table {unstick, Tab} -> ?ets_delete(mnesia_sticky_locks, Tab), loop(State); @@ -738,11 +738,11 @@ dirty_sticky_lock(Tab, Key, Nodes, Lock) -> sticky_wlock_table(Tid, Store, Tab) -> sticky_lock(Tid, Store, {Tab, ?ALL}, write). -%% aquire a wlock on Oid +%% acquire a wlock on Oid %% We store a {Tabname, write, Tid} in all locktables %% on all nodes containing a copy of Tabname %% We also store an item {{locks, Tab, Key}, write} in the -%% local store when we have aquired the lock. +%% local store when we have acquired the lock. 
%% wlock(Tid, Store, Oid) -> {Tab, Key} = Oid, diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl index d1ff09ce29..7fd5f70e23 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl @@ -144,7 +144,7 @@ check_protocol([{Node, {accept, Mon, _Version, Protocol}} | Tail], Protocols) -> end, [node(Mon) | check_protocol(Tail, Protocols)]; false -> - unlink(Mon), % Get rid of unneccessary link + unlink(Mon), % Get rid of unnecessary link check_protocol(Tail, Protocols) end; check_protocol([{Node, {reject, _Mon, Version, Protocol}} | Tail], Protocols) -> diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl index ec07e1c1ab..fbd1356a7f 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl @@ -1265,7 +1265,7 @@ make_change_table_copy_type(Tab, Node, ToS) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% change index functions .... -%% Pos is allready added by 1 in both of these functions +%% Pos is already added by 1 in both of these functions add_table_index(Tab, Pos) -> schema_transaction(fun() -> do_add_table_index(Tab, Pos) end). diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl index 3e08354b5a..09e310530d 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl @@ -1615,7 +1615,7 @@ commit_participant(Coord, Tid, Bin, C0, DiscNs, _RamNs) -> do_abort(Tid, Bin) when binary(Bin) -> %% Possible optimization: - %% If we want we could pass arround a flag + %% If we want we could pass around a flag %% that tells us whether the binary contains %% schema ops or not. Only if the binary %% contains schema ops there are meningful diff --git a/lib/dialyzer/test/small_SUITE_data/src/tuple1.erl b/lib/dialyzer/test/small_SUITE_data/src/tuple1.erl index d608275efe..88ac486044 100644 --- a/lib/dialyzer/test/small_SUITE_data/src/tuple1.erl +++ b/lib/dialyzer/test/small_SUITE_data/src/tuple1.erl @@ -2,7 +2,7 @@ %%% File : tuple1.erl %%% Author : Tobias Lindahl <[email protected]> %%% Description : Exposed two bugs in the analysis; -%%% one supressed warning and one crash. +%%% one suppressed warning and one crash. %%% %%% Created : 13 Nov 2006 by Tobias Lindahl <[email protected]> %%%------------------------------------------------------------------- diff --git a/lib/dialyzer/vsn.mk b/lib/dialyzer/vsn.mk index 9830a36e60..0919fba834 100644 --- a/lib/dialyzer/vsn.mk +++ b/lib/dialyzer/vsn.mk @@ -1 +1 @@ -DIALYZER_VSN = 3.0.3 +DIALYZER_VSN = 3.1 diff --git a/lib/diameter/doc/src/notes.xml b/lib/diameter/doc/src/notes.xml index c2bbed2e5a..70e1880be5 100644 --- a/lib/diameter/doc/src/notes.xml +++ b/lib/diameter/doc/src/notes.xml @@ -43,6 +43,30 @@ first.</p> <!-- ===================================================================== --> +<section><title>diameter 1.12.2</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + An improvement in the handling of peer failover in + diameter 1.12.1 adversely affected performance when + sending requests. 
Further, the inefficient use of a + public table to route incoming answers has been removed.</p> + <p> + Own Id: OTP-14206</p> + </item> + <item> + <p> + Fixed xml issues in old release notes</p> + <p> + Own Id: OTP-14269</p> + </item> + </list> + </section> + +</section> + <section><title>diameter 1.12.1</title> <section><title>Fixed Bugs and Malfunctions</title> @@ -255,8 +279,8 @@ first.</p> Fix decode of Grouped AVPs containing errors.</p> <p> RFC 6733 says this of Failed-AVP in 7.5:</p> - <p> - <taglist><item><p><c> In the case where the offending AVP + + <taglist><tag></tag><item><p><c> In the case where the offending AVP is embedded within a Grouped AVP, the Failed-AVP MAY contain the grouped AVP, which in turn contains the single offending AVP. The same method MAY be employed if @@ -265,11 +289,11 @@ first.</p> the grouped AVP hierarchy up to the single offending AVP. This enables the recipient to detect the location of the offending AVP when embedded in a - group.</c></p></item></taglist></p> + group.</c></p></item></taglist> <p> It says this of DIAMETER_INVALID_AVP_LENGTH in 7.1.5:</p> - <p> - <taglist><item><p><c> The request contained an AVP with + + <taglist><tag></tag><item><p><c> The request contained an AVP with an invalid length. A Diameter message indicating this error MUST include the offending AVPs within a Failed-AVP AVP. In cases where the erroneous AVP length value @@ -284,7 +308,8 @@ first.</p> the minimum AVP header length, it is sufficient to include an offending AVP header that is formulated by padding the incomplete AVP header with zero up to the - minimum AVP header length.</c></p></item></taglist></p> + minimum AVP header length.</c></p></item></taglist> + <p> The AVPs placed in the errors field of a diameter_packet record are intended to be appropriate for inclusion in a @@ -949,8 +974,8 @@ first.</p> Be lenient with the M-bit in Grouped AVPs.</p> <p> RFC 6733 says this, in 4.4:</p> - <p> - <taglist><item><p><c>Receivers of a Grouped AVP that does + + <taglist><tag></tag><item><p><c>Receivers of a Grouped AVP that does not have the 'M' (mandatory) bit set and one or more of the encapsulated AVPs within the group has the 'M' (mandatory) bit set MAY simply be ignored if the Grouped @@ -958,14 +983,14 @@ first.</p> encapsulated AVP with its 'M' (mandatory) bit set is further encapsulated within other sub-groups, i.e., other Grouped AVPs embedded within the Grouped - AVP.</c></p></item></taglist></p> + AVP.</c></p></item></taglist> <p> The first sentence is mangled but take it to mean this:</p> - <p> - <taglist><item><p><c>An unrecognized AVP of type Grouped + + <taglist><tag></tag><item><p><c>An unrecognized AVP of type Grouped that does not set the 'M' bit MAY be ignored even if one of its encapsulated AVPs sets the 'M' - bit.</c></p></item></taglist></p> + bit.</c></p></item></taglist> <p> This is a bit of a non-statement since if the AVP is unrecognized then its type is unknown. We therefore don't diff --git a/lib/diameter/include/diameter_gen.hrl b/lib/diameter/include/diameter_gen.hrl index 611ad796a9..5361510d69 100644 --- a/lib/diameter/include/diameter_gen.hrl +++ b/lib/diameter/include/diameter_gen.hrl @@ -424,7 +424,7 @@ d(true, _, Name, Avp, Acc) -> %% ... or not. Failures here won't be visible since they're a "normal" %% occurrence if the peer sends a faulty AVP that we need to respond -%% sensibly to. Log the occurence for traceability, but the peer will +%% sensibly to. 
Log the occurrence for traceability, but the peer will %% also receive info in the resulting answer message. d(false, Reason, Name, Avp, {Avps, Acc}) -> Stack = diameter_lib:get_stacktrace(), diff --git a/lib/diameter/src/base/diameter.erl b/lib/diameter/src/base/diameter.erl index e8f2f63f86..253f64133c 100644 --- a/lib/diameter/src/base/diameter.erl +++ b/lib/diameter/src/base/diameter.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% Copyright Ericsson AB 2010-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -377,6 +377,7 @@ call(SvcName, App, Message) -> | {capabilities, [capability()]} | {capabilities_cb, evaluable()} | {capx_timeout, 'Unsigned32'()} + | {capx_strictness, boolean()} | {disconnect_cb, evaluable()} | {dpr_timeout, 'Unsigned32'()} | {dpa_timeout, 'Unsigned32'()} diff --git a/lib/diameter/src/base/diameter_callback.erl b/lib/diameter/src/base/diameter_callback.erl index f479cb6612..0e445492b8 100644 --- a/lib/diameter/src/base/diameter_callback.erl +++ b/lib/diameter/src/base/diameter_callback.erl @@ -35,7 +35,7 @@ %% in a callback applied to the atom-valued callback name and argument %% list. For all callbacks not to this module, the 'extra' field is a %% list of additional arguments, following arguments supplied by -%% diameter but preceeding those of the diameter:evaluable() being +%% diameter but preceding those of the diameter:evaluable() being %% applied. %% %% For example, the following config to diameter:start_service/2, in diff --git a/lib/diameter/src/base/diameter_config.erl b/lib/diameter/src/base/diameter_config.erl index fdbbd412a1..e10804c931 100644 --- a/lib/diameter/src/base/diameter_config.erl +++ b/lib/diameter/src/base/diameter_config.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% Copyright Ericsson AB 2010-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -580,6 +580,9 @@ opt({K, Tmo}) K == dpa_timeout -> ?IS_UINT32(Tmo); +opt({capx_strictness, B}) -> + is_boolean(B); + opt({length_errors, T}) -> lists:member(T, [exit, handle, discard]); @@ -865,7 +868,7 @@ init_cb(List) -> V <- [proplists:get_value(F, List, D)]], #diameter_callback{} = list_to_tuple([diameter_callback | Values]). -%% Retreive and validate. +%% Retrieve and validate. get_opt(Key, List, Def, Other) -> init_opt(Key, get_opt(Key, List, Def), [Def|Other]). diff --git a/lib/diameter/src/base/diameter_peer_fsm.erl b/lib/diameter/src/base/diameter_peer_fsm.erl index 996e75a8d3..46d231da74 100644 --- a/lib/diameter/src/base/diameter_peer_fsm.erl +++ b/lib/diameter/src/base/diameter_peer_fsm.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% Copyright Ericsson AB 2010-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -128,6 +128,7 @@ %% outgoing DPR; boolean says whether or not %% the request was sent explicitly with %% diameter:call/4. + strict :: boolean(), length_errors :: exit | handle | discard, incoming_maxlen :: integer() | infinity}). 
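The hunks above add a capx_strictness transport option: it is listed among the documented transport options in diameter.erl, validated as a boolean in diameter_config.erl, and carried in the peer FSM state as the strict field (a false value relaxes the handling of unexpected messages during capabilities exchange, as the recv1 clauses further down show). A rough usage sketch, assuming a service named example_svc has already been started and using an illustrative peer address and port, neither of which comes from the patch:

%% Add a connecting TCP transport that disables strict capabilities
%% exchange by passing the new option alongside the usual transport
%% configuration.
{ok, _Ref} =
    diameter:add_transport(example_svc,
                           {connect, [{transport_module, diameter_tcp},
                                      {transport_config, [{raddr, {192,0,2,1}},
                                                          {rport, 3868}]},
                                      {capx_strictness, false}]}).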
@@ -233,6 +234,7 @@ i({Ack, WPid, {M, Ref} = T, Opts, {SvcOpts, Nodes, Dict0, Svc}}) -> proplists:get_value(dpa_timeout, Opts, ?DPA_TIMEOUT)}), Tmo = proplists:get_value(capx_timeout, Opts, ?CAPX_TIMEOUT), + Strictness = proplists:get_value(capx_strictness, Opts, true), OnLengthErr = proplists:get_value(length_errors, Opts, exit), {TPid, Addrs} = start_transport(T, Rest, Svc), @@ -246,6 +248,7 @@ i({Ack, WPid, {M, Ref} = T, Opts, {SvcOpts, Nodes, Dict0, Svc}}) -> mode = M, service = svc(Svc, Addrs), length_errors = OnLengthErr, + strict = Strictness, incoming_maxlen = Maxlen}. %% The transport returns its local ip addresses so that different %% transports on the same service can use different local addresses. @@ -356,7 +359,7 @@ handle_info(T, #state{} = State) -> %% Note that there's no guarantee that the service and transport %% capabilities are good enough to build a CER/CEA that can be -%% succesfully encoded. It's not checked at diameter:add_transport/2 +%% successfully encoded. It's not checked at diameter:add_transport/2 %% since this can be called before creating the service. %% terminate/2 @@ -454,6 +457,9 @@ transition({timeout, _}, _) -> %% Outgoing message. transition({send, Msg}, S) -> outgoing(Msg, S); +transition({send, Msg, Route}, S) -> + put_route(Route), + outgoing(Msg, S); %% Request for graceful shutdown at remove_transport, stop_service of %% application shutdown. @@ -483,8 +489,10 @@ transition({'DOWN', _, process, TPid, _}, = S) -> start_next(S); -%% Transport has died after connection timeout. -transition({'DOWN', _, process, _, _}, _) -> +%% Transport has died after connection timeout, or handler process has +%% died. +transition({'DOWN', _, process, Pid, _}, _) -> + erase_route(Pid), ok; %% State query. @@ -494,6 +502,40 @@ transition({state, Pid}, #state{state = S, transport = TPid}) -> %% Crash on anything unexpected. +%% put_route/1 +%% +%% Map identifiers in an outgoing request to be able to lookup the +%% handler process when the answer is received. + +put_route({Pid, Ref, Seqs}) -> + MRef = monitor(process, Pid), + put(Pid, Seqs), + put(Seqs, {Pid, Ref, MRef}). + +%% get_route/1 + +get_route(#diameter_packet{header = #diameter_header{is_request = false}} + = Pkt) -> + Seqs = diameter_codec:sequence_numbers(Pkt), + case erase(Seqs) of + {Pid, Ref, MRef} -> + demonitor(MRef), + erase(Pid), + {Pid, Ref, self()}; + undefined -> + false + end; + +get_route(_) -> + false. + +%% erase_route/1 + +erase_route(Pid) -> + erase(erase(Pid)). + +%% capx/1 + capx(recv_CER) -> 'CER'; capx({'Wait-CEA', _, _}) -> @@ -576,8 +618,7 @@ incoming({Msg, NPid}, S) -> T catch {?MODULE, Name, Pkt} -> - S#state.parent ! {recv, self(), Name, {Pkt, NPid}}, - rcv(Name, Pkt, S) + incoming(Name, Pkt, NPid, S) end; incoming(Msg, S) -> @@ -585,10 +626,15 @@ incoming(Msg, S) -> recv(Msg, S) catch {?MODULE, Name, Pkt} -> - S#state.parent ! {recv, self(), Name, Pkt}, - rcv(Name, Pkt, S) + incoming(Name, Pkt, false, S) end. +%% incoming/4 + +incoming(Name, Pkt, NPid, #state{parent = Pid} = S) -> + Pid ! {recv, self(), get_route(Pkt), Name, Pkt, NPid}, + rcv(Name, Pkt, S). + %% recv/2 recv(#diameter_packet{header = #diameter_header{} = Hdr} @@ -614,6 +660,17 @@ recv1(_, when M < size(Bin) -> invalid(false, incoming_maxlen_exceeded, {size(Bin), H}); +%% Ignore anything but an expected CER/CEA if so configured. This is +%% non-standard behaviour. 
+recv1(Name, _, #state{state = {'Wait-CEA', _, _}, + strict = false}) + when Name /= 'CEA' -> + ok; +recv1(Name, _, #state{state = recv_CER, + strict = false}) + when Name /= 'CER' -> + ok; + %% Incoming request after outgoing DPR: discard. Don't discard DPR, so %% both ends don't do so when sending simultaneously. recv1(Name, diff --git a/lib/diameter/src/base/diameter_service.erl b/lib/diameter/src/base/diameter_service.erl index ccf68f4d93..e4f77e3a24 100644 --- a/lib/diameter/src/base/diameter_service.erl +++ b/lib/diameter/src/base/diameter_service.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% Copyright Ericsson AB 2010-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -1858,13 +1858,6 @@ eq(Any, Id, PeerId) -> %% OctetString() can be specified as an iolist() so test for string %% rather then term equality. -%% transports/1 - -transports(#state{watchdogT = WatchdogT}) -> - ets:select(WatchdogT, [{#watchdog{peer = '$1', _ = '_'}, - [{'is_pid', '$1'}], - ['$1']}]). - %% --------------------------------------------------------------------------- %% # service_info/2 %% --------------------------------------------------------------------------- @@ -1887,7 +1880,6 @@ transports(#state{watchdogT = WatchdogT}) -> -define(ALL_INFO, [capabilities, applications, transport, - pending, options]). %% The rest. @@ -1981,7 +1973,6 @@ complete_info(Item, #state{service = Svc} = S) -> applications -> info_apps(S); transport -> info_transport(S); options -> info_options(S); - pending -> info_pending(S); keys -> ?ALL_INFO ++ ?CAP_INFO ++ ?OTHER_INFO; all -> service_info(?ALL_INFO, S); statistics -> info_stats(S); @@ -2189,13 +2180,6 @@ info_apps(#state{service = #diameter_service{applications = Apps}}) -> mk_app(#diameter_app{} = A) -> lists:zip(record_info(fields, diameter_app), tl(tuple_to_list(A))). -%% info_pending/1 -%% -%% One entry for each outgoing request whose answer is outstanding. - -info_pending(#state{} = S) -> - diameter_traffic:pending(transports(S)). - %% info_info/1 %% %% Extract process_info from connections info. diff --git a/lib/diameter/src/base/diameter_sup.erl b/lib/diameter/src/base/diameter_sup.erl index 482289cb9a..01c51f0856 100644 --- a/lib/diameter/src/base/diameter_sup.erl +++ b/lib/diameter/src/base/diameter_sup.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% Copyright Ericsson AB 2010-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -42,7 +42,7 @@ -define(TABLES, [{diameter_sequence, [set]}, {diameter_service, [set, {keypos, 3}]}, - {diameter_request, [bag]}, + {diameter_request, [set]}, {diameter_config, [bag, {keypos, 2}]}]). %% start_link/0 diff --git a/lib/diameter/src/base/diameter_traffic.erl b/lib/diameter/src/base/diameter_traffic.erl index d93a3e71e3..bc1ccf4feb 100644 --- a/lib/diameter/src/base/diameter_traffic.erl +++ b/lib/diameter/src/base/diameter_traffic.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2013-2016. All Rights Reserved. +%% Copyright Ericsson AB 2013-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -30,7 +30,7 @@ -export([send_request/4]). 
%% towards diameter_watchdog --export([receive_message/4]). +-export([receive_message/6]). %% towards diameter_peer_fsm and diameter_watchdog -export([incr/4, @@ -40,11 +40,11 @@ %% towards diameter_service -export([make_recvdata/1, peer_up/1, - peer_down/1, - pending/1]). + peer_down/1]). -%% towards ?MODULE --export([send/1]). %% send from remote node +%% internal +-export([send/1, %% send from remote node + init/1]). %% monitor process start -include_lib("diameter/include/diameter.hrl"). -include("diameter_internal.hrl"). @@ -57,14 +57,12 @@ -define(DEFAULT_TIMEOUT, 5000). %% for outgoing requests -define(DEFAULT_SPAWN_OPTS, []). -%% Table containing outgoing requests for which a reply has yet to be -%% received. +%% Table containing outgoing entries that live and die with +%% peer_up/down. The name is historic, since the table used to contain +%% information about outgoing requests for which an answer has yet to +%% be received. -define(REQUEST_TABLE, diameter_request). -%% Workaround for dialyzer's lack of understanding of match specs. --type match(T) - :: T | '_' | '$1' | '$2' | '$3' | '$4'. - %% Record diameter:call/4 options are parsed into. -record(options, {filter = none :: diameter:peer_filter(), @@ -72,7 +70,7 @@ timeout = ?DEFAULT_TIMEOUT :: 0..16#FFFFFFFF, detach = false :: boolean()}). -%% Term passed back to receive_message/4 with every incoming message. +%% Term passed back to receive_message/6 with every incoming message. -record(recvdata, {peerT :: ets:tid(), service_name :: diameter:service_name(), @@ -87,12 +85,12 @@ %% Record stored in diameter_request for each outgoing request. -record(request, - {ref :: match(reference()), %% used to receive answer - caller :: match(pid()), %% calling process - handler :: match(pid()), %% request process - transport :: match(pid()), %% peer process - caps :: match(#diameter_caps{}), %% of connection - packet :: match(#diameter_packet{})}). %% of request + {ref :: reference(), %% used to receive answer + caller :: pid() | undefined, %% calling process + handler :: pid(), %% request process + transport :: pid() | undefined, %% peer process + caps :: #diameter_caps{} | undefined, %% of connection + packet :: #diameter_packet{} | undefined}). %% of request %% --------------------------------------------------------------------------- %% # make_recvdata/1 @@ -113,26 +111,27 @@ make_recvdata([SvcName, PeerT, Apps, SvcOpts | _]) -> %% peer_up/1 %% --------------------------------------------------------------------------- -%% Insert an element that is used to detect whether or not there has -%% been a failover when inserting an outgoing request. +%% Start a process that dies with peer_down/1, on which request +%% processes can monitor. There is no other process that dies with +%% peer_down since failover doesn't imply the loss of transport in the +%% case of a watchdog transition into state SUSPECT. peer_up(TPid) -> - ets:insert(?REQUEST_TABLE, {TPid}). + proc_lib:start(?MODULE, init, [TPid]). + +init(TPid) -> + ets:insert(?REQUEST_TABLE, {TPid, self()}), + proc_lib:init_ack(self()), + proc_lib:hibernate(erlang, exit, [{shutdown, TPid}]). %% --------------------------------------------------------------------------- %% peer_down/1 %% --------------------------------------------------------------------------- peer_down(TPid) -> - ets:delete_object(?REQUEST_TABLE, {TPid}), - lists:foreach(fun failover/1, ets:lookup(?REQUEST_TABLE, TPid)). 
-%% Note that a request process can store its request after failover -%% notifications are sent here: insert_request/2 sends the notification -%% in that case. - -%% failover/1 - -failover({_TPid, {Pid, TRef}}) -> - Pid ! {failover, TRef}. + [{_, Pid}] = ets:lookup(?REQUEST_TABLE, TPid), + ets:delete(?REQUEST_TABLE, TPid), + Pid ! ok, %% make it die + Pid. %% --------------------------------------------------------------------------- %% incr/4 @@ -207,54 +206,25 @@ incr_rc(Dir, Pkt, TPid, Dict0) -> incr_rc(Dir, Pkt, TPid, {Dict0, Dict0, Dict0}). %% --------------------------------------------------------------------------- -%% pending/1 -%% --------------------------------------------------------------------------- - -pending(TPids) -> - MatchSpec = [{{'$1', - #request{caller = '$2', - handler = '$3', - transport = '$4', - _ = '_'}, - '_'}, - [?ORCOND([{'==', T, '$4'} || T <- TPids])], - [{{'$1', [{{caller, '$2'}}, - {{handler, '$3'}}, - {{transport, '$4'}}]}}]}], - - try - ets:select(?REQUEST_TABLE, MatchSpec) - catch - error: badarg -> [] %% service has gone down - end. - -%% --------------------------------------------------------------------------- -%% # receive_message/4 +%% # receive_message/6 %% %% Handle an incoming Diameter message. %% --------------------------------------------------------------------------- -%% Handle an incoming Diameter message in the watchdog process. This -%% used to come through the service process but this avoids that -%% becoming a bottleneck. +%% Handle an incoming Diameter message in the watchdog process. -receive_message(TPid, {Pkt, NPid}, Dict0, RecvData) -> - NPid ! {diameter, incoming(TPid, Pkt, Dict0, RecvData)}; +receive_message(TPid, Route, Pkt, false, Dict0, RecvData) -> + incoming(TPid, Route, Pkt, Dict0, RecvData); -receive_message(TPid, Pkt, Dict0, RecvData) -> - incoming(TPid, Pkt, Dict0, RecvData). +receive_message(TPid, Route, Pkt, NPid, Dict0, RecvData) -> + NPid ! {diameter, incoming(TPid, Route, Pkt, Dict0, RecvData)}. %% incoming/4 -incoming(TPid, Pkt, Dict0, RecvData) +incoming(TPid, Route, Pkt, Dict0, RecvData) when is_pid(TPid) -> #diameter_packet{header = #diameter_header{is_request = R}} = Pkt, - recv(R, - (not R) andalso lookup_request(Pkt, TPid), - TPid, - Pkt, - Dict0, - RecvData). + recv(R, Route, TPid, Pkt, Dict0, RecvData). %% recv/6 @@ -269,8 +239,8 @@ recv(true, false, TPid, Pkt, Dict0, T) -> end; %% ... answer to known request ... -recv(false, #request{ref = Ref, handler = Pid} = Req, _, Pkt, Dict0, _) -> - Pid ! {answer, Ref, Req, Dict0, Pkt}, +recv(false, {Pid, Ref, TPid}, _, Pkt, Dict0, _) -> + Pid ! {answer, Ref, TPid, Dict0, Pkt}, {answer, Pid}; %% Note that failover could have happened prior to this message being @@ -1503,32 +1473,39 @@ send_R(Pkt0, packet = Pkt0}, incr(send, Pkt, TPid, AppDict), - TRef = send_request(TPid, Pkt, Req, SvcName, Timeout), + {TRef, MRef} = zend_requezt(TPid, Pkt, Req, SvcName, Timeout), Pid ! Ref, %% tell caller a send has been attempted handle_answer(SvcName, App, - recv_A(Timeout, SvcName, App, Opts, {TRef, Req})). + recv_A(Timeout, SvcName, App, Opts, {TRef, MRef, Req})). %% recv_A/5 -recv_A(Timeout, SvcName, App, Opts, {TRef, #request{ref = Ref} = Req}) -> +recv_A(Timeout, SvcName, App, Opts, {TRef, MRef, #request{ref = Ref} = Req}) -> %% Matching on TRef below ensures we ignore messages that pertain %% to a previous transport prior to failover. 
The answer message - %% includes the #request{} since it's not necessarily Req; that - %% is, from the last peer to which we've transmitted. + %% includes the pid of the transport on which it was received, + %% which may not be the last peer to which we've transmitted. receive - {answer = A, Ref, Rq, Dict0, Pkt} -> %% Answer from peer - {A, Rq, Dict0, Pkt}; + {answer = A, Ref, TPid, Dict0, Pkt} -> %% Answer from peer + {A, #request{} = erase(TPid), Dict0, Pkt}; {timeout = Reason, TRef, _} -> %% No timely reply {error, Req, Reason}; - {failover, TRef} -> %% Service says peer has gone down - retransmit(pick_peer(SvcName, App, Req, Opts), - Req, - Opts, - SvcName, - Timeout) + {'DOWN', MRef, process, _, _} when false /= MRef -> %% local peer_down + failover(SvcName, App, Req, Opts, Timeout); + {failover, TRef} -> %% local or remote peer_down + failover(SvcName, App, Req, Opts, Timeout) end. +%% failover/5 + +failover(SvcName, App, Req, Opts, Timeout) -> + retransmit(pick_peer(SvcName, App, Req, Opts), + Req, + Opts, + SvcName, + Timeout). + %% handle_answer/3 handle_answer(SvcName, App, {error, Req, Reason}) -> @@ -1705,44 +1682,63 @@ encode(DictT, TPid, #diameter_packet{bin = undefined} = Pkt) -> encode(_, _, #diameter_packet{} = Pkt) -> Pkt. +%% zend_requezt/5 +%% +%% Strip potentially large record fields that aren't used by the +%% processes the records can be send to, possibly on a remote node. + +zend_requezt(TPid, Pkt, Req, SvcName, Timeout) -> + put(TPid, Req), + send_request(TPid, z(Pkt), Req, SvcName, Timeout). + %% send_request/5 send_request(TPid, #diameter_packet{bin = Bin} = Pkt, Req, _SvcName, Timeout) when node() == node(TPid) -> Seqs = diameter_codec:sequence_numbers(Bin), TRef = erlang:start_timer(Timeout, self(), TPid), - Entry = {Seqs, #request{handler = Pid} = Req, TRef}, - - %% Ensure that request table is cleaned even if the process is - %% killed. - spawn(fun() -> diameter_lib:wait([Pid]), delete_request(Entry) end), - - insert_request(Entry), - send(TPid, Pkt), - TRef; + send(TPid, Pkt, _Route = {self(), Req#request.ref, Seqs}), + {TRef, _MRef = peer_monitor(TPid, TRef)}; %% Send using a remote transport: spawn a process on the remote node %% to relay the answer. send_request(TPid, #diameter_packet{} = Pkt, Req, SvcName, Timeout) -> TRef = erlang:start_timer(Timeout, self(), TPid), - T = {TPid, Pkt, Req, SvcName, Timeout, TRef}, + T = {TPid, Pkt, z(Req), SvcName, Timeout, TRef}, spawn(node(TPid), ?MODULE, send, [T]), - TRef. + {TRef, false}. + +%% z/1 +%% +%% Avoid sending potentially large terms unnecessarily. The records +%% themselves are retained since they're sent between nodes in send/1 +%% and changing what's sent causes upgrade issues. + +z(#request{ref = Ref, handler = Pid}) -> + #request{ref = Ref, + handler = Pid}; + +z(#diameter_packet{header = H, bin = Bin, transport_data = T}) -> + #diameter_packet{header = H, + bin = Bin, + transport_data = T}. %% send/1 send({TPid, Pkt, #request{handler = Pid} = Req0, SvcName, Timeout, TRef}) -> Req = Req0#request{handler = self()}, - recv(TPid, Pid, TRef, send_request(TPid, Pkt, Req, SvcName, Timeout)). + recv(TPid, Pid, TRef, zend_requezt(TPid, Pkt, Req, SvcName, Timeout)). %% recv/4 %% %% Relay an answer from a remote node. -recv(TPid, Pid, TRef, LocalTRef) -> +recv(TPid, Pid, TRef, {LocalTRef, MRef}) -> receive {answer, _, _, _, _} = A -> Pid ! A; + {'DOWN', MRef, process, _, _} -> + Pid ! {failover, TRef}; {failover = T, LocalTRef} -> Pid ! 
{T, TRef}; T -> @@ -1751,14 +1747,13 @@ recv(TPid, Pid, TRef, LocalTRef) -> %% send/2 -send(Pid, Pkt) -> %% Strip potentially large message terms. - #diameter_packet{header = H, - bin = Bin, - transport_data = T} - = Pkt, - Pid ! {send, #diameter_packet{header = H, - bin = Bin, - transport_data = T}}. +send(Pid, Pkt) -> + Pid ! {send, Pkt}. + +%% send/3 + +send(Pid, Pkt, Route) -> + Pid ! {send, Pkt, Route}. %% retransmit/4 @@ -1768,8 +1763,8 @@ retransmit({TPid, Caps, App} = Req, SvcName, Timeout) -> - have_request(Pkt0, TPid) %% Don't failover to a peer we've - andalso ?THROW(timeout), %% already sent to. + undefined == get(TPid) %% Don't failover to a peer we've + orelse ?THROW(timeout), %% already sent to. Pkt = make_retransmit_packet(Pkt0), @@ -1822,56 +1817,20 @@ resend_request(Pkt0, ?LOG(retransmission, Pkt#diameter_packet.header), incr(TPid, {msg_id(Pkt, AppDict), send, retransmission}), - TRef = send_request(TPid, Pkt, Req, SvcName, Tmo), - {TRef, Req}. - -%% insert_request/1 - -insert_request({_Seqs, #request{transport = TPid}, TRef} = T) -> - ets:insert(?REQUEST_TABLE, [T, {TPid, {self(), TRef}}]), - is_peer_up(TPid) - orelse (self() ! {failover, TRef}). %% failover/1 may have missed - -%% is_peer_up/1 -%% -%% Is the entry written by peer_up/1 and deleted by peer_down/1 still -%% in the request table? + {TRef, MRef} = zend_requezt(TPid, Pkt, Req, SvcName, Tmo), + {TRef, MRef, Req}. -is_peer_up(TPid) -> - Spec = [{{TPid}, [], ['$_']}], - '$end_of_table' /= ets:select(?REQUEST_TABLE, Spec, 1). +%% peer_monitor/2 -%% lookup_request/2 -%% -%% Note the match on both the key and transport pid. The latter is -%% necessary since the same Hop-by-Hop and End-to-End identifiers are -%% reused in the case of retransmission. - -lookup_request(Msg, TPid) -> - Seqs = diameter_codec:sequence_numbers(Msg), - Spec = [{{Seqs, #request{transport = TPid, _ = '_'}, '_'}, - [], - ['$_']}], - case ets:select(?REQUEST_TABLE, Spec) of - [{_, Req, _}] -> - Req; - [] -> +peer_monitor(TPid, TRef) -> + case ets:lookup(?REQUEST_TABLE, TPid) of %% at peer_up/1 + [{_, MPid}] -> + monitor(process, MPid); + [] -> %% transport has gone down + self() ! {failover, TRef}, false end. -%% delete_request/1 - -delete_request({_Seqs, #request{handler = Pid, transport = TPid}, TRef} = T) -> - Spec = [{R, [], [true]} || R <- [T, {TPid, {Pid, TRef}}]], - ets:select_delete(?REQUEST_TABLE, Spec). - -%% have_request/2 - -have_request(Pkt, TPid) -> - Seqs = diameter_codec:sequence_numbers(Pkt), - Pat = {Seqs, #request{transport = TPid, _ = '_'}, '_'}, - '$end_of_table' /= ets:select(?REQUEST_TABLE, [{Pat, [], ['$_']}], 1). - %% get_destination/2 get_destination(Dict, Msg) -> diff --git a/lib/diameter/src/base/diameter_watchdog.erl b/lib/diameter/src/base/diameter_watchdog.erl index 2ba60a65fb..f28b8f2910 100644 --- a/lib/diameter/src/base/diameter_watchdog.erl +++ b/lib/diameter/src/base/diameter_watchdog.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% Copyright Ericsson AB 2010-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -283,7 +283,7 @@ event(Msg, ?LOG(transition, {From, To}). data(Msg, TPid, reopen, okay) -> - {recv, TPid, 'DWA', _Pkt} = Msg, %% assert + {recv, TPid, false, 'DWA', _Pkt, _NPid} = Msg, %% assert {TPid, T} = eraser(open), [T]; @@ -447,12 +447,14 @@ transition({'DOWN', _, process, TPid, _Reason} = D, end; %% Incoming message. 
-transition({recv, TPid, Name, PktT}, #watchdog{transport = TPid} = S) -> +transition({recv, TPid, Route, Name, Pkt, NPid}, + #watchdog{transport = TPid} + = S) -> try - incoming(Name, PktT, S) + incoming(Name, Pkt, NPid, S) catch #watchdog{dictionary = Dict0, receive_data = T} = NS -> - diameter_traffic:receive_message(TPid, PktT, Dict0, T), + diameter_traffic:receive_message(TPid, Route, Pkt, NPid, Dict0, T), NS end; @@ -582,15 +584,17 @@ send_watchdog(#watchdog{pending = false, %% Don't count encode errors since we don't expect any on DWR/DWA. -%% incoming/3 +%% incoming/4 -incoming(Name, {Pkt, NPid}, S) -> - NS = recv(Name, Pkt, S), - NPid ! {diameter, discard}, - NS; +incoming(Name, Pkt, false, S) -> + recv(Name, Pkt, S); -incoming(Name, Pkt, S) -> - recv(Name, Pkt, S). +incoming(Name, Pkt, NPid, S) -> + try + recv(Name, Pkt, S) + after + NPid ! {diameter, discard} + end. %% recv/3 diff --git a/lib/diameter/src/compiler/diameter_codegen.erl b/lib/diameter/src/compiler/diameter_codegen.erl index 864d5f0691..928ae37e7f 100644 --- a/lib/diameter/src/compiler/diameter_codegen.erl +++ b/lib/diameter/src/compiler/diameter_codegen.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% Copyright Ericsson AB 2010-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -184,7 +184,7 @@ erl_forms(Mod, ParseD) -> f_enumerated_avp(ParseD), f_empty_value(ParseD), f_dict(ParseD), - {eof, erl_anno:new(?LINE)}]], + {eof, ?LINE}]], lists:append(Forms). diff --git a/lib/diameter/src/diameter.appup.src b/lib/diameter/src/diameter.appup.src index b1b8e38d39..eb5a5a44f3 100644 --- a/lib/diameter/src/diameter.appup.src +++ b/lib/diameter/src/diameter.appup.src @@ -2,7 +2,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% Copyright Ericsson AB 2010-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -50,10 +50,8 @@ {"1.11", [{restart_application, diameter}]}, %% 18.1 {"1.11.1", [{restart_application, diameter}]}, %% 18.2 {"1.11.2", [{restart_application, diameter}]}, %% 18.3 - {"1.12", [{load_module, diameter_lib}, %% 19.0 - {load_module, diameter_traffic}, - {load_module, diameter_tcp}, - {load_module, diameter_sctp}]} + {"1.12", [{restart_application, diameter}]}, %% 19.0 + {"1.12.1", [{restart_application, diameter}]} %% 19.1 ], [ {"0.9", [{restart_application, diameter}]}, @@ -85,9 +83,7 @@ {"1.11", [{restart_application, diameter}]}, {"1.11.1", [{restart_application, diameter}]}, {"1.11.2", [{restart_application, diameter}]}, - {"1.12", [{load_module, diameter_sctp}, - {load_module, diameter_tcp}, - {load_module, diameter_traffic}, - {load_module, diameter_lib}]} + {"1.12", [{restart_application, diameter}]}, + {"1.12.1", [{restart_application, diameter}]} ] }. diff --git a/lib/diameter/src/info/diameter_info.erl b/lib/diameter/src/info/diameter_info.erl index 59a3b94ee4..2a27600346 100644 --- a/lib/diameter/src/info/diameter_info.erl +++ b/lib/diameter/src/info/diameter_info.erl @@ -195,7 +195,7 @@ format(Tables, SFun, CFun) %%% %%% Description: Pretty-print records in a named tables as collected %%% from local and remote nodes. Each table listing is -%%% preceeded by a banner. +%%% preceded by a banner. 
%%% ---------------------------------------------------------- format(Local, Remote, SFun) -> diff --git a/lib/diameter/src/transport/diameter_sctp.erl b/lib/diameter/src/transport/diameter_sctp.erl index f48e4347ee..ad9f4b0d80 100644 --- a/lib/diameter/src/transport/diameter_sctp.erl +++ b/lib/diameter/src/transport/diameter_sctp.erl @@ -402,7 +402,7 @@ handle_info(T, #transport{} = S) -> handle_info(T, #listener{} = S) -> {noreply, #listener{} = l(T,S)}. -%% Prior to the possiblity of setting pool_size on in transport +%% Prior to the possibility of setting pool_size on in transport %% configuration, a new accepting transport was only started following %% the death of a predecessor, so that there was only at most one %% previously started transport process waiting for an association. diff --git a/lib/diameter/test/diameter_pool_SUITE.erl b/lib/diameter/test/diameter_pool_SUITE.erl index eadb354a1d..383fa0a031 100644 --- a/lib/diameter/test/diameter_pool_SUITE.erl +++ b/lib/diameter/test/diameter_pool_SUITE.erl @@ -115,7 +115,7 @@ connect(ClientProt, ServerProt) -> %% 'up' events. (Although it's likely.) sleep(), {9,5} = count("server", LRef, accept), %% 5 connections + 4 accepting - %% Ensure ther are still the expected number of accepting transports + %% Ensure there are still the expected number of accepting transports %% after stopping the client service. ok = diameter:stop_service("client"), sleep(), diff --git a/lib/diameter/vsn.mk b/lib/diameter/vsn.mk index 23219950bb..94d9d72a48 100644 --- a/lib/diameter/vsn.mk +++ b/lib/diameter/vsn.mk @@ -1,6 +1,6 @@ # %CopyrightBegin% # -# Copyright Ericsson AB 2010-2016. All Rights Reserved. +# Copyright Ericsson AB 2010-2017. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -17,5 +17,5 @@ # %CopyrightEnd% APPLICATION = diameter -DIAMETER_VSN = 1.12.1 +DIAMETER_VSN = 1.12.2 APP_VSN = $(APPLICATION)-$(DIAMETER_VSN)$(PRE_VSN) diff --git a/lib/edoc/src/edoc_layout.erl b/lib/edoc/src/edoc_layout.erl index 9407ae1321..5ef210980c 100644 --- a/lib/edoc/src/edoc_layout.erl +++ b/lib/edoc/src/edoc_layout.erl @@ -214,7 +214,7 @@ layout_module(#xmlElement{name = module, content = Es}=E, Opts) -> ++ functions(SortedFs, Opts) ++ [hr, ?NL] ++ navigation("bottom") - ++ timestamp()), + ++ footer()), Encoding = get_attrval(encoding, E), xhtml(Title, stylesheet(Opts), Body, Encoding). @@ -228,12 +228,8 @@ module_params(Es) -> [element(1, First) | [ {[", ",A]} || {A, _D} <- Rest]] end. -timestamp() -> - [?NL, {p, [{i, [io_lib:fwrite("Generated by EDoc, ~s, ~s.", - [edoc_lib:datestr(date()), - edoc_lib:timestr(time())]) - ]}]}, - ?NL]. +footer() -> + [?NL, {p, [{i, ["Generated by EDoc"]}]}, ?NL]. stylesheet(Opts) -> case Opts#opts.stylesheet of @@ -1039,7 +1035,7 @@ overview(E=#xmlElement{name = overview, content = Es}, Options) -> ++ FullDesc ++ [?NL, hr] ++ navigation("bottom") - ++ timestamp()), + ++ footer()), Encoding = get_attrval(encoding, E), XML = xhtml(Title, stylesheet(Opts), Body, Encoding), xmerl:export_simple(XML, ?HTML_EXPORT, []). diff --git a/lib/edoc/src/edoc_tags.erl b/lib/edoc/src/edoc_tags.erl index 7e59f373b2..da078de0b9 100644 --- a/lib/edoc/src/edoc_tags.erl +++ b/lib/edoc/src/edoc_tags.erl @@ -227,7 +227,7 @@ filter_tags([#tag{name = N, line = L} = T | Ts], Tags, Where, Ts1) -> filter_tags([], _, _, Ts) -> lists:reverse(Ts). -%% Check occurrances of tags. +%% Check occurrences of tags. 
check_tags(Ts, Allow, Single, Where) -> check_tags(Ts, Allow, Single, Where, false, sets:new()). diff --git a/lib/edoc/test/edoc_SUITE.erl b/lib/edoc/test/edoc_SUITE.erl index 00d7550bed..4d846ad63d 100644 --- a/lib/edoc/test/edoc_SUITE.erl +++ b/lib/edoc/test/edoc_SUITE.erl @@ -69,7 +69,7 @@ build_std(Config) when is_list(Config) -> {def, {vsn,"TEST"}}, {dir, PrivDir}]), - ok = edoc:application(xmerl, [{dir, PrivDir}]), + ok = edoc:application(xmerl, [{preprocess,true},{dir, PrivDir}]), ok. build_map_module(Config) when is_list(Config) -> diff --git a/lib/eldap/test/README b/lib/eldap/test/README index ec774c1ae3..af1bf6a082 100644 --- a/lib/eldap/test/README +++ b/lib/eldap/test/README @@ -16,7 +16,7 @@ To start slapd: This will however not work, since slapd is guarded by apparmor that checks that slapd does not access other than allowed files... -To make a local extension of alowed operations: +To make a local extension of allowed operations: sudo emacs /etc/apparmor.d/local/usr.sbin.slapd and, after the change (yes, at least on Ubuntu it is right to edit ../local/.. but run with another file): diff --git a/lib/erl_interface/doc/src/erl_call.xml b/lib/erl_interface/doc/src/erl_call.xml index f1e52b1889..426f6b88ca 100644 --- a/lib/erl_interface/doc/src/erl_call.xml +++ b/lib/erl_interface/doc/src/erl_call.xml @@ -193,7 +193,7 @@ erl_call -s -a 'erlang halt' -n madonna <p>To apply with many arguments:</p> <code type="none"><![CDATA[ -erl_call -s -a 'lists map [{math,sqrt},[1,4,9,16,25]]' -n madonna +erl_call -s -a 'lists seq [1,10]' -n madonna ]]></code> <p>To evaluate some expressions diff --git a/lib/erl_interface/doc/src/notes.xml b/lib/erl_interface/doc/src/notes.xml index 69ba3cddb8..b5d8def655 100644 --- a/lib/erl_interface/doc/src/notes.xml +++ b/lib/erl_interface/doc/src/notes.xml @@ -31,6 +31,21 @@ </header> <p>This document describes the changes made to the Erl_interface application.</p> +<section><title>Erl_Interface 3.9.3</title> + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + Minor documentation update</p> + <p> + Own Id: OTP-14233 Aux Id: PR-1343 </p> + </item> + </list> + </section> + +</section> + <section><title>Erl_Interface 3.9.2</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/erl_interface/src/README b/lib/erl_interface/src/README index feee2e48e8..7591615f78 100644 --- a/lib/erl_interface/src/README +++ b/lib/erl_interface/src/README @@ -11,7 +11,7 @@ Also, assertions are enabled, meaning that the code will be a little bit slower. In the final release, there will be two alternative libraries shipped, with and without assertions. -If an assertion triggers, there will be a printout similiar to this +If an assertion triggers, there will be a printout similar to this one: Assertion failed: ep != NULL in erl_eterm.c, line 694 diff --git a/lib/erl_interface/src/legacy/erl_marshal.c b/lib/erl_interface/src/legacy/erl_marshal.c index 2bdf5f2134..527ae0ef8f 100644 --- a/lib/erl_interface/src/legacy/erl_marshal.c +++ b/lib/erl_interface/src/legacy/erl_marshal.c @@ -1626,7 +1626,7 @@ static int cmp_refs(unsigned char **e1, unsigned char **e2) if (cre1 != cre2) return cre1 < cre2 ? -1 : 1; - /* ... and then finaly ids. */ + /* ... and then finally ids. */ if (n1 != n2) { unsigned char zero[] = {0, 0, 0, 0}; if (n1 > n2) @@ -1791,7 +1791,7 @@ static int cmp_exe2(unsigned char **e1, unsigned char **e2) if (port1.creation < port2.creation) return -1; else if (port1.creation > port2.creation) return 1; - /* ... 
and then finaly ids. */ + /* ... and then finally ids. */ if (port1.id < port2.id) return -1; else if (port1.id > port2.id) return 1; diff --git a/lib/erl_interface/src/misc/ei_locking.c b/lib/erl_interface/src/misc/ei_locking.c index 85b2a5fd8b..a0e00b7871 100644 --- a/lib/erl_interface/src/misc/ei_locking.c +++ b/lib/erl_interface/src/misc/ei_locking.c @@ -76,8 +76,8 @@ ei_mutex_t *ei_mutex_create(void) return l; } -/* - * Free a mutex and the structure asociated with it. +/* + * Free a mutex and the structure associated with it. * * This function attempts to obtain the mutex before releasing it; * If nblock == 1 and the mutex was unavailable, the function will diff --git a/lib/erl_interface/test/ei_decode_SUITE.erl b/lib/erl_interface/test/ei_decode_SUITE.erl index 1495a0d5d9..10e90685c8 100644 --- a/lib/erl_interface/test/ei_decode_SUITE.erl +++ b/lib/erl_interface/test/ei_decode_SUITE.erl @@ -99,7 +99,7 @@ test_ei_decode_ulonglong(Config) when is_list(Config) -> %% ######################################################################## %% -%% A "character" for us is an 8 bit integer, alwasy positive, i.e. +%% A "character" for us is an 8 bit integer, always positive, i.e. %% it is unsigned. %% FIXME maybe the API should change to use "unsigned char" to be clear?! diff --git a/lib/erl_interface/test/erl_eterm_SUITE.erl b/lib/erl_interface/test/erl_eterm_SUITE.erl index 0e51a50c19..7fd46694b8 100644 --- a/lib/erl_interface/test/erl_eterm_SUITE.erl +++ b/lib/erl_interface/test/erl_eterm_SUITE.erl @@ -31,7 +31,7 @@ %%% 2. Constructing terms (the erl_mk_xxx() functions and erl_copy_term()). %%% 3. Extracting & info functions (erl_hd(), erl_length() etc). %%% 4. I/O list functions. -%%% 5. Miscellanous functions. +%%% 5. Miscellaneous functions. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -export([all/0, suite/0, diff --git a/lib/erl_interface/vsn.mk b/lib/erl_interface/vsn.mk index c7981ed3a5..563694a0c1 100644 --- a/lib/erl_interface/vsn.mk +++ b/lib/erl_interface/vsn.mk @@ -1,2 +1,2 @@ -EI_VSN = 3.9.2 +EI_VSN = 3.9.3 ERL_INTERFACE_VSN = $(EI_VSN) diff --git a/lib/eunit/doc/overview.edoc b/lib/eunit/doc/overview.edoc index 3a46e991cb..dc9f858812 100644 --- a/lib/eunit/doc/overview.edoc +++ b/lib/eunit/doc/overview.edoc @@ -578,7 +578,7 @@ results for equality, if testing is enabled. If the values are not equal, an informative exception will be generated; see the `assert' macro for further details. -`assertEqual' is more suitable than than `assertMatch' when the +`assertEqual' is more suitable than `assertMatch' when the left-hand side is a computed value rather than a simple pattern, and gives more details than `?assert(Expect =:= Expr)'. @@ -994,7 +994,7 @@ specified node. `local' means that the current process will handle both setup/teardown and running the tests - the drawback is that if a test times out so that the process is killed, the <em>cleanup will not be performed</em>; hence, avoid this for persistent fixtures such as file -operations. In general, 'local' should only be used when: +operations. 
In general, `local' should only be used when: <ul> <li>the setup/teardown needs to be executed by the process that will run the tests;</li> diff --git a/lib/eunit/doc/src/notes.xml b/lib/eunit/doc/src/notes.xml index 8509f44ffc..d7ec2108e9 100644 --- a/lib/eunit/doc/src/notes.xml +++ b/lib/eunit/doc/src/notes.xml @@ -498,7 +498,7 @@ <list> <item> <p> - Miscellanous updates.</p> + Miscellaneous updates.</p> <p> Own Id: OTP-8038</p> </item> diff --git a/lib/eunit/src/eunit.erl b/lib/eunit/src/eunit.erl index 2c832a7f7a..1ace85ffde 100644 --- a/lib/eunit/src/eunit.erl +++ b/lib/eunit/src/eunit.erl @@ -256,7 +256,7 @@ all_options(Opts) -> false -> Opts; S -> {ok, Ts, _} = erl_scan:string(S), - {ok, V} = erl_parse:parse_term(Ts ++ [{dot,1}]), + {ok, V} = erl_parse:parse_term(Ts ++ [{dot,erl_anno:new(1)}]), if is_list(V) -> Opts ++ V; true -> Opts ++ [V] end diff --git a/lib/hipe/amd64/Makefile b/lib/hipe/amd64/Makefile index 617f6749ac..d0da8cdff6 100644 --- a/lib/hipe/amd64/Makefile +++ b/lib/hipe/amd64/Makefile @@ -128,6 +128,7 @@ $(EBIN)/hipe_amd64_ra_postconditions.beam: ../main/hipe.hrl ../x86/hipe_x86.hrl $(EBIN)/hipe_amd64_ra_sse2_postconditions.beam: ../main/hipe.hrl $(EBIN)/hipe_amd64_registers.beam: ../rtl/hipe_literals.hrl $(EBIN)/hipe_amd64_spill_restore.beam: ../main/hipe.hrl ../x86/hipe_x86.hrl ../flow/cfg.hrl ../x86/hipe_x86_spill_restore.erl +$(EBIN)/hipe_amd64_subst.beam: ../x86/hipe_x86_subst.erl $(EBIN)/hipe_amd64_x87.beam: ../x86/hipe_x86_x87.erl $(EBIN)/hipe_amd64_sse2.beam: ../main/hipe.hrl ../x86/hipe_x86.hrl $(EBIN)/hipe_rtl_to_amd64.beam: ../x86/hipe_rtl_to_x86.erl ../rtl/hipe_rtl.hrl diff --git a/lib/hipe/amd64/hipe_amd64_encode.erl b/lib/hipe/amd64/hipe_amd64_encode.erl index f8cc0c7d83..bda2824ffc 100644 --- a/lib/hipe/amd64/hipe_amd64_encode.erl +++ b/lib/hipe/amd64/hipe_amd64_encode.erl @@ -1316,6 +1316,7 @@ dotest1(OS) -> RM64 = {rm64,rm_reg(?EDX)}, RM32 = {rm32,rm_reg(?EDX)}, RM16 = {rm16,rm_reg(?EDX)}, + RM16REX = {rm16,rm_reg(?R13)}, RM8 = {rm8,rm_reg(?EDX)}, RM8REX = {rm8,rm_reg(?SIL)}, Rel32 = {rel32,Word32}, @@ -1479,6 +1480,7 @@ dotest1(OS) -> t(OS,'test',{RM8,Imm8}), t(OS,'test',{RM8REX,Imm8}), t(OS,'test',{RM16,Imm16}), + t(OS,'test',{RM16REX,Imm16}), t(OS,'test',{RM32,Imm32}), t(OS,'test',{RM64,Imm32}), t(OS,'test',{RM32,Reg32}), diff --git a/lib/hipe/amd64/hipe_amd64_registers.erl b/lib/hipe/amd64/hipe_amd64_registers.erl index a4cb71a106..a5cecef5a1 100644 --- a/lib/hipe/amd64/hipe_amd64_registers.erl +++ b/lib/hipe/amd64/hipe_amd64_registers.erl @@ -207,19 +207,14 @@ allocatable_x87() -> nr_args() -> ?AMD64_NR_ARG_REGS. -arg(N) -> - if N < ?AMD64_NR_ARG_REGS -> - case N of - 0 -> ?ARG0; - 1 -> ?ARG1; - 2 -> ?ARG2; - 3 -> ?ARG3; - 4 -> ?ARG4; - 5 -> ?ARG5; - _ -> exit({?MODULE, arg, N}) - end; - true -> - exit({?MODULE, arg, N}) +arg(N) when N < ?AMD64_NR_ARG_REGS -> + case N of + 0 -> ?ARG0; + 1 -> ?ARG1; + 2 -> ?ARG2; + 3 -> ?ARG3; + 4 -> ?ARG4; + 5 -> ?ARG5 end. is_arg(R) -> @@ -240,11 +235,7 @@ args(Arity) when is_integer(Arity), Arity >= 0 -> args(I, Rest) when I < 0 -> Rest; args(I, Rest) -> args(I-1, [arg(I) | Rest]). -ret(N) -> - case N of - 0 -> ?RAX; - _ -> exit({?MODULE, ret, N}) - end. +ret(0) -> ?RAX. 
%% Note: the fact that (allocatable() UNION allocatable_x87() UNION %% allocatable_sse2()) is a subset of call_clobbered() is hard-coded in diff --git a/lib/hipe/cerl/cerl_to_icode.erl b/lib/hipe/cerl/cerl_to_icode.erl index acad8a9da4..e37eae8a03 100644 --- a/lib/hipe/cerl/cerl_to_icode.erl +++ b/lib/hipe/cerl/cerl_to_icode.erl @@ -2621,7 +2621,7 @@ icode_switch_val(Arg, Fail, Length, Cases) -> hipe_icode:mk_switch_val(Arg, Fail, Length, Cases). icode_switch_tuple_arity(Arg, Fail, Length, Cases) -> - SortedCases = lists:keysort(1, Cases), %% immitate BEAM compiler - Kostis + SortedCases = lists:keysort(1, Cases), %% imitate BEAM compiler - Kostis hipe_icode:mk_switch_tuple_arity(Arg, Fail, Length, SortedCases). diff --git a/lib/hipe/cerl/erl_types.erl b/lib/hipe/cerl/erl_types.erl index 91ee104f77..ea8cc1677d 100644 --- a/lib/hipe/cerl/erl_types.erl +++ b/lib/hipe/cerl/erl_types.erl @@ -518,7 +518,8 @@ list_contains_opaque(List, Opaques) -> lists:any(fun(E) -> t_contains_opaque(E, Opaques) end, List). %% t_find_opaque_mismatch/2 of two types should only be used if their -%% t_inf is t_none() due to some opaque type violation. +%% t_inf is t_none() due to some opaque type violation. However, +%% 'error' is returned if a structure mismatch is found. %% %% The first argument of the function is the pattern and its second %% argument the type we are matching against the pattern. @@ -527,22 +528,30 @@ list_contains_opaque(List, Opaques) -> 'error' | {'ok', erl_type(), erl_type()}. t_find_opaque_mismatch(T1, T2, Opaques) -> - t_find_opaque_mismatch(T1, T2, T2, Opaques). + catch t_find_opaque_mismatch(T1, T2, T2, Opaques). t_find_opaque_mismatch(?any, _Type, _TopType, _Opaques) -> error; -t_find_opaque_mismatch(?none, _Type, _TopType, _Opaques) -> error; +t_find_opaque_mismatch(?none, _Type, _TopType, _Opaques) -> throw(error); t_find_opaque_mismatch(?list(T1, Tl1, _), ?list(T2, Tl2, _), TopType, Opaques) -> t_find_opaque_mismatch_ordlists([T1, Tl1], [T2, Tl2], TopType, Opaques); t_find_opaque_mismatch(T1, ?opaque(_) = T2, TopType, Opaques) -> case is_opaque_type(T2, Opaques) of - false -> {ok, TopType, T2}; + false -> + case t_is_opaque(T1) andalso compatible_opaque_types(T1, T2) =/= [] of + true -> error; + false -> {ok, TopType, T2} + end; true -> t_find_opaque_mismatch(T1, t_opaque_structure(T2), TopType, Opaques) end; t_find_opaque_mismatch(?opaque(_) = T1, T2, TopType, Opaques) -> %% The generated message is somewhat misleading: case is_opaque_type(T1, Opaques) of - false -> {ok, TopType, T1}; + false -> + case t_is_opaque(T2) andalso compatible_opaque_types(T1, T2) =/= [] of + true -> error; + false -> {ok, TopType, T1} + end; true -> t_find_opaque_mismatch(t_opaque_structure(T1), T2, TopType, Opaques) end; @@ -558,7 +567,11 @@ t_find_opaque_mismatch(?tuple(_, _, _) = T1, ?tuple_set(_) = T2, t_find_opaque_mismatch_lists(Tuples1, Tuples2, TopType, Opaques); t_find_opaque_mismatch(T1, ?union(U2), TopType, Opaques) -> t_find_opaque_mismatch_lists([T1], U2, TopType, Opaques); -t_find_opaque_mismatch(_T1, _T2, _TopType, _Opaques) -> error. +t_find_opaque_mismatch(T1, T2, _TopType, Opaques) -> + case t_is_none(t_inf(T1, T2, Opaques)) of + false -> error; + true -> throw(error) + end. t_find_opaque_mismatch_ordlists(L1, L2, TopType, Opaques) -> List = lists:zipwith(fun(T1, T2) -> @@ -567,10 +580,11 @@ t_find_opaque_mismatch_ordlists(L1, L2, TopType, Opaques) -> t_find_opaque_mismatch_list(List). 
t_find_opaque_mismatch_lists(L1, L2, _TopType, Opaques) -> - List = [t_find_opaque_mismatch(T1, T2, T2, Opaques) || T1 <- L1, T2 <- L2], + List = [catch t_find_opaque_mismatch(T1, T2, T2, Opaques) || + T1 <- L1, T2 <- L2], t_find_opaque_mismatch_list(List). -t_find_opaque_mismatch_list([]) -> error; +t_find_opaque_mismatch_list([]) -> throw(error); t_find_opaque_mismatch_list([H|T]) -> case H of {ok, _T1, _T2} -> H; @@ -2235,16 +2249,21 @@ t_has_var_list([]) -> false. -spec t_collect_vars(erl_type()) -> [erl_type()]. t_collect_vars(T) -> - t_collect_vars(T, []). + Vs = t_collect_vars(T, maps:new()), + [V || {V, _} <- maps:to_list(Vs)]. + +-type ctab() :: #{erl_type() => 'any'}. --spec t_collect_vars(erl_type(), [erl_type()]) -> [erl_type()]. +-spec t_collect_vars(erl_type(), ctab()) -> ctab(). t_collect_vars(?var(_) = Var, Acc) -> - ordsets:add_element(Var, Acc); + maps:put(Var, any, Acc); t_collect_vars(?function(Domain, Range), Acc) -> - ordsets:union(t_collect_vars(Domain, Acc), t_collect_vars(Range, [])); + Acc1 = t_collect_vars(Domain, Acc), + t_collect_vars(Range, Acc1); t_collect_vars(?list(Contents, Termination, _), Acc) -> - ordsets:union(t_collect_vars(Contents, Acc), t_collect_vars(Termination, [])); + Acc1 = t_collect_vars(Contents, Acc), + t_collect_vars(Termination, Acc1); t_collect_vars(?product(Types), Acc) -> t_collect_vars_list(Types, Acc); t_collect_vars(?tuple(?any, ?any, ?any), Acc) -> @@ -3042,6 +3061,9 @@ inf_opaque_types(IsOpaque1, T1, IsOpaque2, T2, Opaques) -> end end. +compatible_opaque_types(?opaque(Es1), ?opaque(Es2)) -> + [{O1, O2} || O1 <- Es1, O2 <- Es2, is_compat_opaque_names(O1, O2)]. + is_compat_opaque_names(Opaque1, Opaque2) -> #opaque{mod = Mod1, name = Name1, args = Args1} = Opaque1, #opaque{mod = Mod2, name = Name2, args = Args2} = Opaque2, @@ -4424,9 +4446,17 @@ mod_name(Mod, Name) -> -type site() :: {'type', mta()} | {'spec', mfa()} | {'record', mra()}. -type cache_key() :: {module(), atom(), expand_depth(), [erl_type()], type_names()}. --opaque cache() :: #{cache_key() => {erl_type(), expand_limit()}}. +-type mod_type_table() :: ets:tid(). +-record(cache, + { + types = maps:new() :: #{cache_key() => {erl_type(), expand_limit()}}, + mod_recs = {mrecs, dict:new()} :: 'undefined' + | {'mrecs', mod_records()} + }). + +-opaque cache() :: #cache{}. --spec t_from_form(parse_form(), sets:set(mfa()), site(), mod_records(), +-spec t_from_form(parse_form(), sets:set(mfa()), site(), mod_type_table(), var_table(), cache()) -> {erl_type(), cache()}. t_from_form(Form, ExpTypes, Site, RecDict, VarTab, Cache) -> @@ -4438,11 +4468,12 @@ t_from_form(Form, ExpTypes, Site, RecDict, VarTab, Cache) -> t_from_form_without_remote(Form, Site, TypeTable) -> Module = site_module(Site), - RecDict = dict:from_list([{Module, TypeTable}]), + ModRecs = dict:from_list([{Module, TypeTable}]), ExpTypes = replace_by_none, VarTab = var_table__new(), - Cache = cache__new(), - t_from_form1(Form, ExpTypes, Site, RecDict, VarTab, Cache). + Cache0 = cache__new(), + Cache = Cache0#cache{mod_recs = {mrecs, ModRecs}}, + t_from_form1(Form, ExpTypes, Site, undefined, VarTab, Cache). %% REC_TYPE_LIMIT is used for limiting the depth of recursive types. %% EXPAND_LIMIT is used for limiting the size of types by @@ -4457,13 +4488,13 @@ t_from_form_without_remote(Form, Site, TypeTable) -> -record(from_form, {site :: site(), xtypes :: sets:set(mfa()) | 'replace_by_none', - mrecs :: mod_records(), + mrecs :: 'undefined' | mod_type_table(), vtab :: var_table(), tnames :: type_names()}). 
-spec t_from_form1(parse_form(), sets:set(mfa()) | 'replace_by_none', - site(), mod_records(), var_table(), cache()) -> - {erl_type(), cache()}. + site(), 'undefined' | mod_type_table(), var_table(), + cache()) -> {erl_type(), cache()}. t_from_form1(Form, ET, Site, MR, V, C) -> TypeNames = initial_typenames(Site), @@ -4709,13 +4740,13 @@ from_form({opaque, _L, Name, {Mod, Args, Rep}}, _S, _D, L, C) -> builtin_type(Name, Type, S, D, L, C) -> #from_form{site = Site, mrecs = MR} = S, M = site_module(Site), - case dict:find(M, MR) of - {ok, R} -> + case lookup_module_types(M, MR, C) of + {R, C1} -> case lookup_type(Name, 0, R) of {_, {{_M, _FL, _F, _A}, _T}} -> - type_from_form(Name, [], S, D, L, C); + type_from_form(Name, [], S, D, L, C1); error -> - {Type, L, C} + {Type, L, C1} end; error -> {Type, L, C} @@ -4728,9 +4759,9 @@ type_from_form(Name, Args, S, D, L, C) -> TypeName = {type, {Module, Name, ArgsLen}}, case can_unfold_more(TypeName, TypeNames) of true -> - {ok, R} = dict:find(Module, MR), + {R, C1} = lookup_module_types(Module, MR, C), type_from_form1(Name, Args, ArgsLen, R, TypeName, TypeNames, - S, D, L, C); + S, D, L, C1); false -> {t_any(), L, C} end. @@ -4782,24 +4813,24 @@ remote_from_form(RemMod, Name, Args, S, D, L, C) -> true -> ArgsLen = length(Args), MFA = {RemMod, Name, ArgsLen}, - case dict:find(RemMod, MR) of + case lookup_module_types(RemMod, MR, C) of error -> self() ! {self(), ext_types, MFA}, {t_any(), L, C}; - {ok, RemDict} -> + {RemDict, C1} -> case sets:is_element(MFA, ET) of true -> RemType = {type, MFA}, case can_unfold_more(RemType, TypeNames) of true -> remote_from_form1(RemMod, Name, Args, ArgsLen, RemDict, - RemType, TypeNames, S, D, L, C); + RemType, TypeNames, S, D, L, C1); false -> - {t_any(), L, C} + {t_any(), L, C1} end; false -> self() ! {self(), ext_types, {RemMod, Name, ArgsLen}}, - {t_any(), L, C} + {t_any(), L, C1} end end end. @@ -4874,15 +4905,15 @@ record_from_form({atom, _, Name}, ModFields, S, D0, L0, C) -> case can_unfold_more(RecordType, TypeNames) of true -> M = site_module(Site), - {ok, R} = dict:find(M, MR), + {R, C1} = lookup_module_types(M, MR, C), case lookup_record(Name, R) of {ok, DeclFields} -> NewTypeNames = [RecordType|TypeNames], Site1 = {record, {M, Name, length(DeclFields)}}, S1 = S#from_form{site = Site1, tnames = NewTypeNames}, Fun = fun(D, L) -> - {GetModRec, L1, C1} = - get_mod_record(ModFields, DeclFields, S1, D, L, C), + {GetModRec, L1, C2} = + get_mod_record(ModFields, DeclFields, S1, D, L, C1), case GetModRec of {error, FieldName} -> throw({error, @@ -4890,12 +4921,12 @@ record_from_form({atom, _, Name}, ModFields, S, D0, L0, C) -> [Name, FieldName])}); {ok, NewFields} -> S2 = S1#from_form{vtab = var_table__new()}, - {NewFields1, L2, C2} = - fields_from_form(NewFields, S2, D, L1, C1), + {NewFields1, L2, C3} = + fields_from_form(NewFields, S2, D, L1, C2), Rec = t_tuple( [t_atom(Name)|[Type || {_FieldName, Type} <- NewFields1]]), - {Rec, L2, C2} + {Rec, L2, C3} end end, recur_limit(Fun, D0, L0, RecordType, TypeNames); @@ -5026,7 +5057,7 @@ recur_limit(Fun, D, L, TypeName, TypeNames) -> end. -spec t_check_record_fields(parse_form(), sets:set(mfa()), site(), - mod_records(), var_table(), cache()) -> cache(). + mod_type_table(), var_table(), cache()) -> cache(). 
t_check_record_fields(Form, ExpTypes, Site, RecDict, VarTable, Cache) -> State = #from_form{site = Site, @@ -5070,13 +5101,13 @@ check_record_fields({user_type, _L, _Name, Args}, S, C) -> check_record({atom, _, Name}, ModFields, S, C) -> #from_form{site = Site, mrecs = MR} = S, M = site_module(Site), - {ok, R} = dict:find(M, MR), + {R, C1} = lookup_module_types(M, MR, C), {ok, DeclFields} = lookup_record(Name, R), - case check_fields(Name, ModFields, DeclFields, S, C) of + case check_fields(Name, ModFields, DeclFields, S, C1) of {error, FieldName} -> throw({error, io_lib:format("Illegal declaration of #~w{~w}\n", [Name, FieldName])}); - C1 -> C1 + C2 -> C2 end. check_fields(RecName, [{type, _, field_type, [{atom, _, Name}, Abstr]}|Left], @@ -5106,7 +5137,7 @@ site_module({_, {Module, _, _}}) -> -spec cache__new() -> cache(). cache__new() -> - maps:new(). + #cache{}. -spec cache_key(module(), atom(), [erl_type()], type_names(), expand_depth()) -> cache_key(). @@ -5123,8 +5154,8 @@ cache_key(Module, Name, ArgTypes, TypeNames, D) -> -spec cache_find(cache_key(), cache()) -> {erl_type(), expand_limit()} | 'error'. -cache_find(Key, Cache) -> - case maps:find(Key, Cache) of +cache_find(Key, #cache{types = Types}) -> + case maps:find(Key, Types) of {ok, Value} -> Value; error -> @@ -5136,8 +5167,9 @@ cache_find(Key, Cache) -> cache_put(_Key, _Type, DeltaL, Cache) when DeltaL < 0 -> %% The type is truncated; do not reuse it. Cache; -cache_put(Key, Type, DeltaL, Cache) -> - maps:put(Key, {Type, DeltaL}, Cache). +cache_put(Key, Type, DeltaL, #cache{types = Types} = Cache) -> + NewTypes = maps:put(Key, {Type, DeltaL}, Types), + Cache#cache{types = NewTypes}. -spec t_var_names([erl_type()]) -> [atom()]. @@ -5236,14 +5268,12 @@ t_form_to_string({type, _L, union, Args}) -> t_form_to_string({type, _L, Name, []} = T) -> try M = mod, - D0 = maps:new(), - MR = dict:from_list([{M, D0}]), Site = {type, {M,Name,0}}, V = var_table__new(), C = cache__new(), State = #from_form{site = Site, xtypes = sets:new(), - mrecs = MR, + mrecs = 'undefined', vtab = V, tnames = []}, {T1, _, _} = from_form(T, State, _Deep=1000, _ALot=1000000, C), @@ -5297,6 +5327,28 @@ is_erl_type(?unit) -> true; is_erl_type(#c{}) -> true; is_erl_type(_) -> false. +-spec lookup_module_types(module(), mod_type_table(), cache()) -> + 'error' | {type_table(), cache()}. + +lookup_module_types(Module, CodeTable, Cache) -> + #cache{mod_recs = ModRecs} = Cache, + case ModRecs of + undefined -> error; + {mrecs, MRecs} -> + case dict:find(Module, MRecs) of + {ok, R} -> + {R, Cache}; + error -> + try ets:lookup_element(CodeTable, Module, 2) of + R -> + NewMRecs = dict:store(Module, R, MRecs), + {R, Cache#cache{mod_recs = {mrecs, NewMRecs}}} + catch + _:_ -> error + end + end + end. + -spec lookup_record(atom(), type_table()) -> 'error' | {'ok', [{atom(), parse_form(), erl_type()}]}. diff --git a/lib/hipe/doc/src/notes.xml b/lib/hipe/doc/src/notes.xml index 0bdd60adfd..58ca0b2138 100644 --- a/lib/hipe/doc/src/notes.xml +++ b/lib/hipe/doc/src/notes.xml @@ -31,6 +31,26 @@ </header> <p>This document describes the changes made to HiPE.</p> +<section><title>Hipe 3.15.4</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> Fix a bug concerning parameterized opaque types. 
</p> + <p> + Own Id: OTP-14130</p> + </item> + <item> + <p> + Fixed xml issues in old release notes</p> + <p> + Own Id: OTP-14269</p> + </item> + </list> + </section> + +</section> + <section><title>Hipe 3.15.3</title> <section><title>Fixed Bugs and Malfunctions</title> @@ -130,12 +150,12 @@ </item> <item> <p> - Various fixes and improvements to the HiPE LLVM backend. + Various fixes and improvements to the HiPE LLVM backend.</p> <list> <item>Add support for LLVM 3.7 and 3.8 in the HiPE/LLVM x86_64 backend</item> <item>Reinstate support for the LLVM backend on x86 (works OK for LLVM 3.5 to 3.7 -- LLVM 3.8 has a bug that prevents it from generating - correct native code on x86)</item> </list></p> + correct native code on x86)</item> </list> <p> Own Id: OTP-13626</p> </item> @@ -191,7 +211,7 @@ <item> <p> Fix various binary construction inconsistencies for hipe - compiled code. <list> <item>Passing bad field sizes to + compiled code.</p> <list> <item>Passing bad field sizes to binary constructions would throw <c>badarith</c> rather than <c>badarg</c>. Worse, in guards, when the unit size of the field was 1, the exception would leak rather than @@ -211,7 +231,7 @@ missing check for unit size match when inserting a binary. For example, a faulty expression like <c><<<<1:7>>/binary>></c> would - succeed.</item> </list></p> + succeed.</item> </list> <p> Own Id: OTP-13272</p> </item> @@ -1297,7 +1317,7 @@ <list> <item> <p> - Miscellanous updates.</p> + Miscellaneous updates.</p> <p> Own Id: OTP-8038</p> </item> diff --git a/lib/hipe/flow/cfg.inc b/lib/hipe/flow/cfg.inc index 362c5b697c..17342d3b60 100644 --- a/lib/hipe/flow/cfg.inc +++ b/lib/hipe/flow/cfg.inc @@ -212,7 +212,7 @@ info_update(CFG, I) -> -ifndef(GEN_CFG). -spec other_entrypoints(cfg()) -> [cfg_lbl()]. -%% @doc Returns a list of labels that are refered to from the data section. +%% @doc Returns a list of labels that are referred to from the data section. other_entrypoints(CFG) -> hipe_consttab:referred_labels(data(CFG)). diff --git a/lib/hipe/flow/ebb.inc b/lib/hipe/flow/ebb.inc index 58213e44d5..e4b7fd0efb 100644 --- a/lib/hipe/flow/ebb.inc +++ b/lib/hipe/flow/ebb.inc @@ -40,12 +40,14 @@ %% | {ebb_leaf, SuccesorLabel} %%-------------------------------------------------------------------- -%% XXX: Cheating big time! no recursive types --type ebb() :: {ebb_node, icode_lbl(), _} - | {ebb_leaf, icode_lbl()}. +-type ebb() :: ebb_node() + | ebb_leaf(). -record(ebb_node, {label :: icode_lbl(), successors :: [ebb()]}). +-type ebb_node() :: #ebb_node{}. + -record(ebb_leaf, {successor :: icode_lbl()}). +-type ebb_leaf() :: #ebb_leaf{}. %%-------------------------------------------------------------------- %% Returns a list of extended basic blocks. @@ -193,7 +195,7 @@ add_succ([Lbl|Lbls], Visited, Node, MkFun, EBBs, CFG) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% --spec mk_node(icode_lbl(), [ebb()]) -> #ebb_node{}. +-spec mk_node(icode_lbl(), [ebb()]) -> ebb_node(). mk_node(Label, Successors) -> #ebb_node{label=Label, successors=Successors}. -spec node_label(#ebb_node{}) -> icode_lbl(). @@ -202,11 +204,11 @@ node_label(#ebb_node{label=Label}) -> Label. -spec node_successors(#ebb_node{}) -> [ebb()]. node_successors(#ebb_node{successors=Successors}) -> Successors. --spec mk_leaf(icode_lbl()) -> #ebb_leaf{}. +-spec mk_leaf(icode_lbl()) -> ebb_leaf(). mk_leaf(NextEbb) -> #ebb_leaf{successor=NextEbb}. %% leaf_next(Leaf) -> Leaf#ebb_leaf.successor. --spec type(#ebb_node{}) -> 'node' ; (#ebb_leaf{}) -> 'leaf'. 
+-spec type(ebb_node()) -> 'node' ; (ebb_leaf()) -> 'leaf'. type(#ebb_node{}) -> node; type(#ebb_leaf{}) -> leaf. diff --git a/lib/hipe/flow/hipe_dominators.erl b/lib/hipe/flow/hipe_dominators.erl index 570452c14e..749edd4f72 100644 --- a/lib/hipe/flow/hipe_dominators.erl +++ b/lib/hipe/flow/hipe_dominators.erl @@ -317,7 +317,7 @@ updateCell(Value, Field, WD) -> %%>----------------------------------------------------------------------< %% Procedure : dfs/1 %% Purpose : The main purpose of this function is to traverse the CFG in -%% a depth first order. It is aslo used to initialize certain +%% a depth first order. It is also used to initialize certain %% elements defined in a workDataCell. %% Arguments : CFG - a Control Flow Graph representation %% Returns : A table (WorkData) and the total number of elements in diff --git a/lib/hipe/icode/hipe_beam_to_icode.erl b/lib/hipe/icode/hipe_beam_to_icode.erl index 100bc0b0e2..610578dfbc 100644 --- a/lib/hipe/icode/hipe_beam_to_icode.erl +++ b/lib/hipe/icode/hipe_beam_to_icode.erl @@ -148,7 +148,8 @@ trans_mfa_code(M,F,A, FunBeamCode, ClosureInfo) -> {Code3,_Env3} = mk_debug_calltrace(MFA, Env1, Code2), {Code3,_Env3} = {Code2,Env1}), %% For stack optimization - Leafness = leafness(Code3), + IsClosure = get_closure_info(MFA, ClosureInfo) =/= not_a_closure, + Leafness = leafness(Code3, IsClosure), IsLeaf = is_leaf_code(Leafness), Code4 = [FunLbl | @@ -156,7 +157,6 @@ trans_mfa_code(M,F,A, FunBeamCode, ClosureInfo) -> false -> Code3; true -> [mk_redtest()|Code3] end], - IsClosure = get_closure_info(MFA, ClosureInfo) =/= not_a_closure, Code5 = hipe_icode:mk_icode(MFA, FunArgs, IsClosure, IsLeaf, remove_dead_code(Code4), hipe_gensym:var_range(icode), @@ -173,12 +173,12 @@ trans_mfa_code(M,F,A, FunBeamCode, ClosureInfo) -> mk_redtest() -> hipe_icode:mk_primop([], redtest, []). -leafness(Is) -> % -> true, selfrec, or false - leafness(Is, true). +leafness(Is, IsClosure) -> % -> true, selfrec, closure, or false + leafness(Is, IsClosure, true). -leafness([], Leafness) -> +leafness([], _IsClosure, Leafness) -> Leafness; -leafness([I|Is], Leafness) -> +leafness([I|Is], IsClosure, Leafness) -> case I of #icode_comment{} -> %% BEAM self-tailcalls become gotos, but they leave @@ -191,7 +191,7 @@ leafness([I|Is], Leafness) -> 'self_tail_recursive' -> selfrec; % call_only to selfrec _ -> Leafness end, - leafness(Is, NewLeafness); + leafness(Is, IsClosure, NewLeafness); #icode_call{} -> case hipe_icode:call_type(I) of 'primop' -> @@ -199,12 +199,12 @@ leafness([I|Is], Leafness) -> call_fun -> false; % Calls closure enter_fun -> false; % Calls closure #apply_N{} -> false; - _ -> leafness(Is, Leafness) % Other primop calls are ok + _ -> leafness(Is, IsClosure, Leafness) % Other primop calls are ok end; T when T =:= 'local' orelse T =:= 'remote' -> {M,F,A} = hipe_icode:call_fun(I), case erlang:is_builtin(M, F, A) of - true -> leafness(Is, Leafness); + true -> leafness(Is, IsClosure, Leafness); false -> false end end; @@ -223,11 +223,12 @@ leafness([I|Is], Leafness) -> T when T =:= 'local' orelse T =:= 'remote' -> {M,F,A} = hipe_icode:enter_fun(I), case erlang:is_builtin(M, F, A) of - true -> leafness(Is, Leafness); + true -> leafness(Is, IsClosure, Leafness); + _ when IsClosure -> leafness(Is, IsClosure, closure); _ -> false end end; - _ -> leafness(Is, Leafness) + _ -> leafness(Is, IsClosure, Leafness) end. 
%% XXX: this old stuff is passed around but essentially unused @@ -235,12 +236,20 @@ is_leaf_code(Leafness) -> case Leafness of true -> true; selfrec -> true; + closure -> false; false -> false end. needs_redtest(Leafness) -> case Leafness of true -> false; + %% A "leaf" closure may contain tailcalls to non-closures in addition to + %% what other leaves may contain. Omitting the redtest is useful to generate + %% shorter code for closures generated by (fun F/A), and is safe since + %% control flow cannot return to a "leaf" closure again without a reduction + %% being consumed. This is true since no function that can call a closure + %% will ever have its redtest omitted. + closure -> false; selfrec -> true; false -> true end. diff --git a/lib/hipe/icode/hipe_icode_type.erl b/lib/hipe/icode/hipe_icode_type.erl index 815d1e57a8..aafaeb5a0a 100644 --- a/lib/hipe/icode/hipe_icode_type.erl +++ b/lib/hipe/icode/hipe_icode_type.erl @@ -1410,9 +1410,10 @@ transform_element2(I) -> NewIndex = case test_type(integer, IndexType) of true -> - case t_number_vals(IndexType) of - unknown -> unknown; - [_|_] = Vals -> {number, Vals} + case {number_min(IndexType), number_max(IndexType)} of + {Lb0, Ub0} when is_integer(Lb0), is_integer(Ub0) -> + {number, Lb0, Ub0}; + {_, _} -> unknown end; _ -> unknown end, @@ -1427,19 +1428,19 @@ transform_element2(I) -> _ -> unknown end, case {NewIndex, MinSize} of - {{number, [_|_] = Ns}, {tuple, A}} when is_integer(A) -> - case lists:all(fun(X) -> 0 < X andalso X =< A end, Ns) of + {{number, Lb, Ub}, {tuple, A}} when is_integer(A) -> + case 0 < Lb andalso Ub =< A of true -> - case Ns of - [Idx] -> + case {Lb, Ub} of + {Idx, Idx} -> [_, Tuple] = hipe_icode:args(I), update_call_or_enter(I, #unsafe_element{index = Idx}, [Tuple]); - [_|_] -> + {_, _} -> NewFun = {element, [MinSize, valid]}, update_call_or_enter(I, NewFun) end; false -> - case lists:all(fun(X) -> hipe_tagscheme:is_fixnum(X) end, Ns) of + case lists:all(fun(X) -> hipe_tagscheme:is_fixnum(X) end, [Lb, Ub]) of true -> NewFun = {element, [MinSize, fixnums]}, update_call_or_enter(I, NewFun); @@ -1454,7 +1455,7 @@ transform_element2(I) -> NewFun = {element, [MinSize, fixnums]}, update_call_or_enter(I, NewFun); false -> - NewFun = {element, [MinSize, NewIndex]}, + NewFun = {element, [MinSize, NewIndex]}, update_call_or_enter(I, NewFun) end end. 
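A minimal, hypothetical Erlang sketch of the index handling introduced in the hipe_icode_type.erl hunk above, which now derives the index from number_min/1 and number_max/1 bounds instead of an explicit value set; classify_index/3 and the result terms below are illustrative names only, not functions from the patch:

    %% Sketch: the bounds test can be dropped whenever the index range {Lb,Ub}
    %% is known to lie within 1..Arity, and a single-valued range turns the
    %% call into an unsafe_element access.
    classify_index(Lb, Ub, Arity)
      when is_integer(Lb), is_integer(Ub), is_integer(Arity) ->
        if
            Lb =:= Ub, 0 < Lb, Ub =< Arity -> {unsafe_element, Lb};
            0 < Lb, Ub =< Arity            -> {element, bounds_test_omitted};
            true                           -> {element, bounds_test_kept}
        end.

For instance, a guard such as is_integer(N), N >= 1, N =< 20 gives Lb = 1 and Ub = 20, so element(N, T) on a 20-tuple needs no bounds test; the basic_tuples.erl test added later in this diff exercises exactly that case.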
diff --git a/lib/hipe/llvm/hipe_llvm.erl b/lib/hipe/llvm/hipe_llvm.erl index b22f8fb320..641d3fda0a 100644 --- a/lib/hipe/llvm/hipe_llvm.erl +++ b/lib/hipe/llvm/hipe_llvm.erl @@ -862,7 +862,7 @@ pp_ins(Dev, Ver, I) -> true -> write(Dev, "volatile "); false -> ok end, - pp_dereference_type(Dev, Ver, load_p_type(I)), + pp_dereference_type(Dev, load_p_type(I)), write(Dev, [" ", load_pointer(I), " "]), case load_alignment(I) of [] -> ok; @@ -898,7 +898,7 @@ pp_ins(Dev, Ver, I) -> true -> write(Dev, "inbounds "); false -> ok end, - pp_dereference_type(Dev, Ver, getelementptr_p_type(I)), + pp_dereference_type(Dev, getelementptr_p_type(I)), write(Dev, [" ", getelementptr_value(I)]), pp_typed_idxs(Dev, getelementptr_typed_idxs(I)), write(Dev, "\n"); @@ -959,10 +959,8 @@ pp_ins(Dev, Ver, I) -> pp_args(Dev, fun_def_arglist(I)), write(Dev, ") "), pp_options(Dev, fun_def_fn_attrs(I)), - case Ver >= {3,7} of false -> ok; true -> - write(Dev, "personality i32 (i32, i64, i8*,i8*)* " - "@__gcc_personality_v0 ") - end, + write(Dev, "personality i32 (i32, i64, i8*,i8*)* " + "@__gcc_personality_v0 "), case fun_def_align(I) of [] -> ok; N -> write(Dev, ["align ", N]) @@ -997,12 +995,7 @@ pp_ins(Dev, Ver, I) -> pp_type(Dev, const_decl_type(I)), write(Dev, [" ", const_decl_value(I), "\n"]); #llvm_landingpad{} -> - write(Dev, "landingpad { i8*, i32 } "), - case Ver < {3,7} of false -> ok; true -> - write(Dev, "personality i32 (i32, i64, i8*,i8*)* " - "@__gcc_personality_v0 ") - end, - write(Dev, "cleanup\n"); + write(Dev, "landingpad { i8*, i32 } cleanup\n"); #llvm_asm{} -> write(Dev, [asm_instruction(I), "\n"]); #llvm_adj_stack{} -> @@ -1011,15 +1004,7 @@ pp_ins(Dev, Ver, I) -> pp_type(Dev, adj_stack_type(I)), write(Dev, [" ", adj_stack_offset(I),")\n"]); #llvm_meta{} -> - write(Dev, ["!", meta_id(I), " = "]), - Named = case string:to_integer(meta_id(I)) of - {_, ""} -> false; - _ -> true - end, - case Ver < {3,6} andalso not Named of - true -> write(Dev, "metadata !{metadata "); - false -> write(Dev, "!{ ") - end, + write(Dev, ["!", meta_id(I), " = !{ "]), write(Dev, string:join([if is_list(Op) -> ["!\"", Op, "\""]; is_integer(Op) -> ["i32 ", integer_to_list(Op)]; is_record(Op, llvm_meta) -> @@ -1030,15 +1015,10 @@ pp_ins(Dev, Ver, I) -> exit({?MODULE, pp_ins, {"Unknown LLVM instruction", Other}}) end. -%% @doc Print the type of a dereference in an LLVM instruction using syntax -%% parsable by the specified LLVM version. -pp_dereference_type(Dev, Ver, Type) -> - case Ver >= {3,7} of - false -> ok; - true -> - pp_type(Dev, pointer_type(Type)), - write(Dev, ", ") - end, +%% @doc Print the type of a dereference in an LLVM instruction. +pp_dereference_type(Dev, Type) -> + pp_type(Dev, pointer_type(Type)), + write(Dev, ", "), pp_type(Dev, Type). 
%% @doc Pretty-print a list of types diff --git a/lib/hipe/llvm/hipe_rtl_to_llvm.erl b/lib/hipe/llvm/hipe_rtl_to_llvm.erl index f8911c1909..79e1bfd381 100644 --- a/lib/hipe/llvm/hipe_rtl_to_llvm.erl +++ b/lib/hipe/llvm/hipe_rtl_to_llvm.erl @@ -1364,7 +1364,7 @@ create_function_definition(Fun, Params, Code, LocalVars) -> EntryBlock = lists:flatten([EntryLabel, ExceptionSync, I2, LocalVars, StoredParams, I3]), Final_Code = EntryBlock ++ Code, - FunctionOptions = [nounwind, noredzone, list_to_atom("gc \"erlang\"")], + FunctionOptions = [nounwind, noredzone, 'gc "erlang"'], WordTy = hipe_llvm:mk_int(?BITS_IN_WORD), FunRetTy = hipe_llvm:mk_struct(lists:duplicate(?NR_PINNED_REGS + 1, WordTy)), hipe_llvm:mk_fun_def([], [], "cc 11", [], FunRetTy, FunctionName, Args, @@ -1431,7 +1431,7 @@ relocs_to_list(Relocs) -> %% constants/labels. handle_relocations(Relocs, Data, Fun) -> RelocsList = relocs_to_list(Relocs), - %% Seperate Relocations according to their type + %% Separate Relocations according to their type {CallList, AtomList, ClosureList, ClosureLabels, SwitchList} = seperate_relocs(RelocsList), %% Create code to declare atoms @@ -1474,7 +1474,7 @@ handle_relocations(Relocs, Data, Fun) -> LocalVariables = AtomLoad ++ ClosureLoad ++ ConstLoad, {Relocs4, ExternalDeclarations, LocalVariables}. -%% @doc Seperate relocations according to their type. +%% @doc Separate relocations according to their type. seperate_relocs(Relocs) -> seperate_relocs(Relocs, [], [], [], [], []). diff --git a/lib/hipe/main/hipe.erl b/lib/hipe/main/hipe.erl index 90ef84ca51..fff397b060 100644 --- a/lib/hipe/main/hipe.erl +++ b/lib/hipe/main/hipe.erl @@ -441,7 +441,7 @@ compile(Name, File, Opts0) when is_atom(Name) -> ?error_msg("Cannot get Core Erlang code from BEAM binary.",[]), ?EXIT({cant_compile_core_from_binary}); true -> - case filename:find_src(filename:rootname(File, ".beam")) of + case filelib:find_source(filename:rootname(File,".beam") ++ ".beam") of {error, _} -> ?error_msg("Cannot find source code for ~p.", [File]), ?EXIT({cant_find_source_code}); @@ -655,7 +655,7 @@ run_compiler_1(Name, DisasmFun, IcodeFun, Options) -> case proplists:get_bool(to_llvm, Opts0) andalso not llvm_support_available() of true -> - ?error_msg("No LLVM version 3.4 or greater " + ?error_msg("No LLVM version 3.9 or greater " "found in $PATH; aborting " "native code compilation.\n", []), ?EXIT(cant_find_required_llvm_version); @@ -1585,7 +1585,7 @@ check_options(Opts) -> -spec llvm_support_available() -> boolean(). llvm_support_available() -> - get_llvm_version() >= {3,4}. + get_llvm_version() >= {3,9}. -type llvm_version() :: {Major :: integer(), Minor :: integer()}. 
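The hipe.erl hunk above swaps filename:find_src/1 for filelib:find_source/1, which is handed the path of the object file itself and applies the default source-search rules. A small shell sketch with purely hypothetical paths, not output from the patch:

    1> filelib:find_source("/opt/app/ebin/foo.beam").
    {ok,"/opt/app/src/foo.erl"}
    2> filelib:find_source("/nonexistent/bar.beam").
    {error,not_found}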
diff --git a/lib/hipe/opt/hipe_schedule.erl b/lib/hipe/opt/hipe_schedule.erl index 531690f885..0f25940e3d 100644 --- a/lib/hipe/opt/hipe_schedule.erl +++ b/lib/hipe/opt/hipe_schedule.erl @@ -1337,10 +1337,10 @@ cd([{N,I}|Xs], DAG, PrevBr, PrevUnsafe, PrevOthers) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% Function : cd_branch_to_other_deps %% Argument : N - index of branch -%% Ms - list of indexes of "others" preceeding instrs +%% Ms - list of indexes of "others" preceding instrs %% DAG - dependence graph %% Returns : DAG - new graph -%% Description : Makes preceeding instrs fixed so they don't bypass a branch +%% Description : Makes preceding instrs fixed so they don't bypass a branch %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% cd_branch_to_other_deps(_, [], DAG) -> DAG; diff --git a/lib/hipe/opt/hipe_spillmin_color.erl b/lib/hipe/opt/hipe_spillmin_color.erl index 50e073a467..41f1972df7 100644 --- a/lib/hipe/opt/hipe_spillmin_color.erl +++ b/lib/hipe/opt/hipe_spillmin_color.erl @@ -119,7 +119,7 @@ color_heuristic(IG, Min, Max, Safe, MaxNodes, Target, MaxDepth) -> end; _ -> %% This can be increased from 2, and by this the heuristic can be - %% exited earlier, but the same can be achived by decreasing the + %% exited earlier, but the same can be achieved by decreasing the %% recursion depth. This should not be decreased below 2. case (Max - Min) < 2 of true -> diff --git a/lib/hipe/regalloc/hipe_amd64_specific_sse2.erl b/lib/hipe/regalloc/hipe_amd64_specific_sse2.erl index 9c94539bc6..9682d37520 100644 --- a/lib/hipe/regalloc/hipe_amd64_specific_sse2.erl +++ b/lib/hipe/regalloc/hipe_amd64_specific_sse2.erl @@ -126,8 +126,8 @@ temp0(_) -> all_precoloured(Ctx) -> allocatable(Ctx). -is_precoloured(Reg, Ctx) -> - lists:member(Reg,all_precoloured(Ctx)). +is_precoloured(Reg, _) -> + hipe_amd64_registers:is_precoloured_sse2(Reg). physical_name(Reg, _) -> Reg. 
diff --git a/lib/hipe/rtl/hipe_icode2rtl.erl b/lib/hipe/rtl/hipe_icode2rtl.erl index 82970f04ab..6da8a76d34 100644 --- a/lib/hipe/rtl/hipe_icode2rtl.erl +++ b/lib/hipe/rtl/hipe_icode2rtl.erl @@ -532,8 +532,12 @@ gen_cond(CondOp, Args, TrueLbl, FalseLbl, Pred) -> FalseLbl, Pred)]; '=:=' -> [Arg1, Arg2] = Args, + TypeTestLbl = hipe_rtl:mk_new_label(), [hipe_rtl:mk_branch(Arg1, eq, Arg2, TrueLbl, - hipe_rtl:label_name(GenLbl), Pred), + hipe_rtl:label_name(TypeTestLbl), Pred), + TypeTestLbl, + hipe_tagscheme:test_either_immed(Arg1, Arg2, FalseLbl, + hipe_rtl:label_name(GenLbl)), GenLbl, hipe_rtl:mk_call([Tmp], op_exact_eqeq_2, Args, TestRetName, [], not_remote), @@ -546,8 +550,12 @@ gen_cond(CondOp, Args, TrueLbl, FalseLbl, Pred) -> TrueLbl, 1-Pred)]; '=/=' -> [Arg1, Arg2] = Args, + TypeTestLbl = hipe_rtl:mk_new_label(), [hipe_rtl:mk_branch(Arg1, eq, Arg2, FalseLbl, - hipe_rtl:label_name(GenLbl), 1-Pred), + hipe_rtl:label_name(TypeTestLbl), 1-Pred), + TypeTestLbl, + hipe_tagscheme:test_either_immed(Arg1, Arg2, TrueLbl, + hipe_rtl:label_name(GenLbl)), GenLbl, hipe_rtl:mk_call([Tmp], op_exact_eqeq_2, Args, TestRetName, [], not_remote), diff --git a/lib/hipe/rtl/hipe_rtl_binary_construct.erl b/lib/hipe/rtl/hipe_rtl_binary_construct.erl index fd0d1f1223..52ea5db382 100644 --- a/lib/hipe/rtl/hipe_rtl_binary_construct.erl +++ b/lib/hipe/rtl/hipe_rtl_binary_construct.erl @@ -137,43 +137,6 @@ gen_rtl(BsOP, Dst, Args, TrueLblName, FalseLblName, SystemLimitLblName, ConstTab end end; - {bs_put_integer, Size, Flags, ConstInfo} -> - Aligned = aligned(Flags), - LittleEndian = littleendian(Flags), - [NewOffset] = get_real(Dst), - case is_illegal_const(Size) of - true -> - [hipe_rtl:mk_goto(FalseLblName)]; - false -> - case ConstInfo of - fail -> - [hipe_rtl:mk_goto(FalseLblName)]; - _ -> - case Args of - [Src, Base, Offset] -> - CCode = static_int_c_code(NewOffset, Src, - Base, Offset, Size, - Flags, TrueLblName, - FalseLblName), - put_static_int(NewOffset, Src, Base, Offset, Size, - CCode, Aligned, LittleEndian, TrueLblName); - [Src, Bits, Base, Offset] -> - {SizeCode, SizeReg} = - hipe_rtl_binary:make_size(Size, Bits, - SystemLimitLblName, - FalseLblName), - CCode = int_c_code(NewOffset, Src, Base, - Offset, SizeReg, Flags, - TrueLblName, FalseLblName), - InCode = - put_dynamic_int(NewOffset, Src, Base, Offset, - SizeReg, CCode, Aligned, - LittleEndian, TrueLblName), - SizeCode ++ InCode - end - end - end; - {unsafe_bs_put_integer, 0, _Flags, _ConstInfo} -> [NewOffset] = get_real(Dst), case Args of @@ -186,44 +149,12 @@ gen_rtl(BsOP, Dst, Args, TrueLblName, FalseLblName, SystemLimitLblName, ConstTab end; {unsafe_bs_put_integer, Size, Flags, ConstInfo} -> - case is_illegal_const(Size) of - true -> - [hipe_rtl:mk_goto(FalseLblName)]; - false -> - Aligned = aligned(Flags), - LittleEndian = littleendian(Flags), - [NewOffset] = get_real(Dst), - case ConstInfo of - fail -> - [hipe_rtl:mk_goto(FalseLblName)]; - _ -> - case Args of - [Src, Base, Offset] -> - CCode = static_int_c_code(NewOffset, Src, - Base, Offset, Size, - Flags, TrueLblName, - FalseLblName), - put_unsafe_static_int(NewOffset, Src, Base, - Offset, Size, - CCode, Aligned, LittleEndian, - TrueLblName); - [Src, Bits, Base, Offset] -> - {SizeCode, SizeReg} = - hipe_rtl_binary:make_size(Size, Bits, - SystemLimitLblName, - FalseLblName), - CCode = int_c_code(NewOffset, Src, Base, - Offset, SizeReg, Flags, - TrueLblName, FalseLblName), - InCode = - put_unsafe_dynamic_int(NewOffset, Src, Base, - Offset, SizeReg, CCode, - Aligned, LittleEndian, - 
TrueLblName), - SizeCode ++ InCode - end - end - end; + do_bs_put_integer(Dst, Args, Size, Flags, ConstInfo, true, + TrueLblName, FalseLblName, SystemLimitLblName); + + {bs_put_integer, Size, Flags, ConstInfo} -> + do_bs_put_integer(Dst, Args, Size, Flags, ConstInfo, false, + TrueLblName, FalseLblName, SystemLimitLblName); bs_utf8_size -> case Dst of @@ -360,6 +291,40 @@ gen_rtl(BsOP, Dst, Args, TrueLblName, FalseLblName, SystemLimitLblName, ConstTab {Code, ConstTab} end. +%% Common implementation of bs_put_integer and unsafe_bs_put_integer +do_bs_put_integer(Dst, Args, Size, Flags, ConstInfo, SrcUnsafe, + TrueLblName, FalseLblName, SystemLimitLblName) -> + case is_illegal_const(Size) of + true -> + [hipe_rtl:mk_goto(FalseLblName)]; + false -> + Aligned = aligned(Flags), + LittleEndian = littleendian(Flags), + [NewOffset] = get_real(Dst), + case ConstInfo of + fail -> + [hipe_rtl:mk_goto(FalseLblName)]; + _ -> + case Args of + [Src, Base, Offset] -> + CCode = static_int_c_code(NewOffset, Src, Base, Offset, Size, + Flags, TrueLblName, FalseLblName), + put_static_int(NewOffset, Src, Base, Offset, Size, CCode, Aligned, + LittleEndian, SrcUnsafe, TrueLblName); + [Src, Bits, Base, Offset] -> + {SizeCode, SizeReg} = + hipe_rtl_binary:make_size(Size, Bits, SystemLimitLblName, + FalseLblName), + CCode = int_c_code(NewOffset, Src, Base, Offset, SizeReg, Flags, + TrueLblName, FalseLblName), + InCode = put_dynamic_int(NewOffset, Src, Base, Offset, SizeReg, + CCode, Aligned, LittleEndian, SrcUnsafe, + TrueLblName), + SizeCode ++ InCode + end + end + end. + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% %% Code that is used in the append and init writeable functions @@ -807,28 +772,8 @@ put_float(_NewOffset, _Src, _Base, _Offset, _Size, CCode, _Aligned, CCode. put_static_int(NewOffset, Src, Base, Offset, Size, CCode, Aligned, - LittleEndian, TrueLblName) -> - {Init, End, UntaggedSrc} = make_init_end(Src, CCode, TrueLblName), - case {Aligned, LittleEndian} of - {true, true} -> - Init ++ - copy_int_little(Base, Offset, NewOffset, Size, UntaggedSrc) ++ - End; - {true, false} -> - Init ++ - copy_int_big(Base, Offset, NewOffset, Size, UntaggedSrc) ++ - End; - {false, true} -> - CCode; - {false, false} -> - Init ++ - copy_offset_int_big(Base, Offset, NewOffset, Size, UntaggedSrc) ++ - End - end. - -put_unsafe_static_int(NewOffset, Src, Base, Offset, Size, CCode, Aligned, - LittleEndian, TrueLblName) -> - {Init, End, UntaggedSrc} = make_init_end(Src, TrueLblName), + LittleEndian, SrcUnsafe, TrueLblName) -> + {Init, End, UntaggedSrc} = make_init_end(Src, CCode, SrcUnsafe, TrueLblName), case {Aligned, LittleEndian} of {true, true} -> Init ++ @@ -847,27 +792,8 @@ put_unsafe_static_int(NewOffset, Src, Base, Offset, Size, CCode, Aligned, end. put_dynamic_int(NewOffset, Src, Base, Offset, SizeReg, CCode, Aligned, - LittleEndian, TrueLblName) -> - {Init, End, UntaggedSrc} = make_init_end(Src, CCode, TrueLblName), - case Aligned of - true -> - case LittleEndian of - true -> - Init ++ - copy_int_little(Base, Offset, NewOffset, SizeReg, UntaggedSrc) ++ - End; - false -> - Init ++ - copy_int_big(Base, Offset, NewOffset, SizeReg, UntaggedSrc) ++ - End - end; - false -> - CCode - end. 
- -put_unsafe_dynamic_int(NewOffset, Src, Base, Offset, SizeReg, CCode, Aligned, - LittleEndian, TrueLblName) -> - {Init, End, UntaggedSrc} = make_init_end(Src, TrueLblName), + LittleEndian, SrcUnsafe, TrueLblName) -> + {Init, End, UntaggedSrc} = make_init_end(Src, CCode, SrcUnsafe, TrueLblName), case Aligned of true -> case LittleEndian of @@ -884,14 +810,13 @@ put_unsafe_dynamic_int(NewOffset, Src, Base, Offset, SizeReg, CCode, Aligned, CCode end. - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% %% Help functions used by the above %% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -make_init_end(Src, CCode, TrueLblName) -> +make_init_end(Src, CCode, false, TrueLblName) -> [CLbl, SuccessLbl] = create_lbls(2), [UntaggedSrc] = create_regs(1), Init = [hipe_tagscheme:test_fixnum(Src, hipe_rtl:label_name(SuccessLbl), @@ -899,9 +824,8 @@ make_init_end(Src, CCode, TrueLblName) -> SuccessLbl, hipe_tagscheme:untag_fixnum(UntaggedSrc,Src)], End = [hipe_rtl:mk_goto(TrueLblName), CLbl| CCode], - {Init, End, UntaggedSrc}. - -make_init_end(Src, TrueLblName) -> + {Init, End, UntaggedSrc}; +make_init_end(Src, _CCode, true, TrueLblName) -> [UntaggedSrc] = create_regs(1), Init = [hipe_tagscheme:untag_fixnum(UntaggedSrc,Src)], End = [hipe_rtl:mk_goto(TrueLblName)], diff --git a/lib/hipe/rtl/hipe_tagscheme.erl b/lib/hipe/rtl/hipe_tagscheme.erl index 35d1e7c8a4..68cbe75e85 100644 --- a/lib/hipe/rtl/hipe_tagscheme.erl +++ b/lib/hipe/rtl/hipe_tagscheme.erl @@ -40,6 +40,7 @@ fixnum_gt/5, fixnum_lt/5, fixnum_ge/5, fixnum_le/5, fixnum_val/1, fixnum_mul/4, fixnum_addsub/5, fixnum_andorxor/4, fixnum_not/2, fixnum_bsr/3, fixnum_bsl/3]). +-export([test_either_immed/4]). -export([unsafe_car/2, unsafe_cdr/2, unsafe_constant_element/3, unsafe_update_element/3, element/6]). -export([unsafe_closure_element/3]). @@ -363,14 +364,17 @@ test_matchstate(X, TrueLab, FalseLab, Pred) -> mask_and_compare(Tmp, ?TAG_HEADER_MASK, ?TAG_HEADER_BIN_MATCHSTATE, TrueLab, FalseLab, Pred)]. +test_bitstr_header(HdrTmp, TrueLab, FalseLab, Pred) -> + Mask = ?TAG_HEADER_MASK - ?BINARY_XXX_MASK, + mask_and_compare(HdrTmp, Mask, ?TAG_HEADER_REFC_BIN, TrueLab, FalseLab, Pred). + test_bitstr(X, TrueLab, FalseLab, Pred) -> Tmp = hipe_rtl:mk_new_reg_gcsafe(), HalfTrueLab = hipe_rtl:mk_new_label(), - Mask = ?TAG_HEADER_MASK - ?BINARY_XXX_MASK, [test_is_boxed(X, hipe_rtl:label_name(HalfTrueLab), FalseLab, Pred), HalfTrueLab, get_header(Tmp, X), - mask_and_compare(Tmp, Mask, ?TAG_HEADER_REFC_BIN, TrueLab, FalseLab, Pred)]. + test_bitstr_header(Tmp, TrueLab, FalseLab, Pred)]. test_binary(X, TrueLab, FalseLab, Pred) -> Tmp1 = hipe_rtl:mk_new_reg_gcsafe(), @@ -378,12 +382,10 @@ test_binary(X, TrueLab, FalseLab, Pred) -> IsBoxedLab = hipe_rtl:mk_new_label(), IsBitStrLab = hipe_rtl:mk_new_label(), IsSubBinLab = hipe_rtl:mk_new_label(), - Mask = ?TAG_HEADER_MASK - ?BINARY_XXX_MASK, [test_is_boxed(X, hipe_rtl:label_name(IsBoxedLab), FalseLab, Pred), IsBoxedLab, get_header(Tmp1, X), - mask_and_compare(Tmp1, Mask, ?TAG_HEADER_REFC_BIN, - hipe_rtl:label_name(IsBitStrLab), FalseLab, Pred), + test_bitstr_header(Tmp1, hipe_rtl:label_name(IsBitStrLab), FalseLab, Pred), IsBitStrLab, mask_and_compare(Tmp1, ?TAG_HEADER_MASK, ?TAG_HEADER_SUB_BIN, hipe_rtl:label_name(IsSubBinLab), TrueLab, 0.5), @@ -453,6 +455,10 @@ test_fixnums_1([Arg1, Arg2|Args], Acc) -> Tmp = hipe_rtl:mk_new_reg_gcsafe(), test_fixnums_1([Tmp|Args], [hipe_rtl:mk_alu(Tmp, Arg1, 'and', Arg2)|Acc]). 
+test_two_fixnums(Arg, Arg, FalseLab) -> + TrueLab = hipe_rtl:mk_new_label(), + [test_fixnum(Arg, hipe_rtl:label_name(TrueLab), FalseLab, 0.99), + TrueLab]; test_two_fixnums(Arg1, Arg2, FalseLab) -> TrueLab = hipe_rtl:mk_new_label(), case hipe_rtl:is_imm(Arg1) orelse hipe_rtl:is_imm(Arg2) of @@ -567,8 +573,8 @@ fixnum_andorxor(AluOp, Arg1, Arg2, Res) -> case AluOp of 'xor' -> Tmp = hipe_rtl:mk_new_reg_gcsafe(), - [hipe_rtl:mk_alu(Tmp, Arg1, 'xor', Arg2), % clears tag :-( - hipe_rtl:mk_alu(Res, Tmp, 'or', hipe_rtl:mk_imm(?TAG_IMMED1_SMALL))]; + [hipe_rtl:mk_alu(Tmp, Arg1, 'sub', hipe_rtl:mk_imm(?TAG_IMMED1_SMALL)), + hipe_rtl:mk_alu(Res, Tmp, 'xor', Arg2)]; _ -> hipe_rtl:mk_alu(Res, Arg1, AluOp, Arg2) end. @@ -595,6 +601,21 @@ fixnum_bsl(Arg1, Arg2, Res) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% Test if either of two values are immediate (primary tag IMMED1, 0x3) +test_either_immed(Arg1, Arg2, TrueLab, FalseLab) -> + %% This test assumes primary tag 0x0 is reserved and immed has tag 0x3 + 16#0 = ?TAG_PRIMARY_HEADER, + 16#3 = ?TAG_PRIMARY_IMMED1, + Tmp1 = hipe_rtl:mk_new_reg_gcsafe(), + Tmp2 = hipe_rtl:mk_new_reg_gcsafe(), + [hipe_rtl:mk_alu(Tmp1, Arg1, 'sub', hipe_rtl:mk_imm(1)), + hipe_rtl:mk_alu(Tmp2, Arg2, 'sub', hipe_rtl:mk_imm(1)), + hipe_rtl:mk_alu(Tmp2, Tmp2, 'or', Tmp1), + hipe_rtl:mk_branch(Tmp2, 'and', hipe_rtl:mk_imm(2), eq, + FalseLab, TrueLab, 0.01)]. + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + unsafe_car(Dst, Arg) -> hipe_rtl:mk_load(Dst, Arg, hipe_rtl:mk_imm(-(?TAG_PRIMARY_LIST))). @@ -631,14 +652,13 @@ unsafe_update_element(Tuple, Index, Value) -> % Index is an immediate element(Dst, Index, Tuple, FailLabName, {tuple, A}, IndexInfo) -> FixnumOkLab = hipe_rtl:mk_new_label(), IndexOkLab = hipe_rtl:mk_new_label(), - Ptr = hipe_rtl:mk_new_reg(), % offset from Tuple UIndex = hipe_rtl:mk_new_reg_gcsafe(), Arity = hipe_rtl:mk_imm(A), - InvIndex = hipe_rtl:mk_new_reg_gcsafe(), - Offset = hipe_rtl:mk_new_reg_gcsafe(), case IndexInfo of valid -> %% This is no branch, 1 load and 3 alus = 4 instr + Offset = hipe_rtl:mk_new_reg_gcsafe(), + Ptr = hipe_rtl:mk_new_reg(), % offset from Tuple [untag_fixnum(UIndex, Index), hipe_rtl:mk_alu(Ptr, Tuple, 'sub', hipe_rtl:mk_imm(?TAG_PRIMARY_BOXED)), hipe_rtl:mk_alu(Offset, UIndex, 'sll', @@ -647,72 +667,56 @@ element(Dst, Index, Tuple, FailLabName, {tuple, A}, IndexInfo) -> fixnums -> %% This is 1 branch, 1 load and 4 alus = 6 instr [untag_fixnum(UIndex, Index), - hipe_rtl:mk_alu(Ptr, Tuple, 'sub',hipe_rtl:mk_imm(?TAG_PRIMARY_BOXED))| - gen_element_tail(Dst, Ptr, InvIndex, Arity, Offset, UIndex, - FailLabName, IndexOkLab)]; + gen_element_tail(Dst, Tuple, Arity, UIndex, FailLabName, IndexOkLab)]; _ -> %% This is 3 branches, 1 load and 5 alus = 9 instr [test_fixnum(Index, hipe_rtl:label_name(FixnumOkLab), FailLabName, 0.99), FixnumOkLab, untag_fixnum(UIndex, Index), - hipe_rtl:mk_alu(Ptr, Tuple, 'sub',hipe_rtl:mk_imm(?TAG_PRIMARY_BOXED))| - gen_element_tail(Dst, Ptr, InvIndex, Arity, Offset, UIndex, - FailLabName, IndexOkLab)] + gen_element_tail(Dst, Tuple, Arity, UIndex, FailLabName, IndexOkLab)] end; element(Dst, Index, Tuple, FailLabName, tuple, IndexInfo) -> FixnumOkLab = hipe_rtl:mk_new_label(), IndexOkLab = hipe_rtl:mk_new_label(), - Ptr = hipe_rtl:mk_new_reg(), % offset from Tuple Header = hipe_rtl:mk_new_reg_gcsafe(), UIndex = hipe_rtl:mk_new_reg_gcsafe(), Arity = hipe_rtl:mk_new_reg_gcsafe(), - InvIndex = hipe_rtl:mk_new_reg_gcsafe(), - Offset = hipe_rtl:mk_new_reg_gcsafe(), case 
IndexInfo of fixnums -> %% This is 1 branch, 2 loads and 5 alus = 8 instr - [hipe_rtl:mk_alu(Ptr, Tuple, 'sub', hipe_rtl:mk_imm(?TAG_PRIMARY_BOXED)), - hipe_rtl:mk_load(Header, Ptr, hipe_rtl:mk_imm(0)), + [get_header(Header, Tuple), untag_fixnum(UIndex, Index), hipe_rtl:mk_alu(Arity,Header,'srl',hipe_rtl:mk_imm(?HEADER_ARITY_OFFS))| - gen_element_tail(Dst, Ptr, InvIndex, Arity, Offset, UIndex, - FailLabName, IndexOkLab)]; + gen_element_tail(Dst, Tuple, Arity, UIndex, FailLabName, IndexOkLab)]; Num when is_integer(Num) -> %% This is 1 branch, 1 load and 3 alus = 5 instr - [hipe_rtl:mk_alu(Ptr, Tuple, 'sub', hipe_rtl:mk_imm(?TAG_PRIMARY_BOXED))| - gen_element_tail(Dst, Ptr, InvIndex, hipe_rtl:mk_imm(Num), - Offset, UIndex, FailLabName, IndexOkLab)]; + gen_element_tail(Dst, Tuple, hipe_rtl:mk_imm(Num), UIndex, FailLabName, + IndexOkLab); _ -> %% This is 2 branches, 2 loads and 6 alus = 10 instr [test_fixnum(Index, hipe_rtl:label_name(FixnumOkLab), FailLabName, 0.99), FixnumOkLab, - hipe_rtl:mk_alu(Ptr, Tuple, 'sub', hipe_rtl:mk_imm(?TAG_PRIMARY_BOXED)), - hipe_rtl:mk_load(Header, Ptr, hipe_rtl:mk_imm(0)), + get_header(Header, Tuple), untag_fixnum(UIndex, Index), hipe_rtl:mk_alu(Arity,Header,'srl',hipe_rtl:mk_imm(?HEADER_ARITY_OFFS))| - gen_element_tail(Dst, Ptr, InvIndex, Arity, Offset, UIndex, - FailLabName, IndexOkLab)] + gen_element_tail(Dst, Tuple, Arity, UIndex, FailLabName, IndexOkLab)] end; element(Dst, Index, Tuple, FailLabName, unknown, IndexInfo) -> FixnumOkLab = hipe_rtl:mk_new_label(), BoxedOkLab = hipe_rtl:mk_new_label(), TupleOkLab = hipe_rtl:mk_new_label(), IndexOkLab = hipe_rtl:mk_new_label(), - Ptr = hipe_rtl:mk_new_reg(), % offset from Tuple Header = hipe_rtl:mk_new_reg_gcsafe(), UIndex = hipe_rtl:mk_new_reg_gcsafe(), Arity = hipe_rtl:mk_new_reg_gcsafe(), - InvIndex = hipe_rtl:mk_new_reg_gcsafe(), - Offset = hipe_rtl:mk_new_reg_gcsafe(), case IndexInfo of fixnums -> %% This is 3 branches, 2 loads and 5 alus = 10 instr [test_is_boxed(Tuple, hipe_rtl:label_name(BoxedOkLab), FailLabName, 0.99), BoxedOkLab, - hipe_rtl:mk_alu(Ptr, Tuple, 'sub', hipe_rtl:mk_imm(?TAG_PRIMARY_BOXED)), - hipe_rtl:mk_load(Header, Ptr, hipe_rtl:mk_imm(0)), + get_header(Header, Tuple), hipe_rtl:mk_branch(Header, 'and', hipe_rtl:mk_imm(?TAG_HEADER_MASK), 'eq', hipe_rtl:label_name(TupleOkLab), FailLabName, 0.99), @@ -720,23 +724,21 @@ element(Dst, Index, Tuple, FailLabName, unknown, IndexInfo) -> untag_fixnum(UIndex, Index), hipe_rtl:mk_alu(Arity, Header, 'srl', hipe_rtl:mk_imm(?HEADER_ARITY_OFFS))| - gen_element_tail(Dst, Ptr, InvIndex, Arity, Offset, - UIndex, FailLabName, IndexOkLab)]; + gen_element_tail(Dst, Tuple, Arity, UIndex, FailLabName, IndexOkLab)]; Num when is_integer(Num) -> %% This is 3 branches, 2 loads and 4 alus = 9 instr [test_is_boxed(Tuple, hipe_rtl:label_name(BoxedOkLab), FailLabName, 0.99), BoxedOkLab, - hipe_rtl:mk_alu(Ptr, Tuple, 'sub', hipe_rtl:mk_imm(?TAG_PRIMARY_BOXED)), - hipe_rtl:mk_load(Header, Ptr, hipe_rtl:mk_imm(0)), + get_header(Header, Tuple), hipe_rtl:mk_branch(Header, 'and', hipe_rtl:mk_imm(?TAG_HEADER_MASK), 'eq', hipe_rtl:label_name(TupleOkLab), FailLabName, 0.99), TupleOkLab, hipe_rtl:mk_alu(Arity, Header, 'srl', hipe_rtl:mk_imm(?HEADER_ARITY_OFFS))| - gen_element_tail(Dst, Ptr, InvIndex, Arity, Offset, - hipe_rtl:mk_imm(Num), FailLabName, IndexOkLab)]; + gen_element_tail(Dst, Tuple, Arity, hipe_rtl:mk_imm(Num), FailLabName, + IndexOkLab)]; _ -> %% This is 4 branches, 2 loads, and 6 alus = 12 instr :( [test_fixnum(Index, hipe_rtl:label_name(FixnumOkLab), @@ -745,8 
+747,7 @@ element(Dst, Index, Tuple, FailLabName, unknown, IndexInfo) -> test_is_boxed(Tuple, hipe_rtl:label_name(BoxedOkLab), FailLabName, 0.99), BoxedOkLab, - hipe_rtl:mk_alu(Ptr, Tuple, 'sub', hipe_rtl:mk_imm(?TAG_PRIMARY_BOXED)), - hipe_rtl:mk_load(Header, Ptr, hipe_rtl:mk_imm(0)), + get_header(Header, Tuple), hipe_rtl:mk_branch(Header, 'and', hipe_rtl:mk_imm(?TAG_HEADER_MASK), 'eq', hipe_rtl:label_name(TupleOkLab), FailLabName, 0.99), @@ -754,20 +755,21 @@ element(Dst, Index, Tuple, FailLabName, unknown, IndexInfo) -> untag_fixnum(UIndex, Index), hipe_rtl:mk_alu(Arity, Header, 'srl', hipe_rtl:mk_imm(?HEADER_ARITY_OFFS))| - gen_element_tail(Dst, Ptr, InvIndex, Arity, Offset, - UIndex, FailLabName, IndexOkLab)] + gen_element_tail(Dst, Tuple, Arity, UIndex, FailLabName, IndexOkLab)] end. -gen_element_tail(Dst, Ptr, InvIndex, Arity, Offset, - UIndex, FailLabName, IndexOkLab) -> +gen_element_tail(Dst, Tuple, Arity, UIndex, FailLabName, IndexOkLab) -> + ZeroIndex = hipe_rtl:mk_new_reg_gcsafe(), + Offset = hipe_rtl:mk_new_reg_gcsafe(), + Ptr = hipe_rtl:mk_new_reg(), % offset from Tuple %% now check that 1 <= UIndex <= Arity - %% if UIndex < 1, then (Arity - UIndex) >= Arity - %% if UIndex > Arity, then (Arity - UIndex) < 0, which is >=u Arity - %% otherwise, 0 <= (Arity - UIndex) < Arity - [hipe_rtl:mk_alu(InvIndex, Arity, 'sub', UIndex), - hipe_rtl:mk_branch(InvIndex, 'geu', Arity, FailLabName, + %% by checking the equivalent (except for when Arity>=2^(WordSize-1)) + %% (UIndex - 1) <u Arity + [hipe_rtl:mk_alu(ZeroIndex, UIndex, 'sub', hipe_rtl:mk_imm(1)), + hipe_rtl:mk_branch(ZeroIndex, 'geu', Arity, FailLabName, hipe_rtl:label_name(IndexOkLab), 0.01), IndexOkLab, + hipe_rtl:mk_alu(Ptr, Tuple, 'sub', hipe_rtl:mk_imm(?TAG_PRIMARY_BOXED)), hipe_rtl:mk_alu(Offset, UIndex, 'sll', hipe_rtl:mk_imm(hipe_rtl_arch:log2_word_size())), hipe_rtl:mk_load(Dst, Ptr, Offset)]. diff --git a/lib/hipe/test/basic_SUITE_data/basic_tuples.erl b/lib/hipe/test/basic_SUITE_data/basic_tuples.erl index 94c187e364..96e39d565a 100644 --- a/lib/hipe/test/basic_SUITE_data/basic_tuples.erl +++ b/lib/hipe/test/basic_SUITE_data/basic_tuples.erl @@ -55,6 +55,8 @@ test_element(T0, T1, T2, N) -> List = lists:seq(1, N), Tuple = list_to_tuple(List), ok = get_elements(List, Tuple, 1), + %% element/2 of larger tuple with omitted bounds test + true = lists:all(fun(I) -> I * I =:= square(I) end, lists:seq(1, 20)), %% some cases that throw exceptions {'EXIT', _} = (catch my_element(0, T2)), {'EXIT', _} = (catch my_element(3, T2)), @@ -73,6 +75,18 @@ get_elements([Element|Rest], Tuple, Pos) -> get_elements([], _Tuple, _Pos) -> ok. +squares() -> + {1*1, 2*2, 3*3, 4*4, 5*5, 6*6, 7*7, 8*8, 9*9, 10*10, + 11*11, 12*12, 13*13, 14*14, 15*15, 16*16, 17*17, 18*18, 19*19, 20*20}. + +square(N) when is_integer(N), N >= 1, N =< 20 -> + %% The guard tests lets the range analysis conclude N to be an integer in the + %% 1..20 range. 20-1=19 is bigger than ?SET_LIMIT in erl_types.erl, and will + %% thus be represented by an ?int_range() rather than an ?int_set(). + %% Because of the range analysis, the bounds test of this element/2 call + %% should be omitted. + element(N, squares()). + %%-------------------------------------------------------------------- %% Tests set_element/3. 
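The bounds check in gen_element_tail/6 above folds the two comparisons 1 =< Index and Index =< Arity into a single unsigned comparison, (Index - 1) <u Arity. A minimal sketch of the same idea in plain Erlang, assuming a 64-bit word and a hypothetical module name (Erlang integers do not wrap, so the native subtraction is emulated with a mask):

    -module(bounds_sketch).
    -export([in_range/2]).

    %% Equivalent to (1 =< Index andalso Index =< Arity) whenever
    %% Arity < 1 bsl 63, matching the 'geu' branch to FailLabName above.
    -define(WORD_MASK, 16#FFFFFFFFFFFFFFFF).

    in_range(Index, Arity) when is_integer(Index), is_integer(Arity) ->
        ((Index - 1) band ?WORD_MASK) < Arity.

For example, in_range(0, 3) and in_range(4, 3) are false while in_range(1, 3) and in_range(3, 3) are true, so out-of-range indices still reach the fail label with a single branch instead of two.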
diff --git a/lib/hipe/util/hipe_vectors.erl b/lib/hipe/util/hipe_vectors.erl index fc4e4edb24..788dacd11b 100644 --- a/lib/hipe/util/hipe_vectors.erl +++ b/lib/hipe/util/hipe_vectors.erl @@ -116,8 +116,7 @@ get(Vec, Ix) -> %% --------------------------------------------------------------------- -ifdef(USE_ARRAYS). -%%-opaque vector(E) :: array:array(E). --type vector(E) :: array:array(E). % Work around dialyzer bug +-opaque vector(E) :: array:array(E). new(N, V) -> array:new(N, {default, V}). size(V) -> array:size(V). diff --git a/lib/hipe/vsn.mk b/lib/hipe/vsn.mk index cb4174381a..172d976931 100644 --- a/lib/hipe/vsn.mk +++ b/lib/hipe/vsn.mk @@ -1 +1 @@ -HIPE_VSN = 3.15.3 +HIPE_VSN = 3.15.4 diff --git a/lib/hipe/x86/hipe_rtl_to_x86.erl b/lib/hipe/x86/hipe_rtl_to_x86.erl index 29cad6ca51..31e4f6e4ac 100644 --- a/lib/hipe/x86/hipe_rtl_to_x86.erl +++ b/lib/hipe/x86/hipe_rtl_to_x86.erl @@ -124,7 +124,6 @@ conv_insn(I, Map, Data) -> hipe_rtl:call_continuation(I), hipe_rtl:call_fail(I), hipe_rtl:call_type(I)), - %% XXX Fixme: this ++ is probably inefficient. {FixArgs++I2, Map2, Data}; #comment{} -> I2 = [hipe_x86:mk_comment(hipe_rtl:comment_text(I))], diff --git a/lib/hipe/x86/hipe_x86_assemble.erl b/lib/hipe/x86/hipe_x86_assemble.erl index ef9c32ef41..fb0beba293 100644 --- a/lib/hipe/x86/hipe_x86_assemble.erl +++ b/lib/hipe/x86/hipe_x86_assemble.erl @@ -148,6 +148,8 @@ insn_size(I) -> translate_insn(I, Context, Options) -> case I of + #alu{aluop='xor', src=#x86_temp{reg=Reg}=Src, dst=#x86_temp{reg=Reg}=Dst} -> + [{'xor', {temp_to_reg32(Dst), temp_to_rm32(Src)}, I}]; #alu{} -> Arg = resolve_alu_args(hipe_x86:alu_src(I), hipe_x86:alu_dst(I), Context), [{hipe_x86:alu_op(I), Arg, I}]; @@ -228,11 +230,11 @@ translate_insn(I, Context, Options) -> #move64{} -> translate_move64(I, Context); #movsx{} -> - Arg = resolve_movx_args(hipe_x86:movsx_src(I), hipe_x86:movsx_dst(I)), - [{movsx, Arg, I}]; + Src = resolve_movx_src(hipe_x86:movsx_src(I)), + [{movsx, {temp_to_regArch(hipe_x86:movsx_dst(I)), Src}, I}]; #movzx{} -> - Arg = resolve_movx_args(hipe_x86:movzx_src(I), hipe_x86:movzx_dst(I)), - [{movzx, Arg, I}]; + Src = resolve_movx_src(hipe_x86:movzx_src(I)), + [{movzx, {temp_to_reg32(hipe_x86:movzx_dst(I)), Src}, I}]; %% pseudo_call: eliminated before assembly %% pseudo_jcc: eliminated before assembly %% pseudo_tailcall: eliminated before assembly @@ -845,16 +847,15 @@ translate_move64(I, _Context) -> exit({?MODULE, I}). -endif. %%% mov{s,z}x -resolve_movx_args(Src=#x86_mem{type=Type}, Dst=#x86_temp{}) -> - {temp_to_regArch(Dst), - case Type of - byte -> - mem_to_rm8(Src); - int16 -> - mem_to_rm16(Src); - int32 -> - mem_to_rm32(Src) - end}. +resolve_movx_src(Src=#x86_mem{type=Type}) -> + case Type of + byte -> + mem_to_rm8(Src); + int16 -> + mem_to_rm16(Src); + int32 -> + mem_to_rm32(Src) + end. %%% alu/cmp (_not_ test) resolve_alu_args(Src, Dst, Context) -> diff --git a/lib/hipe/x86/hipe_x86_postpass.erl b/lib/hipe/x86/hipe_x86_postpass.erl index b84e9bed91..925054dd68 100644 --- a/lib/hipe/x86/hipe_x86_postpass.erl +++ b/lib/hipe/x86/hipe_x86_postpass.erl @@ -57,9 +57,10 @@ postpass(#defun{code=Code0}=Defun, Options) -> peephole_optimization(Insns) -> peep(Insns, [], []). 
-%% MoveSelf related peep-opts + +%% MoveSelf related peep-opts %% ------------------------------ -peep([#fmove{src=Src, dst=Src} | Insns], Res,Lst) -> +peep([#fmove{src=Src, dst=Src} | Insns], Res,Lst) -> peep(Insns, Res, [moveSelf1|Lst]); peep([I=#fmove{src=Src, dst=Dst}, #fmove{src=Dst, dst=Src} | Insns], Res,Lst) -> @@ -159,8 +160,7 @@ peep([#jcc{label=Lab}, I=#label{label=Lab}|Insns], Res, Lst) -> %% ElimSet0 %% -------- -peep([#move{src=#x86_imm{value=0},dst=Dst}|Insns],Res,Lst) -when (Dst==#x86_temp{}) -> +peep([#move{src=#x86_imm{value=0},dst=Dst=#x86_temp{}}|Insns],Res,Lst) -> peep(Insns, [#alu{aluop='xor', src=Dst, dst=Dst}|Res], [elimSet0|Lst]); %% ElimMDPow2 diff --git a/lib/inets/doc/src/notes.xml b/lib/inets/doc/src/notes.xml index 398fc7e5b6..2ed02e021e 100644 --- a/lib/inets/doc/src/notes.xml +++ b/lib/inets/doc/src/notes.xml @@ -33,7 +33,60 @@ <file>notes.xml</file> </header> - <section><title>Inets 6.3.4</title> + <section><title>Inets 6.3.6</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Chunk size decoding could fail. The symptom was that + chunk decoding sometimes failed depending on timing of + the received stream. If chunk size was split into two + different packets decoding would fail.</p> + <p> + Own Id: OTP-13571 Aux Id: ERL-116 </p> + </item> + <item> + <p> + Prevent httpc user process to hang if httpc_handler + process terminates unexpectedly</p> + <p> + Own Id: OTP-14091</p> + </item> + <item> + <p> + Correct Host header, to include port number, when + redirecting requests.</p> + <p> + Own Id: OTP-14097</p> + </item> + <item> + <p> + Shutdown gracefully on connection or TLS handshake errors</p> + <p> + Own Id: OTP-14173 Aux Id: seq13262 </p> + </item> + </list> + </section> + +</section> + +<section><title>Inets 6.3.5</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Correct misstakes in ftp client introduced in inets-6.3.4</p> + <p> + Own Id: OTP-14203 Aux Id: OTP-13982 </p> + </item> + </list> + </section> + +</section> + +<section><title>Inets 6.3.4</title> <section><title>Fixed Bugs and Malfunctions</title> <list> @@ -698,7 +751,7 @@ <list> <item> <p> - Gracefully handle invalid content-lenght headers instead + Gracefully handle invalid content-length headers instead of crashing in list_to_integer.</p> <p> Own Id: OTP-12429</p> diff --git a/lib/inets/src/ftp/ftp.erl b/lib/inets/src/ftp/ftp.erl index 911f5b71a7..23d6483291 100644 --- a/lib/inets/src/ftp/ftp.erl +++ b/lib/inets/src/ftp/ftp.erl @@ -1477,10 +1477,7 @@ handle_info({Trpt, Socket, Data}, #state{dsock = {Trpt,Socket}} = State0) when T handle_info({Cls, Socket}, #state{dsock = {Trpt,Socket}, caller = {recv_file, Fd}} = State) when {Cls,Trpt}=={tcp_closed,tcp} ; {Cls,Trpt}=={ssl_closed,ssl} -> - case file_close(Fd) of - ok -> ok; - {error,einval} -> ok - end, + file_close(Fd), progress_report({transfer_size, 0}, State), activate_ctrl_connection(State), {noreply, State#state{dsock = undefined, data = <<>>}}; @@ -2066,10 +2063,7 @@ handle_ctrl_result({pos_prel, _}, #state{caller = {recv_file, _}} = State0) -> end; handle_ctrl_result({Status, _}, #state{caller = {recv_file, Fd}} = State) -> - case file_close(Fd) of - ok -> ok; - {error, einval} -> ok - end, + file_close(Fd), close_data_connection(State), ctrl_result_response(Status, State#state{dsock = undefined}, {error, epath}); @@ -2345,7 +2339,7 @@ accept_data_connection(#state{mode = passive} = State) -> send_ctrl_message(_S=#state{csock = Socket, verbose = Verbose}, 
Message) -> verbose(lists:flatten(Message),Verbose,send), ?DBG('<--ctrl ~p ---- ~s~p~n',[Socket,Message,_S]), - ok = send_message(Socket, Message). + _ = send_message(Socket, Message). send_data_message(_S=#state{dsock = Socket}, Message) -> ?DBG('<==data ~p ==== ~s~n~p~n',[Socket,Message,_S]), @@ -2366,37 +2360,44 @@ send_message({tcp, Socket}, Message) -> send_message({ssl, Socket}, Message) -> ssl:send(Socket, Message). -activate_ctrl_connection(#state{csock = Socket, ctrl_data = {<<>>, _, _}}) -> - ok = activate_connection(Socket); -activate_ctrl_connection(#state{csock = Socket}) -> - ok = activate_connection(Socket), +activate_ctrl_connection(#state{csock = CSock, ctrl_data = {<<>>, _, _}}) -> + activate_connection(CSock); +activate_ctrl_connection(#state{csock = CSock}) -> + activate_connection(CSock), %% We have already received at least part of the next control message, %% that has been saved in ctrl_data, process this first. - self() ! {socket_type(Socket), unwrap_socket(Socket), <<>>}, + self() ! {socket_type(CSock), unwrap_socket(CSock), <<>>}, ok. +activate_data_connection(#state{dsock = DSock} = State) -> + activate_connection(DSock), + State. + +activate_connection(Socket) -> + ignore_return_value( + case socket_type(Socket) of + tcp -> inet:setopts(unwrap_socket(Socket), [{active, once}]); + ssl -> ssl:setopts(unwrap_socket(Socket), [{active, once}]) + end). + + +ignore_return_value(_) -> ok. + unwrap_socket({tcp,Socket}) -> Socket; unwrap_socket({ssl,Socket}) -> Socket. socket_type({tcp,_Socket}) -> tcp; socket_type({ssl,_Socket}) -> ssl. -activate_data_connection(#state{dsock = Socket} = State) -> - ok = activate_connection(Socket), - State. - -activate_connection({tcp, Socket}) -> inet:setopts(Socket, [{active, once}]); -activate_connection({ssl, Socket}) -> ssl:setopts(Socket, [{active, once}]). - close_ctrl_connection(#state{csock = undefined}) -> ok; close_ctrl_connection(#state{csock = Socket}) -> close_connection(Socket). close_data_connection(#state{dsock = undefined}) -> ok; close_data_connection(#state{dsock = Socket}) -> close_connection(Socket). -close_connection({lsock,Socket}) -> gen_tcp:close(Socket); -close_connection({tcp, Socket}) -> gen_tcp:close(Socket); -close_connection({ssl, Socket}) -> ssl:close(Socket). +close_connection({lsock,Socket}) -> ignore_return_value( gen_tcp:close(Socket) ); +close_connection({tcp, Socket}) -> ignore_return_value( gen_tcp:close(Socket) ); +close_connection({ssl, Socket}) -> ignore_return_value( ssl:close(Socket) ). %% ------------ FILE HANDLING ---------------------------------------- send_file(#state{tls_upgrading_data_connection = {true, CTRL, _}} = State, Fd) -> @@ -2408,7 +2409,7 @@ send_file(State, Fd) -> progress_report({binary, Bin}, State), send_file(State, Fd); {ok, _, _} -> - ok = file_close(Fd), + file_close(Fd), close_data_connection(State), progress_report({transfer_size, 0}, State), activate_ctrl_connection(State), @@ -2423,7 +2424,7 @@ file_open(File, Option) -> file:open(File, [raw, binary, Option]). file_close(Fd) -> - file:close(Fd). + ignore_return_value( file:close(Fd) ). 
file_read(Fd) -> case file:read(Fd, ?FILE_BUFSIZE) of diff --git a/lib/inets/src/http_client/httpc_response.erl b/lib/inets/src/http_client/httpc_response.erl index 0fd5faa466..d24705a845 100644 --- a/lib/inets/src/http_client/httpc_response.erl +++ b/lib/inets/src/http_client/httpc_response.erl @@ -434,7 +434,7 @@ format_response({StatusLine, Headers, Body}) -> Length = list_to_integer(Headers#http_response_h.'content-length'), {NewBody, Data} = case Length of - -1 -> % When no lenght indicator is provided + -1 -> % When no length indicator is provided {Body, <<>>}; Length when (Length =< size(Body)) -> <<BodyThisReq:Length/binary, Next/binary>> = Body, diff --git a/lib/inets/src/http_server/httpd_request_handler.erl b/lib/inets/src/http_server/httpd_request_handler.erl index 7e20a9ba67..82273c8c74 100644 --- a/lib/inets/src/http_server/httpd_request_handler.erl +++ b/lib/inets/src/http_server/httpd_request_handler.erl @@ -241,9 +241,9 @@ handle_info({tcp_closed, _}, State) -> handle_info({ssl_closed, _}, State) -> {stop, normal, State}; handle_info({tcp_error, _, _} = Reason, State) -> - {stop, Reason, State}; + {stop, {shutdown, Reason}, State}; handle_info({ssl_error, _, _} = Reason, State) -> - {stop, Reason, State}; + {stop, {shutdown, Reason}, State}; %% Timeouts handle_info(timeout, #state{mfa = {_, parse, _}} = State) -> diff --git a/lib/inets/src/inets_app/inets.appup.src b/lib/inets/src/inets_app/inets.appup.src index 3a31daeb20..d28d4cd766 100644 --- a/lib/inets/src/inets_app/inets.appup.src +++ b/lib/inets/src/inets_app/inets.appup.src @@ -18,10 +18,14 @@ %% %CopyrightEnd% {"%VSN%", [ + {<<"6.2.4">>, [{load_module, httpd_request_handler, + soft_purge, soft_purge, []}]}, {<<"6\\..*">>,[{restart_application, inets}]}, {<<"5\\..*">>,[{restart_application, inets}]} ], [ + {<<"6.2.4">>, [{load_module, httpd_request_handler, + soft_purge, soft_purge, []}]}, {<<"6\\..*">>,[{restart_application, inets}]}, {<<"5\\..*">>,[{restart_application, inets}]} ] diff --git a/lib/inets/test/httpd_1_1.erl b/lib/inets/test/httpd_1_1.erl index 3755ed117b..2b5968ca12 100644 --- a/lib/inets/test/httpd_1_1.erl +++ b/lib/inets/test/httpd_1_1.erl @@ -405,11 +405,11 @@ getRangeSize(Head)-> {multiPart, BoundaryString}; _X1 -> case re:run(Head, ?CONTENT_RANGE "bytes=.*\r\n", [{capture, first}]) of - {match, [{Start, Lenght}]} -> + {match, [{Start, Length}]} -> %% Get the range data remove the fieldname and the %% end of line. RangeInfo = string:substr(Head, Start + 1 + 20, - Lenght - (20 +2)), + Length - (20 +2)), rangeSize(string:strip(RangeInfo)); _X2 -> error diff --git a/lib/inets/test/httpd_test_data/server_root/conf/httpd.conf b/lib/inets/test/httpd_test_data/server_root/conf/httpd.conf index 3f9fde03b5..ec05fc6714 100644 --- a/lib/inets/test/httpd_test_data/server_root/conf/httpd.conf +++ b/lib/inets/test/httpd_test_data/server_root/conf/httpd.conf @@ -128,7 +128,7 @@ SecurityDiskLogSize 200000 10 MaxClients 50 -# KeepAlive set the flag for persistent connections. For peristent connections +# KeepAlive set the flag for persistent connections. For persistent connections # set KeepAlive to on. To use One request per connection set the flag to off # Note: The value has changed since previous version of INETS. 
KeepAlive on diff --git a/lib/inets/test/old_httpd_SUITE_data/server_root/conf/httpd.conf b/lib/inets/test/old_httpd_SUITE_data/server_root/conf/httpd.conf index 3f9fde03b5..ec05fc6714 100644 --- a/lib/inets/test/old_httpd_SUITE_data/server_root/conf/httpd.conf +++ b/lib/inets/test/old_httpd_SUITE_data/server_root/conf/httpd.conf @@ -128,7 +128,7 @@ SecurityDiskLogSize 200000 10 MaxClients 50 -# KeepAlive set the flag for persistent connections. For peristent connections +# KeepAlive set the flag for persistent connections. For persistent connections # set KeepAlive to on. To use One request per connection set the flag to off # Note: The value has changed since previous version of INETS. KeepAlive on diff --git a/lib/inets/vsn.mk b/lib/inets/vsn.mk index eef5abd610..5637170c15 100644 --- a/lib/inets/vsn.mk +++ b/lib/inets/vsn.mk @@ -19,6 +19,6 @@ # %CopyrightEnd% APPLICATION = inets -INETS_VSN = 6.3.4 +INETS_VSN = 6.3.6 PRE_VSN = APP_VSN = "$(APPLICATION)-$(INETS_VSN)$(PRE_VSN)" diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java index 7891871e76..b9b4223155 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java @@ -30,7 +30,7 @@ import java.util.Random; * received from the peer. * * <p> - * This abstract class provides the neccesary methods to maintain the actual + * This abstract class provides the necessary methods to maintain the actual * connection and encode the messages and headers in the proper format according * to the Erlang distribution protocol. Subclasses can use these methods to * provide a more or less transparent communication channel as desired. diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMbox.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMbox.java index 70c9e6db4a..bd3a3f4ad3 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMbox.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMbox.java @@ -38,7 +38,7 @@ package com.ericsson.otp.erlang; * <p> * Mailboxes can be named, either at creation or later. Messages can be sent to * named mailboxes and named Erlang processes without knowing the - * {@link OtpErlangPid pid} that identifies the mailbox. This is neccessary in + * {@link OtpErlangPid pid} that identifies the mailbox. This is necessary in * order to set up initial communication between parts of an application. Each * mailbox can have at most one name. * </p> diff --git a/lib/kernel/doc/src/kernel_app.xml b/lib/kernel/doc/src/kernel_app.xml index df681a505f..b342fff0d3 100644 --- a/lib/kernel/doc/src/kernel_app.xml +++ b/lib/kernel/doc/src/kernel_app.xml @@ -11,7 +11,7 @@ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at - + http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software @@ -19,7 +19,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
- + </legalnotice> <title>kernel</title> @@ -58,6 +58,60 @@ </section> <section> + <title>OS Signal Event Handler</title> + <p>Asynchronous OS signals may be subscribed to via the Kernel applications event manager + (see <seealso marker="doc/design_principles:des_princ">OTP Design Principles</seealso> and + <seealso marker="stdlib:gen_event"><c>gen_event(3)</c></seealso>) registered as <c>erl_signal_server</c>. + A default signal handler is installed which handles the following signals:</p> + <taglist> + <tag><c>sigusr1</c></tag> + <item><p>The default handler will halt Erlang and produce a crashdump + with slogan "Received SIGUSR1". + This is equivalent to calling <c>erlang:halt("Received SIGUSR1")</c>. + </p></item> + + <tag><c>sigquit</c></tag> + <item><p>The default handler will halt Erlang immediately. + This is equivalent to calling <c>erlang:halt()</c>. + </p></item> + + <tag><c>sigterm</c></tag> + <item><p>The default handler will terminate Erlang normally. + This is equivalent to calling <c>init:stop()</c>. + </p></item> + </taglist> + + <section> + <title>Events</title> + <p>Any event handler added to <c>erl_signal_server</c> must handle the following events.</p> + <taglist> + <tag><c>sighup</c></tag> + <item><p>Hangup detected on controlling terminal or death of controlling process</p></item> + <tag><c>sigquit</c></tag> + <item><p>Quit from keyboard</p></item> + <tag><c>sigabrt</c></tag> + <item><p>Abort signal from abort</p></item> + <tag><c>sigalrm</c></tag> + <item><p>Timer signal from alarm</p></item> + <tag><c>sigterm</c></tag> + <item><p>Termination signal</p></item> + <tag><c>sigusr1</c></tag> + <item><p>User-defined signal 1</p></item> + <tag><c>sigusr2</c></tag> + <item><p>User-defined signal 2</p></item> + <tag><c>sigchld</c></tag> + <item><p>Child process stopped or terminated</p></item> + <tag><c>sigstop</c></tag> + <item><p>Stop process</p></item> + <tag><c>sigtstp</c></tag> + <item><p>Stop typed at terminal</p></item> + </taglist> + + <p>Setting OS signals are described in <seealso marker="os#set_signal/2"><c>os:set_signal/2</c></seealso>.</p> + </section> + </section> + + <section> <title>Configuration</title> <p>The following configuration parameters are defined for the Kernel application. For more information about configuration parameters, @@ -379,6 +433,28 @@ MaxT = TickTime + TickTime / 4</code> return as soon as possible for <c>application_controller</c> to terminate properly.</p> </item> + <tag><c>source_search_rules = [DirRule] | [SuffixRule] </c></tag> + <item> + <marker id="source_search_rules"></marker> + <p>Where:</p> + <list type="bulleted"> + <item><c>DirRule = {ObjDirSuffix,SrcDirSuffix}</c></item> + <item><c>SuffixRule = {ObjSuffix,SrcSuffix,[DirRule]}</c></item> + <item><c>ObjDirSuffix = string()</c></item> + <item><c>SrcDirSuffix = string()</c></item> + <item><c>ObjSuffix = string()</c></item> + <item><c>SrcSuffix = string()</c></item> + </list> + <p>Specifies a list of rules for use by <c>filelib:find_file/2</c> and + <c>filelib:find_source/2</c>. If this is set to some other value + than the empty list, it replaces the default rules. Rules can be + simple pairs of directory suffixes, such as <c>{"ebin", + "src"}</c>, which are used by <c>filelib:find_file/2</c>, or + triples specifying separate directory suffix rules depending on + file name extensions, for example <c>[{".beam", ".erl", [{"ebin", + "src"}]}</c>, which are used by <c>filelib:find_source/2</c>. 
Both + kinds of rules can be mixed in the list.</p> + </item> </taglist> </section> @@ -405,4 +481,3 @@ MaxT = TickTime + TickTime / 4</code> <seealso marker="stdlib:timer"><c>timer(3)</c></seealso></p> </section> </appref> - diff --git a/lib/kernel/doc/src/notes.xml b/lib/kernel/doc/src/notes.xml index 9277c2d353..ad349c5aaf 100644 --- a/lib/kernel/doc/src/notes.xml +++ b/lib/kernel/doc/src/notes.xml @@ -31,6 +31,45 @@ </header> <p>This document describes the changes made to the Kernel application.</p> +<section><title>Kernel 5.2</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Fix a race during cleanup of os:cmd that would cause + os:cmd to hang indefinitely.</p> + <p> + Own Id: OTP-14232 Aux Id: seq13275 </p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p>The functions in the '<c>file</c>' module that take a + list of paths (e.g. <c>file:path_consult/2</c>) will now + continue to search in the path if the path contains + something that is not a directory.</p> + <p> + Own Id: OTP-14191</p> + </item> + <item> + <p>Two OTP processes that are known to receive many + messages are 'rex' (used by 'rpc') and 'error_logger'. + Those processes will now store unprocessed messages + outside the process heap, which will potentially decrease + the cost of garbage collections.</p> + <p> + Own Id: OTP-14192</p> + </item> + </list> + </section> + +</section> + <section><title>Kernel 5.1.1</title> <section><title>Fixed Bugs and Malfunctions</title> @@ -108,7 +147,7 @@ <item> <p> Close stdin of commands run in os:cmd. This is a - backwards compatiblity fix that restores the behaviour of + backwards compatibility fix that restores the behaviour of pre 19.0 os:cmd.</p> <p> Own Id: OTP-13867 Aux Id: seq13178 </p> @@ -1445,7 +1484,7 @@ dependent, so applications aiming to be portable should consider using <c>{ipv6_v6only,true}</c> when creating an <c>inet6</c> listening/destination socket, and if - neccesary also create an <c>inet</c> socket on the same + necessary also create an <c>inet</c> socket on the same port for IPv4 traffic. See the documentation.</p> <p> Own Id: OTP-8928 Aux Id: kunagi-193 [104] </p> diff --git a/lib/kernel/doc/src/os.xml b/lib/kernel/doc/src/os.xml index 739ac35d2a..6ba69d12a3 100644 --- a/lib/kernel/doc/src/os.xml +++ b/lib/kernel/doc/src/os.xml @@ -11,7 +11,7 @@ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at - + http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software @@ -19,7 +19,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. - + </legalnotice> <title>os</title> @@ -156,6 +156,32 @@ DirOut = os:cmd("dir"), % on Win32 platform</code> </func> <func> + <name name="set_signal" arity="2"/> + <fsummary>Enables or disables handling of OS signals.</fsummary> + <desc> + <p>Enables or disables OS signals.</p> + <p>Each signal my be set to one of the following options:</p> + <taglist> + <tag><c>ignore</c></tag> + <item> + This signal will be ignored. + </item> + + <tag><c>default</c></tag> + <item> + This signal will use the default signal handler for the operating system. 
+ </item> + + <tag><c>handle</c></tag> + <item> + This signal will notify <c>erl_signal_server</c> when it is received by + the Erlang runtime system. + </item> + </taglist> + </desc> + </func> + + <func> <name name="system_time" arity="0"/> <fsummary>Current OS system time.</fsummary> <desc> @@ -296,4 +322,3 @@ calendar:now_to_universal_time(TS), </func> </funcs> </erlref> - diff --git a/lib/kernel/include/inet.hrl b/lib/kernel/include/inet.hrl index b39df8c3f2..df788aca08 100644 --- a/lib/kernel/include/inet.hrl +++ b/lib/kernel/include/inet.hrl @@ -22,7 +22,7 @@ -record(hostent, { - h_name :: inet:hostname(), %% offical name of host + h_name :: inet:hostname(), %% official name of host h_aliases = [] :: [inet:hostname()], %% alias list h_addrtype :: 'inet' | 'inet6', %% host address type h_length :: non_neg_integer(), %% length of address diff --git a/lib/kernel/src/Makefile b/lib/kernel/src/Makefile index 2b72f78dcf..2a89faaf13 100644 --- a/lib/kernel/src/Makefile +++ b/lib/kernel/src/Makefile @@ -71,6 +71,7 @@ MODULES = \ erl_distribution \ erl_epmd \ erl_reply \ + erl_signal_handler \ erts_debug \ error_handler \ error_logger \ diff --git a/lib/kernel/src/application_controller.erl b/lib/kernel/src/application_controller.erl index 0e61153613..3b642f5873 100644 --- a/lib/kernel/src/application_controller.erl +++ b/lib/kernel/src/application_controller.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2016. All Rights Reserved. +%% Copyright Ericsson AB 1996-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -1620,7 +1620,7 @@ conv(_) -> []. make_term(Str) -> case erl_scan:string(Str) of {ok, Tokens, _} -> - case erl_parse:parse_term(Tokens ++ [{dot, 1}]) of + case erl_parse:parse_term(Tokens ++ [{dot, erl_anno:new(1)}]) of {ok, Term} -> Term; {error, {_,M,Reason}} -> diff --git a/lib/kernel/src/code.erl b/lib/kernel/src/code.erl index 5a7ca493cc..2a06d0cb15 100644 --- a/lib/kernel/src/code.erl +++ b/lib/kernel/src/code.erl @@ -489,13 +489,13 @@ prepare_check_uniq_1([], [_|_]=Errors) -> {error,Errors}. partition_on_load(Prep) -> - P = fun({_,{Bin,_,_}}) -> - erlang:has_prepared_code_on_load(Bin) + P = fun({_,{PC,_,_}}) -> + erlang:has_prepared_code_on_load(PC) end, lists:partition(P, Prep). verify_prepared([{M,{Prep,Name,_Native}}|T]) - when is_atom(M), is_binary(Prep), is_list(Name) -> + when is_atom(M), is_list(Name) -> try erlang:has_prepared_code_on_load(Prep) of false -> verify_prepared(T); @@ -562,10 +562,10 @@ prepare_loading_fun() -> GetNative = get_native_fun(), fun(Mod, FullName, Beam) -> case erlang:prepare_loading(Mod, Beam) of - Prepared when is_binary(Prepared) -> - {ok,{Prepared,FullName,GetNative(Beam)}}; {error,_}=Error -> - Error + Error; + Prepared -> + {ok,{Prepared,FullName,GetNative(Beam)}} end end. diff --git a/lib/kernel/src/dist_ac.erl b/lib/kernel/src/dist_ac.erl index 6c2fa0b6b1..e63c969b79 100644 --- a/lib/kernel/src/dist_ac.erl +++ b/lib/kernel/src/dist_ac.erl @@ -123,7 +123,7 @@ load_application(AppName, DistNodes) -> gen_server:call(?DIST_AC, {load_application, AppName, DistNodes}, infinity). takeover_application(AppName, RestartType) -> - case validRestartType(RestartType) of + case valid_restart_type(RestartType) of true -> wait_for_sync_dacs(), Nodes = get_nodes(AppName), @@ -1514,10 +1514,10 @@ dist_del_node(Appls, Node) -> Appl#appl{run = NRun} end, Appls). 
-validRestartType(permanent) -> true; -validRestartType(temporary) -> true; -validRestartType(transient) -> true; -validRestartType(_RestartType) -> false. +valid_restart_type(permanent) -> true; +valid_restart_type(temporary) -> true; +valid_restart_type(transient) -> true; +valid_restart_type(_RestartType) -> false. dist_mismatch(AppName, Node) -> error_msg("Distribution mismatch for application \"~p\" on nodes ~p and ~p~n", diff --git a/lib/kernel/src/erl_signal_handler.erl b/lib/kernel/src/erl_signal_handler.erl new file mode 100644 index 0000000000..8f924d2adc --- /dev/null +++ b/lib/kernel/src/erl_signal_handler.erl @@ -0,0 +1,57 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 1996-2013. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +-module(erl_signal_handler). +-behaviour(gen_event). +-export([init/1, format_status/2, + handle_event/2, handle_call/2, handle_info/2, + terminate/2, code_change/3]). + +-record(state,{}). + +init(_Args) -> + {ok, #state{}}. + +handle_event(sigusr1, S) -> + erlang:halt("Received SIGUSR1"), + {ok, S}; +handle_event(sigquit, S) -> + erlang:halt(), + {ok, S}; +handle_event(sigterm, S) -> + error_logger:info_msg("SIGTERM received - shutting down~n"), + ok = init:stop(), + {ok, S}; +handle_event(_SignalMsg, S) -> + {ok, S}. + +handle_info(_Info, S) -> + {ok, S}. + +handle_call(_Request, S) -> + {ok, ok, S}. + +format_status(_Opt, [_Pdict,_S]) -> + ok. + +code_change(_OldVsn, S, _Extra) -> + {ok, S}. + +terminate(_Args, _S) -> + ok. diff --git a/lib/kernel/src/error_logger.erl b/lib/kernel/src/error_logger.erl index 3523f680a3..3ee8e2c6e6 100644 --- a/lib/kernel/src/error_logger.erl +++ b/lib/kernel/src/error_logger.erl @@ -360,8 +360,12 @@ init(Max) when is_integer(Max) -> %% go back. init({go_back, _PostState}) -> {ok, {?buffer_size, 0, []}}; -init(_) -> %% Start and just relay to other - {ok, []}. %% node if node(GLeader) =/= node(). +init(_) -> + %% The error logger process may receive a huge amount of + %% messages. Make sure that they are stored off heap to + %% avoid exessive GCs. + process_flag(message_queue_data, off_heap), + {ok, []}. -spec handle_event(term(), state()) -> {'ok', state()}. 
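The erl_signal_handler module added above is the default handler that the kernel application installs on the erl_signal_server event manager. A minimal sketch of a custom handler, using the hypothetical module name my_signal_handler; erl_signal_server, os:set_signal/2 and the gen_event callbacks are the ones documented and added in this change:

    -module(my_signal_handler).
    -behaviour(gen_event).
    -export([init/1, handle_event/2, handle_call/2, handle_info/2,
             terminate/2, code_change/3]).

    init(_Args) -> {ok, []}.

    %% React to SIGHUP (delivered as the event 'sighup') instead of
    %% leaving it to the operating system default.
    handle_event(sighup, State) ->
        error_logger:info_msg("SIGHUP received - reloading configuration~n"),
        {ok, State};
    handle_event(_Signal, State) ->
        {ok, State}.

    handle_call(_Request, State) -> {ok, ok, State}.
    handle_info(_Info, State) -> {ok, State}.
    terminate(_Args, _State) -> ok.
    code_change(_OldVsn, State, _Extra) -> {ok, State}.

It would be enabled with os:set_signal(sighup, handle) followed by gen_event:add_handler(erl_signal_server, my_signal_handler, []), mirroring how kernel.erl installs erl_signal_handler further down in this change.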
diff --git a/lib/kernel/src/file.erl b/lib/kernel/src/file.erl index 1971df9038..79e72cdc6d 100644 --- a/lib/kernel/src/file.erl +++ b/lib/kernel/src/file.erl @@ -1424,7 +1424,7 @@ path_open_first([Path|Rest], Name, Mode, LastError) -> case open(FileName, Mode) of {ok, Fd} -> {ok, Fd, FileName}; - {error, enoent} -> + {error, Reason} when Reason =:= enoent; Reason =:= enotdir -> path_open_first(Rest, Name, Mode, LastError); Error -> Error diff --git a/lib/kernel/src/inet_parse.erl b/lib/kernel/src/inet_parse.erl index b0a3ee3008..9b47199e08 100644 --- a/lib/kernel/src/inet_parse.erl +++ b/lib/kernel/src/inet_parse.erl @@ -701,8 +701,8 @@ dup(N, E, L) when is_integer(N), N >= 1 -> -%% Convert IPv4 adress to ascii -%% Convert IPv6 / IPV4 adress to ascii (plain format) +%% Convert IPv4 address to ascii +%% Convert IPv6 / IPV4 address to ascii (plain format) ntoa({A,B,C,D}) -> integer_to_list(A) ++ "." ++ integer_to_list(B) ++ "." ++ integer_to_list(C) ++ "." ++ integer_to_list(D); diff --git a/lib/kernel/src/inet_udp.erl b/lib/kernel/src/inet_udp.erl index 8a8aa8ecca..c69791b9aa 100644 --- a/lib/kernel/src/inet_udp.erl +++ b/lib/kernel/src/inet_udp.erl @@ -113,7 +113,7 @@ fdopen(Fd, Opts) -> %% Here's how: %% Reverse the list. %% For each head option go through the tail and remove -%% all occurences of the same option from the tail. +%% all occurrences of the same option from the tail. %% Store that head option and iterate using the new tail. %% Return the list of stored head options. optuniquify(List) -> @@ -122,8 +122,8 @@ optuniquify(List) -> optuniquify([], Result) -> Result; optuniquify([Opt | Tail], Result) -> - %% Remove all occurences of Opt in Tail, - %% prepend Opt to Result, + %% Remove all occurrences of Opt in Tail, + %% prepend Opt to Result, %% then iterate back here. optuniquify(Opt, Tail, [], Result). diff --git a/lib/kernel/src/kernel.app.src b/lib/kernel/src/kernel.app.src index 4d08a55c7c..25e4ddd95c 100644 --- a/lib/kernel/src/kernel.app.src +++ b/lib/kernel/src/kernel.app.src @@ -34,6 +34,7 @@ erl_boot_server, erl_distribution, erl_reply, + erl_signal_handler, error_handler, error_logger, file, diff --git a/lib/kernel/src/kernel.appup.src b/lib/kernel/src/kernel.appup.src index b505524471..2dc90e2b3e 100644 --- a/lib/kernel/src/kernel.appup.src +++ b/lib/kernel/src/kernel.appup.src @@ -18,7 +18,7 @@ %% %CopyrightEnd% {"%VSN%", %% Up from - max one major revision back - [{<<"5\\.[0-1](\\.[0-9]+)*">>,[restart_new_emulator]}], % OTP-19.* + [{<<"5\\.[0-2](\\.[0-9]+)*">>,[restart_new_emulator]}], % OTP-19.* %% Down to - max one major revision back - [{<<"5\\.[0-1](\\.[0-9]+)*">>,[restart_new_emulator]}] % OTP-19.* + [{<<"5\\.[0-2](\\.[0-9]+)*">>,[restart_new_emulator]}] % OTP-19.* }. 
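The source_search_rules parameter documented in kernel_app.xml above accepts either directory-pair rules or suffix rules. A hedged sys.config sketch, with illustrative directory names, showing both forms mixed in one list as the documentation allows:

    %% Replaces the default rules used by filelib:find_file/2 and
    %% filelib:find_source/2.
    [{kernel,
      [{source_search_rules,
        [{"ebin", "src"},                            %% DirRule, used by find_file/2
         {".beam", ".erl",
          [{"ebin", "src"}, {"ebin", "esrc"}]}       %% SuffixRule, used by find_source/2
        ]}]}].

With such a configuration, filelib:find_source/2 would look for the ".erl" source of a ".beam" object file in sibling "src" or "esrc" directories.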
diff --git a/lib/kernel/src/kernel.erl b/lib/kernel/src/kernel.erl index 3d0ef81318..59eca242b1 100644 --- a/lib/kernel/src/kernel.erl +++ b/lib/kernel/src/kernel.erl @@ -32,6 +32,14 @@ start(_, []) -> case supervisor:start_link({local, kernel_sup}, kernel, []) of {ok, Pid} -> + %% add signal handler + case whereis(erl_signal_server) of + %% in case of minimal mode + undefined -> ok; + _ -> + ok = gen_event:add_handler(erl_signal_server, erl_signal_handler, []) + end, + %% add error handler Type = get_error_logger_type(), case error_logger:swap_handler(Type) of ok -> {ok, Pid, []}; @@ -131,6 +139,9 @@ init([]) -> permanent, 2000, worker, [inet_db]}, NetSup = {net_sup, {erl_distribution, start_link, []}, permanent, infinity, supervisor,[erl_distribution]}, + SigSrv = #{id => erl_signal_server, + start => {gen_event, start_link, [{local, erl_signal_server}]}, + type => worker, restart => permanent, shutdown => 2000, modules => dynamic}, DistAC = start_dist_ac(), Timer = start_timer(), @@ -141,7 +152,7 @@ init([]) -> permanent, infinity, supervisor, [?MODULE]}, {ok, {SupFlags, [Code, Rpc, Global, InetDb | DistAC] ++ - [NetSup, Glo_grp, File, + [NetSup, Glo_grp, File, SigSrv, StdError, User, Config, SafeSupervisor] ++ Timer}} end; init(safe) -> diff --git a/lib/kernel/src/os.erl b/lib/kernel/src/os.erl index f8519d3a5e..7e83b17add 100644 --- a/lib/kernel/src/os.erl +++ b/lib/kernel/src/os.erl @@ -29,7 +29,7 @@ -export([getenv/0, getenv/1, getenv/2, getpid/0, perf_counter/0, perf_counter/1, - putenv/2, system_time/0, system_time/1, + putenv/2, set_signal/2, system_time/0, system_time/1, timestamp/0, unsetenv/1]). -spec getenv() -> [string()]. @@ -104,6 +104,15 @@ timestamp() -> unsetenv(_) -> erlang:nif_error(undef). +-spec set_signal(Signal, Option) -> 'ok' when + Signal :: 'sighup' | 'sigquit' | 'sigabrt' | 'sigalrm' | + 'sigterm' | 'sigusr1' | 'sigusr2' | 'sigchld' | + 'sigstop' | 'sigtstp', + Option :: 'default' | 'handle' | 'ignore'. + +set_signal(_Signal, _Option) -> + erlang:nif_error(undef). + %%% End of BIFs -spec type() -> {Osfamily, Osname} when @@ -289,12 +298,11 @@ get_data(Port, MonRef, Eot, Sofar) -> more -> get_data(Port, MonRef, Eot, [Sofar,Bytes]); Last -> - Port ! {self(), close}, - flush_until_closed(Port), - flush_exit(Port), + catch port_close(Port), + flush_until_down(Port, MonRef), iolist_to_binary([Sofar, Last]) end; - {'DOWN', MonRef, _, _ , _} -> + {'DOWN', MonRef, _, _, _} -> flush_exit(Port), iolist_to_binary(Sofar) end. @@ -308,18 +316,25 @@ eot(Bs, Eot) -> binary:part(Bs,{0, Pos}) end. -flush_until_closed(Port) -> +%% When port_close returns we know that all the +%% messages sent have been sent and that the +%% DOWN message is after them all. +flush_until_down(Port, MonRef) -> receive {Port, {data, _Bytes}} -> - flush_until_closed(Port); - {Port, closed} -> - true + flush_until_down(Port, MonRef); + {'DOWN', MonRef, _, _, _} -> + flush_exit(Port) end. +%% The exit signal is always delivered before +%% the down signal, so we can be sure that if there +%% was an exit message sent, it will be in the +%% mailbox now. flush_exit(Port) -> receive {'EXIT', Port, _} -> ok - after 1 -> % force context switch + after 0 -> ok end. diff --git a/lib/kernel/src/rpc.erl b/lib/kernel/src/rpc.erl index 21bff02214..bd6ea26678 100644 --- a/lib/kernel/src/rpc.erl +++ b/lib/kernel/src/rpc.erl @@ -67,17 +67,27 @@ %%------------------------------------------------------------------------ + +%% The rex server may receive a huge amount of +%% messages. 
Make sure that they are stored off heap to +%% avoid exessive GCs. + +-define(SPAWN_OPTS, [{spawn_opt,[{message_queue_data,off_heap}]}]). + %% Remote execution and broadcasting facility -spec start() -> {'ok', pid()} | 'ignore' | {'error', term()}. start() -> - gen_server:start({local,?NAME}, ?MODULE, [], []). + gen_server:start({local,?NAME}, ?MODULE, [], ?SPAWN_OPTS). -spec start_link() -> {'ok', pid()} | 'ignore' | {'error', term()}. start_link() -> - gen_server:start_link({local,?NAME}, ?MODULE, [], []). + %% The rex server process may receive a huge amount of + %% messages. Make sure that they are stored off heap to + %% avoid exessive GCs. + gen_server:start_link({local,?NAME}, ?MODULE, [], ?SPAWN_OPTS). -spec stop() -> term(). diff --git a/lib/kernel/test/application_SUITE.erl b/lib/kernel/test/application_SUITE.erl index 81407e9d96..b4cf31b210 100644 --- a/lib/kernel/test/application_SUITE.erl +++ b/lib/kernel/test/application_SUITE.erl @@ -1498,7 +1498,7 @@ otp_5363(Conf) when is_list(Conf) -> %% Ticket: OTP-5606 %% Slogan: Problems with starting a distributed application %%----------------------------------------------------------------- -%% Test of several processes simultanously starting the same +%% Test of several processes simultaneously starting the same %% distributed application. otp_5606(Conf) when is_list(Conf) -> diff --git a/lib/kernel/test/code_SUITE.erl b/lib/kernel/test/code_SUITE.erl index 4914ce9e4c..19d36a7613 100644 --- a/lib/kernel/test/code_SUITE.erl +++ b/lib/kernel/test/code_SUITE.erl @@ -323,7 +323,7 @@ load_abs(Config) when is_list(Config) -> {error, nofile} = code:load_abs(TestDir ++ "/duuuumy_mod"), {error, badfile} = code:load_abs(TestDir ++ "/code_a_test"), {'EXIT', _} = (catch code:load_abs({})), - {'EXIT', _} = (catch code:load_abs("Non-latin-имя-файла")), + {error, nofile} = code:load_abs("Non-latin-имя-файла"), {module, code_b_test} = code:load_abs(TestDir ++ "/code_b_test"), code:stick_dir(TestDir), {error, sticky_directory} = code:load_abs(TestDir ++ "/code_b_test"), @@ -621,20 +621,28 @@ sticky_compiler(Files, PrivDir) -> [R || R <- Rets, R =/= ok]. do_sticky_compile(Mod, Dir) -> - %% Make sure that the module is loaded. A module being sticky - %% only prevents it from begin reloaded, not from being loaded - %% from the wrong place to begin with. - Mod = Mod:module_info(module), - File = filename:append(Dir, atom_to_list(Mod)), - Src = io_lib:format("-module(~s).\n" - "-export([test/1]).\n" - "test(me) -> fail.\n", [Mod]), - ok = file:write_file(File++".erl", Src), - case c:c(File, [{outdir,Dir}]) of - {ok,Module} -> - Module:test(me); - {error,sticky_directory} -> - ok + case code:is_sticky(Mod) of + true -> + %% Make sure that the module is loaded. A module being sticky + %% only prevents it from begin reloaded, not from being loaded + %% from the wrong place to begin with. + Mod = Mod:module_info(module), + File = filename:append(Dir, atom_to_list(Mod)), + Src = io_lib:format("-module(~s).\n" + "-export([test/1]).\n" + "test(me) -> fail.\n", [Mod]), + ok = file:write_file(File++".erl", Src), + case c:c(File, [{outdir,Dir}]) of + {ok,Module} -> + Module:test(me); + {error,sticky_directory} -> + ok + end; + false -> + %% For some reason the module is not sticky + %% could be that the .erlang file has + %% unstuck it? + {Mod, is_not_sticky} end. %% Test that the -pa and -pz options work as expected. @@ -1352,9 +1360,8 @@ create_big_boot(Config) -> %% corresponding beam file (if hipe is not enabled). 
filter_app("hipe",_) -> false; -%% Dialyzer and typer depends on hipe +%% Dialyzer depends on hipe filter_app("dialyzer",_) -> false; -filter_app("typer",_) -> false; %% Orber requires explicit configuration filter_app("orber",_) -> false; diff --git a/lib/kernel/test/erl_distribution_SUITE.erl b/lib/kernel/test/erl_distribution_SUITE.erl index f630896e03..09c80a0956 100644 --- a/lib/kernel/test/erl_distribution_SUITE.erl +++ b/lib/kernel/test/erl_distribution_SUITE.erl @@ -233,7 +233,7 @@ time_ping(Node) -> erlang:convert_time_unit(T1 - T0, native, millisecond). %% Keep the connection with the client node up. -%% This is neccessary as the client node runs with much shorter +%% This is necessary as the client node runs with much shorter %% tick time !! keep_conn(Node) -> sleep(1), @@ -1059,7 +1059,7 @@ monitor_nodes_otp_6481_test(Config, TestType) when is_list(Config) -> RemotePid = spawn(Node, fun () -> receive after 1500 -> ok end, - %% infinit loop of msgs + %% infinite loop of msgs %% we want an endless stream of messages and the kill %% the node mercilessly. %% We then want to ensure that the nodedown message arrives diff --git a/lib/kernel/test/erl_distribution_wb_SUITE.erl b/lib/kernel/test/erl_distribution_wb_SUITE.erl index 6a23ad0d11..61aa3b32ee 100644 --- a/lib/kernel/test/erl_distribution_wb_SUITE.erl +++ b/lib/kernel/test/erl_distribution_wb_SUITE.erl @@ -30,7 +30,7 @@ %% 1) %% -%% Connections are now always set up symetrically with respect to +%% Connections are now always set up symmetrically with respect to %% publication. If connecting node doesn't send DFLAG_PUBLISHED %% the other node wont send DFLAG_PUBLISHED. If the connecting %% node send DFLAG_PUBLISHED but the other node doesn't send diff --git a/lib/kernel/test/error_logger_SUITE.erl b/lib/kernel/test/error_logger_SUITE.erl index b6e7551741..bb01c2384d 100644 --- a/lib/kernel/test/error_logger_SUITE.erl +++ b/lib/kernel/test/error_logger_SUITE.erl @@ -30,6 +30,7 @@ -export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, init_per_group/2,end_per_group/2, + off_heap/1, error_report/1, info_report/1, error/1, info/1, emulator/1, tty/1, logfile/1, add/1, delete/1]). @@ -45,7 +46,7 @@ suite() -> {timetrap,{minutes,1}}]. all() -> - [error_report, info_report, error, info, emulator, tty, + [off_heap, error_report, info_report, error, info, emulator, tty, logfile, add, delete]. groups() -> @@ -66,6 +67,16 @@ end_per_group(_GroupName, Config) -> %%----------------------------------------------------------------- +off_heap(_Config) -> + %% The error_logger process may receive a huge amount of + %% messages. Make sure that they are stored off heap to + %% avoid exessive GCs. + MQD = message_queue_data, + {MQD,off_heap} = process_info(whereis(error_logger), MQD), + ok. + +%%----------------------------------------------------------------- + error_report(Config) when is_list(Config) -> error_logger:add_report_handler(?MODULE, self()), Rep1 = [{tag1,"data1"},{tag2,data2},{tag3,3}], diff --git a/lib/kernel/test/file_SUITE.erl b/lib/kernel/test/file_SUITE.erl index f2094431d8..b402f01758 100644 --- a/lib/kernel/test/file_SUITE.erl +++ b/lib/kernel/test/file_SUITE.erl @@ -18,7 +18,7 @@ %% %CopyrightEnd% %% -%% This is a developement feature when developing a new file module, +%% This is a development feature when developing a new file module, %% ugly but practical. -ifndef(FILE_MODULE). -define(FILE_MODULE, file). 
diff --git a/lib/kernel/test/file_SUITE_data/realmen.html b/lib/kernel/test/file_SUITE_data/realmen.html index c810a5d088..92e13f23b8 100644 --- a/lib/kernel/test/file_SUITE_data/realmen.html +++ b/lib/kernel/test/file_SUITE_data/realmen.html @@ -237,7 +237,7 @@ destroy most of the interesting uses for EQUIVALENCE, and make it impossible to modify the operating system code with negative subscripts. Worst of all, bounds checking is inefficient. -<LI> Source code maintainance systems. A Real Programmer keeps his +<LI> Source code maintenance systems. A Real Programmer keeps his code locked up in a card file, because it implies that its owner cannot leave his important programs unguarded [5]. @@ -396,7 +396,7 @@ double stuff Oreos for special occasions. <LI> Underneath the Oreos is a flow-charting template, left there by the previous occupant of the office. (Real Programmers write programs, -not documentation. Leave that to the maintainence people.) +not documentation. Leave that to the maintenance people.) </UL> <P> diff --git a/lib/kernel/test/multi_load_SUITE.erl b/lib/kernel/test/multi_load_SUITE.erl index 369e25ac64..920839f4f9 100644 --- a/lib/kernel/test/multi_load_SUITE.erl +++ b/lib/kernel/test/multi_load_SUITE.erl @@ -144,14 +144,14 @@ prep_magic([H|T]) -> prep_magic(Tuple) when is_tuple(Tuple) -> L = prep_magic(tuple_to_list(Tuple)), list_to_tuple(L); -prep_magic(Bin) when is_binary(Bin) -> - try erlang:has_prepared_code_on_load(Bin) of +prep_magic(Ref) when is_reference(Ref) -> + try erlang:has_prepared_code_on_load(Ref) of false -> - %% Create a different kind of magic binary. + %% Create a different kind of magic ref. ets:match_spec_compile([{'_',[true],['$_']}]) catch _:_ -> - Bin + Ref end; prep_magic(Other) -> Other. diff --git a/lib/kernel/test/rpc_SUITE.erl b/lib/kernel/test/rpc_SUITE.erl index 1c72ddc87f..d76c4097d8 100644 --- a/lib/kernel/test/rpc_SUITE.erl +++ b/lib/kernel/test/rpc_SUITE.erl @@ -21,7 +21,8 @@ -export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, init_per_group/2,end_per_group/2]). --export([call/1, block_call/1, multicall/1, multicall_timeout/1, +-export([off_heap/1, + call/1, block_call/1, multicall/1, multicall_timeout/1, multicall_dies/1, multicall_node_dies/1, called_dies/1, called_node_dies/1, called_throws/1, call_benchmark/1, async_call/1]). @@ -35,7 +36,7 @@ suite() -> {timetrap,{minutes,2}}]. all() -> - [call, block_call, multicall, multicall_timeout, + [off_heap, call, block_call, multicall, multicall_timeout, multicall_dies, multicall_node_dies, called_dies, called_node_dies, called_throws, call_benchmark, async_call]. @@ -55,6 +56,13 @@ init_per_group(_GroupName, Config) -> end_per_group(_GroupName, Config) -> Config. +off_heap(_Config) -> + %% The rex server process may receive a huge amount of + %% messages. Make sure that they are stored off heap to + %% avoid exessive GCs. + MQD = message_queue_data, + {MQD,off_heap} = process_info(whereis(rex), MQD), + ok. %% Test different rpc calls. 
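Both the error_logger and rex changes above, and the new off_heap test cases, rely on the message_queue_data process setting. A minimal sketch, with a hypothetical module name, of the two ways a busy server can opt in, mirroring error_logger (process_flag from inside its init) and rex (spawn_opt at start):

    -module(busy_server).
    -behaviour(gen_server).
    -export([start_link/0, init/1, handle_call/3, handle_cast/2, handle_info/2]).

    start_link() ->
        %% Variant 1: have gen_server spawn the process with the flag already set.
        gen_server:start_link({local, ?MODULE}, ?MODULE, [],
                              [{spawn_opt, [{message_queue_data, off_heap}]}]).

    init([]) ->
        %% Variant 2: set the flag from inside the newly started process.
        _ = process_flag(message_queue_data, off_heap),
        {ok, #{}}.

    handle_call(_Req, _From, State) -> {reply, ok, State}.
    handle_cast(_Msg, State) -> {noreply, State}.
    handle_info(_Msg, State) -> {noreply, State}.

As in the test cases above, {message_queue_data, off_heap} = process_info(whereis(busy_server), message_queue_data) can be used to verify the setting.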
diff --git a/lib/kernel/vsn.mk b/lib/kernel/vsn.mk index 8d2517e680..76b020e8ed 100644 --- a/lib/kernel/vsn.mk +++ b/lib/kernel/vsn.mk @@ -1 +1 @@ -KERNEL_VSN = 5.1.1 +KERNEL_VSN = 5.2 diff --git a/lib/megaco/src/text/megaco_text_gen_prev3a.hrl b/lib/megaco/src/text/megaco_text_gen_prev3a.hrl index ae4a990779..9c75ee5926 100644 --- a/lib/megaco/src/text/megaco_text_gen_prev3a.hrl +++ b/lib/megaco/src/text/megaco_text_gen_prev3a.hrl @@ -424,7 +424,7 @@ enc_TransactionReply(#'TransactionReply'{transactionId = Tid, transactionResult = Res, %% These fields are actually not %% supported in this implementation, - %% but because the messanger module + %% but because the messenger module %% cannot see any diff between the %% various v3 implementations... segmentNumber = asn1_NOVALUE, diff --git a/lib/megaco/src/text/megaco_text_gen_prev3b.hrl b/lib/megaco/src/text/megaco_text_gen_prev3b.hrl index e7fb85d137..7e85be4d64 100644 --- a/lib/megaco/src/text/megaco_text_gen_prev3b.hrl +++ b/lib/megaco/src/text/megaco_text_gen_prev3b.hrl @@ -424,7 +424,7 @@ enc_TransactionReply(#'TransactionReply'{transactionId = Tid, transactionResult = Res, %% These fields are actually not %% supported in this implementation, - %% but because the messanger module + %% but because the messenger module %% cannot see any diff between the %% various v3 implementations... segmentNumber = asn1_NOVALUE, diff --git a/lib/megaco/src/text/megaco_text_gen_prev3c.hrl b/lib/megaco/src/text/megaco_text_gen_prev3c.hrl index 722e97a743..700392efe2 100644 --- a/lib/megaco/src/text/megaco_text_gen_prev3c.hrl +++ b/lib/megaco/src/text/megaco_text_gen_prev3c.hrl @@ -434,7 +434,7 @@ enc_TransactionReply(#'TransactionReply'{transactionId = Tid, transactionResult = Res, %% These fields are actually not %% supported in this implementation, - %% but because the messanger module + %% but because the messenger module %% cannot see any diff between the %% various v3 implementations... segmentNumber = asn1_NOVALUE, diff --git a/lib/mnesia/doc/src/notes.xml b/lib/mnesia/doc/src/notes.xml index 51c98d0d3e..9f59759cb6 100644 --- a/lib/mnesia/doc/src/notes.xml +++ b/lib/mnesia/doc/src/notes.xml @@ -39,7 +39,23 @@ thus constitutes one section in this document. The title of each section is the version number of Mnesia.</p> - <section><title>Mnesia 4.14.2</title> + <section><title>Mnesia 4.14.3</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Fixed crash in checkpoint handling when table was deleted + during backup.</p> + <p> + Own Id: OTP-14167</p> + </item> + </list> + </section> + +</section> + +<section><title>Mnesia 4.14.2</title> <section><title>Fixed Bugs and Malfunctions</title> <list> diff --git a/lib/mnesia/src/mnesia_checkpoint.erl b/lib/mnesia/src/mnesia_checkpoint.erl index 9eb939e8d3..fc626940b4 100644 --- a/lib/mnesia/src/mnesia_checkpoint.erl +++ b/lib/mnesia/src/mnesia_checkpoint.erl @@ -909,7 +909,7 @@ retainer_loop(Cp = #checkpoint_args{name=Name}) -> retainer_loop(Cp2); {From, {iter_end, Iter}} -> - retainer_fixtable(Iter#iter.oid_tab, false), + ?SAFE(retainer_fixtable(Iter#iter.oid_tab, false)), Iters = Cp#checkpoint_args.iterators -- [Iter], reply(From, Name, ok), retainer_loop(Cp#checkpoint_args{iterators = Iters}); @@ -971,7 +971,8 @@ do_stop(Cp) -> unset({checkpoint, Name}), lists:foreach(fun deactivate_tab/1, Cp#checkpoint_args.retainers), Iters = Cp#checkpoint_args.iterators, - lists:foreach(fun(I) -> retainer_fixtable(I#iter.oid_tab, false) end, Iters). 
+ [?SAFE(retainer_fixtable(Tab, false)) || #iter{main_tab=Tab} <- Iters], + ok. deactivate_tab(R) -> Name = R#retainer.cp_name, @@ -1151,7 +1152,7 @@ do_change_copy(Cp, Tab, FromType, ToType) -> Cp#checkpoint_args{retainers = Rs, nodes = writers(Rs)}. check_iter(From, Iter) when Iter#iter.pid == From -> - retainer_fixtable(Iter#iter.oid_tab, false), + ?SAFE(retainer_fixtable(Iter#iter.oid_tab, false)), false; check_iter(_From, _Iter) -> true. diff --git a/lib/mnesia/src/mnesia_event.erl b/lib/mnesia/src/mnesia_event.erl index 7320d381ea..6f7531245f 100644 --- a/lib/mnesia/src/mnesia_event.erl +++ b/lib/mnesia/src/mnesia_event.erl @@ -114,7 +114,8 @@ handle_table_event({Oper, Record, TransId}, State) -> handle_system_event({mnesia_checkpoint_activated, _Checkpoint}, State) -> {ok, State}; -handle_system_event({mnesia_checkpoint_deactivated, _Checkpoint}, State) -> +handle_system_event({mnesia_checkpoint_deactivated, Checkpoint}, State) -> + report_error("Checkpoint '~p' has been deactivated, last table copy deleted.\n",[Checkpoint]), {ok, State}; handle_system_event({mnesia_up, Node}, State) -> diff --git a/lib/mnesia/src/mnesia_monitor.erl b/lib/mnesia/src/mnesia_monitor.erl index ab78c9b13e..ff58974aba 100644 --- a/lib/mnesia/src/mnesia_monitor.erl +++ b/lib/mnesia/src/mnesia_monitor.erl @@ -169,7 +169,7 @@ check_protocol([{Node, {accept, Mon, Version, Protocol}} | Tail], Protocols) -> verbose("Failed to connect with ~p. ~p protocols rejected. " "expected version = ~p, expected protocol = ~p~n", [Node, Protocols, Version, Protocol]), - unlink(Mon), % Get rid of unneccessary link + unlink(Mon), % Get rid of unnecessary link check_protocol(Tail, Protocols) end; check_protocol([{Node, {reject, _Mon, Version, Protocol}} | Tail], Protocols) -> diff --git a/lib/mnesia/src/mnesia_schema.erl b/lib/mnesia/src/mnesia_schema.erl index 0e4017e4c3..b0d7965886 100644 --- a/lib/mnesia/src/mnesia_schema.erl +++ b/lib/mnesia/src/mnesia_schema.erl @@ -1941,7 +1941,7 @@ make_change_table_copy_type(Tab, Node, ToS) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% change index functions .... -%% Pos is allready added by 1 in both of these functions +%% Pos is already added by 1 in both of these functions add_table_index(Tab, Pos) -> schema_transaction(fun() -> do_add_table_index(Tab, Pos) end). diff --git a/lib/mnesia/test/mnesia_evil_backup.erl b/lib/mnesia/test/mnesia_evil_backup.erl index e745ec9b04..044cf501fd 100644 --- a/lib/mnesia/test/mnesia_evil_backup.erl +++ b/lib/mnesia/test/mnesia_evil_backup.erl @@ -723,18 +723,18 @@ bup_records(File, Mod) -> exit(Reason) end. 
-sops_with_checkpoint(doc) -> +sops_with_checkpoint(doc) -> ["Test schema operations during a checkpoint"]; sops_with_checkpoint(suite) -> []; sops_with_checkpoint(Config) when is_list(Config) -> - Ns = ?acquire_nodes(2, Config), - + Ns = [N1,N2] = ?acquire_nodes(2, Config), + ?match({ok, cp1, Ns}, mnesia:activate_checkpoint([{name, cp1},{max,mnesia:system_info(tables)}])), - Tab = tab, + Tab = tab, ?match({atomic, ok}, mnesia:create_table(Tab, [{disc_copies,Ns}])), OldRecs = [{Tab, K, -K} || K <- lists:seq(1, 5)], [mnesia:dirty_write(R) || R <- OldRecs], - + ?match({ok, cp2, Ns}, mnesia:activate_checkpoint([{name, cp2},{max,mnesia:system_info(tables)}])), File1 = "cp1_delete_me.BUP", ?match(ok, mnesia:dirty_write({Tab,6,-6})), @@ -742,16 +742,16 @@ sops_with_checkpoint(Config) when is_list(Config) -> ?match(ok, mnesia:dirty_write({Tab,7,-7})), File2 = "cp2_delete_me.BUP", ?match(ok, mnesia:backup_checkpoint(cp2, File2)), - + ?match(ok, mnesia:deactivate_checkpoint(cp1)), ?match(ok, mnesia:backup_checkpoint(cp2, File1)), ?match(ok, mnesia:dirty_write({Tab,8,-8})), - + ?match({atomic,ok}, mnesia:delete_table(Tab)), ?match({error,_}, mnesia:backup_checkpoint(cp2, File2)), ?match({'EXIT',_}, mnesia:dirty_write({Tab,9,-9})), - ?match({atomic,_}, mnesia:restore(File1, [{default_op, recreate_tables}])), + ?match({atomic,_}, mnesia:restore(File1, [{default_op, recreate_tables}])), Test = fun(N) when N > 5 -> ?error("To many records in backup ~p ~n", [N]); (N) -> case mnesia:dirty_read(Tab,N) of [{Tab,N,B}] when -B =:= N -> ok; @@ -759,8 +759,29 @@ sops_with_checkpoint(Config) when is_list(Config) -> end end, [Test(N) || N <- mnesia:dirty_all_keys(Tab)], - ?match({aborted,enoent}, mnesia:restore(File2, [{default_op, recreate_tables}])), - + ?match({aborted,enoent}, mnesia:restore(File2, [{default_op, recreate_tables}])), + + %% Mnesia crashes when deleting a table during backup + ?match([], mnesia_test_lib:stop_mnesia([N2])), + Tab2 = ram, + ?match({atomic, ok}, mnesia:create_table(Tab2, [{ram_copies,[N1]}])), + ?match({ok, cp3, _}, mnesia:activate_checkpoint([{name, cp3}, + {ram_overrides_dump,true}, + {min,[Tab2]}])), + Write = fun Loop (N) -> + case N > 0 of + true -> + mnesia:dirty_write({Tab2, N+100, N+100}), + Loop(N-1); + false -> + ok + end + end, + ok = Write(100000), + spawn_link(fun() -> ?match({atomic, ok},mnesia:delete_table(Tab2)) end), + + %% We don't check result here, depends on timing of above call + mnesia:backup_checkpoint(cp3, File2), file:delete(File1), file:delete(File2), - ?verify_mnesia(Ns, []). + ?verify_mnesia([N1], [N2]). diff --git a/lib/mnesia/vsn.mk b/lib/mnesia/vsn.mk index 439b21e58c..e272a469bb 100644 --- a/lib/mnesia/vsn.mk +++ b/lib/mnesia/vsn.mk @@ -1 +1 @@ -MNESIA_VSN = 4.14.2 +MNESIA_VSN = 4.14.3 diff --git a/lib/observer/doc/src/notes.xml b/lib/observer/doc/src/notes.xml index 8f3ebcb4de..79e2b2b9db 100644 --- a/lib/observer/doc/src/notes.xml +++ b/lib/observer/doc/src/notes.xml @@ -32,6 +32,47 @@ <p>This document describes the changes made to the Observer application.</p> +<section><title>Observer 2.3.1</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + etop erroneously reported the average scheduler + utilization since the tool was first started instead of + the scheduler utilization since last update. This is now + corrected.</p> + <p> + Own Id: OTP-14090 Aux Id: seq13232 </p> + </item> + <item> + <p> + crashdump_viewer crashed when the 'Slogan' had more than + one line. 
This is now corrected.</p> + <p> + Own Id: OTP-14093 Aux Id: ERL-318 </p> + </item> + <item> + <p> + When clicking an HTML-link to a port before the port tab + has been opened for the first time, observer would crash + since port info is not initiated. This is now corrected.</p> + <p> + Own Id: OTP-14151 Aux Id: PR-1296 </p> + </item> + <item> + <p>The dialyzer and observer applications will now use a + portable way to find the home directory. That means that + there is no longer any need to manually set the HOME + environment variable on Windows.</p> + <p> + Own Id: OTP-14249 Aux Id: ERL-161 </p> + </item> + </list> + </section> + +</section> + <section><title>Observer 2.3</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/observer/doc/src/observer_ug.xml b/lib/observer/doc/src/observer_ug.xml index 6eb72f3e58..ae85ab7a29 100644 --- a/lib/observer/doc/src/observer_ug.xml +++ b/lib/observer/doc/src/observer_ug.xml @@ -107,6 +107,11 @@ see module <seealso marker="erts:erts_alloc"><c>erts_alloc</c></seealso> in application ERTS.</p> + <p>The <c>Max Carrier size</c> column shows the maximum value seen by observer + since the last node change or since the start of the application, i.e. switching + nodes will reset the max column. Values are sampled so higher values may have + existed than what is shown. + </p> </section> <section> diff --git a/lib/observer/src/cdv_bin_cb.erl b/lib/observer/src/cdv_bin_cb.erl index 0cea1fdcf0..200c728a62 100644 --- a/lib/observer/src/cdv_bin_cb.erl +++ b/lib/observer/src/cdv_bin_cb.erl @@ -58,7 +58,7 @@ binary_to_term_fun(Bin) -> try binary_to_term(Bin) of Term -> plain_html(io_lib:format("~p",[Term])) catch error:badarg -> - Warning = "This binary can not be coverted to an Erlang term", + Warning = "This binary can not be converted to an Erlang term", observer_html_lib:warning(Warning) end end. 
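The cdv_bin_cb.erl hunk above relies on binary_to_term/1 raising badarg when the data is not a valid external term. A minimal standalone sketch of that guard pattern (the function name is illustrative, not part of the patch):

    %% Returns {term, T} when Bin holds a valid external term, otherwise a fallback.
    safe_term(Bin) when is_binary(Bin) ->
        try binary_to_term(Bin) of
            T -> {term, T}
        catch
            error:badarg -> {string, "not an Erlang term"}
        end.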
diff --git a/lib/observer/src/observer_alloc_wx.erl b/lib/observer/src/observer_alloc_wx.erl index ca54080e15..cad02087be 100644 --- a/lib/observer/src/observer_alloc_wx.erl +++ b/lib/observer/src/observer_alloc_wx.erl @@ -36,6 +36,7 @@ wins, mem, samples, + max, panel, paint, appmon, @@ -74,7 +75,8 @@ init([Notebook, Parent]) -> wins = Windows, mem = MemWin, paint = PaintInfo, - time = setup_time() + time = setup_time(), + max = #{} } } catch _:Err -> @@ -126,16 +128,17 @@ handle_info({Key, {promise_reply, {badrpc, _}}}, #state{async=Key} = State) -> {noreply, State#state{active=false, appmon=undefined}}; handle_info({Key, {promise_reply, SysInfo}}, - #state{async=Key, panel=_Panel, samples=Data, active=Active, wins=Wins0, - time=#ti{tick=Tick, disp=Disp0}=Ti} = S0) -> + #state{async=Key, samples=Data, max=Max0, + active=Active, wins=Wins0, time=#ti{tick=Tick, disp=Disp0}=Ti} = S0) -> Disp = trunc(Disp0), Next = max(Tick - Disp, 0), erlang:send_after(1000 div ?DISP_FREQ, self(), {refresh, Next}), Info = alloc_info(SysInfo), + Max = lists:foldl(fun calc_max/2, Max0, Info), {Wins, Samples} = add_data(Info, Data, Wins0, Ti, Active), - S1 = S0#state{time=Ti#ti{tick=Next}, wins=Wins, samples=Samples, async=undefined}, + S1 = S0#state{time=Ti#ti{tick=Next}, wins=Wins, samples=Samples, max=Max, async=undefined}, if Active -> - update_alloc(S0, Info), + update_alloc(S0, Info, Max), State = precalc(S1), {noreply, State}; true -> @@ -187,25 +190,35 @@ code_change(_, _, State) -> restart_fetcher(Node, #state{panel=Panel, wins=Wins0, time=Ti} = State) -> SysInfo = observer_wx:try_rpc(Node, observer_backend, sys_info, []), Info = alloc_info(SysInfo), + Max = lists:foldl(fun calc_max/2, #{}, Info), {Wins, Samples} = add_data(Info, {0, queue:new()}, Wins0, Ti, true), erlang:send_after(1000 div ?DISP_FREQ, self(), {refresh, 0}), wxWindow:refresh(Panel), precalc(State#state{active=true, appmon=Node, time=Ti#ti{tick=0}, - wins=Wins, samples=Samples}). + wins=Wins, samples=Samples, max=Max}). precalc(#state{samples=Data0, paint=Paint, time=Ti, wins=Wins0}=State) -> Wins = [precalc(Ti, Data0, Paint, Win) || Win <- Wins0], State#state{wins=Wins}. +calc_max({Name, _, Cs}, Max0) -> + case maps:get(Name, Max0, 0) of + Value when Value < Cs -> + Max0#{Name=>Cs}; + _V -> + Max0 + end. 
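The calc_max/2 clause above maintains the data behind the new Max Carrier size column described in observer_ug.xml: a per-allocator running maximum kept in a map and rebuilt from an empty map when the node changes. A condensed sketch with made-up sample data:

    Info = [{binary_alloc, 1024, 4096}, {eheap_alloc, 2048, 8192}],  %% sample {Name, BlockSize, CarrierSize} tuples
    Max = lists:foldl(fun({Name, _BS, CS}, Acc) ->
                              case maps:get(Name, Acc, 0) of
                                  Old when Old < CS -> Acc#{Name => CS};
                                  _ -> Acc
                              end
                      end, #{}, Info).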
-update_alloc(#state{mem=Grid}, Fields) -> +update_alloc(#state{mem=Grid}, Fields, Max) -> wxWindow:freeze(Grid), - Max = wxListCtrl:getItemCount(Grid), + Last = wxListCtrl:getItemCount(Grid), Update = fun({Name, BS, CS}, Row) -> - (Row >= Max) andalso wxListCtrl:insertItem(Grid, Row, ""), + (Row >= Last) andalso wxListCtrl:insertItem(Grid, Row, ""), + MaxV = maps:get(Name, Max, CS), wxListCtrl:setItem(Grid, Row, 0, observer_lib:to_str(Name)), wxListCtrl:setItem(Grid, Row, 1, observer_lib:to_str(BS div 1024)), wxListCtrl:setItem(Grid, Row, 2, observer_lib:to_str(CS div 1024)), + wxListCtrl:setItem(Grid, Row, 3, observer_lib:to_str(MaxV div 1024)), Row + 1 end, wx:foldl(Update, 0, Fields), @@ -269,7 +282,9 @@ create_mem_info(Parent) -> end, ListItems = [{"Allocator Type", ?wxLIST_FORMAT_LEFT, 200}, {"Block size (kB)", ?wxLIST_FORMAT_RIGHT, 150}, - {"Carrier size (kB)",?wxLIST_FORMAT_RIGHT, 150}], + {"Carrier size (kB)",?wxLIST_FORMAT_RIGHT, 150}, + {"Max Carrier size (kB)",?wxLIST_FORMAT_RIGHT, 150} + ], lists:foldl(AddListEntry, 0, ListItems), wxListItem:destroy(Li), diff --git a/lib/observer/src/observer_perf_wx.erl b/lib/observer/src/observer_perf_wx.erl index b0ead42e3f..0cbcdbceb4 100644 --- a/lib/observer/src/observer_perf_wx.erl +++ b/lib/observer/src/observer_perf_wx.erl @@ -55,7 +55,7 @@ -define(wxGC, wxGraphicsContext). --record(paint, {font, small, pen, pen2, pens, usegc = false}). +-record(paint, {font, small, pen, pen2, pens, dot_pens, usegc = false}). start_link(Notebook, Parent) -> wx_object:start_link(?MODULE, [Notebook, Parent], []). @@ -124,13 +124,17 @@ setup_graph_drawing(Panels) -> {F, SF} end, BlackPen = wxPen:new({0,0,0}, [{width, 1}]), - Pens = [wxPen:new(Col, [{width, 1}]) || Col <- tuple_to_list(colors())], + Pens = [wxPen:new(Col, [{width, 1}, {style, ?wxSOLID}]) + || Col <- tuple_to_list(colors())], + DotPens = [wxPen:new(Col, [{width, 1}, {style, ?wxDOT}]) + || Col <- tuple_to_list(colors())], #paint{usegc = UseGC, font = Font, small = SmallFont, pen = ?wxGREY_PEN, pen2 = BlackPen, - pens = list_to_tuple(Pens) + pens = list_to_tuple(Pens), + dot_pens = list_to_tuple(DotPens) }. @@ -181,17 +185,17 @@ handle_cast(Event, _State) -> %%%%%%%%%% handle_info({stats, 1, _, _, _} = Stats, #state{panel=Panel, samples=Data, active=Active, wins=Wins0, - time=#ti{tick=Tick, disp=Disp0}=Ti} = State0) -> + appmon=Node, time=#ti{tick=Tick, disp=Disp0}=Ti} = State0) -> if Active -> Disp = trunc(Disp0), Next = max(Tick - Disp, 0), erlang:send_after(1000 div ?DISP_FREQ, self(), {refresh, Next}), - {Wins, Samples} = add_data(Stats, Data, Wins0, Ti, Active), + {Wins, Samples} = add_data(Stats, Data, Wins0, Ti, Active, Node), State = precalc(State0#state{time=Ti#ti{tick=Next}, wins=Wins, samples=Samples}), wxWindow:refresh(Panel), {noreply, State}; true -> - {Wins1, Samples} = add_data(Stats, Data, Wins0, Ti, Active), + {Wins1, Samples} = add_data(Stats, Data, Wins0, Ti, Active, Node), Wins = [W#win{max=undefined} || W <- Wins1], {noreply, State0#state{samples=Samples, wins=Wins, time=Ti#ti{tick=0}}} end; @@ -247,13 +251,17 @@ restart_fetcher(Node, #state{appmon=Old, panel=Panel, time=#ti{fetch=Freq}=Ti, w reset_data() -> {0, queue:new()}. -add_data(Stats, {N, Q0}, Wins, #ti{fetch=Fetch, secs=Secs}, Active) when N > (Secs*Fetch+1) -> +add_data(Stats, Q, Wins, Ti, Active) -> + add_data(Stats, Q, Wins, Ti, Active, ignore). 
+ +add_data(Stats, {N, Q0}, Wins, #ti{fetch=Fetch, secs=Secs}, Active, Node) + when N > (Secs*Fetch+1) -> {{value, Drop}, Q} = queue:out(Q0), - add_data_1(Wins, Stats, N, {Drop,Q}, Active); -add_data(Stats, {N, Q}, Wins, _, Active) -> - add_data_1(Wins, Stats, N+1, {empty, Q}, Active). + add_data_1(Wins, Stats, N, {Drop,Q}, Active, Node); +add_data(Stats, {N, Q}, Wins, _, Active, Node) -> + add_data_1(Wins, Stats, N+1, {empty, Q}, Active, Node). -add_data_1([#win{state={_,St}}|_]=Wins0, Last, N, {Drop, Q}, Active) +add_data_1([#win{state={_,St}}|_]=Wins0, Last, N, {Drop, Q}, Active, Node) when St /= undefined -> try {Wins, Stat} = @@ -269,14 +277,12 @@ add_data_1([#win{state={_,St}}|_]=Wins0, Last, N, {Drop, Q}, Active) end, #{}, Wins0), {Wins, {N,queue:in(Stat#{}, Q)}} catch no_scheduler_change -> - {[Win#win{state=init_data(Id, Last), - info = info(Id, Last)} + {[Win#win{state=init_data(Id, Last), info=info(Id, Last, Node)} || #win{name=Id}=Win <- Wins0], {0,queue:new()}} end; -add_data_1(Wins, Stats, 1, {_, Q}, _) -> - {[Win#win{state=init_data(Id, Stats), - info = info(Id, Stats)} +add_data_1(Wins, Stats, 1, {_, Q}, _, Node) -> + {[Win#win{state=init_data(Id, Stats), info=info(Id, Stats, Node)} || #win{name=Id}=Win <- Wins], {0,Q}}. add_data_2(#win{name=Id, state=S0}=Win, Stats, Map) -> @@ -382,16 +388,24 @@ lmax(MState, Values, State) -> init_data(runq, {stats, _, T0, _, _}) -> {mk_max(),lists:sort(T0)}; init_data(io, {stats, _, _, {{_,In0}, {_,Out0}}, _}) -> {mk_max(), {In0,Out0}}; -init_data(memory, _) -> {mk_max(), info(memory, undefined)}; +init_data(memory, _) -> {mk_max(), info(memory, undefined, undefined)}; init_data(alloc, _) -> {mk_max(), unused}; init_data(utilz, _) -> {mk_max(), unused}. -info(runq, {stats, _, T0, _, _}) -> lists:seq(1, length(T0)); -info(memory, _) -> [total, processes, atom, binary, code, ets]; -info(io, _) -> [input, output]; -info(alloc, First) -> [Type || {Type, _, _} <- First]; -info(utilz, First) -> [Type || {Type, _, _} <- First]; -info(_, []) -> []. +info(runq, {stats, _, T0, _, _}, Node) -> + Dirty = get_dirty_cpu(Node), + {lists:seq(1, length(T0)-Dirty), Dirty}; +info(memory, _, _) -> [total, processes, atom, binary, code, ets]; +info(io, _, _) -> [input, output]; +info(alloc, First, _) -> [Type || {Type, _, _} <- First]; +info(utilz, First, _) -> [Type || {Type, _, _} <- First]; +info(_, [], _) -> []. + +get_dirty_cpu(Node) -> + case rpc:call(node(Node), erlang, system_info, [dirty_cpu_schedulers]) of + {badrpc,_R} -> 0; + N -> N + end. 
collect_data(runq, {stats, _, T0, _, _}, {Max,S0}) -> S1 = lists:sort(T0), @@ -471,9 +485,10 @@ window_geom({W,H}, {_, Max, _Unit, MaxUnit}, %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -draw_win(DC, #win{no_samples=Samples, geom=#{scale:={WS,HS}}, graphs=Graphs, max={_,Max,_,_}}=Win, +draw_win(DC, #win{name=Name, no_samples=Samples, geom=#{scale:={WS,HS}}, + graphs=Graphs, max={_,Max,_,_}, info=Info}=Win, #ti{tick=Tick, fetch=FetchFreq, secs=Secs, disp=DispFreq}=Ti, - Paint=#paint{pens=Pens}) when Samples >= 2, Graphs =/= [] -> + Paint=#paint{pens=Pens, dot_pens=Dots}) when Samples >= 2, Graphs =/= [] -> %% Draw graphs {X0,Y0,DrawBs} = draw_borders(DC, Ti, Win, Paint), Offset = Tick / DispFreq, @@ -483,14 +498,23 @@ draw_win(DC, #win{no_samples=Samples, geom=#{scale:={WS,HS}}, graphs=Graphs, max end, Start = X0 + (max(Secs*FetchFreq+Full-Samples, 0) - Offset)*WS, Last = Secs*FetchFreq*WS+X0, + Dirty = case {Name, Info} of + {runq, {_, DCpu}} -> DCpu; + _ -> 0 + end, + NoGraphs = length(Graphs), + NoCpu = NoGraphs - Dirty, Draw = fun(Lines0, N) -> - setPen(DC, element(1+ ((N-1) rem tuple_size(Pens)), Pens)), + case Dirty > 0 andalso N > NoCpu of + true -> setPen(DC, element(1+ ((N-NoCpu-1) rem tuple_size(Dots)), Dots)); + false -> setPen(DC, element(1+ ((N-1) rem tuple_size(Pens)), Pens)) + end, Order = lists:reverse(Lines0), [{_,Y}|Lines] = translate(Order, {Start, Y0}, 0, WS, {X0,Max*HS,Last}, []), strokeLines(DC, [{Last,Y}|Lines]), N-1 end, - lists:foldl(Draw, length(Graphs), Graphs), + lists:foldl(Draw, NoGraphs, Graphs), DrawBs(), ok; @@ -655,11 +679,17 @@ draw_borders(DC, #ti{secs=Secs, fetch=FetchFreq}, case Type of runq -> + {TextInfo, DirtyCpus} = Info, drawText(DC, "Scheduler Utilization (%) ", TopTextX, ?BH), TN0 = Text(TopTextX, BottomTextY, "Scheduler: ", 0), - lists:foldl(fun(Id, Pos0) -> - Text(Pos0, BottomTextY, integer_to_list(Id), Id) - end, TN0, Info); + Id = fun(Id, Pos0) -> + Text(Pos0, BottomTextY, integer_to_list(Id), Id) + end, + TN1 = lists:foldl(Id, TN0, TextInfo), + TN2 = Text(TN1, BottomTextY, "Dirty cpu: ", 0), + TN3 = lists:foldl(Id, TN2, lists:seq(1, DirtyCpus)), + _ = Text(TN3, BottomTextY, "(dotted)", 0), + ok; memory -> drawText(DC, "Memory Usage " ++ Unit, TopTextX,?BH), lists:foldl(fun(MType, {PenId, Pos0}) -> @@ -748,10 +778,10 @@ calc_max1(Max) -> end. colors() -> - {{240, 100, 100}, {100, 240, 100}, {100, 100, 240}, - {220, 220, 80}, {100, 240, 240}, {240, 100, 240}, - {100, 25, 25}, {25, 100, 25}, {25, 25, 100}, - {120, 120, 0}, {25, 100, 100}, {100, 50, 100} + {{240, 100, 100}, {0, 128, 0}, {25, 45, 170}, {255, 165, 0}, + {220, 220, 40}, {100, 240, 240},{240, 100, 240}, {160, 40, 40}, + {100, 100, 240}, {140, 140, 0}, {25, 200, 100}, {120, 25, 240}, + {255, 140, 163}, {25, 120, 120}, {120, 25, 120}, {110, 90, 60} }. 
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% diff --git a/lib/observer/src/observer_wx.erl b/lib/observer/src/observer_wx.erl index 3031a1f90d..83de4fa64c 100644 --- a/lib/observer/src/observer_wx.erl +++ b/lib/observer/src/observer_wx.erl @@ -636,7 +636,8 @@ create_connect_dialog(connect, #state{frame = Frame}) -> wxWindow:setSizerAndFit(Dialog, VSizer), wxSizer:setSizeHints(VSizer, Dialog), - CookiePath = filename:join(os:getenv("HOME"), ".erlang.cookie"), + {ok,[[HomeDir]]} = init:get_argument(home), + CookiePath = filename:join(HomeDir, ".erlang.cookie"), DefaultCookie = case filelib:is_file(CookiePath) of true -> {ok, Bin} = file:read_file(CookiePath), diff --git a/lib/observer/test/crashdump_helper.erl b/lib/observer/test/crashdump_helper.erl index 4239a3d0d1..e57c8162e4 100644 --- a/lib/observer/test/crashdump_helper.erl +++ b/lib/observer/test/crashdump_helper.erl @@ -44,7 +44,7 @@ n1_proc(Creator,_N2,Pid2,Port2,_L) -> Ref = make_ref(), Pid = self(), Bin = list_to_binary(lists:seq(1, 255)), - SubBin = element(1, split_binary(element(2, split_binary(Bin, 8)), 17)), + <<_:2,SubBin:17/binary,_/bits>> = Bin, register(named_port,Port), diff --git a/lib/observer/vsn.mk b/lib/observer/vsn.mk index dd23b08484..ca9ad72473 100644 --- a/lib/observer/vsn.mk +++ b/lib/observer/vsn.mk @@ -1 +1 @@ -OBSERVER_VSN = 2.3 +OBSERVER_VSN = 2.3.1 diff --git a/lib/orber/src/cdr_encode.erl b/lib/orber/src/cdr_encode.erl index f922b330a0..d8d1809f9d 100644 --- a/lib/orber/src/cdr_encode.erl +++ b/lib/orber/src/cdr_encode.erl @@ -683,7 +683,7 @@ enc_fixed(_Env, Digits, Scale, Fixed, _Bytes, _Len) -> orber:dbg("[~p] cdr_encode:enc_fixed(~p, ~p, ~p)~n" "The supplied fixed type incorrect. Check that the 'digits' and 'scale' field~n" "match the definition in the IDL-specification. The value field must be~n" - "a list of Digits lenght.", + "a list of Digits length.", [?LINE, Digits, Scale, Fixed], ?DEBUG_LEVEL), corba:raise(#'MARSHAL'{completion_status=?COMPLETED_MAYBE}). diff --git a/lib/orber/src/orber_iiop.hrl b/lib/orber/src/orber_iiop.hrl index 6bc82fb6d6..1b5d6a84ef 100644 --- a/lib/orber/src/orber_iiop.hrl +++ b/lib/orber/src/orber_iiop.hrl @@ -279,8 +279,8 @@ %%---------------------------------------------------------------------- %% Profile Body %% -%% iiop_version: describes the version of IIOP that the agent at the -%% specified adress is prepared to receive. +%% iiop_version: describes the version of IIOP that the agent at the +%% specified address is prepared to receive. %% host: identifies the internet host to which the GIOP messages %% for the specified object may be sent. %% port: contains the TCP?IP port number where the target agnet is listening diff --git a/lib/orber/src/orber_initial_references.erl b/lib/orber/src/orber_initial_references.erl index 738d702088..8caf69a68b 100644 --- a/lib/orber/src/orber_initial_references.erl +++ b/lib/orber/src/orber_initial_references.erl @@ -89,7 +89,7 @@ install(Timeout, Options) -> end, Wait = mnesia:wait_for_tables([orber_references], Timeout), - %% Check if any error has occured yet. If there are errors, return them. + %% Check if any error has occurred yet. If there are errors, return them. 
if DB_Result == {atomic, ok}, Wait == ok -> diff --git a/lib/orber/src/orber_objectkeys.erl b/lib/orber/src/orber_objectkeys.erl index 1233e4e721..3b1851e9b5 100644 --- a/lib/orber/src/orber_objectkeys.erl +++ b/lib/orber/src/orber_objectkeys.erl @@ -344,7 +344,7 @@ install(Timeout, Options) -> end, Wait = mnesia:wait_for_tables([orber_objkeys], Timeout), - %% Check if any error has occured yet. If there are errors, return them. + %% Check if any error has occurred yet. If there are errors, return them. if DB_Result == {atomic, ok}, Wait == ok -> diff --git a/lib/os_mon/doc/src/notes.xml b/lib/os_mon/doc/src/notes.xml index e6e80b046d..df4151147c 100644 --- a/lib/os_mon/doc/src/notes.xml +++ b/lib/os_mon/doc/src/notes.xml @@ -31,6 +31,21 @@ </header> <p>This document describes the changes made to the OS_Mon application.</p> +<section><title>Os_Mon 2.4.2</title> + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + Support s390x in os_mon.</p> + <p> + Own Id: OTP-14161 Aux Id: PR-1309 </p> + </item> + </list> + </section> + +</section> + <section><title>Os_Mon 2.4.1</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/os_mon/vsn.mk b/lib/os_mon/vsn.mk index 1ac0fb1d27..59a3d9dee4 100644 --- a/lib/os_mon/vsn.mk +++ b/lib/os_mon/vsn.mk @@ -1 +1 @@ -OS_MON_VSN = 2.4.1 +OS_MON_VSN = 2.4.2 diff --git a/lib/parsetools/doc/src/yecc.xml b/lib/parsetools/doc/src/yecc.xml index 9188bd2a22..004fc1668d 100644 --- a/lib/parsetools/doc/src/yecc.xml +++ b/lib/parsetools/doc/src/yecc.xml @@ -207,7 +207,7 @@ <code> Header "%% Copyright (C)" "%% @private" -"%% @Author John"</code> +"%% @Author John".</code> <p>Next comes a declaration of the <c>nonterminal categories</c> to be used in the rules. For example:</p> <code type="none"> diff --git a/lib/parsetools/src/leex.erl b/lib/parsetools/src/leex.erl index 602e47404d..e0f37ae9df 100644 --- a/lib/parsetools/src/leex.erl +++ b/lib/parsetools/src/leex.erl @@ -1264,7 +1264,7 @@ pack_dfa([], _, Rs, PDFA) -> {PDFA,Rs}. %% {Action, AcceptLength, CurrTokLen, RestChars, Line, State}. %% The return CurrTokLen is always the current number of characters -%% scanned in the current token. The returns have the follwoing +%% scanned in the current token. The returns have the following %% meanings: %% {Action, AcceptLength, RestChars, Line} - %% The scanner has reached an accepting end-state, for example after @@ -1281,7 +1281,7 @@ pack_dfa([], _, Rs, PDFA) -> {PDFA,Rs}. %% %% {reject, AcceptLength, CurrTokLen, RestChars, Line, State} - %% {Action, AcceptLength, CurrTokLen, RestChars, Line, State} - -%% The scanner has reached a non-accepting transistion state. If +%% The scanner has reached a non-accepting transition state. If %% RestChars == [] we need to get more characters to continue. %% Otherwise if 'reject' then no accepting state has been reached it %% is an error. If we have an Action and AcceptLength then these are diff --git a/lib/parsetools/src/yecc.erl b/lib/parsetools/src/yecc.erl index f6b80eb1b4..05446c1a85 100644 --- a/lib/parsetools/src/yecc.erl +++ b/lib/parsetools/src/yecc.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2016. All Rights Reserved. +%% Copyright Ericsson AB 1996-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. 
@@ -81,7 +81,7 @@ -record(rule, { n, % rule n in the grammar file - line, + anno, symbols, % the names of symbols tokens, is_guard, % the action is a guard (not used) @@ -105,7 +105,7 @@ -record(user_code, {state, terminal, funname, action}). --record(symbol, {line = none, name}). +-record(symbol, {anno = none, name}). %% ACCEPT is neither an atom nor a non-terminal. -define(ACCEPT, {}). @@ -517,7 +517,7 @@ parse_grammar(Grammar, Inport, NextLine, St0) -> parse_grammar(Inport, NextLine, St). parse_grammar({error,ErrorLine,Error}, St) -> - add_error(ErrorLine, Error, St); + add_error(erl_anno:new(ErrorLine), Error, St); parse_grammar({rule, Rule, Tokens}, St0) -> NmbrOfDaughters = case Rule of [_, #symbol{name = '$empty'}] -> 0; @@ -534,15 +534,15 @@ parse_grammar({rule, Rule, Tokens}, St0) -> St#yecc{rules_list = [RuleDef | St#yecc.rules_list]}; parse_grammar({prec, Prec}, St) -> St#yecc{prec = Prec ++ St#yecc.prec}; -parse_grammar({#symbol{}, [{string,Line,String}]}, St) -> - add_error(Line, {bad_symbol, String}, St); -parse_grammar({#symbol{line = Line, name = Name}, Symbols}, St) -> +parse_grammar({#symbol{}, [{string,Anno,String}]}, St) -> + add_error(Anno, {bad_symbol, String}, St); +parse_grammar({#symbol{anno = Anno, name = Name}, Symbols}, St) -> CF = fun(I) -> case element(I, St) of [] -> setelement(I, St, Symbols); _ -> - add_error(Line, {duplicate_declaration, Name}, St) + add_error(Anno, {duplicate_declaration, Name}, St) end end, OneSymbol = length(Symbols) =:= 1, @@ -553,7 +553,7 @@ parse_grammar({#symbol{line = Line, name = Name}, Symbols}, St) -> 'Endsymbol' when OneSymbol -> CF(#yecc.endsymbol); 'Expect' when OneSymbol -> CF(#yecc.expect_shift_reduce); 'States' when OneSymbol -> CF(#yecc.expect_n_states); % undocumented - _ -> add_warning(Line, bad_declaration, St) + _ -> add_warning(Anno, bad_declaration, St) end. read_grammar(Inport, St, Line) -> @@ -599,7 +599,7 @@ precedence(_) -> unknown. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% check_grammar(St0) -> - Empty = #symbol{line = none, name = '$empty'}, + Empty = #symbol{anno = none, name = '$empty'}, AllSymbols = St0#yecc.nonterminals ++ St0#yecc.terminals ++ [Empty], St1 = St0#yecc{all_symbols = AllSymbols}, Cs = [fun check_nonterminals/1, fun check_terminals/1, @@ -640,12 +640,12 @@ check_rootsymbol(St) -> case St#yecc.rootsymbol of [] -> add_error(rootsymbol_missing, St); - [#symbol{line = Line, name = SymName}] -> + [#symbol{anno = Anno, name = SymName}] -> case kind_of_symbol(St, SymName) of nonterminal -> St#yecc{rootsymbol = SymName}; _ -> - add_error(Line, {bad_rootsymbol, SymName}, St) + add_error(Anno, {bad_rootsymbol, SymName}, St) end end. 
@@ -653,12 +653,12 @@ check_endsymbol(St) -> case St#yecc.endsymbol of [] -> St#yecc{endsymbol = '$end'}; - [#symbol{line = Line, name = SymName}] -> + [#symbol{anno = Anno, name = SymName}] -> case kind_of_symbol(St, SymName) of nonterminal -> - add_error(Line, {endsymbol_is_nonterminal, SymName}, St); + add_error(Anno, {endsymbol_is_nonterminal, SymName}, St); terminal -> - add_error(Line, {endsymbol_is_terminal, SymName}, St); + add_error(Anno, {endsymbol_is_terminal, SymName}, St); _ -> St#yecc{endsymbol = SymName} end @@ -670,8 +670,8 @@ check_expect(St0) -> St0#yecc{expect_shift_reduce = 0}; [#symbol{name = Expect}] when is_integer(Expect) -> St0#yecc{expect_shift_reduce = Expect}; - [#symbol{line = Line, name = Name}] -> - St1 = add_error(Line, {bad_expect, Name}, St0), + [#symbol{anno = Anno, name = Name}] -> + St1 = add_error(Anno, {bad_expect, Name}, St0), St1#yecc{expect_shift_reduce = 0} end. @@ -681,27 +681,27 @@ check_states(St) -> St; [#symbol{name = NStates}] when is_integer(NStates) -> St#yecc{expect_n_states = NStates}; - [#symbol{line = Line, name = Name}] -> - add_error(Line, {bad_states, Name}, St) + [#symbol{anno = Anno, name = Name}] -> + add_error(Anno, {bad_states, Name}, St) end. check_precedences(St0) -> {St1, _} = - foldr(fun({#symbol{line = Line, name = Op},_I,_A}, {St,Ps}) -> + foldr(fun({#symbol{anno = Anno, name = Op},_I,_A}, {St,Ps}) -> case member(Op, Ps) of true -> - {add_error(Line, {duplicate_precedence,Op}, St), + {add_error(Anno, {duplicate_precedence,Op}, St), Ps}; false -> {St, [Op | Ps]} end end, {St0,[]}, St0#yecc.prec), - foldl(fun({#symbol{line = Line, name = Op},I,A}, St) -> + foldl(fun({#symbol{anno = Anno, name = Op},I,A}, St) -> case kind_of_symbol(St, Op) of endsymbol -> - add_error(Line,{precedence_op_is_endsymbol,Op}, St); + add_error(Anno,{precedence_op_is_endsymbol,Op}, St); unknown -> - add_error(Line, {precedence_op_is_unknown, Op}, St); + add_error(Anno, {precedence_op_is_unknown, Op}, St); _ -> St#yecc{prec = [{Op,I,A} | St#yecc.prec]} end @@ -709,13 +709,13 @@ check_precedences(St0) -> check_rule(Rule0, {St0,Rules}) -> Symbols = Rule0#rule.symbols, - #symbol{line = HeadLine, name = Head} = hd(Symbols), + #symbol{anno = HeadAnno, name = Head} = hd(Symbols), case member(Head, St0#yecc.nonterminals) of false -> - {add_error(HeadLine, {undefined_nonterminal, Head}, St0), Rules}; + {add_error(HeadAnno, {undefined_nonterminal, Head}, St0), Rules}; true -> St = check_rhs(tl(Symbols), St0), - Rule = Rule0#rule{line = HeadLine, symbols = names(Symbols)}, + Rule = Rule0#rule{anno = HeadAnno, symbols = names(Symbols)}, {St, [Rule | Rules]} end. @@ -725,7 +725,7 @@ check_rules(St0) -> [] -> add_error(no_grammar_rules, St); _ -> - Rule = #rule{line = none, + Rule = #rule{anno = none, symbols = [?ACCEPT, St#yecc.rootsymbol], tokens = []}, Rules1 = [Rule | Rules0], @@ -740,9 +740,9 @@ duplicates(List) -> names(Symbols) -> map(fun(Symbol) -> Symbol#symbol.name end, Symbols). -symbol_line(Name, St) -> - #symbol{line = Line} = symbol_find(Name, St#yecc.all_symbols), - Line. +symbol_anno(Name, St) -> + #symbol{anno = Anno} = symbol_find(Name, St#yecc.all_symbols), + Anno. symbol_member(Symbol, Symbols) -> symbol_find(Symbol#symbol.name, Symbols) =/= false. @@ -894,31 +894,33 @@ report_warnings(St) -> add_error(E, St) -> add_error(none, E, St). -add_error(Line, E, St) -> - add_error(St#yecc.infile, Line, E, St). +add_error(Anno, E, St) -> + add_error(St#yecc.infile, Anno, E, St). 
-add_error(File, Line, E, St) -> - St#yecc{errors = [{File,{Line,?MODULE,E}}|St#yecc.errors]}. +add_error(File, Anno, E, St) -> + Loc = location(Anno), + St#yecc{errors = [{File,{Loc,?MODULE,E}}|St#yecc.errors]}. add_errors(SymNames, E0, St0) -> foldl(fun(SymName, St) -> - add_error(symbol_line(SymName, St), {E0, SymName}, St) + add_error(symbol_anno(SymName, St), {E0, SymName}, St) end, St0, SymNames). -add_warning(Line, W, St) -> - St#yecc{warnings = [{St#yecc.infile,{Line,?MODULE,W}}|St#yecc.warnings]}. +add_warning(Anno, W, St) -> + Loc = location(Anno), + St#yecc{warnings = [{St#yecc.infile,{Loc,?MODULE,W}}|St#yecc.warnings]}. add_warnings(SymNames, W0, St0) -> foldl(fun(SymName, St) -> - add_warning(symbol_line(SymName, St), {W0, SymName}, St) + add_warning(symbol_anno(SymName, St), {W0, SymName}, St) end, St0, SymNames). check_rhs([#symbol{name = '$empty'}], St) -> St; check_rhs(Rhs, St0) -> case symbol_find('$empty', Rhs) of - #symbol{line = Line} -> - add_error(Line, illegal_empty, St0); + #symbol{anno = Anno} -> + add_error(Anno, illegal_empty, St0); false -> foldl(fun(Sym, St) -> case symbol_member(Sym, St#yecc.all_symbols) of @@ -926,13 +928,13 @@ check_rhs(Rhs, St0) -> St; false -> E = {undefined_symbol,Sym#symbol.name}, - add_error(Sym#symbol.line, E, St) + add_error(Sym#symbol.anno, E, St) end end, St0, Rhs) end. check_action(Tokens) -> - case erl_parse:parse_exprs(add_roberts_dot(Tokens, 0)) of + case erl_parse:parse_exprs(add_roberts_dot(Tokens, erl_anno:new(0))) of {error, _Error} -> {false, false}; {ok, [Expr | Exprs]} -> @@ -940,10 +942,10 @@ check_action(Tokens) -> {IsGuard, true} end. -add_roberts_dot([], Line) -> - [{'dot', Line}]; -add_roberts_dot([{'dot', Line} | _], _) -> - [{'dot', Line}]; +add_roberts_dot([], Anno) -> + [{'dot', Anno}]; +add_roberts_dot([{'dot', Anno} | _], _) -> + [{'dot', Anno}]; add_roberts_dot([Token | Tokens], _) -> [Token | add_roberts_dot(Tokens, element(2, Token))]. @@ -953,21 +955,22 @@ subst_pseudo_vars([H0 | T0], NmbrOfDaughters, St0) -> {H, St1} = subst_pseudo_vars(H0, NmbrOfDaughters, St0), {T, St} = subst_pseudo_vars(T0, NmbrOfDaughters, St1), {[H | T], St}; -subst_pseudo_vars({atom, Line, Atom}, NmbrOfDaughters, St0) -> +subst_pseudo_vars({atom, Anno, Atom}, NmbrOfDaughters, St0) -> case atom_to_list(Atom) of [$$ | Rest] -> try list_to_integer(Rest) of N when N > 0, N =< NmbrOfDaughters -> - {{var, Line, list_to_atom(append("__", Rest))}, St0}; + {{var, Anno, list_to_atom(append("__", Rest))}, St0}; _ -> - St = add_error(Line, {undefined_pseudo_variable, Atom}, + St = add_error(Anno, + {undefined_pseudo_variable, Atom}, St0), - {{atom, Line, '$undefined'}, St} + {{atom, Anno, '$undefined'}, St} catch - error: _ -> {{atom, Line, Atom}, St0} + error: _ -> {{atom, Anno, Atom}, St0} end; _ -> - {{atom, Line, Atom}, St0} + {{atom, Anno, Atom}, St0} end; subst_pseudo_vars(Tuple, NmbrOfDaughters, St0) when is_tuple(Tuple) -> {L, St} = subst_pseudo_vars(tuple_to_list(Tuple), NmbrOfDaughters, St0), @@ -2295,9 +2298,9 @@ function_name(Name, Suf) -> list_to_atom(concat([Name, '_' | quoted_atom(Suf)])). rule(RulePointer, St) -> - #rule{n = N, line = Line, symbols = Symbols} = + #rule{n = N, anno = Anno, symbols = Symbols} = dict:fetch(RulePointer, St#yecc.rule_pointer2rule), - {Symbols, Line, N}. + {Symbols, Anno, N}. get_rule(RuleNmbr, St) -> dict:fetch(RuleNmbr, St#yecc.rule_pointer2rule). 
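The yecc changes above replace raw line numbers with erl_anno annotations in #symbol{} and #rule{}, recovering a line only at reporting time via the new location/1 helper. A small shell sketch of the two erl_anno calls this relies on:

    1> A = erl_anno:new(42).    %% build an annotation from a line number
    2> erl_anno:line(A).        %% recover the line, as yecc's location/1 now does
    42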
@@ -2463,7 +2466,7 @@ include(St, File, Outport) -> include1(eof, _, _, _File, L, _St) -> L; include1({error, _}=_Error, _Inport, _Outport, File, L, St) -> - throw(add_error(File, L, cannot_parse, St)); + throw(add_error(File, erl_anno:new(L), cannot_parse, St)); include1(Line, Inport, Outport, File, L, St) -> Incr = case member($\n, Line) of true -> 1; @@ -2488,7 +2491,7 @@ includefile_version(Includefile) -> parse_file(Epp) -> case epp:parse_erl_form(Epp) of - {ok, {function,_Line,yeccpars1,7,_Clauses}} -> + {ok, {function,_Anno,yeccpars1,7,_Clauses}} -> {1,4}; {eof,_Line} -> {1,1}; @@ -2503,7 +2506,7 @@ pp_tokens(Tokens, Line0, Enc) -> pp_tokens1([], _Line0, _Enc, _T0) -> []; pp_tokens1([T | Ts], Line0, Enc, T0) -> - Line = element(2, T), + Line = location(anno(T)), [pp_sep(Line, Line0, T0), pp_symbol(T, Enc)|pp_tokens1(Ts, Line, Enc, T)]. pp_symbol({var,_,Var}, _Enc) -> Var; @@ -2538,10 +2541,17 @@ output_file_directive(St, _Filename, _Line) -> St. first_line(Tokens) -> - element(2, hd(Tokens)). + location(anno(hd(Tokens))). last_line(Tokens) -> - element(2, lists:last(Tokens)). + location(anno(lists:last(Tokens))). + +location(none) -> none; +location(Anno) -> + erl_anno:line(Anno). + +anno(Token) -> + element(2, Token). %% Keep track of the current line in the generated file. fwrite(#yecc{outport = Outport, line = Line}=St, Format, Args) -> diff --git a/lib/parsetools/src/yeccgramm.yrl b/lib/parsetools/src/yeccgramm.yrl index c7b2ef6a86..40aa85a43e 100644 --- a/lib/parsetools/src/yeccgramm.yrl +++ b/lib/parsetools/src/yeccgramm.yrl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2015. All Rights Reserved. +%% Copyright Ericsson AB 1996-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -39,43 +39,38 @@ rule -> head '->' symbols attached_code dot: {rule, ['$1' | '$3'], '$4'}. head -> symbol : '$1'. symbols -> symbol : ['$1']. symbols -> symbol symbols : ['$1' | '$2']. -strings -> string : [string('$1')]. -strings -> string strings : [string('$1') | '$2']. +strings -> string : ['$1']. +strings -> string strings : ['$1' | '$2']. attached_code -> ':' tokens : {erlang_code, '$2'}. -attached_code -> '$empty' : {erlang_code, [{atom, 0, '$undefined'}]}. +attached_code -> '$empty' : {erlang_code, + [{atom, erl_anno:new(0), '$undefined'}]}. tokens -> token : ['$1']. tokens -> token tokens : ['$1' | '$2']. symbol -> var : symbol('$1'). symbol -> atom : symbol('$1'). symbol -> integer : symbol('$1'). symbol -> reserved_word : symbol('$1'). -token -> var : token('$1'). -token -> atom : token('$1'). -token -> float : token('$1'). -token -> integer : token('$1'). -token -> string : token('$1'). -token -> char : token('$1'). -token -> reserved_symbol : {value_of('$1'), line_of('$1')}. -token -> reserved_word : {value_of('$1'), line_of('$1')}. -token -> '->' : {'->', line_of('$1')}. % Have to be treated in this -token -> ':' : {':', line_of('$1')}. % manner, because they are also - % special symbols of the metagrammar +token -> var : '$1'. +token -> atom : '$1'. +token -> float : '$1'. +token -> integer : '$1'. +token -> string : '$1'. +token -> char : '$1'. +token -> reserved_symbol : {value_of('$1'), anno_of('$1')}. +token -> reserved_word : {value_of('$1'), anno_of('$1')}. +token -> '->' : {'->', anno_of('$1')}. % Have to be treated in this +token -> ':' : {':', anno_of('$1')}. 
% manner, because they are also + % special symbols of the metagrammar Erlang code. --record(symbol, {line, name}). +-record(symbol, {anno, name}). symbol(Symbol) -> - #symbol{line = line_of(Symbol), name = value_of(Symbol)}. - -token(Token) -> - setelement(2, Token, line_of(Token)). - -string(Token) -> - setelement(2, Token, line_of(Token)). + #symbol{anno = anno_of(Symbol), name = value_of(Symbol)}. value_of(Token) -> element(3, Token). -line_of(Token) -> - erl_anno:line(element(2, Token)). +anno_of(Token) -> + element(2, Token). diff --git a/lib/parsetools/src/yeccparser.erl b/lib/parsetools/src/yeccparser.erl index 0025284ccf..6f6f66d56c 100644 --- a/lib/parsetools/src/yeccparser.erl +++ b/lib/parsetools/src/yeccparser.erl @@ -1,29 +1,23 @@ -module(yeccparser). -export([parse/1, parse_and_scan/1, format_error/1]). --file("yeccgramm.yrl", 63). +-file("yeccgramm.yrl", 65). --record(symbol, {line, name}). +-record(symbol, {anno, name}). symbol(Symbol) -> - #symbol{line = line_of(Symbol), name = value_of(Symbol)}. - -token(Token) -> - setelement(2, Token, line_of(Token)). - -string(Token) -> - setelement(2, Token, line_of(Token)). + #symbol{anno = anno_of(Symbol), name = value_of(Symbol)}. value_of(Token) -> element(3, Token). -line_of(Token) -> - erl_anno:line(element(2, Token)). +anno_of(Token) -> + element(2, Token). --file("lib/parsetools/include/yeccpre.hrl", 0). +-file("/ldisk/hasse/otp/lib/parsetools/include/yeccpre.hrl", 0). %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2015. All Rights Reserved. +%% Copyright Ericsson AB 1996-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -147,21 +141,10 @@ yecc_end(Line) -> {'$end', Line}. yecctoken_end_location(Token) -> - try - Str = erl_scan:text(Token), - Line = erl_scan:line(Token), - Parts = re:split(Str, "\n"), - Dline = length(Parts) - 1, - Yline = Line + Dline, - case erl_scan:column(Token) of - Column when is_integer(Column) -> - Col = byte_size(lists:last(Parts)), - {Yline, Col + if Dline =:= 0 -> Column; true -> 1 end}; - undefined -> - Yline - end - catch _:_ -> - yecctoken_location(Token) + try erl_anno:end_location(element(2, Token)) of + undefined -> yecctoken_location(Token); + Loc -> Loc + catch _:_ -> yecctoken_location(Token) end. -compile({nowarn_unused_function, yeccerror/1}). @@ -172,15 +155,15 @@ yeccerror(Token) -> -compile({nowarn_unused_function, yecctoken_to_string/1}). yecctoken_to_string(Token) -> - case catch erl_scan:text(Token) of - Txt when is_list(Txt) -> Txt; - _ -> yecctoken2string(Token) + try erl_scan:text(Token) of + undefined -> yecctoken2string(Token); + Txt -> Txt + catch _:_ -> yecctoken2string(Token) end. yecctoken_location(Token) -> - case catch erl_scan:location(Token) of - Loc when Loc =/= undefined -> Loc; - _ -> element(2, Token) + try erl_scan:location(Token) + catch _:_ -> element(2, Token) end. -compile({nowarn_unused_function, yecctoken2string/1}). @@ -204,8 +187,9 @@ yecctoken2string(Other) -> --file("yeccgramm.erl", 207). +-file("yeccgramm.erl", 190). +-dialyzer({nowarn_function, yeccpars2/7}). yeccpars2(0=S, Cat, Ss, Stack, T, Ts, Tzr) -> yeccpars2_0(S, Cat, Ss, Stack, T, Ts, Tzr); %% yeccpars2(1=S, Cat, Ss, Stack, T, Ts, Tzr) -> @@ -281,6 +265,7 @@ yeccpars2(35=S, Cat, Ss, Stack, T, Ts, Tzr) -> yeccpars2(Other, _, _, _, _, _, _) -> erlang:error({yecc_bug,"1.4",{missing_state_in_action_table, Other}}). +-dialyzer({nowarn_function, yeccpars2_0/7}). 
yeccpars2_0(S, atom, Ss, Stack, T, Ts, Tzr) -> yeccpars1(S, 6, Ss, Stack, T, Ts, Tzr); yeccpars2_0(S, integer, Ss, Stack, T, Ts, Tzr) -> @@ -308,11 +293,13 @@ yeccpars2_1(_S, Cat, Ss, Stack, T, Ts, Tzr) -> yeccpars2_2(_S, Cat, Ss, Stack, T, Ts, Tzr) -> yeccgoto_grammar(hd(Ss), Cat, Ss, Stack, T, Ts, Tzr). +-dialyzer({nowarn_function, yeccpars2_3/7}). yeccpars2_3(S, '->', Ss, Stack, T, Ts, Tzr) -> yeccpars1(S, 10, Ss, Stack, T, Ts, Tzr); yeccpars2_3(_, _, _, _, T, _, _) -> yeccerror(T). +-dialyzer({nowarn_function, yeccpars2_4/7}). yeccpars2_4(_S, '$end', _Ss, Stack, _T, _Ts, _Tzr) -> {ok, hd(Stack)}; yeccpars2_4(_, _, _, _, T, _, _) -> @@ -362,11 +349,13 @@ yeccpars2_13(_S, Cat, Ss, Stack, T, Ts, Tzr) -> NewStack = yeccpars2_13_(Stack), yeccgoto_symbols(hd(Nss), Cat, Nss, NewStack, T, Ts, Tzr). +-dialyzer({nowarn_function, yeccpars2_14/7}). yeccpars2_14(S, dot, Ss, Stack, T, Ts, Tzr) -> yeccpars1(S, 29, Ss, Stack, T, Ts, Tzr); yeccpars2_14(_, _, _, _, T, _, _) -> yeccerror(T). +-dialyzer({nowarn_function, yeccpars2_15/7}). yeccpars2_15(S, '->', Ss, Stack, T, Ts, Tzr) -> yeccpars1(S, 18, Ss, Stack, T, Ts, Tzr); yeccpars2_15(S, ':', Ss, Stack, T, Ts, Tzr) -> @@ -428,20 +417,16 @@ yeccpars2_19(_S, Cat, Ss, Stack, T, Ts, Tzr) -> yeccgoto_token(hd(Ss), Cat, Ss, NewStack, T, Ts, Tzr). yeccpars2_20(_S, Cat, Ss, Stack, T, Ts, Tzr) -> - NewStack = yeccpars2_20_(Stack), - yeccgoto_token(hd(Ss), Cat, Ss, NewStack, T, Ts, Tzr). + yeccgoto_token(hd(Ss), Cat, Ss, Stack, T, Ts, Tzr). yeccpars2_21(_S, Cat, Ss, Stack, T, Ts, Tzr) -> - NewStack = yeccpars2_21_(Stack), - yeccgoto_token(hd(Ss), Cat, Ss, NewStack, T, Ts, Tzr). + yeccgoto_token(hd(Ss), Cat, Ss, Stack, T, Ts, Tzr). yeccpars2_22(_S, Cat, Ss, Stack, T, Ts, Tzr) -> - NewStack = yeccpars2_22_(Stack), - yeccgoto_token(hd(Ss), Cat, Ss, NewStack, T, Ts, Tzr). + yeccgoto_token(hd(Ss), Cat, Ss, Stack, T, Ts, Tzr). yeccpars2_23(_S, Cat, Ss, Stack, T, Ts, Tzr) -> - NewStack = yeccpars2_23_(Stack), - yeccgoto_token(hd(Ss), Cat, Ss, NewStack, T, Ts, Tzr). + yeccgoto_token(hd(Ss), Cat, Ss, Stack, T, Ts, Tzr). yeccpars2_24(_S, Cat, Ss, Stack, T, Ts, Tzr) -> NewStack = yeccpars2_24_(Stack), @@ -452,12 +437,10 @@ yeccpars2_25(_S, Cat, Ss, Stack, T, Ts, Tzr) -> yeccgoto_token(hd(Ss), Cat, Ss, NewStack, T, Ts, Tzr). yeccpars2_26(_S, Cat, Ss, Stack, T, Ts, Tzr) -> - NewStack = yeccpars2_26_(Stack), - yeccgoto_token(hd(Ss), Cat, Ss, NewStack, T, Ts, Tzr). + yeccgoto_token(hd(Ss), Cat, Ss, Stack, T, Ts, Tzr). yeccpars2_27(_S, Cat, Ss, Stack, T, Ts, Tzr) -> - NewStack = yeccpars2_27_(Stack), - yeccgoto_token(hd(Ss), Cat, Ss, NewStack, T, Ts, Tzr). + yeccgoto_token(hd(Ss), Cat, Ss, Stack, T, Ts, Tzr). yeccpars2_28(_S, Cat, Ss, Stack, T, Ts, Tzr) -> [_|Nss] = Ss, @@ -469,11 +452,13 @@ yeccpars2_29(_S, Cat, Ss, Stack, T, Ts, Tzr) -> NewStack = yeccpars2_29_(Stack), yeccgoto_rule(hd(Nss), Cat, Nss, NewStack, T, Ts, Tzr). +-dialyzer({nowarn_function, yeccpars2_30/7}). yeccpars2_30(S, dot, Ss, Stack, T, Ts, Tzr) -> yeccpars1(S, 35, Ss, Stack, T, Ts, Tzr); yeccpars2_30(_, _, _, _, T, _, _) -> yeccerror(T). +-dialyzer({nowarn_function, yeccpars2_31/7}). yeccpars2_31(S, dot, Ss, Stack, T, Ts, Tzr) -> yeccpars1(S, 34, Ss, Stack, T, Ts, Tzr); yeccpars2_31(_, _, _, _, T, _, _) -> @@ -500,26 +485,33 @@ yeccpars2_35(_S, Cat, Ss, Stack, T, Ts, Tzr) -> NewStack = yeccpars2_35_(Stack), yeccgoto_declaration(hd(Nss), Cat, Nss, NewStack, T, Ts, Tzr). +-dialyzer({nowarn_function, yeccgoto_attached_code/7}). 
yeccgoto_attached_code(11, Cat, Ss, Stack, T, Ts, Tzr) -> yeccpars2_14(14, Cat, Ss, Stack, T, Ts, Tzr). +-dialyzer({nowarn_function, yeccgoto_declaration/7}). yeccgoto_declaration(0=_S, Cat, Ss, Stack, T, Ts, Tzr) -> yeccpars2_5(_S, Cat, Ss, Stack, T, Ts, Tzr). +-dialyzer({nowarn_function, yeccgoto_grammar/7}). yeccgoto_grammar(0, Cat, Ss, Stack, T, Ts, Tzr) -> yeccpars2_4(4, Cat, Ss, Stack, T, Ts, Tzr). +-dialyzer({nowarn_function, yeccgoto_head/7}). yeccgoto_head(0, Cat, Ss, Stack, T, Ts, Tzr) -> yeccpars2_3(3, Cat, Ss, Stack, T, Ts, Tzr). +-dialyzer({nowarn_function, yeccgoto_rule/7}). yeccgoto_rule(0=_S, Cat, Ss, Stack, T, Ts, Tzr) -> yeccpars2_2(_S, Cat, Ss, Stack, T, Ts, Tzr). +-dialyzer({nowarn_function, yeccgoto_strings/7}). yeccgoto_strings(1, Cat, Ss, Stack, T, Ts, Tzr) -> yeccpars2_31(31, Cat, Ss, Stack, T, Ts, Tzr); yeccgoto_strings(32=_S, Cat, Ss, Stack, T, Ts, Tzr) -> yeccpars2_33(_S, Cat, Ss, Stack, T, Ts, Tzr). +-dialyzer({nowarn_function, yeccgoto_symbol/7}). yeccgoto_symbol(0, Cat, Ss, Stack, T, Ts, Tzr) -> yeccpars2_1(1, Cat, Ss, Stack, T, Ts, Tzr); yeccgoto_symbol(1, Cat, Ss, Stack, T, Ts, Tzr) -> @@ -529,6 +521,7 @@ yeccgoto_symbol(10, Cat, Ss, Stack, T, Ts, Tzr) -> yeccgoto_symbol(12, Cat, Ss, Stack, T, Ts, Tzr) -> yeccpars2_12(12, Cat, Ss, Stack, T, Ts, Tzr). +-dialyzer({nowarn_function, yeccgoto_symbols/7}). yeccgoto_symbols(1, Cat, Ss, Stack, T, Ts, Tzr) -> yeccpars2_30(30, Cat, Ss, Stack, T, Ts, Tzr); yeccgoto_symbols(10, Cat, Ss, Stack, T, Ts, Tzr) -> @@ -536,18 +529,20 @@ yeccgoto_symbols(10, Cat, Ss, Stack, T, Ts, Tzr) -> yeccgoto_symbols(12=_S, Cat, Ss, Stack, T, Ts, Tzr) -> yeccpars2_13(_S, Cat, Ss, Stack, T, Ts, Tzr). +-dialyzer({nowarn_function, yeccgoto_token/7}). yeccgoto_token(15, Cat, Ss, Stack, T, Ts, Tzr) -> yeccpars2_17(17, Cat, Ss, Stack, T, Ts, Tzr); yeccgoto_token(17, Cat, Ss, Stack, T, Ts, Tzr) -> yeccpars2_17(17, Cat, Ss, Stack, T, Ts, Tzr). +-dialyzer({nowarn_function, yeccgoto_tokens/7}). yeccgoto_tokens(15=_S, Cat, Ss, Stack, T, Ts, Tzr) -> yeccpars2_16(_S, Cat, Ss, Stack, T, Ts, Tzr); yeccgoto_tokens(17=_S, Cat, Ss, Stack, T, Ts, Tzr) -> yeccpars2_28(_S, Cat, Ss, Stack, T, Ts, Tzr). -compile({inline,yeccpars2_6_/1}). --file("yeccgramm.yrl", 44). +-file("yeccgramm.yrl", 46). yeccpars2_6_(__Stack0) -> [__1 | __Stack] = __Stack0, [begin @@ -555,7 +550,7 @@ yeccpars2_6_(__Stack0) -> end | __Stack]. -compile({inline,yeccpars2_7_/1}). --file("yeccgramm.yrl", 45). +-file("yeccgramm.yrl", 47). yeccpars2_7_(__Stack0) -> [__1 | __Stack] = __Stack0, [begin @@ -563,7 +558,7 @@ yeccpars2_7_(__Stack0) -> end | __Stack]. -compile({inline,yeccpars2_8_/1}). --file("yeccgramm.yrl", 46). +-file("yeccgramm.yrl", 48). yeccpars2_8_(__Stack0) -> [__1 | __Stack] = __Stack0, [begin @@ -571,7 +566,7 @@ yeccpars2_8_(__Stack0) -> end | __Stack]. -compile({inline,yeccpars2_9_/1}). --file("yeccgramm.yrl", 43). +-file("yeccgramm.yrl", 45). yeccpars2_9_(__Stack0) -> [__1 | __Stack] = __Stack0, [begin @@ -579,14 +574,15 @@ yeccpars2_9_(__Stack0) -> end | __Stack]. -compile({inline,yeccpars2_11_/1}). --file("yeccgramm.yrl", 40). +-file("yeccgramm.yrl", 41). yeccpars2_11_(__Stack0) -> [begin - { erlang_code , [ { atom , 0 , '$undefined' } ] } + { erlang_code , + [ { atom , erl_anno : new ( 0 ) , '$undefined' } ] } end | __Stack0]. -compile({inline,yeccpars2_12_/1}). --file("yeccgramm.yrl", 35). +-file("yeccgramm.yrl", 36). yeccpars2_12_(__Stack0) -> [__1 | __Stack] = __Stack0, [begin @@ -594,7 +590,7 @@ yeccpars2_12_(__Stack0) -> end | __Stack]. 
-compile({inline,yeccpars2_13_/1}). --file("yeccgramm.yrl", 36). +-file("yeccgramm.yrl", 37). yeccpars2_13_(__Stack0) -> [__2,__1 | __Stack] = __Stack0, [begin @@ -602,7 +598,7 @@ yeccpars2_13_(__Stack0) -> end | __Stack]. -compile({inline,yeccpars2_16_/1}). --file("yeccgramm.yrl", 39). +-file("yeccgramm.yrl", 40). yeccpars2_16_(__Stack0) -> [__2,__1 | __Stack] = __Stack0, [begin @@ -610,7 +606,7 @@ yeccpars2_16_(__Stack0) -> end | __Stack]. -compile({inline,yeccpars2_17_/1}). --file("yeccgramm.yrl", 41). +-file("yeccgramm.yrl", 43). yeccpars2_17_(__Stack0) -> [__1 | __Stack] = __Stack0, [begin @@ -618,87 +614,39 @@ yeccpars2_17_(__Stack0) -> end | __Stack]. -compile({inline,yeccpars2_18_/1}). --file("yeccgramm.yrl", 55). +-file("yeccgramm.yrl", 57). yeccpars2_18_(__Stack0) -> [__1 | __Stack] = __Stack0, [begin - { '->' , line_of ( __1 ) } + { '->' , anno_of ( __1 ) } end | __Stack]. -compile({inline,yeccpars2_19_/1}). --file("yeccgramm.yrl", 56). +-file("yeccgramm.yrl", 58). yeccpars2_19_(__Stack0) -> [__1 | __Stack] = __Stack0, [begin - { ':' , line_of ( __1 ) } - end | __Stack]. - --compile({inline,yeccpars2_20_/1}). --file("yeccgramm.yrl", 48). -yeccpars2_20_(__Stack0) -> - [__1 | __Stack] = __Stack0, - [begin - token ( __1 ) - end | __Stack]. - --compile({inline,yeccpars2_21_/1}). --file("yeccgramm.yrl", 52). -yeccpars2_21_(__Stack0) -> - [__1 | __Stack] = __Stack0, - [begin - token ( __1 ) - end | __Stack]. - --compile({inline,yeccpars2_22_/1}). --file("yeccgramm.yrl", 49). -yeccpars2_22_(__Stack0) -> - [__1 | __Stack] = __Stack0, - [begin - token ( __1 ) - end | __Stack]. - --compile({inline,yeccpars2_23_/1}). --file("yeccgramm.yrl", 50). -yeccpars2_23_(__Stack0) -> - [__1 | __Stack] = __Stack0, - [begin - token ( __1 ) + { ':' , anno_of ( __1 ) } end | __Stack]. -compile({inline,yeccpars2_24_/1}). --file("yeccgramm.yrl", 53). +-file("yeccgramm.yrl", 55). yeccpars2_24_(__Stack0) -> [__1 | __Stack] = __Stack0, [begin - { value_of ( __1 ) , line_of ( __1 ) } + { value_of ( __1 ) , anno_of ( __1 ) } end | __Stack]. -compile({inline,yeccpars2_25_/1}). --file("yeccgramm.yrl", 54). +-file("yeccgramm.yrl", 56). yeccpars2_25_(__Stack0) -> [__1 | __Stack] = __Stack0, [begin - { value_of ( __1 ) , line_of ( __1 ) } - end | __Stack]. - --compile({inline,yeccpars2_26_/1}). --file("yeccgramm.yrl", 51). -yeccpars2_26_(__Stack0) -> - [__1 | __Stack] = __Stack0, - [begin - token ( __1 ) - end | __Stack]. - --compile({inline,yeccpars2_27_/1}). --file("yeccgramm.yrl", 47). -yeccpars2_27_(__Stack0) -> - [__1 | __Stack] = __Stack0, - [begin - token ( __1 ) + { value_of ( __1 ) , anno_of ( __1 ) } end | __Stack]. -compile({inline,yeccpars2_28_/1}). --file("yeccgramm.yrl", 42). +-file("yeccgramm.yrl", 44). yeccpars2_28_(__Stack0) -> [__2,__1 | __Stack] = __Stack0, [begin @@ -706,7 +654,7 @@ yeccpars2_28_(__Stack0) -> end | __Stack]. -compile({inline,yeccpars2_29_/1}). --file("yeccgramm.yrl", 33). +-file("yeccgramm.yrl", 34). yeccpars2_29_(__Stack0) -> [__5,__4,__3,__2,__1 | __Stack] = __Stack0, [begin @@ -714,23 +662,23 @@ yeccpars2_29_(__Stack0) -> end | __Stack]. -compile({inline,yeccpars2_32_/1}). --file("yeccgramm.yrl", 37). +-file("yeccgramm.yrl", 38). yeccpars2_32_(__Stack0) -> [__1 | __Stack] = __Stack0, [begin - [ string ( __1 ) ] + [ __1 ] end | __Stack]. -compile({inline,yeccpars2_33_/1}). --file("yeccgramm.yrl", 38). +-file("yeccgramm.yrl", 39). yeccpars2_33_(__Stack0) -> [__2,__1 | __Stack] = __Stack0, [begin - [ string ( __1 ) | __2 ] + [ __1 | __2 ] end | __Stack]. 
-compile({inline,yeccpars2_34_/1}). --file("yeccgramm.yrl", 32). +-file("yeccgramm.yrl", 33). yeccpars2_34_(__Stack0) -> [__3,__2,__1 | __Stack] = __Stack0, [begin @@ -738,7 +686,7 @@ yeccpars2_34_(__Stack0) -> end | __Stack]. -compile({inline,yeccpars2_35_/1}). --file("yeccgramm.yrl", 31). +-file("yeccgramm.yrl", 32). yeccpars2_35_(__Stack0) -> [__3,__2,__1 | __Stack] = __Stack0, [begin @@ -746,4 +694,4 @@ yeccpars2_35_(__Stack0) -> end | __Stack]. --file("yeccgramm.yrl", 82). +-file("yeccgramm.yrl", 77). diff --git a/lib/parsetools/test/yecc_SUITE.erl b/lib/parsetools/test/yecc_SUITE.erl index 5bd71d5d19..2c37278d4b 100644 --- a/lib/parsetools/test/yecc_SUITE.erl +++ b/lib/parsetools/test/yecc_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2005-2016. All Rights Reserved. +%% Copyright Ericsson AB 2005-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -1524,7 +1524,7 @@ otp_7945(suite) -> []; otp_7945(Config) when is_list(Config) -> A2 = erl_anno:new(2), A3 = erl_anno:new(3), - {error,_} = erl_parse:parse([{atom,3,foo},{'.',A2,9,9}]), + {error,_} = erl_parse:parse([{atom,A3,foo},{'.',A2,9,9}]), ok. otp_8483(doc) -> diff --git a/lib/public_key/asn1/PKCS-8.asn1 b/lib/public_key/asn1/PKCS-8.asn1 index 8412345b68..292a7b2029 100644 --- a/lib/public_key/asn1/PKCS-8.asn1 +++ b/lib/public_key/asn1/PKCS-8.asn1 @@ -26,7 +26,7 @@ BEGIN -- This import is really unnecessary since ALGORITHM-IDENTIFIER is defined as a -- TYPE-IDENTIFIER --- Renome this import and replace all occurences of ALGORITHM-IDENTIFIER with +-- Rename this import and replace all occurrences of ALGORITHM-IDENTIFIER with -- TYPE-IDENTIFIER as a workaround for weaknesses in the ASN.1 compiler --AlgorithmIdentifier, ALGORITHM-IDENTIFIER -- FROM PKCS5v2-0 {iso(1) member-body(2) us(840) rsadsi(113549) diff --git a/lib/public_key/doc/src/notes.xml b/lib/public_key/doc/src/notes.xml index 74d1a57936..92e314186e 100644 --- a/lib/public_key/doc/src/notes.xml +++ b/lib/public_key/doc/src/notes.xml @@ -35,6 +35,34 @@ <file>notes.xml</file> </header> +<section><title>Public_Key 1.4</title> + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + New function <c>pkix_verify_hostname/2,3</c> Implements + certificate hostname checking. See the manual and RFC + 6125.</p> + <p> + Own Id: OTP-13009</p> + </item> + <item> + <p> + The ssh host key fingerprint generation now also takes a + list of algorithms and returns a list of corresponding + fingerprints. 
See + <c>public_key:ssh_hostkey_fingerprint/2</c> and the + option <c>silently_accept_hosts</c> in + <c>ssh:connect</c>.</p> + <p> + Own Id: OTP-14223</p> + </item> + </list> + </section> + +</section> + <section><title>Public_Key 1.3</title> <section><title>Improvements and New Features</title> diff --git a/lib/public_key/doc/src/public_key.xml b/lib/public_key/doc/src/public_key.xml index 37aa05e0fd..2300ce3937 100644 --- a/lib/public_key/doc/src/public_key.xml +++ b/lib/public_key/doc/src/public_key.xml @@ -331,13 +331,16 @@ </func> <func> - <name>generate_key(Params) -> {Public::binary(), Private::binary()} | #'ECPrivateKey'{} </name> + <name>generate_key(Params) -> {Public::binary(), Private::binary()} | #'ECPrivateKey'{} | {#'RSAPublicKey'{}, #'RSAPrivateKey'{}}</name> <fsummary>Generates a new keypair.</fsummary> <type> - <v>Params = #'DHParameter'{} | {namedCurve, oid()} | #'ECParameters'{}</v> + <v>Params = #'DHParameter'{} | {namedCurve, oid()} | #'ECParameters'{} + | {rsa, Size::integer(), PubExp::integer} </v> </type> <desc> - <p>Generates a new keypair.</p> + <p>Generates a new keypair. See also + <seealso marker="crypto:crypto#generate_key/2">crypto:generate_key/2</seealso> + </p> </desc> </func> @@ -857,6 +860,7 @@ fun(#'DistributionPoint'{}, #'CertificateList'{}, <func> <name>ssh_hostkey_fingerprint(HostKey) -> string()</name> <name>ssh_hostkey_fingerprint(DigestType, HostKey) -> string()</name> + <name>ssh_hostkey_fingerprint([DigestType], HostKey) -> [string()]</name> <fsummary>Calculates a ssh fingerprint for a hostkey.</fsummary> <type> <v>Key = public_key()</v> @@ -880,6 +884,10 @@ fun(#'DistributionPoint'{}, #'CertificateList'{}, 5> public_key:ssh_hostkey_fingerprint(sha256,Key). "SHA256:aZGXhabfbf4oxglxltItWeHU7ub3Dc31NcNw2cMJePQ" + + 6> public_key:ssh_hostkey_fingerprint([sha,sha256],Key). + ["SHA1:bSLY/C4QXLDL/Iwmhyg0PGW9UbY", + "SHA256:aZGXhabfbf4oxglxltItWeHU7ub3Dc31NcNw2cMJePQ"] </code> </desc> </func> diff --git a/lib/public_key/src/public_key.app.src b/lib/public_key/src/public_key.app.src index 88ef07c5a6..dbd732c384 100644 --- a/lib/public_key/src/public_key.app.src +++ b/lib/public_key/src/public_key.app.src @@ -14,7 +14,7 @@ {applications, [asn1, crypto, kernel, stdlib]}, {registered, []}, {env, []}, - {runtime_dependencies, ["stdlib-2.0","kernel-3.0","erts-6.0","crypto-3.3", + {runtime_dependencies, ["stdlib-2.0","kernel-3.0","erts-6.0","crypto-3.8", "asn1-3.0"]} ] }. diff --git a/lib/public_key/src/public_key.erl b/lib/public_key/src/public_key.erl index 42b6826404..8f185bbbd4 100644 --- a/lib/public_key/src/public_key.erl +++ b/lib/public_key/src/public_key.erl @@ -395,9 +395,15 @@ dh_gex_group(Min, N, Max, Groups) -> pubkey_ssh:dh_gex_group(Min, N, Max, Groups). %%-------------------------------------------------------------------- --spec generate_key(#'DHParameter'{} | {namedCurve, Name ::oid()} | - #'ECParameters'{}) -> {Public::binary(), Private::binary()} | - #'ECPrivateKey'{}. +-spec generate_key(#'DHParameter'{}) -> + {Public::binary(), Private::binary()}; + ({namedCurve, Name ::oid()}) -> + #'ECPrivateKey'{}; + (#'ECParameters'{}) -> + #'ECPrivateKey'{}; + ({rsa, Size::pos_integer(), PubExp::pos_integer()}) -> + {#'RSAPublicKey'{}, #'RSAPrivateKey'{}}. 
+ %% Description: Generates a new keypair %%-------------------------------------------------------------------- generate_key(#'DHParameter'{prime = P, base = G}) -> @@ -405,7 +411,49 @@ generate_key(#'DHParameter'{prime = P, base = G}) -> generate_key({namedCurve, _} = Params) -> ec_generate_key(Params); generate_key(#'ECParameters'{} = Params) -> - ec_generate_key(Params). + ec_generate_key(Params); +generate_key({rsa, ModulusSize, PublicExponent}) -> + case crypto:generate_key(rsa, {ModulusSize,PublicExponent}) of + {[E, N], [E, N, D, P, Q, D_mod_P_1, D_mod_Q_1, InvQ_mod_P]} -> + Nint = crypto:bytes_to_integer(N), + Eint = crypto:bytes_to_integer(E), + {#'RSAPublicKey'{modulus = Nint, + publicExponent = Eint}, + #'RSAPrivateKey'{version = 0, % Two-factor (I guess since otherPrimeInfos is not given) + modulus = Nint, + publicExponent = Eint, + privateExponent = crypto:bytes_to_integer(D), + prime1 = crypto:bytes_to_integer(P), + prime2 = crypto:bytes_to_integer(Q), + exponent1 = crypto:bytes_to_integer(D_mod_P_1), + exponent2 = crypto:bytes_to_integer(D_mod_Q_1), + coefficient = crypto:bytes_to_integer(InvQ_mod_P)} + }; + + {[E, N], [E, N, D]} -> % FIXME: what to set the other fields in #'RSAPrivateKey'? + % Answer: Miller [Mil76] + % G.L. Miller. Riemann's hypothesis and tests for primality. + % Journal of Computer and Systems Sciences, + % 13(3):300-307, + % 1976. + Nint = crypto:bytes_to_integer(N), + Eint = crypto:bytes_to_integer(E), + {#'RSAPublicKey'{modulus = Nint, + publicExponent = Eint}, + #'RSAPrivateKey'{version = 0, % Two-factor (I guess since otherPrimeInfos is not given) + modulus = Nint, + publicExponent = Eint, + privateExponent = crypto:bytes_to_integer(D), + prime1 = '?', + prime2 = '?', + exponent1 = '?', + exponent2 = '?', + coefficient = '?'} + }; + + Other -> + Other + end. %%-------------------------------------------------------------------- -spec compute_key(#'ECPoint'{} , #'ECPrivateKey'{}) -> binary(). @@ -893,21 +941,31 @@ oid2ssh_curvename(?'secp521r1') -> <<"nistp521">>. %%-------------------------------------------------------------------- -spec ssh_hostkey_fingerprint(public_key()) -> string(). --spec ssh_hostkey_fingerprint(digest_type(), public_key()) -> string(). +-spec ssh_hostkey_fingerprint( digest_type(), public_key()) -> string() + ; ([digest_type()], public_key()) -> [string()] + . ssh_hostkey_fingerprint(Key) -> - sshfp_string(md5, Key). + sshfp_string(md5, public_key:ssh_encode(Key,ssh2_pubkey) ). -ssh_hostkey_fingerprint(HashAlg, Key) -> - lists:concat([sshfp_alg_name(HashAlg), - [$: | sshfp_string(HashAlg, Key)] - ]). +ssh_hostkey_fingerprint(HashAlgs, Key) when is_list(HashAlgs) -> + EncKey = public_key:ssh_encode(Key, ssh2_pubkey), + [sshfp_full_string(HashAlg,EncKey) || HashAlg <- HashAlgs]; +ssh_hostkey_fingerprint(HashAlg, Key) when is_atom(HashAlg) -> + EncKey = public_key:ssh_encode(Key, ssh2_pubkey), + sshfp_full_string(HashAlg, EncKey). -sshfp_string(HashAlg, Key) -> + +sshfp_string(HashAlg, EncodedKey) -> %% Other HashAlgs than md5 will be printed with %% other formats than hextstr by %% ssh-keygen -E <alg> -lf <file> - fp_fmt(sshfp_fmt(HashAlg), crypto:hash(HashAlg, public_key:ssh_encode(Key,ssh2_pubkey))). + fp_fmt(sshfp_fmt(HashAlg), crypto:hash(HashAlg, EncodedKey)). + +sshfp_full_string(HashAlg, EncKey) -> + lists:concat([sshfp_alg_name(HashAlg), + [$: | sshfp_string(HashAlg, EncKey)] + ]). sshfp_alg_name(sha) -> "SHA1"; sshfp_alg_name(Alg) -> string:to_upper(atom_to_list(Alg)). 
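A usage sketch of the new {rsa, Size, PubExp} clause of generate_key/1 shown above; the 2048/65537 parameters are illustrative (the test helper in this patch uses 2048/17), and a cryptolib without RSA key generation support makes crypto raise error:notsup:

    %% In a module: -include_lib("public_key/include/public_key.hrl").
    {#'RSAPublicKey'{} = Public, #'RSAPrivateKey'{} = Private} =
        public_key:generate_key({rsa, 2048, 65537}).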
@@ -1188,8 +1246,11 @@ ec_curve_spec( #'ECParameters'{fieldID = FieldId, curve = PCurve, base = Base, o FieldId#'FieldID'.parameters}, Curve = {PCurve#'Curve'.a, PCurve#'Curve'.b, none}, {Field, Curve, Base, Order, CoFactor}; -ec_curve_spec({namedCurve, OID}) -> - pubkey_cert_records:namedCurves(OID). +ec_curve_spec({namedCurve, OID}) when is_tuple(OID), is_integer(element(1,OID)) -> + ec_curve_spec({namedCurve, pubkey_cert_records:namedCurves(OID)}); +ec_curve_spec({namedCurve, Name}) when is_atom(Name) -> + crypto:ec_curve(Name). + ec_key({PubKey, PrivateKey}, Params) -> #'ECPrivateKey'{version = 1, diff --git a/lib/public_key/test/erl_make_certs.erl b/lib/public_key/test/erl_make_certs.erl index 3dab70784c..00be7dd5b3 100644 --- a/lib/public_key/test/erl_make_certs.erl +++ b/lib/public_key/test/erl_make_certs.erl @@ -346,10 +346,24 @@ make_key(ec, _Opts) -> %% RSA key generation (OBS: for testing only) %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +gen_rsa2(Size) -> + try + %% The numbers 2048,17 is choosen to not cause the cryptolib on + %% FIPS-enabled test machines be mad at us. + public_key:generate_key({rsa, 2048, 17}) + of + {_Public, Private} -> Private + catch + error:notsup -> + %% Disabled dirty_schedulers => crypto:generate_key not working + weak_gen_rsa2(Size) + end. + + -define(SMALL_PRIMES, [65537,97,89,83,79,73,71,67,61,59,53, 47,43,41,37,31,29,23,19,17,13,11,7,5,3]). -gen_rsa2(Size) -> +weak_gen_rsa2(Size) -> P = prime(Size), Q = prime(Size), N = P*Q, diff --git a/lib/public_key/test/public_key_SUITE.erl b/lib/public_key/test/public_key_SUITE.erl index 615ff32539..68aa152911 100644 --- a/lib/public_key/test/public_key_SUITE.erl +++ b/lib/public_key/test/public_key_SUITE.erl @@ -54,7 +54,8 @@ all() -> ssh_hostkey_fingerprint_sha, ssh_hostkey_fingerprint_sha256, ssh_hostkey_fingerprint_sha384, - ssh_hostkey_fingerprint_sha512 + ssh_hostkey_fingerprint_sha512, + ssh_hostkey_fingerprint_list ]. groups() -> @@ -93,20 +94,21 @@ end_per_group(_GroupName, Config) -> %%------------------------------------------------------------------- init_per_testcase(TestCase, Config) -> case TestCase of - ssh_hostkey_fingerprint_md5_implicit -> init_fingerprint_testcase(md5, Config); - ssh_hostkey_fingerprint_md5 -> init_fingerprint_testcase(md5, Config); - ssh_hostkey_fingerprint_sha -> init_fingerprint_testcase(sha, Config); - ssh_hostkey_fingerprint_sha256 -> init_fingerprint_testcase(sha256, Config); - ssh_hostkey_fingerprint_sha384 -> init_fingerprint_testcase(sha384, Config); - ssh_hostkey_fingerprint_sha512 -> init_fingerprint_testcase(sha512, Config); + ssh_hostkey_fingerprint_md5_implicit -> init_fingerprint_testcase([md5], Config); + ssh_hostkey_fingerprint_md5 -> init_fingerprint_testcase([md5], Config); + ssh_hostkey_fingerprint_sha -> init_fingerprint_testcase([sha], Config); + ssh_hostkey_fingerprint_sha256 -> init_fingerprint_testcase([sha256], Config); + ssh_hostkey_fingerprint_sha384 -> init_fingerprint_testcase([sha384], Config); + ssh_hostkey_fingerprint_sha512 -> init_fingerprint_testcase([sha512], Config); + ssh_hostkey_fingerprint_list -> init_fingerprint_testcase([sha,md5], Config); _ -> init_common_per_testcase(Config) end. 
-init_fingerprint_testcase(Alg, Config) -> - CryptoSupports = lists:member(Alg, proplists:get_value(hashs, crypto:supports())), - case CryptoSupports of - false -> {skip,{Alg,not_supported}}; - true -> init_common_per_testcase(Config) +init_fingerprint_testcase(Algs, Config) -> + Hashs = proplists:get_value(hashs, crypto:supports(), []), + case Algs -- Hashs of + [] -> init_common_per_testcase(Config); + UnsupportedAlgs -> {skip,{UnsupportedAlgs,not_supported}} end. init_common_per_testcase(Config0) -> @@ -600,6 +602,14 @@ ssh_hostkey_fingerprint_sha512(_Config) -> Expected = public_key:ssh_hostkey_fingerprint(sha512, ssh_hostkey(rsa)). %%-------------------------------------------------------------------- +%% Since this kind of fingerprint is not available yet on standard +%% distros, we do like this instead. +ssh_hostkey_fingerprint_list(_Config) -> + Expected = ["SHA1:Soammnaqg06jrm2jivMSnzQGlmk", + "MD5:4b:0b:63:de:0f:a7:3a:ab:2c:cc:2d:d1:21:37:1d:3a"], + Expected = public_key:ssh_hostkey_fingerprint([sha,md5], ssh_hostkey(rsa)). + +%%-------------------------------------------------------------------- encrypt_decrypt() -> [{doc, "Test public_key:encrypt_private and public_key:decrypt_public"}]. encrypt_decrypt(Config) when is_list(Config) -> diff --git a/lib/public_key/vsn.mk b/lib/public_key/vsn.mk index 2f541d8d84..b94768ae77 100644 --- a/lib/public_key/vsn.mk +++ b/lib/public_key/vsn.mk @@ -1 +1 @@ -PUBLIC_KEY_VSN = 1.3 +PUBLIC_KEY_VSN = 1.4 diff --git a/lib/reltool/doc/src/notes.xml b/lib/reltool/doc/src/notes.xml index 2365a68feb..b47d451055 100644 --- a/lib/reltool/doc/src/notes.xml +++ b/lib/reltool/doc/src/notes.xml @@ -38,7 +38,22 @@ thus constitutes one section in this document. The title of each section is the version number of Reltool.</p> - <section><title>Reltool 0.7.2</title> + <section><title>Reltool 0.7.3</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Fixed xml issues in old release notes</p> + <p> + Own Id: OTP-14269</p> + </item> + </list> + </section> + +</section> + +<section><title>Reltool 0.7.2</title> <section><title>Fixed Bugs and Malfunctions</title> <list> @@ -52,13 +67,13 @@ Some dependency chains would even be missed for applications that are included in a 'rel' spec in the reltool config. E.g.</p> - <p> + <list> <item>Application x has y as included application, and y in turn has z as included application. Then z is not included. </item> <item>Application x has y in its 'applications' tag in the .app file, and y in turn has z as included application. Then z is not included.</item> - </list></p> + </list> <p> These bugs are now corrected.</p> <p> diff --git a/lib/reltool/src/reltool.hrl b/lib/reltool/src/reltool.hrl index 3b1e868757..c61c3a0c71 100644 --- a/lib/reltool/src/reltool.hrl +++ b/lib/reltool/src/reltool.hrl @@ -289,8 +289,8 @@ "^lib", "^releases"]). -define(EMBEDDED_EXCL_SYS_FILTERS, - ["^bin/(erlc|dialyzer|typer)(|\\.exe)\$", - "^erts.*/bin/(erlc|dialyzer|typer)(|\\.exe)\$", + ["^bin/(erlc|dialyzer)(|\\.exe)\$", + "^erts.*/bin/(erlc|dialyzer)(|\\.exe)\$", "^erts.*/bin/.*(debug|pdb)"]). -define(EMBEDDED_INCL_APP_FILTERS, ["^ebin", "^include", @@ -303,7 +303,7 @@ "^erts.*/bin", "^lib\$"]). -define(STANDALONE_EXCL_SYS_FILTERS, - ["^erts.*/bin/(erlc|dialyzer|typer)(|\\.exe)\$", + ["^erts.*/bin/(erlc|dialyzer)(|\\.exe)\$", "^erts.*/bin/(start|escript|to_erl|run_erl)(|\\.exe)\$", "^erts.*/bin/.*(debug|pdb)"]). 
-define(STANDALONE_INCL_APP_FILTERS, ["^ebin", diff --git a/lib/reltool/vsn.mk b/lib/reltool/vsn.mk index 2b23ff6f20..2d07eeb8f0 100644 --- a/lib/reltool/vsn.mk +++ b/lib/reltool/vsn.mk @@ -1 +1 @@ -RELTOOL_VSN = 0.7.2 +RELTOOL_VSN = 0.7.3 diff --git a/lib/runtime_tools/doc/src/notes.xml b/lib/runtime_tools/doc/src/notes.xml index 4c79a560ec..0eafc437cc 100644 --- a/lib/runtime_tools/doc/src/notes.xml +++ b/lib/runtime_tools/doc/src/notes.xml @@ -32,6 +32,24 @@ <p>This document describes the changes made to the Runtime_Tools application.</p> +<section><title>Runtime_Tools 1.11.1</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + etop erroneously reported the average scheduler + utilization since the tool was first started instead of + the scheduler utilization since last update. This is now + corrected.</p> + <p> + Own Id: OTP-14090 Aux Id: seq13232 </p> + </item> + </list> + </section> + +</section> + <section><title>Runtime_Tools 1.11</title> <section><title>Improvements and New Features</title> diff --git a/lib/runtime_tools/vsn.mk b/lib/runtime_tools/vsn.mk index 53fc51c198..8ec532de76 100644 --- a/lib/runtime_tools/vsn.mk +++ b/lib/runtime_tools/vsn.mk @@ -1 +1 @@ -RUNTIME_TOOLS_VSN = 1.11 +RUNTIME_TOOLS_VSN = 1.11.1 diff --git a/lib/sasl/doc/src/notes.xml b/lib/sasl/doc/src/notes.xml index 190b937f03..cd3f0e1864 100644 --- a/lib/sasl/doc/src/notes.xml +++ b/lib/sasl/doc/src/notes.xml @@ -31,6 +31,28 @@ </header> <p>This document describes the changes made to the SASL application.</p> +<section><title>SASL 3.0.3</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + When both options 'warnings_as_errors' and 'silent' were + given to systools:make_script or systools:make_relup, no + error reason would be returned if warnings occurred. + Instead only the atom 'error' was returned. This is now + corrected.</p> + <p> + Options 'warnings_as_errors' and 'no_warn_sasl' are now + also allowed for systools:make_tar.</p> + <p> + Own Id: OTP-14170</p> + </item> + </list> + </section> + +</section> + <section><title>SASL 3.0.2</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/sasl/doc/src/systools.xml b/lib/sasl/doc/src/systools.xml index fa503fa573..4ca4a08329 100644 --- a/lib/sasl/doc/src/systools.xml +++ b/lib/sasl/doc/src/systools.xml @@ -268,7 +268,7 @@ <fsummary>Creates a release package.</fsummary> <type> <v>Name = string()</v> - <v>Opt = {dirs,[IncDir]} | {path,[Dir]} | {variables,[Var]} | {var_tar,VarTar} | {erts,Dir} | src_tests | exref | {exref,[App]} | silent | {outdir,Dir}</v> + <v>Opt = {dirs,[IncDir]} | {path,[Dir]} | {variables,[Var]} | {var_tar,VarTar} | {erts,Dir} | src_tests | exref | {exref,[App]} | silent | {outdir,Dir} | no_warn_sasl | warnings_as_errors</v> <v> Dir = string()</v> <v> IncDir = src | include | atom()</v> <v> Var = {VarName,PreFix}</v> @@ -297,6 +297,10 @@ directory unless <c>Name</c> contains a path. If option <c>{outdir,Dir}</c> is specified, it is located in <c>Dir</c> instead.</p> + <p>If SASL is not included as an application in + the <c>.rel</c> file, a warning is issued because such a + release cannot be used in an upgrade. To turn off this + warning, add option <c>no_warn_sasl</c>.</p> <p>By default, the release package contains the directories <c>lib/App-Vsn/ebin</c> and <c>lib/App-Vsn/priv</c> for each included application.
If more directories are to be included, diff --git a/lib/sasl/src/release_handler.erl b/lib/sasl/src/release_handler.erl index 1fcc9a0288..3250311b8f 100644 --- a/lib/sasl/src/release_handler.erl +++ b/lib/sasl/src/release_handler.erl @@ -831,7 +831,7 @@ do_unpack_release(Root, RelDir, ReleaseName, Releases) -> Tar = filename:join(RelDir, ReleaseName ++ ".tar.gz"), do_check_file(Tar, regular), Rel = ReleaseName ++ ".rel", - extract_rel_file(filename:join("releases", Rel), Tar, Root), + _ = extract_rel_file(filename:join("releases", Rel), Tar, Root), RelFile = filename:join(RelDir, Rel), Release = check_rel(Root, RelFile, false), #release{vsn = Vsn} = Release, @@ -1841,14 +1841,12 @@ do_check_file(Master, FileName, Type) -> %% by the user in another way, i.e. ignore this here. %%----------------------------------------------------------------- extract_rel_file(Rel, Tar, Root) -> - erl_tar:extract(Tar, [{files, [Rel]}, {cwd, Root}, compressed]). + _ = erl_tar:extract(Tar, [{files, [Rel]}, {cwd, Root}, compressed]). extract_tar(Root, Tar) -> case erl_tar:extract(Tar, [keep_old_files, {cwd, Root}, compressed]) of ok -> ok; - {error, Reason, Name} -> % Old erl_tar. - throw({error, {cannot_extract_file, Name, Reason}}); {error, {Name, Reason}} -> % New erl_tar (R3A). throw({error, {cannot_extract_file, Name, Reason}}) end. diff --git a/lib/sasl/src/systools_make.erl b/lib/sasl/src/systools_make.erl index 6a16c8689e..f03b03dc08 100644 --- a/lib/sasl/src/systools_make.erl +++ b/lib/sasl/src/systools_make.erl @@ -94,7 +94,11 @@ make_script(RelName, Output, Flags) when is_list(RelName), Warnings = wsasl(Flags, Warnings0), case systools_lib:werror(Flags, Warnings) of true -> - return(ok,Warnings,Flags); + Warnings1 = [W || {warning,W}<-Warnings], + return({error,?MODULE, + {warnings_treated_as_errors,Warnings1}}, + Warnings, + Flags); false -> case generate_script(Output,Release,Appls,Flags) of ok -> @@ -115,7 +119,6 @@ make_script(RelName, _Output, Flags) when is_list(Flags) -> make_script(RelName, _Output, Flags) -> badarg(Flags,[RelName, Flags]). 
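With the change above, make_script called with both silent and warnings_as_errors returns the offending warnings instead of the bare atom error. A sketch of the call pattern a build script might use; the release name "myrel" is a placeholder, and the make_tar and make_relup hunks further down produce the analogous tuples tagged with their own module names:

case systools:make_script("myrel", [silent, warnings_as_errors]) of
    {ok, systools_make, []} ->
        ok;
    {error, systools_make, {warnings_treated_as_errors, Ws}} ->
        %% Ws is the list of warnings that stopped the build
        {error, Ws};
    {error, Mod, Reason} ->
        %% any other error, e.g. a missing .rel file
        {error, Mod:format_error(Reason)}
end.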
- wsasl(Options, Warnings) -> case lists:member(no_warn_sasl,Options) of true -> lists:delete({warning,missing_sasl},Warnings); @@ -148,21 +151,10 @@ get_outdir(Flags) -> return(ok,Warnings,Flags) -> case member(silent,Flags) of true -> - case systools_lib:werror(Flags, Warnings) of - true -> - error; - false -> - {ok,?MODULE,Warnings} - end; + {ok,?MODULE,Warnings}; _ -> - case member(warnings_as_errors,Flags) of - true -> - io:format("~ts",[format_warning(Warnings, true)]), - error; - false -> - io:format("~ts",[format_warning(Warnings)]), - ok - end + io:format("~ts",[format_warning(Warnings)]), + ok end; return({error,Mod,Error},_,Flags) -> case member(silent,Flags) of @@ -300,6 +292,8 @@ add_apply_upgrade(Script,Args) -> %% {variables,[{Name,AbsString}]} %% {machine, jam | beam | vee} %% {var_tar, include | ownfile | omit} +%% no_warn_sasl +%% warnings_as_errors %% %% The tar file contains: %% lib/App-Vsn/ebin @@ -332,13 +326,23 @@ make_tar(RelName, Flags) when is_list(RelName), is_list(Flags) -> Path = make_set(Path1 ++ code:get_path()), ModTestP = {member(src_tests, Flags),xref_p(Flags)}, case get_release(RelName, Path, ModTestP, machine(Flags)) of - {ok, Release, Appls, Warnings} -> - case catch mk_tar(RelName, Release, Appls, Flags, Path1) of - ok -> - return(ok,Warnings,Flags); - Error -> - return(Error,Warnings,Flags) - end; + {ok, Release, Appls, Warnings0} -> + Warnings = wsasl(Flags, Warnings0), + case systools_lib:werror(Flags, Warnings) of + true -> + Warnings1 = [W || {warning,W}<-Warnings], + return({error,?MODULE, + {warnings_treated_as_errors,Warnings1}}, + Warnings, + Flags); + false -> + case catch mk_tar(RelName, Release, Appls, Flags, Path1) of + ok -> + return(ok,Warnings,Flags); + Error -> + return(Error,Warnings,Flags) + end + end; Error -> return(Error,[],Flags) end; @@ -1904,8 +1908,10 @@ del_tar(Tar, TarName) -> file:delete(TarName). add_to_tar(Tar, FromFile, ToFile) -> - case erl_tar:add(Tar, FromFile, ToFile, [compressed, dereference]) of + case catch erl_tar:add(Tar, FromFile, ToFile, [compressed, dereference]) of ok -> ok; + {'EXIT', Reason} -> + throw({error, {tar_error, {add, FromFile, Reason}}}); {error, Error} -> throw({error, {tar_error, {add, FromFile, Error}}}) end. @@ -2113,90 +2119,80 @@ cas([Y | Args], X) -> %% Check Options for make_tar check_args_tar(Args) -> - cat(Args, {undef, undef, undef, undef, undef, undef, undef, undef, undef, undef, []}). + cat(Args, []). 
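make_tar now runs the same wsasl/werror filtering as make_script, so the new options can be combined to package a release that intentionally lacks sasl while still failing hard on any other warning. A one-line sketch mirroring the no_sasl_tar test case added later in this patch; "myrel" is a placeholder and the usual {path,...} option is omitted:

{ok, systools_make, []} =
    systools:make_tar("myrel", [silent, no_warn_sasl, warnings_as_errors]).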
-cat([], {_Path,_Sil,_Dirs,_Erts,_Test,_Var,_VarTar,_Mach,_Xref,_XrefApps, X}) -> +cat([], X) -> X; %%% path --------------------------------------------------------------- -cat([{path, P} | Args], {Path, Sil, Dirs, Erts, Test, - Var, VarTar, Mach, Xref, XrefApps, X}) when is_list(P) -> +cat([{path, P} | Args], X) when is_list(P) -> case check_path(P) of ok -> - cat(Args, {P, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}); + cat(Args, X); error -> - cat(Args, {Path, Sil, Dirs, Erts, Test, - Var, VarTar, Mach, Xref, XrefApps, X++[{path,P}]}) + cat(Args, X++[{path,P}]) end; %%% silent ------------------------------------------------------------- -cat([silent | Args], {Path, _Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) -> - cat(Args, {Path, silent, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}); +cat([silent | Args], X) -> + cat(Args, X); %%% dirs --------------------------------------------------------------- -cat([{dirs, D} | Args], {Path, Sil, Dirs, Erts, Test, - Var, VarTar, Mach, Xref, XrefApps, X}) -> +cat([{dirs, D} | Args], X) -> case check_dirs(D) of ok -> - cat(Args, {Path, Sil, D, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}); + cat(Args, X); error -> - cat(Args, {Path, Sil, Dirs, Erts, Test, - Var, VarTar, Mach, Xref, XrefApps, X++[{dirs, D}]}) + cat(Args, X++[{dirs, D}]) end; %%% erts --------------------------------------------------------------- -cat([{erts, E} | Args], {Path, Sil, Dirs, _Erts, Test, - Var, VarTar, Mach, Xref, XrefApps, X}) when is_list(E)-> - cat(Args, {Path, Sil, Dirs, E, Test, Var, VarTar, Mach, Xref, XrefApps, X}); +cat([{erts, E} | Args], X) when is_list(E)-> + cat(Args, X); %%% src_tests ---------------------------------------------------- -cat([src_tests | Args], {Path, Sil, Dirs, Erts, _Test, Var, VarTar, Mach, Xref, XrefApps, X}) -> - cat(Args, {Path, Sil, Dirs, Erts, src_tests, Var, VarTar, Mach, - Xref, XrefApps, X}); +cat([src_tests | Args], X) -> + cat(Args, X); %%% variables ---------------------------------------------------------- -cat([{variables, V} | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) when is_list(V) -> +cat([{variables, V} | Args], X) when is_list(V) -> case check_vars(V) of ok -> - cat(Args, {Path, Sil, Dirs, Erts, Test, V, VarTar, Mach, Xref, XrefApps, X}); + cat(Args, X); error -> - cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, - Xref, XrefApps, X++[{variables, V}]}) + cat(Args, X++[{variables, V}]) end; %%% var_tar ------------------------------------------------------------ -cat([{var_tar, VT} | Args], {Path, Sil, Dirs, Erts, Test, - Var, _VarTar, Mach, Xref, XrefApps, X}) when VT == include -> - cat(Args, {Path, Sil, Dirs, Erts, Test, Var, include, Mach, Xref, XrefApps, X}); -cat([{var_tar, VT} | Args], {Path, Sil, Dirs, Erts, Test, - Var, _VarTar, Mach, Xref, XrefApps, X}) when VT == ownfile -> - cat(Args, {Path, Sil, Dirs, Erts, Test, Var, ownfile, Mach, Xref, XrefApps, X}); -cat([{var_tar, VT} | Args], {Path, Sil, Dirs, Erts, Test, - Var, _VarTar, Mach, Xref, XrefApps, X}) when VT == omit -> - cat(Args, {Path, Sil, Dirs, Erts, Test, Var, omit, Mach, Xref, XrefApps, X}); +cat([{var_tar, VT} | Args], X) when VT == include; + VT == ownfile; + VT == omit -> + cat(Args, X); %%% machine ------------------------------------------------------------ -cat([{machine, M} | Args], {Path, Sil, Dirs, Erts, Test, - Var, VarTar, Mach, Xref, XrefApps, X}) when is_atom(M) -> - cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, 
X}); +cat([{machine, M} | Args], X) when is_atom(M) -> + cat(Args, X); %%% exref -------------------------------------------------------------- -cat([exref | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, _Xref, XrefApps, X}) -> - cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, exref, XrefApps, X}); +cat([exref | Args], X) -> + cat(Args, X); %%% exref Apps --------------------------------------------------------- -cat([{exref, Apps} | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) when is_list(Apps) -> +cat([{exref, Apps} | Args], X) when is_list(Apps) -> case check_apps(Apps) of ok -> - cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, - Xref, Apps, X}); + cat(Args, X); error -> - cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, - Xref, XrefApps, X++[{exref, Apps}]}) + cat(Args, X++[{exref, Apps}]) end; %%% outdir Dir --------------------------------------------------------- -cat([{outdir, Dir} | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) when is_list(Dir) -> - cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, - Xref, XrefApps, X}); +cat([{outdir, Dir} | Args], X) when is_list(Dir) -> + cat(Args, X); %%% otp_build (secret, not documented) --------------------------------- -cat([otp_build | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) -> - cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}); +cat([otp_build | Args], X) -> + cat(Args, X); +%%% warnings_as_errors ---- +cat([warnings_as_errors | Args], X) -> + cat(Args, X); +%%% no_warn_sasl ---- +cat([no_warn_sasl | Args], X) -> + cat(Args, X); %%% no_module_tests (kept for backwards compatibility, but ignored) ---- -cat([no_module_tests | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) -> - cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}); +cat([no_module_tests | Args], X) -> + cat(Args, X); %%% ERROR -------------------------------------------------------------- -cat([Y | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) -> - cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X++[Y]}). +cat([Y | Args], X) -> + cat(Args, X++[Y]). check_path([]) -> ok; @@ -2296,6 +2292,9 @@ format_error({delete,File,Error}) -> [File,file:format_error(Error)]); format_error({tar_error,What}) -> form_tar_err(What); +format_error({warnings_treated_as_errors,Warnings}) -> + io_lib:format("Warnings being treated as errors:~n~ts", + [map(fun(W) -> form_warn("",W) end, Warnings)]); format_error(ListOfErrors) when is_list(ListOfErrors) -> format_errors(ListOfErrors); format_error(E) -> io_lib:format("~p~n",[E]). @@ -2352,24 +2351,15 @@ form_tar_err({add, File, Error}) -> %% Format warning format_warning(Warnings) -> - format_warning(Warnings, false). - -format_warning(Warnings, Werror) -> - Prefix = case Werror of - true -> - ""; - false -> - "*WARNING* " - end, - map(fun({warning,W}) -> form_warn(Prefix, W) end, Warnings). - -form_warn(Prefix, {source_not_found,{Mod,_,App,_,_}}) -> + map(fun({warning,W}) -> form_warn("*WARNING* ", W) end, Warnings). 
+ +form_warn(Prefix, {source_not_found,{Mod,App,_}}) -> io_lib:format("~ts~w: Source code not found: ~w.erl~n", [Prefix,App,Mod]); form_warn(Prefix, {{parse_error, File},{_,_,App,_,_}}) -> io_lib:format("~ts~w: Parse error: ~p~n", [Prefix,App,File]); -form_warn(Prefix, {obj_out_of_date,{Mod,_,App,_,_}}) -> +form_warn(Prefix, {obj_out_of_date,{Mod,App,_}}) -> io_lib:format("~ts~w: Object code (~w) out of date~n", [Prefix,App,Mod]); form_warn(Prefix, {exref_undef, Undef}) -> @@ -2379,8 +2369,8 @@ form_warn(Prefix, {exref_undef, Undef}) -> end, map(F, Undef); form_warn(Prefix, missing_sasl) -> - io_lib:format("~ts: Missing application sasl. " + io_lib:format("~tsMissing application sasl. " "Can not upgrade with this release~n", [Prefix]); form_warn(Prefix, What) -> - io_lib:format("~ts ~p~n", [Prefix,What]). + io_lib:format("~ts~p~n", [Prefix,What]). diff --git a/lib/sasl/src/systools_relup.erl b/lib/sasl/src/systools_relup.erl index 28534dc0c8..7e1844b400 100644 --- a/lib/sasl/src/systools_relup.erl +++ b/lib/sasl/src/systools_relup.erl @@ -155,36 +155,12 @@ mk_relup(TopRelFile, BaseUpRelDcs, BaseDnRelDcs) -> mk_relup(TopRelFile, BaseUpRelDcs, BaseDnRelDcs, Opts) -> case check_opts(Opts) of [] -> - R = (catch do_mk_relup(TopRelFile,BaseUpRelDcs,BaseDnRelDcs, - add_code_path(Opts), Opts)), - case {get_opt(silent, Opts), get_opt(noexec, Opts)} of - {false, false} -> - case R of - {ok, _Res, _Mod, Ws} -> - print_warnings(Ws, Opts), - case systools_lib:werror(Opts, Ws) of - true -> - error; - false -> - ok - end; - Other -> - print_error(Other), - error - end; - _ -> - case R of - {ok, _Res, _Mod, Ws} -> - case systools_lib:werror(Opts, Ws) of - true -> - error; - false -> - R - end; - R -> - R - end - end; + R = try do_mk_relup(TopRelFile,BaseUpRelDcs,BaseDnRelDcs, + add_code_path(Opts), Opts) + catch throw:Error -> + Error + end, + done_mk_relup(Opts, R); BadArg -> erlang:error({badarg, BadArg}) end. @@ -224,17 +200,45 @@ do_mk_relup(TopRelFile, BaseUpRelDcs, BaseDnRelDcs, Path, Opts) -> {Dn, Ws2} = foreach_baserel_dn(TopRel, TopApps, BaseDnRelDcs, Path, Opts, Ws1), Relup = {TopRel#release.vsn, Up, Dn}, - case systools_lib:werror(Opts, Ws2) of - true -> - ok; - false -> - write_relup_file(Relup, Opts) - end, - {ok, Relup, ?MODULE, Ws2}; + + {ok, Relup, Ws2}; Other -> - throw(Other) + Other end. +done_mk_relup(Opts, {ok,Relup,Ws}) -> + WAE = get_opt(warnings_as_errors,Opts), + Silent = get_opt(silent,Opts), + Noexec = get_opt(noexec,Opts), + + if WAE andalso Ws=/=[] -> + return_error(Silent, + {error,?MODULE,{warnings_treated_as_errors, Ws}}); + not Noexec -> + case write_relup_file(Relup,Opts) of + ok -> + return_ok(Silent,Relup,Ws); + Error -> + return_error(Silent,Error) + end; + true -> % noexec + return_ok(true,Relup,Ws) + end; +done_mk_relup(Opts, Error) -> + return_error(get_opt(silent,Opts) orelse get_opt(noexec,Opts), Error). + +return_error(true, Error) -> + Error; +return_error(false, Error) -> + print_error(Error), + error. + +return_ok(true,Relup,Ws) -> + {ok,Relup,?MODULE,Ws}; +return_ok(false,_Relup,Ws) -> + print_warnings(Ws), + ok. + %%----------------------------------------------------------------- %% foreach_baserel_up(Rel, TopApps, BaseRelDcs, Path, Opts, Ws) -> Ret %% foreach_baserel_dn(Rel, TopApps, BaseRelDcs, Path, Opts, Ws) -> Ret @@ -529,33 +533,18 @@ to_list(X) when is_list(X) -> X. %% Writes a relup file. 
%% write_relup_file(Relup, Opts) -> - case get_opt(noexec, Opts) of - true -> - ok; - _ -> - Filename = case get_opt(outdir, Opts) of - OutDir when is_list(OutDir) -> - filename:join(filename:absname(OutDir), - "relup"); - false -> - "relup"; - Badarg -> - throw({error, ?MODULE, {badarg, {outdir,Badarg}}}) - end, - - case file:open(Filename, [write]) of - {ok, Fd} -> - io:format(Fd, "~p.~n", [Relup]), - case file:close(Fd) of - ok -> ok; - {error,Reason} -> - throw({error, ?MODULE, - {file_problem, {"relup", {close,Reason}}}}) - end; - {error, Reason} -> - throw({error, ?MODULE, - {file_problem, {"relup", {open, Reason}}}}) - end + Filename = filename:join(filename:absname(get_opt(outdir,Opts)), + "relup"), + case file:open(Filename, [write]) of + {ok, Fd} -> + io:format(Fd, "~p.~n", [Relup]), + case file:close(Fd) of + ok -> ok; + {error,Reason} -> + {error, ?MODULE, {file_problem, {"relup", {close,Reason}}}} + end; + {error, Reason} -> + {error, ?MODULE, {file_problem, {"relup", {open, Reason}}}} end. add_code_path(Opts) -> @@ -593,10 +582,9 @@ default(path) -> false; default(noexec) -> false; default(silent) -> false; default(restart_emulator) -> false; -default(outdir) -> false. +default(outdir) -> "."; +default(warnings_as_errors) -> false. -print_error({'EXIT', Err}) -> - print_error(Err); print_error({error, Mod, Error}) -> S = apply(Mod, format_error, [Error]), io:format(S, []); @@ -614,24 +602,20 @@ format_error({missing_sasl,Release}) -> io_lib:format("No sasl application in release ~ts, ~ts. " "Can not be upgraded.", [Release#release.name, Release#release.vsn]); +format_error({warnings_treated_as_errors, Warnings}) -> + io_lib:format("Warnings being treated as errors:~n~ts", + [[format_warning("",W) || W <- Warnings]]); format_error(Error) -> - io:format("~p~n", [Error]). + io_lib:format("~p~n", [Error]). -print_warnings(Ws, Opts) when is_list(Ws) -> - lists:foreach(fun(W) -> print_warning(W, Opts) end, Ws); -print_warnings(W, Opts) -> - print_warning(W, Opts). +print_warnings(Ws) when is_list(Ws) -> + lists:foreach(fun(W) -> print_warning(W) end, Ws); +print_warnings(W) -> + print_warning(W). -print_warning(W, Opts) -> - Prefix = case lists:member(warnings_as_errors, Opts) of - true -> - ""; - false -> - "*WARNING* " - end, - S = format_warning(Prefix, W), - io:format("~ts", [S]). +print_warning(W) -> + io:format("~ts", [format_warning(W)]). format_warning(W) -> format_warning("*WARNING* ", W). @@ -639,6 +623,8 @@ format_warning(W) -> format_warning(Prefix, {erts_vsn_changed, {Rel1, Rel2}}) -> io_lib:format("~tsThe ERTS version changed between ~p and ~p~n", [Prefix, Rel1, Rel2]); +format_warning(Prefix, pre_R15_emulator_upgrade) -> + io_lib:format("~tsUpgrade from an OTP version earlier than R15. New code should be compiled with the old emulator.~n",[Prefix]); format_warning(Prefix, What) -> io_lib:format("~ts~p~n",[Prefix, What]). diff --git a/lib/sasl/test/systools_SUITE.erl b/lib/sasl/test/systools_SUITE.erl index dd5f277a77..0c98232467 100644 --- a/lib/sasl/test/systools_SUITE.erl +++ b/lib/sasl/test/systools_SUITE.erl @@ -29,6 +29,8 @@ -module(systools_SUITE). +-compile(export_all). + %%-define(debug, true). -include_lib("common_test/include/ct.hrl"). @@ -39,31 +41,6 @@ -include_lib("kernel/include/file.hrl"). --export([all/0,suite/0,groups/0,init_per_group/2,end_per_group/2]). 
- --export([script_options/1, normal_script/1, unicode_script/1, - unicode_script/2, no_mod_vsn_script/1, - wildcard_script/1, variable_script/1, no_sasl_script/1, - no_dot_erlang_script/1, - abnormal_script/1, src_tests_script/1, crazy_script/1, - included_script/1, included_override_script/1, - included_fail_script/1, included_bug_script/1, exref_script/1, - duplicate_modules_script/1, - otp_3065_circular_dependenies/1, included_and_used_sort_script/1]). --export([tar_options/1, normal_tar/1, no_mod_vsn_tar/1, system_files_tar/1, - system_files_tar/2, invalid_system_files_tar/1, - invalid_system_files_tar/2, variable_tar/1, - src_tests_tar/1, var_tar/1, exref_tar/1, link_tar/1, - otp_9507_path_ebin/1]). --export([normal_relup/1, restart_relup/1, abnormal_relup/1, no_sasl_relup/1, - no_appup_relup/1, bad_appup_relup/1, app_start_type_relup/1, - regexp_relup/1]). --export([normal_hybrid/1,hybrid_no_old_sasl/1,hybrid_no_new_sasl/1]). --export([otp_6226_outdir/1, app_file_defaults/1]). --export([init_per_suite/1, end_per_suite/1, - init_per_testcase/2, end_per_testcase/2]). --export([delete_tree/1]). - -import(lists, [foldl/3]). -define(default_timeout, ?t:minutes(20)). @@ -91,7 +68,8 @@ groups() -> {tar, [], [tar_options, normal_tar, no_mod_vsn_tar, system_files_tar, invalid_system_files_tar, variable_tar, - src_tests_tar, var_tar, exref_tar, link_tar, otp_9507_path_ebin]}, + src_tests_tar, var_tar, exref_tar, link_tar, no_sasl_tar, + otp_9507_path_ebin]}, {relup, [], [normal_relup, restart_relup, abnormal_relup, no_sasl_relup, no_appup_relup, bad_appup_relup, app_start_type_relup, regexp_relup @@ -238,6 +216,7 @@ normal_script(Config) when is_list(Config) -> %% Check the same but w. silent flag {ok, _, []} = systools:make_script(LatestName, [silent]), + {ok, _, []} = systools:make_script(LatestName, [silent,warnings_as_errors]), %% Use the local option ok = systools:make_script(LatestName, [local]), @@ -456,9 +435,16 @@ no_sasl_script(Config) when is_list(Config) -> {ok, _ , [{warning,missing_sasl}]} = systools:make_script(LatestName,[{path, P},silent]), + {error, systools_make, {warnings_treated_as_errors,[missing_sasl]}} = + systools:make_script(LatestName,[{path, P},silent,warnings_as_errors]), + {ok, _ , []} = systools:make_script(LatestName,[{path, P},silent, no_warn_sasl]), + {ok, _ , []} = + systools:make_script(LatestName,[{path, P},silent, no_warn_sasl, + warnings_as_errors]), + ok = file:set_cwd(OldDir), ok. 
@@ -525,7 +511,9 @@ src_tests_script(Config) when is_list(Config) -> ok = file:delete(BootFile), false = filelib:is_regular(BootFile), %% With warnings_as_errors and src_tests option, an error should be issued - error = + {error, systools_make, + {warnings_treated_as_errors, [{obj_out_of_date,_}, + {source_not_found,_}]}} = systools:make_script(LatestName, [silent, {path, N}, src_tests, warnings_as_errors]), error = @@ -745,7 +733,7 @@ exref_script(Config) when is_list(Config) -> ok = file:set_cwd(LatestDir), - {ok, _, _} = systools:make_script(LatestName, [{path,P}, silent]), + {ok, _, []} = systools:make_script(LatestName, [{path,P}, silent]), %% Complete exref {ok, _, W1} = @@ -894,10 +882,10 @@ normal_tar(Config) when is_list(Config) -> ok = file:set_cwd(LatestDir), - {ok, _, _} = systools:make_script(LatestName, [silent, {path, P}]), + {ok, _, []} = systools:make_script(LatestName, [silent, {path, P}]), ok = systools:make_tar(LatestName, [{path, P}]), ok = check_tar(fname([lib,'db-2.1',ebin,'db.app']), LatestName), - {ok, _, _} = systools:make_tar(LatestName, [{path, P}, silent]), + {ok, _, []} = systools:make_tar(LatestName, [{path, P}, silent]), ok = check_tar(fname([lib,'fe-3.1',ebin,'fe.app']), LatestName), ok = file:set_cwd(OldDir), @@ -918,10 +906,10 @@ no_mod_vsn_tar(Config) when is_list(Config) -> ok = file:set_cwd(LatestDir), - {ok, _, _} = systools:make_script(LatestName, [silent, {path, P}]), + {ok, _, []} = systools:make_script(LatestName, [silent, {path, P}]), ok = systools:make_tar(LatestName, [{path, P}]), ok = check_tar(fname([lib,'db-3.1',ebin,'db.app']), LatestName), - {ok, _, _} = systools:make_tar(LatestName, [{path, P}, silent]), + {ok, _, []} = systools:make_tar(LatestName, [{path, P}, silent]), ok = check_tar(fname([lib,'fe-3.1',ebin,'fe.app']), LatestName), ok = file:set_cwd(OldDir), @@ -945,11 +933,11 @@ system_files_tar(Config) -> ok = file:write_file("sys.config","[].\n"), ok = file:write_file("relup","{\"LATEST\",[],[]}.\n"), - {ok, _, _} = systools:make_script(LatestName, [silent, {path, P}]), + {ok, _, []} = systools:make_script(LatestName, [silent, {path, P}]), ok = systools:make_tar(LatestName, [{path, P}]), ok = check_tar(fname(["releases","LATEST","sys.config"]), LatestName), ok = check_tar(fname(["releases","LATEST","relup"]), LatestName), - {ok, _, _} = systools:make_tar(LatestName, [{path, P}, silent]), + {ok, _, []} = systools:make_tar(LatestName, [{path, P}, silent]), ok = check_tar(fname(["releases","LATEST","sys.config"]), LatestName), ok = check_tar(fname(["releases","LATEST","relup"]), LatestName), @@ -978,7 +966,7 @@ invalid_system_files_tar(Config) -> ok = file:set_cwd(LatestDir), - {ok, _, _} = systools:make_script(LatestName, [silent, {path, P}]), + {ok, _, []} = systools:make_script(LatestName, [silent, {path, P}]), %% Add dummy relup and sys.config - faulty sys.config ok = file:write_file("sys.config","[]\n"), %!!! syntax error - missing '.' 
@@ -1036,7 +1024,7 @@ variable_tar(Config) when is_list(Config) -> ok = file:set_cwd(LatestDir), - {ok, _, _} = systools:make_script(LatestName, + {ok, _, []} = systools:make_script(LatestName, [silent, {path, P}, {variables,[{"TEST", LibDir}]}]), @@ -1045,7 +1033,7 @@ variable_tar(Config) when is_list(Config) -> {variables,[{"TEST", LibDir}]}]), ok = check_var_tar("TEST", LatestName), - {ok, _, _} = systools:make_tar(LatestName, + {ok, _, []} = systools:make_tar(LatestName, [{path, P}, silent, {variables,[{"TEST", LibDir}]}]), ok = check_var_tar("TEST", LatestName), @@ -1174,7 +1162,7 @@ var_tar(Config) when is_list(Config) -> ok = file:set_cwd(LatestDir), - {ok, _, _} = systools:make_script(LatestName, + {ok, _, []} = systools:make_script(LatestName, [silent, {path, P}, {variables,[{"TEST", LibDir}]}]), @@ -1218,7 +1206,7 @@ exref_tar(Config) when is_list(Config) -> ok = file:set_cwd(LatestDir), - {ok, _, _} = systools:make_script(LatestName, [silent, {path, P}]), + {ok, _, []} = systools:make_script(LatestName, [silent, {path, P}]), %% Complete exref {ok, _, W1} = @@ -1248,7 +1236,41 @@ exref_tar(Config) when is_list(Config) -> ok = file:set_cwd(OldDir), ok. +%% make_tar: Create tar without sasl appl. Check warning. +no_sasl_tar(Config) when is_list(Config) -> + {ok, OldDir} = file:get_cwd(), + {LatestDir, LatestName} = create_script(latest1_no_sasl,Config), + + DataDir = filename:absname(?copydir), + LibDir = fname([DataDir, d_normal, lib]), + P = [fname([LibDir, '*', ebin]), + fname([DataDir, lib, kernel, ebin]), + fname([DataDir, lib, stdlib, ebin]), + fname([DataDir, lib, sasl, ebin])], + + ok = file:set_cwd(LatestDir), + + {ok, _, _} = systools:make_script(LatestName, [silent, {path, P}]), + ok = systools:make_tar(LatestName, [{path, P}]), + {ok, _, [{warning,missing_sasl}]} = + systools:make_tar(LatestName, [{path, P}, silent]), + {ok, _, []} = + systools:make_tar(LatestName, [{path, P}, silent, no_warn_sasl]), + {ok, _, []} = + systools:make_tar(LatestName, [{path, P}, silent, no_warn_sasl, + warnings_as_errors]), + TarFile = LatestName ++ ".tar.gz", + true = filelib:is_regular(TarFile), + ok = file:delete(TarFile), + {error, systools_make, {warnings_treated_as_errors,[missing_sasl]}} = + systools:make_tar(LatestName, [{path, P}, silent, warnings_as_errors]), + error = + systools:make_tar(LatestName, [{path, P}, warnings_as_errors]), + false = filelib:is_regular(TarFile), + + ok = file:set_cwd(OldDir), + ok. %% make_tar: OTP-9507 - make_tar failed when path given as just 'ebin'. 
otp_9507_path_ebin(Config) when is_list(Config) -> @@ -1268,7 +1290,7 @@ otp_9507_path_ebin(Config) when is_list(Config) -> fname([DataDir, lib, kernel, ebin]), fname([DataDir, lib, stdlib, ebin]), fname([DataDir, lib, sasl, ebin])], - {ok, _, _} = systools:make_script(RelName, [silent, {path, P1}]), + {ok, _, []} = systools:make_script(RelName, [silent, {path, P1}]), ok = systools:make_tar(RelName, [{path, P1}]), Content1 = tar_contents(RelName), @@ -1309,7 +1331,7 @@ normal_relup(Config) when is_list(Config) -> ok = systools:make_relup(LatestName, [LatestName1], [LatestName1], [{path, P}]), ok = check_relup([{db, "2.1"}], [{db, "1.0"}]), - {ok, _, _, []} = + {ok, Relup, _, []} = systools:make_relup(LatestName, [LatestName1], [LatestName1], [{path, P}, silent]), ok = check_relup([{db, "2.1"}], [{db, "1.0"}]), @@ -1322,7 +1344,9 @@ normal_relup(Config) when is_list(Config) -> error = systools:make_relup(LatestName, [LatestName2], [LatestName1], [{path, P}, warnings_as_errors]), - error = + {error, systools_relup, + {warnings_treated_as_errors,[pre_R15_emulator_upgrade, + {erts_vsn_changed, _}]}} = systools:make_relup(LatestName, [LatestName2], [LatestName1], [{path, P}, silent, warnings_as_errors]), @@ -1341,6 +1365,14 @@ normal_relup(Config) when is_list(Config) -> %% relup file should exist now true = filelib:is_regular("relup"), + %% file should not be written if noexec option is used. + %% delete before running tests. + ok = file:delete("relup"), + {ok,Relup,_,[]} = + systools:make_relup(LatestName, [LatestName1], [LatestName1], + [{path, P}, noexec]), + false = filelib:is_regular("relup"), + ok = file:set_cwd(OldDir), ok. diff --git a/lib/sasl/vsn.mk b/lib/sasl/vsn.mk index e35a0c2977..6aa662a743 100644 --- a/lib/sasl/vsn.mk +++ b/lib/sasl/vsn.mk @@ -1 +1 @@ -SASL_VSN = 3.0.2 +SASL_VSN = 3.0.3 diff --git a/lib/snmp/doc/src/notes.xml b/lib/snmp/doc/src/notes.xml index 3323d32878..f1919a6bb1 100644 --- a/lib/snmp/doc/src/notes.xml +++ b/lib/snmp/doc/src/notes.xml @@ -34,7 +34,27 @@ </header> - <section><title>SNMP 5.2.4</title> + <section><title>SNMP 5.2.5</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + The SNMP MIB compiler has been fixed to compile MIBS with + refinements on user types such as in RFC 4669 + RADIUS-AUTH-SERVER-MIB.mib. 
Problem reported and + researched by Kenneth Lakin and Daniel Goertzen.</p> + <p> + See also: https://bugs.erlang.org/browse/ERL-325</p> + <p> + Own Id: OTP-14145 Aux Id: ERL-325 </p> + </item> + </list> + </section> + +</section> + +<section><title>SNMP 5.2.4</title> <section><title>Fixed Bugs and Malfunctions</title> <list> diff --git a/lib/snmp/test/snmp_manager_test.erl b/lib/snmp/test/snmp_manager_test.erl index 71f4017d8b..054e998af4 100644 --- a/lib/snmp/test/snmp_manager_test.erl +++ b/lib/snmp/test/snmp_manager_test.erl @@ -1760,7 +1760,7 @@ do_simple_sync_get2(Node, TargetName, Oids, Get, PostVerify) "~n Rem: ~w", [Reply, _Rem]), %% verify that the operation actually worked: - %% The order should be the same, so no need to seach + %% The order should be the same, so no need to search ?line ok = case Reply of {noError, 0, [#varbind{oid = ?sysObjectID_instance, value = SysObjectID}, @@ -2709,7 +2709,7 @@ do_simple_set2(Node, TargetName, VAVs, Set, PostVerify) -> "~n Rem: ~w", [Reply, _Rem]), %% verify that the operation actually worked: - %% The order should be the same, so no need to seach + %% The order should be the same, so no need to search %% The value we get should be exactly the same as we sent ?line ok = case Reply of {noError, 0, [#varbind{oid = ?sysName_instance, @@ -5118,10 +5118,10 @@ inform_swarm_collector(N) -> %% Note that we need to deal with re-transmissions! %% That is, the agent did not receive the ack in time, -%% and therefor did a re-transmit. This means that we -%% expect to receive more inform's then we actually -%% sent. So for sucess we assume: -%% +%% and therefor did a re-transmit. This means that we +%% expect to receive more inform's then we actually +%% sent. So for success we assume: +%% %% SentAckCnt = N %% RespCnt = N %% RecvCnt >= N diff --git a/lib/ssh/doc/src/notes.xml b/lib/ssh/doc/src/notes.xml index 1837350284..02a39f030c 100644 --- a/lib/ssh/doc/src/notes.xml +++ b/lib/ssh/doc/src/notes.xml @@ -30,6 +30,86 @@ <file>notes.xml</file> </header> +<section><title>Ssh 4.4.1</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Fix bug when opening connections. If the tcp setup + failed, that would in some cases not result in an error + return value.</p> + <p> + Own Id: OTP-14108</p> + </item> + <item> + <p> + Reduce information leakage in case of decryption errors.</p> + <p> + Own Id: OTP-14109</p> + </item> + <item> + <p> + The key exchange algorithm + diffie-hellman-group-exchange-sha* has a server-option + <c>{dh_gex_limits,{Min,Max}}</c>. There was a hostkey + signature validation error on the client side if the + option was used and the <c>Min</c> or the <c>Max</c> + differed from the corresponding values obtained from the + client.</p> + <p> + This bug is now corrected.</p> + <p> + Own Id: OTP-14166</p> + </item> + <item> + <p> + The sftpd server now correctly uses <c>root_dir</c> and + <c>cwd</c> when resolving file paths if both are + provided. 
The <c>cwd</c> handling is also corrected.</p> + <p> + Thanks to kape1395!</p> + <p> + Own Id: OTP-14225 Aux Id: PR-1331, PR-1335 </p> + </item> + <item> + <p> + Ssh_cli used a function that does not handle non-utf8 + unicode correctly.</p> + <p> + Own Id: OTP-14230 Aux Id: ERL-364 </p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + The implementation of the key exchange algorithms + diffie-hellman-group-exchange-sha* are optimized, up to a + factor of 11 for the slowest ( = biggest and safest) + group size.</p> + <p> + Own Id: OTP-14169 Aux Id: seq-13261 </p> + </item> + <item> + <p> + The ssh host key fingerprint generation now also takes a + list of algorithms and returns a list of corresponding + fingerprints. See + <c>public_key:ssh_hostkey_fingerprint/2</c> and the + option <c>silently_accept_hosts</c> in + <c>ssh:connect</c>.</p> + <p> + Own Id: OTP-14223</p> + </item> + </list> + </section> + +</section> + <section><title>Ssh 4.4</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/ssh/doc/src/ssh.xml b/lib/ssh/doc/src/ssh.xml index 6b49f89449..1f07e826ce 100644 --- a/lib/ssh/doc/src/ssh.xml +++ b/lib/ssh/doc/src/ssh.xml @@ -153,7 +153,7 @@ <item> <p>IP version to use.</p> </item> - <tag><c><![CDATA[{user_dir, string()}]]></c></tag> + <tag><marker id="opt_user_dir"></marker><c><![CDATA[{user_dir, string()}]]></c></tag> <item> <p>Sets the user directory, that is, the directory containing <c>ssh</c> configuration files for the user, such as @@ -175,22 +175,48 @@ supplied with this option. </p> </item> - <tag><c><![CDATA[{silently_accept_hosts, boolean() | accept_fun() | {crypto:digest_type(), accept_fun()} }]]></c> - <br/> - <c><![CDATA[accept_fun() :: fun(PeerName::string(), FingerPrint::string()) -> boolean()]]></c> + <tag> + <c><![CDATA[{silently_accept_hosts, boolean()}]]></c> <br/> + <c><![CDATA[{silently_accept_hosts, CallbackFun}]]></c> <br/> + <c><![CDATA[{silently_accept_hosts, {HashAlgoSpec, CallbackFun} }]]></c> <br/> + <br/> + <c><![CDATA[HashAlgoSpec = crypto:digest_type() | [ crypto:digest_type() ] ]]></c><br/> + <c><![CDATA[CallbackFun = fun(PeerName, FingerPrint) -> boolean()]]></c><br/> + <c><![CDATA[PeerName = string()]]></c><br/> + <c><![CDATA[FingerPrint = string() | [ string() ] ]]></c> </tag> <item> - <p>When <c>true</c>, hosts are added to the - file <c><![CDATA[known_hosts]]></c> without asking the user. - Defaults to <c>false</c> which will give a user question on stdio of whether to accept or reject a previously - unseen host.</p> - <p>If the option value is has an <c>accept_fun()</c>, that fun will called with the arguments - <c>(PeerName, PeerHostKeyFingerPrint)</c>. The fingerprint is calculated on the Peer's Host Key with - <seealso marker="public_key:public_key#ssh_hostkey_fingerprint-1">public_key:ssh_hostkey_fingerprint/1</seealso>. - </p> - <p>If the <c>crypto:digest_type()</c> is present, the fingerprint is calculated with that digest type by the function - <seealso marker="public_key:public_key#ssh_hostkey_fingerprint-2">public_key:ssh_hostkey_fingerprint/2</seealso>. - </p> + <p>This option guides the <c>connect</c> function how to act when the connected server presents a Host + Key that the client has not seen before. The default is to ask the user with a question on stdio of whether to + accept or reject the new Host Key. 
+ See also the option <seealso marker="#opt_user_dir"><c>user_dir</c></seealso> + for the path to the file <c>known_hosts</c> where previously accepted Host Keys are recorded. + </p> + <p>The option can be given in three different forms as seen above:</p> + <list> + <item>The value is a <c>boolean()</c>. The value <c>true</c> will make the client accept any unknown + Host Key without any user interaction. The value <c>false</c> keeps the default behaviour of asking + the user on stdio. + </item> + <item>A <c>CallbackFun</c> will be called and the boolean return value <c>true</c> will make the client + accept the Host Key. A return value of <c>false</c> will make the client reject the Host Key and therefore + also close the connection. The arguments to the fun are: + <list type="bulleted"> + <item><c>PeerName</c> - a string with the name or address of the remote host.</item> + <item><c>FingerPrint</c> - the fingerprint of the Host Key as + <seealso marker="public_key:public_key#ssh_hostkey_fingerprint-1">public_key:ssh_hostkey_fingerprint/1</seealso> + calculates it. + </item> + </list> + </item> + <item>A tuple <c>{HashAlgoSpec, CallbackFun}</c>. The <c>HashAlgoSpec</c> specifies which hash algorithm + shall be used to calculate the fingerprint used in the call to the <c>CallbackFun</c>. The <c>HashAlgoSpec</c> + is either an atom or a list of atoms as the first argument in + <seealso marker="public_key:public_key#ssh_hostkey_fingerprint-2">public_key:ssh_hostkey_fingerprint/2</seealso>. + If it is a list of hash algorithm names, the <c>FingerPrint</c> argument in the <c>CallbackFun</c> will be + a list of fingerprints in the same order as the corresponding name in the <c>HashAlgoSpec</c> list. + </item> + </list> + </item> <tag><c><![CDATA[{user_interaction, boolean()}]]></c></tag> <item> @@ -200,7 +226,7 @@ supplying a password. Defaults to <c>true</c>. Even if user interaction is allowed it can be suppressed by other options, such as <c>silently_accept_hosts</c> - and <c>password</c>.
However, those options are not always desirable to use from a security point of view.</p> </item> @@ -700,9 +726,10 @@ </func> <func> - <name>daemon_info(Daemon) -> {ok, [{port,Port}]} | {error,Error}</name> + <name>daemon_info(Daemon) -> {ok, [DaemonInfo]} | {error,Error}</name> <fsummary>Get info about a daemon</fsummary> <type> + <v>DaemonInfo = {port,Port::pos_integer()} | {listen_address, any|ip_address()} | {profile,atom()}</v> <v>Port = integer()</v> <v>Error = bad_daemon_ref</v> </type> diff --git a/lib/ssh/doc/src/ssh_app.xml b/lib/ssh/doc/src/ssh_app.xml index 5cc4c24889..5f710decc1 100644 --- a/lib/ssh/doc/src/ssh_app.xml +++ b/lib/ssh/doc/src/ssh_app.xml @@ -146,7 +146,10 @@ <item>diffie-hellman-group-exchange-sha1</item> <item>diffie-hellman-group-exchange-sha256</item> <item>diffie-hellman-group14-sha1</item> - <item>diffie-hellman-group1-sha1</item> + <item>diffie-hellman-group14-sha256</item> + <item>diffie-hellman-group16-sha512</item> + <item>diffie-hellman-group18-sha512</item> + <item>(diffie-hellman-group1-sha1, retired: can be enabled with the <c>preferred_algorithms</c> option)</item> </list> </item> @@ -157,7 +160,7 @@ <item>ecdsa-sha2-nistp384</item> <item>ecdsa-sha2-nistp521</item> <item>ssh-rsa</item> - <item>ssh-dss</item> + <item>(ssh-dss, retired: can be enabled with the <c>preferred_algorithms</c> option)</item> </list> </item> @@ -306,6 +309,8 @@ <p>Comment: Defines hmac-sha2-256 and hmac-sha2-512 </p> </item> + + <item>Work in progress: <url href="https://tools.ietf.org/html/draft-ietf-curdle-ssh-kex-sha2">https://tools.ietf.org/html/draft-ietf-curdle-ssh-kex-sha2-05</url>, Key Exchange (KEX) Method Updates and Recommendations for Secure Shell (SSH)</item> </list> diff --git a/lib/ssh/doc/src/using_ssh.xml b/lib/ssh/doc/src/using_ssh.xml index 0861c641c7..864378b640 100644 --- a/lib/ssh/doc/src/using_ssh.xml +++ b/lib/ssh/doc/src/using_ssh.xml @@ -305,7 +305,7 @@ ok = erl_tar:close(HandleRead), <code type="erl" > -module(ssh_echo_server). --behaviour(ssh_subsystem). +-behaviour(ssh_daemon_channel). -record(state, { n, id, diff --git a/lib/ssh/src/Makefile b/lib/ssh/src/Makefile index 7ab6f22424..f826fdfd9b 100644 --- a/lib/ssh/src/Makefile +++ b/lib/ssh/src/Makefile @@ -51,6 +51,7 @@ MODULES= \ ssh_sup \ sshc_sup \ sshd_sup \ + ssh_options \ ssh_connection_sup \ ssh_connection \ ssh_connection_handler \ diff --git a/lib/ssh/src/ssh.app.src b/lib/ssh/src/ssh.app.src index 76b7d8cd55..974292fde1 100644 --- a/lib/ssh/src/ssh.app.src +++ b/lib/ssh/src/ssh.app.src @@ -7,6 +7,7 @@ ssh_app, ssh_acceptor, ssh_acceptor_sup, + ssh_options, ssh_auth, ssh_message, ssh_bits, @@ -41,11 +42,10 @@ {env, []}, {mod, {ssh_app, []}}, {runtime_dependencies, [ - "crypto-3.3", + "crypto-3.7.3", "erts-6.0", "kernel-3.0", - "public_key-1.1", - "stdlib-3.1" + "public_key-1.4", + "stdlib-3.3" ]}]}. - diff --git a/lib/ssh/src/ssh.erl b/lib/ssh/src/ssh.erl index 31e343e81b..e2a289d737 100644 --- a/lib/ssh/src/ssh.erl +++ b/lib/ssh/src/ssh.erl @@ -40,10 +40,24 @@ ]). %%% Type exports --export_type([connection_ref/0, - channel_id/0 +-export_type([ssh_daemon_ref/0, + ssh_connection_ref/0, + ssh_channel_id/0, + role/0, + subsystem_spec/0, + subsystem_name/0, + channel_callback/0, + channel_init_args/0, + algs_list/0, + alg_entry/0, + simple_algs/0, + double_algs/0 ]). +-opaque ssh_daemon_ref() :: daemon_ref() . +-opaque ssh_connection_ref() :: connection_ref() . +-opaque ssh_channel_id() :: channel_id(). 
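The ssh_app.xml hunk above retires diffie-hellman-group1-sha1 and ssh-dss but notes that they can be re-enabled through the preferred_algorithms option. A hedged client-side sketch; the host name is a placeholder, and appending the retired kex last keeps it as a final fallback only:

Kex = proplists:get_value(kex, ssh:default_algorithms()),
{ok, Conn} = ssh:connect("legacy.example.com", 22,
                         [{preferred_algorithms,
                           [{kex, Kex ++ ['diffie-hellman-group1-sha1']}]}]).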
+ %%-------------------------------------------------------------------- -spec start() -> ok | {error, term()}. -spec start(permanent | transient | temporary) -> ok | {error, term()}. @@ -71,55 +85,63 @@ stop() -> application:stop(ssh). %%-------------------------------------------------------------------- --spec connect(port(), proplists:proplist()) -> {ok, pid()} | {error, term()}. +-spec connect(inet:socket(), proplists:proplist()) -> ok_error(connection_ref()). + +-spec connect(inet:socket(), proplists:proplist(), timeout()) -> ok_error(connection_ref()) + ; (string(), inet:port_number(), proplists:proplist()) -> ok_error(connection_ref()). --spec connect(port(), proplists:proplist(), timeout()) -> {ok, pid()} | {error, term()} - ; (string(), integer(), proplists:proplist()) -> {ok, pid()} | {error, term()}. +-spec connect(string(), inet:port_number(), proplists:proplist(), timeout()) -> ok_error(connection_ref()). --spec connect(string(), integer(), proplists:proplist(), timeout()) -> {ok, pid()} | {error, term()}. %% %% Description: Starts an ssh connection. %%-------------------------------------------------------------------- -connect(Socket, Options) -> - connect(Socket, Options, infinity). +connect(Socket, UserOptions) when is_port(Socket), + is_list(UserOptions) -> + connect(Socket, UserOptions, infinity). -connect(Socket, Options, Timeout) when is_port(Socket) -> - case handle_options(Options) of +connect(Socket, UserOptions, Timeout) when is_port(Socket), + is_list(UserOptions) -> + case ssh_options:handle_options(client, UserOptions) of {error, Error} -> {error, Error}; - {_SocketOptions, SshOptions} -> - case valid_socket_to_use(Socket, Options) of + Options -> + case valid_socket_to_use(Socket, ?GET_OPT(transport,Options)) of ok -> {ok, {Host,_Port}} = inet:sockname(Socket), - Opts = [{user_pid,self()}, {host,fmt_host(Host)} | SshOptions], + Opts = ?PUT_INTERNAL_OPT([{user_pid,self()}, {host,fmt_host(Host)}], Options), ssh_connection_handler:start_connection(client, Socket, Opts, Timeout); {error,SockError} -> {error,SockError} end end; -connect(Host, Port, Options) when is_integer(Port), Port>0 -> - connect(Host, Port, Options, infinity). +connect(Host, Port, UserOptions) when is_integer(Port), + Port>0, + is_list(UserOptions) -> + connect(Host, Port, UserOptions, infinity). 
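The connect clauses above are where the silently_accept_hosts option documented earlier takes effect. A sketch of the new {HashAlgoSpec, CallbackFun} form with a list of digests, in which the callback receives one fingerprint per digest in the same order; the host name and the stored fingerprints (KnownSha, KnownMd5) are placeholders the application would supply:

KnownSha = "SHA1:...", KnownMd5 = "MD5:...",   %% previously stored fingerprints (placeholders)
{ok, Conn} =
    ssh:connect("host.example.com", 22,
                [{silently_accept_hosts,
                  {[sha, md5],
                   fun(_PeerName, [ShaFP, Md5FP]) ->
                           ShaFP =:= KnownSha orelse Md5FP =:= KnownMd5
                   end}}]).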
-connect(Host, Port, Options, Timeout) -> - case handle_options(Options) of +connect(Host, Port, UserOptions, Timeout) when is_integer(Port), + Port>0, + is_list(UserOptions) -> + case ssh_options:handle_options(client, UserOptions) of {error, _Reason} = Error -> Error; - {SocketOptions, SshOptions} -> - {_, Transport, _} = TransportOpts = - proplists:get_value(transport, Options, {tcp, gen_tcp, tcp_closed}), - ConnectionTimeout = proplists:get_value(connect_timeout, Options, infinity), - try Transport:connect(Host, Port, [ {active, false} | SocketOptions], ConnectionTimeout) of + Options -> + {_, Transport, _} = TransportOpts = ?GET_OPT(transport, Options), + ConnectionTimeout = ?GET_OPT(connect_timeout, Options), + SocketOpts = [{active,false} | ?GET_OPT(socket_options,Options)], + try Transport:connect(Host, Port, SocketOpts, ConnectionTimeout) of {ok, Socket} -> - Opts = [{user_pid,self()}, {host,Host} | SshOptions], + Opts = ?PUT_INTERNAL_OPT([{user_pid,self()}, {host,Host}], Options), ssh_connection_handler:start_connection(client, Socket, Opts, Timeout); {error, Reason} -> {error, Reason} catch - exit:{function_clause, _} -> + exit:{function_clause, _F} -> + io:format('function_clause ~p~n',[_F]), {error, {options, {transport, TransportOpts}}}; exit:badarg -> - {error, {options, {socket_options, SocketOptions}}} + {error, {options, {socket_options, SocketOpts}}} end end. @@ -148,9 +170,11 @@ channel_info(ConnectionRef, ChannelId, Options) -> ssh_connection_handler:channel_info(ConnectionRef, ChannelId, Options). %%-------------------------------------------------------------------- --spec daemon(integer()) -> {ok, pid()} | {error, term()}. --spec daemon(integer()|port(), proplists:proplist()) -> {ok, pid()} | {error, term()}. --spec daemon(any | inet:ip_address(), integer(), proplists:proplist()) -> {ok, pid()} | {error, term()}. +-spec daemon(inet:port_number()) -> ok_error(daemon_ref()). +-spec daemon(inet:port_number()|inet:socket(), proplists:proplist()) -> ok_error(daemon_ref()). +-spec daemon(any | inet:ip_address(), inet:port_number(), proplists:proplist()) -> ok_error(daemon_ref()) + ;(socket, inet:socket(), proplists:proplist()) -> ok_error(daemon_ref()) + . %% Description: Starts a server listening for SSH connections %% on the given port. @@ -158,34 +182,38 @@ channel_info(ConnectionRef, ChannelId, Options) -> daemon(Port) -> daemon(Port, []). -daemon(Port, Options) when is_integer(Port) -> - daemon(any, Port, Options); -daemon(Socket, Options0) when is_port(Socket) -> - Options = daemon_shell_opt(Options0), - start_daemon(Socket, Options). +daemon(Port, UserOptions) when is_integer(Port), Port >= 0 -> + daemon(any, Port, UserOptions); + +daemon(Socket, UserOptions) when is_port(Socket) -> + daemon(socket, Socket, UserOptions). -daemon(HostAddr, Port, Options0) -> - Options1 = daemon_shell_opt(Options0), - {Host, Inet, Options} = daemon_host_inet_opt(HostAddr, Options1), - start_daemon(Host, Port, Options, Inet). + +daemon(Host0, Port, UserOptions0) -> + {Host, UserOptions} = handle_daemon_args(Host0, UserOptions0), + start_daemon(Host, Port, ssh_options:handle_options(server, UserOptions)). %%-------------------------------------------------------------------- +-spec daemon_info(daemon_ref()) -> ok_error( [{atom(), term()}] ). 
+ daemon_info(Pid) -> case catch ssh_system_sup:acceptor_supervisor(Pid) of AsupPid when is_pid(AsupPid) -> - [Port] = - [Prt || {{ssh_acceptor_sup,any,Prt,default}, + [{ListenAddr,Port,Profile}] = + [{LA,Prt,Prf} || {{ssh_acceptor_sup,LA,Prt,Prf}, _WorkerPid,worker,[ssh_acceptor]} <- supervisor:which_children(AsupPid)], - {ok, [{port,Port}]}; - + {ok, [{port,Port}, + {listen_address,ListenAddr}, + {profile,Profile} + ]}; _ -> {error,bad_daemon_ref} end. %%-------------------------------------------------------------------- --spec stop_listener(pid()) -> ok. --spec stop_listener(inet:ip_address(), integer()) -> ok. +-spec stop_listener(daemon_ref()) -> ok. +-spec stop_listener(inet:ip_address(), inet:port_number()) -> ok. %% %% Description: Stops the listener, but leaves %% existing connections started by the listener up and running. @@ -198,8 +226,9 @@ stop_listener(Address, Port, Profile) -> ssh_system_sup:stop_listener(Address, Port, Profile). %%-------------------------------------------------------------------- --spec stop_daemon(pid()) -> ok. --spec stop_daemon(inet:ip_address(), integer()) -> ok. +-spec stop_daemon(daemon_ref()) -> ok. +-spec stop_daemon(inet:ip_address(), inet:port_number()) -> ok. +-spec stop_daemon(inet:ip_address(), inet:port_number(), atom()) -> ok. %% %% Description: Stops the listener and all connections started by %% the listener. @@ -210,10 +239,11 @@ stop_daemon(Address, Port) -> ssh_system_sup:stop_system(Address, Port, ?DEFAULT_PROFILE). stop_daemon(Address, Port, Profile) -> ssh_system_sup:stop_system(Address, Port, Profile). + %%-------------------------------------------------------------------- --spec shell(port() | string()) -> _. --spec shell(port() | string(), proplists:proplist()) -> _. --spec shell(string(), integer(), proplists:proplist()) -> _. +-spec shell(inet:socket() | string()) -> _. +-spec shell(inet:socket() | string(), proplists:proplist()) -> _. +-spec shell(string(), inet:port_number(), proplists:proplist()) -> _. %% Host = string() %% Port = integer() @@ -254,6 +284,7 @@ start_shell(Error) -> Error. %%-------------------------------------------------------------------- +-spec default_algorithms() -> algs_list() . %%-------------------------------------------------------------------- default_algorithms() -> ssh_transport:default_algorithms(). @@ -261,109 +292,96 @@ default_algorithms() -> %%-------------------------------------------------------------------- %%% Internal functions %%-------------------------------------------------------------------- -valid_socket_to_use(Socket, Options) -> - case proplists:get_value(transport, Options, {tcp, gen_tcp, tcp_closed}) of - {tcp,_,_} -> - %% Is this tcp-socket a valid socket? - case {is_tcp_socket(Socket), - {ok,[{active,false}]} == inet:getopts(Socket, [active]) - } - of - {true, true} -> - ok; - {true, false} -> - {error, not_passive_mode}; - _ -> - {error, not_tcp_socket} - end; - {L4,_,_} -> - {error, {unsupported,L4}} +handle_daemon_args(Host, UserOptions0) -> + case Host of + socket -> + {Host, UserOptions0}; + any -> + {ok, Host0} = inet:gethostname(), + Inet = proplists:get_value(inet, UserOptions0, inet), + {Host0, [Inet | UserOptions0]}; + {_,_,_,_} -> + {Host, [inet, {ip,Host} | UserOptions0]}; + {_,_,_,_,_,_,_,_} -> + {Host, [inet6, {ip,Host} | UserOptions0]}; + _ -> + error(badarg) end. -is_tcp_socket(Socket) -> {ok,[]} =/= inet:getopts(Socket, [delay_send]). 
- - - -daemon_shell_opt(Options) -> - case proplists:get_value(shell, Options) of - undefined -> - [{shell, {shell, start, []}} | Options]; - _ -> - Options - end. - -daemon_host_inet_opt(HostAddr, Options1) -> - case HostAddr of - any -> - {ok, Host0} = inet:gethostname(), - {Host0, proplists:get_value(inet, Options1, inet), Options1}; - {_,_,_,_} -> - {HostAddr, inet, - [{ip, HostAddr} | Options1]}; - {_,_,_,_,_,_,_,_} -> - {HostAddr, inet6, - [{ip, HostAddr} | Options1]} - end. +%%%---------------------------------------------------------------- +valid_socket_to_use(Socket, {tcp,_,_}) -> + %% Is this tcp-socket a valid socket? + case {is_tcp_socket(Socket), + {ok,[{active,false}]} == inet:getopts(Socket, [active]) + } + of + {true, true} -> + ok; + {true, false} -> + {error, not_passive_mode}; + _ -> + {error, not_tcp_socket} + end; +valid_socket_to_use(_, {L4,_,_}) -> + {error, {unsupported,L4}}. -start_daemon(Socket, Options) -> - case handle_options(Options) of - {error, Error} -> - {error, Error}; - {SocketOptions, SshOptions} -> - case valid_socket_to_use(Socket, Options) of - ok -> - try - do_start_daemon(Socket, [{role,server}|SshOptions], SocketOptions) - catch - throw:bad_fd -> {error,bad_fd}; - _C:_E -> {error,{cannot_start_daemon,_C,_E}} - end; - {error,SockError} -> - {error,SockError} - end + +is_tcp_socket(Socket) -> + case inet:getopts(Socket, [delay_send]) of + {ok,[_]} -> true; + _ -> false end. -start_daemon(Host, Port, Options, Inet) -> - case handle_options(Options) of - {error, _Reason} = Error -> - Error; - {SocketOptions, SshOptions}-> - try - do_start_daemon(Host, Port, [{role,server}|SshOptions] , [Inet|SocketOptions]) - catch - throw:bad_fd -> {error,bad_fd}; - _C:_E -> {error,{cannot_start_daemon,_C,_E}} - end +%%%---------------------------------------------------------------- +start_daemon(_, _, {error,Error}) -> + {error,Error}; + +start_daemon(socket, Socket, Options) -> + case valid_socket_to_use(Socket, ?GET_OPT(transport,Options)) of + ok -> + try + do_start_daemon(Socket, Options) + catch + throw:bad_fd -> {error,bad_fd}; + throw:bad_socket -> {error,bad_socket}; + _C:_E -> {error,{cannot_start_daemon,_C,_E}} + end; + {error,SockError} -> + {error,SockError} + end; + +start_daemon(Host, Port, Options) -> + try + do_start_daemon(Host, Port, Options) + catch + throw:bad_fd -> {error,bad_fd}; + throw:bad_socket -> {error,bad_socket}; + _C:_E -> {error,{cannot_start_daemon,_C,_E}} end. -do_start_daemon(Socket, SshOptions, SocketOptions) -> + +do_start_daemon(Socket, Options) -> {ok, {IP,Port}} = try {ok,_} = inet:sockname(Socket) catch _:_ -> throw(bad_socket) end, Host = fmt_host(IP), - Profile = proplists:get_value(profile, SshOptions, ?DEFAULT_PROFILE), - Opts = [{asocket, Socket}, - {asock_owner,self()}, - {address, Host}, - {port, Port}, - {role, server}, - {socket_opts, SocketOptions}, - {ssh_opts, SshOptions}], - {_, Callback, _} = proplists:get_value(transport, SshOptions, {tcp, gen_tcp, tcp_closed}), + Opts = ?PUT_INTERNAL_OPT([{asocket, Socket}, + {asock_owner,self()}, + {address, Host}, + {port, Port}, + {role, server}], Options), + + Profile = ?GET_OPT(profile, Options), case ssh_system_sup:system_supervisor(Host, Port, Profile) of undefined -> - %% It would proably make more sense to call the - %% address option host but that is a too big change at the - %% monent. The name is a legacy name! 
try sshd_sup:start_child(Opts) of {error, {already_started, _}} -> {error, eaddrinuse}; Result = {ok,_} -> - ssh_acceptor:handle_connection(Callback, Host, Port, Opts, Socket), - Result; + call_ssh_acceptor_handle_connection(Host, Port, Opts, Socket, Result); Result = {error, _} -> Result catch @@ -376,57 +394,47 @@ do_start_daemon(Socket, SshOptions, SocketOptions) -> {error, {already_started, _}} -> {error, eaddrinuse}; {ok, _} -> - ssh_acceptor:handle_connection(Callback, Host, Port, Opts, Socket), - {ok, Sup}; + call_ssh_acceptor_handle_connection(Host, Port, Opts, Socket, {ok,Sup}); Other -> Other end end. -do_start_daemon(Host0, Port0, SshOptions, SocketOptions) -> +do_start_daemon(Host0, Port0, Options0) -> {Host,Port1} = try - case proplists:get_value(fd, SocketOptions) of + case ?GET_SOCKET_OPT(fd, Options0) of undefined -> {Host0,Port0}; Fd when Port0==0 -> - find_hostport(Fd); - _ -> - {Host0,Port0} + find_hostport(Fd) end catch _:_ -> throw(bad_fd) end, - Profile = proplists:get_value(profile, SshOptions, ?DEFAULT_PROFILE), - {Port, WaitRequestControl, Opts0} = + {Port, WaitRequestControl, Options1} = case Port1 of 0 -> %% Allocate the socket here to get the port number... - {_, Callback, _} = - proplists:get_value(transport, SshOptions, {tcp, gen_tcp, tcp_closed}), - {ok,LSock} = ssh_acceptor:callback_listen(Callback, 0, SocketOptions), + {ok,LSock} = ssh_acceptor:callback_listen(0, Options0), {ok,{_,LPort}} = inet:sockname(LSock), {LPort, - {LSock,Callback}, - [{lsocket,LSock},{lsock_owner,self()}] + LSock, + ?PUT_INTERNAL_OPT({lsocket,{LSock,self()}}, Options0) }; _ -> - {Port1, false, []} + {Port1, false, Options0} end, - Opts = [{address, Host}, - {port, Port}, - {role, server}, - {socket_opts, SocketOptions}, - {ssh_opts, SshOptions} | Opts0], + Options = ?PUT_INTERNAL_OPT([{address, Host}, + {port, Port}, + {role, server}], Options1), + Profile = ?GET_OPT(profile, Options0), case ssh_system_sup:system_supervisor(Host, Port, Profile) of undefined -> - %% It would proably make more sense to call the - %% address option host but that is a too big change at the - %% monent. The name is a legacy name! - try sshd_sup:start_child(Opts) of + try sshd_sup:start_child(Options) of {error, {already_started, _}} -> {error, eaddrinuse}; Result = {ok,_} -> - sync_request_control(WaitRequestControl), + sync_request_control(WaitRequestControl, Options), Result; Result = {error, _} -> Result @@ -434,22 +442,34 @@ do_start_daemon(Host0, Port0, SshOptions, SocketOptions) -> exit:{noproc, _} -> {error, ssh_not_started} end; - Sup -> + Sup -> AccPid = ssh_system_sup:acceptor_supervisor(Sup), - case ssh_acceptor_sup:start_child(AccPid, Opts) of + case ssh_acceptor_sup:start_child(AccPid, Options) of {error, {already_started, _}} -> {error, eaddrinuse}; {ok, _} -> - sync_request_control(WaitRequestControl), + sync_request_control(WaitRequestControl, Options), {ok, Sup}; Other -> Other end end. -sync_request_control(false) -> +call_ssh_acceptor_handle_connection(Host, Port, Options, Socket, DefaultResult) -> + {_, Callback, _} = ?GET_OPT(transport, Options), + try ssh_acceptor:handle_connection(Callback, Host, Port, Options, Socket) + of + {error,Error} -> {error,Error}; + _ -> DefaultResult + catch + C:R -> {error,{could_not_start_connection,{C,R}}} + end. 
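%% ---------------------------------------------------------------------
%% Usage sketch (assumption: ssh:daemon/2 accepts an already listening TCP
%% socket, as the start_daemon(socket, Socket, Options) clause above
%% implies). The socket must be opened in passive mode, otherwise
%% valid_socket_to_use/2 rejects it with {error,not_passive_mode}.
%% The system_dir path is made up.
daemon_on_socket_example() ->
    {ok, LSock} = gen_tcp:listen(0, [{active,false}]),
    ssh:daemon(LSock, [{system_dir, "/tmp/ssh_daemon"}]).
%% ---------------------------------------------------------------------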
+ + +sync_request_control(false, _Options) -> ok; -sync_request_control({LSock,Callback}) -> +sync_request_control(LSock, Options) -> + {_, Callback, _} = ?GET_OPT(transport, Options), receive {request_control,LSock,ReqPid} -> ok = Callback:controlling_process(LSock, ReqPid), @@ -465,512 +485,6 @@ find_hostport(Fd) -> ok = inet:close(S), HostPort. - -handle_options(Opts) -> - try handle_option(algs_compatibility(proplists:unfold(Opts)), [], []) of - {Inet, Ssh} -> - {handle_ip(Inet), Ssh} - catch - throw:Error -> - Error - end. - - -algs_compatibility(Os0) -> - %% Take care of old options 'public_key_alg' and 'pref_public_key_algs' - case proplists:get_value(public_key_alg, Os0) of - undefined -> - Os0; - A when is_atom(A) -> - %% Skip public_key_alg if pref_public_key_algs is defined: - Os = lists:keydelete(public_key_alg, 1, Os0), - case proplists:get_value(pref_public_key_algs,Os) of - undefined when A == 'ssh-rsa' ; A==ssh_rsa -> - [{pref_public_key_algs,['ssh-rsa','ssh-dss']} | Os]; - undefined when A == 'ssh-dss' ; A==ssh_dsa -> - [{pref_public_key_algs,['ssh-dss','ssh-rsa']} | Os]; - undefined -> - throw({error, {eoptions, {public_key_alg,A} }}); - _ -> - Os - end; - V -> - throw({error, {eoptions, {public_key_alg,V} }}) - end. - - -handle_option([], SocketOptions, SshOptions) -> - {SocketOptions, SshOptions}; -handle_option([{system_dir, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{user_dir, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{user_dir_fun, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{silently_accept_hosts, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{user_interaction, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{connect_timeout, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{user, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{dsa_pass_phrase, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{rsa_pass_phrase, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{password, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{user_passwords, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{pwdfun, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{key_cb, {Module, Options}} | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option({key_cb, Module}), - handle_ssh_priv_option({key_cb_private, Options}) | - SshOptions]); -handle_option([{key_cb, Module} | Rest], SocketOptions, SshOptions) -> - handle_option([{key_cb, {Module, []}} | 
Rest], SocketOptions, SshOptions); -handle_option([{keyboard_interact_fun, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -%%Backwards compatibility -handle_option([{allow_user_interaction, Value} | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option({user_interaction, Value}) | SshOptions]); -handle_option([{infofun, _} = Opt | Rest],SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{connectfun, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{disconnectfun, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{unexpectedfun, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{failfun, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{ssh_msg_debug_fun, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -%%Backwards compatibility should not be underscore between ip and v6 in API -handle_option([{ip_v6_disabled, Value} | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option({ipv6_disabled, Value}) | SshOptions]); -handle_option([{ipv6_disabled, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{transport, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{subsystems, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{ssh_cli, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{shell, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{exec, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{auth_methods, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{auth_method_kb_interactive_data, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{pref_public_key_algs, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{preferred_algorithms,_} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{dh_gex_groups,_} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{dh_gex_limits,_} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{quiet_mode, _} = Opt|Rest], SocketOptions, SshOptions) -> - 
handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{idle_time, _} = Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{rekey_limit, _} = Opt|Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{max_sessions, _} = Opt|Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{max_channels, _} = Opt|Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{negotiation_timeout, _} = Opt|Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{parallel_login, _} = Opt|Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -%% (Is handled by proplists:unfold above:) -%% handle_option([parallel_login|Rest], SocketOptions, SshOptions) -> -%% handle_option(Rest, SocketOptions, [handle_ssh_option({parallel_login,true}) | SshOptions]); -handle_option([{minimal_remote_max_packet_size, _} = Opt|Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{id_string, _ID} = Opt|Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{profile, _ID} = Opt|Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{max_random_length_padding, _Bool} = Opt|Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([{tstflg, _} = Opt|Rest], SocketOptions, SshOptions) -> - handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]); -handle_option([Opt | Rest], SocketOptions, SshOptions) -> - handle_option(Rest, [handle_inet_option(Opt) | SocketOptions], SshOptions). 
- - -handle_ssh_option({tstflg,_F} = Opt) -> Opt; -handle_ssh_option({minimal_remote_max_packet_size, Value} = Opt) when is_integer(Value), Value >=0 -> - Opt; -handle_ssh_option({system_dir, Value} = Opt) when is_list(Value) -> - check_dir(Opt); -handle_ssh_option({user_dir, Value} = Opt) when is_list(Value) -> - check_dir(Opt); -handle_ssh_option({user_dir_fun, Value} = Opt) when is_function(Value) -> - Opt; -handle_ssh_option({silently_accept_hosts, Value} = Opt) when is_boolean(Value) -> - Opt; -handle_ssh_option({silently_accept_hosts, Value} = Opt) when is_function(Value,2) -> - Opt; -handle_ssh_option({silently_accept_hosts, {DigestAlg,Value}} = Opt) when is_function(Value,2) -> - case lists:member(DigestAlg, [md5, sha, sha224, sha256, sha384, sha512]) of - true -> - Opt; - false -> - throw({error, {eoptions, Opt}}) - end; -handle_ssh_option({user_interaction, Value} = Opt) when is_boolean(Value) -> - Opt; -handle_ssh_option({preferred_algorithms,[_|_]} = Opt) -> - handle_pref_algs(Opt); - -handle_ssh_option({dh_gex_groups,L0}) when is_list(L0) -> - {dh_gex_groups, - collect_per_size( - lists:foldl( - fun({N,G,P}, Acc) when is_integer(N),N>0, - is_integer(G),G>0, - is_integer(P),P>0 -> - [{N,{G,P}} | Acc]; - ({N,{G,P}}, Acc) when is_integer(N),N>0, - is_integer(G),G>0, - is_integer(P),P>0 -> - [{N,{G,P}} | Acc]; - ({N,GPs}, Acc) when is_list(GPs) -> - lists:foldr(fun({Gi,Pi}, Acci) when is_integer(Gi),Gi>0, - is_integer(Pi),Pi>0 -> - [{N,{Gi,Pi}} | Acci] - end, Acc, GPs) - end, [], L0))}; - -handle_ssh_option({dh_gex_groups,{Tag,File=[C|_]}}=Opt) when is_integer(C), C>0, - Tag == file ; - Tag == ssh_moduli_file -> - {ok,GroupDefs} = - case Tag of - file -> - file:consult(File); - ssh_moduli_file -> - case file:open(File,[read]) of - {ok,D} -> - try - {ok,Moduli} = read_moduli_file(D, 1, []), - file:close(D), - {ok, Moduli} - catch - _:_ -> - throw({error, {{eoptions, Opt}, "Bad format in file "++File}}) - end; - {error,enoent} -> - throw({error, {{eoptions, Opt}, "File not found:"++File}}); - {error,Error} -> - throw({error, {{eoptions, Opt}, io_lib:format("Error reading file ~s: ~p",[File,Error])}}) - end - end, - - try - handle_ssh_option({dh_gex_groups,GroupDefs}) - catch - _:_ -> - throw({error, {{eoptions, Opt}, "Bad format in file: "++File}}) - end; - - -handle_ssh_option({dh_gex_limits,{Min,Max}} = Opt) when is_integer(Min), Min>0, - is_integer(Max), Max>=Min -> - %% Server - Opt; -handle_ssh_option({dh_gex_limits,{Min,I,Max}} = Opt) when is_integer(Min), Min>0, - is_integer(I), I>=Min, - is_integer(Max), Max>=I -> - %% Client - Opt; -handle_ssh_option({pref_public_key_algs, Value} = Opt) when is_list(Value), length(Value) >= 1 -> - case handle_user_pref_pubkey_algs(Value, []) of - {true, NewOpts} -> - {pref_public_key_algs, NewOpts}; - _ -> - throw({error, {eoptions, Opt}}) - end; -handle_ssh_option({connect_timeout, Value} = Opt) when is_integer(Value); Value == infinity -> - Opt; -handle_ssh_option({max_sessions, Value} = Opt) when is_integer(Value), Value>0 -> - Opt; -handle_ssh_option({max_channels, Value} = Opt) when is_integer(Value), Value>0 -> - Opt; -handle_ssh_option({negotiation_timeout, Value} = Opt) when is_integer(Value); Value == infinity -> - Opt; -handle_ssh_option({parallel_login, Value} = Opt) when Value==true ; Value==false -> - Opt; -handle_ssh_option({user, Value} = Opt) when is_list(Value) -> - Opt; -handle_ssh_option({dsa_pass_phrase, Value} = Opt) when is_list(Value) -> - Opt; -handle_ssh_option({rsa_pass_phrase, Value} = Opt) when is_list(Value) -> 
- Opt; -handle_ssh_option({password, Value} = Opt) when is_list(Value) -> - Opt; -handle_ssh_option({user_passwords, Value} = Opt) when is_list(Value)-> - Opt; -handle_ssh_option({pwdfun, Value} = Opt) when is_function(Value,2) -> - Opt; -handle_ssh_option({pwdfun, Value} = Opt) when is_function(Value,4) -> - Opt; -handle_ssh_option({key_cb, Value} = Opt) when is_atom(Value) -> - Opt; -handle_ssh_option({key_cb, {CallbackMod, CallbackOptions}} = Opt) when is_atom(CallbackMod), - is_list(CallbackOptions) -> - Opt; -handle_ssh_option({keyboard_interact_fun, Value} = Opt) when is_function(Value,3) -> - Opt; -handle_ssh_option({compression, Value} = Opt) when is_atom(Value) -> - Opt; -handle_ssh_option({exec, {Module, Function, _}} = Opt) when is_atom(Module), - is_atom(Function) -> - Opt; -handle_ssh_option({exec, Function} = Opt) when is_function(Function) -> - Opt; -handle_ssh_option({auth_methods, Value} = Opt) when is_list(Value) -> - Opt; -handle_ssh_option({auth_method_kb_interactive_data, {Name,Instruction,Prompt,Echo}} = Opt) when is_list(Name), - is_list(Instruction), - is_list(Prompt), - is_boolean(Echo) -> - Opt; -handle_ssh_option({auth_method_kb_interactive_data, F} = Opt) when is_function(F,3) -> - Opt; -handle_ssh_option({infofun, Value} = Opt) when is_function(Value) -> - Opt; -handle_ssh_option({connectfun, Value} = Opt) when is_function(Value) -> - Opt; -handle_ssh_option({disconnectfun, Value} = Opt) when is_function(Value) -> - Opt; -handle_ssh_option({unexpectedfun, Value} = Opt) when is_function(Value,2) -> - Opt; -handle_ssh_option({failfun, Value} = Opt) when is_function(Value) -> - Opt; -handle_ssh_option({ssh_msg_debug_fun, Value} = Opt) when is_function(Value,4) -> - Opt; - -handle_ssh_option({ipv6_disabled, Value} = Opt) when is_boolean(Value) -> - throw({error, {{ipv6_disabled, Opt}, option_no_longer_valid_use_inet_option_instead}}); -handle_ssh_option({transport, {Protocol, Cb, ClosTag}} = Opt) when is_atom(Protocol), - is_atom(Cb), - is_atom(ClosTag) -> - Opt; -handle_ssh_option({subsystems, Value} = Opt) when is_list(Value) -> - Opt; -handle_ssh_option({ssh_cli, {Cb, _}}= Opt) when is_atom(Cb) -> - Opt; -handle_ssh_option({ssh_cli, no_cli} = Opt) -> - Opt; -handle_ssh_option({shell, {Module, Function, _}} = Opt) when is_atom(Module), - is_atom(Function) -> - Opt; -handle_ssh_option({shell, Value} = Opt) when is_function(Value) -> - Opt; -handle_ssh_option({quiet_mode, Value} = Opt) when is_boolean(Value) -> - Opt; -handle_ssh_option({idle_time, Value} = Opt) when is_integer(Value), Value > 0 -> - Opt; -handle_ssh_option({rekey_limit, Value} = Opt) when is_integer(Value) -> - Opt; -handle_ssh_option({id_string, random}) -> - {id_string, {random,2,5}}; %% 2 - 5 random characters -handle_ssh_option({id_string, ID} = Opt) when is_list(ID) -> - Opt; -handle_ssh_option({max_random_length_padding, Value} = Opt) when is_integer(Value), - Value =< 255 -> - Opt; -handle_ssh_option({profile, Value} = Opt) when is_atom(Value) -> - Opt; -handle_ssh_option(Opt) -> - throw({error, {eoptions, Opt}}). - -handle_ssh_priv_option({key_cb_private, Value} = Opt) when is_list(Value) -> - Opt. 
- -handle_inet_option({active, _} = Opt) -> - throw({error, {{eoptions, Opt}, "SSH has built in flow control, " - "and active is handled internally, user is not allowed" - "to specify this option"}}); - -handle_inet_option({inet, Value}) when (Value == inet) or (Value == inet6) -> - Value; -handle_inet_option({reuseaddr, _} = Opt) -> - throw({error, {{eoptions, Opt},"Is set internally, user is not allowed" - "to specify this option"}}); -%% Option verified by inet -handle_inet_option(Opt) -> - Opt. - - -%% Check preferred algs - -handle_pref_algs({preferred_algorithms,Algs}) -> - try alg_duplicates(Algs, [], []) of - [] -> - {preferred_algorithms, - [try ssh_transport:supported_algorithms(Key) - of - DefAlgs -> handle_pref_alg(Key,Vals,DefAlgs) - catch - _:_ -> throw({error, {{eoptions, {preferred_algorithms,Key}}, - "Bad preferred_algorithms key"}}) - end || {Key,Vals} <- Algs] - }; - - Dups -> - throw({error, {{eoptions, {preferred_algorithms,Dups}}, "Duplicates found"}}) - catch - _:_ -> - throw({error, {{eoptions, preferred_algorithms}, "Malformed"}}) - end. - -alg_duplicates([{K,V}|KVs], Ks, Dups0) -> - Dups = - case lists:member(K,Ks) of - true -> - [K|Dups0]; - false -> - Dups0 - end, - case V--lists:usort(V) of - [] -> - alg_duplicates(KVs, [K|Ks], Dups); - Ds -> - alg_duplicates(KVs, [K|Ks], Dups++Ds) - end; -alg_duplicates([], _Ks, Dups) -> - Dups. - -handle_pref_alg(Key, - Vs=[{client2server,C2Ss=[_|_]},{server2client,S2Cs=[_|_]}], - [{client2server,Sup_C2Ss},{server2client,Sup_S2Cs}] - ) -> - chk_alg_vs(Key, C2Ss, Sup_C2Ss), - chk_alg_vs(Key, S2Cs, Sup_S2Cs), - {Key, Vs}; - -handle_pref_alg(Key, - Vs=[{server2client,[_|_]},{client2server,[_|_]}], - Sup=[{client2server,_},{server2client,_}] - ) -> - handle_pref_alg(Key, lists:reverse(Vs), Sup); - -handle_pref_alg(Key, - Vs=[V|_], - Sup=[{client2server,_},{server2client,_}] - ) when is_atom(V) -> - handle_pref_alg(Key, [{client2server,Vs},{server2client,Vs}], Sup); - -handle_pref_alg(Key, - Vs=[V|_], - Sup=[S|_] - ) when is_atom(V), is_atom(S) -> - chk_alg_vs(Key, Vs, Sup), - {Key, Vs}; - -handle_pref_alg(Key, Vs, _) -> - throw({error, {{eoptions, {preferred_algorithms,[{Key,Vs}]}}, "Badly formed list"}}). - -chk_alg_vs(OptKey, Values, SupportedValues) -> - case (Values -- SupportedValues) of - [] -> Values; - Bad -> throw({error, {{eoptions, {OptKey,Bad}}, "Unsupported value(s) found"}}) - end. - -handle_ip(Inet) -> %% Default to ipv4 - case lists:member(inet, Inet) of - true -> - Inet; - false -> - case lists:member(inet6, Inet) of - true -> - Inet; - false -> - [inet | Inet] - end - end. - -check_dir({_,Dir} = Opt) -> - case directory_exist_readable(Dir) of - ok -> - Opt; - {error,Error} -> - throw({error, {eoptions,{Opt,Error}}}) - end. - -directory_exist_readable(Dir) -> - case file:read_file_info(Dir) of - {ok, #file_info{type = directory, - access = Access}} -> - case Access of - read -> ok; - read_write -> ok; - _ -> {error, eacces} - end; - - {ok, #file_info{}}-> - {error, enotdir}; - - {error, Error} -> - {error, Error} - end. - - - -collect_per_size(L) -> - lists:foldr( - fun({Sz,GP}, [{Sz,GPs}|Acc]) -> [{Sz,[GP|GPs]}|Acc]; - ({Sz,GP}, Acc) -> [{Sz,[GP]}|Acc] - end, [], lists:sort(L)). 
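%% ---------------------------------------------------------------------
%% Sketch of a preferred_algorithms value in the shape validated by the
%% removed handle_pref_algs/1 above; the algorithm names are illustrative
%% only:
%%
%%   {preferred_algorithms,
%%    [{kex,    ['diffie-hellman-group14-sha1']},
%%     {cipher, [{client2server, ['aes128-ctr']},
%%               {server2client, ['aes128-ctr','aes128-cbc']}]}]}
%%
%% A plain list of atoms for a two-directional key (cipher, mac,
%% compression) is also accepted and is expanded into identical
%% client2server and server2client lists.
%% ---------------------------------------------------------------------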
- -read_moduli_file(D, I, Acc) -> - case io:get_line(D,"") of - {error,Error} -> - {error,Error}; - eof -> - {ok, Acc}; - "#" ++ _ -> read_moduli_file(D, I+1, Acc); - <<"#",_/binary>> -> read_moduli_file(D, I+1, Acc); - Data -> - Line = if is_binary(Data) -> binary_to_list(Data); - is_list(Data) -> Data - end, - try - [_Time,_Type,_Tests,_Tries,Size,G,P] = string:tokens(Line," \r\n"), - M = {list_to_integer(Size), - {list_to_integer(G), list_to_integer(P,16)} - }, - read_moduli_file(D, I+1, [M|Acc]) - catch - _:_ -> - read_moduli_file(D, I+1, Acc) - end - end. - -handle_user_pref_pubkey_algs([], Acc) -> - {true, lists:reverse(Acc)}; -handle_user_pref_pubkey_algs([H|T], Acc) -> - case lists:member(H, ?SUPPORTED_USER_KEYS) of - true -> - handle_user_pref_pubkey_algs(T, [H| Acc]); - - false when H==ssh_dsa -> handle_user_pref_pubkey_algs(T, ['ssh-dss'| Acc]); - false when H==ssh_rsa -> handle_user_pref_pubkey_algs(T, ['ssh-rsa'| Acc]); - - false -> - false - end. - fmt_host({A,B,C,D}) -> lists:concat([A,".",B,".",C,".",D]); fmt_host(T={_,_,_,_,_,_,_,_}) -> diff --git a/lib/ssh/src/ssh.hrl b/lib/ssh/src/ssh.hrl index 4cd91177f6..c1ba58ed40 100644 --- a/lib/ssh/src/ssh.hrl +++ b/lib/ssh/src/ssh.hrl @@ -33,6 +33,10 @@ -define(REKEY_DATA_TIMOUT, 60000). -define(DEFAULT_PROFILE, default). +-define(DEFAULT_TRANSPORT, {tcp, gen_tcp, tcp_closed} ). + +-define(MAX_RND_PADDING_LEN, 15). + -define(SUPPORTED_AUTH_METHODS, "publickey,keyboard-interactive,password"). -define(SUPPORTED_USER_KEYS, ['ssh-rsa','ssh-dss','ecdsa-sha2-nistp256','ecdsa-sha2-nistp384','ecdsa-sha2-nistp521']). @@ -64,10 +68,49 @@ -define(string_utf8(X), << ?STRING(unicode:characters_to_binary(X)) >> ). -define(binary(X), << ?STRING(X) >>). +%% Cipher details -define(SSH_CIPHER_NONE, 0). -define(SSH_CIPHER_3DES, 3). -define(SSH_CIPHER_AUTHFILE, ?SSH_CIPHER_3DES). +%% Option access macros +-define(do_get_opt(C,K,O), ssh_options:get_value(C,K,O, ?MODULE,?LINE)). +-define(do_get_opt(C,K,O,D), ssh_options:get_value(C,K,O,D,?MODULE,?LINE)). + +-define(GET_OPT(Key,Opts), ?do_get_opt(user_options, Key,Opts ) ). +-define(GET_INTERNAL_OPT(Key,Opts), ?do_get_opt(internal_options,Key,Opts ) ). +-define(GET_INTERNAL_OPT(Key,Opts,Def), ?do_get_opt(internal_options,Key,Opts,Def) ). +-define(GET_SOCKET_OPT(Key,Opts), ?do_get_opt(socket_options, Key,Opts ) ). +-define(GET_SOCKET_OPT(Key,Opts,Def), ?do_get_opt(socket_options, Key,Opts,Def) ). + +-define(do_put_opt(C,KV,O), ssh_options:put_value(C,KV,O, ?MODULE,?LINE)). + +-define(PUT_OPT(KeyVal,Opts), ?do_put_opt(user_options, KeyVal,Opts) ). +-define(PUT_INTERNAL_OPT(KeyVal,Opts), ?do_put_opt(internal_options,KeyVal,Opts) ). +-define(PUT_SOCKET_OPT(KeyVal,Opts), ?do_put_opt(socket_options, KeyVal,Opts) ). + +%% Types +-type role() :: client | server . +-type ok_error(SuccessType) :: {ok, SuccessType} | {error, any()} . +-type daemon_ref() :: pid() . + +-type subsystem_spec() :: {subsystem_name(), {channel_callback(), channel_init_args()}} . +-type subsystem_name() :: string() . +-type channel_callback() :: atom() . +-type channel_init_args() :: list() . + +-type algs_list() :: list( alg_entry() ). +-type alg_entry() :: {kex, simple_algs()} + | {public_key, simple_algs()} + | {cipher, double_algs()} + | {mac, double_algs()} + | {compression, double_algs()} . +-type simple_algs() :: list( atom() ) . +-type double_algs() :: list( {client2serverlist,simple_algs()} | {server2client,simple_algs()} ) + | simple_algs() . 
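%% ---------------------------------------------------------------------
%% Usage sketch for the option-access macros above (assumes the
%% ssh_options:get_value/put_value functions they expand to; the option
%% names are ones used elsewhere in this patch):
%%
%%   Profile = ?GET_OPT(profile, Options0),
%%   Fd      = ?GET_SOCKET_OPT(fd, Options0),
%%   Options = ?PUT_INTERNAL_OPT([{address,Host}, {port,Port}], Options0),
%%
%% For example, ?GET_OPT(profile, Opts) expands to
%%   ssh_options:get_value(user_options, profile, Opts, ?MODULE, ?LINE).
%%
%% Illustrative algs_list() value (algorithm names are only examples):
%%   [{kex, ['diffie-hellman-group14-sha1']},
%%    {cipher, [{client2server,['aes128-ctr']},
%%              {server2client,['aes128-ctr']}]}]
%% ---------------------------------------------------------------------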
+ + +%% Records -record(ssh, { role, %% client | server @@ -127,7 +170,7 @@ recv_sequence = 0, keyex_key, keyex_info, - random_length_padding = 15, % From RFC 4253 section 6. + random_length_padding = ?MAX_RND_PADDING_LEN, % From RFC 4253 section 6. %% User auth user, diff --git a/lib/ssh/src/ssh_acceptor.erl b/lib/ssh/src/ssh_acceptor.erl index 13c9d9af4a..42be18f2ad 100644 --- a/lib/ssh/src/ssh_acceptor.erl +++ b/lib/ssh/src/ssh_acceptor.erl @@ -25,56 +25,63 @@ -include("ssh.hrl"). %% Internal application API --export([start_link/5, +-export([start_link/4, number_of_connections/1, - callback_listen/3, + callback_listen/2, handle_connection/5]). %% spawn export --export([acceptor_init/6, acceptor_loop/6]). +-export([acceptor_init/5, acceptor_loop/6]). -define(SLEEP_TIME, 200). %%==================================================================== %% Internal application API %%==================================================================== -start_link(Port, Address, SockOpts, Opts, AcceptTimeout) -> - Args = [self(), Port, Address, SockOpts, Opts, AcceptTimeout], +start_link(Port, Address, Options, AcceptTimeout) -> + Args = [self(), Port, Address, Options, AcceptTimeout], proc_lib:start_link(?MODULE, acceptor_init, Args). %%-------------------------------------------------------------------- %%% Internal functions %%-------------------------------------------------------------------- -acceptor_init(Parent, Port, Address, SockOpts, Opts, AcceptTimeout) -> - {_, Callback, _} = - proplists:get_value(transport, Opts, {tcp, gen_tcp, tcp_closed}), - - SockOwner = proplists:get_value(lsock_owner, Opts), - LSock = proplists:get_value(lsocket, Opts), - UseExistingSocket = - case catch inet:sockname(LSock) of - {ok,{_,Port}} -> is_pid(SockOwner); - _ -> false - end, - - case UseExistingSocket of - true -> - proc_lib:init_ack(Parent, {ok, self()}), +acceptor_init(Parent, Port, Address, Opts, AcceptTimeout) -> + {_, Callback, _} = ?GET_OPT(transport, Opts), + try + {LSock0,SockOwner0} = ?GET_INTERNAL_OPT(lsocket, Opts), + true = is_pid(SockOwner0), + {ok,{_,Port}} = inet:sockname(LSock0), + {LSock0, SockOwner0} + of + {LSock, SockOwner} -> + %% Use existing socket + proc_lib:init_ack(Parent, {ok, self()}), request_ownership(LSock, SockOwner), - acceptor_loop(Callback, Port, Address, Opts, LSock, AcceptTimeout); - - false -> - case (catch do_socket_listen(Callback, Port, SockOpts)) of - {ok, ListenSocket} -> - proc_lib:init_ack(Parent, {ok, self()}), - acceptor_loop(Callback, - Port, Address, Opts, ListenSocket, AcceptTimeout); - Error -> - proc_lib:init_ack(Parent, Error), - error - end + acceptor_loop(Callback, Port, Address, Opts, LSock, AcceptTimeout) + catch + error:{badkey,lsocket} -> + %% Open new socket + try + socket_listen(Port, Opts) + of + {ok, ListenSocket} -> + proc_lib:init_ack(Parent, {ok, self()}), + {_, Callback, _} = ?GET_OPT(transport, Opts), + acceptor_loop(Callback, + Port, Address, Opts, ListenSocket, AcceptTimeout); + {error,Error} -> + proc_lib:init_ack(Parent, Error), + {error,Error} + catch + _:_ -> + {error,listen_socket_failed} + end; + + _:_ -> + {error,use_existing_socket_failed} end. + request_ownership(LSock, SockOwner) -> SockOwner ! {request_control,LSock,self()}, receive @@ -82,23 +89,25 @@ request_ownership(LSock, SockOwner) -> end. -do_socket_listen(Callback, Port0, Opts) -> - Port = - case proplists:get_value(fd, Opts) of - undefined -> Port0; - _ -> 0 - end, - callback_listen(Callback, Port, Opts). 
- -callback_listen(Callback, Port, Opts0) -> - Opts = [{active, false}, {reuseaddr,true} | Opts0], - case Callback:listen(Port, Opts) of +socket_listen(Port0, Opts) -> + Port = case ?GET_SOCKET_OPT(fd, Opts) of + undefined -> Port0; + _ -> 0 + end, + callback_listen(Port, Opts). + + +callback_listen(Port, Opts0) -> + {_, Callback, _} = ?GET_OPT(transport, Opts0), + Opts = ?PUT_SOCKET_OPT([{active, false}, {reuseaddr,true}], Opts0), + SockOpts = ?GET_OPT(socket_options, Opts), + case Callback:listen(Port, SockOpts) of {error, nxdomain} -> - Callback:listen(Port, lists:delete(inet6, Opts)); + Callback:listen(Port, lists:delete(inet6, SockOpts)); {error, enetunreach} -> - Callback:listen(Port, lists:delete(inet6, Opts)); + Callback:listen(Port, lists:delete(inet6, SockOpts)); {error, eafnosupport} -> - Callback:listen(Port, lists:delete(inet6, Opts)); + Callback:listen(Port, lists:delete(inet6, SockOpts)); Other -> Other end. @@ -120,21 +129,21 @@ acceptor_loop(Callback, Port, Address, Opts, ListenSocket, AcceptTimeout) -> end. handle_connection(Callback, Address, Port, Options, Socket) -> - SSHopts = proplists:get_value(ssh_opts, Options, []), - Profile = proplists:get_value(profile, SSHopts, ?DEFAULT_PROFILE), + Profile = ?GET_OPT(profile, Options), SystemSup = ssh_system_sup:system_supervisor(Address, Port, Profile), - MaxSessions = proplists:get_value(max_sessions,SSHopts,infinity), + MaxSessions = ?GET_OPT(max_sessions, Options), case number_of_connections(SystemSup) < MaxSessions of true -> {ok, SubSysSup} = ssh_system_sup:start_subsystem(SystemSup, Options), ConnectionSup = ssh_subsystem_sup:connection_supervisor(SubSysSup), - Timeout = proplists:get_value(negotiation_timeout, SSHopts, 2*60*1000), + NegTimeout = ?GET_OPT(negotiation_timeout, Options), ssh_connection_handler:start_connection(server, Socket, - [{supervisors, [{system_sup, SystemSup}, - {subsystem_sup, SubSysSup}, - {connection_sup, ConnectionSup}]} - | Options], Timeout); + ?PUT_INTERNAL_OPT( + {supervisors, [{system_sup, SystemSup}, + {subsystem_sup, SubSysSup}, + {connection_sup, ConnectionSup}]}, + Options), NegTimeout); false -> Callback:close(Socket), IPstr = if is_tuple(Address) -> inet:ntoa(Address); diff --git a/lib/ssh/src/ssh_acceptor_sup.erl b/lib/ssh/src/ssh_acceptor_sup.erl index 129f85a3e0..77f7826918 100644 --- a/lib/ssh/src/ssh_acceptor_sup.erl +++ b/lib/ssh/src/ssh_acceptor_sup.erl @@ -44,14 +44,13 @@ start_link(Servers) -> supervisor:start_link(?MODULE, [Servers]). 
-start_child(AccSup, ServerOpts) -> - Spec = child_spec(ServerOpts), +start_child(AccSup, Options) -> + Spec = child_spec(Options), case supervisor:start_child(AccSup, Spec) of {error, already_present} -> - Address = proplists:get_value(address, ServerOpts), - Port = proplists:get_value(port, ServerOpts), - Profile = proplists:get_value(profile, - proplists:get_value(ssh_opts, ServerOpts), ?DEFAULT_PROFILE), + Address = ?GET_INTERNAL_OPT(address, Options), + Port = ?GET_INTERNAL_OPT(port, Options), + Profile = ?GET_OPT(profile, Options), stop_child(AccSup, Address, Port, Profile), supervisor:start_child(AccSup, Spec); Reply -> @@ -70,24 +69,23 @@ stop_child(AccSup, Address, Port, Profile) -> %%%========================================================================= %%% Supervisor callback %%%========================================================================= -init([ServerOpts]) -> +init([Options]) -> RestartStrategy = one_for_one, MaxR = 10, MaxT = 3600, - Children = [child_spec(ServerOpts)], + Children = [child_spec(Options)], {ok, {{RestartStrategy, MaxR, MaxT}, Children}}. %%%========================================================================= %%% Internal functions %%%========================================================================= -child_spec(ServerOpts) -> - Address = proplists:get_value(address, ServerOpts), - Port = proplists:get_value(port, ServerOpts), - Timeout = proplists:get_value(timeout, ServerOpts, ?DEFAULT_TIMEOUT), - Profile = proplists:get_value(profile, proplists:get_value(ssh_opts, ServerOpts), ?DEFAULT_PROFILE), +child_spec(Options) -> + Address = ?GET_INTERNAL_OPT(address, Options), + Port = ?GET_INTERNAL_OPT(port, Options), + Timeout = ?GET_INTERNAL_OPT(timeout, Options, ?DEFAULT_TIMEOUT), + Profile = ?GET_OPT(profile, Options), Name = id(Address, Port, Profile), - SocketOpts = proplists:get_value(socket_opts, ServerOpts), - StartFunc = {ssh_acceptor, start_link, [Port, Address, SocketOpts, ServerOpts, Timeout]}, + StartFunc = {ssh_acceptor, start_link, [Port, Address, Options, Timeout]}, Restart = transient, Shutdown = brutal_kill, Modules = [ssh_acceptor], diff --git a/lib/ssh/src/ssh_auth.erl b/lib/ssh/src/ssh_auth.erl index 9b54ecb2dd..88c8144063 100644 --- a/lib/ssh/src/ssh_auth.erl +++ b/lib/ssh/src/ssh_auth.erl @@ -96,14 +96,14 @@ unique(L) -> password_msg([#ssh{opts = Opts, io_cb = IoCb, user = User, service = Service} = Ssh0]) -> {Password,Ssh} = - case proplists:get_value(password, Opts) of + case ?GET_OPT(password, Opts) of undefined when IoCb == ssh_no_io -> {not_ok, Ssh0}; undefined -> - {IoCb:read_password("ssh password: ",Ssh0), Ssh0}; + {IoCb:read_password("ssh password: ",Opts), Ssh0}; PW -> %% If "password" option is given it should not be tried again - {PW, Ssh0#ssh{opts = lists:keyreplace(password,1,Opts,{password,not_ok})}} + {PW, Ssh0#ssh{opts = ?PUT_OPT({password,not_ok}, Opts)}} end, case Password of not_ok -> @@ -123,7 +123,7 @@ password_msg([#ssh{opts = Opts, io_cb = IoCb, keyboard_interactive_msg([#ssh{user = User, opts = Opts, service = Service} = Ssh]) -> - case proplists:get_value(password, Opts) of + case ?GET_OPT(password, Opts) of not_ok -> {not_ok,Ssh}; % No need to use a failed pwd once more _ -> @@ -141,8 +141,9 @@ publickey_msg([Alg, #ssh{user = User, service = Service, opts = Opts} = Ssh]) -> Hash = ssh_transport:sha(Alg), - KeyCb = proplists:get_value(key_cb, Opts, ssh_file), - case KeyCb:user_key(Alg, Opts) of + {KeyCb,KeyCbOpts} = ?GET_OPT(key_cb, Opts), + UserOpts = ?GET_OPT(user_options, Opts), + case 
KeyCb:user_key(Alg, [{key_cb_private,KeyCbOpts}|UserOpts]) of {ok, PrivKey} -> StrAlgo = atom_to_list(Alg), case encode_public_key(StrAlgo, ssh_transport:extract_public_key(PrivKey)) of @@ -174,13 +175,19 @@ service_request_msg(Ssh) -> %%%---------------------------------------------------------------- init_userauth_request_msg(#ssh{opts = Opts} = Ssh) -> - case user_name(Opts) of - {ok, User} -> + case ?GET_OPT(user, Opts) of + undefined -> + ErrStr = "Could not determine the users name", + ssh_connection_handler:disconnect( + #ssh_msg_disconnect{code = ?SSH_DISCONNECT_ILLEGAL_USER_NAME, + description = ErrStr}); + + User -> Msg = #ssh_msg_userauth_request{user = User, service = "ssh-connection", method = "none", data = <<>>}, - Algs0 = proplists:get_value(pref_public_key_algs, Opts, ?SUPPORTED_USER_KEYS), + Algs0 = ?GET_OPT(pref_public_key_algs, Opts), %% The following line is not strictly correct. The call returns the %% supported HOST key types while we are interested in USER keys. However, %% they "happens" to be the same (for now). This could change.... @@ -194,12 +201,7 @@ init_userauth_request_msg(#ssh{opts = Opts} = Ssh) -> ssh_transport:ssh_packet(Msg, Ssh#ssh{user = User, userauth_preference = Prefs, userauth_methods = none, - service = "ssh-connection"}); - {error, no_user} -> - ErrStr = "Could not determine the users name", - ssh_connection_handler:disconnect( - #ssh_msg_disconnect{code = ?SSH_DISCONNECT_ILLEGAL_USER_NAME, - description = ErrStr}) + service = "ssh-connection"}) end. %%%---------------------------------------------------------------- @@ -342,7 +344,7 @@ handle_userauth_request(#ssh_msg_userauth_request{user = User, false}, {Name, Instruction, Prompt, Echo} = - case proplists:get_value(auth_method_kb_interactive_data, Opts) of + case ?GET_OPT(auth_method_kb_interactive_data, Opts) of undefined -> Default; {_,_,_,_}=V -> @@ -407,9 +409,9 @@ handle_userauth_info_response(#ssh_msg_userauth_info_response{num_responses = 1, user = User, userauth_supported_methods = Methods} = Ssh) -> SendOneEmpty = - (proplists:get_value(tstflg,Opts) == one_empty) + (?GET_OPT(tstflg,Opts) == one_empty) orelse - proplists:get_value(one_empty, proplists:get_value(tstflg,Opts,[]), false), + proplists:get_value(one_empty, ?GET_OPT(tstflg,Opts), false), case check_password(User, unicode:characters_to_list(Password), Opts, Ssh) of {true,Ssh1} when SendOneEmpty==true -> @@ -460,27 +462,8 @@ method_preference(Algs) -> ], Algs). -user_name(Opts) -> - Env = case os:type() of - {win32, _} -> - "USERNAME"; - {unix, _} -> - "LOGNAME" - end, - case proplists:get_value(user, Opts, os:getenv(Env)) of - false -> - case os:getenv("USER") of - false -> - {error, no_user}; - User -> - {ok, User} - end; - User -> - {ok, User} - end. - check_password(User, Password, Opts, Ssh) -> - case proplists:get_value(pwdfun, Opts) of + case ?GET_OPT(pwdfun, Opts) of undefined -> Static = get_password_option(Opts, User), {Password == Static, Ssh}; @@ -510,17 +493,18 @@ check_password(User, Password, Opts, Ssh) -> end. get_password_option(Opts, User) -> - Passwords = proplists:get_value(user_passwords, Opts, []), + Passwords = ?GET_OPT(user_passwords, Opts), case lists:keysearch(User, 1, Passwords) of {value, {User, Pw}} -> Pw; - false -> proplists:get_value(password, Opts, false) + false -> ?GET_OPT(password, Opts) end. 
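%% ---------------------------------------------------------------------
%% Sketch (illustrative daemon options only): check_password/4 and
%% get_password_option/2 above consult the pwdfun, user_passwords and
%% password options, e.g.
%%
%%   ssh:daemon(2222, [{system_dir, "/tmp/ssh_daemon"},
%%                     {user_passwords, [{"alice","secret"}]}]).
%%
%% or, with a callback (my_auth:check/2 is a hypothetical function
%% returning a boolean):
%%
%%   ssh:daemon(2222, [{system_dir, "/tmp/ssh_daemon"},
%%                     {pwdfun, fun(User, Pwd) -> my_auth:check(User, Pwd) end}]).
%% ---------------------------------------------------------------------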
pre_verify_sig(User, Alg, KeyBlob, Opts) -> try {ok, Key} = decode_public_key_v2(KeyBlob, Alg), - KeyCb = proplists:get_value(key_cb, Opts, ssh_file), - KeyCb:is_auth_key(Key, User, Opts) + {KeyCb,KeyCbOpts} = ?GET_OPT(key_cb, Opts), + UserOpts = ?GET_OPT(user_options, Opts), + KeyCb:is_auth_key(Key, User, [{key_cb_private,KeyCbOpts}|UserOpts]) catch _:_ -> false @@ -529,9 +513,10 @@ pre_verify_sig(User, Alg, KeyBlob, Opts) -> verify_sig(SessionId, User, Service, Alg, KeyBlob, SigWLen, Opts) -> try {ok, Key} = decode_public_key_v2(KeyBlob, Alg), - KeyCb = proplists:get_value(key_cb, Opts, ssh_file), - case KeyCb:is_auth_key(Key, User, Opts) of + {KeyCb,KeyCbOpts} = ?GET_OPT(key_cb, Opts), + UserOpts = ?GET_OPT(user_options, Opts), + case KeyCb:is_auth_key(Key, User, [{key_cb_private,KeyCbOpts}|UserOpts]) of true -> PlainText = build_sig_data(SessionId, User, Service, KeyBlob, Alg), @@ -565,9 +550,9 @@ decode_keyboard_interactive_prompts(_NumPrompts, Data) -> keyboard_interact_get_responses(IoCb, Opts, Name, Instr, PromptInfos) -> NumPrompts = length(PromptInfos), - keyboard_interact_get_responses(proplists:get_value(user_interaction, Opts, true), - proplists:get_value(keyboard_interact_fun, Opts), - proplists:get_value(password, Opts, undefined), IoCb, Name, + keyboard_interact_get_responses(?GET_OPT(user_interaction, Opts), + ?GET_OPT(keyboard_interact_fun, Opts), + ?GET_OPT(password, Opts), IoCb, Name, Instr, PromptInfos, Opts, NumPrompts). diff --git a/lib/ssh/src/ssh_cli.erl b/lib/ssh/src/ssh_cli.erl index 8af0ecc5f9..4c4f61e036 100644 --- a/lib/ssh/src/ssh_cli.erl +++ b/lib/ssh/src/ssh_cli.erl @@ -453,14 +453,20 @@ move_cursor(From, To, #ssh_pty{width=Width, term=Type}) -> %% %%% make sure that there is data to send %% %%% before calling ssh_connection:send write_chars(ConnectionHandler, ChannelId, Chars) -> - case erlang:iolist_size(Chars) of - 0 -> - ok; - _ -> - ssh_connection:send(ConnectionHandler, ChannelId, - ?SSH_EXTENDED_DATA_DEFAULT, Chars) + case has_chars(Chars) of + false -> ok; + true -> ssh_connection:send(ConnectionHandler, + ChannelId, + ?SSH_EXTENDED_DATA_DEFAULT, + Chars) end. +has_chars([C|_]) when is_integer(C) -> true; +has_chars([H|T]) when is_list(H) ; is_binary(H) -> has_chars(H) orelse has_chars(T); +has_chars(<<_:8,_/binary>>) -> true; +has_chars(_) -> false. + + %%% tail, works with empty lists tl1([_|A]) -> A; tl1(_) -> []. 
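%% ---------------------------------------------------------------------
%% Illustrative evaluations of has_chars/1 above (a sketch, checked
%% against the clauses; deep iodata containing only empty lists and
%% binaries yields false):
%%   has_chars("abc")                  -> true
%%   has_chars([<<>>, "", [<<"x">>]])  -> true
%%   has_chars(["", [<<>>, []]])       -> false
%% ---------------------------------------------------------------------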
@@ -493,14 +499,12 @@ start_shell(ConnectionHandler, State) -> [peer, user]), ShellFun = case is_function(Shell) of true -> - User = - proplists:get_value(user, ConnectionInfo), + User = proplists:get_value(user, ConnectionInfo), case erlang:fun_info(Shell, arity) of {arity, 1} -> fun() -> Shell(User) end; {arity, 2} -> - {_, PeerAddr} = - proplists:get_value(peer, ConnectionInfo), + {_, PeerAddr} = proplists:get_value(peer, ConnectionInfo), fun() -> Shell(User, PeerAddr) end; _ -> Shell @@ -519,8 +523,7 @@ start_shell(ConnectionHandler, Cmd, #state{exec=Shell} = State) when is_function ConnectionInfo = ssh_connection_handler:connection_info(ConnectionHandler, [peer, user]), - User = - proplists:get_value(user, ConnectionInfo), + User = proplists:get_value(user, ConnectionInfo), ShellFun = case erlang:fun_info(Shell, arity) of {arity, 1} -> @@ -528,8 +531,7 @@ start_shell(ConnectionHandler, Cmd, #state{exec=Shell} = State) when is_function {arity, 2} -> fun() -> Shell(Cmd, User) end; {arity, 3} -> - {_, PeerAddr} = - proplists:get_value(peer, ConnectionInfo), + {_, PeerAddr} = proplists:get_value(peer, ConnectionInfo), fun() -> Shell(Cmd, User, PeerAddr) end; _ -> Shell diff --git a/lib/ssh/src/ssh_connect.hrl b/lib/ssh/src/ssh_connect.hrl index 4fb6bc39f3..c91c56435e 100644 --- a/lib/ssh/src/ssh_connect.hrl +++ b/lib/ssh/src/ssh_connect.hrl @@ -22,9 +22,9 @@ %%% Description : SSH connection protocol --type role() :: client | server . --type connection_ref() :: pid(). -type channel_id() :: pos_integer(). +-type connection_ref() :: pid(). + -define(DEFAULT_PACKET_SIZE, 65536). -define(DEFAULT_WINDOW_SIZE, 10*?DEFAULT_PACKET_SIZE). diff --git a/lib/ssh/src/ssh_connection.erl b/lib/ssh/src/ssh_connection.erl index c7a2c92670..930ccecb4c 100644 --- a/lib/ssh/src/ssh_connection.erl +++ b/lib/ssh/src/ssh_connection.erl @@ -56,8 +56,8 @@ %%-------------------------------------------------------------------- %%-------------------------------------------------------------------- --spec session_channel(pid(), timeout()) -> {ok, channel_id()} | {error, timeout | closed}. --spec session_channel(pid(), integer(), integer(), timeout()) -> {ok, channel_id()} | {error, timeout | closed}. +-spec session_channel(connection_ref(), timeout()) -> {ok, channel_id()} | {error, timeout | closed}. +-spec session_channel(connection_ref(), integer(), integer(), timeout()) -> {ok, channel_id()} | {error, timeout | closed}. %% Description: Opens a channel for a ssh session. A session is a %% remote execution of a program. The program may be a shell, an @@ -81,7 +81,7 @@ session_channel(ConnectionHandler, InitialWindowSize, end. %%-------------------------------------------------------------------- --spec exec(pid(), channel_id(), string(), timeout()) -> +-spec exec(connection_ref(), channel_id(), string(), timeout()) -> success | failure | {error, timeout | closed}. %% Description: Will request that the server start the @@ -92,7 +92,7 @@ exec(ConnectionHandler, ChannelId, Command, TimeOut) -> true, [?string(Command)], TimeOut). %%-------------------------------------------------------------------- --spec shell(pid(), channel_id()) -> _. +-spec shell(connection_ref(), channel_id()) -> _. %% Description: Will request that the user's default shell (typically %% defined in /etc/passwd in UNIX systems) be started at the other @@ -102,7 +102,7 @@ shell(ConnectionHandler, ChannelId) -> ssh_connection_handler:request(ConnectionHandler, self(), ChannelId, "shell", false, <<>>, 0). 
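%% ---------------------------------------------------------------------
%% Client-side usage sketch for the specs above (host, user and password
%% are hypothetical): connection_ref() is the value returned by
%% ssh:connect/3, and channel_id() is the value returned by
%% session_channel/2.
exec_example() ->
    {ok, Conn} = ssh:connect("localhost", 22,
                             [{user,"alice"}, {password,"secret"}]),
    {ok, Chan} = ssh_connection:session_channel(Conn, 5000),
    success    = ssh_connection:exec(Conn, Chan, "echo hello", 5000),
    receive
        {ssh_cm, Conn, {data, Chan, 0, Data}} -> Data
    after 5000 ->
        timeout
    end.
%% ---------------------------------------------------------------------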
%%-------------------------------------------------------------------- --spec subsystem(pid(), channel_id(), string(), timeout()) -> +-spec subsystem(connection_ref(), channel_id(), string(), timeout()) -> success | failure | {error, timeout | closed}. %% %% Description: Executes a predefined subsystem. @@ -112,11 +112,11 @@ subsystem(ConnectionHandler, ChannelId, SubSystem, TimeOut) -> ChannelId, "subsystem", true, [?string(SubSystem)], TimeOut). %%-------------------------------------------------------------------- --spec send(pid(), channel_id(), iodata()) -> +-spec send(connection_ref(), channel_id(), iodata()) -> ok | {error, closed}. --spec send(pid(), channel_id(), integer()| iodata(), timeout() | iodata()) -> +-spec send(connection_ref(), channel_id(), integer()| iodata(), timeout() | iodata()) -> ok | {error, timeout} | {error, closed}. --spec send(pid(), channel_id(), integer(), iodata(), timeout()) -> +-spec send(connection_ref(), channel_id(), integer(), iodata(), timeout()) -> ok | {error, timeout} | {error, closed}. %% %% @@ -134,7 +134,7 @@ send(ConnectionHandler, ChannelId, Type, Data, TimeOut) -> ssh_connection_handler:send(ConnectionHandler, ChannelId, Type, Data, TimeOut). %%-------------------------------------------------------------------- --spec send_eof(pid(), channel_id()) -> ok | {error, closed}. +-spec send_eof(connection_ref(), channel_id()) -> ok | {error, closed}. %% %% %% Description: Sends eof on the channel <ChannelId>. @@ -143,7 +143,7 @@ send_eof(ConnectionHandler, Channel) -> ssh_connection_handler:send_eof(ConnectionHandler, Channel). %%-------------------------------------------------------------------- --spec adjust_window(pid(), channel_id(), integer()) -> ok | {error, closed}. +-spec adjust_window(connection_ref(), channel_id(), integer()) -> ok | {error, closed}. %% %% %% Description: Adjusts the ssh flowcontrol window. @@ -152,7 +152,7 @@ adjust_window(ConnectionHandler, Channel, Bytes) -> ssh_connection_handler:adjust_window(ConnectionHandler, Channel, Bytes). %%-------------------------------------------------------------------- --spec setenv(pid(), channel_id(), string(), string(), timeout()) -> +-spec setenv(connection_ref(), channel_id(), string(), string(), timeout()) -> success | failure | {error, timeout | closed}. %% %% @@ -165,7 +165,7 @@ setenv(ConnectionHandler, ChannelId, Var, Value, TimeOut) -> %%-------------------------------------------------------------------- --spec close(pid(), channel_id()) -> ok. +-spec close(connection_ref(), channel_id()) -> ok. %% %% %% Description: Sends a close message on the channel <ChannelId>. @@ -174,7 +174,7 @@ close(ConnectionHandler, ChannelId) -> ssh_connection_handler:close(ConnectionHandler, ChannelId). %%-------------------------------------------------------------------- --spec reply_request(pid(), boolean(), success | failure, channel_id()) -> ok. +-spec reply_request(connection_ref(), boolean(), success | failure, channel_id()) -> ok. %% %% %% Description: Send status replies to requests that want such replies. @@ -185,9 +185,9 @@ reply_request(_,false, _, _) -> ok. %%-------------------------------------------------------------------- --spec ptty_alloc(pid(), channel_id(), proplists:proplist()) -> +-spec ptty_alloc(connection_ref(), channel_id(), proplists:proplist()) -> success | failiure | {error, closed}. 
--spec ptty_alloc(pid(), channel_id(), proplists:proplist(), timeout()) -> +-spec ptty_alloc(connection_ref(), channel_id(), proplists:proplist(), timeout()) -> success | failiure | {error, timeout} | {error, closed}. %% @@ -197,16 +197,16 @@ reply_request(_,false, _, _) -> ptty_alloc(ConnectionHandler, Channel, Options) -> ptty_alloc(ConnectionHandler, Channel, Options, infinity). ptty_alloc(ConnectionHandler, Channel, Options0, TimeOut) -> - Options = backwards_compatible(Options0, []), - {Width, PixWidth} = pty_default_dimensions(width, Options), - {Height, PixHeight} = pty_default_dimensions(height, Options), + TermData = backwards_compatible(Options0, []), % FIXME + {Width, PixWidth} = pty_default_dimensions(width, TermData), + {Height, PixHeight} = pty_default_dimensions(height, TermData), pty_req(ConnectionHandler, Channel, - proplists:get_value(term, Options, os:getenv("TERM", ?DEFAULT_TERMINAL)), - proplists:get_value(width, Options, Width), - proplists:get_value(height, Options, Height), - proplists:get_value(pixel_widh, Options, PixWidth), - proplists:get_value(pixel_height, Options, PixHeight), - proplists:get_value(pty_opts, Options, []), TimeOut + proplists:get_value(term, TermData, os:getenv("TERM", ?DEFAULT_TERMINAL)), + proplists:get_value(width, TermData, Width), + proplists:get_value(height, TermData, Height), + proplists:get_value(pixel_widh, TermData, PixWidth), + proplists:get_value(pixel_height, TermData, PixHeight), + proplists:get_value(pty_opts, TermData, []), TimeOut ). %%-------------------------------------------------------------------- %% Not yet officialy supported! The following functions are part of the @@ -417,7 +417,8 @@ handle_msg(#ssh_msg_channel_open{channel_type = "session" = Type, maximum_packet_size = PacketSz}, #connection{options = SSHopts} = Connection0, server) -> - MinAcceptedPackSz = proplists:get_value(minimal_remote_max_packet_size, SSHopts, 0), + MinAcceptedPackSz = + ?GET_OPT(minimal_remote_max_packet_size, SSHopts), if MinAcceptedPackSz =< PacketSz -> @@ -574,7 +575,6 @@ handle_msg(#ssh_msg_channel_request{recipient_channel = ChannelId, PixWidth, PixHeight, decode_pty_opts(Modes)}, Channel = ssh_channel:cache_lookup(Cache, ChannelId), - handle_cli_msg(Connection, Channel, {pty, ChannelId, WantReply, PtyRequest}); @@ -691,7 +691,6 @@ handle_cli_msg(#connection{channel_cache = Cache} = Connection, #channel{user = undefined, remote_id = RemoteId, local_id = ChannelId} = Channel0, Reply0) -> - case (catch start_cli(Connection, ChannelId)) of {ok, Pid} -> erlang:monitor(process, Pid), @@ -819,7 +818,7 @@ start_channel(Cb, Id, Args, SubSysSup, Exec, Opts) -> ssh_channel_sup:start_child(ChannelSup, ChildSpec). assert_limit_num_channels_not_exceeded(ChannelSup, Opts) -> - MaxNumChannels = proplists:get_value(max_channels, Opts, infinity), + MaxNumChannels = ?GET_OPT(max_channels, Opts), NumChannels = length([x || {_,_,worker,[ssh_channel]} <- supervisor:which_children(ChannelSup)]), if @@ -858,8 +857,8 @@ setup_session(#connection{channel_cache = Cache check_subsystem("sftp"= SsName, Options) -> - case proplists:get_value(subsystems, Options, no_subsys) of - no_subsys -> + case ?GET_OPT(subsystems, Options) of + no_subsys -> % FIXME: Can 'no_subsys' ever be matched? 
{SsName, {Cb, Opts}} = ssh_sftpd:subsystem_spec([]), {Cb, Opts}; SubSystems -> @@ -867,7 +866,7 @@ check_subsystem("sftp"= SsName, Options) -> end; check_subsystem(SsName, Options) -> - Subsystems = proplists:get_value(subsystems, Options, []), + Subsystems = ?GET_OPT(subsystems, Options), case proplists:get_value(SsName, Subsystems, {none, []}) of Fun when is_function(Fun) -> {Fun, []}; @@ -1022,12 +1021,13 @@ pty_req(ConnectionHandler, Channel, Term, Width, Height, ?uint32(PixWidth),?uint32(PixHeight), encode_pty_opts(PtyOpts)], TimeOut). -pty_default_dimensions(Dimension, Options) -> - case proplists:get_value(Dimension, Options, 0) of +pty_default_dimensions(Dimension, TermData) -> + case proplists:get_value(Dimension, TermData, 0) of N when is_integer(N), N > 0 -> {N, 0}; _ -> - case proplists:get_value(list_to_atom("pixel_" ++ atom_to_list(Dimension)), Options, 0) of + PixelDim = list_to_atom("pixel_" ++ atom_to_list(Dimension)), + case proplists:get_value(PixelDim, TermData, 0) of N when is_integer(N), N > 0 -> {0, N}; _ -> diff --git a/lib/ssh/src/ssh_connection_handler.erl b/lib/ssh/src/ssh_connection_handler.erl index 4496c657c3..b9c643c77e 100644 --- a/lib/ssh/src/ssh_connection_handler.erl +++ b/lib/ssh/src/ssh_connection_handler.erl @@ -76,7 +76,7 @@ %%-------------------------------------------------------------------- -spec start_link(role(), inet:socket(), - proplists:proplist() + ssh_options:options() ) -> {ok, pid()}. %% . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . start_link(Role, Socket, Options) -> @@ -99,12 +99,10 @@ stop(ConnectionHandler)-> %% Internal application API %%==================================================================== --define(DefaultTransport, {tcp, gen_tcp, tcp_closed} ). - %%-------------------------------------------------------------------- -spec start_connection(role(), inet:socket(), - proplists:proplist(), + ssh_options:options(), timeout() ) -> {ok, connection_ref()} | {error, term()}. %% . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . @@ -121,9 +119,8 @@ start_connection(client = Role, Socket, Options, Timeout) -> end; start_connection(server = Role, Socket, Options, Timeout) -> - SSH_Opts = proplists:get_value(ssh_opts, Options, []), try - case proplists:get_value(parallel_login, SSH_Opts, false) of + case ?GET_OPT(parallel_login, Options) of true -> HandshakerPid = spawn_link(fun() -> @@ -346,7 +343,7 @@ renegotiate_data(ConnectionHandler) -> | undefined, last_size_rekey = 0 :: non_neg_integer(), event_queue = [] :: list(), - opts :: proplists:proplist(), + opts :: ssh_options:options(), inet_initial_recbuf_size :: pos_integer() | undefined }). @@ -357,15 +354,14 @@ renegotiate_data(ConnectionHandler) -> %%-------------------------------------------------------------------- -spec init_connection_handler(role(), inet:socket(), - proplists:proplist() + ssh_options:options() ) -> no_return(). %% . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . 
init_connection_handler(Role, Socket, Opts) -> process_flag(trap_exit, true), S0 = init_process_state(Role, Socket, Opts), try - {Protocol, Callback, CloseTag} = - proplists:get_value(transport, Opts, ?DefaultTransport), + {Protocol, Callback, CloseTag} = ?GET_OPT(transport, Opts), S0#data{ssh_params = init_ssh_record(Role, Socket, Opts), transport_protocol = Protocol, transport_cb = Callback, @@ -393,7 +389,7 @@ init_process_state(Role, Socket, Opts) -> port_bindings = [], requests = [], options = Opts}, - starter = proplists:get_value(user_pid, Opts), + starter = ?GET_INTERNAL_OPT(user_pid, Opts), socket = Socket, opts = Opts }, @@ -409,13 +405,18 @@ init_process_state(Role, Socket, Opts) -> init_connection(server, C = #connection{}, Opts) -> - Sups = proplists:get_value(supervisors, Opts), - SystemSup = proplists:get_value(system_sup, Sups), - SubSystemSup = proplists:get_value(subsystem_sup, Sups), + Sups = ?GET_INTERNAL_OPT(supervisors, Opts), + + SystemSup = proplists:get_value(system_sup, Sups), + SubSystemSup = proplists:get_value(subsystem_sup, Sups), ConnectionSup = proplists:get_value(connection_sup, Sups), - Shell = proplists:get_value(shell, Opts), - Exec = proplists:get_value(exec, Opts), - CliSpec = proplists:get_value(ssh_cli, Opts, {ssh_cli, [Shell]}), + + Shell = ?GET_OPT(shell, Opts), + Exec = ?GET_OPT(exec, Opts), + CliSpec = case ?GET_OPT(ssh_cli, Opts) of + undefined -> {ssh_cli, [Shell]}; + Spec -> Spec + end, C#connection{cli_spec = CliSpec, exec = Exec, system_supervisor = SystemSup, @@ -426,41 +427,38 @@ init_connection(server, C = #connection{}, Opts) -> init_ssh_record(Role, Socket, Opts) -> {ok, PeerAddr} = inet:peername(Socket), - KeyCb = proplists:get_value(key_cb, Opts, ssh_file), - AuthMethods = proplists:get_value(auth_methods, - Opts, - case Role of - server -> ?SUPPORTED_AUTH_METHODS; - client -> undefined - end), + KeyCb = ?GET_OPT(key_cb, Opts), + AuthMethods = + case Role of + server -> ?GET_OPT(auth_methods, Opts); + client -> undefined + end, S0 = #ssh{role = Role, key_cb = KeyCb, opts = Opts, userauth_supported_methods = AuthMethods, available_host_keys = supported_host_keys(Role, KeyCb, Opts), - random_length_padding = proplists:get_value(max_random_length_padding, - Opts, - (#ssh{})#ssh.random_length_padding) + random_length_padding = ?GET_OPT(max_random_length_padding, Opts) }, {Vsn, Version} = ssh_transport:versions(Role, Opts), case Role of client -> - PeerName = proplists:get_value(host, Opts), + PeerName = ?GET_INTERNAL_OPT(host, Opts), S0#ssh{c_vsn = Vsn, c_version = Version, - io_cb = case proplists:get_value(user_interaction, Opts, true) of + io_cb = case ?GET_OPT(user_interaction, Opts) of true -> ssh_io; false -> ssh_no_io end, - userauth_quiet_mode = proplists:get_value(quiet_mode, Opts, false), + userauth_quiet_mode = ?GET_OPT(quiet_mode, Opts), peer = {PeerName, PeerAddr} }; server -> S0#ssh{s_vsn = Vsn, s_version = Version, - io_cb = proplists:get_value(io_cb, Opts, ssh_io), + io_cb = ?GET_INTERNAL_OPT(io_cb, Opts, ssh_io), userauth_methods = string:tokens(AuthMethods, ","), kb_tries_left = 3, peer = {undefined, PeerAddr} @@ -849,14 +847,12 @@ handle_event(_, Msg = #ssh_msg_userauth_failure{}, {userauth_keyboard_interactiv handle_event(_, Msg=#ssh_msg_userauth_failure{}, {userauth_keyboard_interactive_info_response, client}, #data{ssh_params = Ssh0} = D0) -> Opts = Ssh0#ssh.opts, - D = case proplists:get_value(password, Opts) of + D = case ?GET_OPT(password, Opts) of undefined -> D0; _ -> D0#data{ssh_params = - Ssh0#ssh{opts = - 
lists:keyreplace(password,1,Opts, - {password,not_ok})}} % FIXME:intermodule dependency + Ssh0#ssh{opts = ?PUT_OPT({password,not_ok}, Opts)}} % FIXME:intermodule dependency end, {next_state, {userauth,client}, D, [{next_event, internal, Msg}]}; @@ -954,7 +950,7 @@ handle_event(cast, renegotiate, _, _) -> handle_event(cast, data_size, {connected,Role}, D) -> {ok, [{send_oct,Sent0}]} = inet:getstat(D#data.socket, [send_oct]), Sent = Sent0 - D#data.last_size_rekey, - MaxSent = proplists:get_value(rekey_limit, D#data.opts, 1024000000), + MaxSent = ?GET_OPT(rekey_limit, D#data.opts), timer:apply_after(?REKEY_DATA_TIMOUT, gen_statem, cast, [self(), data_size]), case Sent >= MaxSent of true -> @@ -1294,11 +1290,12 @@ handle_event(info, UnexpectedMessage, StateName, D = #data{ssh_params = Ssh}) -> "Unexpected message '~p' received in state '~p'\n" "Role: ~p\n" "Peer: ~p\n" - "Local Address: ~p\n", [UnexpectedMessage, - StateName, - Ssh#ssh.role, - Ssh#ssh.peer, - proplists:get_value(address, Ssh#ssh.opts)])), + "Local Address: ~p\n", + [UnexpectedMessage, + StateName, + Ssh#ssh.role, + Ssh#ssh.peer, + ?GET_INTERNAL_OPT(address, Ssh#ssh.opts)])), error_logger:info_report(Msg), keep_state_and_data; @@ -1312,11 +1309,12 @@ handle_event(info, UnexpectedMessage, StateName, D = #data{ssh_params = Ssh}) -> "Message: ~p\n" "Role: ~p\n" "Peer: ~p\n" - "Local Address: ~p\n", [Other, - UnexpectedMessage, - Ssh#ssh.role, - element(2,Ssh#ssh.peer), - proplists:get_value(address, Ssh#ssh.opts)] + "Local Address: ~p\n", + [Other, + UnexpectedMessage, + Ssh#ssh.role, + element(2,Ssh#ssh.peer), + ?GET_INTERNAL_OPT(address, Ssh#ssh.opts)] )), error_logger:error_report(Msg), keep_state_and_data @@ -1438,11 +1436,11 @@ code_change(_OldVsn, StateName, State, _Extra) -> %%-------------------------------------------------------------------- %% Starting -start_the_connection_child(UserPid, Role, Socket, Options) -> - Sups = proplists:get_value(supervisors, Options), +start_the_connection_child(UserPid, Role, Socket, Options0) -> + Sups = ?GET_INTERNAL_OPT(supervisors, Options0), ConnectionSup = proplists:get_value(connection_sup, Sups), - Opts = [{supervisors, Sups}, {user_pid, UserPid} | proplists:get_value(ssh_opts, Options, [])], - {ok, Pid} = ssh_connection_sup:start_child(ConnectionSup, [Role, Socket, Opts]), + Options = ?PUT_INTERNAL_OPT({user_pid,UserPid}, Options0), + {ok, Pid} = ssh_connection_sup:start_child(ConnectionSup, [Role, Socket, Options]), ok = socket_control(Socket, Pid, Options), Pid. @@ -1481,35 +1479,41 @@ renegotiation(_) -> false. %%-------------------------------------------------------------------- supported_host_keys(client, _, Options) -> try - case proplists:get_value(public_key, - proplists:get_value(preferred_algorithms,Options,[]) - ) of - undefined -> - ssh_transport:default_algorithms(public_key); - L -> - L -- (L--ssh_transport:default_algorithms(public_key)) - end + find_sup_hkeys(Options) of [] -> - {stop, {shutdown, "No public key algs"}}; + error({shutdown, "No public key algs"}); Algs -> [atom_to_list(A) || A<-Algs] catch exit:Reason -> - {stop, {shutdown, Reason}} + error({shutdown, Reason}) end; supported_host_keys(server, KeyCb, Options) -> - [atom_to_list(A) || A <- proplists:get_value(public_key, - proplists:get_value(preferred_algorithms,Options,[]), - ssh_transport:default_algorithms(public_key) - ), + [atom_to_list(A) || A <- find_sup_hkeys(Options), available_host_key(KeyCb, A, Options) ]. 
-%% Alg :: atom() -available_host_key(KeyCb, Alg, Opts) -> - element(1, catch KeyCb:host_key(Alg, Opts)) == ok. +find_sup_hkeys(Options) -> + case proplists:get_value(public_key, + ?GET_OPT(preferred_algorithms,Options) + ) + of + undefined -> + ssh_transport:default_algorithms(public_key); + L -> + NonSupported = L--ssh_transport:supported_algorithms(public_key), + L -- NonSupported + end. + + + +%% Alg :: atom() +available_host_key({KeyCb,KeyCbOpts}, Alg, Opts) -> + UserOpts = ?GET_OPT(user_options, Opts), + element(1, + catch KeyCb:host_key(Alg, [{key_cb_private,KeyCbOpts}|UserOpts])) == ok. send_msg(Msg, State=#data{ssh_params=Ssh0}) when is_tuple(Msg) -> {Bytes, Ssh} = ssh_transport:ssh_packet(Msg, Ssh0), @@ -1765,47 +1769,24 @@ get_repl(X, Acc) -> exit({get_repl,X,Acc}). %%%---------------------------------------------------------------- -disconnect_fun({disconnect,Msg}, D) -> - disconnect_fun(Msg, D); -disconnect_fun(Reason, #data{opts=Opts}) -> - case proplists:get_value(disconnectfun, Opts) of - undefined -> - ok; - Fun -> - catch Fun(Reason) - end. - -unexpected_fun(UnexpectedMessage, #data{opts = Opts, - ssh_params = #ssh{peer = {_,Peer} } - } ) -> - case proplists:get_value(unexpectedfun, Opts) of - undefined -> - report; - Fun -> - catch Fun(UnexpectedMessage, Peer) - end. +-define(CALL_FUN(Key,D), catch (?GET_OPT(Key, D#data.opts)) ). + +disconnect_fun({disconnect,Msg}, D) -> ?CALL_FUN(disconnectfun,D)(Msg); +disconnect_fun(Reason, D) -> ?CALL_FUN(disconnectfun,D)(Reason). +unexpected_fun(UnexpectedMessage, #data{ssh_params = #ssh{peer = {_,Peer} }} = D) -> + ?CALL_FUN(unexpectedfun,D)(UnexpectedMessage, Peer). debug_fun(#ssh_msg_debug{always_display = Display, message = DbgMsg, language = Lang}, - #data{opts = Opts}) -> - case proplists:get_value(ssh_msg_debug_fun, Opts) of - undefined -> - ok; - Fun -> - catch Fun(self(), Display, DbgMsg, Lang) - end. + D) -> + ?CALL_FUN(ssh_msg_debug_fun,D)(self(), Display, DbgMsg, Lang). -connected_fun(User, Method, #data{ssh_params = #ssh{peer = {_,Peer}}, - opts = Opts}) -> - case proplists:get_value(connectfun, Opts) of - undefined -> - ok; - Fun -> - catch Fun(User, Peer, Method) - end. +connected_fun(User, Method, #data{ssh_params = #ssh{peer = {_,Peer}}} = D) -> + ?CALL_FUN(connectfun,D)(User, Peer, Method). + retry_fun(_, undefined, _) -> ok; @@ -1819,7 +1800,7 @@ retry_fun(User, Reason, #data{ssh_params = #ssh{opts = Opts, _ -> {infofun, Reason} end, - Fun = proplists:get_value(Tag, Opts, fun(_,_)-> ok end), + Fun = ?GET_OPT(Tag, Opts), try erlang:fun_info(Fun, arity) of {arity, 2} -> %% Backwards compatible @@ -1838,7 +1819,7 @@ retry_fun(User, Reason, #data{ssh_params = #ssh{opts = Opts, %%% channels open for a while. cache_init_idle_timer(D) -> - case proplists:get_value(idle_time, D#data.opts, infinity) of + case ?GET_OPT(idle_time, D#data.opts) of infinity -> D#data{idle_timer_value = infinity, idle_timer_ref = infinity % A flag used later... 
@@ -1901,9 +1882,8 @@ start_channel_request_timer(Channel, From, Time) -> %%% Connection start and initalization helpers socket_control(Socket, Pid, Options) -> - {_, TransportCallback, _} = % For example {_,gen_tcp,_} - proplists:get_value(transport, Options, ?DefaultTransport), - case TransportCallback:controlling_process(Socket, Pid) of + {_, Callback, _} = ?GET_OPT(transport, Options), + case Callback:controlling_process(Socket, Pid) of ok -> gen_statem:cast(Pid, socket_control); {error, Reason} -> diff --git a/lib/ssh/src/ssh_file.erl b/lib/ssh/src/ssh_file.erl index 216f65f33a..898b4cc5c4 100644 --- a/lib/ssh/src/ssh_file.erl +++ b/lib/ssh/src/ssh_file.erl @@ -192,8 +192,8 @@ lookup_user_key(Key, User, Opts) -> ssh_dir({remoteuser, User}, Opts) -> case proplists:get_value(user_dir_fun, Opts) of undefined -> - case proplists:get_value(user_dir, Opts) of - undefined -> + case proplists:get_value(user_dir, Opts, false) of + false -> default_user_dir(); Dir -> Dir diff --git a/lib/ssh/src/ssh_io.erl b/lib/ssh/src/ssh_io.erl index 1d8f370884..6828fd4760 100644 --- a/lib/ssh/src/ssh_io.erl +++ b/lib/ssh/src/ssh_io.erl @@ -27,17 +27,17 @@ -export([yes_no/2, read_password/2, read_line/2, format/2]). -include("ssh.hrl"). -read_line(Prompt, Ssh) -> +read_line(Prompt, Opts) -> format("~s", [listify(Prompt)]), - proplists:get_value(user_pid, Ssh) ! {self(), question}, + ?GET_INTERNAL_OPT(user_pid, Opts) ! {self(), question}, receive Answer when is_list(Answer) -> Answer end. -yes_no(Prompt, Ssh) -> +yes_no(Prompt, Opts) -> format("~s [y/n]?", [Prompt]), - proplists:get_value(user_pid, Ssh#ssh.opts) ! {self(), question}, + ?GET_INTERNAL_OPT(user_pid, Opts) ! {self(), question}, receive %% I can't see that the atoms y and n are ever received, but it must %% be investigated before removing @@ -52,15 +52,13 @@ yes_no(Prompt, Ssh) -> "N" -> no; _ -> format("please answer y or n\n",[]), - yes_no(Prompt, Ssh) + yes_no(Prompt, Opts) end end. - -read_password(Prompt, #ssh{opts=Opts}) -> read_password(Prompt, Opts); -read_password(Prompt, Opts) when is_list(Opts) -> +read_password(Prompt, Opts) -> format("~s", [listify(Prompt)]), - proplists:get_value(user_pid, Opts) ! {self(), user_password}, + ?GET_INTERNAL_OPT(user_pid, Opts) ! {self(), user_password}, receive Answer when is_list(Answer) -> case trim(Answer) of diff --git a/lib/ssh/src/ssh_options.erl b/lib/ssh/src/ssh_options.erl new file mode 100644 index 0000000000..395be6b220 --- /dev/null +++ b/lib/ssh/src/ssh_options.erl @@ -0,0 +1,895 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2004-2017. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% %CopyrightEnd% +%% + +%% + +-module(ssh_options). + +-include("ssh.hrl"). +-include_lib("kernel/include/file.hrl"). + +-export([default/1, + get_value/5, get_value/6, + put_value/5, + handle_options/2 + ]). + +-export_type([options/0 + ]). 
+ +%%%================================================================ +%%% Types + +-type options() :: #{socket_options := socket_options(), + internal_options := internal_options(), + option_key() => any() + }. + +-type socket_options() :: proplists:proplist(). +-type internal_options() :: #{option_key() => any()}. + +-type option_key() :: atom(). + +-type option_in() :: proplists:property() | proplists:proplist() . + +-type option_class() :: internal_options | socket_options | user_options . + +-type option_declaration() :: #{class := user_options, + chk := fun((any) -> boolean() | {true,any()}), + default => any() + }. + +-type option_declarations() :: #{ {option_key(),def} := option_declaration() }. + +-type error() :: {error,{eoptions,any()}} . + +%%%================================================================ +%%% +%%% Get an option +%%% + +-spec get_value(option_class(), option_key(), options(), + atom(), non_neg_integer()) -> any() | no_return(). + +get_value(Class, Key, Opts, _CallerMod, _CallerLine) when is_map(Opts) -> + case Class of + internal_options -> maps:get(Key, maps:get(internal_options,Opts)); + socket_options -> proplists:get_value(Key, maps:get(socket_options,Opts)); + user_options -> maps:get(Key, Opts) + end; +get_value(Class, Key, Opts, _CallerMod, _CallerLine) -> + io:format("*** Bad Opts GET OPT ~p ~p:~p Key=~p,~n Opts=~p~n",[Class,_CallerMod,_CallerLine,Key,Opts]), + error({bad_options,Class, Key, Opts, _CallerMod, _CallerLine}). + + +-spec get_value(option_class(), option_key(), options(), any(), + atom(), non_neg_integer()) -> any() | no_return(). + +get_value(socket_options, Key, Opts, Def, _CallerMod, _CallerLine) when is_map(Opts) -> + proplists:get_value(Key, maps:get(socket_options,Opts), Def); +get_value(Class, Key, Opts, Def, CallerMod, CallerLine) when is_map(Opts) -> + try get_value(Class, Key, Opts, CallerMod, CallerLine) + catch + error:{badkey,Key} -> Def + end; +get_value(Class, Key, Opts, _Def, _CallerMod, _CallerLine) -> + io:format("*** Bad Opts GET OPT ~p ~p:~p Key=~p,~n Opts=~p~n",[Class,_CallerMod,_CallerLine,Key,Opts]), + error({bad_options,Class, Key, Opts, _CallerMod, _CallerLine}). + + +%%%================================================================ +%%% +%%% Put an option +%%% + +-spec put_value(option_class(), option_in(), options(), + atom(), non_neg_integer()) -> options(). + +put_value(user_options, KeyVal, Opts, _CallerMod, _CallerLine) when is_map(Opts) -> + put_user_value(KeyVal, Opts); + +put_value(internal_options, KeyVal, Opts, _CallerMod, _CallerLine) when is_map(Opts) -> + InternalOpts = maps:get(internal_options,Opts), + Opts#{internal_options := put_internal_value(KeyVal, InternalOpts)}; + +put_value(socket_options, KeyVal, Opts, _CallerMod, _CallerLine) when is_map(Opts) -> + SocketOpts = maps:get(socket_options,Opts), + Opts#{socket_options := put_socket_value(KeyVal, SocketOpts)}. + + +%%%---------------- +put_user_value(L, Opts) when is_list(L) -> + lists:foldl(fun put_user_value/2, Opts, L); +put_user_value({Key,Value}, Opts) -> + Opts#{Key := Value}. + +%%%---------------- +put_internal_value(L, IntOpts) when is_list(L) -> + lists:foldl(fun put_internal_value/2, IntOpts, L); +put_internal_value({Key,Value}, IntOpts) -> + IntOpts#{Key => Value}. + +%%%---------------- +put_socket_value(L, SockOpts) when is_list(L) -> + L ++ SockOpts; +put_socket_value({Key,Value}, SockOpts) -> + [{Key,Value} | SockOpts]; +put_socket_value(A, SockOpts) when is_atom(A) -> + [A | SockOpts]. 
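The ?GET_OPT, ?GET_INTERNAL_OPT, ?PUT_OPT and ?PUT_INTERNAL_OPT macros used throughout the rest of this patch are defined in ssh.hrl, which is not part of this excerpt. A minimal sketch of their assumed shape, wrapping the accessors above and forwarding the caller's module and line for the error reports:

%% Assumed macro definitions (illustrative only; the real ones live in ssh.hrl).
-define(GET_OPT(Key, Opts),
        ssh_options:get_value(user_options, Key, Opts, ?MODULE, ?LINE)).
-define(GET_INTERNAL_OPT(Key, Opts),
        ssh_options:get_value(internal_options, Key, Opts, ?MODULE, ?LINE)).
-define(GET_INTERNAL_OPT(Key, Opts, Def),
        ssh_options:get_value(internal_options, Key, Opts, Def, ?MODULE, ?LINE)).
-define(PUT_OPT(KeyVal, Opts),
        ssh_options:put_value(user_options, KeyVal, Opts, ?MODULE, ?LINE)).
-define(PUT_INTERNAL_OPT(KeyVal, Opts),
        ssh_options:put_value(internal_options, KeyVal, Opts, ?MODULE, ?LINE)).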
+ +%%%================================================================ +%%% +%%% Initialize the options +%%% + +-spec handle_options(role(), proplists:proplist()) -> options() | error() . + +-spec handle_options(role(), proplists:proplist(), options()) -> options() | error() . + +handle_options(Role, PropList0) -> + handle_options(Role, PropList0, #{socket_options => [], + internal_options => #{}, + user_options => [] + }). + +handle_options(Role, PropList0, Opts0) when is_map(Opts0), + is_list(PropList0) -> + PropList1 = proplists:unfold(PropList0), + try + OptionDefinitions = default(Role), + InitialMap = + maps:fold( + fun({K,def}, #{default:=V}, M) -> M#{K=>V}; + (_,_,M) -> M + end, + Opts0#{user_options => + maps:get(user_options,Opts0) ++ PropList1 + }, + OptionDefinitions), + %% Enter the user's values into the map; unknown keys are + %% treated as socket options + lists:foldl(fun(KV, Vals) -> + save(KV, OptionDefinitions, Vals) + end, InitialMap, PropList1) + catch + error:{eoptions, KV, undefined} -> + {error, {eoptions,KV}}; + + error:{eoptions, KV, Txt} when is_list(Txt) -> + {error, {eoptions,{KV,lists:flatten(Txt)}}}; + + error:{eoptions, KV, Extra} -> + {error, {eoptions,{KV,Extra}}} + end. + + +check_fun(Key, Defs) -> + #{chk := Fun} = maps:get({Key,def}, Defs), + Fun. + +%%%================================================================ +%%% +%%% Check and save one option +%%% + + +%%% First some prohibited inet options: +save({K,V}, _, _) when K == reuseaddr ; + K == active + -> + forbidden_option(K, V); + +%%% then compatibility conversions: +save({allow_user_interaction,V}, Opts, Vals) -> + save({user_interaction,V}, Opts, Vals); + +save({public_key_alg,V}, Defs, Vals) -> % To remove in OTP-20 + New = case V of + 'ssh-rsa' -> ['ssh-rsa', 'ssh-dss']; + ssh_rsa -> ['ssh-rsa', 'ssh-dss']; + 'ssh-dss' -> ['ssh-dss', 'ssh-rsa']; + ssh_dsa -> ['ssh-dss', 'ssh-rsa']; + _ -> error({eoptions, {public_key_alg,V}, + "Unknown algorithm, try pref_public_key_algs instead"}) + end, + save({pref_public_key_algs,New}, Defs, Vals); + +%% Special case for socket options 'inet' and 'inet6' +save(Inet, Defs, OptMap) when Inet==inet ; Inet==inet6 -> + save({inet,Inet}, Defs, OptMap); + +%% Two clauses to prepare for a proplists:unfold +save({Inet,true}, Defs, OptMap) when Inet==inet ; Inet==inet6 -> save({inet,Inet}, Defs, OptMap); +save({Inet,false}, _Defs, OptMap) when Inet==inet ; Inet==inet6 -> OptMap; + +%% and finaly the 'real stuff': +save({Key,Value}, Defs, OptMap) when is_map(OptMap) -> + try (check_fun(Key,Defs))(Value) + of + true -> + OptMap#{Key := Value}; + {true, ModifiedValue} -> + OptMap#{Key := ModifiedValue}; + false -> + error({eoptions, {Key,Value}, "Bad value"}) + catch + %% An unknown Key (= not in the definition map) is + %% regarded as an inet option: + error:{badkey,{inet,def}} -> + %% atomic (= non-tuple) options 'inet' and 'inet6': + OptMap#{socket_options := [Value | maps:get(socket_options,OptMap)]}; + error:{badkey,{Key,def}} -> + OptMap#{socket_options := [{Key,Value} | maps:get(socket_options,OptMap)]}; + + %% But a Key that is known but the value does not validate + %% by the check fun will give an error exception: + error:{check,{BadValue,Extra}} -> + error({eoptions, {Key,BadValue}, Extra}) + end. + +%%%================================================================ +%%% +%%% Default options +%%% + +-spec default(role() | common) -> option_declarations() . 
+ +default(server) -> + (default(common)) + #{ + {subsystems, def} => + #{default => [ssh_sftpd:subsystem_spec([])], + chk => fun(L) -> + is_list(L) andalso + lists:all(fun({Name,{CB,Args}}) -> + check_string(Name) andalso + is_atom(CB) andalso + is_list(Args); + (_) -> + false + end, L) + end, + class => user_options + }, + + {shell, def} => + #{default => {shell, start, []}, + chk => fun({M,F,A}) -> is_atom(M) andalso is_atom(F) andalso is_list(A); + (V) -> check_function1(V) orelse check_function2(V) + end, + class => user_options + }, + + {exec, def} => % FIXME: need some archeology.... + #{default => undefined, + chk => fun({M,F,_}) -> is_atom(M) andalso is_atom(F); + (V) -> is_function(V) + end, + class => user_options + }, + + {ssh_cli, def} => + #{default => undefined, + chk => fun({Cb, As}) -> is_atom(Cb) andalso is_list(As); + (V) -> V == no_cli + end, + class => user_options + }, + + {system_dir, def} => + #{default => "/etc/ssh", + chk => fun(V) -> check_string(V) andalso check_dir(V) end, + class => user_options + }, + + {auth_methods, def} => + #{default => ?SUPPORTED_AUTH_METHODS, + chk => fun check_string/1, + class => user_options + }, + + {auth_method_kb_interactive_data, def} => + #{default => undefined, % Default value can be constructed when User is known + chk => fun({S1,S2,S3,B}) -> + check_string(S1) andalso + check_string(S2) andalso + check_string(S3) andalso + is_boolean(B); + (F) -> + check_function3(F) + end, + class => user_options + }, + + {user_passwords, def} => + #{default => [], + chk => fun(V) -> + is_list(V) andalso + lists:all(fun({S1,S2}) -> + check_string(S1) andalso + check_string(S2) + end, V) + end, + class => user_options + }, + + {password, def} => + #{default => undefined, + chk => fun check_string/1, + class => user_options + }, + + {dh_gex_groups, def} => + #{default => undefined, + chk => fun check_dh_gex_groups/1, + class => user_options + }, + + {dh_gex_limits, def} => + #{default => {0, infinity}, + chk => fun({I1,I2}) -> + check_pos_integer(I1) andalso + check_pos_integer(I2) andalso + I1 < I2; + (_) -> + false + end, + class => user_options + }, + + {pwdfun, def} => + #{default => undefined, + chk => fun(V) -> check_function4(V) orelse check_function2(V) end, + class => user_options + }, + + {negotiation_timeout, def} => + #{default => 2*60*1000, + chk => fun check_timeout/1, + class => user_options + }, + + {max_sessions, def} => + #{default => infinity, + chk => fun check_pos_integer/1, + class => user_options + }, + + {max_channels, def} => + #{default => infinity, + chk => fun check_pos_integer/1, + class => user_options + }, + + {parallel_login, def} => + #{default => false, + chk => fun erlang:is_boolean/1, + class => user_options + }, + + {minimal_remote_max_packet_size, def} => + #{default => 0, + chk => fun check_pos_integer/1, + class => user_options + }, + + {failfun, def} => + #{default => fun(_,_,_) -> void end, + chk => fun(V) -> check_function3(V) orelse + check_function2(V) % Backwards compatibility + end, + class => user_options + }, + + {connectfun, def} => + #{default => fun(_,_,_) -> void end, + chk => fun check_function3/1, + class => user_options + }, + +%%%%% Undocumented + {infofun, def} => + #{default => fun(_,_,_) -> void end, + chk => fun(V) -> check_function3(V) orelse + check_function2(V) % Backwards compatibility + end, + class => user_options + } + }; + +default(client) -> + (default(common)) + #{ + {dsa_pass_phrase, def} => + #{default => undefined, + chk => fun check_string/1, + class => user_options + 
}, + + {rsa_pass_phrase, def} => + #{default => undefined, + chk => fun check_string/1, + class => user_options + }, + + {silently_accept_hosts, def} => + #{default => false, + chk => fun check_silently_accept_hosts/1, + class => user_options + }, + + {user_interaction, def} => + #{default => true, + chk => fun erlang:is_boolean/1, + class => user_options + }, + + {pref_public_key_algs, def} => + #{default => + %% Get dynamically supported keys in the order of the ?SUPPORTED_USER_KEYS + [A || A <- ?SUPPORTED_USER_KEYS, + lists:member(A, ssh_transport:supported_algorithms(public_key))], + chk => + fun check_pref_public_key_algs/1, + class => + ssh + }, + + {dh_gex_limits, def} => + #{default => {1024, 6144, 8192}, % FIXME: Is this true nowadays? + chk => fun({Min,I,Max}) -> + lists:all(fun check_pos_integer/1, + [Min,I,Max]); + (_) -> false + end, + class => user_options + }, + + {connect_timeout, def} => + #{default => infinity, + chk => fun check_timeout/1, + class => user_options + }, + + {user, def} => + #{default => + begin + Env = case os:type() of + {win32, _} -> "USERNAME"; + {unix, _} -> "LOGNAME" + end, + case os:getenv(Env) of + false -> + case os:getenv("USER") of + false -> undefined; + User -> User + end; + User -> + User + end + end, + chk => fun check_string/1, + class => user_options + }, + + {password, def} => + #{default => undefined, + chk => fun check_string/1, + class => user_options + }, + + {quiet_mode, def} => + #{default => false, + chk => fun erlang:is_boolean/1, + class => user_options + }, + + {idle_time, def} => + #{default => infinity, + chk => fun check_timeout/1, + class => user_options + }, + +%%%%% Undocumented + {keyboard_interact_fun, def} => + #{default => undefined, + chk => fun check_function3/1, + class => user_options + } + }; + +default(common) -> + #{ + {user_dir, def} => + #{default => false, % FIXME: TBD ~/.ssh at time of call when user is known + chk => fun(V) -> check_string(V) andalso check_dir(V) end, + class => user_options + }, + + {preferred_algorithms, def} => + #{default => ssh:default_algorithms(), + chk => fun check_preferred_algorithms/1, + class => user_options + }, + + {id_string, def} => + #{default => undefined, % FIXME: see ssh_transport:ssh_vsn/0 + chk => fun(random) -> + {true, {random,2,5}}; % 2 - 5 random characters + ({random,I1,I2}) -> + %% Undocumented + check_pos_integer(I1) andalso + check_pos_integer(I2) andalso + I1=<I2; + (V) -> + check_string(V) + end, + class => user_options + }, + + {key_cb, def} => + #{default => {ssh_file, []}, + chk => fun({Mod,Opts}) -> is_atom(Mod) andalso is_list(Opts); + (Mod) when is_atom(Mod) -> {true, {Mod,[]}}; + (_) -> false + end, + class => user_options + }, + + {profile, def} => + #{default => ?DEFAULT_PROFILE, + chk => fun erlang:is_atom/1, + class => user_options + }, + + %% This is a "SocketOption"... + %% {fd, def} => + %% #{default => undefined, + %% chk => fun erlang:is_integer/1, + %% class => user_options + %% }, + + {disconnectfun, def} => + #{default => fun(_) -> void end, + chk => fun check_function1/1, + class => user_options + }, + + {unexpectedfun, def} => + #{default => fun(_,_) -> report end, + chk => fun check_function2/1, + class => user_options + }, + + {ssh_msg_debug_fun, def} => + #{default => fun(_,_,_,_) -> void end, + chk => fun check_function4/1, + class => user_options + }, + + {rekey_limit, def} => % FIXME: Why not common? 
+ #{default => 1024000000, + chk => fun check_non_neg_integer/1, + class => user_options + }, + +%%%%% Undocumented + {transport, def} => + #{default => ?DEFAULT_TRANSPORT, + chk => fun({A,B,C}) -> + is_atom(A) andalso is_atom(B) andalso is_atom(C) + end, + class => user_options + }, + + {vsn, def} => + #{default => {2,0}, + chk => fun({Maj,Min}) -> check_non_neg_integer(Maj) andalso check_non_neg_integer(Min); + (_) -> false + end, + class => user_options + }, + + {tstflg, def} => + #{default => [], + chk => fun erlang:is_list/1, + class => user_options + }, + + {user_dir_fun, def} => + #{default => undefined, + chk => fun check_function1/1, + class => user_options + }, + + {max_random_length_padding, def} => + #{default => ?MAX_RND_PADDING_LEN, + chk => fun check_non_neg_integer/1, + class => user_options + } + }. + + +%%%================================================================ +%%%================================================================ +%%%================================================================ + +%%% +%%% check_*/1 -> true | false | error({check,Spec}) +%%% See error_in_check/2,3 +%%% + +%%% error_in_check(BadValue) -> error_in_check(BadValue, undefined). + +error_in_check(BadValue, Extra) -> error({check,{BadValue,Extra}}). + + +%%%---------------------------------------------------------------- +check_timeout(infinity) -> true; +check_timeout(I) -> check_pos_integer(I). + +%%%---------------------------------------------------------------- +check_pos_integer(I) -> is_integer(I) andalso I>0. + +%%%---------------------------------------------------------------- +check_non_neg_integer(I) -> is_integer(I) andalso I>=0. + +%%%---------------------------------------------------------------- +check_function1(F) -> is_function(F,1). +check_function2(F) -> is_function(F,2). +check_function3(F) -> is_function(F,3). +check_function4(F) -> is_function(F,4). + +%%%---------------------------------------------------------------- +check_pref_public_key_algs(V) -> + %% Get the dynamically supported keys, that is, thoose + %% that are stored + PKs = ssh_transport:supported_algorithms(public_key), + CHK = fun(A, Ack) -> + case lists:member(A, PKs) of + true -> + [A|Ack]; + false -> + %% Check with the documented options, that is, + %% the one we can handle + case lists:member(A,?SUPPORTED_USER_KEYS) of + false -> + %% An algorithm ssh never can handle + error_in_check(A, "Not supported public key"); + true -> + %% An algorithm ssh can handle, but not in + %% this very call + Ack + end + end + end, + case lists:foldr( + fun(ssh_dsa, Ack) -> CHK('ssh-dss', Ack); % compatibility + (ssh_rsa, Ack) -> CHK('ssh-rsa', Ack); % compatibility + (X, Ack) -> CHK(X, Ack) + end, [], V) + of + V -> true; + [] -> false; + V1 -> {true,V1} + end. + + +%%%---------------------------------------------------------------- +%% Check that it is a directory and is readable +check_dir(Dir) -> + case file:read_file_info(Dir) of + {ok, #file_info{type = directory, + access = Access}} -> + case Access of + read -> true; + read_write -> true; + _ -> error_in_check(Dir, eacces) + end; + + {ok, #file_info{}}-> + error_in_check(Dir, enotdir); + + {error, Error} -> + error_in_check(Dir, Error) + end. + +%%%---------------------------------------------------------------- +check_string(S) -> is_list(S). 
% FIXME: stub + +%%%---------------------------------------------------------------- +check_dh_gex_groups({file,File}) when is_list(File) -> + case file:consult(File) of + {ok, GroupDefs} -> + check_dh_gex_groups(GroupDefs); + {error, Error} -> + error_in_check({file,File},Error) + end; + +check_dh_gex_groups({ssh_moduli_file,File}) when is_list(File) -> + case file:open(File,[read]) of + {ok,D} -> + try + read_moduli_file(D, 1, []) + of + {ok,Moduli} -> + check_dh_gex_groups(Moduli); + {error,Error} -> + error_in_check({ssh_moduli_file,File}, Error) + catch + _:_ -> + error_in_check({ssh_moduli_file,File}, "Bad format in file "++File) + after + file:close(D) + end; + + {error, Error} -> + error_in_check({ssh_moduli_file,File}, Error) + end; + +check_dh_gex_groups(L0) when is_list(L0), is_tuple(hd(L0)) -> + {true, + collect_per_size( + lists:foldl( + fun({N,G,P}, Acc) when is_integer(N),N>0, + is_integer(G),G>0, + is_integer(P),P>0 -> + [{N,{G,P}} | Acc]; + ({N,{G,P}}, Acc) when is_integer(N),N>0, + is_integer(G),G>0, + is_integer(P),P>0 -> + [{N,{G,P}} | Acc]; + ({N,GPs}, Acc) when is_list(GPs) -> + lists:foldr(fun({Gi,Pi}, Acci) when is_integer(Gi),Gi>0, + is_integer(Pi),Pi>0 -> + [{N,{Gi,Pi}} | Acci] + end, Acc, GPs) + end, [], L0))}; + +check_dh_gex_groups(_) -> + false. + + + +collect_per_size(L) -> + lists:foldr( + fun({Sz,GP}, [{Sz,GPs}|Acc]) -> [{Sz,[GP|GPs]}|Acc]; + ({Sz,GP}, Acc) -> [{Sz,[GP]}|Acc] + end, [], lists:sort(L)). + +read_moduli_file(D, I, Acc) -> + case io:get_line(D,"") of + {error,Error} -> + {error,Error}; + eof -> + {ok, Acc}; + "#" ++ _ -> read_moduli_file(D, I+1, Acc); + <<"#",_/binary>> -> read_moduli_file(D, I+1, Acc); + Data -> + Line = if is_binary(Data) -> binary_to_list(Data); + is_list(Data) -> Data + end, + try + [_Time,_Class,_Tests,_Tries,Size,G,P] = string:tokens(Line," \r\n"), + M = {list_to_integer(Size), + {list_to_integer(G), list_to_integer(P,16)} + }, + read_moduli_file(D, I+1, [M|Acc]) + catch + _:_ -> + read_moduli_file(D, I+1, Acc) + end + end. + +%%%---------------------------------------------------------------- +-define(SHAs, [md5, sha, sha224, sha256, sha384, sha512]). + +check_silently_accept_hosts(B) when is_boolean(B) -> true; +check_silently_accept_hosts(F) when is_function(F,2) -> true; +check_silently_accept_hosts({S,F}) when is_atom(S), + is_function(F,2) -> + lists:member(S, ?SHAs) andalso + lists:member(S, proplists:get_value(hashs,crypto:supports())); +check_silently_accept_hosts({L,F}) when is_list(L), + is_function(F,2) -> + lists:all(fun(S) -> + lists:member(S, ?SHAs) andalso + lists:member(S, proplists:get_value(hashs,crypto:supports())) + end, L); +check_silently_accept_hosts(_) -> false. + +%%%---------------------------------------------------------------- +check_preferred_algorithms(Algs) -> + try alg_duplicates(Algs, [], []) + of + [] -> + {true, + [try ssh_transport:supported_algorithms(Key) + of + DefAlgs -> handle_pref_alg(Key,Vals,DefAlgs) + catch + _:_ -> error_in_check(Key,"Bad preferred_algorithms key") + end || {Key,Vals} <- Algs] + }; + + Dups -> + error_in_check(Dups, "Duplicates") + catch + _:_ -> + false + end. + +alg_duplicates([{K,V}|KVs], Ks, Dups0) -> + Dups = + case lists:member(K,Ks) of + true -> [K|Dups0]; + false -> Dups0 + end, + case V--lists:usort(V) of + [] -> alg_duplicates(KVs, [K|Ks], Dups); + Ds -> alg_duplicates(KVs, [K|Ks], Dups++Ds) + end; +alg_duplicates([], _Ks, Dups) -> + Dups. 
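A usage sketch of handle_options/2 with the checks above (the option values are illustrative): declared keys are validated by their chk funs and stored directly in the map, while keys without a {Key,def} declaration, such as nodelay here, fall through to the socket_options list.

example_client_options() ->
    case ssh_options:handle_options(client, [{user, "alice"},
                                             {silently_accept_hosts, true},
                                             {nodelay, true}]) of
        {error, {eoptions, Reason}} ->
            {error, Reason};
        Opts when is_map(Opts) ->
            "alice" = maps:get(user, Opts),
            true    = maps:get(silently_accept_hosts, Opts),
            %% nodelay has no {nodelay,def} declaration, so it was saved
            %% as a socket option:
            true    = proplists:get_value(nodelay,
                                          maps:get(socket_options, Opts)),
            Opts
    end.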
+ +handle_pref_alg(Key, + Vs=[{client2server,C2Ss=[_|_]},{server2client,S2Cs=[_|_]}], + [{client2server,Sup_C2Ss},{server2client,Sup_S2Cs}] + ) -> + chk_alg_vs(Key, C2Ss, Sup_C2Ss), + chk_alg_vs(Key, S2Cs, Sup_S2Cs), + {Key, Vs}; + +handle_pref_alg(Key, + Vs=[{server2client,[_|_]},{client2server,[_|_]}], + Sup=[{client2server,_},{server2client,_}] + ) -> + handle_pref_alg(Key, lists:reverse(Vs), Sup); + +handle_pref_alg(Key, + Vs=[V|_], + Sup=[{client2server,_},{server2client,_}] + ) when is_atom(V) -> + handle_pref_alg(Key, [{client2server,Vs},{server2client,Vs}], Sup); + +handle_pref_alg(Key, + Vs=[V|_], + Sup=[S|_] + ) when is_atom(V), is_atom(S) -> + chk_alg_vs(Key, Vs, Sup), + {Key, Vs}; + +handle_pref_alg(Key, Vs, _) -> + error_in_check({Key,Vs}, "Badly formed list"). + +chk_alg_vs(OptKey, Values, SupportedValues) -> + case (Values -- SupportedValues) of + [] -> Values; + Bad -> error_in_check({OptKey,Bad}, "Unsupported value(s) found") + end. + +%%%---------------------------------------------------------------- +forbidden_option(K,V) -> + Txt = io_lib:format("The option '~s' is used internally. The " + "user is not allowed to specify this option.", + [K]), + error({eoptions, {K,V}, Txt}). + +%%%---------------------------------------------------------------- diff --git a/lib/ssh/src/ssh_sftp.erl b/lib/ssh/src/ssh_sftp.erl index b937f0412d..140856c8e3 100644 --- a/lib/ssh/src/ssh_sftp.erl +++ b/lib/ssh/src/ssh_sftp.erl @@ -100,18 +100,14 @@ start_channel(Socket) when is_port(Socket) -> start_channel(Host) when is_list(Host) -> start_channel(Host, []). -start_channel(Socket, Options) when is_port(Socket) -> - Timeout = - %% A mixture of ssh:connect and ssh_sftp:start_channel: - case proplists:get_value(connect_timeout, Options, undefined) of - undefined -> - proplists:get_value(timeout, Options, infinity); - TO -> - TO - end, - case ssh:connect(Socket, Options, Timeout) of +start_channel(Socket, UserOptions) when is_port(Socket) -> + {SshOpts, _ChanOpts, SftpOpts} = handle_options(UserOptions), + Timeout = % A mixture of ssh:connect and ssh_sftp:start_channel: + proplists:get_value(connect_timeout, SshOpts, + proplists:get_value(timeout, SftpOpts, infinity)), + case ssh:connect(Socket, SshOpts, Timeout) of {ok,Cm} -> - case start_channel(Cm, Options) of + case start_channel(Cm, UserOptions) of {ok, Pid} -> {ok, Pid, Cm}; Error -> @@ -120,9 +116,9 @@ start_channel(Socket, Options) when is_port(Socket) -> Error -> Error end; -start_channel(Cm, Opts) when is_pid(Cm) -> - Timeout = proplists:get_value(timeout, Opts, infinity), - {_, ChanOpts, SftpOpts} = handle_options(Opts, [], [], []), +start_channel(Cm, UserOptions) when is_pid(Cm) -> + Timeout = proplists:get_value(timeout, UserOptions, infinity), + {_SshOpts, ChanOpts, SftpOpts} = handle_options(UserOptions), case ssh_xfer:attach(Cm, [], ChanOpts) of {ok, ChannelId, Cm} -> case ssh_channel:start(Cm, ChannelId, @@ -143,15 +139,17 @@ start_channel(Cm, Opts) when is_pid(Cm) -> Error end; -start_channel(Host, Opts) -> - start_channel(Host, 22, Opts). -start_channel(Host, Port, Opts) -> - {SshOpts, ChanOpts, SftpOpts} = handle_options(Opts, [], [], []), - Timeout = proplists:get_value(timeout, SftpOpts, infinity), +start_channel(Host, UserOptions) -> + start_channel(Host, 22, UserOptions). 
+ +start_channel(Host, Port, UserOptions) -> + {SshOpts, ChanOpts, SftpOpts} = handle_options(UserOptions), + Timeout = % A mixture of ssh:connect and ssh_sftp:start_channel: + proplists:get_value(connect_timeout, SshOpts, + proplists:get_value(timeout, SftpOpts, infinity)), case ssh_xfer:connect(Host, Port, SshOpts, ChanOpts, Timeout) of {ok, ChannelId, Cm} -> - case ssh_channel:start(Cm, ChannelId, ?MODULE, [Cm, - ChannelId, SftpOpts]) of + case ssh_channel:start(Cm, ChannelId, ?MODULE, [Cm,ChannelId,SftpOpts]) of {ok, Pid} -> case wait_for_version_negotiation(Pid, Timeout) of ok -> @@ -294,7 +292,7 @@ read(Pid, Handle, Len) -> read(Pid, Handle, Len, FileOpTimeout) -> call(Pid, {read,false,Handle, Len}, FileOpTimeout). -%% TODO this ought to be a cast! Is so in all practial meaning +%% TODO this ought to be a cast! Is so in all practical meaning %% even if it is obscure! apread(Pid, Handle, Offset, Len) -> call(Pid, {pread,true,Handle, Offset, Len}, infinity). @@ -313,12 +311,12 @@ write(Pid, Handle, Data) -> write(Pid, Handle, Data, FileOpTimeout) -> call(Pid, {write,false,Handle,Data}, FileOpTimeout). -%% TODO this ought to be a cast! Is so in all practial meaning +%% TODO this ought to be a cast! Is so in all practical meaning %% even if it is obscure! apwrite(Pid, Handle, Offset, Data) -> call(Pid, {pwrite,true,Handle,Offset,Data}, infinity). -%% TODO this ought to be a cast! Is so in all practial meaning +%% TODO this ought to be a cast! Is so in all practical meaning %% even if it is obscure! awrite(Pid, Handle, Data) -> call(Pid, {write,true,Handle,Data}, infinity). @@ -865,6 +863,9 @@ terminate(_Reason, State) -> %%==================================================================== %% Internal functions %%==================================================================== +handle_options(UserOptions) -> + handle_options(UserOptions, [], [], []). + handle_options([], Sftp, Chan, Ssh) -> {Ssh, Chan, Sftp}; handle_options([{timeout, _} = Opt | Rest], Sftp, Chan, Ssh) -> diff --git a/lib/ssh/src/ssh_sftpd.erl b/lib/ssh/src/ssh_sftpd.erl index b739955836..9352046795 100644 --- a/lib/ssh/src/ssh_sftpd.erl +++ b/lib/ssh/src/ssh_sftpd.erl @@ -664,29 +664,25 @@ open(Vsn, ReqId, Data, State) when Vsn >= 4 -> do_open(ReqId, State, Path, Flags). 
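A client-side usage sketch of the ssh_sftp:start_channel timeout handling above (host name and user are illustrative): connect_timeout is an ssh option used for the connection setup, with the sftp option timeout as fallback; timeout is also used while the channel is attached and the protocol version is negotiated.

open_sftp() ->
    %% connect_timeout feeds ssh:connect/3; timeout is the fallback for the
    %% connection and is used for channel start and version negotiation.
    ssh_sftp:start_channel("sftp.example.com",
                           [{connect_timeout, 5000},
                            {timeout, 10000},
                            {user, "alice"},
                            {silently_accept_hosts, true}]).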
do_open(ReqId, State0, Path, Flags) -> - #state{file_handler = FileMod, file_state = FS0, root = Root, xf = #ssh_xfer{vsn = Vsn}} = State0, - XF = State0#state.xf, - F = [binary | Flags], - {IsDir, _FS1} = FileMod:is_dir(Path, FS0), + #state{file_handler = FileMod, file_state = FS0, xf = #ssh_xfer{vsn = Vsn}} = State0, + AbsPath = relate_file_name(Path, State0), + {IsDir, _FS1} = FileMod:is_dir(AbsPath, FS0), case IsDir of true when Vsn > 5 -> ssh_xfer:xf_send_status(State0#state.xf, ReqId, - ?SSH_FX_FILE_IS_A_DIRECTORY, "File is a directory"); + ?SSH_FX_FILE_IS_A_DIRECTORY, "File is a directory"), + State0; true -> ssh_xfer:xf_send_status(State0#state.xf, ReqId, - ?SSH_FX_FAILURE, "File is a directory"); + ?SSH_FX_FAILURE, "File is a directory"), + State0; false -> - AbsPath = case Root of - "" -> - Path; - _ -> - relate_file_name(Path, State0) - end, - {Res, FS1} = FileMod:open(AbsPath, F, FS0), + OpenFlags = [binary | Flags], + {Res, FS1} = FileMod:open(AbsPath, OpenFlags, FS0), State1 = State0#state{file_state = FS1}, case Res of {ok, IoDevice} -> - add_handle(State1, XF, ReqId, file, {Path,IoDevice}); + add_handle(State1, State0#state.xf, ReqId, file, {Path,IoDevice}); {error, Error} -> ssh_xfer:xf_send_status(State1#state.xf, ReqId, ssh_xfer:encode_erlang_status(Error)), @@ -742,6 +738,10 @@ resolve_symlinks_2([], State, _LinkCnt, AccPath) -> {{ok, AccPath}, State}. +%% The File argument is always in a user visible file system, i.e. +%% is under Root and is relative to CWD or Root, if starts with "/". +%% The result of the function is always an absolute path in a +%% "backend" file system. relate_file_name(File, State) -> relate_file_name(File, State, _Canonicalize=true). @@ -749,19 +749,20 @@ relate_file_name(File, State, Canonicalize) when is_binary(File) -> relate_file_name(unicode:characters_to_list(File), State, Canonicalize); relate_file_name(File, #state{cwd = CWD, root = ""}, Canonicalize) -> relate_filename_to_path(File, CWD, Canonicalize); -relate_file_name(File, #state{root = Root}, Canonicalize) -> - case is_within_root(Root, File) of - true -> - File; - false -> - RelFile = make_relative_filename(File), - NewFile = relate_filename_to_path(RelFile, Root, Canonicalize), - case is_within_root(Root, NewFile) of - true -> - NewFile; - false -> - Root - end +relate_file_name(File, #state{cwd = CWD, root = Root}, Canonicalize) -> + CWD1 = case is_within_root(Root, CWD) of + true -> CWD; + false -> Root + end, + AbsFile = case make_relative_filename(File) of + File -> + relate_filename_to_path(File, CWD1, Canonicalize); + RelFile -> + relate_filename_to_path(RelFile, Root, Canonicalize) + end, + case is_within_root(Root, AbsFile) of + true -> AbsFile; + false -> Root end. is_within_root(Root, File) -> diff --git a/lib/ssh/src/ssh_subsystem_sup.erl b/lib/ssh/src/ssh_subsystem_sup.erl index 637f5f398f..cf82db458f 100644 --- a/lib/ssh/src/ssh_subsystem_sup.erl +++ b/lib/ssh/src/ssh_subsystem_sup.erl @@ -26,6 +26,8 @@ -behaviour(supervisor). +-include("ssh.hrl"). + -export([start_link/1, connection_supervisor/1, channel_supervisor/1 @@ -37,8 +39,8 @@ %%%========================================================================= %%% API %%%========================================================================= -start_link(Opts) -> - supervisor:start_link(?MODULE, [Opts]). +start_link(Options) -> + supervisor:start_link(?MODULE, [Options]). 
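A server-side sketch tying the ssh_sftpd changes together (directories are illustrative and must exist): with root set, relate_file_name/2 above maps every user-visible path into the root-confined backend file system, and do_open/4 now resolves the path before both the is_dir check and the open.

chrooted_sftp_daemon() ->
    Spec = ssh_sftpd:subsystem_spec([{root, "/srv/sftp"},
                                     {cwd,  "/srv/sftp/upload"}]),
    ssh:daemon(2222, [{system_dir, "/etc/ssh"},
                      {subsystems, [Spec]}]).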
connection_supervisor(SupPid) -> Children = supervisor:which_children(SupPid), @@ -53,42 +55,42 @@ channel_supervisor(SupPid) -> %%%========================================================================= -spec init( [term()] ) -> {ok,{supervisor:sup_flags(),[supervisor:child_spec()]}} | ignore . -init([Opts]) -> +init([Options]) -> RestartStrategy = one_for_all, MaxR = 0, MaxT = 3600, - Children = child_specs(Opts), + Children = child_specs(Options), {ok, {{RestartStrategy, MaxR, MaxT}, Children}}. %%%========================================================================= %%% Internal functions %%%========================================================================= -child_specs(Opts) -> - case proplists:get_value(role, Opts) of +child_specs(Options) -> + case ?GET_INTERNAL_OPT(role, Options) of client -> []; server -> - [ssh_channel_child_spec(Opts), ssh_connectinon_child_spec(Opts)] + [ssh_channel_child_spec(Options), ssh_connectinon_child_spec(Options)] end. -ssh_connectinon_child_spec(Opts) -> - Address = proplists:get_value(address, Opts), - Port = proplists:get_value(port, Opts), - Role = proplists:get_value(role, Opts), +ssh_connectinon_child_spec(Options) -> + Address = ?GET_INTERNAL_OPT(address, Options), + Port = ?GET_INTERNAL_OPT(port, Options), + Role = ?GET_INTERNAL_OPT(role, Options), Name = id(Role, ssh_connection_sup, Address, Port), - StartFunc = {ssh_connection_sup, start_link, [Opts]}, + StartFunc = {ssh_connection_sup, start_link, [Options]}, Restart = temporary, Shutdown = 5000, Modules = [ssh_connection_sup], Type = supervisor, {Name, StartFunc, Restart, Shutdown, Type, Modules}. -ssh_channel_child_spec(Opts) -> - Address = proplists:get_value(address, Opts), - Port = proplists:get_value(port, Opts), - Role = proplists:get_value(role, Opts), +ssh_channel_child_spec(Options) -> + Address = ?GET_INTERNAL_OPT(address, Options), + Port = ?GET_INTERNAL_OPT(port, Options), + Role = ?GET_INTERNAL_OPT(role, Options), Name = id(Role, ssh_channel_sup, Address, Port), - StartFunc = {ssh_channel_sup, start_link, [Opts]}, + StartFunc = {ssh_channel_sup, start_link, [Options]}, Restart = temporary, Shutdown = infinity, Modules = [ssh_channel_sup], diff --git a/lib/ssh/src/ssh_system_sup.erl b/lib/ssh/src/ssh_system_sup.erl index e97ac7b01a..b0bbd3aae5 100644 --- a/lib/ssh/src/ssh_system_sup.erl +++ b/lib/ssh/src/ssh_system_sup.erl @@ -45,12 +45,12 @@ %%%========================================================================= %%% Internal API %%%========================================================================= -start_link(ServerOpts) -> - Address = proplists:get_value(address, ServerOpts), - Port = proplists:get_value(port, ServerOpts), - Profile = proplists:get_value(profile, proplists:get_value(ssh_opts, ServerOpts), ?DEFAULT_PROFILE), +start_link(Options) -> + Address = ?GET_INTERNAL_OPT(address, Options), + Port = ?GET_INTERNAL_OPT(port, Options), + Profile = ?GET_OPT(profile, Options), Name = make_name(Address, Port, Profile), - supervisor:start_link({local, Name}, ?MODULE, [ServerOpts]). + supervisor:start_link({local, Name}, ?MODULE, [Options]). stop_listener(SysSup) -> stop_acceptor(SysSup). @@ -127,12 +127,12 @@ restart_acceptor(Address, Port, Profile) -> %%%========================================================================= -spec init( [term()] ) -> {ok,{supervisor:sup_flags(),[supervisor:child_spec()]}} | ignore . 
-init([ServerOpts]) -> +init([Options]) -> RestartStrategy = one_for_one, MaxR = 0, MaxT = 3600, - Children = case proplists:get_value(asocket,ServerOpts) of - undefined -> child_specs(ServerOpts); + Children = case ?GET_INTERNAL_OPT(asocket,Options,undefined) of + undefined -> child_specs(Options); _ -> [] end, {ok, {{RestartStrategy, MaxR, MaxT}, Children}}. @@ -140,24 +140,24 @@ init([ServerOpts]) -> %%%========================================================================= %%% Internal functions %%%========================================================================= -child_specs(ServerOpts) -> - [ssh_acceptor_child_spec(ServerOpts)]. +child_specs(Options) -> + [ssh_acceptor_child_spec(Options)]. -ssh_acceptor_child_spec(ServerOpts) -> - Address = proplists:get_value(address, ServerOpts), - Port = proplists:get_value(port, ServerOpts), - Profile = proplists:get_value(profile, proplists:get_value(ssh_opts, ServerOpts), ?DEFAULT_PROFILE), +ssh_acceptor_child_spec(Options) -> + Address = ?GET_INTERNAL_OPT(address, Options), + Port = ?GET_INTERNAL_OPT(port, Options), + Profile = ?GET_OPT(profile, Options), Name = id(ssh_acceptor_sup, Address, Port, Profile), - StartFunc = {ssh_acceptor_sup, start_link, [ServerOpts]}, + StartFunc = {ssh_acceptor_sup, start_link, [Options]}, Restart = transient, Shutdown = infinity, Modules = [ssh_acceptor_sup], Type = supervisor, {Name, StartFunc, Restart, Shutdown, Type, Modules}. -ssh_subsystem_child_spec(ServerOpts) -> +ssh_subsystem_child_spec(Options) -> Name = make_ref(), - StartFunc = {ssh_subsystem_sup, start_link, [ServerOpts]}, + StartFunc = {ssh_subsystem_sup, start_link, [Options]}, Restart = temporary, Shutdown = infinity, Modules = [ssh_subsystem_sup], diff --git a/lib/ssh/src/ssh_transport.erl b/lib/ssh/src/ssh_transport.erl index 4012ae3914..02c995399a 100644 --- a/lib/ssh/src/ssh_transport.erl +++ b/lib/ssh/src/ssh_transport.erl @@ -79,6 +79,10 @@ default_algorithms() -> [{K,default_algorithms(K)} || K <- algo_classes()]. algo_classes() -> [kex, public_key, cipher, mac, compression]. +default_algorithms(kex) -> + supported_algorithms(kex, [ + 'diffie-hellman-group1-sha1' % Gone in OpenSSH 7.3.p1 + ]); default_algorithms(cipher) -> supported_algorithms(cipher, same(['AEAD_AES_128_GCM', @@ -95,34 +99,39 @@ supported_algorithms() -> [{K,supported_algorithms(K)} || K <- algo_classes()]. 
supported_algorithms(kex) -> select_crypto_supported( [ - {'ecdh-sha2-nistp256', [{public_keys,ecdh}, {ec_curve,secp256r1}, {hashs,sha256}]}, {'ecdh-sha2-nistp384', [{public_keys,ecdh}, {ec_curve,secp384r1}, {hashs,sha384}]}, - {'diffie-hellman-group14-sha1', [{public_keys,dh}, {hashs,sha}]}, + {'ecdh-sha2-nistp521', [{public_keys,ecdh}, {ec_curve,secp521r1}, {hashs,sha512}]}, + {'ecdh-sha2-nistp256', [{public_keys,ecdh}, {ec_curve,secp256r1}, {hashs,sha256}]}, {'diffie-hellman-group-exchange-sha256', [{public_keys,dh}, {hashs,sha256}]}, + {'diffie-hellman-group16-sha512', [{public_keys,dh}, {hashs,sha512}]}, % In OpenSSH 7.3.p1 + {'diffie-hellman-group18-sha512', [{public_keys,dh}, {hashs,sha512}]}, % In OpenSSH 7.3.p1 + {'diffie-hellman-group14-sha256', [{public_keys,dh}, {hashs,sha256}]}, % In OpenSSH 7.3.p1 + {'diffie-hellman-group14-sha1', [{public_keys,dh}, {hashs,sha}]}, {'diffie-hellman-group-exchange-sha1', [{public_keys,dh}, {hashs,sha}]}, - {'ecdh-sha2-nistp521', [{public_keys,ecdh}, {ec_curve,secp521r1}, {hashs,sha512}]}, {'diffie-hellman-group1-sha1', [{public_keys,dh}, {hashs,sha}]} ]); supported_algorithms(public_key) -> select_crypto_supported( - [{'ecdsa-sha2-nistp256', [{public_keys,ecdsa}, {hashs,sha256}, {ec_curve,secp256r1}]}, + [ {'ecdsa-sha2-nistp384', [{public_keys,ecdsa}, {hashs,sha384}, {ec_curve,secp384r1}]}, {'ecdsa-sha2-nistp521', [{public_keys,ecdsa}, {hashs,sha512}, {ec_curve,secp521r1}]}, + {'ecdsa-sha2-nistp256', [{public_keys,ecdsa}, {hashs,sha256}, {ec_curve,secp256r1}]}, {'ssh-rsa', [{public_keys,rsa}, {hashs,sha} ]}, - {'ssh-dss', [{public_keys,dss}, {hashs,sha} ]} + {'ssh-dss', [{public_keys,dss}, {hashs,sha} ]} % Gone in OpenSSH 7.3.p1 ]); supported_algorithms(cipher) -> same( select_crypto_supported( - [{'aes256-ctr', [{ciphers,{aes_ctr,256}}]}, - {'aes192-ctr', [{ciphers,{aes_ctr,192}}]}, - {'aes128-ctr', [{ciphers,{aes_ctr,128}}]}, - {'aes128-cbc', [{ciphers,aes_cbc128}]}, + [ + {'[email protected]', [{ciphers,{aes_gcm,256}}]}, + {'aes256-ctr', [{ciphers,{aes_ctr,256}}]}, + {'aes192-ctr', [{ciphers,{aes_ctr,192}}]}, {'[email protected]', [{ciphers,{aes_gcm,128}}]}, - {'[email protected]', [{ciphers,{aes_gcm,256}}]}, - {'AEAD_AES_128_GCM', [{ciphers,{aes_gcm,128}}]}, + {'aes128-ctr', [{ciphers,{aes_ctr,128}}]}, {'AEAD_AES_256_GCM', [{ciphers,{aes_gcm,256}}]}, + {'AEAD_AES_128_GCM', [{ciphers,{aes_gcm,128}}]}, + {'aes128-cbc', [{ciphers,aes_cbc128}]}, {'3des-cbc', [{ciphers,des3_cbc}]} ] )); @@ -144,14 +153,14 @@ supported_algorithms(compression) -> %%%---------------------------------------------------------------------------- versions(client, Options)-> - Vsn = proplists:get_value(vsn, Options, ?DEFAULT_CLIENT_VERSION), + Vsn = ?GET_INTERNAL_OPT(vsn, Options, ?DEFAULT_CLIENT_VERSION), {Vsn, format_version(Vsn, software_version(Options))}; versions(server, Options) -> - Vsn = proplists:get_value(vsn, Options, ?DEFAULT_SERVER_VERSION), + Vsn = ?GET_INTERNAL_OPT(vsn, Options, ?DEFAULT_SERVER_VERSION), {Vsn, format_version(Vsn, software_version(Options))}. 
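The kex list above adds diffie-hellman-group14-sha256, -group16-sha512 and -group18-sha512. A sketch of preferring them explicitly through the preferred_algorithms option, assuming the underlying crypto library supports the corresponding hashes (host and user are illustrative):

connect_sha2_kex() ->
    ssh:connect("host.example.com", 22,
                [{preferred_algorithms,
                  [{kex, ['diffie-hellman-group16-sha512',
                          'diffie-hellman-group18-sha512',
                          'diffie-hellman-group14-sha256']}]},
                 {user, "alice"},
                 {silently_accept_hosts, true},
                 {user_interaction, false}]).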
software_version(Options) -> - case proplists:get_value(id_string, Options) of + case ?GET_OPT(id_string, Options) of undefined -> "Erlang"++ssh_vsn(); {random,Nlo,Nup} -> @@ -162,7 +171,7 @@ software_version(Options) -> ssh_vsn() -> try {ok,L} = application:get_all_key(ssh), - proplists:get_value(vsn,L,"") + proplists:get_value(vsn, L, "") of "" -> ""; VSN when is_list(VSN) -> "/" ++ VSN; @@ -223,13 +232,7 @@ key_exchange_init_msg(Ssh0) -> kex_init(#ssh{role = Role, opts = Opts, available_host_keys = HostKeyAlgs}) -> Random = ssh_bits:random(16), - PrefAlgs = - case proplists:get_value(preferred_algorithms,Opts) of - undefined -> - default_algorithms(); - Algs0 -> - Algs0 - end, + PrefAlgs = ?GET_OPT(preferred_algorithms, Opts), kexinit_message(Role, Random, PrefAlgs, HostKeyAlgs). key_init(client, Ssh, Value) -> @@ -275,11 +278,12 @@ handle_kexinit_msg(#ssh_msg_kexinit{} = CounterPart, #ssh_msg_kexinit{} = Own, true -> key_exchange_first_msg(Algoritms#alg.kex, Ssh0#ssh{algorithms = Algoritms}); - _ -> + {false,Alg} -> %% TODO: Correct code? ssh_connection_handler:disconnect( #ssh_msg_disconnect{code = ?SSH_DISCONNECT_KEY_EXCHANGE_FAILED, - description = "Selection of key exchange algorithm failed" + description = "Selection of key exchange algorithm failed: " + ++ Alg }) end; @@ -289,30 +293,39 @@ handle_kexinit_msg(#ssh_msg_kexinit{} = CounterPart, #ssh_msg_kexinit{} = Own, case verify_algorithm(Algoritms) of true -> {ok, Ssh#ssh{algorithms = Algoritms}}; - _ -> + {false,Alg} -> ssh_connection_handler:disconnect( #ssh_msg_disconnect{code = ?SSH_DISCONNECT_KEY_EXCHANGE_FAILED, - description = "Selection of key exchange algorithm failed" + description = "Selection of key exchange algorithm failed: " + ++ Alg }) end. -verify_algorithm(#alg{kex = undefined}) -> false; -verify_algorithm(#alg{hkey = undefined}) -> false; -verify_algorithm(#alg{send_mac = undefined}) -> false; -verify_algorithm(#alg{recv_mac = undefined}) -> false; -verify_algorithm(#alg{encrypt = undefined}) -> false; -verify_algorithm(#alg{decrypt = undefined}) -> false; -verify_algorithm(#alg{compress = undefined}) -> false; -verify_algorithm(#alg{decompress = undefined}) -> false; -verify_algorithm(#alg{kex = Kex}) -> lists:member(Kex, supported_algorithms(kex)). +verify_algorithm(#alg{kex = undefined}) -> {false, "kex"}; +verify_algorithm(#alg{hkey = undefined}) -> {false, "hkey"}; +verify_algorithm(#alg{send_mac = undefined}) -> {false, "send_mac"}; +verify_algorithm(#alg{recv_mac = undefined}) -> {false, "recv_mac"}; +verify_algorithm(#alg{encrypt = undefined}) -> {false, "encrypt"}; +verify_algorithm(#alg{decrypt = undefined}) -> {false, "decrypt"}; +verify_algorithm(#alg{compress = undefined}) -> {false, "compress"}; +verify_algorithm(#alg{decompress = undefined}) -> {false, "decompress"}; +verify_algorithm(#alg{kex = Kex}) -> + case lists:member(Kex, supported_algorithms(kex)) of + true -> true; + false -> {false, "kex"} + end. 
%%%---------------------------------------------------------------- %%% %%% Key exchange initialization %%% key_exchange_first_msg(Kex, Ssh0) when Kex == 'diffie-hellman-group1-sha1' ; - Kex == 'diffie-hellman-group14-sha1' -> + Kex == 'diffie-hellman-group14-sha1' ; + Kex == 'diffie-hellman-group14-sha256' ; + Kex == 'diffie-hellman-group16-sha512' ; + Kex == 'diffie-hellman-group18-sha512' + -> {G, P} = dh_group(Kex), Sz = dh_bits(Ssh0#ssh.algorithms), {Public, Private} = generate_key(dh, [P,G,2*Sz]), @@ -322,10 +335,7 @@ key_exchange_first_msg(Kex, Ssh0) when Kex == 'diffie-hellman-group1-sha1' ; key_exchange_first_msg(Kex, Ssh0=#ssh{opts=Opts}) when Kex == 'diffie-hellman-group-exchange-sha1' ; Kex == 'diffie-hellman-group-exchange-sha256' -> - {Min,NBits0,Max} = - proplists:get_value(dh_gex_limits, Opts, {?DEFAULT_DH_GROUP_MIN, - ?DEFAULT_DH_GROUP_NBITS, - ?DEFAULT_DH_GROUP_MAX}), + {Min,NBits0,Max} = ?GET_OPT(dh_gex_limits, Opts), DhBits = dh_bits(Ssh0#ssh.algorithms), NBits1 = %% NIST Special Publication 800-57 Part 1 Revision 4: Recommendation for Key Management @@ -358,6 +368,9 @@ key_exchange_first_msg(Kex, Ssh0) when Kex == 'ecdh-sha2-nistp256' ; %%% %%% diffie-hellman-group1-sha1 %%% diffie-hellman-group14-sha1 +%%% diffie-hellman-group14-sha256 +%%% diffie-hellman-group16-sha512 +%%% diffie-hellman-group18-sha512 %%% handle_kexdh_init(#ssh_msg_kexdh_init{e = E}, Ssh0 = #ssh{algorithms = #alg{kex=Kex} = Algs}) -> @@ -436,7 +449,7 @@ handle_kex_dh_gex_request(#ssh_msg_kex_dh_gex_request{min = Min0, %% server {Min, Max} = adjust_gex_min_max(Min0, Max0, Opts), case public_key:dh_gex_group(Min, NBits, Max, - proplists:get_value(dh_gex_groups,Opts)) of + ?GET_OPT(dh_gex_groups,Opts)) of {ok, {_, {G,P}}} -> {SshPacket, Ssh} = ssh_packet(#ssh_msg_kex_dh_gex_group{p = P, g = G}, Ssh0), @@ -459,7 +472,7 @@ handle_kex_dh_gex_request(#ssh_msg_kex_dh_gex_request_old{n = NBits}, %% This message was in the draft-00 of rfc4419 %% (https://tools.ietf.org/html/draft-ietf-secsh-dh-group-exchange-00) %% In later drafts and the rfc is "is used for backward compatibility". - %% Unfortunatly the rfc does not specify how to treat the parameter n + %% Unfortunately the rfc does not specify how to treat the parameter n %% if there is no group of that modulus length :( %% The draft-00 however specifies that n is the "... number of bits %% the subgroup should have at least". 
@@ -470,7 +483,7 @@ handle_kex_dh_gex_request(#ssh_msg_kex_dh_gex_request_old{n = NBits}, Max0 = 8192, {Min, Max} = adjust_gex_min_max(Min0, Max0, Opts), case public_key:dh_gex_group(Min, NBits, Max, - proplists:get_value(dh_gex_groups,Opts)) of + ?GET_OPT(dh_gex_groups,Opts)) of {ok, {_, {G,P}}} -> {SshPacket, Ssh} = ssh_packet(#ssh_msg_kex_dh_gex_group{p = P, g = G}, Ssh0), @@ -495,22 +508,18 @@ handle_kex_dh_gex_request(_, _) -> adjust_gex_min_max(Min0, Max0, Opts) -> - case proplists:get_value(dh_gex_limits, Opts) of - undefined -> - {Min0, Max0}; - {Min1, Max1} -> - Min2 = max(Min0, Min1), - Max2 = min(Max0, Max1), - if - Min2 =< Max2 -> - {Min2, Max2}; - Max2 < Min2 -> - ssh_connection_handler:disconnect( - #ssh_msg_disconnect{ - code = ?SSH_DISCONNECT_PROTOCOL_ERROR, - description = "No possible diffie-hellman-group-exchange group possible" - }) - end + {Min1, Max1} = ?GET_OPT(dh_gex_limits, Opts), + Min2 = max(Min0, Min1), + Max2 = min(Max0, Max1), + if + Min2 =< Max2 -> + {Min2, Max2}; + Max2 < Min2 -> + ssh_connection_handler:disconnect( + #ssh_msg_disconnect{ + code = ?SSH_DISCONNECT_PROTOCOL_ERROR, + description = "No possible diffie-hellman-group-exchange group possible" + }) end. @@ -697,9 +706,9 @@ sid(#ssh{session_id = Id}, _) -> %% The host key should be read from storage %% get_host_key(SSH) -> - #ssh{key_cb = Mod, opts = Opts, algorithms = ALG} = SSH, - - case Mod:host_key(ALG#alg.hkey, Opts) of + #ssh{key_cb = {KeyCb,KeyCbOpts}, opts = Opts, algorithms = ALG} = SSH, + UserOpts = ?GET_OPT(user_options, Opts), + case KeyCb:host_key(ALG#alg.hkey, [{key_cb_private,KeyCbOpts}|UserOpts]) of {ok, #'RSAPrivateKey'{} = Key} -> Key; {ok, #'DSAPrivateKey'{} = Key} -> Key; {ok, #'ECPrivateKey'{} = Key} -> Key; @@ -745,7 +754,7 @@ public_algo({#'ECPoint'{},{namedCurve,OID}}) -> accepted_host(Ssh, PeerName, Public, Opts) -> - case proplists:get_value(silently_accept_hosts, Opts, false) of + case ?GET_OPT(silently_accept_hosts, Opts) of F when is_function(F,2) -> true == (catch F(PeerName, public_key:ssh_hostkey_fingerprint(Public))); {DigestAlg,F} when is_function(F,2) -> @@ -756,16 +765,16 @@ accepted_host(Ssh, PeerName, Public, Opts) -> yes == yes_no(Ssh, "New host " ++ PeerName ++ " accept") end. -known_host_key(#ssh{opts = Opts, key_cb = Mod, peer = Peer} = Ssh, +known_host_key(#ssh{opts = Opts, key_cb = {KeyCb,KeyCbOpts}, peer = {PeerName,_}} = Ssh, Public, Alg) -> - PeerName = peer_name(Peer), - case Mod:is_host_key(Public, PeerName, Alg, Opts) of + UserOpts = ?GET_OPT(user_options, Opts), + case KeyCb:is_host_key(Public, PeerName, Alg, [{key_cb_private,KeyCbOpts}|UserOpts]) of true -> ok; false -> case accepted_host(Ssh, PeerName, Public, Opts) of true -> - Mod:add_host_key(PeerName, Public, Opts); + KeyCb:add_host_key(PeerName, Public, [{key_cb_private,KeyCbOpts}|UserOpts]); false -> {error, rejected} end @@ -1604,48 +1613,27 @@ mac('hmac-sha2-256', Key, SeqNum, Data) -> mac('hmac-sha2-512', Key, SeqNum, Data) -> crypto:hmac(sha512, Key, [<<?UINT32(SeqNum)>>, Data]). 
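get_host_key/1 and known_host_key/3 above now expect key_cb to be a {Module, ModuleOptions} tuple and hand the callback its options as {key_cb_private, ModuleOptions} together with the user options. A client-side sketch; my_key_store is an illustrative callback module, not part of the patch:

connect_with_key_cb() ->
    %% my_key_store is assumed to implement the ssh_client_key_api behaviour;
    %% it will find {key_cb_private, [{bucket, "prod"}]} in its Options argument.
    ssh:connect("host.example.com", 22,
                [{key_cb, {my_key_store, [{bucket, "prod"}]}},
                 {user, "alice"},
                 {silently_accept_hosts, true}]).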
-%% return N hash bytes (HASH) -hash(SSH, Char, Bits) -> - HASH = - case SSH#ssh.kex of - 'diffie-hellman-group1-sha1' -> - fun(Data) -> crypto:hash(sha, Data) end; - 'diffie-hellman-group14-sha1' -> - fun(Data) -> crypto:hash(sha, Data) end; - - 'diffie-hellman-group-exchange-sha1' -> - fun(Data) -> crypto:hash(sha, Data) end; - 'diffie-hellman-group-exchange-sha256' -> - fun(Data) -> crypto:hash(sha256, Data) end; - - 'ecdh-sha2-nistp256' -> - fun(Data) -> crypto:hash(sha256,Data) end; - 'ecdh-sha2-nistp384' -> - fun(Data) -> crypto:hash(sha384,Data) end; - 'ecdh-sha2-nistp521' -> - fun(Data) -> crypto:hash(sha512,Data) end; - _ -> - exit({bad_algorithm,SSH#ssh.kex}) - end, - hash(SSH, Char, Bits, HASH). -hash(_SSH, _Char, 0, _HASH) -> +%%%---------------------------------------------------------------- +%% return N hash bytes (HASH) +hash(_SSH, _Char, 0) -> <<>>; -hash(SSH, Char, N, HASH) -> -K = SSH#ssh.shared_secret, % K = ssh_bits:mpint(SSH#ssh.shared_secret), +hash(SSH, Char, N) -> + HashAlg = sha(SSH#ssh.kex), + K = SSH#ssh.shared_secret, H = SSH#ssh.exchanged_hash, - SessionID = SSH#ssh.session_id, - K1 = HASH([K, H, Char, SessionID]), + K1 = crypto:hash(HashAlg, [K, H, Char, SSH#ssh.session_id]), Sz = N div 8, - <<Key:Sz/binary, _/binary>> = hash(K, H, K1, N-128, HASH), + <<Key:Sz/binary, _/binary>> = hash(K, H, K1, N-128, HashAlg), Key. -hash(_K, _H, Ki, N, _HASH) when N =< 0 -> +hash(_K, _H, Ki, N, _HashAlg) when N =< 0 -> Ki; -hash(K, H, Ki, N, HASH) -> - Kj = HASH([K, H, Ki]), - hash(K, H, <<Ki/binary, Kj/binary>>, N-128, HASH). +hash(K, H, Ki, N, HashAlg) -> + Kj = crypto:hash(HashAlg, [K, H, Ki]), + hash(K, H, <<Ki/binary, Kj/binary>>, N-128, HashAlg). +%%%---------------------------------------------------------------- kex_h(SSH, Key, E, F, K) -> KeyBin = public_key:ssh_encode(Key, ssh2_pubkey), L = <<?Estring(SSH#ssh.c_version), ?Estring(SSH#ssh.s_version), @@ -1688,11 +1676,17 @@ sha(secp384r1) -> sha384; sha(secp521r1) -> sha512; sha('diffie-hellman-group1-sha1') -> sha; sha('diffie-hellman-group14-sha1') -> sha; +sha('diffie-hellman-group14-sha256') -> sha256; +sha('diffie-hellman-group16-sha512') -> sha512; +sha('diffie-hellman-group18-sha512') -> sha512; sha('diffie-hellman-group-exchange-sha1') -> sha; sha('diffie-hellman-group-exchange-sha256') -> sha256; sha(?'secp256r1') -> sha(secp256r1); sha(?'secp384r1') -> sha(secp384r1); -sha(?'secp521r1') -> sha(secp521r1). +sha(?'secp521r1') -> sha(secp521r1); +sha('ecdh-sha2-nistp256') -> sha(secp256r1); +sha('ecdh-sha2-nistp384') -> sha(secp384r1); +sha('ecdh-sha2-nistp521') -> sha(secp521r1). mac_key_bytes('hmac-sha1') -> 20; @@ -1715,9 +1709,6 @@ mac_digest_size('AEAD_AES_128_GCM') -> 16; mac_digest_size('AEAD_AES_256_GCM') -> 16; mac_digest_size(none) -> 0. -peer_name({Host, _}) -> - Host. - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% %% Diffie-Hellman utils @@ -1725,7 +1716,10 @@ peer_name({Host, _}) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% dh_group('diffie-hellman-group1-sha1') -> ?dh_group1; -dh_group('diffie-hellman-group14-sha1') -> ?dh_group14. +dh_group('diffie-hellman-group14-sha1') -> ?dh_group14; +dh_group('diffie-hellman-group14-sha256') -> ?dh_group14; +dh_group('diffie-hellman-group16-sha512') -> ?dh_group16; +dh_group('diffie-hellman-group18-sha512') -> ?dh_group18. 
%%%---------------------------------------------------------------- parallell_gen_key(Ssh = #ssh{keyex_key = {x, {G, P}}, @@ -1816,10 +1810,6 @@ len_supported(Name, Len) -> same(Algs) -> [{client2server,Algs}, {server2client,Algs}]. - -%% default_algorithms(kex) -> % Example of how to disable an algorithm -%% supported_algorithms(kex, ['ecdh-sha2-nistp521']); - %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% %% Other utils diff --git a/lib/ssh/src/ssh_transport.hrl b/lib/ssh/src/ssh_transport.hrl index f91cb1dd63..19b3f5c437 100644 --- a/lib/ssh/src/ssh_transport.hrl +++ b/lib/ssh/src/ssh_transport.hrl @@ -112,7 +112,7 @@ %% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%% diffie-hellman-group1-sha1 | diffie-hellman-group14-sha1 +%% diffie-hellman-group*-sha* -define(SSH_MSG_KEXDH_INIT, 30). -define(SSH_MSG_KEXDH_REPLY, 31). @@ -238,4 +238,15 @@ -define(dh_group14, {2, 16#FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF}). +%%% rfc 3526, ch5 +%%% Size 4096-bit +-define(dh_group16, + {2, 16#FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934063199FFFFFFFFFFFFFFFF}). 
+ +%%% rfc 3526, ch7 +%%% Size 8192-bit +-define(dh_group18, + {2, 16#FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C93402849236C3FAB4D27C7026C1D4DCB2602646DEC9751E763DBA37BDF8FF9406AD9E530EE5DB382F413001AEB06A53ED9027D831179727B0865A8918DA3EDBEBCF9B14ED44CE6CBACED4BB1BDB7F1447E6CC254B332051512BD7AF426FB8F401378CD2BF5983CA01C64B92ECF032EA15D1721D03F482D7CE6E74FEF6D55E702F46980C82B5A84031900B1C9E59E7C97FBEC7E8F323A97A7E36CC88BE0F1D45B7FF585AC54BD407B22B4154AACC8F6D7EBF48E1D814CC5ED20F8037E0A79715EEF29BE32806A1D58BB7C5DA76F550AA3D8A1FBFF0EB19CCB1A313D55CDA56C9EC2EF29632387FE8D76E3C0468043E8F663F4860EE12BF2D5B0B7474D6E694F91E6DBE115974A3926F12FEE5E438777CB6A932DF8CD8BEC4D073B931BA3BC832B68D9DD300741FA7BF8AFC47ED2576F6936BA424663AAB639C5AE4F5683423B4742BF1C978238F16CBE39D652DE3FDB8BEFC848AD922222E04A4037C0713EB57A81A23F0C73473FC646CEA306B4BCBC8862F8385DDFA9D4B7FA2C087E879683303ED5BDD3A062B3CF5B3A278A66D2A13F83F44F82DDF310EE074AB6A364597E899A0255DC164F31CC50846851DF9AB48195DED7EA1B1D510BD7EE74D73FAF36BC31ECFA268359046F4EB879F924009438B481C6CD7889A002ED5EE382BC9190DA6FC026E479558E4475677E9AA9E3050E2765694DFC81F56E880B96E7160C980DD98EDD3DFFFFFFFFFFFFFFFFF}). + + -endif. % -ifdef(ssh_transport). diff --git a/lib/ssh/src/sshd_sup.erl b/lib/ssh/src/sshd_sup.erl index 04d2df30f7..14f1937abd 100644 --- a/lib/ssh/src/sshd_sup.erl +++ b/lib/ssh/src/sshd_sup.erl @@ -41,13 +41,13 @@ start_link(Servers) -> supervisor:start_link({local, ?MODULE}, ?MODULE, [Servers]). -start_child(ServerOpts) -> - Address = proplists:get_value(address, ServerOpts), - Port = proplists:get_value(port, ServerOpts), - Profile = proplists:get_value(profile, proplists:get_value(ssh_opts, ServerOpts), ?DEFAULT_PROFILE), +start_child(Options) -> + Address = ?GET_INTERNAL_OPT(address, Options), + Port = ?GET_INTERNAL_OPT(port, Options), + Profile = ?GET_OPT(profile, Options), case ssh_system_sup:system_supervisor(Address, Port, Profile) of undefined -> - Spec = child_spec(Address, Port, ServerOpts), + Spec = child_spec(Address, Port, Options), case supervisor:start_child(?MODULE, Spec) of {error, already_present} -> Name = id(Address, Port, Profile), @@ -58,7 +58,7 @@ start_child(ServerOpts) -> end; Pid -> AccPid = ssh_system_sup:acceptor_supervisor(Pid), - ssh_acceptor_sup:start_child(AccPid, ServerOpts) + ssh_acceptor_sup:start_child(AccPid, Options) end. 
stop_child(Name) -> @@ -82,8 +82,8 @@ init([Servers]) -> MaxR = 10, MaxT = 3600, Fun = fun(ServerOpts) -> - Address = proplists:get_value(address, ServerOpts), - Port = proplists:get_value(port, ServerOpts), + Address = ?GET_INTERNAL_OPT(address, ServerOpts), + Port = ?GET_INTERNAL_OPT(port, ServerOpts), child_spec(Address, Port, ServerOpts) end, Children = lists:map(Fun, Servers), @@ -92,10 +92,10 @@ init([Servers]) -> %%%========================================================================= %%% Internal functions %%%========================================================================= -child_spec(Address, Port, ServerOpts) -> - Profile = proplists:get_value(profile, proplists:get_value(ssh_opts, ServerOpts), ?DEFAULT_PROFILE), +child_spec(Address, Port, Options) -> + Profile = ?GET_OPT(profile, Options), Name = id(Address, Port,Profile), - StartFunc = {ssh_system_sup, start_link, [ServerOpts]}, + StartFunc = {ssh_system_sup, start_link, [Options]}, Restart = temporary, Shutdown = infinity, Modules = [ssh_system_sup], diff --git a/lib/ssh/test/ssh_algorithms_SUITE.erl b/lib/ssh/test/ssh_algorithms_SUITE.erl index 14605ee44f..6f75d83c4a 100644 --- a/lib/ssh/test/ssh_algorithms_SUITE.erl +++ b/lib/ssh/test/ssh_algorithms_SUITE.erl @@ -58,9 +58,11 @@ groups() -> || {Tag,Algs} <- ErlAlgos, lists:member(Tag,tags()) ], + + TypeSSH = ssh_test_lib:ssh_type(), AlgoTcSet = - [{Alg, [parallel], specific_test_cases(Tag,Alg,SshcAlgos,SshdAlgos)} + [{Alg, [parallel], specific_test_cases(Tag,Alg,SshcAlgos,SshdAlgos,TypeSSH)} || {Tag,Algs} <- ErlAlgos ++ DoubleAlgos, Alg <- Algs], @@ -198,8 +200,9 @@ try_exec_simple_group(Group, Config) -> %%-------------------------------------------------------------------- %% Testing all default groups -simple_exec_groups() -> [{timetrap,{minutes,8}}]. - +simple_exec_groups() -> + [{timetrap,{seconds,120}}]. + simple_exec_groups(Config) -> Sizes = interpolate( public_key:dh_gex_group_sizes() ), lists:foreach( @@ -315,18 +318,13 @@ concat(A1, A2) -> list_to_atom(lists:concat([A1," + ",A2])). split(Alg) -> ssh_test_lib:to_atoms(string:tokens(atom_to_list(Alg), " + ")). 
-specific_test_cases(Tag, Alg, SshcAlgos, SshdAlgos) -> +specific_test_cases(Tag, Alg, SshcAlgos, SshdAlgos, TypeSSH) -> [simple_exec, simple_sftp] ++ case supports(Tag, Alg, SshcAlgos) of - true -> - case ssh_test_lib:ssh_type() of - openSSH -> - [sshc_simple_exec_os_cmd]; - _ -> - [] - end; - false -> - [] + true when TypeSSH == openSSH -> + [sshc_simple_exec_os_cmd]; + _ -> + [] end ++ case supports(Tag, Alg, SshdAlgos) of true -> diff --git a/lib/ssh/test/ssh_basic_SUITE.erl b/lib/ssh/test/ssh_basic_SUITE.erl index 0a0ab5cdf7..cdf6cf9ae1 100644 --- a/lib/ssh/test/ssh_basic_SUITE.erl +++ b/lib/ssh/test/ssh_basic_SUITE.erl @@ -152,15 +152,27 @@ end_per_suite(_Config) -> %%-------------------------------------------------------------------- init_per_group(dsa_key, Config) -> - DataDir = proplists:get_value(data_dir, Config), - PrivDir = proplists:get_value(priv_dir, Config), - ssh_test_lib:setup_dsa(DataDir, PrivDir), - Config; + case lists:member('ssh-dss', + ssh_transport:default_algorithms(public_key)) of + true -> + DataDir = proplists:get_value(data_dir, Config), + PrivDir = proplists:get_value(priv_dir, Config), + ssh_test_lib:setup_dsa(DataDir, PrivDir), + Config; + false -> + {skip, unsupported_pub_key} + end; init_per_group(rsa_key, Config) -> - DataDir = proplists:get_value(data_dir, Config), - PrivDir = proplists:get_value(priv_dir, Config), - ssh_test_lib:setup_rsa(DataDir, PrivDir), - Config; + case lists:member('ssh-rsa', + ssh_transport:default_algorithms(public_key)) of + true -> + DataDir = proplists:get_value(data_dir, Config), + PrivDir = proplists:get_value(priv_dir, Config), + ssh_test_lib:setup_rsa(DataDir, PrivDir), + Config; + false -> + {skip, unsupported_pub_key} + end; init_per_group(ecdsa_sha2_nistp256_key, Config) -> case lists:member('ecdsa-sha2-nistp256', ssh_transport:default_algorithms(public_key)) of @@ -195,15 +207,27 @@ init_per_group(ecdsa_sha2_nistp521_key, Config) -> {skip, unsupported_pub_key} end; init_per_group(rsa_pass_key, Config) -> - DataDir = proplists:get_value(data_dir, Config), - PrivDir = proplists:get_value(priv_dir, Config), - ssh_test_lib:setup_rsa_pass_pharse(DataDir, PrivDir, "Password"), - [{pass_phrase, {rsa_pass_phrase, "Password"}}| Config]; + case lists:member('ssh-rsa', + ssh_transport:default_algorithms(public_key)) of + true -> + DataDir = proplists:get_value(data_dir, Config), + PrivDir = proplists:get_value(priv_dir, Config), + ssh_test_lib:setup_rsa_pass_pharse(DataDir, PrivDir, "Password"), + [{pass_phrase, {rsa_pass_phrase, "Password"}}| Config]; + false -> + {skip, unsupported_pub_key} + end; init_per_group(dsa_pass_key, Config) -> - DataDir = proplists:get_value(data_dir, Config), - PrivDir = proplists:get_value(priv_dir, Config), - ssh_test_lib:setup_dsa_pass_pharse(DataDir, PrivDir, "Password"), - [{pass_phrase, {dsa_pass_phrase, "Password"}}| Config]; + case lists:member('ssh-dss', + ssh_transport:default_algorithms(public_key)) of + true -> + DataDir = proplists:get_value(data_dir, Config), + PrivDir = proplists:get_value(priv_dir, Config), + ssh_test_lib:setup_dsa_pass_pharse(DataDir, PrivDir, "Password"), + [{pass_phrase, {dsa_pass_phrase, "Password"}}| Config]; + false -> + {skip, unsupported_pub_key} + end; init_per_group(host_user_key_differs, Config) -> Data = proplists:get_value(data_dir, Config), Sys = filename:join(proplists:get_value(priv_dir, Config), system_rsa), @@ -220,10 +244,16 @@ init_per_group(host_user_key_differs, Config) -> ssh_test_lib:setup_rsa_known_host(Sys, Usr), Config; 
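The reworked init_per_group clauses above skip a key group whenever the corresponding public-key algorithm is not available in the build. Outside the suites the same check can be made against the public ssh:default_algorithms/0 proplist; a sketch, with the algorithm atom as an example value.

supports_pubkey_alg(Alg) ->
    %% ssh:default_algorithms/0 returns e.g. [{kex,[...]},{public_key,[...]},...]
    PubKeys = proplists:get_value(public_key, ssh:default_algorithms(), []),
    lists:member(Alg, PubKeys).

%% supports_pubkey_alg('ssh-dss') -> true | false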
init_per_group(key_cb, Config) -> - DataDir = proplists:get_value(data_dir, Config), - PrivDir = proplists:get_value(priv_dir, Config), - ssh_test_lib:setup_dsa(DataDir, PrivDir), - Config; + case lists:member('ssh-rsa', + ssh_transport:default_algorithms(public_key)) of + true -> + DataDir = proplists:get_value(data_dir, Config), + PrivDir = proplists:get_value(priv_dir, Config), + ssh_test_lib:setup_rsa(DataDir, PrivDir), + Config; + false -> + {skip, unsupported_pub_key} + end; init_per_group(internal_error, Config) -> DataDir = proplists:get_value(data_dir, Config), PrivDir = proplists:get_value(priv_dir, Config), @@ -293,7 +323,7 @@ end_per_group(rsa_pass_key, Config) -> Config; end_per_group(key_cb, Config) -> PrivDir = proplists:get_value(priv_dir, Config), - ssh_test_lib:clean_dsa(PrivDir), + ssh_test_lib:clean_rsa(PrivDir), Config; end_per_group(internal_error, Config) -> PrivDir = proplists:get_value(priv_dir, Config), @@ -750,7 +780,7 @@ key_callback_options(Config) when is_list(Config) -> {user_dir, UserDir}, {failfun, fun ssh_test_lib:failfun/2}]), - {ok, PrivKey} = file:read_file(filename:join(UserDir, "id_dsa")), + {ok, PrivKey} = file:read_file(filename:join(UserDir, "id_rsa")), ConnectOpts = [{silently_accept_hosts, true}, {user_dir, NoPubKeyDir}, @@ -1206,7 +1236,7 @@ check_error("Invalid state") -> ok; check_error("Connection closed") -> ok; -check_error("Selection of key exchange algorithm failed") -> +check_error("Selection of key exchange algorithm failed"++_) -> ok; check_error(Error) -> ct:fail(Error). diff --git a/lib/ssh/test/ssh_benchmark_SUITE.erl b/lib/ssh/test/ssh_benchmark_SUITE.erl index 85750f8fbd..fc90750455 100644 --- a/lib/ssh/test/ssh_benchmark_SUITE.erl +++ b/lib/ssh/test/ssh_benchmark_SUITE.erl @@ -139,7 +139,6 @@ openssh_client_shell(Config, Options) -> {ok, TracerPid} = erlang_trace(), {ServerPid, _Host, Port} = ssh_test_lib:daemon([{system_dir, SystemDir}, - {public_key_alg, ssh_dsa}, {failfun, fun ssh_test_lib:failfun/2} | Options]), ct:sleep(500), @@ -215,7 +214,6 @@ openssh_client_sftp(Config, Options) -> {ok, TracerPid} = erlang_trace(), {ServerPid, _Host, Port} = ssh_test_lib:daemon([{system_dir, SystemDir}, - {public_key_alg, ssh_dsa}, {subsystems,[ssh_sftpd:subsystem_spec([%{cwd, SftpSrcDir}, {root, SftpSrcDir}])]}, {failfun, fun ssh_test_lib:failfun/2} diff --git a/lib/ssh/test/ssh_key_cb.erl b/lib/ssh/test/ssh_key_cb.erl index 388ec2ecc1..12ff79efcd 100644 --- a/lib/ssh/test/ssh_key_cb.erl +++ b/lib/ssh/test/ssh_key_cb.erl @@ -33,9 +33,9 @@ add_host_key(_, _, _) -> is_host_key(_, _, _, _) -> true. -user_key('ssh-dss', Opts) -> +user_key('ssh-rsa', Opts) -> UserDir = proplists:get_value(user_dir, Opts), - KeyFile = filename:join(filename:dirname(UserDir), "id_dsa"), + KeyFile = filename:join(filename:dirname(UserDir), "id_rsa"), {ok, KeyBin} = file:read_file(KeyFile), [Entry] = public_key:pem_decode(KeyBin), Key = public_key:pem_entry_decode(Entry), diff --git a/lib/ssh/test/ssh_key_cb_options.erl b/lib/ssh/test/ssh_key_cb_options.erl index afccb34f0f..946a1254d0 100644 --- a/lib/ssh/test/ssh_key_cb_options.erl +++ b/lib/ssh/test/ssh_key_cb_options.erl @@ -33,7 +33,7 @@ add_host_key(_, _, _) -> is_host_key(_, _, _, _) -> true. 
-user_key('ssh-dss', Opts) -> +user_key('ssh-rsa', Opts) -> KeyCbOpts = proplists:get_value(key_cb_private, Opts), KeyBin = proplists:get_value(priv_key, KeyCbOpts), [Entry] = public_key:pem_decode(KeyBin), diff --git a/lib/ssh/test/ssh_options_SUITE.erl b/lib/ssh/test/ssh_options_SUITE.erl index 86f5cb1746..758c20e2b8 100644 --- a/lib/ssh/test/ssh_options_SUITE.erl +++ b/lib/ssh/test/ssh_options_SUITE.erl @@ -67,7 +67,8 @@ hostkey_fingerprint_check_sha/1, hostkey_fingerprint_check_sha256/1, hostkey_fingerprint_check_sha384/1, - hostkey_fingerprint_check_sha512/1 + hostkey_fingerprint_check_sha512/1, + hostkey_fingerprint_check_list/1 ]). %%% Common test callbacks @@ -112,6 +113,7 @@ all() -> hostkey_fingerprint_check_sha256, hostkey_fingerprint_check_sha384, hostkey_fingerprint_check_sha512, + hostkey_fingerprint_check_list, id_string_no_opt_client, id_string_own_string_client, id_string_random_client, @@ -148,6 +150,7 @@ init_per_group(hardening_tests, Config) -> DataDir = proplists:get_value(data_dir, Config), PrivDir = proplists:get_value(priv_dir, Config), ssh_test_lib:setup_dsa(DataDir, PrivDir), + ssh_test_lib:setup_rsa(DataDir, PrivDir), Config; init_per_group(dir_options, Config) -> PrivDir = proplists:get_value(priv_dir, Config), @@ -812,6 +815,8 @@ hostkey_fingerprint_check_sha384(Config) -> hostkey_fingerprint_check_sha512(Config) -> do_hostkey_fingerprint_check(Config, sha512). +hostkey_fingerprint_check_list(Config) -> + do_hostkey_fingerprint_check(Config, [sha,md5,sha256]). %%%---- do_hostkey_fingerprint_check(Config, HashAlg) -> @@ -824,9 +829,10 @@ do_hostkey_fingerprint_check(Config, HashAlg) -> supported_hash(old) -> true; supported_hash(HashAlg) -> - proplists:get_value(HashAlg, - proplists:get_value(hashs, crypto:supports(), []), - false). + Hs = if is_atom(HashAlg) -> [HashAlg]; + is_list(HashAlg) -> HashAlg + end, + [] == (Hs -- proplists:get_value(hashs, crypto:supports(), [])). really_do_hostkey_fingerprint_check(Config, HashAlg) -> @@ -840,7 +846,7 @@ really_do_hostkey_fingerprint_check(Config, HashAlg) -> %% All host key fingerprints. Trust that public_key has checked the ssh_hostkey_fingerprint %% function since that function is used by the ssh client... - FPs = [case HashAlg of + FPs0 = [case HashAlg of old -> public_key:ssh_hostkey_fingerprint(Key); _ -> public_key:ssh_hostkey_fingerprint(HashAlg, Key) end @@ -856,6 +862,9 @@ really_do_hostkey_fingerprint_check(Config, HashAlg) -> _:_ -> [] end end], + FPs = if is_atom(HashAlg) -> FPs0; + is_list(HashAlg) -> lists:concat(FPs0) + end, ct:log("Fingerprints(~p) = ~p",[HashAlg,FPs]), %% Start daemon with the public keys that we got fingerprints from @@ -866,8 +875,12 @@ really_do_hostkey_fingerprint_check(Config, HashAlg) -> FP_check_fun = fun(PeerName, FP) -> ct:pal("PeerName = ~p, FP = ~p",[PeerName,FP]), HostCheck = (Host == PeerName), - FPCheck = lists:member(FP, FPs), - ct:log("check ~p == ~p (~p) and ~n~p in ~p (~p)~n", + FPCheck = + if is_atom(HashAlg) -> lists:member(FP, FPs); + is_list(HashAlg) -> lists:all(fun(FP1) -> lists:member(FP1,FPs) end, + FP) + end, + ct:log("check ~p == ~p (~p) and ~n~p~n in ~p (~p)~n", [PeerName,Host,HostCheck,FP,FPs,FPCheck]), HostCheck and FPCheck end, diff --git a/lib/ssh/test/ssh_protocol_SUITE.erl b/lib/ssh/test/ssh_protocol_SUITE.erl index 93d0bc2eb0..2c4fa8be88 100644 --- a/lib/ssh/test/ssh_protocol_SUITE.erl +++ b/lib/ssh/test/ssh_protocol_SUITE.erl @@ -34,6 +34,12 @@ -define(NEWLINE, <<"\r\n">>). -define(REKEY_DATA_TMO, 65000). 
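The new hostkey_fingerprint_check_list case exercises silently_accept_hosts with a list of digest algorithms, in which case the accept fun is assumed to receive one fingerprint per requested digest. A hedged sketch of such a client option; the {[Alg, ...], Fun} shape and the known-fingerprint list are assumptions based on the test code above.

accept_known_fingerprints(Host, KnownFPs) ->
    Check = fun(_PeerName, FPs) ->
                    %% FPs is assumed to be a list here; require every reported
                    %% fingerprint to be one we already know.
                    lists:all(fun(FP) -> lists:member(FP, KnownFPs) end, FPs)
            end,
    ssh:connect(Host, 22,
                [{silently_accept_hosts, {[sha, md5, sha256], Check}},
                 {user_interaction, false}]).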
+%%-define(DEFAULT_KEX, 'diffie-hellman-group1-sha1'). +-define(DEFAULT_KEX, 'diffie-hellman-group14-sha256'). + +-define(CIPHERS, ['aes256-ctr','aes192-ctr','aes128-ctr','aes128-cbc','3des-cbc']). +-define(DEFAULT_CIPHERS, [{client2server,?CIPHERS}, {server2client,?CIPHERS}]). + -define(v(Key, Config), proplists:get_value(Key, Config)). -define(v(Key, Config, Default), proplists:get_value(Key, Config, Default)). @@ -97,7 +103,9 @@ end_per_suite(Config) -> init_per_testcase(no_common_alg_server_disconnects, Config) -> - start_std_daemon(Config, [{preferred_algorithms,[{public_key,['ssh-rsa']}]}]); + start_std_daemon(Config, [{preferred_algorithms,[{public_key,['ssh-rsa']}, + {cipher,?DEFAULT_CIPHERS} + ]}]); init_per_testcase(TC, Config) when TC == gex_client_init_option_groups ; TC == gex_client_init_option_groups_moduli_file ; @@ -107,7 +115,10 @@ init_per_testcase(TC, Config) when TC == gex_client_init_option_groups ; TC == gex_client_old_request_noexact -> Opts = case TC of gex_client_init_option_groups -> - [{dh_gex_groups, [{2345, 3, 41}]}]; + [{dh_gex_groups, + [{1023, 5, + 16#D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A770E2EC9F + }]}]; gex_client_init_option_groups_file -> DataDir = proplists:get_value(data_dir, Config), F = filename:join(DataDir, "dh_group_test"), @@ -119,16 +130,19 @@ init_per_testcase(TC, Config) when TC == gex_client_init_option_groups ; _ when TC == gex_server_gex_limit ; TC == gex_client_old_request_exact ; TC == gex_client_old_request_noexact -> - [{dh_gex_groups, [{ 500, 3, 17}, - {1000, 7, 91}, - {3000, 5, 61}]}, - {dh_gex_limits,{500,1500}} + [{dh_gex_groups, + [{1023, 2, 16#D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A771225323}, + {1535, 5, 16#D1391174233D315398FE2830AC6B2B66BCCD01B0A634899F339B7879F1DB85712E9DC4E4B1C6C8355570C1D2DCB53493DF18175A9C53D1128B592B4C72D97136F5542FEB981CBFE8012FDD30361F288A42BD5EBB08BAB0A5640E1AC48763B2ABD1945FEE36B2D55E1D50A1C86CED9DD141C4E7BE2D32D9B562A0F8E2E927020E91F58B57EB9ACDDA106A59302D7E92AD5F6E851A45FA1CFE86029A0F727F65A8F475F33572E2FDAB6073F0C21B8B54C3823DB2EF068927E5D747498F96E1E827}, + {3071, 2, 16#DFAA35D35531E0F524F0099877A482D2AC8D589F374394A262A8E81A8A4FB2F65FADBAB395E05D147B29D486DFAA41F41597A256DA82A8B6F76401AED53D0253F956CEC610D417E42E3B287F7938FC24D8821B40BFA218A956EB7401BED6C96C68C7FD64F8170A8A76B953DD2F05420118F6B144D8FE48060A2BCB85056B478EDEF96DBC70427053ECD2958C074169E9550DD877779A3CF17C5AC850598C7586BEEA9DCFE9DD2A5FB62DF5F33EA7BC00CDA31B9D2DD721F979EA85B6E63F0C4E30BDDCD3A335522F9004C4ED50B15DC537F55324DD4FA119FB3F101467C6D7E1699DE4B3E3C478A8679B8EB3FA5C9B826B44530FD3BE9AD3063B240B0C853EBDDBD68DD940332D98F148D5D9E1DC977D60A0D23D0CA1198637FEAE4E7FAAC173AF2B84313A666CFB4EE6972811921D0AD867CE57F3BBC8D6CB057E3B66757BB46C9F72662624D44E14528327E3A7100E81A12C43C4E236118318CD90C8AA185BBB0C764826DAEAEE8DD245C5B451B4944E6122CC522D1C335C2EEF9429825A2B} + ]}, + {dh_gex_limits, {1023,2000}} ]; _ -> [] end, start_std_daemon(Config, - [{preferred_algorithms, ssh:default_algorithms()} + [{preferred_algorithms,[{cipher,?DEFAULT_CIPHERS} + ]} | Opts]); init_per_testcase(_TestCase, Config) -> check_std_daemon_works(Config, 
?LINE). @@ -237,7 +251,10 @@ lib_works_as_server(Config) -> %% and finally connect to it with a regular Erlang SSH client: {ok,_} = std_connect(HostPort, Config, - [{preferred_algorithms,[{kex,['diffie-hellman-group1-sha1']}]}] + [{preferred_algorithms,[{kex,[?DEFAULT_KEX]}, + {cipher,?DEFAULT_CIPHERS} + ]} + ] ). %%-------------------------------------------------------------------- @@ -277,7 +294,9 @@ no_common_alg_server_disconnects(Config) -> [{silently_accept_hosts, true}, {user_dir, user_dir(Config)}, {user_interaction, false}, - {preferred_algorithms,[{public_key,['ssh-dss']}]} + {preferred_algorithms,[{public_key,['ssh-dss']}, + {cipher,?DEFAULT_CIPHERS} + ]} ]}, receive_hello, {send, hello}, @@ -311,7 +330,7 @@ no_common_alg_client_disconnects(Config) -> {match, #ssh_msg_kexinit{_='_'}, receive_msg}, {send, #ssh_msg_kexinit{ % with unsupported "SOME-UNSUPPORTED" cookie = <<80,158,95,51,174,35,73,130,246,141,200,49,180,190,82,234>>, - kex_algorithms = ["diffie-hellman-group1-sha1"], + kex_algorithms = [atom_to_list(?DEFAULT_KEX)], server_host_key_algorithms = ["SOME-UNSUPPORTED"], % SIC! encryption_algorithms_client_to_server = ["aes128-ctr"], encryption_algorithms_server_to_client = ["aes128-ctr"], @@ -332,7 +351,9 @@ no_common_alg_client_disconnects(Config) -> %% and finally connect to it with a regular Erlang SSH client %% which of course does not support SOME-UNSUPPORTED as pub key algo: - Result = std_connect(HostPort, Config, [{preferred_algorithms,[{public_key,['ssh-dss']}]}]), + Result = std_connect(HostPort, Config, [{preferred_algorithms,[{public_key,['ssh-dss']}, + {cipher,?DEFAULT_CIPHERS} + ]}]), ct:log("Result of connect is ~p",[Result]), receive @@ -351,20 +372,25 @@ no_common_alg_client_disconnects(Config) -> %%%-------------------------------------------------------------------- gex_client_init_option_groups(Config) -> - do_gex_client_init(Config, {2000, 2048, 4000}, - {3,41}). + do_gex_client_init(Config, {512, 2048, 4000}, + {5,16#D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A770E2EC9F} + ). gex_client_init_option_groups_file(Config) -> do_gex_client_init(Config, {2000, 2048, 4000}, - {5,61}). + {5, 16#DFAA35D35531E0F524F0099877A482D2AC8D589F374394A262A8E81A8A4FB2F65FADBAB395E05D147B29D486DFAA41F41597A256DA82A8B6F76401AED53D0253F956CEC610D417E42E3B287F7938FC24D8821B40BFA218A956EB7401BED6C96C68C7FD64F8170A8A76B953DD2F05420118F6B144D8FE48060A2BCB85056B478EDEF96DBC70427053ECD2958C074169E9550DD877779A3CF17C5AC850598C7586BEEA9DCFE9DD2A5FB62DF5F33EA7BC00CDA31B9D2DD721F979EA85B6E63F0C4E30BDDCD3A335522F9004C4ED50B15DC537F55324DD4FA119FB3F101467C6D7E1699DE4B3E3C478A8679B8EB3FA5C9B826B44530FD3BE9AD3063B240B0C853EBDDBD68DD940332D98F148D5D9E1DC977D60A0D23D0CA1198637FEAE4E7FAAC173AF2B84313A666CFB4EE6972811921D0AD867CE57F3BBC8D6CB057E3B66757BB46C9F72662624D44E14528327E3A7100E81A12C43C4E236118318CD90C8AA185BBB0C764826DAEAEE8DD245C5B451B4944E6122CC522D1C335C2EEF9424273F1F} + ). gex_client_init_option_groups_moduli_file(Config) -> do_gex_client_init(Config, {2000, 2048, 4000}, - {5,16#B7}). 
+ {5, 16#DD2047CBDBB6F8E919BC63DE885B34D0FD6E3DB2887D8B46FE249886ACED6B46DFCD5553168185FD376122171CD8927E60120FA8D01F01D03E58281FEA9A1ABE97631C828E41815F34FDCDF787419FE13A3137649AA93D2584230DF5F24B5C00C88B7D7DE4367693428C730376F218A53E853B0851BAB7C53C15DA7839CBE1285DB63F6FA45C1BB59FE1C5BB918F0F8459D7EF60ACFF5C0FA0F3FCAD1C5F4CE4416D4F4B36B05CDCEBE4FB879E95847EFBC6449CD190248843BC7EDB145FBFC4EDBB1A3C959298F08F3BA2CFBE231BBE204BE6F906209D28BD4820AB3E7BE96C26AE8A809ADD8D1A5A0B008E9570FA4C4697E116B8119892C604293683A9635F} + ). gex_server_gex_limit(Config) -> do_gex_client_init(Config, {1000, 3000, 4000}, - {7,91}). + %% {7,91}). + {5, 16#D1391174233D315398FE2830AC6B2B66BCCD01B0A634899F339B7879F1DB85712E9DC4E4B1C6C8355570C1D2DCB53493DF18175A9C53D1128B592B4C72D97136F5542FEB981CBFE8012FDD30361F288A42BD5EBB08BAB0A5640E1AC48763B2ABD1945FEE36B2D55E1D50A1C86CED9DD141C4E7BE2D32D9B562A0F8E2E927020E91F58B57EB9ACDDA106A59302D7E92AD5F6E851A45FA1CFE86029A0F727F65A8F475F33572E2FDAB6073F0C21B8B54C3823DB2EF068927E5D747498F96E1E827} + ). do_gex_client_init(Config, {Min,N,Max}, {G,P}) -> @@ -376,7 +402,9 @@ do_gex_client_init(Config, {Min,N,Max}, {G,P}) -> [{silently_accept_hosts, true}, {user_dir, user_dir(Config)}, {user_interaction, false}, - {preferred_algorithms,[{kex,['diffie-hellman-group-exchange-sha1']}]} + {preferred_algorithms,[{kex,['diffie-hellman-group-exchange-sha1']}, + {cipher,?DEFAULT_CIPHERS} + ]} ]}, receive_hello, {send, hello}, @@ -390,8 +418,15 @@ do_gex_client_init(Config, {Min,N,Max}, {G,P}) -> ). %%%-------------------------------------------------------------------- -gex_client_old_request_exact(Config) -> do_gex_client_init_old(Config, 500, {3,17}). -gex_client_old_request_noexact(Config) -> do_gex_client_init_old(Config, 800, {7,91}). +gex_client_old_request_exact(Config) -> + do_gex_client_init_old(Config, 1023, + {2, 16#D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A771225323} + ). + +gex_client_old_request_noexact(Config) -> + do_gex_client_init_old(Config, 1400, + {5, 16#D1391174233D315398FE2830AC6B2B66BCCD01B0A634899F339B7879F1DB85712E9DC4E4B1C6C8355570C1D2DCB53493DF18175A9C53D1128B592B4C72D97136F5542FEB981CBFE8012FDD30361F288A42BD5EBB08BAB0A5640E1AC48763B2ABD1945FEE36B2D55E1D50A1C86CED9DD141C4E7BE2D32D9B562A0F8E2E927020E91F58B57EB9ACDDA106A59302D7E92AD5F6E851A45FA1CFE86029A0F727F65A8F475F33572E2FDAB6073F0C21B8B54C3823DB2EF068927E5D747498F96E1E827} + ). do_gex_client_init_old(Config, N, {G,P}) -> {ok,_} = @@ -402,7 +437,9 @@ do_gex_client_init_old(Config, N, {G,P}) -> [{silently_accept_hosts, true}, {user_dir, user_dir(Config)}, {user_interaction, false}, - {preferred_algorithms,[{kex,['diffie-hellman-group-exchange-sha1']}]} + {preferred_algorithms,[{kex,['diffie-hellman-group-exchange-sha1']}, + {cipher,?DEFAULT_CIPHERS} + ]} ]}, receive_hello, {send, hello}, @@ -572,7 +609,9 @@ client_handles_keyboard_interactive_0_pwds(Config) -> %% and finally connect to it with a regular Erlang SSH client: {ok,_} = std_connect(HostPort, Config, - [{preferred_algorithms,[{kex,['diffie-hellman-group1-sha1']}]}] + [{preferred_algorithms,[{kex,[?DEFAULT_KEX]}, + {cipher,?DEFAULT_CIPHERS} + ]}] ). 
@@ -623,6 +662,7 @@ stop_apps(_Config) -> setup_dirs(Config) -> DataDir = proplists:get_value(data_dir, Config), PrivDir = proplists:get_value(priv_dir, Config), + ssh_test_lib:setup_dsa(DataDir, PrivDir), ssh_test_lib:setup_rsa(DataDir, PrivDir), Config. @@ -708,7 +748,9 @@ connect_and_kex(Config, InitialState) -> ssh_trpt_test_lib:exec( [{connect, server_host(Config),server_port(Config), - [{preferred_algorithms,[{kex,['diffie-hellman-group1-sha1']}]}, + [{preferred_algorithms,[{kex,[?DEFAULT_KEX]}, + {cipher,?DEFAULT_CIPHERS} + ]}, {silently_accept_hosts, true}, {user_dir, user_dir(Config)}, {user_interaction, false}]}, diff --git a/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test b/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test index 2887bb4b60..87c4b4afc8 100644 --- a/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test +++ b/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test @@ -1,3 +1,3 @@ -{2222, 5, 61}. -{1111, 7, 91}. +{1023, 5, 16#D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A770E2EC9F}. +{3071, 5, 16#DFAA35D35531E0F524F0099877A482D2AC8D589F374394A262A8E81A8A4FB2F65FADBAB395E05D147B29D486DFAA41F41597A256DA82A8B6F76401AED53D0253F956CEC610D417E42E3B287F7938FC24D8821B40BFA218A956EB7401BED6C96C68C7FD64F8170A8A76B953DD2F05420118F6B144D8FE48060A2BCB85056B478EDEF96DBC70427053ECD2958C074169E9550DD877779A3CF17C5AC850598C7586BEEA9DCFE9DD2A5FB62DF5F33EA7BC00CDA31B9D2DD721F979EA85B6E63F0C4E30BDDCD3A335522F9004C4ED50B15DC537F55324DD4FA119FB3F101467C6D7E1699DE4B3E3C478A8679B8EB3FA5C9B826B44530FD3BE9AD3063B240B0C853EBDDBD68DD940332D98F148D5D9E1DC977D60A0D23D0CA1198637FEAE4E7FAAC173AF2B84313A666CFB4EE6972811921D0AD867CE57F3BBC8D6CB057E3B66757BB46C9F72662624D44E14528327E3A7100E81A12C43C4E236118318CD90C8AA185BBB0C764826DAEAEE8DD245C5B451B4944E6122CC522D1C335C2EEF9424273F1F}. 
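The rewritten dh_group_test data above uses realistically sized {Size, Generator, Prime} tuples. A sketch of feeding such groups, or just size limits, to a daemon via the dh_gex_groups and dh_gex_limits options seen earlier in this diff; the directory and the bindings G and P (a generator and a large safe prime) are placeholders, not values to use in production.

start_gex_daemon(SystemDir, G, P) ->
    ssh:daemon(0, [{system_dir, SystemDir},
                   {dh_gex_groups, [{1023, G, P}]},
                   {dh_gex_limits, {1023, 4096}}]).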
diff --git a/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test.moduli b/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test.moduli index f6995ba4c9..6d2b4bcb59 100644 --- a/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test.moduli +++ b/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test.moduli @@ -1,3 +1,2 @@ -20151021104105 2 6 100 2222 5 B7 -20151021104106 2 6 100 1111 5 4F - +20120821044046 2 6 100 1023 2 D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A7711F2C6B +20120821050554 2 6 100 2047 5 DD2047CBDBB6F8E919BC63DE885B34D0FD6E3DB2887D8B46FE249886ACED6B46DFCD5553168185FD376122171CD8927E60120FA8D01F01D03E58281FEA9A1ABE97631C828E41815F34FDCDF787419FE13A3137649AA93D2584230DF5F24B5C00C88B7D7DE4367693428C730376F218A53E853B0851BAB7C53C15DA7839CBE1285DB63F6FA45C1BB59FE1C5BB918F0F8459D7EF60ACFF5C0FA0F3FCAD1C5F4CE4416D4F4B36B05CDCEBE4FB879E95847EFBC6449CD190248843BC7EDB145FBFC4EDBB1A3C959298F08F3BA2CFBE231BBE204BE6F906209D28BD4820AB3E7BE96C26AE8A809ADD8D1A5A0B008E9570FA4C4697E116B8119892C604293683A9635F diff --git a/lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key b/lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key new file mode 100644 index 0000000000..79968bdd7d --- /dev/null +++ b/lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key @@ -0,0 +1,16 @@ +-----BEGIN RSA PRIVATE KEY----- +MIICXQIBAAKBgQDCZX+4FBDwZIh9y/Uxee1VJnEXlowpz2yDKwj8semM4q843337 +zbNfxHmladB1lpz2NqyxI175xMIJuDxogyZdsOxGnFAzAnthR4dqL/RWRWzjaxSB +6IAO9SPYVVlrpZ+1hsjLW79fwXK/yc8VdhRuWTeQiRgYY2ek8+OKbOqz4QIDAQAB +AoGANmvJzJO5hkLuvyDZHKfAnGTtpifcR1wtSa9DjdKUyn8vhKF0mIimnbnYQEmW +NUUb3gXCZLi9PvkpRSVRrASDOZwcjoU/Kvww163vBUVb2cOZfFhyn6o2Sk88Tt++ +udH3hdjpf9i7jTtUkUe+QYPsia+wgvvrmn4QrahLAH86+kECQQDx5gFeXTME3cnW +WMpFz3PPumduzjqgqMMWEccX4FtQkMX/gyGa5UC7OHFyh0N/gSWvPbRHa8A6YgIt +n8DO+fh5AkEAzbqX4DOn8NY6xJIi42q7l/2jIA0RkB6P7YugW5NblhqBZ0XDnpA5 +sMt+rz+K07u9XZtxgh1xi7mNfwY6lEAMqQJBAJBEauCKmRj35Z6OyeQku59SPsnY ++SJEREVvSNw2lH9SOKQQ4wPsYlTGbvKtNVZgAcen91L5MmYfeckYE/fdIZECQQCt +64zxsTnM1I8iFxj/gP/OYlJBikrKt8udWmjaghzvLMEw+T2DExJyb9ZNeT53+UMB +m6O+B/4xzU/djvp+0hbhAkAemIt+rA5kTmYlFndhpvzkSSM8a2EXsO4XIPgGWCTT +tQKS/tTly0ADMjN/TVy11+9d6zcqadNVuHXHGtR4W0GR +-----END RSA PRIVATE KEY----- + diff --git a/lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key.pub b/lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key.pub new file mode 100644 index 0000000000..75d2025c71 --- /dev/null +++ b/lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key.pub @@ -0,0 +1,5 @@ +---- BEGIN SSH2 PUBLIC KEY ---- +AAAAB3NzaC1yc2EAAAADAQABAAAAgQDCZX+4FBDwZIh9y/Uxee1VJnEXlowpz2yDKwj8 +semM4q843337zbNfxHmladB1lpz2NqyxI175xMIJuDxogyZdsOxGnFAzAnthR4dqL/RW +RWzjaxSB6IAO9SPYVVlrpZ+1hsjLW79fwXK/yc8VdhRuWTeQiRgYY2ek8+OKbOqz4Q== +---- END SSH2 PUBLIC KEY ---- diff --git a/lib/ssh/test/ssh_sftpd_SUITE.erl b/lib/ssh/test/ssh_sftpd_SUITE.erl index 52a26110c4..b167f98ac8 100644 --- a/lib/ssh/test/ssh_sftpd_SUITE.erl +++ b/lib/ssh/test/ssh_sftpd_SUITE.erl @@ -65,7 +65,12 @@ all() -> ver3_open_flags, relpath, sshd_read_file, - ver6_basic]. + ver6_basic, + access_outside_root, + root_with_cwd, + relative_path, + open_file_dir_v5, + open_file_dir_v6]. groups() -> []. 
@@ -117,6 +122,31 @@ init_per_testcase(TestCase, Config) -> ver6_basic -> SubSystems = [ssh_sftpd:subsystem_spec([{sftpd_vsn, 6}])], ssh:daemon(0, [{subsystems, SubSystems}|Options]); + access_outside_root -> + %% Build RootDir/access_outside_root/a/b and set Root and CWD + BaseDir = filename:join(PrivDir, access_outside_root), + RootDir = filename:join(BaseDir, a), + CWD = filename:join(RootDir, b), + %% Make the directory chain: + ok = filelib:ensure_dir(filename:join(CWD, tmp)), + SubSystems = [ssh_sftpd:subsystem_spec([{root, RootDir}, + {cwd, CWD}])], + ssh:daemon(0, [{subsystems, SubSystems}|Options]); + root_with_cwd -> + RootDir = filename:join(PrivDir, root_with_cwd), + CWD = filename:join(RootDir, home), + SubSystems = [ssh_sftpd:subsystem_spec([{root, RootDir}, {cwd, CWD}])], + ssh:daemon(0, [{subsystems, SubSystems}|Options]); + relative_path -> + SubSystems = [ssh_sftpd:subsystem_spec([{cwd, PrivDir}])], + ssh:daemon(0, [{subsystems, SubSystems}|Options]); + open_file_dir_v5 -> + SubSystems = [ssh_sftpd:subsystem_spec([{cwd, PrivDir}])], + ssh:daemon(0, [{subsystems, SubSystems}|Options]); + open_file_dir_v6 -> + SubSystems = [ssh_sftpd:subsystem_spec([{cwd, PrivDir}, + {sftpd_vsn, 6}])], + ssh:daemon(0, [{subsystems, SubSystems}|Options]); _ -> SubSystems = [ssh_sftpd:subsystem_spec([])], ssh:daemon(0, [{subsystems, SubSystems}|Options]) @@ -128,8 +158,7 @@ init_per_testcase(TestCase, Config) -> [{user_dir, ClientUserDir}, {user, ?USER}, {password, ?PASSWD}, {user_interaction, false}, - {silently_accept_hosts, true}, - {pwdfun, fun(_,_) -> true end}]), + {silently_accept_hosts, true}]), {ok, Channel} = ssh_connection:session_channel(Cm, ?XFER_WINDOW_SIZE, ?XFER_PACKET_SIZE, ?TIMEOUT), @@ -646,6 +675,133 @@ ver6_basic(Config) when is_list(Config) -> open_file(PrivDir, Cm, Channel, ReqId, ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES, ?SSH_FXF_OPEN_EXISTING). + +%%-------------------------------------------------------------------- +access_outside_root() -> + [{doc, "Try access files outside the tree below RootDir"}]. +access_outside_root(Config) when is_list(Config) -> + PrivDir = proplists:get_value(priv_dir, Config), + BaseDir = filename:join(PrivDir, access_outside_root), + %% A file outside the tree below RootDir which is BaseDir/a + %% Make the file BaseDir/bad : + BadFilePath = filename:join([BaseDir, bad]), + ok = file:write_file(BadFilePath, <<>>), + {Cm, Channel} = proplists:get_value(sftp, Config), + %% Try to access a file parallell to the RootDir: + try_access("/../bad", Cm, Channel, 0), + %% Try to access the same file via the CWD which is /b relative to the RootDir: + try_access("../../bad", Cm, Channel, 1). 
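The access_outside_root case above checks that a client cannot open files outside the configured subsystem root. A sketch of the corresponding server side, mirroring the ssh_sftpd:subsystem_spec/1 call used in init_per_testcase; the directory arguments and the "home" subdirectory are placeholders.

start_chrooted_sftpd(SystemDir, RootDir) ->
    Spec = ssh_sftpd:subsystem_spec([{root, RootDir},
                                     {cwd, filename:join(RootDir, "home")}]),
    ssh:daemon(0, [{system_dir, SystemDir},
                   {subsystems, [Spec]}]).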
+ + +try_access(Path, Cm, Channel, ReqId) -> + Return = + open_file(Path, Cm, Channel, ReqId, + ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES, + ?SSH_FXF_OPEN_EXISTING), + ct:log("Try open ~p -> ~p",[Path,Return]), + case Return of + {ok, <<?SSH_FXP_HANDLE, ?UINT32(ReqId), _Handle0/binary>>, _} -> + ct:fail("Could open a file outside the root tree!"); + {ok, <<?SSH_FXP_STATUS, ?UINT32(ReqId), ?UINT32(Code), Rest/binary>>, <<>>} -> + case Code of + ?SSH_FX_FILE_IS_A_DIRECTORY -> + ct:pal("Got the expected SSH_FX_FILE_IS_A_DIRECTORY status",[]), + ok; + ?SSH_FX_FAILURE -> + ct:pal("Got the expected SSH_FX_FAILURE status",[]), + ok; + _ -> + case Rest of + <<?UINT32(Len), Txt:Len/binary, _/binary>> -> + ct:fail("Got unexpected SSH_FX_code: ~p (~p)",[Code,Txt]); + _ -> + ct:fail("Got unexpected SSH_FX_code: ~p",[Code]) + end + end; + _ -> + ct:fail("Completly unexpected return: ~p", [Return]) + end. + +%%-------------------------------------------------------------------- +root_with_cwd() -> + [{doc, "Check if files are found, if the CWD and Root are specified"}]. +root_with_cwd(Config) when is_list(Config) -> + PrivDir = proplists:get_value(priv_dir, Config), + RootDir = filename:join(PrivDir, root_with_cwd), + CWD = filename:join(RootDir, home), + FileName = "root_with_cwd.txt", + FilePath = filename:join(CWD, FileName), + ok = filelib:ensure_dir(FilePath), + ok = file:write_file(FilePath ++ "0", <<>>), + ok = file:write_file(FilePath ++ "1", <<>>), + ok = file:write_file(FilePath ++ "2", <<>>), + {Cm, Channel} = proplists:get_value(sftp, Config), + ReqId0 = 0, + {ok, <<?SSH_FXP_HANDLE, ?UINT32(ReqId0), _Handle0/binary>>, _} = + open_file(FileName ++ "0", Cm, Channel, ReqId0, + ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES, + ?SSH_FXF_OPEN_EXISTING), + ReqId1 = 1, + {ok, <<?SSH_FXP_HANDLE, ?UINT32(ReqId1), _Handle1/binary>>, _} = + open_file("./" ++ FileName ++ "1", Cm, Channel, ReqId1, + ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES, + ?SSH_FXF_OPEN_EXISTING), + ReqId2 = 2, + {ok, <<?SSH_FXP_HANDLE, ?UINT32(ReqId2), _Handle2/binary>>, _} = + open_file("/home/" ++ FileName ++ "2", Cm, Channel, ReqId2, + ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES, + ?SSH_FXF_OPEN_EXISTING). + +%%-------------------------------------------------------------------- +relative_path() -> + [{doc, "Test paths relative to CWD when opening a file handle."}]. +relative_path(Config) when is_list(Config) -> + PrivDir = proplists:get_value(priv_dir, Config), + FileName = "test_relative_path.txt", + FilePath = filename:join(PrivDir, FileName), + ok = filelib:ensure_dir(FilePath), + ok = file:write_file(FilePath, <<>>), + {Cm, Channel} = proplists:get_value(sftp, Config), + ReqId = 0, + {ok, <<?SSH_FXP_HANDLE, ?UINT32(ReqId), _Handle/binary>>, _} = + open_file(FileName, Cm, Channel, ReqId, + ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES, + ?SSH_FXF_OPEN_EXISTING). + +%%-------------------------------------------------------------------- +open_file_dir_v5() -> + [{doc, "Test if open_file fails when opening existing directory."}]. +open_file_dir_v5(Config) when is_list(Config) -> + PrivDir = proplists:get_value(priv_dir, Config), + FileName = "open_file_dir_v5", + FilePath = filename:join(PrivDir, FileName), + ok = filelib:ensure_dir(FilePath), + ok = file:make_dir(FilePath), + {Cm, Channel} = proplists:get_value(sftp, Config), + ReqId = 0, + {ok, <<?SSH_FXP_STATUS, ?UINT32(ReqId), + ?UINT32(?SSH_FX_FAILURE), _/binary>>, _} = + open_file(FileName, Cm, Channel, ReqId, + ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES, + ?SSH_FXF_OPEN_EXISTING). 
+ +%%-------------------------------------------------------------------- +open_file_dir_v6() -> + [{doc, "Test if open_file fails when opening existing directory."}]. +open_file_dir_v6(Config) when is_list(Config) -> + PrivDir = proplists:get_value(priv_dir, Config), + FileName = "open_file_dir_v6", + FilePath = filename:join(PrivDir, FileName), + ok = filelib:ensure_dir(FilePath), + ok = file:make_dir(FilePath), + {Cm, Channel} = proplists:get_value(sftp, Config), + ReqId = 0, + {ok, <<?SSH_FXP_STATUS, ?UINT32(ReqId), + ?UINT32(?SSH_FX_FILE_IS_A_DIRECTORY), _/binary>>, _} = + open_file(FileName, Cm, Channel, ReqId, + ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES, + ?SSH_FXF_OPEN_EXISTING). + %%-------------------------------------------------------------------- %% Internal functions ------------------------------------------------ %%-------------------------------------------------------------------- @@ -688,9 +844,7 @@ reply(Cm, Channel, RBuf) -> 30000 -> ct:fail("timeout ~p:~p",[?MODULE,?LINE]) end. - open_file(File, Cm, Channel, ReqId, Access, Flags) -> - Data = list_to_binary([?uint32(ReqId), ?binary(list_to_binary(File)), ?uint32(Access), diff --git a/lib/ssh/test/ssh_sftpd_erlclient_SUITE.erl b/lib/ssh/test/ssh_sftpd_erlclient_SUITE.erl index 56a33d6349..b4d7eadfa4 100644 --- a/lib/ssh/test/ssh_sftpd_erlclient_SUITE.erl +++ b/lib/ssh/test/ssh_sftpd_erlclient_SUITE.erl @@ -65,6 +65,7 @@ init_per_suite(Config) -> {ok, FileInfo} = file:read_file_info(FileName), ok = file:write_file_info(FileName, FileInfo#file_info{mode = 8#400}), + ssh_test_lib:setup_rsa(DataDir, PrivDir), ssh_test_lib:setup_dsa(DataDir, PrivDir), Config end). @@ -73,6 +74,7 @@ end_per_suite(Config) -> UserDir = filename:join(proplists:get_value(priv_dir, Config), nopubkey), file:del_dir(UserDir), SysDir = proplists:get_value(priv_dir, Config), + ssh_test_lib:clean_rsa(SysDir), ssh_test_lib:clean_dsa(SysDir), ok. 
@@ -187,7 +189,6 @@ quit(Config) when is_list(Config) -> timer:sleep(5000), {ok, NewSftp, _Conn} = ssh_sftp:start_channel(Host, Port, [{silently_accept_hosts, true}, - {pwdfun, fun(_,_) -> true end}, {user_dir, UserDir}, {user, ?USER}, {password, ?PASSWD}]), diff --git a/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/id_rsa b/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/id_rsa new file mode 100644 index 0000000000..9d7e0dd5fb --- /dev/null +++ b/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/id_rsa @@ -0,0 +1,15 @@ +-----BEGIN RSA PRIVATE KEY----- +MIICXAIBAAKBgQD1OET+3O/Bvj/dtjxDTXmj1oiJt4sIph5kGy0RfjoPrZfaS+CU +DhakCmS6t2ivxWFgtpKWaoGMZMJqWj6F6ZsumyFl3FPBtujwY/35cgifrI9Ns4Tl +zR1uuengNBmV+WRQ5cd9F2qS6Z8aDQihzt0r8JUqLcK+VQbrmNzboCCQQwIDAQAB +AoGAPQEyqPTt8JUT7mRXuaacjFXiweAXhp9NEDpyi9eLOjtFe9lElZCrsUOkq47V +TGUeRKEm9qSodfTbKPoqc8YaBJGJPhUaTAcha+7QcDdfHBvIsgxvU7ePVnlpXRp3 +CCUEMPhlnx6xBoTYP+fRU0e3+xJIPVyVCqX1jAdUMkzfRoECQQD6ux7B1QJAIWyK +SGkbDUbBilNmzCFNgIpOP6PA+bwfi5d16diTpra5AX09keQABAo/KaP1PdV8Vg0p +z4P3A7G3AkEA+l+AKG6m0kQTTBMJDqOdVPYwe+5GxunMaqmhokpEbuGsrZBl5Dvd +WpcBjR7jmenrhKZRIuA+Fz5HPo/UQJPl1QJBAKxstDkeED8j/S2XoFhPKAJ+6t39 +sUVICVTIZQeXdmzHJXCcUSkw8+WEhakqw/3SyW0oaK2FSWQJFWJUZ+8eJj8CQEh3 +xeduB5kKnS9CvzdeghZqX6QvVosSdtlUmfUYW/BgH5PpHKTP8wTaeld3XldZTpMJ +dKiMkUw2+XYROVUrubUCQD+Na1LhULlpn4ISEtIEfqpdlUhxDgO15Wg8USmsng+x +ICliVOSQtwaZjm8kwaFt0W7XnpnDxbRs37vIEbIMWak= +-----END RSA PRIVATE KEY----- diff --git a/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key b/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key new file mode 100644 index 0000000000..79968bdd7d --- /dev/null +++ b/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key @@ -0,0 +1,16 @@ +-----BEGIN RSA PRIVATE KEY----- +MIICXQIBAAKBgQDCZX+4FBDwZIh9y/Uxee1VJnEXlowpz2yDKwj8semM4q843337 +zbNfxHmladB1lpz2NqyxI175xMIJuDxogyZdsOxGnFAzAnthR4dqL/RWRWzjaxSB +6IAO9SPYVVlrpZ+1hsjLW79fwXK/yc8VdhRuWTeQiRgYY2ek8+OKbOqz4QIDAQAB +AoGANmvJzJO5hkLuvyDZHKfAnGTtpifcR1wtSa9DjdKUyn8vhKF0mIimnbnYQEmW +NUUb3gXCZLi9PvkpRSVRrASDOZwcjoU/Kvww163vBUVb2cOZfFhyn6o2Sk88Tt++ +udH3hdjpf9i7jTtUkUe+QYPsia+wgvvrmn4QrahLAH86+kECQQDx5gFeXTME3cnW +WMpFz3PPumduzjqgqMMWEccX4FtQkMX/gyGa5UC7OHFyh0N/gSWvPbRHa8A6YgIt +n8DO+fh5AkEAzbqX4DOn8NY6xJIi42q7l/2jIA0RkB6P7YugW5NblhqBZ0XDnpA5 +sMt+rz+K07u9XZtxgh1xi7mNfwY6lEAMqQJBAJBEauCKmRj35Z6OyeQku59SPsnY ++SJEREVvSNw2lH9SOKQQ4wPsYlTGbvKtNVZgAcen91L5MmYfeckYE/fdIZECQQCt +64zxsTnM1I8iFxj/gP/OYlJBikrKt8udWmjaghzvLMEw+T2DExJyb9ZNeT53+UMB +m6O+B/4xzU/djvp+0hbhAkAemIt+rA5kTmYlFndhpvzkSSM8a2EXsO4XIPgGWCTT +tQKS/tTly0ADMjN/TVy11+9d6zcqadNVuHXHGtR4W0GR +-----END RSA PRIVATE KEY----- + diff --git a/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key.pub b/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key.pub new file mode 100644 index 0000000000..75d2025c71 --- /dev/null +++ b/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key.pub @@ -0,0 +1,5 @@ +---- BEGIN SSH2 PUBLIC KEY ---- +AAAAB3NzaC1yc2EAAAADAQABAAAAgQDCZX+4FBDwZIh9y/Uxee1VJnEXlowpz2yDKwj8 +semM4q843337zbNfxHmladB1lpz2NqyxI175xMIJuDxogyZdsOxGnFAzAnthR4dqL/RW +RWzjaxSB6IAO9SPYVVlrpZ+1hsjLW79fwXK/yc8VdhRuWTeQiRgYY2ek8+OKbOqz4Q== +---- END SSH2 PUBLIC KEY ---- diff --git a/lib/ssh/test/ssh_test_lib.erl b/lib/ssh/test/ssh_test_lib.erl index 286ac6e882..1673f52821 100644 --- a/lib/ssh/test/ssh_test_lib.erl +++ b/lib/ssh/test/ssh_test_lib.erl @@ -690,13 +690,16 @@ ssh_type() -> ssh_type1() -> try + ct:log("~p:~p os:find_executable(\"ssh\")",[?MODULE,?LINE]), case os:find_executable("ssh") of false -> ct:log("~p:~p Executable \"ssh\" 
not found",[?MODULE,?LINE]), not_found; - _ -> + Path -> + ct:log("~p:~p Found \"ssh\" at ~p",[?MODULE,?LINE,Path]), case os:cmd("ssh -V") of - "OpenSSH" ++ _ -> + Version = "OpenSSH" ++ _ -> + ct:log("~p:~p Found OpenSSH ~p",[?MODULE,?LINE,Version]), openSSH; Str -> ct:log("ssh client ~p is unknown",[Str]), diff --git a/lib/ssh/test/ssh_to_openssh_SUITE.erl b/lib/ssh/test/ssh_to_openssh_SUITE.erl index 86c3d5de26..687e6efaf3 100644 --- a/lib/ssh/test/ssh_to_openssh_SUITE.erl +++ b/lib/ssh/test/ssh_to_openssh_SUITE.erl @@ -36,7 +36,7 @@ %%-------------------------------------------------------------------- suite() -> - [{timetrap,{seconds,20}}]. + [{timetrap,{seconds,60}}]. all() -> case os:find_executable("ssh") of @@ -381,7 +381,6 @@ erlang_server_openssh_client_public_key_X(Config, PubKeyAlg) -> PrivDir = proplists:get_value(priv_dir, Config), KnownHosts = filename:join(PrivDir, "known_hosts"), {Pid, Host, Port} = ssh_test_lib:daemon([{system_dir, SystemDir}, - {public_key_alg, PubKeyAlg}, {failfun, fun ssh_test_lib:failfun/2}]), ct:sleep(500), @@ -402,7 +401,6 @@ erlang_server_openssh_client_renegotiate(Config) -> KnownHosts = filename:join(PrivDir, "known_hosts"), {Pid, Host, Port} = ssh_test_lib:daemon([{system_dir, SystemDir}, - {public_key_alg, PubKeyAlg}, {failfun, fun ssh_test_lib:failfun/2}]), ct:sleep(500), @@ -442,7 +440,7 @@ erlang_server_openssh_client_renegotiate(Config) -> ssh_test_lib:rcv_expected(Expect, OpenSsh, ?TIMEOUT) of _ -> - %% Unfortunatly we can't check that there has been a renegotiation, just trust OpenSSH. + %% Unfortunately we can't check that there has been a renegotiation, just trust OpenSSH. ssh:stop_daemon(Pid) catch throw:{skip,R} -> {skip,R} @@ -464,6 +462,7 @@ erlang_client_openssh_server_renegotiate(_Config) -> {silently_accept_hosts,true}], group_leader(IO, self()), {ok, ConnRef} = ssh:connect(Host, ?SSH_DEFAULT_PORT, Options), + ct:pal("Parent = ~p, IO = ~p, Shell = ~p, ConnRef = ~p~n",[Parent, IO, self(), ConnRef]), case ssh_connection:session_channel(ConnRef, infinity) of {ok,ChannelId} -> success = ssh_connection:ptty_alloc(ConnRef, ChannelId, []), diff --git a/lib/ssh/test/ssh_trpt_test_lib.erl b/lib/ssh/test/ssh_trpt_test_lib.erl index bc86000d81..261239c152 100644 --- a/lib/ssh/test/ssh_trpt_test_lib.erl +++ b/lib/ssh/test/ssh_trpt_test_lib.erl @@ -85,15 +85,18 @@ exec(Op, S0=#s{}) -> throw:Term -> report_trace(throw, Term, S1), - throw(Term); + throw({Term,Op}); error:Error -> report_trace(error, Error, S1), - error(Error); + error({Error,Op}); exit:Exit -> report_trace(exit, Exit, S1), - exit(Exit) + exit({Exit,Op}); + Cls:Err -> + ct:pal("Class=~p, Error=~p", [Cls,Err]), + error({"fooooooO",Op}) end; exec(Op, {ok,S=#s{}}) -> exec(Op, S); exec(_, Error) -> Error. 
@@ -111,20 +114,20 @@ op({accept,Opts}, S) when ?role(S) == server -> {ok,Socket} = gen_tcp:accept(S#s.listen_socket, S#s.timeout), {Host,_Port} = ok(inet:sockname(Socket)), S#s{socket = Socket, - ssh = init_ssh(server,Socket,[{host,host(Host)}|Opts]), + ssh = init_ssh(server, Socket, host(Host), Opts), return_value = ok}; %%%---- Client ops op({connect,Host,Port,Opts}, S) when ?role(S) == undefined -> Socket = ok(gen_tcp:connect(host(Host), Port, mangle_opts([]))), S#s{socket = Socket, - ssh = init_ssh(client, Socket, [{host,host(Host)}|Opts]), + ssh = init_ssh(client, Socket, host(Host), Opts), return_value = ok}; %%%---- ops for both client and server op(close_socket, S) -> - catch tcp_gen:close(S#s.socket), - catch tcp_gen:close(S#s.listen_socket), + catch gen_tcp:close(S#s.socket), + catch gen_tcp:close(S#s.listen_socket), S#s{socket = undefined, listen_socket = undefined, return_value = ok}; @@ -293,12 +296,14 @@ instantiate(X, _S) -> %%%================================================================ %%% -init_ssh(Role, Socket, Options0) -> - Options = [{user_interaction, false}, - {vsn, {2,0}}, - {id_string, "ErlangTestLib"} - | Options0], - ssh_connection_handler:init_ssh_record(Role, Socket, Options). +init_ssh(Role, Socket, Host, UserOptions0) -> + UserOptions = [{user_interaction, false}, + {vsn, {2,0}}, + {id_string, "ErlangTestLib"} + | UserOptions0], + Opts = ?PUT_INTERNAL_OPT({host,Host}, + ssh_options:handle_options(Role, UserOptions)), + ssh_connection_handler:init_ssh_record(Role, Socket, Opts). mangle_opts(Options) -> SysOpts = [{reuseaddr, true}, diff --git a/lib/ssh/vsn.mk b/lib/ssh/vsn.mk index c6a5990f41..96c83cb0f7 100644 --- a/lib/ssh/vsn.mk +++ b/lib/ssh/vsn.mk @@ -1,5 +1,5 @@ #-*-makefile-*- ; force emacs to enter makefile-mode -SSH_VSN = 4.4 +SSH_VSN = 4.4.1 APP_VSN = "ssh-$(SSH_VSN)" diff --git a/lib/ssl/doc/src/notes.xml b/lib/ssl/doc/src/notes.xml index 29b8e8ff67..d3ab3e9216 100644 --- a/lib/ssl/doc/src/notes.xml +++ b/lib/ssl/doc/src/notes.xml @@ -28,6 +28,70 @@ <p>This document describes the changes made to the SSL application.</p> +<section><title>SSL 8.1.1</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + Corrected termination behavior, that caused a PEM cache + bug and sometimes resulted in connection failures.</p> + <p> + Own Id: OTP-14100</p> + </item> + <item> + <p> + Fix bug that could hang ssl connection processes when + failing to require more data for very large handshake + packages. Add option max_handshake_size to mitigate DoS + attacks.</p> + <p> + Own Id: OTP-14138</p> + </item> + <item> + <p> + Improved support for CRL handling that could fail to work + as intended when an id-ce-extKeyUsage was present in the + certificate. Also improvements where needed to + distributionpoint handling so that all revocations + actually are found and not deemed to be not determinable.</p> + <p> + Own Id: OTP-14141</p> + </item> + <item> + <p> + A TLS handshake might accidentally match old sslv2 format + and ssl application would incorrectly aborted TLS + handshake with ssl_v2_client_hello_no_supported. Parsing + was altered to avoid this problem.</p> + <p> + Own Id: OTP-14222</p> + </item> + <item> + <p> + Correct default cipher list to prefer AES 128 before 3DES</p> + <p> + Own Id: OTP-14235</p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + Move PEM cache to a dedicated process, to avoid making + the SSL manager process a bottleneck. 
This improves + scalability of TLS connections.</p> + <p> + Own Id: OTP-13874</p> + </item> + </list> + </section> + +</section> + <section><title>SSL 8.1</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/ssl/doc/src/ssl_session_cache_api.xml b/lib/ssl/doc/src/ssl_session_cache_api.xml index b85d8fb284..1b41eae89d 100644 --- a/lib/ssl/doc/src/ssl_session_cache_api.xml +++ b/lib/ssl/doc/src/ssl_session_cache_api.xml @@ -11,7 +11,7 @@ Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at - + http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software @@ -62,8 +62,8 @@ </taglist> </section> - - <funcs> + + <funcs> <func> <name>delete(Cache, Key) -> _</name> @@ -134,7 +134,7 @@ </p> </desc> </func> - + <func> <name>select_session(Cache, PartialKey) -> [session()]</name> <fsummary>Selects sessions that can be reused.</fsummary> @@ -151,6 +151,21 @@ </func> <func> + <name>size(Cache) -> integer()</name> + <fsummary>Returns the number of sessions in the cache.</fsummary> + <type> + <v>Cache = cache_ref()</v> + </type> + <desc> + <p>Returns the number of sessions in the cache. If size + exceeds the maximum number of sessions, the current cache + entries will be invalidated regardless of their remaining + lifetime. Is to be callable from any process. + </p> + </desc> + </func> + + <func> <name>terminate(Cache) -> _</name> <fsummary>Called by the process that handles the cache when it is about to terminate.</fsummary> @@ -178,7 +193,7 @@ </p> </desc> </func> - - </funcs> - + + </funcs> + </erlref> diff --git a/lib/ssl/src/dtls_connection.erl b/lib/ssl/src/dtls_connection.erl index 070a90d481..f607c86ae3 100644 --- a/lib/ssl/src/dtls_connection.erl +++ b/lib/ssl/src/dtls_connection.erl @@ -39,7 +39,7 @@ -export([start_fsm/8, start_link/7, init/1]). %% State transition handling --export([next_record/1, next_event/3]). +-export([next_record/1, next_event/3, next_event/4]). %% Handshake handling -export([renegotiate/2, @@ -53,7 +53,7 @@ %% Data handling -export([encode_data/3, passive_receive/2, next_record_if_active/1, handle_common_event/4, - send/3]). + send/3, socket/5]). %% gen_statem state functions -export([init/3, error/3, downgrade/3, %% Initiation and take down states @@ -77,20 +77,6 @@ start_fsm(Role, Host, Port, Socket, {#ssl_options{erl_dist = false},_, Tracker} catch error:{badmatch, {error, _} = Error} -> Error - end; - -start_fsm(Role, Host, Port, Socket, {#ssl_options{erl_dist = true},_, Tracker} = Opts, - User, {CbModule, _,_, _} = CbInfo, - Timeout) -> - try - {ok, Pid} = dtls_connection_sup:start_child_dist([Role, Host, Port, Socket, - Opts, User, CbInfo]), - {ok, SslSocket} = ssl_connection:socket_control(?MODULE, Socket, Pid, CbModule, Tracker), - ok = ssl_connection:handshake(SslSocket, Timeout), - {ok, SslSocket} - catch - error:{badmatch, {error, _} = Error} -> - Error end. 
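The ssl_session_cache_api addition above documents a new size/1 callback that must be callable from any process. For a cache backed by an ETS table, the sketch below is probably all that is needed; the Cache argument is assumed to be the table handle returned by the callback module's init function.

%% In a session cache callback module (sketch):
-compile({no_auto_import, [size/1]}).  %% size/1 clashes with the auto-imported BIF

%% Number of sessions currently stored in the ETS-backed cache.
size(Cache) ->
    ets:info(Cache, size).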
send_handshake(Handshake, #state{connection_states = ConnectionStates} = States) -> @@ -201,6 +187,7 @@ reinit_handshake_data(#state{protocol_buffers = Buffers} = State) -> State#state{premaster_secret = undefined, public_key_info = undefined, tls_handshake_history = ssl_handshake:init_handshake_history(), + flight_state = {retransmit, ?INITIAL_RETRANSMIT_TIMEOUT}, protocol_buffers = Buffers#protocol_buffers{ dtls_handshake_next_seq = 0, @@ -213,6 +200,9 @@ select_sni_extension(#client_hello{extensions = HelloExtensions}) -> select_sni_extension(_) -> undefined. +socket(Pid, Transport, Socket, Connection, _) -> + dtls_socket:socket(Pid, Transport, Socket, Connection). + %%==================================================================== %% tls_connection_sup API %%==================================================================== @@ -243,7 +233,7 @@ callback_mode() -> state_functions. %%-------------------------------------------------------------------- -%% State functionsconnection/2 +%% State functions %%-------------------------------------------------------------------- init({call, From}, {start, Timeout}, @@ -262,17 +252,19 @@ init({call, From}, {start, Timeout}, Version = Hello#client_hello.client_version, HelloVersion = dtls_record:lowest_protocol_version(SslOpts#ssl_options.versions), State1 = prepare_flight(State0#state{negotiated_version = Version}), - State2 = send_handshake(Hello, State1#state{negotiated_version = HelloVersion}), + {State2, Actions} = send_handshake(Hello, State1#state{negotiated_version = HelloVersion}), State3 = State2#state{negotiated_version = Version, %% Requested version session = Session0#session{session_id = Hello#client_hello.session_id}, start_or_recv_from = From, - timer = Timer}, + timer = Timer, + flight_state = {retransmit, ?INITIAL_RETRANSMIT_TIMEOUT} + }, {Record, State} = next_record(State3), - next_event(hello, Record, State); + next_event(hello, Record, State, Actions); init({call, _} = Type, Event, #state{role = server, transport_cb = gen_udp} = State) -> ssl_connection:init(Type, Event, - State#state{flight_state = {waiting, undefined, ?INITIAL_RETRANSMIT_TIMEOUT}}, + State#state{flight_state = {retransmit, ?INITIAL_RETRANSMIT_TIMEOUT}}, ?MODULE); init({call, _} = Type, Event, #state{role = server} = State) -> %% I.E. 
DTLS over sctp @@ -302,9 +294,9 @@ hello(internal, #client_hello{cookie = <<>>, Cookie = dtls_handshake:cookie(<<"secret">>, IP, Port, Hello), VerifyRequest = dtls_handshake:hello_verify_request(Cookie, Version), State1 = prepare_flight(State0#state{negotiated_version = Version}), - State2 = send_handshake(VerifyRequest, State1), + {State2, Actions} = send_handshake(VerifyRequest, State1), {Record, State} = next_record(State2), - next_event(hello, Record, State#state{tls_handshake_history = ssl_handshake:init_handshake_history()}); + next_event(hello, Record, State#state{tls_handshake_history = ssl_handshake:init_handshake_history()}, Actions); hello(internal, #client_hello{cookie = Cookie} = Hello, #state{role = server, transport_cb = Transport, socket = Socket} = State0) -> @@ -333,13 +325,13 @@ hello(internal, #hello_verify_request{cookie = Cookie}, #state{role = client, Cache, CacheCb, Renegotiation, OwnCert), Version = Hello#client_hello.client_version, HelloVersion = dtls_record:lowest_protocol_version(SslOpts#ssl_options.versions), - State2 = send_handshake(Hello, State1#state{negotiated_version = HelloVersion}), + {State2, Actions} = send_handshake(Hello, State1#state{negotiated_version = HelloVersion}), State3 = State2#state{negotiated_version = Version, %% Requested version session = Session0#session{session_id = Hello#client_hello.session_id}}, {Record, State} = next_record(State3), - next_event(hello, Record, State); + next_event(hello, Record, State, Actions); hello(internal, #server_hello{} = Hello, #state{connection_states = ConnectionStates0, negotiated_version = ReqVersion, @@ -356,13 +348,13 @@ hello(internal, #server_hello{} = Hello, hello(internal, {handshake, {#client_hello{cookie = <<>>} = Handshake, _}}, State) -> %% Initial hello should not be in handshake history {next_state, hello, State, [{next_event, internal, Handshake}]}; - hello(internal, {handshake, {#hello_verify_request{} = Handshake, _}}, State) -> %% hello_verify should not be in handshake history {next_state, hello, State, [{next_event, internal, Handshake}]}; - hello(info, Event, State) -> handle_info(Event, hello, State); +hello(state_timeout, Event, State) -> + handle_state_timeout(Event, hello, State); hello(Type, Event, State) -> ssl_connection:hello(Type, Event, State, ?MODULE). @@ -375,7 +367,11 @@ abbreviated(internal = Type, ConnectionStates = dtls_record:next_epoch(ConnectionStates1, read), ssl_connection:abbreviated(Type, Event, State#state{connection_states = ConnectionStates}, ?MODULE); abbreviated(internal = Type, #finished{} = Event, #state{connection_states = ConnectionStates} = State) -> - ssl_connection:cipher(Type, Event, prepare_flight(State#state{connection_states = ConnectionStates}), ?MODULE); + ssl_connection:abbreviated(Type, Event, + prepare_flight(State#state{connection_states = ConnectionStates, + flight_state = connection}), ?MODULE); +abbreviated(state_timeout, Event, State) -> + handle_state_timeout(Event, abbreviated, State); abbreviated(Type, Event, State) -> ssl_connection:abbreviated(Type, Event, State, ?MODULE). @@ -383,6 +379,8 @@ certify(info, Event, State) -> handle_info(Event, certify, State); certify(internal = Type, #server_hello_done{} = Event, State) -> ssl_connection:certify(Type, Event, prepare_flight(State), ?MODULE); +certify(state_timeout, Event, State) -> + handle_state_timeout(Event, certify, State); certify(Type, Event, State) -> ssl_connection:certify(Type, Event, State, ?MODULE). 
@@ -395,7 +393,11 @@ cipher(internal = Type, #change_cipher_spec{type = <<1>>} = Event, ssl_connection:cipher(Type, Event, State#state{connection_states = ConnectionStates}, ?MODULE); cipher(internal = Type, #finished{} = Event, #state{connection_states = ConnectionStates} = State) -> ssl_connection:cipher(Type, Event, - prepare_flight(State#state{connection_states = ConnectionStates}), ?MODULE); + prepare_flight(State#state{connection_states = ConnectionStates, + flight_state = connection}), + ?MODULE); +cipher(state_timeout, Event, State) -> + handle_state_timeout(Event, cipher, State); cipher(Type, Event, State) -> ssl_connection:cipher(Type, Event, State, ?MODULE). @@ -409,12 +411,12 @@ connection(internal, #hello_request{}, #state{host = Host, port = Port, renegotiation = {Renegotiation, _}} = State0) -> Hello = dtls_handshake:client_hello(Host, Port, ConnectionStates0, SslOpts, Cache, CacheCb, Renegotiation, Cert), - State1 = send_handshake(Hello, State0), + {State1, Actions} = send_handshake(Hello, State0), {Record, State} = next_record( State1#state{session = Session0#session{session_id = Hello#client_hello.session_id}}), - next_event(hello, Record, State); + next_event(hello, Record, State, Actions); connection(internal, #client_hello{} = Hello, #state{role = server, allow_renegotiate = true} = State) -> %% Mitigate Computational DoS attack %% http://www.educatedguesswork.org/2011/10/ssltls_and_computational_dos.html @@ -434,7 +436,6 @@ connection(Type, Event, State) -> downgrade(Type, Event, State) -> ssl_connection:downgrade(Type, Event, State, ?MODULE). - %%-------------------------------------------------------------------- %% Description: This function is called by a gen_fsm when it receives any %% other message than a synchronous or asynchronous event @@ -442,16 +443,6 @@ downgrade(Type, Event, State) -> %%-------------------------------------------------------------------- %% raw data from socket, unpack records -handle_info({_,flight_retransmission_timeout}, connection, _) -> - {next_state, keep_state_and_data}; -handle_info({Ref, flight_retransmission_timeout}, StateName, - #state{flight_state = {waiting, Ref, NextTimeout}} = State0) -> - State1 = send_handshake_flight(State0#state{flight_state = {retransmit_timer, NextTimeout}}, - retransmit_epoch(StateName, State0)), - {Record, State} = next_record(State1), - next_event(StateName, Record, State); -handle_info({_, flight_retransmission_timeout}, _, _) -> - {next_state, keep_state_and_data}; handle_info({Protocol, _, _, _, Data}, StateName, #state{data_tag = Protocol} = State0) -> case next_dtls_record(Data, State0) of @@ -489,7 +480,6 @@ handle_call(Event, From, StateName, State) -> handle_common_event(internal, #alert{} = Alert, StateName, #state{negotiated_version = Version} = State) -> ssl_connection:handle_own_alert(Alert, Version, StateName, State); - %%% DTLS record protocol level handshake messages handle_common_event(internal, #ssl_tls{type = ?HANDSHAKE, fragment = Data}, @@ -498,19 +488,14 @@ handle_common_event(internal, #ssl_tls{type = ?HANDSHAKE, negotiated_version = Version} = State0) -> try case dtls_handshake:get_dtls_handshake(Version, Data, Buffers0) of - {more_data, Buffers} -> + {[], Buffers} -> {Record, State} = next_record(State0#state{protocol_buffers = Buffers}), next_event(StateName, Record, State); {Packets, Buffers} -> State = State0#state{protocol_buffers = Buffers}, Events = dtls_handshake_events(Packets), - case StateName of - connection -> - ssl_connection:hibernate_after(StateName, State, 
Events); - _ -> - {next_state, StateName, - State#state{unprocessed_handshake_events = unprocessed_events(Events)}, Events} - end + {next_state, StateName, + State#state{unprocessed_handshake_events = unprocessed_events(Events)}, Events} end catch throw:#alert{} = Alert -> ssl_connection:handle_own_alert(Alert, Version, StateName, State0) @@ -534,6 +519,13 @@ handle_common_event(internal, #ssl_tls{type = ?ALERT, fragment = EncAlerts}, Sta handle_common_event(internal, #ssl_tls{type = _Unknown}, StateName, State) -> {next_state, StateName, State}. +handle_state_timeout(flight_retransmission_timeout, StateName, + #state{flight_state = {retransmit, NextTimeout}} = State0) -> + {State1, Actions} = send_handshake_flight(State0#state{flight_state = {retransmit, NextTimeout}}, + retransmit_epoch(StateName, State0)), + {Record, State} = next_record(State1), + next_event(StateName, Record, State, Actions). + send(Transport, {_, {{_,_}, _} = Socket}, Data) -> send(Transport, Socket, Data); send(Transport, Socket, Data) -> @@ -645,7 +637,8 @@ initial_state(Role, Host, Port, Socket, {SSLOptions, SocketOptions, _}, User, allow_renegotiate = SSLOptions#ssl_options.client_renegotiation, start_or_recv_from = undefined, protocol_cb = ?MODULE, - flight_buffer = new_flight() + flight_buffer = new_flight(), + flight_state = {retransmit, ?INITIAL_RETRANSMIT_TIMEOUT} }. next_dtls_record(Data, #state{protocol_buffers = #protocol_buffers{ @@ -714,14 +707,14 @@ next_event(connection = StateName, no_record, #state{connection_states = #{current_read := #{epoch := CurrentEpoch}}} = State0, Actions) -> case next_record_if_active(State0) of {no_record, State} -> - ssl_connection:hibernate_after(StateName, State, Actions); + ssl_connection:hibernate_after(StateName, State, Actions); {#ssl_tls{epoch = CurrentEpoch} = Record, State} -> {next_state, StateName, State, [{next_event, internal, {protocol_record, Record}} | Actions]}; {#ssl_tls{epoch = Epoch, type = ?HANDSHAKE, version = _Version}, State1} = _Record when Epoch == CurrentEpoch-1 -> - State = send_handshake_flight(State1, Epoch), - {next_state, StateName, State, Actions}; + {State, MoreActions} = send_handshake_flight(State1, Epoch), + {next_state, StateName, State, Actions ++ MoreActions}; {#ssl_tls{epoch = _Epoch, version = _Version}, State} -> %% TODO maybe buffer later epoch @@ -772,17 +765,20 @@ next_flight(Flight) -> Flight#{handshakes => [], change_cipher_spec => undefined, handshakes_after_change_cipher_spec => []}. - start_flight(#state{transport_cb = gen_udp, - flight_state = {retransmit_timer, Timeout}} = State) -> - Ref = erlang:make_ref(), - _ = erlang:send_after(Timeout, self(), {Ref, flight_retransmission_timeout}), - State#state{flight_state = {waiting, Ref, new_timeout(Timeout)}}; - + flight_state = {retransmit, Timeout}} = State) -> + start_retransmision_timer(Timeout, State); +start_flight(#state{transport_cb = gen_udp, + flight_state = connection} = State) -> + {State, []}; start_flight(State) -> %% No retransmision needed i.e DTLS over SCTP - State#state{flight_state = reliable}. + {State#state{flight_state = reliable}, []}. + +start_retransmision_timer(Timeout, State) -> + {State#state{flight_state = {retransmit, new_timeout(Timeout)}}, + [{state_timeout, Timeout, flight_retransmission_timeout}]}. 
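The handle_state_timeout/3 and start_retransmision_timer/2 clauses above replace the old erlang:send_after based flight retransmission with gen_statem state timeouts. A stripped-down sketch of the same pattern in isolation; the module name, the 1000 ms start value and resend_flight/0 are illustrative assumptions, not the actual ?INITIAL_RETRANSMIT_TIMEOUT or code from the diff:

-module(retransmit_sketch).
-behaviour(gen_statem).
-export([start_link/0, callback_mode/0, init/1, waiting/3]).

start_link() ->
    gen_statem:start_link(?MODULE, [], []).

callback_mode() -> state_functions.

init([]) ->
    Timeout = 1000,
    %% Arm a state timeout when the first flight goes out.
    {ok, waiting, #{timeout => Timeout},
     [{state_timeout, Timeout, flight_retransmission_timeout}]}.

waiting(state_timeout, flight_retransmission_timeout, #{timeout := T} = Data) ->
    resend_flight(),   %% stub standing in for sending the handshake flight again
    NewT = T * 2,      %% back off, in the spirit of new_timeout/1 below
    {keep_state, Data#{timeout := NewT},
     [{state_timeout, NewT, flight_retransmission_timeout}]};
waiting(cast, flight_acknowledged, Data) ->
    %% Leaving the state (or stopping) cancels any pending state_timeout.
    {stop, normal, Data}.

resend_flight() -> ok.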
new_timeout(N) when N =< 30 -> N * 2; @@ -806,13 +802,13 @@ renegotiate(#state{role = server, connection_states = CS0} = State0, Actions) -> HelloRequest = ssl_handshake:hello_request(), CS = CS0#{write_msg_seq => 0}, - State1 = send_handshake(HelloRequest, - State0#state{connection_states = - CS}), + {State1, MoreActions} = send_handshake(HelloRequest, + State0#state{connection_states = + CS}), Hs0 = ssl_handshake:init_handshake_history(), {Record, State} = next_record(State1#state{tls_handshake_history = Hs0, protocol_buffers = #protocol_buffers{}}), - next_event(hello, Record, State, Actions). + next_event(hello, Record, State, Actions ++ MoreActions). handle_alerts([], Result) -> Result; @@ -823,15 +819,11 @@ handle_alerts([Alert | Alerts], {next_state, StateName, State}) -> handle_alerts([Alert | Alerts], {next_state, StateName, State, _Actions}) -> handle_alerts(Alerts, ssl_connection:handle_alert(Alert, StateName, State)). -retransmit_epoch(StateName, #state{connection_states = ConnectionStates}) -> +retransmit_epoch(_StateName, #state{connection_states = ConnectionStates}) -> #{epoch := Epoch} = ssl_record:current_connection_state(ConnectionStates, write), - case StateName of - connection -> - Epoch-1; - _ -> - Epoch - end. + Epoch. + update_handshake_history(#hello_verify_request{}, _, Hist) -> Hist; @@ -846,3 +838,4 @@ unprocessed_events(Events) -> %% handshake events left to process before we should %% process more TLS-records received on the socket. erlang:length(Events)-1. + diff --git a/lib/ssl/src/dtls_handshake.erl b/lib/ssl/src/dtls_handshake.erl index af3708ddb7..fd1f9698fe 100644 --- a/lib/ssl/src/dtls_handshake.erl +++ b/lib/ssl/src/dtls_handshake.erl @@ -136,9 +136,11 @@ handshake_bin([Type, Length, Data], Seq) -> %%-------------------------------------------------------------------- -spec get_dtls_handshake(dtls_record:dtls_version(), binary(), #protocol_buffers{}) -> - {[{dtls_handshake(), binary()}], #protocol_buffers{}} | {more_data, #protocol_buffers{}}. + {[dtls_handshake()], #protocol_buffers{}}. %% -%% Description: ... +%% Description: Given buffered and new data from dtls_record, collects +%% and returns it as a list of handshake messages, also returns +%% possible leftover data in the new "protocol_buffers". %%-------------------------------------------------------------------- get_dtls_handshake(Version, Fragment, ProtocolBuffers) -> handle_fragments(Version, Fragment, ProtocolBuffers, []). @@ -288,8 +290,6 @@ do_handle_fragments(_, [], Buffers, Acc) -> {lists:reverse(Acc), Buffers}; do_handle_fragments(Version, [Fragment | Fragments], Buffers0, Acc) -> case reassemble(Version, Fragment, Buffers0) of - {more_data, _} = More when Acc == []-> - More; {more_data, Buffers} when Fragments == [] -> {lists:reverse(Acc), Buffers}; {more_data, Buffers} -> diff --git a/lib/ssl/src/dtls_record.erl b/lib/ssl/src/dtls_record.erl index f447897d59..0ee51c24b6 100644 --- a/lib/ssl/src/dtls_record.erl +++ b/lib/ssl/src/dtls_record.erl @@ -393,7 +393,7 @@ init_connection_state_seq(_, ConnnectionStates) -> integer(). %% %% Description: Returns the epoch the connection_state record -%% that is currently defined as the current conection state. +%% that is currently defined as the current connection state. 
%%-------------------------------------------------------------------- current_connection_state_epoch(#{current_read := #{epoch := Epoch}}, read) -> diff --git a/lib/ssl/src/dtls_socket.erl b/lib/ssl/src/dtls_socket.erl index 570b3ae83a..ac1a7b37c6 100644 --- a/lib/ssl/src/dtls_socket.erl +++ b/lib/ssl/src/dtls_socket.erl @@ -71,11 +71,14 @@ connect(Address, Port, #config{transport_info = {Transport, _, _, _} = CbInfo, close(gen_udp, {_Client, _Socket}) -> ok. +socket(Pid, gen_udp = Transport, {{_, _}, Socket}, ConnectionCb) -> + #sslsocket{pid = Pid, + %% "The name "fd" is kept for backwards compatibility + fd = {Transport, Socket, ConnectionCb}}; socket(Pid, Transport, Socket, ConnectionCb) -> #sslsocket{pid = Pid, %% "The name "fd" is kept for backwards compatibility - fd = {Transport, Socket, ConnectionCb}}. - + fd = {Transport, Socket, ConnectionCb}}. %% What to do with emulated socket options setopts(gen_udp, #sslsocket{pid = {Socket, _}}, Options) -> {SockOpts, _} = tls_socket:split_options(Options), @@ -108,11 +111,15 @@ getstat(gen_udp, {_,Socket}, Options) -> inet:getstat(Socket, Options); getstat(Transport, Socket, Options) -> Transport:getstat(Socket, Options). +peername(udp, _) -> + {error, enotconn}; peername(gen_udp, {_, {Client, _Socket}}) -> {ok, Client}; peername(Transport, Socket) -> Transport:peername(Socket). -sockname(gen_udp, {_,Socket}) -> +sockname(gen_udp, {_, {_,Socket}}) -> + inet:sockname(Socket); +sockname(gen_udp, Socket) -> inet:sockname(Socket); sockname(Transport, Socket) -> Transport:sockname(Socket). diff --git a/lib/ssl/src/dtls_udp_listener.erl b/lib/ssl/src/dtls_udp_listener.erl index b7f115582e..ab3d0783bd 100644 --- a/lib/ssl/src/dtls_udp_listener.erl +++ b/lib/ssl/src/dtls_udp_listener.erl @@ -24,7 +24,8 @@ -behaviour(gen_server). %% API --export([start_link/4, active_once/3, accept/2, sockname/1]). +-export([start_link/4, active_once/3, accept/2, sockname/1, close/1, + get_all_opts/1]). %% gen_server callbacks -export([init/1, handle_call/3, handle_cast/2, handle_info/2, @@ -39,7 +40,8 @@ clients = set_new(), dtls_processes = kv_new(), accepters = queue:new(), - first + first, + close }). %%%=================================================================== @@ -53,10 +55,14 @@ active_once(UDPConnection, Client, Pid) -> gen_server:cast(UDPConnection, {active_once, Client, Pid}). accept(UDPConnection, Accepter) -> - gen_server:call(UDPConnection, {accept, Accepter}, infinity). + call(UDPConnection, {accept, Accepter}). sockname(UDPConnection) -> - gen_server:call(UDPConnection, sockname, infinity). + call(UDPConnection, sockname). +close(UDPConnection) -> + call(UDPConnection, close). +get_all_opts(UDPConnection) -> + call(UDPConnection, get_all_opts). %%%=================================================================== %%% gen_server callbacks @@ -69,10 +75,13 @@ init([Port, EmOpts, InetOptions, DTLSOptions]) -> first = true, dtls_options = DTLSOptions, emulated_options = EmOpts, - listner = Socket}} + listner = Socket, + close = false}} catch _:_ -> {error, closed} end. +handle_call({accept, _}, _, #state{close = true} = State) -> + {reply, {error, closed}, State}; handle_call({accept, Accepter}, From, #state{first = true, accepters = Accepters, @@ -87,7 +96,21 @@ handle_call({accept, Accepter}, From, #state{accepters = Accepters} = State0) -> {noreply, State}; handle_call(sockname, _, #state{listner = Socket} = State) -> Reply = inet:sockname(Socket),
+ {reply, Reply, State}; +handle_call(close, _, #state{dtls_processes = Processes, + accepters = Accepters} = State) -> + case kv_empty(Processes) of + true -> + {stop, normal, ok, State#state{close=true}}; + false -> + lists:foreach(fun({_, From}) -> + gen_server:reply(From, {error, closed}) + end, queue:to_list(Accepters)), + {reply, ok, State#state{close = true, accepters = queue:new()}} + end; +handle_call(get_all_opts, _, #state{dtls_options = DTLSOptions, + emulated_options = EmOpts} = State) -> + {reply, {ok, EmOpts, DTLSOptions}, State}. handle_cast({active_once, Client, Pid}, State0) -> State = handle_active_once(Client, Pid, State0), @@ -99,11 +122,17 @@ handle_info({udp, Socket, IP, InPortNo, _} = Msg, #state{listner = Socket} = Sta {noreply, State}; handle_info({'DOWN', _, process, Pid, _}, #state{clients = Clients, - dtls_processes = Processes0} = State) -> + dtls_processes = Processes0, + close = ListenClosed} = State) -> Client = kv_get(Pid, Processes0), Processes = kv_delete(Pid, Processes0), - {noreply, State#state{clients = set_delete(Client, Clients), - dtls_processes = Processes}}. + case ListenClosed andalso kv_empty(Processes) of + true -> + {stop, normal, State}; + false -> + {noreply, State#state{clients = set_delete(Client, Clients), + dtls_processes = Processes}} + end. terminate(_Reason, _State) -> ok. @@ -182,6 +211,7 @@ setup_new_connection(User, From, Client, Msg, #state{dtls_processes = Processes, gen_server:reply(From, {error, Reason}), State end. + kv_update(Key, Value, Store) -> gb_trees:update(Key, Value, Store). kv_lookup(Key, Store) -> @@ -194,6 +224,8 @@ kv_delete(Key, Store) -> gb_trees:delete(Key, Store). kv_new() -> gb_trees:empty(). +kv_empty(Store) -> + gb_trees:is_empty(Store). set_new() -> gb_sets:empty(). @@ -203,3 +235,15 @@ set_delete(Item, Set) -> gb_sets:delete(Item, Set). set_is_member(Item, Set) -> gb_sets:is_member(Item, Set). + +call(Server, Msg) -> + try + gen_server:call(Server, Msg, infinity) + catch + exit:{noproc, _} -> + {error, closed}; + exit:{normal, _} -> + {error, closed}; + exit:{{shutdown, _},_} -> + {error, closed} + end. diff --git a/lib/ssl/src/dtls_v1.erl b/lib/ssl/src/dtls_v1.erl index ffd3e4b833..dd0d35d404 100644 --- a/lib/ssl/src/dtls_v1.erl +++ b/lib/ssl/src/dtls_v1.erl @@ -21,12 +21,21 @@ -include("ssl_cipher.hrl"). --export([suites/1, mac_hash/7, ecc_curves/1, corresponding_tls_version/1, corresponding_dtls_version/1]). +-export([suites/1, all_suites/1, mac_hash/7, ecc_curves/1, + corresponding_tls_version/1, corresponding_dtls_version/1]). -spec suites(Minor:: 253|255) -> [ssl_cipher:cipher_suite()]. suites(Minor) -> - tls_v1:suites(corresponding_minor_tls_version(Minor)). + lists:filter(fun(Cipher) -> + is_acceptable_cipher(ssl_cipher:suite_definition(Cipher)) + end, + tls_v1:suites(corresponding_minor_tls_version(Minor))). +all_suites(Version) -> + lists:filter(fun(Cipher) -> + is_acceptable_cipher(ssl_cipher:suite_definition(Cipher)) + end, + ssl_cipher:all_suites(corresponding_tls_version(Version))). mac_hash(Version, MacAlg, MacSecret, SeqNo, Type, Length, Fragment) -> tls_v1:mac_hash(MacAlg, MacSecret, SeqNo, Type, Version, @@ -50,3 +59,5 @@ corresponding_minor_dtls_version(2) -> 255; corresponding_minor_dtls_version(3) -> 253. +is_acceptable_cipher(Suite) -> + not ssl_cipher:is_stream_ciphersuite(Suite). 
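The new close/1 and the reworked accept/2 above give the UDP listener well-defined shutdown semantics. A hedged usage sketch; the Listener pid and the surrounding setup are assumed, not shown in the diff:

%% Closing while DTLS connections are still alive only marks the listener as
%% closing; queued accepters are answered at once, and the process itself
%% stops when the last connection's 'DOWN' message arrives.
ok = dtls_udp_listener:close(Listener),
%% Accepts issued after (or pending at) close return {error, closed}; the
%% call/2 wrapper above also maps calls to an already-terminated listener
%% to {error, closed}.
{error, closed} = dtls_udp_listener:accept(Listener, self()).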
diff --git a/lib/ssl/src/ssl.app.src b/lib/ssl/src/ssl.app.src index 148989174d..064dcd6892 100644 --- a/lib/ssl/src/ssl.app.src +++ b/lib/ssl/src/ssl.app.src @@ -63,7 +63,7 @@ {applications, [crypto, public_key, kernel, stdlib]}, {env, []}, {mod, {ssl_app, []}}, - {runtime_dependencies, ["stdlib-3.1","public_key-1.2","kernel-3.0", + {runtime_dependencies, ["stdlib-3.2","public_key-1.2","kernel-3.0", "erts-7.0","crypto-3.3", "inets-5.10.7"]}]}. diff --git a/lib/ssl/src/ssl.appup.src b/lib/ssl/src/ssl.appup.src index 32252386b4..bfdd0c205b 100644 --- a/lib/ssl/src/ssl.appup.src +++ b/lib/ssl/src/ssl.appup.src @@ -1,11 +1,19 @@ %% -*- erlang -*- {"%VSN%", [ - {<<"^8[.]0([.][0-9]+)?$">>, [{restart_application, ssl}]}, - {<<"^[3-7][.][^.].*">>, [{restart_application, ssl}]} + {<<"8\\..*">>, [{restart_application, ssl}]}, + {<<"7\\..*">>, [{restart_application, ssl}]}, + {<<"6\\..*">>, [{restart_application, ssl}]}, + {<<"5\\..*">>, [{restart_application, ssl}]}, + {<<"4\\..*">>, [{restart_application, ssl}]}, + {<<"3\\..*">>, [{restart_application, ssl}]} ], [ - {<<"^8[.]0([.][0-9]+)?$">>, [{restart_application, ssl}]}, - {<<"^[3-7][.][^.].*">>, [{restart_application, ssl}]} - ] + {<<"8\\..*">>, [{restart_application, ssl}]}, + {<<"7\\..*">>, [{restart_application, ssl}]}, + {<<"6\\..*">>, [{restart_application, ssl}]}, + {<<"5\\..*">>, [{restart_application, ssl}]}, + {<<"4\\..*">>, [{restart_application, ssl}]}, + {<<"3\\..*">>, [{restart_application, ssl}]} + ] }. diff --git a/lib/ssl/src/ssl.erl b/lib/ssl/src/ssl.erl index 4a5a7e25ea..45fc29723f 100644 --- a/lib/ssl/src/ssl.erl +++ b/lib/ssl/src/ssl.erl @@ -187,16 +187,24 @@ ssl_accept(ListenSocket, SslOptions) when is_port(ListenSocket) -> ssl_accept(#sslsocket{} = Socket, [], Timeout) when (is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity)-> ssl_accept(Socket, Timeout); -ssl_accept(#sslsocket{fd = {_, _, _, Tracker}} = Socket, SslOpts0, Timeout) when +ssl_accept(#sslsocket{fd = {_, _, _, Tracker}} = Socket, SslOpts, Timeout) when (is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity)-> try - {ok, EmOpts, InheritedSslOpts} = tls_socket:get_all_opts(Tracker), - SslOpts = handle_options(SslOpts0, InheritedSslOpts), + {ok, EmOpts, _} = tls_socket:get_all_opts(Tracker), ssl_connection:handshake(Socket, {SslOpts, tls_socket:emulated_socket_options(EmOpts, #socket_options{})}, Timeout) catch Error = {error, _Reason} -> Error end; +ssl_accept(#sslsocket{pid = Pid, fd = {_, _, _}} = Socket, SslOpts, Timeout) when + (is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity)-> + try + {ok, EmOpts, _} = dtls_udp_listener:get_all_opts(Pid), + ssl_connection:handshake(Socket, {SslOpts, + tls_socket:emulated_socket_options(EmOpts, #socket_options{})}, Timeout) + catch + Error = {error, _Reason} -> Error + end; ssl_accept(Socket, SslOptions, Timeout) when is_port(Socket), (is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity) -> {Transport,_,_,_} = @@ -215,7 +223,6 @@ ssl_accept(Socket, SslOptions, Timeout) when is_port(Socket), catch Error = {error, _Reason} -> Error end. - %%-------------------------------------------------------------------- -spec close(#sslsocket{}) -> term(). 
%% @@ -223,6 +230,8 @@ ssl_accept(Socket, SslOptions, Timeout) when is_port(Socket), %%-------------------------------------------------------------------- close(#sslsocket{pid = Pid}) when is_pid(Pid) -> ssl_connection:close(Pid, {close, ?DEFAULT_TIMEOUT}); +close(#sslsocket{pid = {udp, #config{udp_handler = {Pid, _}}}}) -> + dtls_udp_listener:close(Pid); close(#sslsocket{pid = {ListenSocket, #config{transport_info={Transport,_, _, _}}}}) -> Transport:close(ListenSocket). @@ -251,6 +260,8 @@ send(#sslsocket{pid = Pid}, Data) when is_pid(Pid) -> ssl_connection:send(Pid, Data); send(#sslsocket{pid = {_, #config{transport_info={gen_udp, _, _, _}}}}, _) -> {error,enotconn}; %% Emulate connection behaviour +send(#sslsocket{pid = {udp,_}}, _) -> + {error,enotconn}; send(#sslsocket{pid = {ListenSocket, #config{transport_info={Transport, _, _, _}}}}, Data) -> Transport:send(ListenSocket, Data). %% {error,enotconn} @@ -265,6 +276,8 @@ recv(Socket, Length) -> recv(#sslsocket{pid = Pid}, Length, Timeout) when is_pid(Pid), (is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity)-> ssl_connection:recv(Pid, Length, Timeout); +recv(#sslsocket{pid = {udp,_}}, _, _) -> + {error,enotconn}; recv(#sslsocket{pid = {Listen, #config{transport_info = {Transport, _, _, _}}}}, _,_) when is_port(Listen)-> Transport:recv(Listen, 0). %% {error,enotconn} @@ -277,10 +290,14 @@ recv(#sslsocket{pid = {Listen, %%-------------------------------------------------------------------- controlling_process(#sslsocket{pid = Pid}, NewOwner) when is_pid(Pid), is_pid(NewOwner) -> ssl_connection:new_user(Pid, NewOwner); +controlling_process(#sslsocket{pid = {udp, _}}, + NewOwner) when is_pid(NewOwner) -> + ok; %% Meaningless but let it be allowed to conform with TLS controlling_process(#sslsocket{pid = {Listen, #config{transport_info = {Transport, _, _, _}}}}, NewOwner) when is_port(Listen), is_pid(NewOwner) -> + %% Meaningless but let it be allowed to conform with normal sockets Transport:controlling_process(Listen, NewOwner). @@ -297,7 +314,9 @@ connection_information(#sslsocket{pid = Pid}) when is_pid(Pid) -> Error end; connection_information(#sslsocket{pid = {Listen, _}}) when is_port(Listen) -> - {error, enotconn}. + {error, enotconn}; +connection_information(#sslsocket{pid = {udp,_}}) -> + {error,enotconn}. %%-------------------------------------------------------------------- -spec connection_information(#sslsocket{}, [atom()]) -> {ok, list()} | {error, reason()}. @@ -333,10 +352,18 @@ connection_info(#sslsocket{} = SSLSocket) -> %% %% Description: same as inet:peername/1. %%-------------------------------------------------------------------- +peername(#sslsocket{pid = Pid, fd = {Transport, Socket, _}}) when is_pid(Pid)-> + dtls_socket:peername(Transport, Socket); peername(#sslsocket{pid = Pid, fd = {Transport, Socket, _, _}}) when is_pid(Pid)-> tls_socket:peername(Transport, Socket); +peername(#sslsocket{pid = {udp = Transport, #config{udp_handler = {_Pid, _}}}}) -> + dtls_socket:peername(Transport, undefined); +peername(#sslsocket{pid = Pid, fd = {gen_udp= Transport, Socket, _, _}}) when is_pid(Pid) -> + dtls_socket:peername(Transport, Socket); peername(#sslsocket{pid = {ListenSocket, #config{transport_info = {Transport,_,_,_}}}}) -> - tls_socket:peername(Transport, ListenSocket). %% Will return {error, enotconn} + tls_socket:peername(Transport, ListenSocket); %% Will return {error, enotconn} +peername(#sslsocket{pid = {udp,_}}) -> + {error,enotconn}. 
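The added {udp, _} clauses above make DTLS listen sockets mimic TLS listen sockets for operations that require a connected socket. A usage sketch, assuming a DTLS listen socket obtained through the listen path shown elsewhere in this diff (port and options are illustrative):

{ok, L} = ssl:listen(4433, [{protocol, dtls}]),
{error, enotconn} = ssl:send(L, <<"data">>),
{error, enotconn} = ssl:recv(L, 0),
{error, enotconn} = ssl:peername(L),
{error, enotconn} = ssl:connection_information(L).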
%%-------------------------------------------------------------------- -spec peercert(#sslsocket{}) ->{ok, DerCert::binary()} | {error, reason()}. @@ -350,6 +377,8 @@ peercert(#sslsocket{pid = Pid}) when is_pid(Pid) -> Result -> Result end; +peercert(#sslsocket{pid = {udp, _}}) -> + {error, enotconn}; peercert(#sslsocket{pid = {Listen, _}}) when is_port(Listen) -> {error, enotconn}. @@ -506,6 +535,8 @@ getstat(#sslsocket{pid = Pid, fd = {Transport, Socket, _, _}}, Options) when is_ shutdown(#sslsocket{pid = {Listen, #config{transport_info = {Transport,_, _, _}}}}, How) when is_port(Listen) -> Transport:shutdown(Listen, How); +shutdown(#sslsocket{pid = {udp,_}},_) -> + {error, enotconn}; shutdown(#sslsocket{pid = Pid}, How) -> ssl_connection:shutdown(Pid, How). @@ -518,7 +549,7 @@ sockname(#sslsocket{pid = {Listen, #config{transport_info = {Transport, _, _, _ tls_socket:sockname(Transport, Listen); sockname(#sslsocket{pid = {udp, #config{udp_handler = {Pid, _}}}}) -> dtls_udp_listener:sockname(Pid); -sockname(#sslsocket{pid = Pid, fd = {gen_udp= Transport, Socket, _, _}}) when is_pid(Pid) -> +sockname(#sslsocket{pid = Pid, fd = {Transport, Socket, _}}) when is_pid(Pid) -> dtls_socket:sockname(Transport, Socket); sockname(#sslsocket{pid = Pid, fd = {Transport, Socket, _, _}}) when is_pid(Pid) -> tls_socket:sockname(Transport, Socket). @@ -531,6 +562,8 @@ sockname(#sslsocket{pid = Pid, fd = {Transport, Socket, _, _}}) when is_pid(Pid) %%-------------------------------------------------------------------- session_info(#sslsocket{pid = Pid}) when is_pid(Pid) -> ssl_connection:session_info(Pid); +session_info(#sslsocket{pid = {udp,_}}) -> + {error, enotconn}; session_info(#sslsocket{pid = {Listen,_}}) when is_port(Listen) -> {error, enotconn}. @@ -555,6 +588,8 @@ versions() -> %%-------------------------------------------------------------------- renegotiate(#sslsocket{pid = Pid}) when is_pid(Pid) -> ssl_connection:renegotiation(Pid); +renegotiate(#sslsocket{pid = {udp,_}}) -> + {error, enotconn}; renegotiate(#sslsocket{pid = {Listen,_}}) when is_port(Listen) -> {error, enotconn}. @@ -568,6 +603,8 @@ renegotiate(#sslsocket{pid = {Listen,_}}) when is_port(Listen) -> prf(#sslsocket{pid = Pid}, Secret, Label, Seed, WantedLength) when is_pid(Pid) -> ssl_connection:prf(Pid, Secret, Label, Seed, WantedLength); +prf(#sslsocket{pid = {udp,_}}, _,_,_,_) -> + {error, enotconn}; prf(#sslsocket{pid = {Listen,_}}, _,_,_,_) when is_port(Listen) -> {error, enotconn}. @@ -696,7 +733,7 @@ handle_options(Opts0, Role) -> [RecordCb:protocol_version(Vsn) || Vsn <- Vsns] end, - Protocol = proplists:get_value(protocol, Opts, tls), + Protocol = handle_option(protocol, Opts, tls), SSLOptions = #ssl_options{ versions = Versions, @@ -755,7 +792,7 @@ handle_options(Opts0, Role) -> honor_ecc_order = handle_option(honor_ecc_order, Opts, default_option_role(server, false, Role), server, Role), - protocol = Protocol, + protocol = Protocol, padding_check = proplists:get_value(padding_check, Opts, true), beast_mitigation = handle_option(beast_mitigation, Opts, one_n_minus_one), fallback = handle_option(fallback, Opts, @@ -1032,6 +1069,10 @@ validate_option(v2_hello_compatible, Value) when is_boolean(Value) -> Value; validate_option(max_handshake_size, Value) when is_integer(Value) andalso Value =< ?MAX_UNIT24 -> Value; +validate_option(protocol, Value = tls) -> + Value; +validate_option(protocol, Value = dtls) -> + Value; validate_option(Opt, Value) -> throw({error, {options, {Opt, Value}}}). 
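The new validate_option/2 clauses above mean the protocol option is validated like every other option instead of being read with a bare proplists lookup. A hedged sketch of the expected API-level effect; the host and port are placeholders, and the exact error term is inferred from the {options, {Opt, Value}} convention above:

%% {protocol, tls} and {protocol, dtls} pass validation; any other value is
%% rejected before a connection is attempted.
{error, {options, {protocol, sctp}}} =
    ssl:connect("server.example", 4433, [{protocol, sctp}]).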
@@ -1069,17 +1110,37 @@ validate_binary_list(Opt, List) -> (Bin) -> throw({error, {options, {Opt, {invalid_protocol, Bin}}}}) end, List). - validate_versions([], Versions) -> Versions; validate_versions([Version | Rest], Versions) when Version == 'tlsv1.2'; Version == 'tlsv1.1'; Version == tlsv1; Version == sslv3 -> - validate_versions(Rest, Versions); + tls_validate_versions(Rest, Versions); +validate_versions([Version | Rest], Versions) when Version == 'dtlsv1'; + Version == 'dtlsv1.2'-> + dtls_validate_versions(Rest, Versions); validate_versions([Ver| _], Versions) -> throw({error, {options, {Ver, {versions, Versions}}}}). +tls_validate_versions([], Versions) -> + Versions; +tls_validate_versions([Version | Rest], Versions) when Version == 'tlsv1.2'; + Version == 'tlsv1.1'; + Version == tlsv1; + Version == sslv3 -> + tls_validate_versions(Rest, Versions); +tls_validate_versions([Ver| _], Versions) -> + throw({error, {options, {Ver, {versions, Versions}}}}). + +dtls_validate_versions([], Versions) -> + Versions; +dtls_validate_versions([Version | Rest], Versions) when Version == 'dtlsv1'; + Version == 'dtlsv1.2'-> + dtls_validate_versions(Rest, Versions); +dtls_validate_versions([Ver| _], Versions) -> + throw({error, {options, {Ver, {versions, Versions}}}}). + validate_inet_option(mode, Value) when Value =/= list, Value =/= binary -> throw({error, {options, {mode,Value}}}); @@ -1151,18 +1212,18 @@ handle_cipher_option(Value, Version) when is_list(Value) -> binary_cipher_suites(Version, []) -> %% Defaults to all supported suites that does %% not require explicit configuration - ssl_cipher:filter_suites(ssl_cipher:suites(Version)); + ssl_cipher:filter_suites(ssl_cipher:suites(tls_version(Version))); binary_cipher_suites(Version, [Tuple|_] = Ciphers0) when is_tuple(Tuple) -> Ciphers = [ssl_cipher:suite(C) || C <- Ciphers0], binary_cipher_suites(Version, Ciphers); binary_cipher_suites(Version, [Cipher0 | _] = Ciphers0) when is_binary(Cipher0) -> - All = ssl_cipher:all_suites(Version), + All = ssl_cipher:all_suites(tls_version(Version)), case [Cipher || Cipher <- Ciphers0, lists:member(Cipher, All)] of [] -> %% Defaults to all supported suites that does %% not require explicit configuration - ssl_cipher:filter_suites(ssl_cipher:suites(Version)); + ssl_cipher:filter_suites(ssl_cipher:suites(tls_version(Version))); Ciphers -> Ciphers end; @@ -1175,7 +1236,8 @@ binary_cipher_suites(Version, Ciphers0) -> Ciphers = [ssl_cipher:openssl_suite(C) || C <- string:tokens(Ciphers0, ":")], binary_cipher_suites(Version, Ciphers). -handle_eccs_option(Value, {_Major, Minor}) when is_list(Value) -> +handle_eccs_option(Value, Version) when is_list(Value) -> + {_Major, Minor} = tls_version(Version), try tls_v1:ecc_curves(Minor, Value) of Curves -> #elliptic_curves{elliptic_curve_list = Curves} catch @@ -1348,7 +1410,10 @@ new_ssl_options([{signature_algs, Value} | Rest], #ssl_options{} = Opts, RecordC handle_hashsigns_option(Value, tls_version(RecordCB:highest_protocol_version()))}, RecordCB); - +new_ssl_options([{protocol, dtls = Value} | Rest], #ssl_options{} = Opts, dtls_record = RecordCB) -> + new_ssl_options(Rest, Opts#ssl_options{protocol = Value}, RecordCB); +new_ssl_options([{protocol, tls = Value} | Rest], #ssl_options{} = Opts, tls_record = RecordCB) -> + new_ssl_options(Rest, Opts#ssl_options{protocol = Value}, RecordCB); new_ssl_options([{Key, Value} | _Rest], #ssl_options{}, _) -> throw({error, {options, {Key, Value}}}). 
diff --git a/lib/ssl/src/ssl_cipher.erl b/lib/ssl/src/ssl_cipher.erl index 32fec03b8e..8e6860e9dc 100644 --- a/lib/ssl/src/ssl_cipher.erl +++ b/lib/ssl/src/ssl_cipher.erl @@ -40,7 +40,8 @@ ec_keyed_suites/0, anonymous_suites/1, psk_suites/1, srp_suites/0, rc4_suites/1, des_suites/1, openssl_suite/1, openssl_suite_name/1, filter/2, filter_suites/1, hash_algorithm/1, sign_algorithm/1, is_acceptable_hash/2, is_fallback/1, - random_bytes/1, calc_aad/3, calc_mac_hash/4]). + random_bytes/1, calc_aad/3, calc_mac_hash/4, + is_stream_ciphersuite/1]). -export_type([cipher_suite/0, erl_cipher_suite/0, openssl_cipher_suite/0, @@ -310,18 +311,21 @@ aead_decipher(Type, #cipher_state{key = Key, iv = IV} = CipherState, %%-------------------------------------------------------------------- suites({3, 0}) -> ssl_v3:suites(); -suites({3, N}) -> - tls_v1:suites(N); -suites(Version) -> - suites(dtls_v1:corresponding_tls_version(Version)). +suites({3, Minor}) -> + tls_v1:suites(Minor); +suites({_, Minor}) -> + dtls_v1:suites(Minor). -all_suites(Version) -> +all_suites({3, _} = Version) -> suites(Version) ++ anonymous_suites(Version) ++ psk_suites(Version) ++ srp_suites() ++ rc4_suites(Version) - ++ des_suites(Version). + ++ des_suites(Version); +all_suites(Version) -> + dtls_v1:all_suites(Version). + %%-------------------------------------------------------------------- -spec anonymous_suites(ssl_record:ssl_version() | integer()) -> [cipher_suite()]. %% @@ -1541,6 +1545,10 @@ calc_mac_hash(Type, Version, MacSecret, SeqNo, Type, Length, PlainFragment). +is_stream_ciphersuite({_, rc4_128, _, _}) -> + true; +is_stream_ciphersuite(_) -> + false. %%-------------------------------------------------------------------- %%% Internal functions %%-------------------------------------------------------------------- diff --git a/lib/ssl/src/ssl_config.erl b/lib/ssl/src/ssl_config.erl index 54f83928ee..09d4c3e678 100644 --- a/lib/ssl/src/ssl_config.erl +++ b/lib/ssl/src/ssl_config.erl @@ -32,20 +32,20 @@ init(SslOpts, Role) -> init_manager_name(SslOpts#ssl_options.erl_dist), - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbHandle, OwnCert} + {ok, #{pem_cache := PemCache} = Config} = init_certificates(SslOpts, Role), PrivateKey = - init_private_key(PemCacheHandle, SslOpts#ssl_options.key, SslOpts#ssl_options.keyfile, + init_private_key(PemCache, SslOpts#ssl_options.key, SslOpts#ssl_options.keyfile, SslOpts#ssl_options.password, Role), - DHParams = init_diffie_hellman(PemCacheHandle, SslOpts#ssl_options.dh, SslOpts#ssl_options.dhfile, Role), - {ok, CertDbRef, CertDbHandle, FileRefHandle, CacheHandle, CRLDbHandle, OwnCert, PrivateKey, DHParams}. + DHParams = init_diffie_hellman(PemCache, SslOpts#ssl_options.dh, SslOpts#ssl_options.dhfile, Role), + {ok, Config#{private_key => PrivateKey, dh_params => DHParams}}. init_manager_name(false) -> put(ssl_manager, ssl_manager:name(normal)), - put(ssl_cache, ssl_pem_cache:name(normal)); + put(ssl_pem_cache, ssl_pem_cache:name(normal)); init_manager_name(true) -> put(ssl_manager, ssl_manager:name(dist)), - put(ssl_cache, ssl_pem_cache:name(dist)). + put(ssl_pem_cache, ssl_pem_cache:name(dist)). 
init_certificates(#ssl_options{cacerts = CaCerts, cacertfile = CACertFile, @@ -53,7 +53,7 @@ init_certificates(#ssl_options{cacerts = CaCerts, cert = Cert, crl_cache = CRLCache }, Role) -> - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo} = + {ok, Config} = try Certs = case CaCerts of undefined -> @@ -61,41 +61,37 @@ init_certificates(#ssl_options{cacerts = CaCerts, _ -> {der, CaCerts} end, - {ok, _, _, _, _, _, _} = ssl_manager:connection_init(Certs, Role, CRLCache) + {ok,_} = ssl_manager:connection_init(Certs, Role, CRLCache) catch _:Reason -> file_error(CACertFile, {cacertfile, Reason}) end, - init_certificates(Cert, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, - CacheHandle, CRLDbInfo, CertFile, Role). + init_certificates(Cert, Config, CertFile, Role). -init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, - CRLDbInfo, <<>>, _) -> - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, undefined}; +init_certificates(undefined, Config, <<>>, _) -> + {ok, Config#{own_certificate => undefined}}; -init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, - CacheHandle, CRLDbInfo, CertFile, client) -> +init_certificates(undefined, #{pem_cache := PemCache} = Config, CertFile, client) -> try %% Ignoring potential proxy-certificates see: %% http://dev.globus.org/wiki/Security/ProxyFileFormat - [OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCacheHandle), - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, OwnCert} + [OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCache), + {ok, Config#{own_certificate => OwnCert}} catch _Error:_Reason -> - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, undefined} - end; + {ok, Config#{own_certificate => undefined}} + end; -init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, - PemCacheHandle, CacheRef, CRLDbInfo, CertFile, server) -> +init_certificates(undefined, #{pem_cache := PemCache} = Config, CertFile, server) -> try - [OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCacheHandle), - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, OwnCert} + [OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCache), + {ok, Config#{own_certificate => OwnCert}} catch _:Reason -> file_error(CertFile, {certfile, Reason}) end; -init_certificates(Cert, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, _, _) -> - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, Cert}. - +init_certificates(Cert, Config, _, _) -> + {ok, Config#{own_certificate => Cert}}. 
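With init_certificates/4 now threading a map, ssl_config:init/2 hands its callers a single configuration map instead of a long tuple. A sketch of the shape a caller can match on; the complete key set is visible in the ssl_connection and ssl_manager hunks further down, and the binding names are illustrative:

{ok, #{cert_db_ref       := Ref,
       cert_db_handle    := CertDbHandle,
       fileref_db_handle := FileRefHandle,
       pem_cache         := PemCache,
       session_cache     := CacheHandle,
       crl_db_info       := CRLDbInfo,
       own_certificate   := OwnCert,
       private_key       := Key,
       dh_params         := DHParams}} = ssl_config:init(SslOpts, Role).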
+ init_private_key(_, undefined, <<>>, _Password, _Client) -> undefined; init_private_key(DbHandle, undefined, KeyFile, Password, _) -> diff --git a/lib/ssl/src/ssl_connection.erl b/lib/ssl/src/ssl_connection.erl index 6ed2fc83da..ea139ac4b1 100644 --- a/lib/ssl/src/ssl_connection.erl +++ b/lib/ssl/src/ssl_connection.erl @@ -148,19 +148,19 @@ socket_control(Connection, Socket, Pid, Transport) -> %%-------------------------------------------------------------------- socket_control(Connection, Socket, Pid, Transport, udp_listner) -> %% dtls listner process must have the socket control - {ok, dtls_socket:socket(Pid, Transport, Socket, Connection)}; + {ok, Connection:socket(Pid, Transport, Socket, Connection, undefined)}; socket_control(tls_connection = Connection, Socket, Pid, Transport, ListenTracker) -> case Transport:controlling_process(Socket, Pid) of ok -> - {ok, tls_socket:socket(Pid, Transport, Socket, Connection, ListenTracker)}; + {ok, Connection:socket(Pid, Transport, Socket, Connection, ListenTracker)}; {error, Reason} -> {error, Reason} end; socket_control(dtls_connection = Connection, {_, Socket}, Pid, Transport, ListenTracker) -> case Transport:controlling_process(Socket, Pid) of ok -> - {ok, tls_socket:socket(Pid, Transport, Socket, Connection, ListenTracker)}; + {ok, Connection:socket(Pid, Transport, Socket, Connection, ListenTracker)}; {error, Reason} -> {error, Reason} end. @@ -323,8 +323,14 @@ handle_session(#server_hello{cipher_suite = CipherSuite, -spec ssl_config(#ssl_options{}, client | server, #state{}) -> #state{}. %%-------------------------------------------------------------------- ssl_config(Opts, Role, State) -> - {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, CRLDbInfo, - OwnCert, Key, DHParams} = + {ok, #{cert_db_ref := Ref, + cert_db_handle := CertDbHandle, + fileref_db_handle := FileRefHandle, + session_cache := CacheHandle, + crl_db_info := CRLDbHandle, + private_key := Key, + dh_params := DHParams, + own_certificate := OwnCert}} = ssl_config:init(Opts, Role), Handshake = ssl_handshake:init_handshake_history(), TimeStamp = erlang:monotonic_time(), @@ -335,7 +341,7 @@ ssl_config(Opts, Role, State) -> file_ref_db = FileRefHandle, cert_db_ref = Ref, cert_db = CertDbHandle, - crl_db = CRLDbInfo, + crl_db = CRLDbHandle, session_cache = CacheHandle, private_key = Key, diffie_hellman_params = DHParams, @@ -357,11 +363,13 @@ init({call, From}, {start, Timeout}, State0, Connection) -> timer = Timer}), Connection:next_event(hello, Record, State); init({call, From}, {start, {Opts, EmOpts}, Timeout}, - #state{role = Role} = State0, Connection) -> + #state{role = Role, ssl_options = OrigSSLOptions, + socket_options = SockOpts} = State0, Connection) -> try - State = ssl_config(Opts, Role, State0), + SslOpts = ssl:handle_options(Opts, OrigSSLOptions), + State = ssl_config(SslOpts, Role, State0), init({call, From}, {start, Timeout}, - State#state{ssl_options = Opts, socket_options = EmOpts}, Connection) + State#state{ssl_options = SslOpts, socket_options = new_emulated(EmOpts, SockOpts)}, Connection) catch throw:Error -> {stop_and_reply, normal, {reply, From, {error, Error}}} end; @@ -426,11 +434,11 @@ abbreviated(internal, #finished{verify_data = Data} = Finished, verified -> ConnectionStates1 = ssl_record:set_server_verify_data(current_read, Data, ConnectionStates0), - State1 = + {State1, Actions} = finalize_handshake(State0#state{connection_states = ConnectionStates1}, abbreviated, Connection), {Record, State} = prepare_connection(State1#state{expecting_finished = 
false}, Connection), - Connection:next_event(connection, Record, State); + Connection:next_event(connection, Record, State, Actions); #alert{} = Alert -> handle_own_alert(Alert, Version, abbreviated, State0) end; @@ -850,6 +858,7 @@ handle_common_event(internal, #change_cipher_spec{type = <<1>>}, StateName, StateName, State); handle_common_event(_Type, Msg, StateName, #state{negotiated_version = Version} = State, _) -> + ct:pal("Unexpected msg ~p", [Msg]), Alert = ?ALERT_REC(?FATAL,?UNEXPECTED_MESSAGE), handle_own_alert(Alert, Version, {StateName, Msg}, State). @@ -1011,7 +1020,7 @@ terminate(_, _, #state{terminated = true}) -> %% Happens when user closes the connection using ssl:close/1 %% we want to guarantee that Transport:close has been called %% when ssl:close/1 returns unless it is a downgrade where - %% we want to guarantee that close alert is recived before + %% we want to guarantee that close alert is received before %% returning. In both cases terminate has been run manually %% before run by gen_statem which will end up here ok; @@ -1230,13 +1239,13 @@ new_server_hello(#server_hello{cipher_suite = CipherSuite, negotiated_version = Version} = State0, Connection) -> try server_certify_and_key_exchange(State0, Connection) of #state{} = State1 -> - State2 = server_hello_done(State1, Connection), + {State2, Actions} = server_hello_done(State1, Connection), Session = Session0#session{session_id = SessionId, cipher_suite = CipherSuite, compression_method = Compression}, {Record, State} = Connection:next_record(State2#state{session = Session}), - Connection:next_event(certify, Record, State) + Connection:next_event(certify, Record, State, Actions) catch #alert{} = Alert -> handle_own_alert(Alert, Version, hello, State0) @@ -1251,10 +1260,10 @@ resumed_server_hello(#state{session = Session, {_, ConnectionStates1} -> State1 = State0#state{connection_states = ConnectionStates1, session = Session}, - State2 = + {State2, Actions} = finalize_handshake(State1, abbreviated, Connection), {Record, State} = Connection:next_record(State2), - Connection:next_event(abbreviated, Record, State); + Connection:next_event(abbreviated, Record, State, Actions); #alert{} = Alert -> handle_own_alert(Alert, Version, hello, State0) end. @@ -1337,12 +1346,12 @@ client_certify_and_key_exchange(#state{negotiated_version = Version} = State0, Connection) -> try do_client_certify_and_key_exchange(State0, Connection) of State1 = #state{} -> - State2 = finalize_handshake(State1, certify, Connection), + {State2, Actions} = finalize_handshake(State1, certify, Connection), State3 = State2#state{ %% Reinitialize client_certificate_requested = false}, {Record, State} = Connection:next_record(State3), - Connection:next_event(cipher, Record, State) + Connection:next_event(cipher, Record, State, Actions) catch throw:#alert{} = Alert -> handle_own_alert(Alert, Version, certify, State0) @@ -1864,11 +1873,11 @@ cipher_role(server, Data, Session, #state{connection_states = ConnectionStates0 Connection) -> ConnectionStates1 = ssl_record:set_client_verify_data(current_read, Data, ConnectionStates0), - State1 = + {State1, Actions} = finalize_handshake(State0#state{connection_states = ConnectionStates1, session = Session}, cipher, Connection), {Record, State} = prepare_connection(State1, Connection), - Connection:next_event(connection, Record, State). + Connection:next_event(connection, Record, State, Actions). 
is_anonymous(Algo) when Algo == dh_anon; Algo == ecdh_anon; @@ -2299,7 +2308,7 @@ format_reply(_, _,#socket_options{active = false, mode = Mode, packet = Packet, {ok, do_format_reply(Mode, Packet, Header, Data)}; format_reply(Transport, Socket, #socket_options{active = _, mode = Mode, packet = Packet, header = Header}, Data, Tracker, Connection) -> - {ssl, tls_socket:socket(self(), Transport, Socket, Connection, Tracker), + {ssl, Connection:socket(self(), Transport, Socket, Connection, Tracker), do_format_reply(Mode, Packet, Header, Data)}. deliver_packet_error(Transport, Socket, SO= #socket_options{active = Active}, Data, Pid, From, Tracker, Connection) -> @@ -2308,7 +2317,7 @@ deliver_packet_error(Transport, Socket, SO= #socket_options{active = Active}, Da format_packet_error(_, _,#socket_options{active = false, mode = Mode}, Data, _, _) -> {error, {invalid_packet, do_format_reply(Mode, raw, 0, Data)}}; format_packet_error(Transport, Socket, #socket_options{active = _, mode = Mode}, Data, Tracker, Connection) -> - {ssl_error, tls_socket:socket(self(), Transport, Socket, Connection, Tracker), + {ssl_error, Connection:socket(self(), Transport, Socket, Connection, Tracker), {invalid_packet, do_format_reply(Mode, raw, 0, Data)}}. do_format_reply(binary, _, N, Data) when N > 0 -> % Header mode @@ -2363,11 +2372,11 @@ alert_user(Transport, Tracker, Socket, Active, Pid, From, Alert, Role, Connectio case ssl_alert:reason_code(Alert, Role) of closed -> send_or_reply(Active, Pid, From, - {ssl_closed, tls_socket:socket(self(), + {ssl_closed, Connection:socket(self(), Transport, Socket, Connection, Tracker)}); ReasonCode -> send_or_reply(Active, Pid, From, - {ssl_error, tls_socket:socket(self(), + {ssl_error, Connection:socket(self(), Transport, Socket, Connection, Tracker), ReasonCode}) end. @@ -2428,16 +2437,23 @@ handle_sni_extension(#sni{hostname = Hostname}, State0) -> undefined -> State0; _ -> - {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, CRLDbHandle, OwnCert, Key, DHParams} = - ssl_config:init(NewOptions, State0#state.role), - State0#state{ - session = State0#state.session#session{own_certificate = OwnCert}, - file_ref_db = FileRefHandle, - cert_db_ref = Ref, - cert_db = CertDbHandle, - crl_db = CRLDbHandle, - session_cache = CacheHandle, - private_key = Key, + {ok, #{cert_db_ref := Ref, + cert_db_handle := CertDbHandle, + fileref_db_handle := FileRefHandle, + session_cache := CacheHandle, + crl_db_info := CRLDbHandle, + private_key := Key, + dh_params := DHParams, + own_certificate := OwnCert}} = + ssl_config:init(NewOptions, State0#state.role), + State0#state{ + session = State0#state.session#session{own_certificate = OwnCert}, + file_ref_db = FileRefHandle, + cert_db_ref = Ref, + cert_db = CertDbHandle, + crl_db = CRLDbHandle, + session_cache = CacheHandle, + private_key = Key, diffie_hellman_params = DHParams, ssl_options = NewOptions, sni_hostname = Hostname @@ -2459,3 +2475,8 @@ update_ssl_options_from_sni(OrigSSLOptions, SNIHostname) -> _ -> ssl:handle_options(SSLOption, OrigSSLOptions) end. + +new_emulated([], EmOpts) -> + EmOpts; +new_emulated(NewEmOpts, _) -> + NewEmOpts. diff --git a/lib/ssl/src/ssl_internal.hrl b/lib/ssl/src/ssl_internal.hrl index c34af9f82c..c10ec3a2d6 100644 --- a/lib/ssl/src/ssl_internal.hrl +++ b/lib/ssl/src/ssl_internal.hrl @@ -76,7 +76,7 @@ -define(ALL_SUPPORTED_VERSIONS, ['tlsv1.2', 'tlsv1.1', tlsv1]). -define(MIN_SUPPORTED_VERSIONS, ['tlsv1.1', tlsv1]). -define(ALL_DATAGRAM_SUPPORTED_VERSIONS, ['dtlsv1.2', dtlsv1]). 
--define(MIN_DATAGRAM_SUPPORTED_VERSIONS, ['dtlsv1.2', dtlsv1]). +-define(MIN_DATAGRAM_SUPPORTED_VERSIONS, [dtlsv1]). -define('24H_in_msec', 86400000). -define('24H_in_sec', 86400). diff --git a/lib/ssl/src/ssl_manager.erl b/lib/ssl/src/ssl_manager.erl index 29b15f843f..2b82f18bb5 100644 --- a/lib/ssl/src/ssl_manager.erl +++ b/lib/ssl/src/ssl_manager.erl @@ -107,8 +107,7 @@ start_link_dist(Opts) -> %%-------------------------------------------------------------------- -spec connection_init(binary()| {der, list()}, client | server, {Cb :: atom(), Handle:: term()}) -> - {ok, certdb_ref(), db_handle(), db_handle(), - db_handle(), db_handle(), CRLInfo::term()}. + {ok, map()}. %% %% Description: Do necessary initializations for a new connection. %%-------------------------------------------------------------------- @@ -128,7 +127,7 @@ cache_pem_file(File, DbHandle) -> [Content] -> {ok, Content}; undefined -> - ssl_pem_cache:insert(File) + ssl_pem_cache:insert(File) end. %%-------------------------------------------------------------------- @@ -224,7 +223,7 @@ init([ManagerName, PemCacheName, Opts]) -> CacheCb = proplists:get_value(session_cb, Opts, ssl_session_cache), SessionLifeTime = proplists:get_value(session_lifetime, Opts, ?'24H_in_sec'), - CertDb = ssl_pkix_db:create(), + CertDb = ssl_pkix_db:create(PemCacheName), ClientSessionCache = CacheCb:init([{role, client} | proplists:get_value(session_cb_init_args, Opts, [])]), @@ -261,18 +260,25 @@ init([ManagerName, PemCacheName, Opts]) -> handle_call({{connection_init, <<>>, Role, {CRLCb, UserCRLDb}}, _Pid}, _From, #state{certificate_db = [CertDb, FileRefDb, PemChace | _] = Db} = State) -> Ref = make_ref(), - Result = {ok, Ref, CertDb, FileRefDb, PemChace, - session_cache(Role, State), {CRLCb, crl_db_info(Db, UserCRLDb)}}, - {reply, Result, State#state{certificate_db = Db}}; + {reply, {ok, #{cert_db_ref => Ref, + cert_db_handle => CertDb, + fileref_db_handle => FileRefDb, + pem_cache => PemChace, + session_cache => session_cache(Role, State), + crl_db_info => {CRLCb, crl_db_info(Db, UserCRLDb)}}}, State}; handle_call({{connection_init, Trustedcerts, Role, {CRLCb, UserCRLDb}}, Pid}, _From, #state{certificate_db = [CertDb, FileRefDb, PemChace | _] = Db} = State) -> case add_trusted_certs(Pid, Trustedcerts, Db) of {ok, Ref} -> - {reply, {ok, Ref, CertDb, FileRefDb, PemChace, session_cache(Role, State), - {CRLCb, crl_db_info(Db, UserCRLDb)}}, State}; - {error, _} = Error -> - {reply, Error, State} + {reply, {ok, #{cert_db_ref => Ref, + cert_db_handle => CertDb, + fileref_db_handle => FileRefDb, + pem_cache => PemChace, + session_cache => session_cache(Role, State), + crl_db_info => {CRLCb, crl_db_info(Db, UserCRLDb)}}}, State}; + {error, _} = Error -> + {reply, Error, State} end; handle_call({{insert_crls, Path, CRLs}, _}, _From, diff --git a/lib/ssl/src/ssl_pem_cache.erl b/lib/ssl/src/ssl_pem_cache.erl index 2b31374bcc..f63a301f69 100644 --- a/lib/ssl/src/ssl_pem_cache.erl +++ b/lib/ssl/src/ssl_pem_cache.erl @@ -133,7 +133,7 @@ invalidate_pem(File) -> init([Name]) -> put(ssl_pem_cache, Name), process_flag(trap_exit, true), - PemCache = ssl_pkix_db:create_pem_cache(), + PemCache = ssl_pkix_db:create_pem_cache(Name), Interval = pem_check_interval(), erlang:send_after(Interval, self(), clear_pem_cache), {ok, #state{pem_cache = PemCache, diff --git a/lib/ssl/src/ssl_pkix_db.erl b/lib/ssl/src/ssl_pkix_db.erl index 961a555873..cde05bb16f 100644 --- a/lib/ssl/src/ssl_pkix_db.erl +++ b/lib/ssl/src/ssl_pkix_db.erl @@ -28,7 +28,7 @@ 
-include_lib("public_key/include/public_key.hrl"). -include_lib("kernel/include/file.hrl"). --export([create/0, create_pem_cache/0, +-export([create/1, create_pem_cache/1, add_crls/3, remove_crls/2, remove/1, add_trusted_certs/3, extract_trusted_certs/1, remove_trusted_certs/2, insert/3, remove/2, clear/1, db_size/1, @@ -40,13 +40,13 @@ %%==================================================================== %%-------------------------------------------------------------------- --spec create() -> [db_handle(),...]. +-spec create(atom()) -> [db_handle(),...]. %% %% Description: Creates a new certificate db. %% Note: lookup_trusted_cert/4 may be called from any process but only %% the process that called create may call the other functions. %%-------------------------------------------------------------------- -create() -> +create(PEMCacheName) -> [%% Let connection process delete trusted certs %% that can only belong to one connection. (Supplied directly %% on DER format to ssl:connect/listen.) @@ -56,14 +56,14 @@ create() -> ets:new(ssl_otp_ca_ref_file_mapping, [set, protected]) }, %% Lookups in named table owned by ssl_pem_cache process - ssl_otp_pem_cache, + PEMCacheName, %% Default cache {ets:new(ssl_otp_crl_cache, [set, protected]), ets:new(ssl_otp_crl_issuer_mapping, [bag, protected])} ]. -create_pem_cache() -> - ets:new(ssl_otp_pem_cache, [named_table, set, protected]). +create_pem_cache(Name) -> + ets:new(Name, [named_table, set, protected]). %%-------------------------------------------------------------------- -spec remove([db_handle()]) -> ok. @@ -76,7 +76,9 @@ remove(Dbs) -> true = ets:delete(Db1); (undefined) -> ok; - (ssl_otp_pem_cache) -> + (ssl_pem_cache) -> + ok; + (ssl_pem_cache_dist) -> ok; (Db) -> true = ets:delete(Db) @@ -341,3 +343,4 @@ crl_issuer(DerCRL) -> CRL = public_key:der_decode('CertificateList', DerCRL), TBSCRL = CRL#'CertificateList'.tbsCertList, TBSCRL#'TBSCertList'.issuer. + diff --git a/lib/ssl/src/ssl_record.erl b/lib/ssl/src/ssl_record.erl index b10069c3cb..539e189c4f 100644 --- a/lib/ssl/src/ssl_record.erl +++ b/lib/ssl/src/ssl_record.erl @@ -67,7 +67,7 @@ connection_state(). %% %% Description: Returns the instance of the connection_state map -%% that is currently defined as the current conection state. +%% that is currently defined as the current connection state. %%-------------------------------------------------------------------- current_connection_state(ConnectionStates, read) -> maps:get(current_read, ConnectionStates); @@ -79,7 +79,7 @@ current_connection_state(ConnectionStates, write) -> connection_state(). %% %% Description: Returns the instance of the connection_state map -%% that is pendingly defined as the pending conection state. +%% that is pendingly defined as the pending connection state. %%-------------------------------------------------------------------- pending_connection_state(ConnectionStates, read) -> maps:get(pending_read, ConnectionStates); diff --git a/lib/ssl/src/tls_connection.erl b/lib/ssl/src/tls_connection.erl index 77606911be..c6e530e164 100644 --- a/lib/ssl/src/tls_connection.erl +++ b/lib/ssl/src/tls_connection.erl @@ -48,7 +48,7 @@ -export([encode_data/3, encode_alert/3]). %% State transition handling --export([next_record/1, next_event/3]). +-export([next_record/1, next_event/3, next_event/4]). %% Handshake handling -export([renegotiate/2, send_handshake/2, @@ -59,7 +59,8 @@ -export([send_alert/2, close/5]). %% Data handling --export([passive_receive/2, next_record_if_active/1, handle_common_event/4, send/3]). 
+-export([passive_receive/2, next_record_if_active/1, handle_common_event/4, send/3, + socket/5]). %% gen_statem state functions -export([init/3, error/3, downgrade/3, %% Initiation and take down states @@ -117,7 +118,7 @@ send_handshake_flight(#state{socket = Socket, transport_cb = Transport, flight_buffer = Flight} = State0) -> send(Transport, Socket, Flight), - State0#state{flight_buffer = []}. + {State0#state{flight_buffer = []}, []}. queue_change_cipher(Msg, #state{negotiated_version = Version, flight_buffer = Flight0, @@ -191,6 +192,10 @@ init([Role, Host, Port, Socket, Options, User, CbInfo]) -> callback_mode() -> state_functions. +socket(Pid, Transport, Socket, Connection, Tracker) -> + tls_socket:socket(Pid, Transport, Socket, Connection, Tracker). + + %%-------------------------------------------------------------------- %% State functions %%-------------------------------------------------------------------- @@ -340,12 +345,12 @@ connection(internal, #hello_request{}, renegotiation = {Renegotiation, _}} = State0) -> Hello = tls_handshake:client_hello(Host, Port, ConnectionStates0, SslOpts, Cache, CacheCb, Renegotiation, Cert), - State1 = send_handshake(Hello, State0), + {State1, Actions} = send_handshake(Hello, State0), {Record, State} = next_record( State1#state{session = Session0#session{session_id = Hello#client_hello.session_id}}), - next_event(hello, Record, State); + next_event(hello, Record, State, Actions); connection(internal, #client_hello{} = Hello, #state{role = server, allow_renegotiate = true} = State0) -> %% Mitigate Computational DoS attack diff --git a/lib/ssl/src/tls_handshake.erl b/lib/ssl/src/tls_handshake.erl index 2800ee6537..5726561865 100644 --- a/lib/ssl/src/tls_handshake.erl +++ b/lib/ssl/src/tls_handshake.erl @@ -88,7 +88,7 @@ client_hello(Host, Port, ConnectionStates, #hello_extensions{}, {ssl_cipher:hash(), ssl_cipher:sign_algo()} | undefined} | #alert{}. %% -%% Description: Handles a recieved hello message +%% Description: Handles a received hello message %%-------------------------------------------------------------------- hello(#server_hello{server_version = Version, random = Random, cipher_suite = CipherSuite, @@ -192,7 +192,8 @@ handle_client_hello(Version, #client_hello{session_id = SugesstedId, end. get_tls_handshake_aux(Version, <<?BYTE(Type), ?UINT24(Length), - Body:Length/binary,Rest/binary>>, #ssl_options{v2_hello_compatible = V2Hello} = Opts, Acc) -> + Body:Length/binary,Rest/binary>>, + #ssl_options{v2_hello_compatible = V2Hello} = Opts, Acc) -> Raw = <<?BYTE(Type), ?UINT24(Length), Body/binary>>, try decode_handshake(Version, Type, Body, V2Hello) of Handshake -> @@ -207,27 +208,17 @@ get_tls_handshake_aux(_Version, Data, _, Acc) -> decode_handshake(_, ?HELLO_REQUEST, <<>>, _) -> #hello_request{}; -%% Client hello v2. -%% The server must be able to receive such messages, from clients that -%% are willing to use ssl v3 or higher, but have ssl v2 compatibility. 
-decode_handshake(_Version, ?CLIENT_HELLO, <<?BYTE(Major), ?BYTE(Minor), - ?UINT16(CSLength), ?UINT16(0), - ?UINT16(CDLength), - CipherSuites:CSLength/binary, - ChallengeData:CDLength/binary>>, true) -> - #client_hello{client_version = {Major, Minor}, - random = ssl_v2:client_random(ChallengeData, CDLength), - session_id = 0, - cipher_suites = ssl_handshake:decode_suites('3_bytes', CipherSuites), - compression_methods = [?NULL], - extensions = #hello_extensions{} - }; -decode_handshake(_Version, ?CLIENT_HELLO, <<?BYTE(_), ?BYTE(_), - ?UINT16(CSLength), ?UINT16(0), - ?UINT16(CDLength), - _CipherSuites:CSLength/binary, - _ChallengeData:CDLength/binary>>, false) -> - throw(?ALERT_REC(?FATAL, ?PROTOCOL_VERSION, ssl_v2_client_hello_no_supported)); +decode_handshake(_Version, ?CLIENT_HELLO, Bin, true) -> + try decode_hello(Bin) of + Hello -> + Hello + catch + _:_ -> + decode_v2_hello(Bin) + end; +decode_handshake(_Version, ?CLIENT_HELLO, Bin, false) -> + decode_hello(Bin); + decode_handshake(_Version, ?CLIENT_HELLO, <<?BYTE(Major), ?BYTE(Minor), Random:32/binary, ?BYTE(SID_length), Session_ID:SID_length/binary, ?UINT16(Cs_length), CipherSuites:Cs_length/binary, @@ -244,10 +235,40 @@ decode_handshake(_Version, ?CLIENT_HELLO, <<?BYTE(Major), ?BYTE(Minor), Random:3 compression_methods = Comp_methods, extensions = DecodedExtensions }; - decode_handshake(Version, Tag, Msg, _) -> ssl_handshake:decode_handshake(Version, Tag, Msg). + +decode_hello(<<?BYTE(Major), ?BYTE(Minor), Random:32/binary, + ?BYTE(SID_length), Session_ID:SID_length/binary, + ?UINT16(Cs_length), CipherSuites:Cs_length/binary, + ?BYTE(Cm_length), Comp_methods:Cm_length/binary, + Extensions/binary>>) -> + DecodedExtensions = ssl_handshake:decode_hello_extensions({client, Extensions}), + + #client_hello{ + client_version = {Major,Minor}, + random = Random, + session_id = Session_ID, + cipher_suites = ssl_handshake:decode_suites('2_bytes', CipherSuites), + compression_methods = Comp_methods, + extensions = DecodedExtensions + }. +%% The server must be able to receive such messages, from clients that +%% are willing to use ssl v3 or higher, but have ssl v2 compatibility. +decode_v2_hello(<<?BYTE(Major), ?BYTE(Minor), + ?UINT16(CSLength), ?UINT16(0), + ?UINT16(CDLength), + CipherSuites:CSLength/binary, + ChallengeData:CDLength/binary>>) -> + #client_hello{client_version = {Major, Minor}, + random = ssl_v2:client_random(ChallengeData, CDLength), + session_id = 0, + cipher_suites = ssl_handshake:decode_suites('3_bytes', CipherSuites), + compression_methods = [?NULL], + extensions = #hello_extensions{} + }. 
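The refactored clauses above mean a well-formed TLS client hello is no longer parsed as an SSLv2-compatible hello: decode_hello/1 is always tried first, and decode_v2_hello/1 only runs as a fallback when v2_hello_compatible is set and the strict parse fails. A minimal sketch of the resulting behaviour, reusing the decode_handshake/4 call shape exercised by the regression test added in ssl_handshake_SUITE further down (HelloPacket is assumed to hold a complete, valid TLS 1.2 client hello body; the handshake type for a client hello is 1):
%% Sketch only: HelloPacket is assumed to be a valid TLS 1.2 client hello body.
Strict = tls_handshake:decode_handshake({3,3}, 1, HelloPacket, false),
Compat = tls_handshake:decode_handshake({3,3}, 1, HelloPacket, true),
%% Both calls return the same #client_hello{} record, because the
%% v2-compatible path only falls back to decode_v2_hello/1 when the
%% strict decode_hello/1 parse fails to match.
Strict = Compat.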
+ enc_handshake(#hello_request{}, _Version) -> {?HELLO_REQUEST, <<>>}; enc_handshake(#client_hello{client_version = {Major, Minor}, diff --git a/lib/ssl/src/tls_v1.erl b/lib/ssl/src/tls_v1.erl index 7f24ce5192..f52ee06e71 100644 --- a/lib/ssl/src/tls_v1.erl +++ b/lib/ssl/src/tls_v1.erl @@ -204,21 +204,21 @@ suites(Minor) when Minor == 1; Minor == 2 -> ?TLS_ECDH_RSA_WITH_AES_256_CBC_SHA, ?TLS_RSA_WITH_AES_256_CBC_SHA, - ?TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA, - ?TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA, - ?TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA, - ?TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA, - ?TLS_ECDH_ECDSA_WITH_3DES_EDE_CBC_SHA, - ?TLS_ECDH_RSA_WITH_3DES_EDE_CBC_SHA, - ?TLS_RSA_WITH_3DES_EDE_CBC_SHA, - ?TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA, ?TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA, ?TLS_DHE_RSA_WITH_AES_128_CBC_SHA, ?TLS_DHE_DSS_WITH_AES_128_CBC_SHA, ?TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA, ?TLS_ECDH_RSA_WITH_AES_128_CBC_SHA, - ?TLS_RSA_WITH_AES_128_CBC_SHA + ?TLS_RSA_WITH_AES_128_CBC_SHA, + + ?TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA, + ?TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA, + ?TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA, + ?TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA, + ?TLS_ECDH_ECDSA_WITH_3DES_EDE_CBC_SHA, + ?TLS_ECDH_RSA_WITH_3DES_EDE_CBC_SHA, + ?TLS_RSA_WITH_3DES_EDE_CBC_SHA ]; suites(3) -> [ @@ -407,7 +407,7 @@ is_pair(Hash, rsa, Hashs) -> AtLeastMd5 = Hashs -- [md2,md4], lists:member(Hash, AtLeastMd5). -%% list ECC curves in prefered order +%% list ECC curves in preferred order -spec ecc_curves(1..3 | all) -> [named_curve()]. ecc_curves(all) -> [sect571r1,sect571k1,secp521r1,brainpoolP512r1, diff --git a/lib/ssl/test/ssl_basic_SUITE.erl b/lib/ssl/test/ssl_basic_SUITE.erl index f0a3c42e8d..86426bdb60 100644 --- a/lib/ssl/test/ssl_basic_SUITE.erl +++ b/lib/ssl/test/ssl_basic_SUITE.erl @@ -53,7 +53,8 @@ all() -> {group, options_tls}, {group, session}, {group, 'dtlsv1.2'}, - %%{group, 'dtlsv1'}, + %% {group, 'dtlsv1'}, Breaks dtls in cert_verify_SUITE enable later when + %% problem is identified and fixed {group, 'tlsv1.2'}, {group, 'tlsv1.1'}, {group, 'tlsv1'}, @@ -65,15 +66,15 @@ groups() -> {basic_tls, [], basic_tests_tls()}, {options, [], options_tests()}, {options_tls, [], options_tests_tls()}, - %%{'dtlsv1.2', [], all_versions_groups()}, - {'dtlsv1.2', [], [connection_information]}, - %%{'dtlsv1', [], all_versions_groups()}, + {'dtlsv1.2', [], all_versions_groups()}, + {'dtlsv1', [], all_versions_groups()}, {'tlsv1.2', [], all_versions_groups() ++ tls_versions_groups() ++ [conf_signature_algs, no_common_signature_algs]}, {'tlsv1.1', [], all_versions_groups() ++ tls_versions_groups()}, {'tlsv1', [], all_versions_groups() ++ tls_versions_groups() ++ rizzo_tests()}, {'sslv3', [], all_versions_groups() ++ tls_versions_groups() ++ rizzo_tests() ++ [tls_ciphersuite_vs_version]}, {api,[], api_tests()}, {api_tls,[], api_tests_tls()}, + {tls_ciphers,[], tls_cipher_tests()}, {session, [], session_tests()}, {renegotiate, [], renegotiate_tests()}, {ciphers, [], cipher_tests()}, @@ -83,12 +84,13 @@ groups() -> ]. tls_versions_groups ()-> - [{group, api_tls}, + [{group, renegotiate}, %% Should be in all_versions_groups not fixed for DTLS yet + {group, api_tls}, + {group, tls_ciphers}, {group, error_handling_tests_tls}]. all_versions_groups ()-> [{group, api}, - {group, renegotiate}, {group, ciphers}, {group, ciphers_ec}, {group, error_handling_tests}]. 
@@ -147,10 +149,8 @@ options_tests_tls() -> api_tests() -> [connection_info, connection_information, - peername, peercert, peercert_with_client_cert, - sockname, versions, eccs, controlling_process, @@ -162,7 +162,6 @@ api_tests() -> ssl_recv_timeout, server_name_indication_option, accept_pool, - new_options_in_accept, prf ]. @@ -175,7 +174,10 @@ api_tests_tls() -> tls_shutdown, tls_shutdown_write, tls_shutdown_both, - tls_shutdown_error + tls_shutdown_error, + peername, + sockname, + new_options_in_accept ]. session_tests() -> @@ -197,6 +199,11 @@ renegotiate_tests() -> renegotiate_dos_mitigate_passive, renegotiate_dos_mitigate_absolute]. +tls_cipher_tests() -> + [rc4_rsa_cipher_suites, + rc4_ecdh_rsa_cipher_suites, + rc4_ecdsa_cipher_suites]. + cipher_tests() -> [cipher_suites, cipher_suites_mix, @@ -212,9 +219,6 @@ cipher_tests() -> srp_cipher_suites, srp_anon_cipher_suites, srp_dsa_cipher_suites, - rc4_rsa_cipher_suites, - rc4_ecdh_rsa_cipher_suites, - rc4_ecdsa_cipher_suites, des_rsa_cipher_suites, des_ecdh_rsa_cipher_suites, default_reject_anonymous]. @@ -226,15 +230,15 @@ cipher_tests_ec() -> ciphers_ecdh_rsa_signed_certs_openssl_names]. error_handling_tests()-> - [controller_dies, - close_transport_accept, + [close_transport_accept, recv_active, recv_active_once, recv_error_handling ]. error_handling_tests_tls()-> - [tls_client_closes_socket, + [controller_dies, + tls_client_closes_socket, tls_tcp_error_propagation_in_active_mode, tls_tcp_connect, tls_tcp_connect_big, @@ -843,8 +847,7 @@ controller_dies(Config) when is_list(Config) -> Server ! listen, Tester = self(), Connect = fun(Pid) -> - {ok, Socket} = ssl:connect(Hostname, Port, - [{reuseaddr,true},{ssl_imp,new}]), + {ok, Socket} = ssl:connect(Hostname, Port, ClientOpts), %% Make sure server finishes and verification %% and is in coonection state before %% killing client @@ -2194,8 +2197,9 @@ ciphers_dsa_signed_certs() -> [{doc,"Test all dsa ssl cipher suites in highest support ssl/tls version"}]. ciphers_dsa_signed_certs(Config) when is_list(Config) -> + NVersion = ssl_test_lib:protocol_version(Config, tuple), Version = ssl_test_lib:protocol_version(Config), - Ciphers = ssl_test_lib:dsa_suites(tls_record:protocol_version(Version)), + Ciphers = ssl_test_lib:dsa_suites(NVersion), ct:log("~p erlang cipher suites ~p~n", [Version, Ciphers]), run_suites(Ciphers, Version, Config, dsa). %%------------------------------------------------------------------- @@ -2218,29 +2222,33 @@ anonymous_cipher_suites(Config) when is_list(Config) -> psk_cipher_suites() -> [{doc, "Test the PSK ciphersuites WITHOUT server supplied identity hint"}]. psk_cipher_suites(Config) when is_list(Config) -> + NVersion = tls_record:highest_protocol_version([]), Version = ssl_test_lib:protocol_version(Config), - Ciphers = ssl_test_lib:psk_suites(), + Ciphers = ssl_test_lib:psk_suites(NVersion), run_suites(Ciphers, Version, Config, psk). %%------------------------------------------------------------------- psk_with_hint_cipher_suites()-> [{doc, "Test the PSK ciphersuites WITH server supplied identity hint"}]. psk_with_hint_cipher_suites(Config) when is_list(Config) -> + NVersion = tls_record:highest_protocol_version([]), Version = ssl_test_lib:protocol_version(Config), - Ciphers = ssl_test_lib:psk_suites(), + Ciphers = ssl_test_lib:psk_suites(NVersion), run_suites(Ciphers, Version, Config, psk_with_hint). 
%%------------------------------------------------------------------- psk_anon_cipher_suites() -> [{doc, "Test the anonymous PSK ciphersuites WITHOUT server supplied identity hint"}]. psk_anon_cipher_suites(Config) when is_list(Config) -> + NVersion = tls_record:highest_protocol_version([]), Version = ssl_test_lib:protocol_version(Config), - Ciphers = ssl_test_lib:psk_anon_suites(), + Ciphers = ssl_test_lib:psk_anon_suites(NVersion), run_suites(Ciphers, Version, Config, psk_anon). %%------------------------------------------------------------------- psk_anon_with_hint_cipher_suites()-> [{doc, "Test the anonymous PSK ciphersuites WITH server supplied identity hint"}]. psk_anon_with_hint_cipher_suites(Config) when is_list(Config) -> + NVersion = tls_record:highest_protocol_version([]), Version = ssl_test_lib:protocol_version(Config), - Ciphers = ssl_test_lib:psk_anon_suites(), + Ciphers = ssl_test_lib:psk_anon_suites(NVersion), run_suites(Ciphers, Version, Config, psk_anon_with_hint). %%------------------------------------------------------------------- srp_cipher_suites()-> @@ -2291,18 +2299,17 @@ rc4_ecdsa_cipher_suites(Config) when is_list(Config) -> %%------------------------------------------------------------------- des_rsa_cipher_suites()-> - [{doc, "Test the RC4 ciphersuites"}]. + [{doc, "Test the des_rsa ciphersuites"}]. des_rsa_cipher_suites(Config) when is_list(Config) -> - NVersion = tls_record:highest_protocol_version([]), - Version = tls_record:protocol_version(NVersion), - Ciphers = ssl_test_lib:des_suites(NVersion), + Version = ssl_test_lib:protocol_version(Config), + Ciphers = ssl_test_lib:des_suites(Config), run_suites(Ciphers, Version, Config, des_rsa). %------------------------------------------------------------------- des_ecdh_rsa_cipher_suites()-> - [{doc, "Test the RC4 ciphersuites"}]. + [{doc, "Test ECDH rsa signed ciphersuites"}]. des_ecdh_rsa_cipher_suites(Config) when is_list(Config) -> - NVersion = tls_record:highest_protocol_version([]), - Version = tls_record:protocol_version(NVersion), + NVersion = ssl_test_lib:protocol_version(Config, tuple), + Version = ssl_test_lib:protocol_version(Config), Ciphers = ssl_test_lib:des_suites(NVersion), run_suites(Ciphers, Version, Config, des_dhe_rsa). @@ -2313,9 +2320,11 @@ default_reject_anonymous(Config) when is_list(Config) -> {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config), ClientOpts = ssl_test_lib:ssl_options(client_opts, Config), ServerOpts = ssl_test_lib:ssl_options(server_opts, Config), - Version = tls_record:highest_protocol_version(tls_record:supported_protocol_versions()), - [CipherSuite | _] = ssl_test_lib:anonymous_suites(Version), - + Version = ssl_test_lib:protocol_version(Config), + TLSVersion = ssl_test_lib:tls_version(Version), + + [CipherSuite | _] = ssl_test_lib:anonymous_suites(TLSVersion), + Server = ssl_test_lib:start_server_error([{node, ServerNode}, {port, 0}, {from, self()}, {options, ServerOpts}]), @@ -2335,8 +2344,9 @@ ciphers_ecdsa_signed_certs() -> [{doc, "Test all ecdsa ssl cipher suites in highest support ssl/tls version"}]. ciphers_ecdsa_signed_certs(Config) when is_list(Config) -> + NVersion = ssl_test_lib:protocol_version(Config, tuple), Version = ssl_test_lib:protocol_version(Config), - Ciphers = ssl_test_lib:ecdsa_suites(tls_record:protocol_version(Version)), + Ciphers = ssl_test_lib:ecdsa_suites(NVersion), ct:log("~p erlang cipher suites ~p~n", [Version, Ciphers]), run_suites(Ciphers, Version, Config, ecdsa). 
%%-------------------------------------------------------------------- @@ -2353,8 +2363,9 @@ ciphers_ecdh_rsa_signed_certs() -> [{doc, "Test all ecdh_rsa ssl cipher suites in highest support ssl/tls version"}]. ciphers_ecdh_rsa_signed_certs(Config) when is_list(Config) -> + NVersion = ssl_test_lib:protocol_version(Config, tuple), Version = ssl_test_lib:protocol_version(Config), - Ciphers = ssl_test_lib:ecdh_rsa_suites(tls_record:protocol_version(Version)), + Ciphers = ssl_test_lib:ecdh_rsa_suites(NVersion), ct:log("~p erlang cipher suites ~p~n", [Version, Ciphers]), run_suites(Ciphers, Version, Config, ecdh_rsa). %%-------------------------------------------------------------------- @@ -3326,11 +3337,11 @@ hibernate(Config) -> process_info(Pid, current_function), ssl_test_lib:check_result(Server, ok, Client, ok), - timer:sleep(1100), - + + timer:sleep(1500), {current_function, {erlang, hibernate, 3}} = process_info(Pid, current_function), - + ssl_test_lib:close(Server), ssl_test_lib:close(Client). @@ -3363,13 +3374,12 @@ hibernate_right_away(Config) -> [{port, Port1}, {options, [{hibernate_after, 0}|ClientOpts]}]), ssl_test_lib:check_result(Server1, ok, Client1, ok), - - {current_function, {erlang, hibernate, 3}} = + + {current_function, {erlang, hibernate, 3}} = process_info(Pid1, current_function), - ssl_test_lib:close(Server1), ssl_test_lib:close(Client1), - + Server2 = ssl_test_lib:start_server(StartServerOpts), Port2 = ssl_test_lib:inet_port(Server2), {Client2, #sslsocket{pid = Pid2}} = ssl_test_lib:start_client(StartClientOpts ++ @@ -3377,8 +3387,8 @@ hibernate_right_away(Config) -> ssl_test_lib:check_result(Server2, ok, Client2, ok), - ct:sleep(100), %% Schedule out - + ct:sleep(1000), %% Schedule out + {current_function, {erlang, hibernate, 3}} = process_info(Pid2, current_function), @@ -4030,11 +4040,11 @@ prf_create_plan(TlsVersions, PRFs, Results) -> prf_ciphers_and_expected(TlsVer, PRFs, Results) -> case TlsVer of TlsVer when TlsVer == sslv3 orelse TlsVer == tlsv1 - orelse TlsVer == 'tlsv1.1' -> + orelse TlsVer == 'tlsv1.1' orelse TlsVer == 'dtlsv1' -> Ciphers = ssl:cipher_suites(), {_, Expected} = lists:keyfind(md5sha, 1, Results), [[{tls_ver, TlsVer}, {ciphers, Ciphers}, {expected, Expected}, {prf, md5sha}]]; - 'tlsv1.2' -> + TlsVer when TlsVer == 'tlsv1.2' orelse TlsVer == 'dtlsv1.2'-> lists:foldl( fun(PRF, Acc) -> Ciphers = prf_get_ciphers(TlsVer, PRF), @@ -4049,21 +4059,20 @@ prf_ciphers_and_expected(TlsVer, PRFs, Results) -> end end, [], PRFs) end. -prf_get_ciphers(TlsVer, PRF) -> - case TlsVer of - 'tlsv1.2' -> - lists:filter( - fun(C) when tuple_size(C) == 4 andalso - element(4, C) == PRF -> - true; - (_) -> false - end, ssl:cipher_suites()) - end. +prf_get_ciphers(_, PRF) -> + lists:filter( + fun(C) when tuple_size(C) == 4 andalso + element(4, C) == PRF -> + true; + (_) -> + false + end, + ssl:cipher_suites()). 
prf_run_test(_, TlsVer, [], _, Prf) -> ct:fail({error, cipher_list_empty, TlsVer, Prf}); prf_run_test(Config, TlsVer, Ciphers, Expected, Prf) -> {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config), - BaseOpts = [{active, true}, {versions, [TlsVer]}, {ciphers, Ciphers}], + BaseOpts = [{active, true}, {versions, [TlsVer]}, {ciphers, Ciphers}, {protocol, tls_or_dtls(TlsVer)}], ServerOpts = BaseOpts ++ proplists:get_value(server_opts, Config), ClientOpts = BaseOpts ++ proplists:get_value(client_opts, Config), Server = ssl_test_lib:start_server( @@ -4507,16 +4516,21 @@ run_suites(Ciphers, Version, Config, Type) -> [{reuseaddr, true}, {ciphers, ssl_test_lib:anonymous_suites(Version)}]}; psk -> {ssl_test_lib:ssl_options(client_psk, Config), - ssl_test_lib:ssl_options(server_psk, Config)}; + [{ciphers, ssl_test_lib:psk_suites(Version)} | + ssl_test_lib:ssl_options(server_psk, Config)]}; psk_with_hint -> {ssl_test_lib:ssl_options(client_psk, Config), - ssl_test_lib:ssl_options(server_psk_hint, Config)}; + [{ciphers, ssl_test_lib:psk_suites(Version)} | + ssl_test_lib:ssl_options(server_psk_hint, Config) + ]}; psk_anon -> {ssl_test_lib:ssl_options(client_psk, Config), - ssl_test_lib:ssl_options(server_psk_anon, Config)}; + [{ciphers, ssl_test_lib:psk_anon_suites(Version)} | + ssl_test_lib:ssl_options(server_psk_anon, Config)]}; psk_anon_with_hint -> {ssl_test_lib:ssl_options(client_psk, Config), - ssl_test_lib:ssl_options(server_psk_anon_hint, Config)}; + [{ciphers, ssl_test_lib:psk_anon_suites(Version)} | + ssl_test_lib:ssl_options(server_psk_anon_hint, Config)]}; srp -> {ssl_test_lib:ssl_options(client_srp, Config), ssl_test_lib:ssl_options(server_srp, Config)}; @@ -4556,7 +4570,7 @@ run_suites(Ciphers, Version, Config, Type) -> Result = lists:map(fun(Cipher) -> cipher(Cipher, Version, Config, ClientOpts, ServerOpts) end, - ssl_test_lib:filter_suites(Ciphers)), + ssl_test_lib:filter_suites(Ciphers, Version)), case lists:flatten(Result) of [] -> ok; @@ -4756,3 +4770,9 @@ wait_for_send(Socket) -> %% Make sure TLS process processed send message event _ = ssl:connection_information(Socket). +tls_or_dtls('dtlsv1') -> + dtls; +tls_or_dtls('dtlsv1.2') -> + dtls; +tls_or_dtls(_) -> + tls. diff --git a/lib/ssl/test/ssl_certificate_verify_SUITE.erl b/lib/ssl/test/ssl_certificate_verify_SUITE.erl index 5265c87e29..66b0c09b73 100644 --- a/lib/ssl/test/ssl_certificate_verify_SUITE.erl +++ b/lib/ssl/test/ssl_certificate_verify_SUITE.erl @@ -39,17 +39,26 @@ %% Common Test interface functions ----------------------------------- %%-------------------------------------------------------------------- all() -> - [{group, active}, - {group, passive}, - {group, active_once}, - {group, error_handling}]. - + [ + {group, tls}, + {group, dtls} + ]. groups() -> - [{active, [], tests()}, + [ + {tls, [], all_protocol_groups()}, + {dtls, [], all_protocol_groups()}, + {active, [], tests()}, {active_once, [], tests()}, {passive, [], tests()}, - {error_handling, [],error_handling_tests()}]. + {error_handling, [],error_handling_tests()} + ]. + +all_protocol_groups() -> + [{group, active}, + {group, passive}, + {group, active_once}, + {group, error_handling}]. 
tests() -> [verify_peer, @@ -85,7 +94,7 @@ init_per_suite(Config0) -> catch crypto:stop(), try crypto:start() of ok -> - ssl_test_lib:clean_start(), + ssl_test_lib:clean_start(), %% make rsa certs using oppenssl {ok, _} = make_certs:all(proplists:get_value(data_dir, Config0), proplists:get_value(priv_dir, Config0)), @@ -99,6 +108,26 @@ end_per_suite(_Config) -> ssl:stop(), application:stop(crypto). +init_per_group(tls, Config) -> + Version = tls_record:protocol_version(tls_record:highest_protocol_version([])), + ssl:stop(), + application:load(ssl), + application:set_env(ssl, protocol_version, Version), + application:set_env(ssl, bypass_pem_cache, Version), + ssl:start(), + NewConfig = proplists:delete(protocol, Config), + [{protocol, tls}, {version, tls_record:protocol_version(Version)} | NewConfig]; + +init_per_group(dtls, Config) -> + Version = dtls_record:protocol_version(dtls_record:highest_protocol_version([])), + ssl:stop(), + application:load(ssl), + application:set_env(ssl, protocol_version, Version), + application:set_env(ssl, bypass_pem_cache, Version), + ssl:start(), + NewConfig = proplists:delete(protocol_opts, proplists:delete(protocol, Config)), + [{protocol, dtls}, {protocol_opts, [{protocol, dtls}]}, {version, dtls_record:protocol_version(Version)} | NewConfig]; + init_per_group(active, Config) -> [{active, true}, {receive_function, send_recv_result_active} | Config]; init_per_group(active_once, Config) -> @@ -126,7 +155,7 @@ init_per_testcase(_TestCase, Config) -> ssl:stop(), ssl:start(), ssl_test_lib:ct_log_supported_protocol_versions(Config), - ct:timetrap({seconds, 5}), + ct:timetrap({seconds, 10}), Config. end_per_testcase(_TestCase, Config) -> @@ -262,7 +291,7 @@ server_require_peer_cert_fail() -> server_require_peer_cert_fail(Config) when is_list(Config) -> ServerOpts = [{verify, verify_peer}, {fail_if_no_peer_cert, true} | ssl_test_lib:ssl_options(server_verification_opts, Config)], - BadClientOpts = ssl_test_lib:ssl_options(client_opts, []), + BadClientOpts = ssl_test_lib:ssl_options(empty_client_opts, Config), {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config), Server = ssl_test_lib:start_server_error([{node, ServerNode}, {port, 0}, @@ -411,7 +440,7 @@ server_require_peer_cert_partial_chain_fun_fail() -> server_require_peer_cert_partial_chain_fun_fail(Config) when is_list(Config) -> ServerOpts = [{verify, verify_peer}, {fail_if_no_peer_cert, true} | ssl_test_lib:ssl_options(server_verification_opts, Config)], - ClientOpts = proplists:get_value(client_opts, Config), + ClientOpts = ssl_test_lib:ssl_options(client_opts, Config), {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config), {ok, ServerCAs} = file:read_file(proplists:get_value(cacertfile, ServerOpts)), @@ -1091,6 +1120,7 @@ client_with_cert_cipher_suites_handshake() -> client_with_cert_cipher_suites_handshake(Config) when is_list(Config) -> ClientOpts = ssl_test_lib:ssl_options(client_verification_opts_digital_signature_only, Config), ServerOpts = ssl_test_lib:ssl_options(server_verification_opts, Config), + {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config), Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0}, {from, self()}, @@ -1098,7 +1128,7 @@ client_with_cert_cipher_suites_handshake(Config) when is_list(Config) -> send_recv_result_active, []}}, {options, [{active, true}, {ciphers, - ssl_test_lib:rsa_non_signed_suites(tls_record:highest_protocol_version([]))} + ssl_test_lib:rsa_non_signed_suites(proplists:get_value(version, Config))} | 
ServerOpts]}]), Port = ssl_test_lib:inet_port(Server), Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, @@ -1132,7 +1162,7 @@ server_verify_no_cacerts(Config) when is_list(Config) -> unknown_server_ca_fail() -> [{doc,"Test that the client fails if the ca is unknown in verify_peer mode"}]. unknown_server_ca_fail(Config) when is_list(Config) -> - ClientOpts = ssl_test_lib:ssl_options(client_opts, []), + ClientOpts = ssl_test_lib:ssl_options(empty_client_opts, Config), ServerOpts = ssl_test_lib:ssl_options(server_verification_opts, Config), {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config), Server = ssl_test_lib:start_server_error([{node, ServerNode}, {port, 0}, @@ -1176,7 +1206,7 @@ unknown_server_ca_fail(Config) when is_list(Config) -> unknown_server_ca_accept_verify_none() -> [{doc,"Test that the client succeds if the ca is unknown in verify_none mode"}]. unknown_server_ca_accept_verify_none(Config) when is_list(Config) -> - ClientOpts = ssl_test_lib:ssl_options(client_opts, []), + ClientOpts = ssl_test_lib:ssl_options(empty_client_opts, Config), ServerOpts = ssl_test_lib:ssl_options(server_verification_opts, Config), {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config), Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0}, @@ -1201,8 +1231,8 @@ unknown_server_ca_accept_verify_peer() -> [{doc, "Test that the client succeds if the ca is unknown in verify_peer mode" " with a verify_fun that accepts the unknown ca error"}]. unknown_server_ca_accept_verify_peer(Config) when is_list(Config) -> - ClientOpts =ssl_test_lib:ssl_options(client_opts, []), - ServerOpts = ssl_test_lib:ssl_options(server_verification_opts, Config), + ClientOpts = ssl_test_lib:ssl_options(empty_client_opts, Config), + ServerOpts = ssl_test_lib:ssl_options(server_verification_opts, Config), {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config), Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0}, {from, self()}, @@ -1240,7 +1270,7 @@ unknown_server_ca_accept_verify_peer(Config) when is_list(Config) -> unknown_server_ca_accept_backwardscompatibility() -> [{doc,"Test that old style verify_funs will work"}]. unknown_server_ca_accept_backwardscompatibility(Config) when is_list(Config) -> - ClientOpts = ssl_test_lib:ssl_options(client_opts, []), + ClientOpts = ssl_test_lib:ssl_options(empty_client_opts, Config), ServerOpts = ssl_test_lib:ssl_options(server_verification_opts, Config), {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config), Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0}, diff --git a/lib/ssl/test/ssl_handshake_SUITE.erl b/lib/ssl/test/ssl_handshake_SUITE.erl index 74b14145dd..0a50c98a28 100644 --- a/lib/ssl/test/ssl_handshake_SUITE.erl +++ b/lib/ssl/test/ssl_handshake_SUITE.erl @@ -33,6 +33,7 @@ %% Common Test interface functions ----------------------------------- %%-------------------------------------------------------------------- all() -> [decode_hello_handshake, + decode_hello_handshake_version_confusion, decode_single_hello_extension_correctly, decode_supported_elliptic_curves_hello_extension_correctly, decode_unknown_hello_extension_correctly, @@ -106,6 +107,14 @@ decode_hello_handshake(_Config) -> #renegotiation_info{renegotiated_connection = <<0>>} = (Hello#server_hello.extensions)#hello_extensions.renegotiation_info. 
+ +decode_hello_handshake_version_confusion(_) -> + HelloPacket = <<3,3,0,0,0,0,0,63,210,235,149,6,244,140,108,13,177,74,16,218,33,108,219,41,73,228,3,82,132,123,73,144,118,100,0,0,32,192,4,0,10,192,45,192,38,0,47,192,18,0,163,0,22,0,165,192,29,192,18,192,30,0,103,0,57,192,48,0,47,1,0>>, + Version = {3,3}, + ClientHello = 1, + Hello = tls_handshake:decode_handshake({3,3}, ClientHello, HelloPacket, false), + Hello = tls_handshake:decode_handshake({3,3}, ClientHello, HelloPacket, true). + decode_single_hello_extension_correctly(_Config) -> Renegotiation = <<?UINT16(?RENEGOTIATION_EXT), ?UINT16(1), 0>>, Extensions = ssl_handshake:decode_hello_extensions(Renegotiation), diff --git a/lib/ssl/test/ssl_npn_hello_SUITE.erl b/lib/ssl/test/ssl_npn_hello_SUITE.erl index 69aeea10c5..0b1de1dc1c 100644 --- a/lib/ssl/test/ssl_npn_hello_SUITE.erl +++ b/lib/ssl/test/ssl_npn_hello_SUITE.erl @@ -50,6 +50,10 @@ init_per_suite(Config) -> {skip, "Crypto did not start"} end. +end_per_suite(_Config) -> + %% This function is required since init_per_suite/1 exists. + ok. + init_per_testcase(_TestCase, Config) -> ssl_test_lib:ct_log_supported_protocol_versions(Config), ct:timetrap({seconds, 5}), diff --git a/lib/ssl/test/ssl_test_lib.erl b/lib/ssl/test/ssl_test_lib.erl index 9632103696..4b740c79db 100644 --- a/lib/ssl/test/ssl_test_lib.erl +++ b/lib/ssl/test/ssl_test_lib.erl @@ -278,8 +278,11 @@ check_result(Server, ServerMsg, Client, ClientMsg) -> check_result(Server, ServerMsg); {Port, {data,Debug}} when is_port(Port) -> - ct:log("~p:~p~nopenssl ~s~n",[?MODULE,?LINE, Debug]), + ct:log("~p:~p~n Openssl ~s~n",[?MODULE,?LINE, Debug]), check_result(Server, ServerMsg, Client, ClientMsg); + {Port,closed} when is_port(Port) -> + ct:log("~p:~p~n Openssl port ~n",[?MODULE,?LINE]), + check_result(Server, ServerMsg, Client, ClientMsg); Unexpected -> Reason = {{expected, {Client, ClientMsg}}, {expected, {Server, ServerMsg}}, {got, Unexpected}}, @@ -291,11 +294,11 @@ check_result(Pid, Msg) -> {Pid, Msg} -> ok; {Port, {data,Debug}} when is_port(Port) -> - ct:log("~p:~p~nopenssl ~s~n",[?MODULE,?LINE, Debug]), + ct:log("~p:~p~n Openssl ~s~n",[?MODULE,?LINE, Debug]), check_result(Pid,Msg); - %% {Port, {exit_status, Status}} when is_port(Port) -> - %% ct:log("~p:~p Exit status: ~p~n",[?MODULE,?LINE, Status]), - %% check_result(Pid, Msg); + {Port,closed} when is_port(Port)-> + ct:log("~p:~p Openssl port closed ~n",[?MODULE,?LINE]), + check_result(Pid, Msg); Unexpected -> Reason = {{expected, {Pid, Msg}}, {got, Unexpected}}, @@ -398,27 +401,22 @@ cert_options(Config) -> {ssl_imp, new}]}, {server_opts, [{ssl_imp, new},{reuseaddr, true}, {cacertfile, ServerCaCertFile}, {certfile, ServerCertFile}, {keyfile, ServerKeyFile}]}, - %%{server_anon, [{ssl_imp, new},{reuseaddr, true}, {ciphers, anonymous_suites()}]}, - {client_psk, [{ssl_imp, new},{reuseaddr, true}, + {client_psk, [{ssl_imp, new}, {psk_identity, "Test-User"}, {user_lookup_fun, {fun user_lookup/3, PskSharedSecret}}]}, {server_psk, [{ssl_imp, new},{reuseaddr, true}, {certfile, ServerCertFile}, {keyfile, ServerKeyFile}, - {user_lookup_fun, {fun user_lookup/3, PskSharedSecret}}, - {ciphers, psk_suites()}]}, + {user_lookup_fun, {fun user_lookup/3, PskSharedSecret}}]}, {server_psk_hint, [{ssl_imp, new},{reuseaddr, true}, {certfile, ServerCertFile}, {keyfile, ServerKeyFile}, {psk_identity, "HINT"}, - {user_lookup_fun, {fun user_lookup/3, PskSharedSecret}}, - {ciphers, psk_suites()}]}, + {user_lookup_fun, {fun user_lookup/3, PskSharedSecret}}]}, {server_psk_anon, [{ssl_imp, 
new},{reuseaddr, true}, - {user_lookup_fun, {fun user_lookup/3, PskSharedSecret}}, - {ciphers, psk_anon_suites()}]}, + {user_lookup_fun, {fun user_lookup/3, PskSharedSecret}}]}, {server_psk_anon_hint, [{ssl_imp, new},{reuseaddr, true}, {psk_identity, "HINT"}, - {user_lookup_fun, {fun user_lookup/3, PskSharedSecret}}, - {ciphers, psk_anon_suites()}]}, - {client_srp, [{ssl_imp, new},{reuseaddr, true}, + {user_lookup_fun, {fun user_lookup/3, PskSharedSecret}}]}, + {client_srp, [{ssl_imp, new}, {srp_identity, {"Test-User", "secret"}}]}, {server_srp, [{ssl_imp, new},{reuseaddr, true}, {certfile, ServerCertFile}, {keyfile, ServerKeyFile}, @@ -473,7 +471,7 @@ make_dsa_cert(Config) -> {cacertfile, ClientCaCertFile}, {certfile, ServerCertFile}, {keyfile, ServerKeyFile}, {verify, verify_peer}]}, - {client_dsa_opts, [{ssl_imp, new},{reuseaddr, true}, + {client_dsa_opts, [{ssl_imp, new}, {cacertfile, ClientCaCertFile}, {certfile, ClientCertFile}, {keyfile, ClientKeyFile}]}, {server_srp_dsa, [{ssl_imp, new},{reuseaddr, true}, @@ -481,7 +479,7 @@ make_dsa_cert(Config) -> {certfile, ServerCertFile}, {keyfile, ServerKeyFile}, {user_lookup_fun, {fun user_lookup/3, undefined}}, {ciphers, srp_dss_suites()}]}, - {client_srp_dsa, [{ssl_imp, new},{reuseaddr, true}, + {client_srp_dsa, [{ssl_imp, new}, {srp_identity, {"Test-User", "secret"}}, {cacertfile, ClientCaCertFile}, {certfile, ClientCertFile}, {keyfile, ClientKeyFile}]} @@ -502,7 +500,7 @@ make_ecdsa_cert(Config) -> {cacertfile, ClientCaCertFile}, {certfile, ServerCertFile}, {keyfile, ServerKeyFile}, {verify, verify_peer}]}, - {client_ecdsa_opts, [{ssl_imp, new},{reuseaddr, true}, + {client_ecdsa_opts, [{ssl_imp, new}, {cacertfile, ClientCaCertFile}, {certfile, ClientCertFile}, {keyfile, ClientKeyFile}]} | Config]; @@ -537,7 +535,7 @@ make_ecdh_rsa_cert(Config) -> {cacertfile, ClientCaCertFile}, {certfile, ServerCertFile}, {keyfile, ServerKeyFile}, {verify, verify_peer}]}, - {client_ecdh_rsa_opts, [{ssl_imp, new},{reuseaddr, true}, + {client_ecdh_rsa_opts, [{ssl_imp, new}, {cacertfile, ClientCaCertFile}, {certfile, ClientCertFile}, {keyfile, ClientKeyFile}]} | Config]; @@ -557,7 +555,7 @@ make_mix_cert(Config) -> {cacertfile, ClientCaCertFile}, {certfile, ServerCertFile}, {keyfile, ServerKeyFile}, {verify, verify_peer}]}, - {client_mix_opts, [{ssl_imp, new},{reuseaddr, true}, + {client_mix_opts, [{ssl_imp, new}, {cacertfile, ClientCaCertFile}, {certfile, ClientCertFile}, {keyfile, ClientKeyFile}]} | Config]. @@ -827,17 +825,17 @@ rsa_suites(CounterPart) -> ({dhe_rsa, des_cbc, sha}) when FIPS == true -> false; ({rsa, Cipher, _}) -> - lists:member(Cipher, Ciphers); + lists:member(cipher_atom(Cipher), Ciphers); ({dhe_rsa, Cipher, _}) -> - lists:member(Cipher, Ciphers); + lists:member(cipher_atom(Cipher), Ciphers); ({ecdhe_rsa, Cipher, _}) when ECC == true -> - lists:member(Cipher, Ciphers); + lists:member(cipher_atom(Cipher), Ciphers); ({rsa, Cipher, _, _}) -> - lists:member(Cipher, Ciphers); + lists:member(cipher_atom(Cipher), Ciphers); ({dhe_rsa, Cipher, _,_}) -> - lists:member(Cipher, Ciphers); + lists:member(cipher_atom(Cipher), Ciphers); ({ecdhe_rsa, Cipher, _,_}) when ECC == true -> - lists:member(Cipher, Ciphers); + lists:member(cipher_atom(Cipher), Ciphers); (_) -> false end, @@ -930,44 +928,12 @@ anonymous_suites(Version) -> Suites = ssl_cipher:anonymous_suites(Version), ssl_cipher:filter_suites(Suites). 
-psk_suites() -> - Suites = - [{psk, rc4_128, sha}, - {psk, '3des_ede_cbc', sha}, - {psk, aes_128_cbc, sha}, - {psk, aes_256_cbc, sha}, - {psk, aes_128_cbc, sha256}, - {psk, aes_256_cbc, sha384}, - {dhe_psk, rc4_128, sha}, - {dhe_psk, '3des_ede_cbc', sha}, - {dhe_psk, aes_128_cbc, sha}, - {dhe_psk, aes_256_cbc, sha}, - {dhe_psk, aes_128_cbc, sha256}, - {dhe_psk, aes_256_cbc, sha384}, - {rsa_psk, rc4_128, sha}, - {rsa_psk, '3des_ede_cbc', sha}, - {rsa_psk, aes_128_cbc, sha}, - {rsa_psk, aes_256_cbc, sha}, - {rsa_psk, aes_128_cbc, sha256}, - {rsa_psk, aes_256_cbc, sha384}, - {psk, aes_128_gcm, null, sha256}, - {psk, aes_256_gcm, null, sha384}, - {dhe_psk, aes_128_gcm, null, sha256}, - {dhe_psk, aes_256_gcm, null, sha384}, - {rsa_psk, aes_128_gcm, null, sha256}, - {rsa_psk, aes_256_gcm, null, sha384}], +psk_suites(Version) -> + Suites = ssl_cipher:psk_suites(Version), ssl_cipher:filter_suites(Suites). -psk_anon_suites() -> - Suites = - [{psk, rc4_128, sha}, - {psk, '3des_ede_cbc', sha}, - {psk, aes_128_cbc, sha}, - {psk, aes_256_cbc, sha}, - {dhe_psk, rc4_128, sha}, - {dhe_psk, '3des_ede_cbc', sha}, - {dhe_psk, aes_128_cbc, sha}, - {dhe_psk, aes_256_cbc, sha}], +psk_anon_suites(Version) -> + Suites = [Suite || Suite <- psk_suites(Version), is_psk_anon_suite(Suite)], ssl_cipher:filter_suites(Suites). srp_suites() -> @@ -1089,14 +1055,16 @@ init_tls_version(Version, Config) application:load(ssl), application:set_env(ssl, dtls_protocol_version, Version), ssl:start(), - [{protocol, dtls}, {protocol_opts, [{protocol, dtls}]}|Config]; + NewConfig = proplists:delete(protocol_opts, proplists:delete(protocol, Config)), + [{protocol, dtls}, {protocol_opts, [{protocol, dtls}]} | NewConfig]; init_tls_version(Version, Config) -> ssl:stop(), application:load(ssl), application:set_env(ssl, protocol_version, Version), ssl:start(), - [{protocol, tls}|Config]. + NewConfig = proplists:delete(protocol_opts, proplists:delete(protocol, Config)), + [{protocol, tls} | NewConfig]. sufficient_crypto_support(Version) when Version == 'tlsv1.2'; Version == 'dtlsv1.2' -> @@ -1222,6 +1190,10 @@ check_sane_openssl_version(Version) -> false; {'tlsv1.1', "OpenSSL 0" ++ _} -> false; + {'dtlsv1', "OpenSSL 0" ++ _} -> + false; + {'dtlsv1.2', "OpenSSL 0" ++ _} -> + false; {_, _} -> true end; @@ -1231,19 +1203,37 @@ check_sane_openssl_version(Version) -> enough_openssl_crl_support("OpenSSL 0." ++ _) -> false; enough_openssl_crl_support(_) -> true. -wait_for_openssl_server(Port) -> - wait_for_openssl_server(Port, 10). -wait_for_openssl_server(_, 0) -> +wait_for_openssl_server(Port, tls) -> + do_wait_for_openssl_tls_server(Port, 10); +wait_for_openssl_server(Port, dtls) -> + do_wait_for_openssl_dtls_server(Port, 10). + +do_wait_for_openssl_tls_server(_, 0) -> exit(failed_to_connect_to_openssl); -wait_for_openssl_server(Port, N) -> +do_wait_for_openssl_tls_server(Port, N) -> case gen_tcp:connect("localhost", Port, []) of {ok, S} -> gen_tcp:close(S); _ -> ct:sleep(?SLEEP), - wait_for_openssl_server(Port, N-1) + do_wait_for_openssl_tls_server(Port, N-1) end. +do_wait_for_openssl_dtls_server(_, 0) -> + %%exit(failed_to_connect_to_openssl); + ok; +do_wait_for_openssl_dtls_server(Port, N) -> + %% case gen_udp:open(0) of + %% {ok, S} -> + %% gen_udp:connect(S, "localhost", Port), + %% gen_udp:close(S); + %% _ -> + %% ct:sleep(?SLEEP), + %% do_wait_for_openssl_dtls_server(Port, N-1) + %% end. + ct:sleep(500), + do_wait_for_openssl_dtls_server(Port, N-1). 
+ version_flag(tlsv1) -> "-tls1"; version_flag('tlsv1.1') -> @@ -1253,10 +1243,14 @@ version_flag('tlsv1.2') -> version_flag(sslv3) -> "-ssl3"; version_flag(sslv2) -> - "-ssl2". - -filter_suites(Ciphers0) -> - Version = tls_record:highest_protocol_version([]), + "-ssl2"; +version_flag('dtlsv1.2') -> + "-dtls1_2"; +version_flag('dtlsv1') -> + "-dtls1". + +filter_suites(Ciphers0, AtomVersion) -> + Version = tls_version(AtomVersion), Supported0 = ssl_cipher:suites(Version) ++ ssl_cipher:anonymous_suites(Version) ++ ssl_cipher:psk_suites(Version) @@ -1338,7 +1332,7 @@ protocol_version(Config) -> protocol_version(Config, tuple) -> case proplists:get_value(protocol, Config) of dtls -> - dtls_record:protocol_version(dtls_record:highest_protocol_version([])); + dtls_record:highest_protocol_version(dtls_record:supported_protocol_versions()); _ -> tls_record:highest_protocol_version(tls_record:supported_protocol_versions()) end; @@ -1372,6 +1366,7 @@ clean_env() -> application:unset_env(ssl, session_cache_client_max), application:unset_env(ssl, session_cache_server_max), application:unset_env(ssl, ssl_pem_cache_clean), + application:unset_env(ssl, bypass_pem_cache), application:unset_env(ssl, alert_timeout). clean_start() -> @@ -1379,3 +1374,105 @@ clean_start() -> application:load(ssl), clean_env(), ssl:start(). + +is_psk_anon_suite({psk, _,_}) -> + true; +is_psk_anon_suite({dhe_psk,_,_}) -> + true; +is_psk_anon_suite({psk, _,_,_}) -> + true; +is_psk_anon_suite({dhe_psk, _,_,_}) -> + true; +is_psk_anon_suite(_) -> + false. + +cipher_atom(aes_256_cbc) -> + aes_cbc256; +cipher_atom(aes_128_cbc) -> + aes_cbc128; +cipher_atom('3des_ede_cbc') -> + des_ede3; +cipher_atom(Atom) -> + Atom. +tls_version('dtlsv1' = Atom) -> + dtls_v1:corresponding_tls_version(dtls_record:protocol_version(Atom)); +tls_version('dtlsv1.2' = Atom) -> + dtls_v1:corresponding_tls_version(dtls_record:protocol_version(Atom)); +tls_version(Atom) -> + tls_record:protocol_version(Atom). + +dtls_hello() -> + [1, + <<0,1,4>>, + <<0,0>>, + <<0,0,0>>, + <<0,1,4>>, + <<254,253,88, + 156,129,61, + 131,216,15, + 131,194,242, + 46,154,190, + 20,228,234, + 234,150,44, + 62,96,96,103, + 127,95,103, + 23,24,42,138, + 13,142,32,57, + 230,177,32, + 210,154,152, + 188,121,134, + 136,53,105, + 118,96,106, + 103,231,223, + 133,10,165, + 50,32,211, + 227,193,14, + 181,143,48, + 66,0,0,100,0, + 255,192,44, + 192,48,192, + 36,192,40, + 192,46,192, + 50,192,38, + 192,42,0,159, + 0,163,0,107, + 0,106,0,157, + 0,61,192,43, + 192,47,192, + 35,192,39, + 192,45,192, + 49,192,37, + 192,41,0,158, + 0,162,0,103, + 0,64,0,156,0, + 60,192,10, + 192,20,0,57, + 0,56,192,5, + 192,15,0,53, + 192,8,192,18, + 0,22,0,19, + 192,3,192,13, + 0,10,192,9, + 192,19,0,51, + 0,50,192,4, + 192,14,0,47, + 1,0,0,86,0,0, + 0,14,0,12,0, + 0,9,108,111, + 99,97,108, + 104,111,115, + 116,0,10,0, + 58,0,56,0,14, + 0,13,0,25,0, + 28,0,11,0,12, + 0,27,0,24,0, + 9,0,10,0,26, + 0,22,0,23,0, + 8,0,6,0,7,0, + 20,0,21,0,4, + 0,5,0,18,0, + 19,0,1,0,2,0, + 3,0,15,0,16, + 0,17,0,11,0, + 2,1,0>>]. + diff --git a/lib/ssl/test/ssl_to_openssl_SUITE.erl b/lib/ssl/test/ssl_to_openssl_SUITE.erl index e99340822d..48fd2b7eab 100644 --- a/lib/ssl/test/ssl_to_openssl_SUITE.erl +++ b/lib/ssl/test/ssl_to_openssl_SUITE.erl @@ -42,7 +42,9 @@ all() -> {group, 'tlsv1.2'}, {group, 'tlsv1.1'}, {group, 'tlsv1'}, - {group, 'sslv3'} + {group, 'sslv3'}, + {group, 'dtlsv1.2'}, + {group, 'dtlsv1'} ]. 
groups() -> @@ -50,7 +52,10 @@ groups() -> {'tlsv1.2', [], all_versions_tests() ++ alpn_tests() ++ npn_tests() ++ sni_server_tests()}, {'tlsv1.1', [], all_versions_tests() ++ alpn_tests() ++ npn_tests() ++ sni_server_tests()}, {'tlsv1', [], all_versions_tests()++ alpn_tests() ++ npn_tests() ++ sni_server_tests()}, - {'sslv3', [], all_versions_tests()}]. + {'sslv3', [], all_versions_tests()}, + {'dtlsv1.2', [], dtls_all_versions_tests()}, + {'dtlsv1', [], dtls_all_versions_tests()} + ]. basic_tests() -> [basic_erlang_client_openssl_server, @@ -78,6 +83,24 @@ all_versions_tests() -> expired_session, ssl2_erlang_server_openssl_client ]. +dtls_all_versions_tests() -> + [ + %%erlang_client_openssl_server, + erlang_server_openssl_client, + %%erlang_client_openssl_server_dsa_cert, + erlang_server_openssl_client_dsa_cert, + erlang_server_openssl_client_reuse_session + %%erlang_client_openssl_server_renegotiate, + %%erlang_client_openssl_server_nowrap_seqnum, + %%erlang_server_openssl_client_nowrap_seqnum, + %%erlang_client_openssl_server_no_server_ca_cert, + %%erlang_client_openssl_server_client_cert, + %%erlang_server_openssl_client_client_cert + %%ciphers_rsa_signed_certs, + %%ciphers_dsa_signed_certs, + %%erlang_client_bad_openssl_server, + %%expired_session + ]. alpn_tests() -> [erlang_client_alpn_openssl_server_alpn, @@ -144,13 +167,18 @@ init_per_group(basic, Config) -> init_per_group(GroupName, Config) -> case ssl_test_lib:is_tls_version(GroupName) of true -> - case ssl_test_lib:check_sane_openssl_version(GroupName) of - true -> - ssl_test_lib:init_tls_version(GroupName, Config); - false -> - {skip, openssl_does_not_support_version} - end; - _ -> + case ssl_test_lib:supports_ssl_tls_version(GroupName) of + true -> + case ssl_test_lib:check_sane_openssl_version(GroupName) of + true -> + ssl_test_lib:init_tls_version(GroupName, Config); + false -> + {skip, openssl_does_not_support_version} + end; + false -> + {skip, openssl_does_not_support_version} + end; + _ -> ssl:start(), Config end. 
@@ -284,7 +312,8 @@ basic_erlang_client_openssl_server(Config) when is_list(Config) -> OpensslPort = ssl_test_lib:portable_open_port(Exe, Args), - ssl_test_lib:wait_for_openssl_server(Port), + + ssl_test_lib:wait_for_openssl_server(Port, tls), Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, {host, Hostname}, @@ -357,7 +386,7 @@ erlang_client_openssl_server(Config) when is_list(Config) -> OpensslPort = ssl_test_lib:portable_open_port(Exe, Args), - ssl_test_lib:wait_for_openssl_server(Port), + ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)), Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, {host, Hostname}, @@ -431,7 +460,7 @@ erlang_client_openssl_server_dsa_cert(Config) when is_list(Config) -> OpensslPort = ssl_test_lib:portable_open_port(Exe, Args), - ssl_test_lib:wait_for_openssl_server(Port), + ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)), Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, {host, Hostname}, @@ -551,7 +580,7 @@ erlang_client_openssl_server_renegotiate(Config) when is_list(Config) -> OpensslPort = ssl_test_lib:portable_open_port(Exe, Args), - ssl_test_lib:wait_for_openssl_server(Port), + ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)), Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, {host, Hostname}, @@ -600,7 +629,7 @@ erlang_client_openssl_server_nowrap_seqnum(Config) when is_list(Config) -> OpensslPort = ssl_test_lib:portable_open_port(Exe, Args), - ssl_test_lib:wait_for_openssl_server(Port), + ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)), Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, {host, Hostname}, @@ -681,7 +710,7 @@ erlang_client_openssl_server_no_server_ca_cert(Config) when is_list(Config) -> OpensslPort = ssl_test_lib:portable_open_port(Exe, Args), - ssl_test_lib:wait_for_openssl_server(Port), + ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)), Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, {host, Hostname}, @@ -724,7 +753,7 @@ erlang_client_openssl_server_client_cert(Config) when is_list(Config) -> OpensslPort = ssl_test_lib:portable_open_port(Exe, Args), - ssl_test_lib:wait_for_openssl_server(Port), + ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)), Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, {host, Hostname}, @@ -856,7 +885,7 @@ erlang_client_bad_openssl_server(Config) when is_list(Config) -> "-cert", CertFile, "-key", KeyFile], OpensslPort = ssl_test_lib:portable_open_port(Exe, Args), - ssl_test_lib:wait_for_openssl_server(Port), + ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)), Client0 = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, {host, Hostname}, @@ -911,7 +940,7 @@ expired_session(Config) when is_list(Config) -> OpensslPort = ssl_test_lib:portable_open_port(Exe, Args), - ssl_test_lib:wait_for_openssl_server(Port), + ssl_test_lib:wait_for_openssl_server(Port, tls), Client0 = ssl_test_lib:start_client([{node, ClientNode}, @@ -970,20 +999,7 @@ ssl2_erlang_server_openssl_client(Config) when is_list(Config) -> true = port_command(OpenSslPort, Data), ct:log("Ports ~p~n", [[erlang:port_info(P) || P <- erlang:ports()]]), - receive - {'EXIT', OpenSslPort, _} = Exit -> - ct:log("Received: ~p ~n", [Exit]), - ok - end, - receive - {'EXIT', _, _} = 
UnkownExit -> - Msg = lists:flatten(io_lib:format("Received: ~p ~n", [UnkownExit])), - ct:log(Msg), - ct:comment(Msg), - ok - after 0 -> - ok - end, + consume_port_exit(OpenSslPort), ssl_test_lib:check_result(Server, {error, {tls_alert, "handshake failure"}}), process_flag(trap_exit, false). %%-------------------------------------------------------------------- @@ -1014,20 +1030,7 @@ ssl2_erlang_server_openssl_client_comp(Config) when is_list(Config) -> true = port_command(OpenSslPort, Data), ct:log("Ports ~p~n", [[erlang:port_info(P) || P <- erlang:ports()]]), - receive - {'EXIT', OpenSslPort, _} = Exit -> - ct:log("Received: ~p ~n", [Exit]), - ok - end, - receive - {'EXIT', _, _} = UnkownExit -> - Msg = lists:flatten(io_lib:format("Received: ~p ~n", [UnkownExit])), - ct:log(Msg), - ct:comment(Msg), - ok - after 0 -> - ok - end, + consume_port_exit(OpenSslPort), ssl_test_lib:check_result(Server, {error, {tls_alert, "protocol version"}}), process_flag(trap_exit, false). @@ -1399,7 +1402,7 @@ cipher(CipherSuite, Version, Config, ClientOpts, ServerOpts) -> OpenSslPort = ssl_test_lib:portable_open_port(Exe, Args), - ssl_test_lib:wait_for_openssl_server(Port), + ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)), ConnectionInfo = {ok, {Version, CipherSuite}}, @@ -1469,7 +1472,7 @@ start_erlang_client_and_openssl_server_with_opts(Config, ErlangClientOpts, Opens OpensslPort = ssl_test_lib:portable_open_port(Exe, Args), - ssl_test_lib:wait_for_openssl_server(Port), + ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)), Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, {host, Hostname}, @@ -1505,7 +1508,7 @@ start_erlang_client_and_openssl_server_for_alpn_negotiation(Config, Data, Callba Args = ["s_server", "-msg", "-alpn", "http/1.1,spdy/2", "-accept", integer_to_list(Port), ssl_test_lib:version_flag(Version), "-cert", CertFile, "-key", KeyFile], OpensslPort = ssl_test_lib:portable_open_port(Exe, Args), - ssl_test_lib:wait_for_openssl_server(Port), + ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)), Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, {host, Hostname}, @@ -1574,7 +1577,7 @@ start_erlang_client_and_openssl_server_for_alpn_npn_negotiation(Config, Data, Ca OpensslPort = ssl_test_lib:portable_open_port(Exe, Args), - ssl_test_lib:wait_for_openssl_server(Port), + ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)), Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, {host, Hostname}, @@ -1639,7 +1642,7 @@ start_erlang_client_and_openssl_server_for_npn_negotiation(Config, Data, Callbac "-cert", CertFile, "-key", KeyFile], OpensslPort = ssl_test_lib:portable_open_port(Exe, Args), - ssl_test_lib:wait_for_openssl_server(Port), + ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)), Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, {host, Hostname}, @@ -1848,3 +1851,9 @@ openssl_client_args(false, Hostname, Port, ServerName) -> openssl_client_args(true, Hostname, Port, ServerName) -> ["s_client", "-no_ssl2", "-connect", Hostname ++ ":" ++ integer_to_list(Port), "-servername", ServerName]. + +consume_port_exit(OpenSSLPort) -> + receive + {'EXIT', OpenSSLPort, _} -> + ok + end. 
diff --git a/lib/ssl/vsn.mk b/lib/ssl/vsn.mk index 2cdb825d75..415a47949d 100644 --- a/lib/ssl/vsn.mk +++ b/lib/ssl/vsn.mk @@ -1 +1 @@ -SSL_VSN = 8.1 +SSL_VSN = 8.1.1 diff --git a/lib/stdlib/doc/src/c.xml b/lib/stdlib/doc/src/c.xml index 55a77d1bc5..7666699183 100644 --- a/lib/stdlib/doc/src/c.xml +++ b/lib/stdlib/doc/src/c.xml @@ -52,13 +52,27 @@ <func> <name name="c" arity="1"/> <name name="c" arity="2"/> - <fsummary>Compile and load code in a file.</fsummary> + <name name="c" arity="3"/> + <fsummary>Compile and load a file or module.</fsummary> <desc> - <p>Compiles and then purges and loads the code for a file. - <c><anno>Options</anno></c> defaults to <c>[]</c>. Compilation is - equivalent to:</p> - <code type="none"> -compile:file(<anno>File</anno>, <anno>Options</anno> ++ [report_errors, report_warnings])</code> + <p>Compiles and then purges and loads the code for a module. + <c><anno>Module</anno></c> can be either a module name or a source + file path, with or without <c>.erl</c> extension. + <c><anno>Options</anno></c> defaults to <c>[]</c>.</p> + <p>If <c><anno>Module</anno></c> is an atom and is not the path of a + source file, then the code path is searched to locate the object + file for the module and extract its original compiler options and + source path. If the source file is not found in the original + location, <seealso + marker="filelib#find_source/1"><c>filelib:find_source/1</c></seealso> + is used to search for it relative to the directory of the object + file.</p> + <p>The source file is compiled with the original + options appended to the given <c><anno>Options</anno></c>, the + output replacing the old object file if and only if compilation + succeeds. A function <c><anno>Filter</anno></c> can be specified + for removing elements from the original compiler options + before the new options are added.</p> <p>Notice that purging the code means that any processes lingering in old code for the module are killed without warning. For more information, see <c>code/3</c>.</p> diff --git a/lib/stdlib/doc/src/erl_tar.xml b/lib/stdlib/doc/src/erl_tar.xml index 24e7b64b9e..f28d8b425b 100644 --- a/lib/stdlib/doc/src/erl_tar.xml +++ b/lib/stdlib/doc/src/erl_tar.xml @@ -37,12 +37,13 @@ </modulesummary> <description> <p>This module archives and extract files to and from - a tar file. This module supports the <c>ustar</c> format - (IEEE Std 1003.1 and ISO/IEC 9945-1). All modern <c>tar</c> - programs (including GNU tar) can read this format. To ensure that - that GNU tar produces a tar file that <c>erl_tar</c> can read, - specify option <c>--format=ustar</c> to GNU tar.</p> - + a tar file. This module supports reading most common tar formats, + namely v7, STAR, USTAR, and PAX, as well as some of GNU tar's extensions + to the USTAR format (sparse files most notably). It produces tar archives + in USTAR format, unless the files being archived require PAX format due to + restrictions in USTAR (such as Unicode metadata, filename length, and more). + As such, <c>erl_tar</c> supports tar archives produced by most modern + tar utilities, and produces tarballs which should be similarly portable.</p> <p>By convention, the name of a tar file is to end in "<c>.tar</c>".
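As a hypothetical illustration of the c:c/1,2,3 behaviour documented in the c.xml change above (the module name and options are invented, and the sketch assumes the Filter fun keeps the original options for which it returns true):
1> c(my_module).                %% recompile with the module's original options
{ok,my_module}
2> c(my_module, [debug_info]).  %% original options plus debug_info
{ok,my_module}
3> c(my_module, [debug_info], fun(Opt) -> Opt =/= native end).
{ok,my_module}                  %% 'native' is removed from the original options first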
To abide to the convention, add "<c>.tar</c>" to the name.</p> @@ -83,6 +84,8 @@ <p>If <seealso marker="kernel:file#native_name_encoding/0"> <c>file:native_name_encoding/0</c></seealso> returns <c>latin1</c>, no translation of path names is done.</p> + + <p>Unicode metadata stored in PAX headers is preserved.</p> </section> <section> @@ -104,21 +107,20 @@ <title>Limitations</title> <list type="bulleted"> <item> - <p>For maximum compatibility, it is safe to archive files with names - up to 100 characters in length. Such tar files can generally be - extracted by any <c>tar</c> program.</p> - </item> - <item> - <p>For filenames exceeding 100 characters in length, the resulting tar - file can only be correctly extracted by a POSIX-compatible <c>tar</c> - program (such as Solaris <c>tar</c> or a modern GNU <c>tar</c>).</p> - </item> - <item> - <p>Files with longer names than 256 bytes cannot be stored.</p> + <p>If you must remain compatible with the USTAR tar format, you must ensure file paths being + stored are less than 255 bytes in total, with a maximum filename component + length of 100 bytes. USTAR uses a header field (prefix) in addition to the name field, and + splits file paths longer than 100 bytes into two parts. This split is done on a directory boundary, + and is done in such a way as to make the best use of the space available in those two fields, but in practice + this will often mean that you have less than 255 bytes for a path. <c>erl_tar</c> will + automatically upgrade the format to PAX to handle longer filenames, so this is only an issue if you + need to extract the archive with an older implementation of <c>erl_tar</c> or <c>tar</c> which does + not support PAX. In this case, the PAX headers will be extracted as regular files, and you will need to + apply them manually.</p> </item> <item> - <p>The file name a symbolic link points is always limited - to 100 characters.</p> + <p>Like the above, if you must remain USTAR compatible, you must also ensure that paths for + symbolic/hard links are no more than 100 bytes, otherwise PAX headers will be used.</p> </item> </list> </section> @@ -129,7 +131,9 @@ <fsummary>Add a file to an open tar file.</fsummary> <type> <v>TarDescriptor = term()</v> - <v>Filename = filename()</v> + <v>FilenameOrBin = filename()|binary()</v> + <v>NameInArchive = filename()</v> + <v>Filename = filename()|{NameInArchive,FilenameOrBin}</v> <v>Options = [Option]</v> <v>Option = dereference|verbose|{chunks,ChunkSize}</v> <v>ChunkSize = positive_integer()</v> @@ -139,6 +143,9 @@ <desc> <p>Adds a file to a tar file that has been opened for writing by <seealso marker="#open/2"><c>open/1</c></seealso>.</p> + <p><c>NameInArchive</c> is the name under which the file becomes + stored in the tar file. The file gets this name when it is + extracted from the tar file.</p> <p>Options:</p> <taglist> <tag><c>dereference</c></tag> @@ -183,9 +190,6 @@ <seealso marker="#open/2"><c>open/2</c></seealso>. This function accepts the same options as <seealso marker="#add/3"><c>add/3</c></seealso>.</p> - <p><c>NameInArchive</c> is the name under which the file becomes - stored in the tar file.
The file gets this name when it is - extracted from the tar file.</p> </desc> </func> @@ -206,8 +210,8 @@ <fsummary>Create a tar archive.</fsummary> <type> <v>Name = filename()</v> - <v>FileList = [Filename|{NameInArchive, binary()},{NameInArchive, - Filename}]</v> + <v>FileList = [Filename|{NameInArchive, FilenameOrBin}]</v> + <v>FilenameOrBin = filename()|binary()</v> <v>Filename = filename()</v> <v>NameInArchive = filename()</v> <v>RetValue = ok|{error,{Name,Reason}}</v> @@ -225,8 +229,8 @@ <fsummary>Create a tar archive with options.</fsummary> <type> <v>Name = filename()</v> - <v>FileList = [Filename|{NameInArchive, binary()},{NameInArchive, - Filename}]</v> + <v>FileList = [Filename|{NameInArchive, FilenameOrBin}]</v> + <v>FilenameOrBin = filename()|binary()</v> <v>Filename = filename()</v> <v>NameInArchive = filename()</v> <v>OptionList = [Option]</v> @@ -275,7 +279,8 @@ <name>extract(Name) -> RetValue</name> <fsummary>Extract all files from a tar file.</fsummary> <type> - <v>Name = filename()</v> + <v>Name = filename() | {binary,binary()} | {file,Fd}</v> + <v>Fd = file_descriptor()</v> <v>RetValue = ok|{error,{Name,Reason}}</v> <v>Reason = term()</v> </type> @@ -294,8 +299,7 @@ <name>extract(Name, OptionList)</name> <fsummary>Extract files from a tar file.</fsummary> <type> - <v>Name = filename() | {binary,Binary} | {file,Fd}</v> - <v>Binary = binary()</v> + <v>Name = filename() | {binary,binary()} | {file,Fd}</v> <v>Fd = file_descriptor()</v> <v>OptionList = [Option]</v> <v>Option = {cwd,Cwd}|{files,FileList}|keep_old_files|verbose|memory</v> @@ -521,7 +525,7 @@ erl_tar:close(TarDesc)</code> <name>table(Name) -> RetValue</name> <fsummary>Retrieve the name of all files in a tar file.</fsummary> <type> - <v>Name = filename()</v> + <v>Name = filename()|{binary,binary()}|{file,file_descriptor()}</v> <v>RetValue = {ok,[string()]}|{error,{Name,Reason}}</v> <v>Reason = term()</v> </type> @@ -535,7 +539,7 @@ erl_tar:close(TarDesc)</code> <fsummary>Retrieve name and information of all files in a tar file. </fsummary> <type> - <v>Name = filename()</v> + <v>Name = filename()|{binary,binary()}|{file,file_descriptor()}</v> </type> <desc> <p>Retrieves the names of all files in the tar file <c>Name</c>.</p> @@ -546,7 +550,7 @@ erl_tar:close(TarDesc)</code> <name>t(Name)</name> <fsummary>Print the name of each file in a tar file.</fsummary> <type> - <v>Name = filename()</v> + <v>Name = filename()|{binary,binary()}|{file,file_descriptor()}</v> </type> <desc> <p>Prints the names of all files in the tar file <c>Name</c> to the @@ -559,7 +563,7 @@ erl_tar:close(TarDesc)</code> <fsummary>Print name and information for each file in a tar file. 
</fsummary> <type> - <v>Name = filename()</v> + <v>Name = filename()|{binary,binary()}|{file,file_descriptor()}</v> </type> <desc> <p>Prints names and information about all files in the tar file diff --git a/lib/stdlib/doc/src/filelib.xml b/lib/stdlib/doc/src/filelib.xml index 7c6380ce28..ad73fc254a 100644 --- a/lib/stdlib/doc/src/filelib.xml +++ b/lib/stdlib/doc/src/filelib.xml @@ -60,6 +60,12 @@ <datatype> <name name="filename_all"/> </datatype> + <datatype> + <name name="find_file_rule"/> + </datatype> + <datatype> + <name name="find_source_rule"/> + </datatype> </datatypes> <funcs> @@ -226,7 +232,51 @@ filelib:wildcard("lib/**/*.{erl,hrl}")</code> directory.</p> </desc> </func> + + <func> + <name name="find_file" arity="2"/> + <name name="find_file" arity="3"/> + <fsummary>Find a file relative to a given directory.</fsummary> + <desc> + <p>Looks for a file of the given name by applying suffix rules to + the given directory path. For example, a rule <c>{"ebin", "src"}</c> + means that if the directory path ends with <c>"ebin"</c>, the + corresponding path ending in <c>"src"</c> should be searched.</p> + <p>If <c><anno>Rules</anno></c> is left out or is an empty list, the + default system rules are used. See also the Kernel application + parameter <seealso + marker="kernel:kernel_app#source_search_rules"><c>source_search_rules</c></seealso>.</p> + </desc> + </func> + <func> + <name name="find_source" arity="1"/> + <fsummary>Find the source file for a given object file.</fsummary> + <desc> + <p>Equivalent to <c>find_source(Base, Dir)</c>, where <c>Dir</c> is + <c>filename:dirname(<anno>FilePath</anno>)</c> and <c>Base</c> is + <c>filename:basename(<anno>FilePath</anno>)</c>.</p> + </desc> + </func> + <func> + <name name="find_source" arity="2"/> + <name name="find_source" arity="3"/> + <fsummary>Find a source file relative to a given directory.</fsummary> + <desc> + <p>Applies file extension specific rules to find the source file for + a given object file relative to the object directory. For example, + for a file with the extension <c>.beam</c>, the default rule is to + look for a file with a corresponding extension <c>.erl</c> by + replacing the suffix <c>"ebin"</c> of the object directory path with + <c>"src"</c>. + The file search is done through <seealso + marker="#find_file/3"><c>find_file/3</c></seealso>. The directory of + the object file is always tried before any other directory specified + by the rules.</p> + <p>If <c><anno>Rules</anno></c> is left out or is an empty list, the + default system rules are used. See also the Kernel application + parameter <seealso + marker="kernel:kernel_app#source_search_rules"><c>source_search_rules</c></seealso>.</p> + </desc> + </func> </funcs> </erlref> - - diff --git a/lib/stdlib/doc/src/filename.xml b/lib/stdlib/doc/src/filename.xml index 2a413835d0..0ccca37a9d 100644 --- a/lib/stdlib/doc/src/filename.xml +++ b/lib/stdlib/doc/src/filename.xml @@ -356,10 +356,12 @@ true <p>Finds the source filename and compiler options for a module. The result can be fed to <seealso marker="compiler:compile#file/2"> <c>compile:file/2</c></seealso> to compile the file again.</p> - <warning><p>It is not recommended to use this function. If possible, - use the <seealso marker="beam_lib"><c>beam_lib(3)</c></seealso> - module to extract the abstract code format from the Beam file and - compile that instead.</p></warning> + <warning> + <p>This function is deprecated. 
Use <seealso marker="filelib#find_source/1"> + <c>filelib:find_source/1</c></seealso> instead for finding source files.</p> + <p>If possible, use the <seealso marker="beam_lib"><c>beam_lib(3)</c></seealso> + module to extract the compiler options and the abstract code + format from the Beam file and compile that instead.</p></warning> <p>Argument <c><anno>Beam</anno></c>, which can be a string or an atom, specifies either the module name or the path to the source code, with or without extension <c>".erl"</c>. In either @@ -511,6 +513,33 @@ true </func> <func> + <name name="safe_relative_path" arity="1"/> + <fsummary>Sanitize a relative path to avoid directory traversal attacks.</fsummary> + <desc> + <p>Sanitizes the relative path by eliminating ".." and "." + components to protect against directory traversal attacks. + Either returns the sanitized path name, or the atom + <c>unsafe</c> if the path is unsafe. + The path is considered unsafe in the following circumstances:</p> + <list type="bulleted"> + <item><p>The path is not relative.</p></item> + <item><p>A ".." component would climb up above the root of + the relative path.</p></item> + </list> + <p><em>Examples:</em></p> + <pre> +1> <input>filename:safe_relative_path("dir/sub_dir/..").</input> +"dir" +2> <input>filename:safe_relative_path("dir/..").</input> +[] +3> <input>filename:safe_relative_path("dir/../..").</input> +unsafe +4> <input>filename:safe_relative_path("/abs/path").</input> +unsafe</pre> + </desc> + </func> + + <func> <name name="split" arity="1"/> <fsummary>Split a filename into its path components.</fsummary> <desc> diff --git a/lib/stdlib/doc/src/gen_statem.xml b/lib/stdlib/doc/src/gen_statem.xml index fd498ee82e..5eb13db1aa 100644 --- a/lib/stdlib/doc/src/gen_statem.xml +++ b/lib/stdlib/doc/src/gen_statem.xml @@ -4,7 +4,7 @@ <erlref> <header> <copyright> - <year>2016</year> + <year>2016-2017</year> <holder>Ericsson AB. All Rights Reserved.</holder> </copyright> <legalnotice> @@ -587,8 +587,8 @@ handle_event(_, _, State, Data) -> <name name="state_enter"/> <desc> <p> - If the state machine should use <em>state enter calls</em> - is selected when starting the <c>gen_statem</c> + Whether the state machine should use <em>state enter calls</em> + or not is selected when starting the <c>gen_statem</c> and after code change using the return value from <seealso marker="#Module:callback_mode/0"><c>Module:callback_mode/0</c></seealso>. </p> @@ -606,7 +606,16 @@ handle_event(_, _, State, Data) -> See <seealso marker="#Module:StateName/3"><c>Module:StateName/3</c></seealso> and - <seealso marker="#Module:handle_event/4"><c>Module:handle_event/4</c></seealso>. + <seealso marker="#Module:handle_event/4"><c>Module:handle_event/4</c></seealso>. + Such a call can be repeated by returning a + <seealso marker="#type-state_callback_result"> + <c>repeat_state</c> + </seealso> + or + <seealso marker="#type-state_callback_result"> + <c>repeat_state_and_data</c> + </seealso> + tuple from the state callback. </p> <p> If @@ -625,7 +634,8 @@ handle_event(_, _, State, Data) -> right before entering the initial state even though this formally is not a state change. In this case <c>OldState</c> will be the same as <c>State</c>, - which can not happen for a subsequent state change. + which can not happen for a subsequent state change, + but will happen when repeating the state enter call. 
</p> </desc> </datatype> @@ -640,7 +650,15 @@ handle_event(_, _, State, Data) -> <list type="ordered"> <item> <p> - If the state changes or is the initial state, and + If the state changes, is the initial state, + <seealso marker="#type-state_callback_result"> + <c>repeat_state</c> + </seealso> + or + <seealso marker="#type-state_callback_result"> + <c>repeat_state_and_data</c> + </seealso> + is used, and also <seealso marker="#type-state_enter"><em>state enter calls</em></seealso> are used, the <c>gen_statem</c> calls the new state callback with arguments @@ -983,6 +1001,33 @@ handle_event(_, _, State, Data) -> </desc> </datatype> <datatype> + <name name="init_result"/> + <desc> + <p> + For a successful initialization, + <c><anno>State</anno></c> is the initial + <seealso marker="#type-state"><c>state()</c></seealso> + and <c><anno>Data</anno></c> the initial server + <seealso marker="#type-data"><c>data()</c></seealso> + of the <c>gen_statem</c>. + </p> + <p> + The <seealso marker="#type-action"><c>Actions</c></seealso> + are executed when entering the first + <seealso marker="#type-state">state</seealso> just as for a + <seealso marker="#state callback">state callback</seealso>, + except that the action <c>postpone</c> is forced to + <c>false</c> since there is no event to postpone. + </p> + <p> + For an unsuccessful initialization, + <c>{stop,<anno>Reason</anno>}</c> + or <c>ignore</c> should be used; see + <seealso marker="#start_link/3"><c>start_link/3,4</c></seealso>. + </p> + </desc> + </datatype> + <datatype> <name name="state_enter_result"/> <desc> <p> @@ -1068,6 +1113,37 @@ handle_event(_, _, State, Data) -> <c>{next_state,CurrentState,CurrentData,<anno>Actions</anno>}</c>. </p> </item> + <tag><c>repeat_state</c></tag> + <item> + <p> + The <c>gen_statem</c> keeps the current state, or + does a state transition to the current state if you like, + sets <c><anno>NewData</anno></c>, + and executes all <c><anno>Actions</anno></c>. + If the <c>gen_statem</c> runs with + <seealso marker="#type-state_enter"><em>state enter calls</em></seealso>, + the state enter call is repeated, see type + <seealso marker="#type-transition_option"><c>transition_option()</c></seealso>, + otherwise <c>repeat_state</c> is the same as + <c>keep_state</c>. + </p> + </item> + <tag><c>repeat_state_and_data</c></tag> + <item> + <p> + The <c>gen_statem</c> keeps the current state and data, or + does a state transition to the current state if you like, + and executes all <c><anno>Actions</anno></c>. + This is the same as + <c>{repeat_state,CurrentData,<anno>Actions</anno>}</c>. + If the <c>gen_statem</c> runs with + <seealso marker="#type-state_enter"><em>state enter calls</em></seealso>, + the state enter call is repeated, see type + <seealso marker="#type-transition_option"><c>transition_option()</c></seealso>, + otherwise <c>repeat_state_and_data</c> is the same as + <c>keep_state_and_data</c>. + </p> + </item> <tag><c>stop</c></tag> <item> <p> @@ -1609,29 +1685,33 @@ handle_event(_, _, State, Data) -> It is recommended to use an atom as <c>Reason</c> since it will be wrapped in an <c>{error,Reason}</c> tuple. </p> + <p> + Also note when upgrading a <c>gen_statem</c>, + this function and hence + the <c>Change={advanced,Extra}</c> parameter in the + <seealso marker="sasl:appup"><c>appup</c></seealso> file + is not only needed to update the internal state + or to act on the <c>Extra</c> argument.
+ It is also needed if an upgrade or downgrade should change + <seealso marker="#type-callback_mode"><em>callback mode</em></seealso>, + or else the callback mode after the code change + will not be honoured, + most probably causing a server crash. + </p> </desc> </func> <func> - <name>Module:init(Args) -> Result</name> + <name>Module:init(Args) -> Result(StateType)</name> <fsummary> Optional function for initializing process and internal state. </fsummary> <type> <v>Args = term()</v> - <v>Result = {ok,State,Data}</v> - <v> | {ok,State,Data,Actions}</v> - <v> | {stop,Reason} | ignore</v> - <v>State = <seealso marker="#type-state">state()</seealso></v> - <v> - Data = <seealso marker="#type-data">data()</seealso> - </v> <v> - Actions = - [<seealso marker="#type-action">action()</seealso>] | - <seealso marker="#type-action">action()</seealso> + Result(StateType) = + <seealso marker="#type-init_result">init_result(StateType)</seealso> </v> - <v>Reason = term()</v> </type> <desc> <marker id="Module:init-1"/> @@ -1644,30 +1724,9 @@ handle_event(_, _, State, Data) -> the implementation state and server data. </p> <p> - <c>Args</c> is the <c>Args</c> argument provided to the start + <c>Args</c> is the <c>Args</c> argument provided to that start function. </p> - <p> - If the initialization is successful, the function is to - return <c>{ok,State,Data}</c> or - <c>{ok,State,Data,Actions}</c>. - <c>State</c> is the initial - <seealso marker="#type-state"><c>state()</c></seealso> - and <c>Data</c> the initial server - <seealso marker="#type-data"><c>data()</c></seealso>. - </p> - <p> - The <seealso marker="#type-action"><c>Actions</c></seealso> - are executed when entering the first - <seealso marker="#type-state">state</seealso> just as for a - <seealso marker="#state callback">state callback</seealso>. - </p> - <p> - If the initialization fails, - the function is to return <c>{stop,Reason}</c> - or <c>ignore</c>; see - <seealso marker="#start_link/3"><c>start_link/3,4</c></seealso>. - </p> <note> <p> This callback is optional, so a callback module does not need @@ -1873,22 +1932,33 @@ handle_event(_, _, State, Data) -> <seealso marker="#type-enter_action">actions</seealso> that may be returned: <seealso marker="#type-postpone"><c>postpone()</c></seealso> - and + is not allowed since a <em>state enter call</em> is not + an event so there is no event to postpone, and <seealso marker="#type-action"><c>{next_event,_,_}</c></seealso> - are not allowed. + is not allowed since using <em>state enter calls</em> + should not affect how events are consumed and produced. You may also not change states from this call. Should you return <c>{next_state,NextState, ...}</c> with <c>NextState =/= State</c> the <c>gen_statem</c> crashes. - You are advised to use <c>{keep_state,...}</c> or - <c>keep_state_and_data</c>. + It is possible to use <c>{repeat_state, ...}</c>, + <c>{repeat_state_and_data,_}</c> or + <c>repeat_state_and_data</c> but all of them makes little + sense since you immediately will be called again with a new + <em>state enter call</em> making this just a weird way + of looping, and there are better ways to loop in Erlang. + You are advised to use <c>{keep_state,...}</c>, + <c>{keep_state_and_data,_}</c> or + <c>keep_state_and_data</c> since you can not change states + from a <em>state enter call</em> anyway. </p> <p> Note the fact that you can use <seealso marker="erts:erlang#throw/1"><c>throw</c></seealso> to return the result, which can be useful. 
For example to bail out with <c>throw(keep_state_and_data)</c> - from deep within complex code that is in no position to - return <c>{next_state,State,Data}</c>. + from deep within complex code that can not + return <c>{next_state,State,Data}</c> because + <c>State</c> or <c>Data</c> is no longer in scope. </p> </desc> </func> @@ -1903,6 +1973,11 @@ handle_event(_, _, State, Data) -> <v>Ignored = term()</v> </type> <desc> + <note> + <p>This callback is optional, so callback modules need not + export it. The <c>gen_statem</c> module provides a default + implementation without cleanup.</p> + </note> <p> This function is called by a <c>gen_statem</c> when it is about to terminate. It is to be the opposite of diff --git a/lib/stdlib/doc/src/notes.xml b/lib/stdlib/doc/src/notes.xml index 0143686bb2..428d8a6e70 100644 --- a/lib/stdlib/doc/src/notes.xml +++ b/lib/stdlib/doc/src/notes.xml @@ -31,6 +31,110 @@ </header> <p>This document describes the changes made to the STDLIB application.</p> +<section><title>STDLIB 3.3</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p>An escript with only two lines would not work.</p> + <p> + Own Id: OTP-14098</p> + </item> + <item> + <p> Characters (<c>$char</c>) can be used in constant + pattern expressions. They can also be used in types and + contracts. </p> + <p> + Own Id: OTP-14103 Aux Id: ERL-313 </p> + </item> + <item> + <p> The signatures of <c>erl_parse:anno_to_term/1</c> and + <c>erl_parse:anno_from_term/1</c> are corrected. Using + these functions no longer results in false Dialyzer + warnings. </p> + <p> + Own Id: OTP-14131</p> + </item> + <item> + <p>Pretty-printing of maps is improved. </p> + <p> + Own Id: OTP-14175 Aux Id: seq13277 </p> + </item> + <item> + <p>If any of the following functions in the <c>zip</c> + module crashed, a file would be left open: + <c>extract()</c>, <c>unzip()</c>, <c>create()</c>, or + <c>zip()</c>. This has been corrected.</p> + <p>A <c>zip</c> file having a "Unix header" could not be + unpacked.</p> + <p> + Own Id: OTP-14189 Aux Id: ERL-348, ERL-349 </p> + </item> + <item> + <p> Improve the Erlang shell's tab-completion of long + names. </p> + <p> + Own Id: OTP-14200 Aux Id: ERL-352 </p> + </item> + <item> + <p> + The reference manual for <c>sys</c> had some faulty + information about the 'get_modules' message used by + processes where modules change dynamically during + runtime. The documentation is now corrected.</p> + <p> + Own Id: OTP-14248 Aux Id: ERL-367 </p> + </item> + </list> + </section> + + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + Bug fixes, new features and improvements to gen_statem:</p> + <p> + A new type init_result/1 has replaced the old + init_result/0, so if you used that old type (that was + never documented) you have to change your code, which may + be regarded as a potential incompatibility.</p> + <p> + Changing callback modes after code change did not work + since the new callback mode was not recorded. This bug + has been fixed.</p> + <p> + The event types state_timeout and {call,From} could not + be generated with a {next_event,EventType,EventContent} + action since they did not pass the runtime type check. + This bug has now been corrected.</p> + <p> + State entry calls can now be repeated using (new) state + callback returns {repeat_state,...}, + {repeat_state_and_data,_} and repeat_state_and_data.</p> + <p> + There have been lots of code cleanup in particular + regarding timer handling. 
For example, async + cancel_timer is now used. Error handling has also been + cleaned up.</p> + <p> + To align with probable future changes to the rest of + gen_*, terminate/3 has now got a fallback and + code_change/4 is not mandatory.</p> + <p> + Own Id: OTP-14114</p> + </item> + <item> + <p><c>filename:safe_relative_path/1</c> to sanitize a + relative path has been added.</p> + <p> + Own Id: OTP-14215</p> + </item> + </list> + </section> + +</section> + <section><title>STDLIB 3.2</title> <section><title>Fixed Bugs and Malfunctions</title> @@ -3163,7 +3267,7 @@ <p> Two bugs in io:format for ~F.~Ps have been corrected. When length(S) >= abs(F) > P, the precision P was incorrectly - ignored. When F == P > lenght(S) the result was + ignored. When F == P > length(S) the result was incorrectly left adjusted. Bug found by Ali Yakout who also provided a fix.</p> <p> diff --git a/lib/stdlib/doc/src/shell.xml b/lib/stdlib/doc/src/shell.xml index d6e8036d4e..f52bc39deb 100644 --- a/lib/stdlib/doc/src/shell.xml +++ b/lib/stdlib/doc/src/shell.xml @@ -165,12 +165,12 @@ <item> <p>Evaluates <c>shell_default:help()</c>.</p> </item> - <tag><c>c(File)</c></tag> + <tag><c>c(Mod)</c></tag> <item> - <p>Evaluates <c>shell_default:c(File)</c>. This compiles - and loads code in <c>File</c> and purges old versions of - code, if necessary. Assumes that the file and module names - are the same.</p> + <p>Evaluates <c>shell_default:c(Mod)</c>. This compiles and + loads the module <c>Mod</c> and purges old versions of the + code, if necessary. <c>Mod</c> can be either a module name or + a source file path, with or without <c>.erl</c> extension.</p> </item> <tag><c>catch_exception(Bool)</c></tag> <item> @@ -83,8 +83,8 @@ diff --git a/lib/stdlib/doc/src/sys.xml b/lib/stdlib/doc/src/sys.xml index 9091a46df9..45171f814d 100644 --- a/lib/stdlib/doc/src/sys.xml +++ b/lib/stdlib/doc/src/sys.xml @@ -83,8 +83,8 @@ <p>If the modules used to implement the process change dynamically during runtime, the process must understand one more message. An example is the <seealso marker="gen_event"><c>gen_event</c></seealso> - processes. The message is <c>{get_modules, From}</c>. - The reply to this message is <c>From ! {modules, Modules}</c>, where + processes. The message is <c>{_Label, {From, Ref}, get_modules}</c>. + The reply to this message is <c>From !
{Ref, Modules}</c>, where <c>Modules</c> is a list of the currently active modules in the process.</p> <p>This message is used by the release handler to find which diff --git a/lib/stdlib/src/Makefile b/lib/stdlib/src/Makefile index d6c0ff8d8d..ed3dfb342c 100644 --- a/lib/stdlib/src/Makefile +++ b/lib/stdlib/src/Makefile @@ -130,7 +130,7 @@ HRL_FILES= \ ../include/qlc.hrl \ ../include/zip.hrl -INTERNAL_HRL_FILES= dets.hrl +INTERNAL_HRL_FILES= dets.hrl erl_tar.hrl ERL_FILES= $(MODULES:%=%.erl) @@ -228,7 +228,7 @@ $(EBIN)/dets_v9.beam: dets.hrl $(EBIN)/erl_bits.beam: ../include/erl_bits.hrl $(EBIN)/erl_compile.beam: ../include/erl_compile.hrl ../../kernel/include/file.hrl $(EBIN)/erl_lint.beam: ../include/erl_bits.hrl -$(EBIN)/erl_tar.beam: ../../kernel/include/file.hrl +$(EBIN)/erl_tar.beam: ../../kernel/include/file.hrl erl_tar.hrl $(EBIN)/file_sorter.beam: ../../kernel/include/file.hrl $(EBIN)/filelib.beam: ../../kernel/include/file.hrl $(EBIN)/filename.beam: ../../kernel/include/file.hrl diff --git a/lib/stdlib/src/base64.erl b/lib/stdlib/src/base64.erl index bf259e6691..0c8d817910 100644 --- a/lib/stdlib/src/base64.erl +++ b/lib/stdlib/src/base64.erl @@ -219,38 +219,49 @@ mime_decode_binary(Result, <<0:8,T/bits>>) -> mime_decode_binary(Result, T); mime_decode_binary(Result0, <<C:8,T/bits>>) -> case element(C, ?DECODE_MAP) of - Bits when is_integer(Bits) -> - mime_decode_binary(<<Result0/bits,Bits:6>>, T); - eq -> - case tail_contains_more(T, false) of - {<<>>, Eq} -> - %% No more valid data. - case bit_size(Result0) rem 8 of - 0 -> - %% '====' is not uncommon. - Result0; - 4 when Eq -> - %% enforce at least one more '=' only ignoring illegals and spacing - Split = byte_size(Result0) - 1, - <<Result:Split/bytes,_:4>> = Result0, - Result; - 2 -> - %% remove 2 bits - Split = byte_size(Result0) - 1, - <<Result:Split/bytes,_:2>> = Result0, - Result - end; - {More, _} -> - %% More valid data, skip the eq as invalid - mime_decode_binary(Result0, More) - end; - _ -> - mime_decode_binary(Result0, T) + Bits when is_integer(Bits) -> + mime_decode_binary(<<Result0/bits,Bits:6>>, T); + eq -> + mime_decode_binary_after_eq(Result0, T, false); + _ -> + mime_decode_binary(Result0, T) end; -mime_decode_binary(Result, <<>>) -> +mime_decode_binary(Result, _) -> true = is_binary(Result), Result. +mime_decode_binary_after_eq(Result, <<0:8,T/bits>>, Eq) -> + mime_decode_binary_after_eq(Result, T, Eq); +mime_decode_binary_after_eq(Result0, <<C:8,T/bits>>, Eq) -> + case element(C, ?DECODE_MAP) of + bad -> + mime_decode_binary_after_eq(Result0, T, Eq); + ws -> + mime_decode_binary_after_eq(Result0, T, Eq); + eq -> + mime_decode_binary_after_eq(Result0, T, true); + Bits when is_integer(Bits) -> + %% More valid data, skip the eq as invalid + mime_decode_binary(<<Result0/bits,Bits:6>>, T) + end; +mime_decode_binary_after_eq(Result0, <<>>, Eq) -> + %% No more valid data. + case bit_size(Result0) rem 8 of + 0 -> + %% '====' is not uncommon. + Result0; + 4 when Eq -> + %% enforce at least one more '=' only ignoring illegals and spacing + Split = byte_size(Result0) - 1, + <<Result:Split/bytes,_:4>> = Result0, + Result; + 2 -> + %% remove 2 bits + Split = byte_size(Result0) - 1, + <<Result:Split/bytes,_:2>> = Result0, + Result + end. 
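The rewrite above only restructures the flow: handling of an encountered "=" byte moves into mime_decode_binary_after_eq/3, while the observable contract stays the same, namely that MIME decoding skips whitespace and other bytes outside the Base64 alphabet where the strict variant rejects them. A hedged sketch of that contract (the input literals are chosen only for illustration):

    %% mime_decode/1 silently drops the embedded newline; base64:decode/1
    %% would fail on the same input because it accepts only alphabet and
    %% padding characters.
    <<"ABC">> = base64:mime_decode(<<"QU\nJD">>),
    <<"ABC">> = base64:decode(<<"QUJD">>).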
+ decode([], A) -> A; decode([$=,$=,C2,C1|Cs], A) -> Bits2x6 = (b64d(C1) bsl 18) bor (b64d(C2) bsl 12), diff --git a/lib/stdlib/src/beam_lib.erl b/lib/stdlib/src/beam_lib.erl index d7ee5c1f5d..461acf03be 100644 --- a/lib/stdlib/src/beam_lib.erl +++ b/lib/stdlib/src/beam_lib.erl @@ -63,7 +63,7 @@ -type label() :: integer(). -type chunkid() :: nonempty_string(). % approximation of the strings below -%% "Abst" | "Attr" | "CInf" | "ExpT" | "ImpT" | "LocT" | "Atom". +%% "Abst" | "Attr" | "CInf" | "ExpT" | "ImpT" | "LocT" | "Atom" | "AtU8". -type chunkname() :: 'abstract_code' | 'attributes' | 'compile_info' | 'exports' | 'labeled_exports' | 'imports' | 'indexed_imports' @@ -520,6 +520,8 @@ read_chunk_data(File0, ChunkNames0, Options) end. %% -> {ok, list()} | throw(Error) +check_chunks([atoms | Ids], File, IL, L) -> + check_chunks(Ids, File, ["Atom", "AtU8" | IL], [{atom_chunk, atoms} | L]); check_chunks([ChunkName | Ids], File, IL, L) when is_atom(ChunkName) -> ChunkId = chunk_name_to_id(ChunkName, File), check_chunks(Ids, File, [ChunkId | IL], [{ChunkId, ChunkName} | L]); @@ -537,6 +539,10 @@ scan_beam(File, What0, AllowMissingChunks) -> case scan_beam1(File, What0) of {missing, _FD, Mod, Data, What} when AllowMissingChunks -> {ok, Mod, [{Id, missing_chunk} || Id <- What] ++ Data}; + {missing, _FD, Mod, Data, ["Atom"]} -> + {ok, Mod, Data}; + {missing, _FD, Mod, Data, ["AtU8"]} -> + {ok, Mod, Data}; {missing, FD, _Mod, _Data, What} -> error({missing_chunk, filename(FD), hd(What)}); R -> @@ -581,18 +587,23 @@ scan_beam(FD, Pos, What, Mod, Data) -> error({invalid_beam_file, filename(FD), Pos}) end. -get_data(Cs, "Atom"=Id, FD, Size, Pos, Pos2, _Mod, Data) -> +get_atom_data(Cs, Id, FD, Size, Pos, Pos2, Data, Encoding) -> NewCs = del_chunk(Id, Cs), {NFD, Chunk} = get_chunk(Id, Pos, Size, FD), <<_Num:32, Chunk2/binary>> = Chunk, - {Module, _} = extract_atom(Chunk2), + {Module, _} = extract_atom(Chunk2, Encoding), C = case Cs of info -> {Id, Pos, Size}; _ -> {Id, Chunk} end, - scan_beam(NFD, Pos2, NewCs, Module, [C | Data]); + scan_beam(NFD, Pos2, NewCs, Module, [C | Data]). + +get_data(Cs, "Atom" = Id, FD, Size, Pos, Pos2, _Mod, Data) -> + get_atom_data(Cs, Id, FD, Size, Pos, Pos2, Data, latin1); +get_data(Cs, "AtU8" = Id, FD, Size, Pos, Pos2, _Mod, Data) -> + get_atom_data(Cs, Id, FD, Size, Pos, Pos2, Data, utf8); get_data(info, Id, FD, Size, Pos, Pos2, Mod, Data) -> scan_beam(FD, Pos2, info, Mod, [{Id, Pos, Size} | Data]); get_data(Chunks, Id, FD, Size, Pos, Pos2, Mod, Data) -> @@ -624,6 +635,9 @@ get_chunk(Id, Pos, Size, FD) -> {NFD, Chunk} end. +chunks_to_data([{atom_chunk, Name} | CNs], Chunks, File, Cs, Module, Atoms, L) -> + {NewAtoms, Ret} = chunk_to_data(Name, <<"">>, File, Cs, Atoms, Module), + chunks_to_data(CNs, Chunks, File, Cs, Module, NewAtoms, [Ret | L]); chunks_to_data([{Id, Name} | CNs], Chunks, File, Cs, Module, Atoms, L) -> {_Id, Chunk} = lists:keyfind(Id, 1, Chunks), {NewAtoms, Ret} = chunk_to_data(Name, Chunk, File, Cs, Atoms, Module), @@ -651,7 +665,7 @@ chunk_to_data(abstract_code=Id, Chunk, File, _Cs, AtomTable, Mod) -> <<>> -> {AtomTable, {Id, no_abstract_code}}; <<0:8,N:8,Mode0:N/binary,Rest/binary>> -> - Mode = list_to_atom(binary_to_list(Mode0)), + Mode = binary_to_atom(Mode0, utf8), decrypt_abst(Mode, Mod, File, Id, AtomTable, Rest); _ -> case catch binary_to_term(Chunk) of @@ -683,7 +697,6 @@ chunk_to_data(ChunkId, Chunk, _File, _Cs, AtomTable, _Module) when is_list(ChunkId) -> {AtomTable, {ChunkId, Chunk}}. 
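With the beam_lib changes above, the symbolic atoms chunk reference is mapped to both "Atom" and "AtU8", so callers get the atom table regardless of whether the object file carries the old Latin-1 chunk or the new UTF-8 one. A hedged sketch of reading it through the public API (the stdlib lists module is used purely as an example):

    %% Works whichever of the two atom chunks the .beam file contains.
    {ok, {lists, [{atoms, Atoms}]}} =
        beam_lib:chunks(code:which(lists), [atoms]),
    io:format("lists.beam carries ~p atoms~n", [length(Atoms)]).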
% Chunk is a binary -chunk_name_to_id(atoms, _) -> "Atom"; chunk_name_to_id(indexed_imports, _) -> "ImpT"; chunk_name_to_id(imports, _) -> "ImpT"; chunk_name_to_id(exports, _) -> "ExpT"; @@ -738,25 +751,30 @@ atm(AT, N) -> %% AT is updated. ensure_atoms({empty, AT}, Cs) -> - {_Id, AtomChunk} = lists:keyfind("Atom", 1, Cs), - extract_atoms(AtomChunk, AT), + case lists:keyfind("AtU8", 1, Cs) of + {_Id, AtomChunk} when is_binary(AtomChunk) -> + extract_atoms(AtomChunk, AT, utf8); + _ -> + {_Id, AtomChunk} = lists:keyfind("Atom", 1, Cs), + extract_atoms(AtomChunk, AT, latin1) + end, AT; ensure_atoms(AT, _Cs) -> AT. -extract_atoms(<<_Num:32, B/binary>>, AT) -> - extract_atoms(B, 1, AT). +extract_atoms(<<_Num:32, B/binary>>, AT, Encoding) -> + extract_atoms(B, 1, AT, Encoding). -extract_atoms(<<>>, _I, _AT) -> +extract_atoms(<<>>, _I, _AT, _Encoding) -> true; -extract_atoms(B, I, AT) -> - {Atom, B1} = extract_atom(B), +extract_atoms(B, I, AT, Encoding) -> + {Atom, B1} = extract_atom(B, Encoding), true = ets:insert(AT, {I, Atom}), - extract_atoms(B1, I+1, AT). + extract_atoms(B1, I+1, AT, Encoding). -extract_atom(<<Len, B/binary>>) -> +extract_atom(<<Len, B/binary>>, Encoding) -> <<SB:Len/binary, Tail/binary>> = B, - {list_to_atom(binary_to_list(SB)), Tail}. + {binary_to_atom(SB, Encoding), Tail}. %%% Utils. @@ -856,12 +874,12 @@ significant_chunks() -> %% for a module. They are listed in the order that they should be MD5:ed. md5_chunks() -> - ["Atom", "Code", "StrT", "ImpT", "ExpT", "FunT", "LitT"]. + ["Atom", "AtU8", "Code", "StrT", "ImpT", "ExpT", "FunT", "LitT"]. %% The following chunks are mandatory in every Beam file. mandatory_chunks() -> - ["Code", "ExpT", "ImpT", "StrT", "Atom"]. + ["Code", "ExpT", "ImpT", "StrT"]. %%% ==================================================================== %%% The rest of the file handles encrypted debug info. diff --git a/lib/stdlib/src/binary.erl b/lib/stdlib/src/binary.erl index ccc827ca2d..45666fbcb4 100644 --- a/lib/stdlib/src/binary.erl +++ b/lib/stdlib/src/binary.erl @@ -24,7 +24,7 @@ -export_type([cp/0]). --opaque cp() :: {'am' | 'bm', binary()}. +-opaque cp() :: {'am' | 'bm', reference()}. -type part() :: {Start :: non_neg_integer(), Length :: integer()}. %%% BIFs. diff --git a/lib/stdlib/src/c.erl b/lib/stdlib/src/c.erl index d36630214c..bb7b485490 100644 --- a/lib/stdlib/src/c.erl +++ b/lib/stdlib/src/c.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2016. All Rights Reserved. +%% Copyright Ericsson AB 1996-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -23,7 +23,7 @@ %% Avoid warning for local function error/2 clashing with autoimported BIF. -compile({no_auto_import,[error/2]}). --export([help/0,lc/1,c/1,c/2,nc/1,nc/2, nl/1,l/1,i/0,i/1,ni/0, +-export([help/0,lc/1,c/1,c/2,c/3,nc/1,nc/2, nl/1,l/1,i/0,i/1,ni/0, y/1, y/2, lc_batch/0, lc_batch/1, i/3,pid/3,m/0,m/1,mm/0,lm/0, @@ -35,7 +35,7 @@ -export([appcall/4]). -import(lists, [reverse/1,flatten/1,sublist/3,sort/1,keysort/2, - concat/1,max/1,min/1,foreach/2,foldl/3,flatmap/2]). + max/1,min/1,foreach/2,foldl/3,flatmap/2]). -import(io, [format/1, format/2]). 
%%----------------------------------------------------------------------- @@ -44,7 +44,7 @@ help() -> io:put_chars(<<"bt(Pid) -- stack backtrace for a process\n" - "c(File) -- compile and load code in <File>\n" + "c(Mod) -- compile and load module or file <Mod>\n" "cd(Dir) -- change working directory\n" "flush() -- flush any messages sent to the shell\n" "help() -- help info\n" @@ -72,32 +72,224 @@ help() -> "xm(M) -- cross reference check a module\n" "y(File) -- generate a Yecc parser\n">>). -%% c(FileName) -%% Compile a file/module. - --spec c(File) -> {'ok', Module} | 'error' when - File :: file:name(), - Module :: module(). +%% c(Module) +%% Compile a module/file. + +-spec c(Module) -> {'ok', ModuleName} | 'error' when + Module :: file:name(), + ModuleName :: module(). + +c(Module) -> c(Module, []). + +-spec c(Module, Options) -> {'ok', ModuleName} | 'error' when + Module :: file:name(), + Options :: [compile:option()] | compile:option(), + ModuleName :: module(). + +c(Module, SingleOption) when not is_list(SingleOption) -> + c(Module, [SingleOption]); +c(Module, Opts) when is_atom(Module) -> + %% either a module name or a source file name (possibly without + %% suffix); if such a source file exists, it is used to compile from + %% scratch with the given options, otherwise look for an object file + Suffix = case filename:extension(Module) of + "" -> src_suffix(Opts); + S -> S + end, + SrcFile = filename:rootname(Module, Suffix) ++ Suffix, + case filelib:is_file(SrcFile) of + true -> + compile_and_load(SrcFile, Opts); + false -> + c(Module, Opts, fun (_) -> true end) + end; +c(Module, Opts) -> + %% we never interpret a string as a module name, only as a file + compile_and_load(Module, Opts). -c(File) -> c(File, []). +%% This tries to find an existing object file and use its compile_info and +%% source path to recompile the module, overwriting the old object file. +%% The Filter parameter is applied to the old compile options --spec c(File, Options) -> {'ok', Module} | 'error' when - File :: file:name(), +-spec c(Module, Options, Filter) -> {'ok', ModuleName} | 'error' when + Module :: atom(), Options :: [compile:option()], - Module :: module(). + Filter :: fun ((compile:option()) -> boolean()), + ModuleName :: module(). + +c(Module, Options, Filter) when is_atom(Module) -> + case find_beam(Module) of + BeamFile when is_list(BeamFile) -> + c(Module, Options, Filter, BeamFile); + Error -> + {error, Error} + end. + +c(Module, Options, Filter, BeamFile) -> + case compile_info(Module, BeamFile) of + Info when is_list(Info) -> + case find_source(BeamFile, Info) of + SrcFile when is_list(SrcFile) -> + c(SrcFile, Options, Filter, BeamFile, Info); + Error -> + Error + end; + Error -> + Error + end. + +c(SrcFile, NewOpts, Filter, BeamFile, Info) -> + %% Filter old options; also remove options that will be replaced. + %% Write new beam over old beam unless other outdir is specified. + F = fun (Opt) -> not is_outdir_opt(Opt) andalso Filter(Opt) end, + Options = (NewOpts ++ [{outdir,filename:dirname(BeamFile)}] + ++ lists:filter(F, old_options(Info))), + format("Recompiling ~s\n", [SrcFile]), + safe_recompile(SrcFile, Options, BeamFile). + +old_options(Info) -> + case lists:keyfind(options, 1, Info) of + {options, Opts} -> Opts; + false -> [] + end. 
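A hedged usage sketch of the recompilation path introduced above (module name, option, and filter fun are made up): c/3 locates the module's object file, keeps the recorded compiler options that pass the filter fun, appends the new options, and recompiles from the original source, falling back to filelib:find_source/1 when the recorded source path no longer exists.

    %% Recompile an already built module, dropping a previously used
    %% 'native' option and adding debug_info on top of the old options.
    {ok, my_mod} = c:c(my_mod, [debug_info], fun(Opt) -> Opt =/= native end).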
+ +%% prefer the source path in the compile info if the file exists, +%% otherwise do a standard source search relative to the beam file +find_source(BeamFile, Info) -> + case lists:keyfind(source, 1, Info) of + {source, SrcFile} -> + case filelib:is_file(SrcFile) of + true -> SrcFile; + false -> find_source(BeamFile) + end; + _ -> + find_source(BeamFile) + end. + +find_source(BeamFile) -> + case filelib:find_source(BeamFile) of + {ok, SrcFile} -> SrcFile; + _ -> {error, no_source} + end. -c(File, Opts0) when is_list(Opts0) -> - Opts = [report_errors,report_warnings|Opts0], +%% find the beam file for a module, preferring the path reported by code:which() +%% if it still exists, or otherwise by searching the code path +find_beam(Module) when is_atom(Module) -> + case code:which(Module) of + Beam when is_list(Beam), Beam =/= "" -> + case erlang:module_loaded(Module) of + false -> + Beam; % code:which/1 found this in the path + true -> + case filelib:is_file(Beam) of + true -> Beam; + false -> find_beam_1(Module) % file moved? + end + end; + Other when Other =:= ""; Other =:= cover_compiled -> + %% module is loaded but not compiled directly from source + find_beam_1(Module); + Error -> + Error + end. + +find_beam_1(Module) -> + File = atom_to_list(Module) ++ code:objfile_extension(), + case code:where_is_file(File) of + Beam when is_list(Beam) -> + Beam; + Error -> + Error + end. + +%% get the compile_info for a module +%% -will report the info for the module in memory, if loaded +%% -will try to find and examine the beam file if not in memory +%% -will not cause a module to become loaded by accident +compile_info(Module, Beam) when is_atom(Module) -> + case erlang:module_loaded(Module) of + true -> + %% getting the compile info for a loaded module should normally + %% work, but return an empty info list if it fails + try erlang:get_module_info(Module, compile) + catch _:_ -> [] + end; + false -> + case beam_lib:chunks(Beam, [compile_info]) of + {ok, {_Module, [{compile_info, Info}]}} -> + Info; + Error -> + Error + end + end. + +%% compile module, backing up any existing target file and restoring the +%% old version if compilation fails (this should only be used when we have +%% an old beam file that we want to preserve) +safe_recompile(File, Options, BeamFile) -> + %% Note that it's possible that because of options such as 'to_asm', + %% the compiler might not actually write a new beam file at all + Backup = BeamFile ++ ".bak", + case file:rename(BeamFile, Backup) of + Status when Status =:= ok; Status =:= {error,enoent} -> + case compile_and_load(File, Options) of + {ok, _} = Result -> + _ = if Status =:= ok -> file:delete(Backup); + true -> ok + end, + Result; + Error -> + _ = if Status =:= ok -> file:rename(Backup, BeamFile); + true -> ok + end, + Error + end; + Error -> + Error + end. + +%% Compile the file and load the resulting object code (if any). +%% Automatically ensures that there is an outdir option, by default the +%% directory of File, and that a 'from' option will be passed to match the +%% actual source suffix if needed (unless already specified). +compile_and_load(File, Opts0) when is_list(Opts0) -> + Opts = [report_errors, report_warnings + | ensure_from(filename:extension(File), + ensure_outdir(filename:dirname(File), Opts0))], case compile:file(File, Opts) of {ok,Mod} -> %Listing file. - machine_load(Mod, File, Opts); + purge_and_load(Mod, File, Opts); {ok,Mod,_Ws} -> %Warnings maybe turned on. 
- machine_load(Mod, File, Opts); + purge_and_load(Mod, File, Opts); Other -> %Errors go here Other end; -c(File, Opt) -> - c(File, [Opt]). +compile_and_load(File, Opt) -> + compile_and_load(File, [Opt]). + +ensure_from(Suffix, Opts0) -> + case lists:partition(fun is_from_opt/1, Opts0++from_opt(Suffix)) of + {[Opt|_], Opts} -> [Opt | Opts]; + {[], Opts} -> Opts + end. + +ensure_outdir(Dir, Opts0) -> + {[Opt|_], Opts} = lists:partition(fun is_outdir_opt/1, + Opts0++[{outdir,Dir}]), + [Opt | Opts]. + +is_outdir_opt({outdir, _}) -> true; +is_outdir_opt(_) -> false. + +is_from_opt(from_core) -> true; +is_from_opt(from_asm) -> true; +is_from_opt(from_beam) -> true; +is_from_opt(_) -> false. + +from_opt(".core") -> [from_core]; +from_opt(".S") -> [from_asm]; +from_opt(".beam") -> [from_beam]; +from_opt(_) -> []. %%% Obtain the 'outdir' option from the argument. Return "." if no %%% such option was given. @@ -113,18 +305,29 @@ outdir([Opt|Rest]) -> outdir(Rest) end. +%% mimic how suffix is selected in compile:file(). +src_suffix([from_core|_]) -> ".core"; +src_suffix([from_asm|_]) -> ".S"; +src_suffix([from_beam|_]) -> ".beam"; +src_suffix([_|Opts]) -> src_suffix(Opts); +src_suffix([]) -> ".erl". + %%% We have compiled File with options Opts. Find out where the -%%% output file went to, and load it. -machine_load(Mod, File, Opts) -> +%%% output file went and load it, purging any old version. +purge_and_load(Mod, File, Opts) -> Dir = outdir(Opts), - File2 = filename:join(Dir, filename:basename(File, ".erl")), + Base = filename:basename(File, src_suffix(Opts)), + OutFile = filename:join(Dir, Base), case compile:output_generated(Opts) of true -> - Base = atom_to_list(Mod), - case filename:basename(File, ".erl") of + case atom_to_list(Mod) of Base -> code:purge(Mod), - check_load(code:load_abs(File2,Mod), Mod); + %% Note that load_abs() adds the object file suffix + case code:load_abs(OutFile, Mod) of + {error, _R}=Error -> Error; + _ -> {ok, Mod} + end; _OtherMod -> format("** Module name '~p' does not match file name '~tp' **~n", [Mod,File]), @@ -135,13 +338,6 @@ machine_load(Mod, File, Opts) -> ok end. -%%% This function previously warned if the loaded module was -%%% loaded from some other place than current directory. -%%% Now, loading from other than current directory is supposed to work. -%%% so this function does nothing special. -check_load({error, _R} = Error, _) -> Error; -check_load(_, Mod) -> {ok, Mod}. - %% Compile a list of modules %% enables the nice unix shell cmd %% erl -s c lc f1 f2 f3 @d c1=v1 @c2 @i IDir @o ODir -s erlang halt @@ -204,7 +400,7 @@ split_def([], Res) -> {d, list_to_atom(reverse(Res))}. 
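The suffix handling above means c/1,2 no longer assumes .erl input; the file extension selects a matching from_* option before compile:file/2 is called. A hedged sketch (the file names are made up):

    %% compile_and_load/2 derives the 'from' option from the extension.
    c:c("demo.core"),          %% Core Erlang input, implies from_core
    c:c("demo.S", [report]).   %% assembly listing, implies from_asm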
make_term(Str) -> case erl_scan:string(Str) of {ok, Tokens, _} -> - case erl_parse:parse_term(Tokens ++ [{dot, 1}]) of + case erl_parse:parse_term(Tokens ++ [{dot, erl_anno:new(1)}]) of {ok, Term} -> Term; {error, {_,_,Reason}} -> io:format("~ts: ~ts~n", [Reason, Str]), diff --git a/lib/stdlib/src/dets.erl b/lib/stdlib/src/dets.erl index 5bc9475fc8..e81383775b 100644 --- a/lib/stdlib/src/dets.erl +++ b/lib/stdlib/src/dets.erl @@ -1063,11 +1063,8 @@ foldl_bins([Bin | Bins], MP, Terms) -> compile_match_spec(select, ?PATTERN_TO_OBJECT_MATCH_SPEC('_') = Spec) -> {Spec, true}; compile_match_spec(select, Spec) -> - case catch ets:match_spec_compile(Spec) of - X when is_binary(X) -> - {Spec, {match_spec, X}}; - _ -> - badarg + try {Spec, {match_spec, ets:match_spec_compile(Spec)}} + catch error:_ -> badarg end; compile_match_spec(object, Pat) -> compile_match_spec(select, ?PATTERN_TO_OBJECT_MATCH_SPEC(Pat)); diff --git a/lib/stdlib/src/edlin_expand.erl b/lib/stdlib/src/edlin_expand.erl index 5f821caef0..a1a97af4c5 100644 --- a/lib/stdlib/src/edlin_expand.erl +++ b/lib/stdlib/src/edlin_expand.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2005-2016. All Rights Reserved. +%% Copyright Ericsson AB 2005-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -101,44 +101,77 @@ match(Prefix, Alts, Extra0) -> %% Return the list of names L in multiple columns. format_matches(L) -> - S = format_col(lists:sort(L), []), + {S1, Dots} = format_col(lists:sort(L), []), + S = case Dots of + true -> + {_, Prefix} = longest_common_head(vals(L)), + PrefixLen = length(Prefix), + case PrefixLen =< 3 of + true -> S1; % Do not replace the prefix with "...". + false -> + LeadingDotsL = leading_dots(L, PrefixLen), + {S2, _} = format_col(lists:sort(LeadingDotsL), []), + S2 + end; + false -> S1 + end, ["\n" | S]. format_col([], _) -> []; -format_col(L, Acc) -> format_col(L, field_width(L), 0, Acc). - -format_col(X, Width, Len, Acc) when Width + Len > 79 -> - format_col(X, Width, 0, ["\n" | Acc]); -format_col([A|T], Width, Len, Acc0) -> - H = case A of - %% If it's a tuple {string(), integer()}, we assume it's an - %% arity, and meant to be printed. - {H0, I} when is_integer(I) -> - H0 ++ "/" ++ integer_to_list(I); - {H1, _} -> H1; - H2 -> H2 - end, - Acc = [io_lib:format("~-*ts", [Width,H]) | Acc0], - format_col(T, Width, Len+Width, Acc); -format_col([], _, _, Acc) -> - lists:reverse(Acc, "\n"). - -field_width(L) -> field_width(L, 0). - -field_width([{H,_}|T], W) -> +format_col(L, Acc) -> + LL = 79, + format_col(L, field_width(L, LL), 0, Acc, LL, false). + +format_col(X, Width, Len, Acc, LL, Dots) when Width + Len > LL -> + format_col(X, Width, 0, ["\n" | Acc], LL, Dots); +format_col([A|T], Width, Len, Acc0, LL, Dots) -> + {H0, R} = format_val(A), + Hmax = LL - length(R), + {H, NewDots} = + case length(H0) > Hmax of + true -> {io_lib:format("~-*ts", [Hmax - 3, H0]) ++ "...", true}; + false -> {H0, Dots} + end, + Acc = [io_lib:format("~-*ts", [Width, H ++ R]) | Acc0], + format_col(T, Width, Len+Width, Acc, LL, NewDots); +format_col([], _, _, Acc, _LL, Dots) -> + {lists:reverse(Acc, "\n"), Dots}. + +format_val({H, I}) when is_integer(I) -> + %% If it's a tuple {string(), integer()}, we assume it's an + %% arity, and meant to be printed. + {H, "/" ++ integer_to_list(I)}; +format_val({H, _}) -> + {H, ""}; +format_val(H) -> + {H, ""}. + +field_width(L, LL) -> field_width(L, 0, LL). 
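The catch-to-try change in dets:compile_match_spec/2 above narrows what gets mapped to badarg: only errors raised by ets:match_spec_compile/1, not arbitrary throws or exits from elsewhere. A hedged standalone sketch of the same pattern:

    %% A well-formed match specification compiles; a malformed one would be
    %% reported as badarg instead of leaking the error exception.
    MS = [{{'$1', '$2'}, [], ['$2']}],
    Compiled = try ets:match_spec_compile(MS)
               catch error:_ -> badarg
               end,
    true = (Compiled =/= badarg).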
+ +field_width([{H,_}|T], W, LL) -> case length(H) of - L when L > W -> field_width(T, L); - _ -> field_width(T, W) + L when L > W -> field_width(T, L, LL); + _ -> field_width(T, W, LL) end; -field_width([H|T], W) -> +field_width([H|T], W, LL) -> case length(H) of - L when L > W -> field_width(T, L); - _ -> field_width(T, W) + L when L > W -> field_width(T, L, LL); + _ -> field_width(T, W, LL) end; -field_width([], W) when W < 40 -> +field_width([], W, LL) when W < LL - 3 -> W + 4; -field_width([], _) -> - 40. +field_width([], _, LL) -> + LL. + +vals([]) -> []; +vals([{S, _}|L]) -> [S|vals(L)]; +vals([S|L]) -> [S|vals(L)]. + +leading_dots([], _Len) -> []; +leading_dots([{H, I}|L], Len) -> + [{"..." ++ nthtail(Len, H), I}|leading_dots(L, Len)]; +leading_dots([H|L], Len) -> + ["..." ++ nthtail(Len, H)|leading_dots(L, Len)]. longest_common_head([]) -> no; diff --git a/lib/stdlib/src/epp.erl b/lib/stdlib/src/epp.erl index 40eba4ad67..61d755ba55 100644 --- a/lib/stdlib/src/epp.erl +++ b/lib/stdlib/src/epp.erl @@ -286,7 +286,7 @@ parse_file(Epp) -> {warning,W} -> [{warning,W}|parse_file(Epp)]; {eof,Location} -> - [{eof,erl_anno:new(Location)}] + [{eof,Location}] end. -spec default_encoding() -> source_encoding(). diff --git a/lib/stdlib/src/erl_anno.erl b/lib/stdlib/src/erl_anno.erl index d32c34dabd..d0310f52e2 100644 --- a/lib/stdlib/src/erl_anno.erl +++ b/lib/stdlib/src/erl_anno.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2015. All Rights Reserved. +%% Copyright Ericsson AB 1996-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -42,7 +42,7 @@ %% Debug: define DEBUG to make sure that annotations are handled as an %% opaque type. Note that all abstract code need to be compiled with -%% DEBUG=true. See also ./erl_pp.erl. +%% DEBUG=true. See also ./erl_pp.erl and ./erl_parse.yrl. %-define(DEBUG, true). @@ -52,7 +52,11 @@ | {'record', record()} | {'text', string()}. +-ifdef(DEBUG). +-opaque anno() :: [annotation(), ...]. +-else. -opaque anno() :: location() | [annotation(), ...]. +-endif. -type anno_term() :: term(). -type column() :: pos_integer(). diff --git a/lib/stdlib/src/erl_compile.erl b/lib/stdlib/src/erl_compile.erl index a6ae398d03..76db2eeacd 100644 --- a/lib/stdlib/src/erl_compile.erl +++ b/lib/stdlib/src/erl_compile.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1997-2016. All Rights Reserved. +%% Copyright Ericsson AB 1997-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -337,7 +337,7 @@ file_or_directory(Name) -> make_term(Str) -> case erl_scan:string(Str) of {ok, Tokens, _} -> - case erl_parse:parse_term(Tokens ++ [{dot, 1}]) of + case erl_parse:parse_term(Tokens ++ [{dot, erl_anno:new(1)}]) of {ok, Term} -> Term; {error, {_,_,Reason}} -> io:format(?STDERR, "~ts: ~ts~n", [Reason, Str]), diff --git a/lib/stdlib/src/erl_expand_records.erl b/lib/stdlib/src/erl_expand_records.erl index 2280464bff..16220bceb4 100644 --- a/lib/stdlib/src/erl_expand_records.erl +++ b/lib/stdlib/src/erl_expand_records.erl @@ -30,13 +30,13 @@ -import(lists, [map/2,foldl/3,foldr/3,sort/1,reverse/1,duplicate/2]). 
--record(exprec, {compile=[], % Compile flags - vcount=0, % Variable counter - calltype=#{}, % Call types - records=dict:new(), % Record definitions - strict_ra=[], % strict record accesses - checked_ra=[] % successfully accessed records - }). +-record(exprec, {compile=[], % Compile flags + vcount=0, % Variable counter + calltype=#{}, % Call types + records=#{}, % Record definitions + strict_ra=[], % strict record accesses + checked_ra=[] % successfully accessed records + }). -spec(module(AbsForms, CompileOptions) -> AbsForms2 when AbsForms :: [erl_parse:abstract_form()], @@ -72,7 +72,7 @@ init_calltype_imports([], Ctype) -> Ctype. forms([{attribute,_,record,{Name,Defs}}=Attr | Fs], St0) -> NDefs = normalise_fields(Defs), - St = St0#exprec{records=dict:store(Name, NDefs, St0#exprec.records)}, + St = St0#exprec{records=maps:put(Name, NDefs, St0#exprec.records)}, {Fs1, St1} = forms(Fs, St), {[Attr | Fs1], St1}; forms([{function,L,N,A,Cs0} | Fs0], St0) -> @@ -546,7 +546,7 @@ normalise_fields(Fs) -> %% record_fields(RecordName, State) %% find_field(FieldName, Fields) -record_fields(R, St) -> dict:fetch(R, St#exprec.records). +record_fields(R, St) -> maps:get(R, St#exprec.records). find_field(F, [{record_field,_,{atom,_,F},Val} | _]) -> {ok,Val}; find_field(F, [_ | Fs]) -> find_field(F, Fs); diff --git a/lib/stdlib/src/erl_lint.erl b/lib/stdlib/src/erl_lint.erl index 1b84234fac..0ffca0886f 100644 --- a/lib/stdlib/src/erl_lint.erl +++ b/lib/stdlib/src/erl_lint.erl @@ -2,7 +2,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2016. All Rights Reserved. +%% Copyright Ericsson AB 1996-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -737,7 +737,12 @@ start_state({attribute,_,module,M}, St0) -> St1 = St0#lint{module=M}, St1#lint{state=attribute}; start_state(Form, St) -> - St1 = add_error(element(2, Form), undefined_module, St), + Anno = case Form of + {eof, L} -> erl_anno:new(L); + %% {warning, Warning} and {error, Error} not possible here. + _ -> element(2, Form) + end, + St1 = add_error(Anno, undefined_module, St), attribute_state(Form, St1#lint{state=attribute}). %% attribute_state(Form, State) -> diff --git a/lib/stdlib/src/erl_parse.yrl b/lib/stdlib/src/erl_parse.yrl index 922455a6f2..2dcddeb8c2 100644 --- a/lib/stdlib/src/erl_parse.yrl +++ b/lib/stdlib/src/erl_parse.yrl @@ -981,6 +981,16 @@ Erlang code. %% keep track of annotation info in tokens -define(anno(Tup), element(2, Tup)). +%-define(DEBUG, true). + +-ifdef(DEBUG). +%% Assumes that erl_anno has been compiled with DEBUG=true. +-define(ANNO_CHECK(Tokens), + [] = [T || T <- Tokens, not is_list(element(2, T))]). +-else. +-define(ANNO_CHECK(Tokens), ok). +-endif. + %% Entry points compatible to old erl_parse. %% These really suck and are only here until Calle gets multiple %% entry points working. @@ -990,10 +1000,15 @@ Erlang code. AbsForm :: abstract_form(), ErrorInfo :: error_info(). parse_form([{'-',A1},{atom,A2,spec}|Tokens]) -> - parse([{'-',A1},{'spec',A2}|Tokens]); + NewTokens = [{'-',A1},{'spec',A2}|Tokens], + ?ANNO_CHECK(NewTokens), + parse(NewTokens); parse_form([{'-',A1},{atom,A2,callback}|Tokens]) -> - parse([{'-',A1},{'callback',A2}|Tokens]); + NewTokens = [{'-',A1},{'callback',A2}|Tokens], + ?ANNO_CHECK(NewTokens), + parse(NewTokens); parse_form(Tokens) -> + ?ANNO_CHECK(Tokens), parse(Tokens). 
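Several hunks in this diff (c.erl, erl_compile.erl) make the same correction that the ANNO_CHECK macro above guards against in debug builds: tokens handed to erl_parse must carry erl_anno annotations rather than bare integers, which is why the synthesized dot token is now built with erl_anno:new/1. A hedged sketch:

    %% Append a properly annotated dot token before parsing a term.
    {ok, Tokens, _} = erl_scan:string("[1, 2, 3]"),
    {ok, [1, 2, 3]} = erl_parse:parse_term(Tokens ++ [{dot, erl_anno:new(1)}]).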
-spec parse_exprs(Tokens) -> {ok, ExprList} | {error, ErrorInfo} when @@ -1001,6 +1016,7 @@ parse_form(Tokens) -> ExprList :: [abstract_expr()], ErrorInfo :: error_info(). parse_exprs(Tokens) -> + ?ANNO_CHECK(Tokens), A = erl_anno:new(0), case parse([{atom,A,f},{'(',A},{')',A},{'->',A}|Tokens]) of {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],Exprs}]}} -> @@ -1013,6 +1029,7 @@ parse_exprs(Tokens) -> Term :: term(), ErrorInfo :: error_info(). parse_term(Tokens) -> + ?ANNO_CHECK(Tokens), A = erl_anno:new(0), case parse([{atom,A,f},{'(',A},{')',A},{'->',A}|Tokens]) of {ok,{function,_Af,f,0,[{clause,_Ac,[],[],[Expr]}]}} -> @@ -1531,8 +1548,8 @@ type_preop_prec('#') -> {700,800}. Fun :: fun((Anno) -> NewAnno), Anno :: erl_anno:anno(), NewAnno :: erl_anno:anno(), - Abstr :: erl_parse_tree(), - NewAbstr :: erl_parse_tree(). + Abstr :: erl_parse_tree() | form_info(), + NewAbstr :: erl_parse_tree() | form_info(). map_anno(F0, Abstr) -> F = fun(A, Acc) -> {F0(A), Acc} end, @@ -1546,7 +1563,7 @@ map_anno(F0, Abstr) -> Acc1 :: term(), AccIn :: term(), AccOut :: term(), - Abstr :: erl_parse_tree(). + Abstr :: erl_parse_tree() | form_info(). fold_anno(F0, Acc0, Abstr) -> F = fun(A, Acc) -> {A, F0(A, Acc)} end, @@ -1561,15 +1578,15 @@ fold_anno(F0, Acc0, Abstr) -> Acc1 :: term(), AccIn :: term(), AccOut :: term(), - Abstr :: erl_parse_tree(), - NewAbstr :: erl_parse_tree(). + Abstr :: erl_parse_tree() | form_info(), + NewAbstr :: erl_parse_tree() | form_info(). mapfold_anno(F, Acc0, Abstr) -> modify_anno1(Abstr, Acc0, F). -spec new_anno(Term) -> Abstr when Term :: term(), - Abstr :: erl_parse_tree(). + Abstr :: erl_parse_tree() | form_info(). new_anno(Term) -> F = fun(L, Acc) -> {erl_anno:new(L), Acc} end, @@ -1577,14 +1594,14 @@ new_anno(Term) -> NewAbstr. -spec anno_to_term(Abstr) -> term() when - Abstr :: erl_parse_tree(). + Abstr :: erl_parse_tree() | form_info(). anno_to_term(Abstract) -> F = fun(Anno, Acc) -> {erl_anno:to_term(Anno), Acc} end, {NewAbstract, []} = modify_anno1(Abstract, [], F), NewAbstract. --spec anno_from_term(Term) -> erl_parse_tree() when +-spec anno_from_term(Term) -> erl_parse_tree() | form_info() when Term :: term(). anno_from_term(Term) -> @@ -1629,6 +1646,8 @@ modify_anno1({warning,W}, Ac, _Mf) -> {{warning,W},Ac}; modify_anno1({error,W}, Ac, _Mf) -> {{error,W},Ac}; +modify_anno1({eof,L}, Ac, _Mf) -> + {{eof,L},Ac}; %% Expressions. modify_anno1({clauses,Cs}, Ac, Mf) -> {Cs1,Ac1} = modify_anno1(Cs, Ac, Mf), diff --git a/lib/stdlib/src/erl_pp.erl b/lib/stdlib/src/erl_pp.erl index d30cd508c1..6068afb293 100644 --- a/lib/stdlib/src/erl_pp.erl +++ b/lib/stdlib/src/erl_pp.erl @@ -51,6 +51,15 @@ %-define(DEBUG, true). -ifdef(DEBUG). +-define(FORM_TEST(T), + _ = case T of + {eof, _Line} -> ok; + {warning, _W} -> ok; + {error, _E} -> ok; + _ -> ?TEST(T) + end). +-define(EXPRS_TEST(L), + [?TEST(E) || E <- L]). -define(TEST(T), %% Assumes that erl_anno has been compiled with DEBUG=true. %% erl_pp does not use the annoations, but test it anyway. @@ -62,6 +71,8 @@ erlang:error(badarg, [T]) end). -else. +-define(FORM_TEST(T), ok). +-define(EXPRS_TEST(T), ok). -define(TEST(T), ok). -endif. @@ -80,7 +91,7 @@ form(Thing) -> Options :: options()). form(Thing, Options) -> - ?TEST(Thing), + ?FORM_TEST(Thing), State = state(Options), frmt(lform(Thing, options(Options)), State). @@ -124,7 +135,7 @@ guard(Gs) -> Options :: options()). guard(Gs, Options) -> - ?TEST(Gs), + ?EXPRS_TEST(Gs), frmt(lguard(Gs, options(Options)), state(Options)). 
-spec(exprs(Expressions) -> io_lib:chars() when @@ -146,7 +157,7 @@ exprs(Es, Options) -> Options :: options()). exprs(Es, I, Options) -> - ?TEST(Es), + ?EXPRS_TEST(Es), frmt({seq,[],[],[$,],lexprs(Es, options(Options))}, I, state(Options)). -spec(expr(Expression) -> io_lib:chars() when diff --git a/lib/stdlib/src/erl_tar.erl b/lib/stdlib/src/erl_tar.erl index a383a0fc67..a54df939bf 100644 --- a/lib/stdlib/src/erl_tar.erl +++ b/lib/stdlib/src/erl_tar.erl @@ -1,8 +1,8 @@ %% %% %CopyrightBegin% -%% -%% Copyright Ericsson AB 1997-2016. All Rights Reserved. -%% +%% +%% Copyright Ericsson AB 1997-2017. All Rights Reserved. +%% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. %% You may obtain a copy of the License at @@ -14,191 +14,245 @@ %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. %% See the License for the specific language governing permissions and %% limitations under the License. -%% +%% %% %CopyrightEnd% %% +%% This module implements extraction/creation of tar archives. +%% It supports reading most common tar formats, namely V7, STAR, +%% USTAR, GNU, BSD/libarchive, and PAX. It produces archives in USTAR +%% format, unless it must use PAX headers, in which case it produces PAX +%% format. +%% +%% The following references where used: +%% http://www.freebsd.org/cgi/man.cgi?query=tar&sektion=5 +%% http://www.gnu.org/software/tar/manual/html_node/Standard.html +%% http://pubs.opengroup.org/onlinepubs/9699919799/utilities/pax.html -module(erl_tar). -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%% Purpose: Unix tar (tape archive) utility. -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - --export([init/3, create/2, create/3, extract/1, extract/2, table/1, table/2, - open/2, close/1, add/3, add/4, - t/1, tt/1, format_error/1]). +-export([init/3, + create/2, create/3, + extract/1, extract/2, + table/1, table/2, t/1, tt/1, + open/2, close/1, + add/3, add/4, + format_error/1]). -include_lib("kernel/include/file.hrl"). +-include_lib("erl_tar.hrl"). --record(add_opts, - {read_info, % Fun to use for read file/link info. - chunk_size = 0, % For file reading when sending to sftp. 0=do not chunk - verbose = false :: boolean()}). % Verbose on/off. - -%% Opens a tar archive. - -init(UsrHandle, AccessMode, Fun) when is_function(Fun,2) -> - {ok, {AccessMode,{tar_descriptor,UsrHandle,Fun}}}. - -%%%================================================================ -%%% The open function with friends is to keep the file and binary api of this module -open(Name, Mode) -> - case open_mode(Mode) of - {ok, Access, Raw, Opts} -> - open1(Name, Access, Raw, Opts); - {error, Reason} -> - {error, {Name, Reason}} - end. - -open1({binary,Bin}, read, _Raw, Opts) -> - case file:open(Bin, [ram,binary,read]) of - {ok,File} -> - _ = [ram_file:uncompress(File) || Opts =:= [compressed]], - init(File,read,file_fun()); - Error -> - Error - end; -open1({file, Fd}, read, _Raw, _Opts) -> - init(Fd, read, file_fun()); -open1(Name, Access, Raw, Opts) -> - case file:open(Name, Raw ++ [binary, Access|Opts]) of - {ok, File} -> - init(File, Access, file_fun()); - {error, Reason} -> - {error, {Name, Reason}} - end. - -file_fun() -> - fun(write, {Fd,Data}) -> file:write(Fd, Data); - (position, {Fd,Pos}) -> file:position(Fd, Pos); - (read2, {Fd,Size}) -> file:read(Fd,Size); - (close, Fd) -> file:close(Fd) - end. 
- -%%% End of file and binary api (except for open_mode/1 downwards -%%%================================================================ - -%% Closes a tar archive. - -close({read, File}) -> - ok = do_close(File); -close({write, File}) -> - PadResult = pad_file(File), - ok = do_close(File), - PadResult; -close(_) -> - {error, einval}. - -%% Adds a file to a tape archive. - -add(File, Name, Options) -> - add(File, Name, Name, Options). -add({write, File}, Name, NameInArchive, Options) -> - Opts = #add_opts{read_info=fun(F) -> file:read_link_info(F) end}, - add1(File, Name, NameInArchive, add_opts(Options, Opts)); -add({read, _File}, _, _, _) -> - {error, eacces}; -add(_, _, _, _) -> - {error, einval}. - -add_opts([dereference|T], Opts) -> - add_opts(T, Opts#add_opts{read_info=fun(F) -> file:read_file_info(F) end}); -add_opts([verbose|T], Opts) -> - add_opts(T, Opts#add_opts{verbose=true}); -add_opts([{chunks,N}|T], Opts) -> - add_opts(T, Opts#add_opts{chunk_size=N}); -add_opts([_|T], Opts) -> - add_opts(T, Opts); -add_opts([], Opts) -> - Opts. - -%% Creates a tar file Name containing the given files. - -create(Name, Filenames) -> - create(Name, Filenames, []). - -%% Creates a tar archive Name containing the given files. -%% Accepted options: verbose, compressed, cooked +%% Converts the short error reason to a descriptive string. +-spec format_error(term()) -> string(). +format_error(invalid_tar_checksum) -> + "Checksum failed"; +format_error(bad_header) -> + "Unrecognized tar header format"; +format_error({bad_header, Reason}) -> + lists:flatten(io_lib:format("Unrecognized tar header format: ~p", [Reason])); +format_error({invalid_header, negative_size}) -> + "Invalid header: negative size"; +format_error(invalid_sparse_header_size) -> + "Invalid sparse header: negative size"; +format_error(invalid_sparse_map_entry) -> + "Invalid sparse map entry"; +format_error({invalid_sparse_map_entry, Reason}) -> + lists:flatten(io_lib:format("Invalid sparse map entry: ~p", [Reason])); +format_error(invalid_end_of_archive) -> + "Invalid end of archive"; +format_error(eof) -> + "Unexpected end of file"; +format_error(integer_overflow) -> + "Failed to parse numeric: integer overflow"; +format_error({misaligned_read, Pos}) -> + lists:flatten(io_lib:format("Read a block which was misaligned: block_size=~p pos=~p", + [?BLOCK_SIZE, Pos])); +format_error(invalid_gnu_1_0_sparsemap) -> + "Invalid GNU sparse map (version 1.0)"; +format_error({invalid_gnu_0_1_sparsemap, Format}) -> + lists:flatten(io_lib:format("Invalid GNU sparse map (version ~s)", [Format])); +format_error({Name,Reason}) -> + lists:flatten(io_lib:format("~ts: ~ts", [Name,format_error(Reason)])); +format_error(Atom) when is_atom(Atom) -> + file:format_error(Atom); +format_error(Term) -> + lists:flatten(io_lib:format("~tp", [Term])). -create(Name, FileList, Options) -> - Mode = lists:filter(fun(X) -> (X=:=compressed) or (X=:=cooked) - end, Options), - case open(Name, [write|Mode]) of - {ok, TarFile} -> - Add = fun({NmInA, NmOrBin}) -> - add(TarFile, NmOrBin, NmInA, Options); - (Nm) -> - add(TarFile, Nm, Nm, Options) - end, - Result = foreach_while_ok(Add, FileList), - case {Result, close(TarFile)} of - {ok, Res} -> Res; - {Res, _} -> Res - end; - Reason -> - Reason - end. +%% Initializes a new reader given a custom file handle and I/O wrappers +-spec init(handle(), write | read, file_op()) -> {ok, reader()} | {error, badarg}. 
+init(Handle, AccessMode, Fun) when is_function(Fun, 2) -> + Reader = #reader{handle=Handle,access=AccessMode,func=Fun}, + {ok, Pos, Reader2} = do_position(Reader, {cur, 0}), + {ok, Reader2#reader{pos=Pos}}; +init(_Handle, _AccessMode, _Fun) -> + {error, badarg}. +%%%================================================================ %% Extracts all files from the tar file Name. - +-spec extract(open_handle()) -> ok | {error, term()}. extract(Name) -> extract(Name, []). %% Extracts (all) files from the tar file Name. -%% Options accepted: keep_old_files, {files, ListOfFilesToExtract}, verbose, -%% {cwd, AbsoluteDirectory} +%% Options accepted: +%% - cooked: Opens the tar file without mode `raw` +%% - compressed: Uncompresses the tar file when reading +%% - memory: Returns the tar contents as a list of tuples {Name, Bin} +%% - keep_old_files: Extracted files will not overwrite the destination +%% - {files, ListOfFilesToExtract}: Only extract ListOfFilesToExtract +%% - verbose: Prints verbose information about the extraction, +%% - {cwd, AbsoluteDir}: Sets the current working directory for the extraction +-spec extract(open_handle(), [extract_opt()]) -> + ok + | {ok, [{string(), binary()}]} + | {error, term()}. +extract({binary, Bin}, Opts) when is_list(Opts) -> + do_extract({binary, Bin}, Opts); +extract({file, Fd}, Opts) when is_list(Opts) -> + do_extract({file, Fd}, Opts); +extract(#reader{}=Reader, Opts) when is_list(Opts) -> + do_extract(Reader, Opts); +extract(Name, Opts) when is_list(Name); is_binary(Name), is_list(Opts) -> + do_extract(Name, Opts). + +do_extract(Handle, Opts) when is_list(Opts) -> + Opts2 = extract_opts(Opts), + Acc = if Opts2#read_opts.output =:= memory -> []; true -> ok end, + foldl_read(Handle, fun extract1/4, Acc, Opts2). + +extract1(eof, Reader, _, Acc) when is_list(Acc) -> + {ok, {ok, lists:reverse(Acc)}, Reader}; +extract1(eof, Reader, _, Acc) -> + {ok, Acc, Reader}; +extract1(#tar_header{name=Name,size=Size}=Header, Reader, Opts, Acc) -> + case check_extract(Name, Opts) of + true -> + case do_read(Reader, Size) of + {ok, Bin, Reader2} -> + case write_extracted_element(Header, Bin, Opts) of + ok -> + {ok, Acc, Reader2}; + {ok, NameBin} when is_list(Acc) -> + {ok, [NameBin | Acc], Reader2}; + {error, _} = Err -> + throw(Err) + end; + {error, _} = Err -> + throw(Err) + end; + false -> + {ok, Acc, skip_file(Reader)} + end. -extract(Name, Opts) -> - foldl_read(Name, fun extract1/4, ok, extract_opts(Opts)). +%% Checks if the file Name should be extracted. +check_extract(_, #read_opts{files=all}) -> + true; +check_extract(Name, #read_opts{files=Files}) -> + ordsets:is_element(Name, Files). -%% Returns a list of names of the files in the tar file Name. -%% Options accepted: verbose +%%%================================================================ +%% The following table functions produce a list of information about +%% the files contained in the archive. +-type filename() :: string(). +-type typeflag() :: regular | link | symlink | + char | block | directory | + fifo | reserved | unknown. +-type mode() :: non_neg_integer(). +-type uid() :: non_neg_integer(). +-type gid() :: non_neg_integer(). + +-type tar_entry() :: {filename(), + typeflag(), + non_neg_integer(), + calendar:datetime(), + mode(), + uid(), + gid()}. +%% Returns a list of names of the files in the tar file Name. +-spec table(open_handle()) -> {ok, [string()]} | {error, term()}. table(Name) -> table(Name, []). %% Returns a list of names of the files in the tar file Name. 
%% Options accepted: compressed, verbose, cooked. - -table(Name, Opts) -> +-spec table(open_handle(), [compressed | verbose | cooked]) -> + {ok, [tar_entry()]} | {error, term()}. +table(Name, Opts) when is_list(Opts) -> foldl_read(Name, fun table1/4, [], table_opts(Opts)). +table1(eof, Reader, _, Result) -> + {ok, {ok, lists:reverse(Result)}, Reader}; +table1(#tar_header{}=Header, Reader, #read_opts{verbose=Verbose}, Result) -> + Attrs = table1_attrs(Header, Verbose), + Reader2 = skip_file(Reader), + {ok, [Attrs|Result], Reader2}. + +%% Extracts attributes relevant to table1's output +table1_attrs(#tar_header{typeflag=Typeflag,mode=Mode}=Header, true) -> + Type = typeflag(Typeflag), + Name = Header#tar_header.name, + Mtime = Header#tar_header.mtime, + Uid = Header#tar_header.uid, + Gid = Header#tar_header.gid, + Size = Header#tar_header.size, + {Name, Type, Size, Mtime, Mode, Uid, Gid}; +table1_attrs(#tar_header{name=Name}, _Verbose) -> + Name. + +typeflag(?TYPE_REGULAR) -> regular; +typeflag(?TYPE_REGULAR_A) -> regular; +typeflag(?TYPE_GNU_SPARSE) -> regular; +typeflag(?TYPE_CONT) -> regular; +typeflag(?TYPE_LINK) -> link; +typeflag(?TYPE_SYMLINK) -> symlink; +typeflag(?TYPE_CHAR) -> char; +typeflag(?TYPE_BLOCK) -> block; +typeflag(?TYPE_DIR) -> directory; +typeflag(?TYPE_FIFO) -> fifo; +typeflag(_) -> unknown. +%%%================================================================ %% Comments for printing the contents of a tape archive, %% meant to be invoked from the shell. -t(Name) -> +%% Prints each filename in the archive +-spec t(file:filename()) -> ok | {error, term()}. +t(Name) when is_list(Name); is_binary(Name) -> case table(Name) of - {ok, List} -> - lists:foreach(fun(N) -> ok = io:format("~ts\n", [N]) end, List); - Error -> - Error + {ok, List} -> + lists:foreach(fun(N) -> ok = io:format("~ts\n", [N]) end, List); + Error -> + Error end. +%% Prints verbose information about each file in the archive +-spec tt(open_handle()) -> ok | {error, term()}. tt(Name) -> case table(Name, [verbose]) of - {ok, List} -> - lists:foreach(fun print_header/1, List); - Error -> - Error + {ok, List} -> + lists:foreach(fun print_header/1, List); + Error -> + Error end. +%% Used by tt/1 to print a tar_entry tuple +-spec print_header(tar_entry()) -> ok. print_header({Name, Type, Size, Mtime, Mode, Uid, Gid}) -> io:format("~s~s ~4w/~-4w ~7w ~s ~s\n", - [type_to_string(Type), mode_to_string(Mode), - Uid, Gid, Size, time_to_string(Mtime), Name]). + [type_to_string(Type), mode_to_string(Mode), + Uid, Gid, Size, time_to_string(Mtime), Name]). -type_to_string(regular) -> "-"; +type_to_string(regular) -> "-"; type_to_string(directory) -> "d"; -type_to_string(link) -> "l"; -type_to_string(symlink) -> "s"; -type_to_string(char) -> "c"; -type_to_string(block) -> "b"; -type_to_string(fifo) -> "f"; -type_to_string(_) -> "?". - +type_to_string(link) -> "l"; +type_to_string(symlink) -> "s"; +type_to_string(char) -> "c"; +type_to_string(block) -> "b"; +type_to_string(fifo) -> "f"; +type_to_string(unknown) -> "?". + +%% Converts a numeric mode to its human-readable representation mode_to_string(Mode) -> mode_to_string(Mode, "xwrxwrxwr", []). - mode_to_string(Mode, [C|T], Acc) when Mode band 1 =:= 1 -> mode_to_string(Mode bsr 1, T, [C|Acc]); mode_to_string(Mode, [_|T], Acc) -> @@ -206,6 +260,7 @@ mode_to_string(Mode, [_|T], Acc) -> mode_to_string(_, [], Acc) -> Acc. 
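table/1,2 together with t/1 and tt/1 above are the read-only entry points, and the verbose tar_entry() tuples they return can drive a selective extract/2. A sketch under the assumption that "backup.tar" and the target directory "out" exist:

    %% Sketch: list an archive, then extract only the regular files it contains.
    {ok, Entries} = erl_tar:table("backup.tar", [verbose]),
    %% Each verbose entry is {Name, Type, Size, Mtime, Mode, Uid, Gid}.
    Files = [Name || {Name, regular, _Size, _Mtime, _Mode, _Uid, _Gid} <- Entries],
    ok = erl_tar:extract("backup.tar", [{files, Files}, {cwd, "out"}, verbose]),
    %% In-memory variant: nothing is written to disk, contents come back as
    %% {Name, Binary} pairs:
    %% {ok, NamedBins} = erl_tar:extract({binary, TarBin}, [memory]).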
+%% Converts a datetime tuple to a readable string time_to_string({{Y, Mon, Day}, {H, Min, _}}) -> io_lib:format("~s ~2w ~s:~s ~w", [month(Mon), Day, two_d(H), two_d(Min), Y]). @@ -225,809 +280,1612 @@ month(10) -> "Oct"; month(11) -> "Nov"; month(12) -> "Dec". -%% Converts the short error reason to a descriptive string. +%%%================================================================ +%% The open function with friends is to keep the file and binary api of this module +-type open_handle() :: file:filename() + | {binary, binary()} + | {file, term()}. +-spec open(open_handle(), [write | compressed | cooked]) -> + {ok, reader()} | {error, term()}. +open({binary, Bin}, Mode) when is_binary(Bin) -> + do_open({binary, Bin}, Mode); +open({file, Fd}, Mode) -> + do_open({file, Fd}, Mode); +open(Name, Mode) when is_list(Name); is_binary(Name) -> + do_open(Name, Mode). + +do_open(Name, Mode) when is_list(Mode) -> + case open_mode(Mode) of + {ok, Access, Raw, Opts} -> + open1(Name, Access, Raw, Opts); + {error, Reason} -> + {error, {Name, Reason}} + end. -format_error(bad_header) -> "Bad directory header"; -format_error(eof) -> "Unexpected end of file"; -format_error(symbolic_link_too_long) -> "Symbolic link too long"; -format_error({Name,Reason}) -> - lists:flatten(io_lib:format("~ts: ~ts", [Name,format_error(Reason)])); -format_error(Atom) when is_atom(Atom) -> - file:format_error(Atom); -format_error(Term) -> - lists:flatten(io_lib:format("~tp", [Term])). +open1({binary,Bin}, read, _Raw, Opts) when is_binary(Bin) -> + case file:open(Bin, [ram,binary,read]) of + {ok,File} -> + _ = [ram_file:uncompress(File) || Opts =:= [compressed]], + {ok, #reader{handle=File,access=read,func=fun file_op/2}}; + Error -> + Error + end; +open1({file, Fd}, read, _Raw, _Opts) -> + Reader = #reader{handle=Fd,access=read,func=fun file_op/2}, + case do_position(Reader, {cur, 0}) of + {ok, Pos, Reader2} -> + {ok, Reader2#reader{pos=Pos}}; + {error, _} = Err -> + Err + end; +open1(Name, Access, Raw, Opts) when is_list(Name) or is_binary(Name) -> + case file:open(Name, Raw ++ [binary, Access|Opts]) of + {ok, File} -> + {ok, #reader{handle=File,access=Access,func=fun file_op/2}}; + {error, Reason} -> + {error, {Name, Reason}} + end. +open_mode(Mode) -> + open_mode(Mode, false, [raw], []). -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%%% -%%% Useful definitions (also start of implementation). -%%% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -%% Offset for fields in the tar header. -%% Note that these offsets are ZERO-based as in the POSIX standard -%% document, while binaries use ONE-base offset. Caveat Programmer. - --define(th_name, 0). --define(th_mode, 100). --define(th_uid, 108). --define(th_gid, 116). --define(th_size, 124). --define(th_mtime, 136). --define(th_chksum, 148). --define(th_typeflag, 156). --define(th_linkname, 157). --define(th_magic, 257). --define(th_version, 263). --define(th_prefix, 345). - -%% Length of these fields. - --define(th_name_len, 100). --define(th_mode_len, 8). --define(th_uid_len, 8). --define(th_gid_len, 8). --define(th_size_len, 12). --define(th_mtime_len, 12). --define(th_chksum_len, 8). --define(th_linkname_len, 100). --define(th_magic_len, 6). --define(th_version_len, 2). --define(th_prefix_len, 167). - --record(tar_header, - {name, % Name of file. - mode, % Mode bits. - uid, % User id. - gid, % Group id. - size, % Size of file - mtime, % Last modified (seconds since - % Jan 1, 1970). - chksum, % Checksum of header. 
- typeflag = [], % Type of file. - linkname = [], % Name of link. - filler = [], - prefix}). % Filename prefix. - --define(record_size, 512). --define(block_size, (512*20)). - - - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%%% -%%% Adding members to a tar archive. -%%% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -add1(TarFile, Bin, NameInArchive, Opts) when is_binary(Bin) -> - Now = calendar:now_to_local_time(erlang:timestamp()), - Info = #file_info{size = byte_size(Bin), - type = regular, - access = read_write, - atime = Now, - mtime = Now, - ctime = Now, - mode = 8#100644, - links = 1, - major_device = 0, - minor_device = 0, - inode = 0, - uid = 0, - gid = 0}, - Header = create_header(NameInArchive, Info), - add1(TarFile, NameInArchive, Header, Bin, Opts); -add1(TarFile, Name, NameInArchive, Opts) -> - case read_file_and_info(Name, Opts) of - {ok, Bin, Info} when Info#file_info.type =:= regular -> - Header = create_header(NameInArchive, Info), - add1(TarFile, Name, Header, Bin, Opts); - {ok, PointsTo, Info} when Info#file_info.type =:= symlink -> - if - length(PointsTo) > 100 -> - {error,{PointsTo,symbolic_link_too_long}}; - true -> - Info2 = Info#file_info{size=0}, - Header = create_header(NameInArchive, Info2, PointsTo), - add1(TarFile, Name, Header, list_to_binary([]), Opts) - end; - {ok, _, Info} when Info#file_info.type =:= directory -> - add_directory(TarFile, Name, NameInArchive, Info, Opts); - {ok, _, #file_info{type=Type}} -> - {error, {bad_file_type, Name, Type}}; - {error, Reason} -> - {error, {Name, Reason}} +open_mode(read, _, Raw, _) -> + {ok, read, Raw, []}; +open_mode(write, _, Raw, _) -> + {ok, write, Raw, []}; +open_mode([read|Rest], false, Raw, Opts) -> + open_mode(Rest, read, Raw, Opts); +open_mode([write|Rest], false, Raw, Opts) -> + open_mode(Rest, write, Raw, Opts); +open_mode([compressed|Rest], Access, Raw, Opts) -> + open_mode(Rest, Access, Raw, [compressed|Opts]); +open_mode([cooked|Rest], Access, _Raw, Opts) -> + open_mode(Rest, Access, [], Opts); +open_mode([], Access, Raw, Opts) -> + {ok, Access, Raw, Opts}; +open_mode(_, _, _, _) -> + {error, einval}. + +file_op(write, {Fd, Data}) -> + file:write(Fd, Data); +file_op(position, {Fd, Pos}) -> + file:position(Fd, Pos); +file_op(read2, {Fd, Size}) -> + file:read(Fd, Size); +file_op(close, Fd) -> + file:close(Fd). + +%% Closes a tar archive. +-spec close(reader()) -> ok | {error, term()}. +close(#reader{access=read}=Reader) -> + ok = do_close(Reader); +close(#reader{access=write}=Reader) -> + {ok, Reader2} = pad_file(Reader), + ok = do_close(Reader2), + ok; +close(_) -> + {error, einval}. + +pad_file(#reader{pos=Pos}=Reader) -> + %% There must be at least two zero blocks at the end. + PadCurrent = skip_padding(Pos+?BLOCK_SIZE), + Padding = <<0:PadCurrent/unit:8>>, + do_write(Reader, [Padding, ?ZERO_BLOCK, ?ZERO_BLOCK]). + + +%%%================================================================ +%% Creation/modification of tar archives + +%% Creates a tar file Name containing the given files. +-spec create(file:filename(), filelist()) -> ok | {error, {string(), term()}}. +create(Name, FileList) when is_list(Name); is_binary(Name) -> + create(Name, FileList, []). + +%% Creates a tar archive Name containing the given files. +%% Accepted options: verbose, compressed, cooked +-spec create(file:filename(), filelist(), [create_opt()]) -> + ok | {error, term()} | {error, {string(), term()}}. 
+create(Name, FileList, Options) when is_list(Name); is_binary(Name) -> + Mode = lists:filter(fun(X) -> (X=:=compressed) or (X=:=cooked) + end, Options), + case open(Name, [write|Mode]) of + {ok, TarFile} -> + do_create(TarFile, FileList, Options); + {error, _} = Err -> + Err end. -add1(Tar, Name, Header, chunked, Options) -> - add_verbose(Options, "a ~ts [chunked ", [Name]), - try - ok = do_write(Tar, Header), - {ok,D} = file:open(Name, [read,binary]), - {ok,NumBytes} = add_read_write_chunks(D, Tar, Options#add_opts.chunk_size, 0, Options), - _ = file:close(D), - ok = do_write(Tar, padding(NumBytes,?record_size)) - of - ok -> - add_verbose(Options, "~n", []), - ok - catch - error:{badmatch,{error,Error}} -> - add_verbose(Options, "~n", []), - {error,{Name,Error}} +do_create(TarFile, [], _Opts) -> + close(TarFile); +do_create(TarFile, [{NameInArchive, NameOrBin}|Rest], Opts) -> + case add(TarFile, NameOrBin, NameInArchive, Opts) of + ok -> + do_create(TarFile, Rest, Opts); + {error, _} = Err -> + _ = close(TarFile), + Err end; -add1(Tar, Name, Header, Bin, Options) -> - add_verbose(Options, "a ~ts~n", [Name]), - do_write(Tar, [Header, Bin, padding(byte_size(Bin), ?record_size)]). - -add_read_write_chunks(D, Tar, ChunkSize, SumNumBytes, Options) -> - case file:read(D, ChunkSize) of - {ok,Bin} -> - ok = do_write(Tar, Bin), - add_verbose(Options, ".", []), - add_read_write_chunks(D, Tar, ChunkSize, SumNumBytes+byte_size(Bin), Options); - eof -> - add_verbose(Options, "]", []), - {ok,SumNumBytes}; - Other -> - Other +do_create(TarFile, [Name|Rest], Opts) -> + case add(TarFile, Name, Name, Opts) of + ok -> + do_create(TarFile, Rest, Opts); + {error, _} = Err -> + _ = close(TarFile), + Err end. -add_directory(TarFile, DirName, NameInArchive, Info, Options) -> +%% Adds a file to a tape archive. +-type add_type() :: string() + | {string(), string()} + | {string(), binary()}. +-spec add(reader(), add_type(), [add_opt()]) -> ok | {error, term()}. +add(Reader, {NameInArchive, Name}, Opts) + when is_list(NameInArchive), is_list(Name) -> + do_add(Reader, Name, NameInArchive, Opts); +add(Reader, {NameInArchive, Bin}, Opts) + when is_list(NameInArchive), is_binary(Bin) -> + do_add(Reader, Bin, NameInArchive, Opts); +add(Reader, Name, Opts) when is_list(Name) -> + do_add(Reader, Name, Name, Opts). + + +-spec add(reader(), string() | binary(), string(), [add_opt()]) -> + ok | {error, term()}. +add(Reader, NameOrBin, NameInArchive, Options) + when is_list(NameOrBin); is_binary(NameOrBin), + is_list(NameInArchive), is_list(Options) -> + do_add(Reader, NameOrBin, NameInArchive, Options). + +do_add(#reader{access=write}=Reader, Name, NameInArchive, Options) + when is_list(NameInArchive), is_list(Options) -> + Opts = #add_opts{read_info=fun(F) -> file:read_link_info(F) end}, + add1(Reader, Name, NameInArchive, add_opts(Options, Opts)); +do_add(#reader{access=read},_,_,_) -> + {error, eacces}; +do_add(Reader,_,_,_) -> + {error, {badarg, Reader}}. + +add_opts([dereference|T], Opts) -> + add_opts(T, Opts#add_opts{read_info=fun(F) -> file:read_file_info(F) end}); +add_opts([verbose|T], Opts) -> + add_opts(T, Opts#add_opts{verbose=true}); +add_opts([{chunks,N}|T], Opts) -> + add_opts(T, Opts#add_opts{chunk_size=N}); +add_opts([_|T], Opts) -> + add_opts(T, Opts); +add_opts([], Opts) -> + Opts. 
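add/3 above accepts either a file name or a {NameInArchive, Binary} pair, and add/4 allows the name stored in the archive to differ from the name on disk, so an archive can be assembled from a mix of on-disk and in-memory content. A minimal sketch; all file and archive names are illustrative:

    %% Sketch: build an archive incrementally instead of using create/2,3.
    {ok, Tar} = erl_tar:open("release.tar", [write]),
    ok = erl_tar:add(Tar, "ebin", [dereference, verbose]),           % a directory tree
    ok = erl_tar:add(Tar, {"VERSION", <<"1.0.0\n">>}, []),           % in-memory content
    ok = erl_tar:add(Tar, "doc/README.md", "README.md", [verbose]),  % rename in archive
    ok = erl_tar:close(Tar).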
+ +add1(#reader{}=Reader, Name, NameInArchive, #add_opts{read_info=ReadInfo}=Opts) + when is_list(Name) -> + Res = case ReadInfo(Name) of + {error, Reason0} -> + {error, {Name, Reason0}}; + {ok, #file_info{type=symlink}=Fi} -> + add_verbose(Opts, "a ~ts~n", [NameInArchive]), + {ok, Linkname} = file:read_link(Name), + Header = fileinfo_to_header(NameInArchive, Fi, Linkname), + add_header(Reader, Header, Opts); + {ok, #file_info{type=regular}=Fi} -> + add_verbose(Opts, "a ~ts~n", [NameInArchive]), + Header = fileinfo_to_header(NameInArchive, Fi, false), + {ok, Reader2} = add_header(Reader, Header, Opts), + FileSize = Header#tar_header.size, + {ok, FileSize, Reader3} = do_copy(Reader2, Name, Opts), + Padding = skip_padding(FileSize), + Pad = <<0:Padding/unit:8>>, + do_write(Reader3, Pad); + {ok, #file_info{type=directory}=Fi} -> + add_directory(Reader, Name, NameInArchive, Fi, Opts); + {ok, #file_info{}=Fi} -> + add_verbose(Opts, "a ~ts~n", [NameInArchive]), + Header = fileinfo_to_header(NameInArchive, Fi, false), + add_header(Reader, Header, Opts) + end, + case Res of + ok -> ok; + {ok, _Reader} -> ok; + {error, _Reason} = Err -> Err + end; +add1(Reader, Bin, NameInArchive, Opts) when is_binary(Bin) -> + add_verbose(Opts, "a ~ts~n", [NameInArchive]), + Now = calendar:now_to_local_time(erlang:timestamp()), + Header = #tar_header{ + name = NameInArchive, + size = byte_size(Bin), + typeflag = ?TYPE_REGULAR, + atime = Now, + mtime = Now, + ctime = Now, + mode = 8#100644}, + {ok, Reader2} = add_header(Reader, Header, Opts), + Padding = skip_padding(byte_size(Bin)), + Data = [Bin, <<0:Padding/unit:8>>], + case do_write(Reader2, Data) of + {ok, _Reader3} -> ok; + {error, Reason} -> {error, {NameInArchive, Reason}} + end. + +add_directory(Reader, DirName, NameInArchive, Info, Opts) -> case file:list_dir(DirName) of - {ok, []} -> - add_verbose(Options, "a ~ts~n", [DirName]), - Header = create_header(NameInArchive, Info), - do_write(TarFile, Header); - {ok, Files} -> - Add = fun (File) -> - add1(TarFile, - filename:join(DirName, File), - filename:join(NameInArchive, File), - Options) end, - foreach_while_ok(Add, Files); - {error, Reason} -> - {error, {DirName, Reason}} + {ok, []} -> + add_verbose(Opts, "a ~ts~n", [NameInArchive]), + Header = fileinfo_to_header(NameInArchive, Info, false), + add_header(Reader, Header, Opts); + {ok, Files} -> + add_verbose(Opts, "a ~ts~n", [NameInArchive]), + try add_files(Reader, Files, DirName, NameInArchive, Opts) of + ok -> ok; + {error, _} = Err -> Err + catch + throw:{error, {_Name, _Reason}} = Err -> Err; + throw:{error, Reason} -> {error, {DirName, Reason}} + end; + {error, Reason} -> + {error, {DirName, Reason}} end. - -%% Creates a header for file in a tar file. - -create_header(Name, Info) -> - create_header(Name, Info, []). -create_header(Name, #file_info {mode=Mode, uid=Uid, gid=Gid, - size=Size, mtime=Mtime0, type=Type}, Linkname) -> - Mtime = posix_time(erlang:localtime_to_universaltime(Mtime0)), - {Prefix,Suffix} = split_filename(Name), - H0 = [to_string(Suffix, 100), - to_octal(Mode, 8), - to_octal(Uid, 8), - to_octal(Gid, 8), - to_octal(Size, ?th_size_len), - to_octal(Mtime, ?th_mtime_len), - <<" ">>, - file_type(Type), - to_string(Linkname, ?th_linkname_len), - "ustar",0, - "00", - zeroes(?th_prefix-?th_version-?th_version_len), - to_string(Prefix, ?th_prefix_len)], - H = list_to_binary(H0), - 512 = byte_size(H), %Assertion. 
- ChksumString = to_octal(checksum(H), 6, [0,$\s]), - <<Before:?th_chksum/binary,_:?th_chksum_len/binary,After/binary>> = H, - [Before,ChksumString,After]. - -file_type(regular) -> $0; -file_type(symlink) -> $2; -file_type(directory) -> $5. - -to_octal(Int, Count) when Count > 1 -> - to_octal(Int, Count-1, [0]). - -to_octal(_, 0, Result) -> Result; -to_octal(Int, Count, Result) -> - to_octal(Int div 8, Count-1, [Int rem 8 + $0|Result]). - -to_string(Str0, Count) -> - Str = case file:native_name_encoding() of - utf8 -> - unicode:characters_to_binary(Str0); - latin1 -> - list_to_binary(Str0) - end, - case byte_size(Str) of - Size when Size < Count -> - [Str|zeroes(Count-Size)]; - _ -> Str + +add_files(_Reader, [], _Dir, _DirInArchive, _Opts) -> + ok; +add_files(Reader, [Name|Rest], Dir, DirInArchive, #add_opts{read_info=Info}=Opts) -> + FullName = filename:join(Dir, Name), + NameInArchive = filename:join(DirInArchive, Name), + Res = case Info(FullName) of + {error, Reason} -> + {error, {FullName, Reason}}; + {ok, #file_info{type=directory}=Fi} -> + add_directory(Reader, FullName, NameInArchive, Fi, Opts); + {ok, #file_info{type=symlink}=Fi} -> + add_verbose(Opts, "a ~ts~n", [NameInArchive]), + {ok, Linkname} = file:read_link(FullName), + Header = fileinfo_to_header(NameInArchive, Fi, Linkname), + add_header(Reader, Header, Opts); + {ok, #file_info{type=regular}=Fi} -> + add_verbose(Opts, "a ~ts~n", [NameInArchive]), + Header = fileinfo_to_header(NameInArchive, Fi, false), + {ok, Reader2} = add_header(Reader, Header, Opts), + FileSize = Header#tar_header.size, + {ok, FileSize, Reader3} = do_copy(Reader2, FullName, Opts), + Padding = skip_padding(FileSize), + Pad = <<0:Padding/unit:8>>, + do_write(Reader3, Pad); + {ok, #file_info{}=Fi} -> + add_verbose(Opts, "a ~ts~n", [NameInArchive]), + Header = fileinfo_to_header(NameInArchive, Fi, false), + add_header(Reader, Header, Opts) + end, + case Res of + ok -> add_files(Reader, Rest, Dir, DirInArchive, Opts); + {ok, ReaderNext} -> add_files(ReaderNext, Rest, Dir, DirInArchive, Opts); + {error, _} = Err -> Err end. -%% Pads out end of file. - -pad_file(File) -> - {ok,Position} = do_position(File, {cur,0}), - %% There must be at least two zero records at the end. - Fill = case ?block_size - (Position rem ?block_size) of - Fill0 when Fill0 < 2*?record_size -> - %% We need to another block here to ensure that there - %% are at least two zero records at the end. - Fill0 + ?block_size; - Fill0 -> - %% Large enough. - Fill0 - end, - do_write(File, zeroes(Fill)). - -split_filename(Name) when length(Name) =< ?th_name_len -> - {"", Name}; -split_filename(Name0) -> - split_filename(lists:reverse(filename:split(Name0)), [], [], 0). - -split_filename([Comp|Rest], Prefix, Suffix, Len) - when Len+length(Comp) < ?th_name_len -> - split_filename(Rest, Prefix, [Comp|Suffix], Len+length(Comp)+1); -split_filename([Comp|Rest], Prefix, Suffix, Len) -> - split_filename(Rest, [Comp|Prefix], Suffix, Len+length(Comp)+1); -split_filename([], Prefix, Suffix, _) -> - {filename:join(Prefix),filename:join(Suffix)}. - - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%%% -%%% Retrieving files from a tape archive. -%%% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -%% Options used when reading a tar archive. - --record(read_opts, - {cwd :: string(), % Current working directory. - keep_old_files = false :: boolean(), % Owerwrite or not. - files = all, % Set of files to extract - % (or all). 
- output = file :: 'file' | 'memory', - open_mode = [], % Open mode options. - verbose = false :: boolean()}). % Verbose on/off. +format_string(String, Size) when length(String) > Size -> + throw({error, {write_string, field_too_long}}); +format_string(String, Size) -> + Ascii = to_ascii(String), + if byte_size(Ascii) < Size -> + [Ascii, 0]; + true -> + Ascii + end. -extract_opts(List) -> - extract_opts(List, default_options()). +format_octal(Octal) -> + iolist_to_binary(io_lib:fwrite("~.8B", [Octal])). + +add_header(#reader{}=Reader, #tar_header{}=Header, Opts) -> + {ok, Iodata} = build_header(Header, Opts), + do_write(Reader, Iodata). + +write_to_block(Block, IoData, Start) when is_list(IoData) -> + write_to_block(Block, iolist_to_binary(IoData), Start); +write_to_block(Block, Bin, Start) when is_binary(Bin) -> + Size = byte_size(Bin), + <<Head:Start/unit:8, _:Size/unit:8, Rest/binary>> = Block, + <<Head:Start/unit:8, Bin/binary, Rest/binary>>. + +build_header(#tar_header{}=Header, Opts) -> + #tar_header{ + name=Name, + mode=Mode, + uid=Uid, + gid=Gid, + size=Size, + typeflag=Type, + linkname=Linkname, + uname=Uname, + gname=Gname, + devmajor=Devmaj, + devminor=Devmin + } = Header, + Mtime = datetime_to_posix(Header#tar_header.mtime), + + Block0 = ?ZERO_BLOCK, + {Block1, Pax0} = write_string(Block0, ?V7_NAME, ?V7_NAME_LEN, Name, ?PAX_PATH, #{}), + Block2 = write_octal(Block1, ?V7_MODE, ?V7_MODE_LEN, Mode), + {Block3, Pax1} = write_numeric(Block2, ?V7_UID, ?V7_UID_LEN, Uid, ?PAX_UID, Pax0), + {Block4, Pax2} = write_numeric(Block3, ?V7_GID, ?V7_GID_LEN, Gid, ?PAX_GID, Pax1), + {Block5, Pax3} = write_numeric(Block4, ?V7_SIZE, ?V7_SIZE_LEN, Size, ?PAX_SIZE, Pax2), + {Block6, Pax4} = write_numeric(Block5, ?V7_MTIME, ?V7_MTIME_LEN, Mtime, ?PAX_NONE, Pax3), + {Block7, Pax5} = write_string(Block6, ?V7_TYPE, ?V7_TYPE_LEN, <<Type>>, ?PAX_NONE, Pax4), + {Block8, Pax6} = write_string(Block7, ?V7_LINKNAME, ?V7_LINKNAME_LEN, + Linkname, ?PAX_LINKPATH, Pax5), + {Block9, Pax7} = write_string(Block8, ?USTAR_UNAME, ?USTAR_UNAME_LEN, + Uname, ?PAX_UNAME, Pax6), + {Block10, Pax8} = write_string(Block9, ?USTAR_GNAME, ?USTAR_GNAME_LEN, + Gname, ?PAX_GNAME, Pax7), + {Block11, Pax9} = write_numeric(Block10, ?USTAR_DEVMAJ, ?USTAR_DEVMAJ_LEN, + Devmaj, ?PAX_NONE, Pax8), + {Block12, Pax10} = write_numeric(Block11, ?USTAR_DEVMIN, ?USTAR_DEVMIN_LEN, + Devmin, ?PAX_NONE, Pax9), + {Block13, Pax11} = set_path(Block12, Pax10), + PaxEntry = case maps:size(Pax11) of + 0 -> []; + _ -> build_pax_entry(Header, Pax11, Opts) + end, + Block14 = set_format(Block13, ?FORMAT_USTAR), + Block15 = set_checksum(Block14), + {ok, [PaxEntry, Block15]}. + +set_path(Block0, Pax) -> + %% only use ustar header when name is too long + case maps:get(?PAX_PATH, Pax, nil) of + nil -> + {Block0, Pax}; + PaxPath -> + case split_ustar_path(PaxPath) of + {ok, UstarName, UstarPrefix} -> + {Block1, _} = write_string(Block0, ?V7_NAME, ?V7_NAME_LEN, + UstarName, ?PAX_NONE, #{}), + {Block2, _} = write_string(Block1, ?USTAR_PREFIX, ?USTAR_PREFIX_LEN, + UstarPrefix, ?PAX_NONE, #{}), + {Block2, maps:remove(?PAX_PATH, Pax)}; + false -> + {Block0, Pax} + end + end. -table_opts(List) -> - read_opts(List, default_options()). +set_format(Block0, Format) + when Format =:= ?FORMAT_USTAR; Format =:= ?FORMAT_PAX -> + Block1 = write_to_block(Block0, ?MAGIC_USTAR, ?USTAR_MAGIC), + write_to_block(Block1, ?VERSION_USTAR, ?USTAR_VERSION); +set_format(_Block, Format) -> + throw({error, {invalid_format, Format}}). 
+ +set_checksum(Block) -> + Checksum = compute_checksum(Block), + write_octal(Block, ?V7_CHKSUM, ?V7_CHKSUM_LEN, Checksum). + +build_pax_entry(Header, PaxAttrs, Opts) -> + Path = Header#tar_header.name, + Filename = filename:basename(Path), + Dir = filename:dirname(Path), + Path2 = filename:join([Dir, "PaxHeaders.0", Filename]), + AsciiPath = to_ascii(Path2), + Path3 = if byte_size(AsciiPath) > ?V7_NAME_LEN -> + binary_part(AsciiPath, 0, ?V7_NAME_LEN - 1); + true -> + AsciiPath + end, + Keys = maps:keys(PaxAttrs), + SortedKeys = lists:sort(Keys), + PaxFile = build_pax_file(SortedKeys, PaxAttrs), + Size = byte_size(PaxFile), + Padding = (?BLOCK_SIZE - + (byte_size(PaxFile) rem ?BLOCK_SIZE)) rem ?BLOCK_SIZE, + Pad = <<0:Padding/unit:8>>, + PaxHeader = #tar_header{ + name=unicode:characters_to_list(Path3), + size=Size, + mtime=Header#tar_header.mtime, + atime=Header#tar_header.atime, + ctime=Header#tar_header.ctime, + typeflag=?TYPE_X_HEADER + }, + {ok, PaxHeaderData} = build_header(PaxHeader, Opts), + [PaxHeaderData, PaxFile, Pad]. + +build_pax_file(Keys, PaxAttrs) -> + build_pax_file(Keys, PaxAttrs, []). +build_pax_file([], _, Acc) -> + unicode:characters_to_binary(Acc); +build_pax_file([K|Rest], Attrs, Acc) -> + V = maps:get(K, Attrs), + Size = sizeof(K) + sizeof(V) + 3, + Size2 = sizeof(Size) + Size, + Key = to_string(K), + Value = to_string(V), + Record = unicode:characters_to_binary(io_lib:format("~B ~ts=~ts\n", [Size2, Key, Value])), + if byte_size(Record) =/= Size2 -> + Size3 = byte_size(Record), + Record2 = io_lib:format("~B ~ts=~ts\n", [Size3, Key, Value]), + build_pax_file(Rest, Attrs, [Acc, Record2]); + true -> + build_pax_file(Rest, Attrs, [Acc, Record]) + end. -default_options() -> - {ok, Cwd} = file:get_cwd(), - #read_opts{cwd=Cwd}. +sizeof(Bin) when is_binary(Bin) -> + byte_size(Bin); +sizeof(List) when is_list(List) -> + length(List); +sizeof(N) when is_integer(N) -> + byte_size(integer_to_binary(N)); +sizeof(N) when is_float(N) -> + byte_size(float_to_binary(N)). + +to_string(Bin) when is_binary(Bin) -> + unicode:characters_to_list(Bin); +to_string(List) when is_list(List) -> + List; +to_string(N) when is_integer(N) -> + integer_to_list(N); +to_string(N) when is_float(N) -> + float_to_list(N). + +split_ustar_path(Path) -> + Len = length(Path), + NotAscii = not is_ascii(Path), + if Len =< ?V7_NAME_LEN; NotAscii -> + false; + true -> + PathBin = binary:list_to_bin(Path), + case binary:split(PathBin, [<<$/>>], [global, trim_all]) of + [Part] when byte_size(Part) >= ?V7_NAME_LEN -> + false; + Parts -> + case lists:last(Parts) of + Name when byte_size(Name) >= ?V7_NAME_LEN -> + false; + Name -> + Parts2 = lists:sublist(Parts, length(Parts) - 1), + join_split_ustar_path(Parts2, {ok, Name, nil}) + end + end + end. -%% Parse options for extract. +join_split_ustar_path([], Acc) -> + Acc; +join_split_ustar_path([Part|_], {ok, _, nil}) + when byte_size(Part) > ?USTAR_PREFIX_LEN -> + false; +join_split_ustar_path([Part|_], {ok, _Name, Acc}) + when (byte_size(Part)+byte_size(Acc)) > ?USTAR_PREFIX_LEN -> + false; +join_split_ustar_path([Part|Rest], {ok, Name, nil}) -> + join_split_ustar_path(Rest, {ok, Name, Part}); +join_split_ustar_path([Part|Rest], {ok, Name, Acc}) -> + join_split_ustar_path(Rest, {ok, Name, <<Acc/binary,$/,Part/binary>>}). + +datetime_to_posix(DateTime) -> + Epoch = calendar:datetime_to_gregorian_seconds(?EPOCH), + Secs = calendar:datetime_to_gregorian_seconds(DateTime), + case Secs - Epoch of + N when N < 0 -> 0; + N -> N + end. 
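Each record emitted by build_pax_file/2 above has the textual form "<length> <key>=<value>\n", where <length> is the decimal byte count of the whole record including its own digits, which is why the code re-checks the formatted record's size. A standalone sketch of that calculation (pax_record/2 is not part of the module):

    %% Sketch: self-including length prefix of one PAX record.
    pax_record(Key, Value) ->
        Body = io_lib:format(" ~ts=~ts\n", [Key, Value]),
        BodyLen = iolist_size(Body),
        Guess = BodyLen + length(integer_to_list(BodyLen)),
        Len = case BodyLen + length(integer_to_list(Guess)) of
                  Guess -> Guess;           % digit count unchanged
                  Corrected -> Corrected    % e.g. when crossing 99 -> 100
              end,
        unicode:characters_to_binary([integer_to_list(Len), Body]).

    %% pax_record("path", "a-very-long/file/name") returns
    %% <<"30 path=a-very-long/file/name\n">>: 2 digits + 28 body bytes = 30.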
-extract_opts([keep_old_files|Rest], Opts) -> - extract_opts(Rest, Opts#read_opts{keep_old_files=true}); -extract_opts([{cwd, Cwd}|Rest], Opts) -> - extract_opts(Rest, Opts#read_opts{cwd=Cwd}); -extract_opts([{files, Files}|Rest], Opts) -> - Set = ordsets:from_list(Files), - extract_opts(Rest, Opts#read_opts{files=Set}); -extract_opts([memory|Rest], Opts) -> - extract_opts(Rest, Opts#read_opts{output=memory}); -extract_opts([compressed|Rest], Opts=#read_opts{open_mode=OpenMode}) -> - extract_opts(Rest, Opts#read_opts{open_mode=[compressed|OpenMode]}); -extract_opts([cooked|Rest], Opts=#read_opts{open_mode=OpenMode}) -> - extract_opts(Rest, Opts#read_opts{open_mode=[cooked|OpenMode]}); -extract_opts([verbose|Rest], Opts) -> - extract_opts(Rest, Opts#read_opts{verbose=true}); -extract_opts([Other|Rest], Opts) -> - extract_opts(Rest, read_opts([Other], Opts)); -extract_opts([], Opts) -> - Opts. +write_octal(Block, Pos, Size, X) -> + Octal = zero_pad(format_octal(X), Size-1), + if byte_size(Octal) < Size -> + write_to_block(Block, Octal, Pos); + true -> + throw({error, {write_failed, octal_field_too_long}}) + end. -%% Common options for all read operations. +write_string(Block, Pos, Size, Str, PaxAttr, Pax0) -> + NotAscii = not is_ascii(Str), + if PaxAttr =/= ?PAX_NONE andalso (length(Str) > Size orelse NotAscii) -> + Pax1 = maps:put(PaxAttr, Str, Pax0), + {Block, Pax1}; + true -> + Formatted = format_string(Str, Size), + {write_to_block(Block, Formatted, Pos), Pax0} + end. +write_numeric(Block, Pos, Size, X, PaxAttr, Pax0) -> + %% attempt octal + Octal = zero_pad(format_octal(X), Size-1), + if byte_size(Octal) < Size -> + {write_to_block(Block, [Octal, 0], Pos), Pax0}; + PaxAttr =/= ?PAX_NONE -> + Pax1 = maps:put(PaxAttr, X, Pax0), + {Block, Pax1}; + true -> + throw({error, {write_failed, numeric_field_too_long}}) + end. -read_opts([compressed|Rest], Opts=#read_opts{open_mode=OpenMode}) -> - read_opts(Rest, Opts#read_opts{open_mode=[compressed|OpenMode]}); -read_opts([cooked|Rest], Opts=#read_opts{open_mode=OpenMode}) -> - read_opts(Rest, Opts#read_opts{open_mode=[cooked|OpenMode]}); -read_opts([verbose|Rest], Opts) -> - read_opts(Rest, Opts#read_opts{verbose=true}); -read_opts([_|Rest], Opts) -> - read_opts(Rest, Opts); -read_opts([], Opts) -> - Opts. +zero_pad(Str, Size) when byte_size(Str) >= Size -> + Str; +zero_pad(Str, Size) -> + Padding = Size - byte_size(Str), + Pad = binary:copy(<<$0>>, Padding), + <<Pad/binary, Str/binary>>. 
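write_numeric/6 above first tries the classic fixed-width octal layout and only records a PAX attribute (or fails) when the value does not fit. For illustration, a standalone sketch of the layout a fitting value ends up with (octal_field/2 is not part of the module):

    %% Sketch: a numeric field that fits is zero-padded octal plus a NUL byte.
    octal_field(N, FieldLen) ->
        Digits = integer_to_binary(N, 8),
        Pad = binary:copy(<<$0>>, FieldLen - 1 - byte_size(Digits)),
        <<Pad/binary, Digits/binary, 0>>.

    %% octal_field(8#755, 8) =:= <<"0000755", 0>>, i.e. the mode field of a
    %% typical executable.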
-foldl_read({AccessMode,TD={tar_descriptor,_UsrHandle,_AccessFun}}, Fun, Accu, Opts) -> - case AccessMode of - read -> - foldl_read0(TD, Fun, Accu, Opts); - _ -> - {error,{read_mode_expected,AccessMode}} - end; -foldl_read(TarName, Fun, Accu, Opts) -> - case open(TarName, [read|Opts#read_opts.open_mode]) of - {ok, {read, File}} -> - Result = foldl_read0(File, Fun, Accu, Opts), - ok = do_close(File), - Result; - Error -> - Error + +%%%================================================================ +%% Functions for creating or modifying tar archives + +read_block(Reader) -> + case do_read(Reader, ?BLOCK_SIZE) of + eof -> + throw({error, eof}); + %% Two zero blocks mark the end of the archive + {ok, ?ZERO_BLOCK, Reader1} -> + case do_read(Reader1, ?BLOCK_SIZE) of + eof -> + % This is technically a malformed end-of-archive marker, + % as two ZERO_BLOCKs are expected as the marker, + % but if we've already made it this far, we should just ignore it + eof; + {ok, ?ZERO_BLOCK, _Reader2} -> + eof; + {ok, _Block, _Reader2} -> + throw({error, invalid_end_of_archive}); + {error,_} = Err -> + throw(Err) + end; + {ok, Block, Reader1} when is_binary(Block) -> + {ok, Block, Reader1}; + {error, _} = Err -> + throw(Err) end. -foldl_read0(File, Fun, Accu, Opts) -> - case catch foldl_read1(Fun, Accu, File, Opts) of - {'EXIT', Reason} -> - exit(Reason); - {error, {Reason, Format, Args}} -> - read_verbose(Opts, Format, Args), - {error, Reason}; - {error, Reason} -> - {error, Reason}; - Ok -> - Ok +get_header(#reader{}=Reader) -> + case read_block(Reader) of + eof -> + eof; + {ok, Block, Reader1} -> + convert_header(Block, Reader1) end. -foldl_read1(Fun, Accu0, File, Opts) -> - case get_header(File) of - eof -> - Fun(eof, File, Opts, Accu0); - Header -> - {ok, NewAccu} = Fun(Header, File, Opts, Accu0), - foldl_read1(Fun, NewAccu, File, Opts) +%% Converts the tar header to a record. +to_v7(Bin) when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE -> + #header_v7{ + name=binary_part(Bin, ?V7_NAME, ?V7_NAME_LEN), + mode=binary_part(Bin, ?V7_MODE, ?V7_MODE_LEN), + uid=binary_part(Bin, ?V7_UID, ?V7_UID_LEN), + gid=binary_part(Bin, ?V7_GID, ?V7_GID_LEN), + size=binary_part(Bin, ?V7_SIZE, ?V7_SIZE_LEN), + mtime=binary_part(Bin, ?V7_MTIME, ?V7_MTIME_LEN), + checksum=binary_part(Bin, ?V7_CHKSUM, ?V7_CHKSUM_LEN), + typeflag=binary:at(Bin, ?V7_TYPE), + linkname=binary_part(Bin, ?V7_LINKNAME, ?V7_LINKNAME_LEN) + }; +to_v7(_) -> + {error, header_block_too_small}. + +to_gnu(#header_v7{}=V7, Bin) + when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE -> + #header_gnu{ + header_v7=V7, + magic=binary_part(Bin, ?GNU_MAGIC, ?GNU_MAGIC_LEN), + version=binary_part(Bin, ?GNU_VERSION, ?GNU_VERSION_LEN), + uname=binary_part(Bin, 265, 32), + gname=binary_part(Bin, 297, 32), + devmajor=binary_part(Bin, 329, 8), + devminor=binary_part(Bin, 337, 8), + atime=binary_part(Bin, 345, 12), + ctime=binary_part(Bin, 357, 12), + sparse=to_sparse_array(binary_part(Bin, 386, 24*4+1)), + real_size=binary_part(Bin, 483, 12) + }. 
+ +to_star(#header_v7{}=V7, Bin) + when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE -> + #header_star{ + header_v7=V7, + magic=binary_part(Bin, ?USTAR_MAGIC, ?USTAR_MAGIC_LEN), + version=binary_part(Bin, ?USTAR_VERSION, ?USTAR_VERSION_LEN), + uname=binary_part(Bin, ?USTAR_UNAME, ?USTAR_UNAME_LEN), + gname=binary_part(Bin, ?USTAR_GNAME, ?USTAR_GNAME_LEN), + devmajor=binary_part(Bin, ?USTAR_DEVMAJ, ?USTAR_DEVMAJ_LEN), + devminor=binary_part(Bin, ?USTAR_DEVMIN, ?USTAR_DEVMIN_LEN), + prefix=binary_part(Bin, 345, 131), + atime=binary_part(Bin, 476, 12), + ctime=binary_part(Bin, 488, 12), + trailer=binary_part(Bin, ?STAR_TRAILER, ?STAR_TRAILER_LEN) + }. + +to_ustar(#header_v7{}=V7, Bin) + when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE -> + #header_ustar{ + header_v7=V7, + magic=binary_part(Bin, ?USTAR_MAGIC, ?USTAR_MAGIC_LEN), + version=binary_part(Bin, ?USTAR_VERSION, ?USTAR_VERSION_LEN), + uname=binary_part(Bin, ?USTAR_UNAME, ?USTAR_UNAME_LEN), + gname=binary_part(Bin, ?USTAR_GNAME, ?USTAR_GNAME_LEN), + devmajor=binary_part(Bin, ?USTAR_DEVMAJ, ?USTAR_DEVMAJ_LEN), + devminor=binary_part(Bin, ?USTAR_DEVMIN, ?USTAR_DEVMIN_LEN), + prefix=binary_part(Bin, 345, 155) + }. + +to_sparse_array(Bin) when is_binary(Bin) -> + MaxEntries = byte_size(Bin) div 24, + IsExtended = 1 =:= binary:at(Bin, 24*MaxEntries), + Entries = parse_sparse_entries(Bin, MaxEntries-1, []), + #sparse_array{ + entries=Entries, + max_entries=MaxEntries, + is_extended=IsExtended + }. + +parse_sparse_entries(<<>>, _, Acc) -> + Acc; +parse_sparse_entries(_, -1, Acc) -> + Acc; +parse_sparse_entries(Bin, N, Acc) -> + case to_sparse_entry(binary_part(Bin, N*24, 24)) of + nil -> + parse_sparse_entries(Bin, N-1, Acc); + Entry = #sparse_entry{} -> + parse_sparse_entries(Bin, N-1, [Entry|Acc]) end. -table1(eof, _, _, Result) -> - {ok, lists:reverse(Result)}; -table1(Header = #tar_header{}, File, #read_opts{verbose=true}, Result) -> - #tar_header{name=Name, size=Size, mtime=Mtime, typeflag=Type, - mode=Mode, uid=Uid, gid=Gid} = Header, - skip(File, Size), - {ok, [{Name, Type, Size, posix_to_erlang_time(Mtime), Mode, Uid, Gid}|Result]}; -table1(#tar_header{name=Name, size=Size}, File, _, Result) -> - skip(File, Size), - {ok, [Name|Result]}. - -extract1(eof, _, _, Acc) -> - if - is_list(Acc) -> - {ok, lists:reverse(Acc)}; - true -> - Acc - end; -extract1(Header, File, Opts, Acc) -> - Name = Header#tar_header.name, - case check_extract(Name, Opts) of - true -> - {ok, Bin} = get_element(File, Header), - case write_extracted_element(Header, Bin, Opts) of - ok -> - {ok, Acc}; - {ok, NameBin} when is_list(Acc) -> - {ok, [NameBin | Acc]}; - {ok, NameBin} when Acc =:= ok -> - {ok, [NameBin]} - end; - false -> - ok = skip(File, Header#tar_header.size), - {ok, Acc} +-define(EMPTY_ENTRY, <<0,0,0,0,0,0,0,0,0,0,0,0>>). +to_sparse_entry(Bin) when is_binary(Bin), byte_size(Bin) =:= 24 -> + OffsetBin = binary_part(Bin, 0, 12), + NumBytesBin = binary_part(Bin, 12, 12), + case {OffsetBin, NumBytesBin} of + {?EMPTY_ENTRY, ?EMPTY_ENTRY} -> + nil; + _ -> + #sparse_entry{ + offset=parse_numeric(OffsetBin), + num_bytes=parse_numeric(NumBytesBin)} end. -%% Checks if the file Name should be extracted. +-spec get_format(binary()) -> {ok, pos_integer(), header_v7()} + | ?FORMAT_UNKNOWN + | {error, term()}. +get_format(Bin) when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE -> + do_get_format(to_v7(Bin), Bin). 
+ +do_get_format({error, _} = Err, _Bin) -> + Err; +do_get_format(#header_v7{}=V7, Bin) + when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE -> + Checksum = parse_octal(V7#header_v7.checksum), + Chk1 = compute_checksum(Bin), + Chk2 = compute_signed_checksum(Bin), + if Checksum =/= Chk1 andalso Checksum =/= Chk2 -> + ?FORMAT_UNKNOWN; + true -> + %% guess magic + Ustar = to_ustar(V7, Bin), + Star = to_star(V7, Bin), + Magic = Ustar#header_ustar.magic, + Version = Ustar#header_ustar.version, + Trailer = Star#header_star.trailer, + Format = if + Magic =:= ?MAGIC_USTAR, Trailer =:= ?TRAILER_STAR -> + ?FORMAT_STAR; + Magic =:= ?MAGIC_USTAR -> + ?FORMAT_USTAR; + Magic =:= ?MAGIC_GNU, Version =:= ?VERSION_GNU -> + ?FORMAT_GNU; + true -> + ?FORMAT_V7 + end, + {ok, Format, V7} + end. -check_extract(_, #read_opts{files=all}) -> +unpack_format(Format, #header_v7{}=V7, Bin, Reader) + when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE -> + Mtime = posix_to_erlang_time(parse_numeric(V7#header_v7.mtime)), + Header0 = #tar_header{ + name=parse_string(V7#header_v7.name), + mode=parse_numeric(V7#header_v7.mode), + uid=parse_numeric(V7#header_v7.uid), + gid=parse_numeric(V7#header_v7.gid), + size=parse_numeric(V7#header_v7.size), + mtime=Mtime, + atime=Mtime, + ctime=Mtime, + typeflag=V7#header_v7.typeflag, + linkname=parse_string(V7#header_v7.linkname) + }, + Typeflag = Header0#tar_header.typeflag, + Header1 = if Format > ?FORMAT_V7 -> + unpack_modern(Format, V7, Bin, Header0); + true -> + Name = Header0#tar_header.name, + Header0#tar_header{name=safe_join_path("", Name)} + end, + HeaderOnly = is_header_only_type(Typeflag), + Header2 = if HeaderOnly -> + Header1#tar_header{size=0}; + true -> + Header1 + end, + if Typeflag =:= ?TYPE_GNU_SPARSE -> + Gnu = to_gnu(V7, Bin), + RealSize = parse_numeric(Gnu#header_gnu.real_size), + {Sparsemap, Reader2} = parse_sparse_map(Gnu, Reader), + Header3 = Header2#tar_header{size=RealSize}, + {Header3, new_sparse_file_reader(Reader2, Sparsemap, RealSize)}; + true -> + FileReader = #reg_file_reader{ + handle=Reader, + num_bytes=Header2#tar_header.size, + size=Header2#tar_header.size, + pos = 0 + }, + {Header2, FileReader} + end. + +unpack_modern(Format, #header_v7{}=V7, Bin, #tar_header{}=Header0) + when is_binary(Bin) -> + Typeflag = Header0#tar_header.typeflag, + Ustar = to_ustar(V7, Bin), + H0 = Header0#tar_header{ + uname=parse_string(Ustar#header_ustar.uname), + gname=parse_string(Ustar#header_ustar.gname)}, + H1 = if Typeflag =:= ?TYPE_CHAR + orelse Typeflag =:= ?TYPE_BLOCK -> + Ma = parse_numeric(Ustar#header_ustar.devmajor), + Mi = parse_numeric(Ustar#header_ustar.devminor), + H0#tar_header{ + devmajor=Ma, + devminor=Mi + }; + true -> + H0 + end, + {Prefix, H2} = case Format of + ?FORMAT_USTAR -> + {parse_string(Ustar#header_ustar.prefix), H1}; + ?FORMAT_STAR -> + Star = to_star(V7, Bin), + Prefix0 = parse_string(Star#header_star.prefix), + Atime0 = Star#header_star.atime, + Atime = posix_to_erlang_time(parse_numeric(Atime0)), + Ctime0 = Star#header_star.ctime, + Ctime = posix_to_erlang_time(parse_numeric(Ctime0)), + {Prefix0, H1#tar_header{ + atime=Atime, + ctime=Ctime + }}; + _ -> + {"", H1} + end, + Name = H2#tar_header.name, + H2#tar_header{name=safe_join_path(Prefix, Name)}. + + +safe_join_path([], Name) -> + strip_slashes(Name, both); +safe_join_path(Prefix, []) -> + strip_slashes(Prefix, right); +safe_join_path(Prefix, Name) -> + filename:join(strip_slashes(Prefix, right), strip_slashes(Name, both)). 
+ +strip_slashes(Str, Direction) -> + string:strip(Str, Direction, $/). + +new_sparse_file_reader(Reader, Sparsemap, RealSize) -> + true = validate_sparse_entries(Sparsemap, RealSize), + #sparse_file_reader{ + handle = Reader, + num_bytes = RealSize, + pos = 0, + size = RealSize, + sparse_map = Sparsemap}. + +validate_sparse_entries(Entries, RealSize) -> + validate_sparse_entries(Entries, RealSize, 0, 0). +validate_sparse_entries([], _RealSize, _I, _LastOffset) -> true; -check_extract(Name, #read_opts{files=Files}) -> - ordsets:is_element(Name, Files). +validate_sparse_entries([#sparse_entry{}=Entry|Rest], RealSize, I, LastOffset) -> + Offset = Entry#sparse_entry.offset, + NumBytes = Entry#sparse_entry.num_bytes, + if + Offset > ?MAX_INT64-NumBytes -> + throw({error, {invalid_sparse_map_entry, offset_too_large}}); + Offset+NumBytes > RealSize -> + throw({error, {invalid_sparse_map_entry, offset_too_large}}); + I > 0 andalso LastOffset > Offset -> + throw({error, {invalid_sparse_map_entry, overlapping_offsets}}); + true -> + ok + end, + validate_sparse_entries(Rest, RealSize, I+1, Offset+NumBytes). + + +-spec parse_sparse_map(header_gnu(), reader_type()) -> + {[sparse_entry()], reader_type()}. +parse_sparse_map(#header_gnu{sparse=Sparse}, Reader) + when Sparse#sparse_array.is_extended -> + parse_sparse_map(Sparse, Reader, []); +parse_sparse_map(#header_gnu{sparse=Sparse}, Reader) -> + {Sparse#sparse_array.entries, Reader}. +parse_sparse_map(#sparse_array{is_extended=true,entries=Entries}, Reader, Acc) -> + case read_block(Reader) of + eof -> + throw({error, eof}); + {ok, Block, Reader2} -> + Sparse2 = to_sparse_array(Block), + parse_sparse_map(Sparse2, Reader2, Entries++Acc) + end; +parse_sparse_map(#sparse_array{entries=Entries}, Reader, Acc) -> + Sorted = lists:sort(fun (#sparse_entry{offset=A},#sparse_entry{offset=B}) -> + A =< B + end, Entries++Acc), + {Sorted, Reader}. + +%% Defined by taking the sum of the unsigned byte values of the +%% entire header record, treating the checksum bytes to as ASCII spaces +compute_checksum(<<H1:?V7_CHKSUM/binary, + H2:?V7_CHKSUM_LEN/binary, + Rest:(?BLOCK_SIZE - ?V7_CHKSUM - ?V7_CHKSUM_LEN)/binary, + _/binary>>) -> + C0 = checksum(H1) + (byte_size(H2) * $\s), + C1 = checksum(Rest), + C0 + C1. + +compute_signed_checksum(<<H1:?V7_CHKSUM/binary, + H2:?V7_CHKSUM_LEN/binary, + Rest:(?BLOCK_SIZE - ?V7_CHKSUM - ?V7_CHKSUM_LEN)/binary, + _/binary>>) -> + C0 = signed_checksum(H1) + (byte_size(H2) * $\s), + C1 = signed_checksum(Rest), + C0 + C1. -get_header(File) -> - case do_read(File, ?record_size) of - eof -> - throw({error,eof}); - {ok, Bin} when is_binary(Bin) -> - convert_header(Bin); - {ok, List} -> - convert_header(list_to_binary(List)); - {error, Reason} -> - throw({error, Reason}) - end. +%% Returns the checksum of a binary. +checksum(Bin) -> checksum(Bin, 0). +checksum(<<A/unsigned,Rest/binary>>, Sum) -> + checksum(Rest, Sum+A); +checksum(<<>>, Sum) -> Sum. -%% Converts the tar header to a record. +signed_checksum(Bin) -> signed_checksum(Bin, 0). +signed_checksum(<<A/signed,Rest/binary>>, Sum) -> + signed_checksum(Rest, Sum+A); +signed_checksum(<<>>, Sum) -> Sum. + +-spec parse_numeric(binary()) -> non_neg_integer(). 
+parse_numeric(<<>>) -> + 0; +parse_numeric(<<First, _/binary>> = Bin) -> + %% check for base-256 format first + %% if the bit is set, then all following bits constitute a two's + %% complement encoded number in big-endian byte order + if + First band 16#80 =/= 0 -> + %% Handling negative numbers relies on the following identity: + %% -a-1 == ^a + %% If the number is negative, we use an inversion mask to invert + %% the data bytes and treat the value as an unsigned number + Inv = if First band 16#40 =/= 0 -> 16#00; true -> 16#FF end, + Bytes = binary:bin_to_list(Bin), + Reducer = fun (C, {I, X}) -> + C1 = C bxor Inv, + C2 = if I =:= 0 -> C1 band 16#7F; true -> C1 end, + if (X bsr 56) > 0 -> + throw({error,integer_overflow}); + true -> + {I+1, (X bsl 8) bor C2} + end + end, + {_, N} = lists:foldl(Reducer, {0,0}, Bytes), + if (N bsr 63) > 0 -> + throw({error, integer_overflow}); + true -> + if Inv =:= 16#FF -> + -1 bxor N; + true -> + N + end + end; + true -> + %% normal case is an octal number + parse_octal(Bin) + end. -convert_header(Bin) when byte_size(Bin) =:= ?record_size -> - case verify_checksum(Bin) of - ok -> - Hd = #tar_header{name=get_name(Bin), - mode=from_octal(Bin, ?th_mode, ?th_mode_len), - uid=from_octal(Bin, ?th_uid, ?th_uid_len), - gid=from_octal(Bin, ?th_gid, ?th_gid_len), - size=from_octal(Bin, ?th_size, ?th_size_len), - mtime=from_octal(Bin, ?th_mtime, ?th_mtime_len), - linkname=from_string(Bin, - ?th_linkname, ?th_linkname_len), - typeflag=typeflag(Bin)}, - convert_header1(Hd); - eof -> - eof +parse_octal(Bin) when is_binary(Bin) -> + %% skip leading/trailing zero bytes and spaces + do_parse_octal(Bin, <<>>). +do_parse_octal(<<>>, <<>>) -> + 0; +do_parse_octal(<<>>, Acc) -> + case io_lib:fread("~8u", binary:bin_to_list(Acc)) of + {error, _} -> throw({error, invalid_tar_checksum}); + {ok, [Octal], []} -> Octal; + {ok, _, _} -> throw({error, invalid_tar_checksum}) end; -convert_header(Bin) when byte_size(Bin) =:= 0 -> +do_parse_octal(<<$\s,Rest/binary>>, Acc) -> + do_parse_octal(Rest, Acc); +do_parse_octal(<<0, Rest/binary>>, Acc) -> + do_parse_octal(Rest, Acc); +do_parse_octal(<<C, Rest/binary>>, Acc) -> + do_parse_octal(Rest, <<Acc/binary, C>>). + +parse_string(Bin) when is_binary(Bin) -> + do_parse_string(Bin, <<>>). +do_parse_string(<<>>, Acc) -> + case unicode:characters_to_list(Acc) of + Str when is_list(Str) -> + Str; + {incomplete, _Str, _Rest} -> + binary:bin_to_list(Acc); + {error, _Str, _Rest} -> + throw({error, {bad_header, invalid_string}}) + end; +do_parse_string(<<0, _/binary>>, Acc) -> + do_parse_string(<<>>, Acc); +do_parse_string(<<C, Rest/binary>>, Acc) -> + do_parse_string(Rest, <<Acc/binary, C>>). + +convert_header(Bin, #reader{pos=Pos}=Reader) + when byte_size(Bin) =:= ?BLOCK_SIZE, (Pos rem ?BLOCK_SIZE) =:= 0 -> + case get_format(Bin) of + ?FORMAT_UNKNOWN -> + throw({error, bad_header}); + {ok, Format, V7} -> + unpack_format(Format, V7, Bin, Reader); + {error, Reason} -> + throw({error, {bad_header, Reason}}) + end; +convert_header(Bin, #reader{pos=Pos}) when byte_size(Bin) =:= ?BLOCK_SIZE -> + throw({error, misaligned_read, Pos}); +convert_header(Bin, _Reader) when byte_size(Bin) =:= 0 -> eof; -convert_header(_Bin) -> +convert_header(_Bin, _Reader) -> throw({error, eof}). -%% Basic sanity. Better set the element size to zero here if the type -%% always is of zero length. 
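parse_numeric/1 above accepts both encodings a numeric header field can use: plain octal, and the GNU base-256 form in which the top bit of the first byte is set and the remaining bits hold a big-endian two's complement value. A concrete illustration for a 12-byte size field (standalone, not part of the module):

    %% Sketch: 8 GiB does not fit in the 11 octal digits of a classic size
    %% field, so it is stored base-256: marker bit, then a big-endian number.
    SizeField = <<16#80, (8*1024*1024*1024):88>>,   % 12 bytes in total
    <<_Marker:1, Value:95>> = SizeField,
    Value =:= 8*1024*1024*1024.                     % true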
- -convert_header1(H) when H#tar_header.typeflag =:= symlink, H#tar_header.size =/= 0 -> - convert_header1(H#tar_header{size=0}); -convert_header1(H) when H#tar_header.typeflag =:= directory, H#tar_header.size =/= 0 -> - convert_header1(H#tar_header{size=0}); -convert_header1(Header) -> - Header. - -typeflag(Bin) -> - [T] = binary_to_list(Bin, ?th_typeflag+1, ?th_typeflag+1), - case T of - 0 -> regular; - $0 -> regular; - $1 -> link; - $2 -> symlink; - $3 -> char; - $4 -> block; - $5 -> directory; - $6 -> fifo; - $7 -> regular; - _ -> unknown +%% Creates a partially-populated header record based +%% on the provided file_info record. If the file is +%% a symlink, then `link` is used as the link target. +%% If the file is a directory, a slash is appended to the name. +fileinfo_to_header(Name, #file_info{}=Fi, Link) when is_list(Name) -> + BaseHeader = #tar_header{name=Name, + mtime=Fi#file_info.mtime, + atime=Fi#file_info.atime, + ctime=Fi#file_info.ctime, + mode=Fi#file_info.mode, + uid=Fi#file_info.uid, + gid=Fi#file_info.gid, + typeflag=?TYPE_REGULAR}, + do_fileinfo_to_header(BaseHeader, Fi, Link). + +do_fileinfo_to_header(Header, #file_info{size=Size,type=regular}, _Link) -> + Header#tar_header{size=Size,typeflag=?TYPE_REGULAR}; +do_fileinfo_to_header(#tar_header{name=Name}=Header, + #file_info{type=directory}, _Link) -> + Header#tar_header{name=Name++"/",typeflag=?TYPE_DIR}; +do_fileinfo_to_header(Header, #file_info{type=symlink}, Link) -> + Header#tar_header{typeflag=?TYPE_SYMLINK,linkname=Link}; +do_fileinfo_to_header(Header, #file_info{type=device,mode=Mode}=Fi, _Link) + when (Mode band ?S_IFMT) =:= ?S_IFCHR -> + Header#tar_header{typeflag=?TYPE_CHAR, + devmajor=Fi#file_info.major_device, + devminor=Fi#file_info.minor_device}; +do_fileinfo_to_header(Header, #file_info{type=device,mode=Mode}=Fi, _Link) + when (Mode band ?S_IFMT) =:= ?S_IFBLK -> + Header#tar_header{typeflag=?TYPE_BLOCK, + devmajor=Fi#file_info.major_device, + devminor=Fi#file_info.minor_device}; +do_fileinfo_to_header(Header, #file_info{type=other,mode=Mode}, _Link) + when (Mode band ?S_IFMT) =:= ?S_FIFO -> + Header#tar_header{typeflag=?TYPE_FIFO}; +do_fileinfo_to_header(Header, Fi, _Link) -> + {error, {invalid_file_type, Header#tar_header.name, Fi}}. + +is_ascii(Str) when is_list(Str) -> + not lists:any(fun (Char) -> Char >= 16#80 end, Str); +is_ascii(Bin) when is_binary(Bin) -> + is_ascii1(Bin). + +is_ascii1(<<>>) -> + true; +is_ascii1(<<C,_Rest/binary>>) when C >= 16#80 -> + false; +is_ascii1(<<_, Rest/binary>>) -> + is_ascii1(Rest). + +to_ascii(Str) when is_list(Str) -> + case is_ascii(Str) of + true -> + unicode:characters_to_binary(Str); + false -> + Chars = lists:filter(fun (Char) -> Char < 16#80 end, Str), + unicode:characters_to_binary(Chars) + end; +to_ascii(Bin) when is_binary(Bin) -> + to_ascii(Bin, <<>>). +to_ascii(<<>>, Acc) -> + Acc; +to_ascii(<<C, Rest/binary>>, Acc) when C < 16#80 -> + to_ascii(Rest, <<Acc/binary,C>>); +to_ascii(<<_, Rest/binary>>, Acc) -> + to_ascii(Rest, Acc). + +is_header_only_type(?TYPE_SYMLINK) -> true; +is_header_only_type(?TYPE_LINK) -> true; +is_header_only_type(?TYPE_DIR) -> true; +is_header_only_type(_) -> false. + +posix_to_erlang_time(Sec) -> + OneMillion = 1000000, + Time = calendar:now_to_datetime({Sec div OneMillion, Sec rem OneMillion, 0}), + erlang:universaltime_to_localtime(Time). 
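compute_checksum/1 and compute_signed_checksum/1 earlier in this file implement the rule that the header checksum is the sum of all 512 header bytes with the 8-byte checksum field itself (offset 148 in the V7 layout) counted as ASCII spaces; the signed variant only exists because some historic tar programs summed signed bytes. An equivalent standalone sketch of the unsigned sum (checksum_of/1 is not part of the module):

    %% Sketch: unsigned header checksum with the checksum field blanked out.
    checksum_of(<<Before:148/binary, _Chk:8/binary, After:356/binary>>) ->
        lists:sum(binary_to_list(Before)) + 8 * $\s + lists:sum(binary_to_list(After)).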
+ +foldl_read(#reader{access=read}=Reader, Fun, Accu, #read_opts{}=Opts) + when is_function(Fun,4) -> + case foldl_read0(Reader, Fun, Accu, Opts) of + {ok, Result, _Reader2} -> + Result; + {error, _} = Err -> + Err + end; +foldl_read(#reader{access=Access}, _Fun, _Accu, _Opts) -> + {error, {read_mode_expected, Access}}; +foldl_read(TarName, Fun, Accu, #read_opts{}=Opts) + when is_function(Fun,4) -> + try open(TarName, [read|Opts#read_opts.open_mode]) of + {ok, #reader{access=read}=Reader} -> + try + foldl_read(Reader, Fun, Accu, Opts) + after + _ = close(Reader) + end; + {error, _} = Err -> + Err + catch + throw:Err -> + Err end. -%% Get the name of the file from the prefix and name fields of the -%% tar header. - -get_name(Bin0) -> - List0 = get_name_raw(Bin0), - case file:native_name_encoding() of - utf8 -> - Bin = list_to_binary(List0), - case unicode:characters_to_list(Bin) of - {error,_,_} -> - List0; - List when is_list(List) -> - List - end; - latin1 -> - List0 +foldl_read0(Reader, Fun, Accu, Opts) -> + try foldl_read1(Fun, Accu, Reader, Opts, #{}) of + {ok,_,_} = Ok -> + Ok + catch + throw:{error, {Reason, Format, Args}} -> + read_verbose(Opts, Format, Args), + {error, Reason}; + throw:Err -> + Err end. -get_name_raw(Bin) -> - Name = from_string(Bin, ?th_name, ?th_name_len), - case binary_to_list(Bin, ?th_prefix+1, ?th_prefix+1) of - [0] -> - Name; - [_] -> - Prefix = binary_to_list(Bin, ?th_prefix+1, byte_size(Bin)), - lists:reverse(remove_nulls(Prefix), [$/|Name]) +foldl_read1(Fun, Accu0, Reader0, Opts, ExtraHeaders) -> + {ok, Reader1} = skip_unread(Reader0), + case get_header(Reader1) of + eof -> + Fun(eof, Reader1, Opts, Accu0); + {Header, Reader2} -> + case Header#tar_header.typeflag of + ?TYPE_X_HEADER -> + {ExtraHeaders2, Reader3} = parse_pax(Reader2), + ExtraHeaders3 = maps:merge(ExtraHeaders, ExtraHeaders2), + foldl_read1(Fun, Accu0, Reader3, Opts, ExtraHeaders3); + ?TYPE_GNU_LONGNAME -> + {RealName, Reader3} = get_real_name(Reader2), + ExtraHeaders2 = maps:put(?PAX_PATH, + parse_string(RealName), ExtraHeaders), + foldl_read1(Fun, Accu0, Reader3, Opts, ExtraHeaders2); + ?TYPE_GNU_LONGLINK -> + {RealName, Reader3} = get_real_name(Reader2), + ExtraHeaders2 = maps:put(?PAX_LINKPATH, + parse_string(RealName), ExtraHeaders), + foldl_read1(Fun, Accu0, Reader3, Opts, ExtraHeaders2); + _ -> + Header1 = merge_pax(Header, ExtraHeaders), + {ok, NewAccu, Reader3} = Fun(Header1, Reader2, Opts, Accu0), + foldl_read1(Fun, NewAccu, Reader3, Opts, #{}) + end end. -from_string(Bin, Pos, Len) -> - lists:reverse(remove_nulls(binary_to_list(Bin, Pos+1, Pos+Len))). - -%% Returns all characters up to (but not including) the first null -%% character, in REVERSE order. - -remove_nulls(List) -> - remove_nulls(List, []). - -remove_nulls([0|_], Result) -> - remove_nulls([], Result); -remove_nulls([C|Rest], Result) -> - remove_nulls(Rest, [C|Result]); -remove_nulls([], Result) -> - Result. - -from_octal(Bin, Pos, Len) -> - from_octal(binary_to_list(Bin, Pos+1, Pos+Len)). - -from_octal([$\s|Rest]) -> - from_octal(Rest); -from_octal([Digit|Rest]) when $0 =< Digit, Digit =< $7 -> - from_octal(Rest, Digit-$0); -from_octal(Bin) when is_binary(Bin) -> - from_octal(binary_to_list(Bin)); -from_octal(Other) -> - throw({error, {bad_header, "Bad octal number: ~p", [Other]}}). 
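A hedged sketch (not from the patch) of the callback shape foldl_read1/5 drives, derived from how Fun is invoked above; the function name is hypothetical. The fold callback receives each merged header and finally the atom eof, and must return {ok, Acc, Reader} in both cases:

    %% Illustrative only: collect entry names; 'eof' terminates the fold.
    collect_names(eof, Reader, _Opts, Names) ->
        {ok, lists:reverse(Names), Reader};
    collect_names(#tar_header{name=Name}, Reader, _Opts, Names) ->
        {ok, [Name|Names], Reader}.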
- -from_octal([Digit|Rest], Result) when $0 =< Digit, Digit =< $7 -> - from_octal(Rest, Result*8+Digit-$0); -from_octal([$\s|_], Result) -> - Result; -from_octal([0|_], Result) -> - Result; -from_octal(Other, _) -> - throw({error, {bad_header, "Bad contents in octal field: ~p", [Other]}}). - -%% Retrieves the next element from the archive. -%% Returns {ok, Bin} | eof | {error, Reason} - -get_element(File, #tar_header{size = 0}) -> - skip_to_next(File), - {ok,<<>>}; -get_element(File, #tar_header{size = Size}) -> - case do_read(File, Size) of - {ok,Bin}=Res when byte_size(Bin) =:= Size -> - skip_to_next(File), - Res; - {ok,List} when length(List) =:= Size -> - skip_to_next(File), - {ok,list_to_binary(List)}; - {ok,_} -> throw({error,eof}); - {error, Reason} -> throw({error, Reason}); - eof -> throw({error,eof}) +%% Applies all known PAX attributes to the current tar header +-spec merge_pax(tar_header(), #{binary() => binary()}) -> tar_header(). +merge_pax(Header, ExtraHeaders) when is_map(ExtraHeaders) -> + do_merge_pax(Header, maps:to_list(ExtraHeaders)). + +do_merge_pax(Header, []) -> + Header; +do_merge_pax(Header, [{?PAX_PATH, Path}|Rest]) -> + do_merge_pax(Header#tar_header{name=unicode:characters_to_list(Path)}, Rest); +do_merge_pax(Header, [{?PAX_LINKPATH, LinkPath}|Rest]) -> + do_merge_pax(Header#tar_header{linkname=unicode:characters_to_list(LinkPath)}, Rest); +do_merge_pax(Header, [{?PAX_GNAME, Gname}|Rest]) -> + do_merge_pax(Header#tar_header{gname=unicode:characters_to_list(Gname)}, Rest); +do_merge_pax(Header, [{?PAX_UNAME, Uname}|Rest]) -> + do_merge_pax(Header#tar_header{uname=unicode:characters_to_list(Uname)}, Rest); +do_merge_pax(Header, [{?PAX_UID, Uid}|Rest]) -> + Uid2 = binary_to_integer(Uid), + do_merge_pax(Header#tar_header{uid=Uid2}, Rest); +do_merge_pax(Header, [{?PAX_GID, Gid}|Rest]) -> + Gid2 = binary_to_integer(Gid), + do_merge_pax(Header#tar_header{gid=Gid2}, Rest); +do_merge_pax(Header, [{?PAX_ATIME, Atime}|Rest]) -> + Atime2 = parse_pax_time(Atime), + do_merge_pax(Header#tar_header{atime=Atime2}, Rest); +do_merge_pax(Header, [{?PAX_MTIME, Mtime}|Rest]) -> + Mtime2 = parse_pax_time(Mtime), + do_merge_pax(Header#tar_header{mtime=Mtime2}, Rest); +do_merge_pax(Header, [{?PAX_CTIME, Ctime}|Rest]) -> + Ctime2 = parse_pax_time(Ctime), + do_merge_pax(Header#tar_header{ctime=Ctime2}, Rest); +do_merge_pax(Header, [{?PAX_SIZE, Size}|Rest]) -> + Size2 = binary_to_integer(Size), + do_merge_pax(Header#tar_header{size=Size2}, Rest); +do_merge_pax(Header, [{<<?PAX_XATTR_STR, _Key/binary>>, _Value}|Rest]) -> + do_merge_pax(Header, Rest); +do_merge_pax(Header, [_Ignore|Rest]) -> + do_merge_pax(Header, Rest). + +%% Returns the time since UNIX epoch as a datetime +-spec parse_pax_time(binary()) -> calendar:datetime(). 
+parse_pax_time(Bin) when is_binary(Bin) -> + TotalNano = case binary:split(Bin, [<<$.>>]) of + [SecondsStr, NanoStr0] -> + Seconds = binary_to_integer(SecondsStr), + if byte_size(NanoStr0) < ?MAX_NANO_INT_SIZE -> + %% right pad + PaddingN = ?MAX_NANO_INT_SIZE-byte_size(NanoStr0), + Padding = binary:copy(<<$0>>, PaddingN), + NanoStr1 = <<NanoStr0/binary,Padding/binary>>, + Nano = binary_to_integer(NanoStr1), + (Seconds*?BILLION)+Nano; + byte_size(NanoStr0) > ?MAX_NANO_INT_SIZE -> + %% right truncate + NanoStr1 = binary_part(NanoStr0, 0, ?MAX_NANO_INT_SIZE), + Nano = binary_to_integer(NanoStr1), + (Seconds*?BILLION)+Nano; + true -> + (Seconds*?BILLION)+binary_to_integer(NanoStr0) + end; + [SecondsStr] -> + binary_to_integer(SecondsStr)*?BILLION + end, + %% truncate to microseconds + Micro = TotalNano div 1000, + Mega = Micro div 1000000000000, + Secs = Micro div 1000000 - (Mega*1000000), + Micro2 = Micro rem 1000000, + calendar:now_to_datetime({Mega, Secs, Micro2}). + +%% Given a regular file reader, reads the whole file and +%% parses all extended attributes it contains. +parse_pax(#reg_file_reader{handle=Handle,num_bytes=0}) -> + {#{}, Handle}; +parse_pax(#reg_file_reader{handle=Handle0,num_bytes=NumBytes}) -> + case do_read(Handle0, NumBytes) of + {ok, Bytes, Handle1} -> + do_parse_pax(Handle1, Bytes, #{}); + {error, _} = Err -> + throw(Err) end. -%% Verify the checksum in the header. First try an unsigned addition -%% of all bytes in the header (as it should be according to Posix). - -verify_checksum(Bin) -> - <<H1:?th_chksum/binary,CheckStr:?th_chksum_len/binary,H2/binary>> = Bin, - case checksum(H1) + checksum(H2) of - 0 -> eof; - Checksum0 -> - Csum = from_octal(CheckStr), - CsumInit = ?th_chksum_len * $\s, - case Checksum0 + CsumInit of - Csum -> ok; - Unsigned -> - verify_checksum(H1, H2, CsumInit, Csum, Unsigned) - end +do_parse_pax(Reader, <<>>, Headers) -> + {Headers, Reader}; +do_parse_pax(Reader, Bin, Headers) -> + {Key, Value, Residual} = parse_pax_record(Bin), + NewHeaders = maps:put(Key, Value, Headers), + do_parse_pax(Reader, Residual, NewHeaders). + +%% Parse an extended attribute +parse_pax_record(Bin) when is_binary(Bin) -> + case binary:split(Bin, [<<$\n>>]) of + [Record, Residual] -> + case binary:split(Record, [<<$\s>>], [trim_all]) of + [_Len, Record1] -> + case binary:split(Record1, [<<$=>>], [trim_all]) of + [AttrName, AttrValue] -> + {AttrName, AttrValue, Residual}; + _Other -> + throw({error, malformed_pax_record}) + end; + _Other -> + throw({error, malformed_pax_record}) + end; + _Other -> + throw({error, malformed_pax_record}) end. -%% The checksums didn't match. Now try a signed addition. +get_real_name(#reg_file_reader{handle=Handle,num_bytes=0}) -> + {"", Handle}; +get_real_name(#reg_file_reader{handle=Handle0,num_bytes=NumBytes}) -> + case do_read(Handle0, NumBytes) of + {ok, RealName, Handle1} -> + {RealName, Handle1}; + {error, _} = Err -> + throw(Err) + end; +get_real_name(#sparse_file_reader{num_bytes=NumBytes}=Reader0) -> + case do_read(Reader0, NumBytes) of + {ok, RealName, Reader1} -> + {RealName, Reader1}; + {error, _} = Err -> + throw(Err) + end. 
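For context (not part of the patch), a PAX extended header body is a sequence of "<length> <key>=<value>\n" records, which is what parse_pax_record/1 above splits apart; merge_pax/2 then maps keys such as path, size and mtime onto the following entry's tar_header:

    %% Illustrative only: one 30-byte PAX record.
    Rec = <<"30 mtime=1350244992.023960108\n">>,
    {<<"mtime">>, <<"1350244992.023960108">>, <<>>} = parse_pax_record(Rec).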
-verify_checksum(H1, H2, Csum, ShouldBe, Unsigned) -> - case signed_sum(binary_to_list(H1), signed_sum(binary_to_list(H2), Csum)) of - ShouldBe -> ok; - Signed -> - throw({error, - {bad_header, - "Incorrect directory checksum ~w (~w), should be ~w", - [Signed, Unsigned, ShouldBe]}}) +%% Skip the remaining bytes for the current file entry +skip_file(#reg_file_reader{handle=Handle0,pos=Pos,size=Size}=Reader) -> + Padding = skip_padding(Size), + AbsPos = Handle0#reader.pos + (Size-Pos) + Padding, + case do_position(Handle0, AbsPos) of + {ok, _, Handle1} -> + Reader#reg_file_reader{handle=Handle1,num_bytes=0,pos=Size}; + Err -> + throw(Err) + end; +skip_file(#sparse_file_reader{pos=Pos,size=Size}=Reader) -> + case do_read(Reader, Size-Pos) of + {ok, _, Reader2} -> + Reader2; + Err -> + throw(Err) end. -signed_sum([C|Rest], Sum) when C < 128 -> - signed_sum(Rest, Sum+C); -signed_sum([C|Rest], Sum) -> - signed_sum(Rest, Sum+C-256); -signed_sum([], Sum) -> Sum. - -write_extracted_element(Header, Bin, Opts) - when Opts#read_opts.output =:= memory -> - case Header#tar_header.typeflag of - regular -> - {ok, {Header#tar_header.name, Bin}}; - _ -> - ok +skip_padding(0) -> + 0; +skip_padding(Size) when (Size rem ?BLOCK_SIZE) =:= 0 -> + 0; +skip_padding(Size) when Size =< ?BLOCK_SIZE -> + ?BLOCK_SIZE - Size; +skip_padding(Size) -> + ?BLOCK_SIZE - (Size rem ?BLOCK_SIZE). + +skip_unread(#reader{pos=Pos}=Reader0) when (Pos rem ?BLOCK_SIZE) > 0 -> + Padding = skip_padding(Pos + ?BLOCK_SIZE), + AbsPos = Pos + Padding, + case do_position(Reader0, AbsPos) of + {ok, _, Reader1} -> + {ok, Reader1}; + Err -> + throw(Err) + end; +skip_unread(#reader{}=Reader) -> + {ok, Reader}; +skip_unread(#reg_file_reader{handle=Handle,num_bytes=0}) -> + skip_unread(Handle); +skip_unread(#reg_file_reader{}=Reader) -> + #reg_file_reader{handle=Handle} = skip_file(Reader), + {ok, Handle}; +skip_unread(#sparse_file_reader{handle=Handle,num_bytes=0}) -> + skip_unread(Handle); +skip_unread(#sparse_file_reader{}=Reader) -> + #sparse_file_reader{handle=Handle} = skip_file(Reader), + {ok, Handle}. + +write_extracted_element(#tar_header{name=Name,typeflag=Type}, + Bin, + #read_opts{output=memory}=Opts) -> + case typeflag(Type) of + regular -> + read_verbose(Opts, "x ~ts~n", [Name]), + {ok, {Name, Bin}}; + _ -> + ok end; -write_extracted_element(Header, Bin, Opts) -> - Name = filename:absname(Header#tar_header.name, Opts#read_opts.cwd), - Created = - case Header#tar_header.typeflag of - regular -> - write_extracted_file(Name, Bin, Opts); - directory -> - create_extracted_dir(Name, Opts); - symlink -> - create_symlink(Name, Header, Opts); - Other -> % Ignore. - read_verbose(Opts, "x ~ts - unsupported type ~p~n", - [Name, Other]), - not_written - end, +write_extracted_element(#tar_header{name=Name0}=Header, Bin, Opts) -> + Name1 = filename:absname(Name0, Opts#read_opts.cwd), + Created = + case typeflag(Header#tar_header.typeflag) of + regular -> + create_regular(Name1, Name0, Bin, Opts); + directory -> + read_verbose(Opts, "x ~ts~n", [Name0]), + create_extracted_dir(Name1, Opts); + symlink -> + read_verbose(Opts, "x ~ts~n", [Name0]), + create_symlink(Name1, Header#tar_header.linkname, Opts); + Device when Device =:= char orelse Device =:= block -> + %% char/block devices will be created as empty files + %% and then have their major/minor device set later + create_regular(Name1, Name0, <<>>, Opts); + fifo -> + %% fifo devices will be created as empty files + create_regular(Name1, Name0, <<>>, Opts); + Other -> % Ignore. 
+ read_verbose(Opts, "x ~ts - unsupported type ~p~n", + [Name0, Other]), + not_written + end, case Created of - ok -> set_extracted_file_info(Name, Header); - not_written -> ok + ok -> set_extracted_file_info(Name1, Header); + not_written -> ok + end. + +create_regular(Name, NameInArchive, Bin, Opts) -> + case write_extracted_file(Name, Bin, Opts) of + not_written -> + read_verbose(Opts, "x ~ts - exists, not created~n", [NameInArchive]), + not_written; + Ok -> + read_verbose(Opts, "x ~ts~n", [NameInArchive]), + Ok end. create_extracted_dir(Name, _Opts) -> case file:make_dir(Name) of - ok -> ok; - {error,enotsup} -> not_written; - {error,eexist} -> not_written; - {error,enoent} -> make_dirs(Name, dir); - {error,Reason} -> throw({error, Reason}) + ok -> ok; + {error,enotsup} -> not_written; + {error,eexist} -> not_written; + {error,enoent} -> make_dirs(Name, dir); + {error,Reason} -> throw({error, Reason}) end. -create_symlink(Name, #tar_header{linkname=Linkname}=Header, Opts) -> +create_symlink(Name, Linkname, Opts) -> case file:make_symlink(Linkname, Name) of - ok -> ok; - {error,enoent} -> - ok = make_dirs(Name, file), - create_symlink(Name, Header, Opts); - {error,eexist} -> not_written; - {error,enotsup} -> - read_verbose(Opts, "x ~ts - symbolic links not supported~n", [Name]), - not_written; - {error,Reason} -> throw({error, Reason}) + ok -> ok; + {error,enoent} -> + ok = make_dirs(Name, file), + create_symlink(Name, Linkname, Opts); + {error,eexist} -> not_written; + {error,enotsup} -> + read_verbose(Opts, "x ~ts - symbolic links not supported~n", [Name]), + not_written; + {error,Reason} -> throw({error, Reason}) end. write_extracted_file(Name, Bin, Opts) -> Write = - case Opts#read_opts.keep_old_files of - true -> - case file:read_file_info(Name) of - {ok, _} -> false; - _ -> true - end; - false -> true - end, + case Opts#read_opts.keep_old_files of + true -> + case file:read_file_info(Name) of + {ok, _} -> false; + _ -> true + end; + false -> true + end, case Write of - true -> - read_verbose(Opts, "x ~ts~n", [Name]), - write_file(Name, Bin); - false -> - read_verbose(Opts, "x ~ts - exists, not created~n", [Name]), - not_written + true -> write_file(Name, Bin); + false -> not_written end. write_file(Name, Bin) -> case file:write_file(Name, Bin) of - ok -> ok; - {error,enoent} -> - ok = make_dirs(Name, file), - write_file(Name, Bin); - {error,Reason} -> - throw({error, Reason}) + ok -> ok; + {error,enoent} -> + ok = make_dirs(Name, file), + write_file(Name, Bin); + {error,Reason} -> + throw({error, Reason}) end. -set_extracted_file_info(_, #tar_header{typeflag = symlink}) -> ok; -set_extracted_file_info(Name, #tar_header{mode=Mode, mtime=Mtime}) -> - Info = #file_info{mode=Mode, mtime=posix_to_erlang_time(Mtime)}, +set_extracted_file_info(_, #tar_header{typeflag = ?TYPE_SYMLINK}) -> ok; +set_extracted_file_info(_, #tar_header{typeflag = ?TYPE_LINK}) -> ok; +set_extracted_file_info(Name, #tar_header{typeflag = ?TYPE_CHAR}=Header) -> + set_device_info(Name, Header); +set_extracted_file_info(Name, #tar_header{typeflag = ?TYPE_BLOCK}=Header) -> + set_device_info(Name, Header); +set_extracted_file_info(Name, #tar_header{mtime=Mtime,mode=Mode}) -> + Info = #file_info{mode=Mode, mtime=Mtime}, + file:write_file_info(Name, Info). 
+ +set_device_info(Name, #tar_header{}=Header) -> + Mtime = Header#tar_header.mtime, + Mode = Header#tar_header.mode, + Devmajor = Header#tar_header.devmajor, + Devminor = Header#tar_header.devminor, + Info = #file_info{ + mode=Mode, + mtime=Mtime, + major_device=Devmajor, + minor_device=Devminor + }, file:write_file_info(Name, Info). %% Makes all directories leading up to the file. make_dirs(Name, file) -> - filelib:ensure_dir(Name); + filelib:ensure_dir(Name); make_dirs(Name, dir) -> - filelib:ensure_dir(filename:join(Name,"*")). + filelib:ensure_dir(filename:join(Name,"*")). %% Prints the message on if the verbose option is given (for reading). - read_verbose(#read_opts{verbose=true}, Format, Args) -> - io:format(Format, Args), - io:nl(); + io:format(Format, Args); read_verbose(_, _, _) -> ok. - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%%% -%%% Utility functions. -%%% -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -%% Returns the checksum of a binary. - -checksum(Bin) -> checksum(Bin, 0). - -checksum(<<A,B,C,D,E,F,G,H,T/binary>>, Sum) -> - checksum(T, Sum+A+B+C+D+E+F+G+H); -checksum(<<A,T/binary>>, Sum) -> - checksum(T, Sum+A); -checksum(<<>>, Sum) -> Sum. - -%% Returns a list of zeroes to pad out to the given block size. - -padding(Size, BlockSize) -> - zeroes(pad_size(Size, BlockSize)). - -pad_size(Size, BlockSize) -> - case Size rem BlockSize of - 0 -> 0; - Rem -> BlockSize-Rem - end. - -zeroes(0) -> []; -zeroes(1) -> [0]; -zeroes(2) -> [0,0]; -zeroes(Number) -> - Half = zeroes(Number div 2), - case Number rem 2 of - 0 -> [Half|Half]; - 1 -> [Half|[0|Half]] - end. - -%% Skips the given number of bytes rounded up to an even record. - -skip(File, Size) -> - %% Note: There is no point in handling failure to get the current position - %% in the file. If it doesn't work, something serious is wrong. - Amount = ((Size + ?record_size - 1) div ?record_size) * ?record_size, - {ok,_} = do_position(File, {cur, Amount}), - ok. - -%% Skips to the next record in the file. - -skip_to_next(File) -> - %% Note: There is no point in handling failure to get the current position - %% in the file. If it doesn't work, something serious is wrong. - {ok, Position} = do_position(File, {cur, 0}), - NewPosition = ((Position + ?record_size - 1) div ?record_size) * ?record_size, - {ok,NewPosition} = do_position(File, NewPosition), - ok. - %% Prints the message on if the verbose option is given. - add_verbose(#add_opts{verbose=true}, Format, Args) -> io:format(Format, Args); add_verbose(_, _, _) -> ok. -%% Converts a tuple containing the time to a Posix time (seconds -%% since Jan 1, 1970). +%%%%%%%%%%%%%%%%%% +%% I/O primitives +%%%%%%%%%%%%%%%%%% + +do_write(#reader{handle=Handle,func=Fun}=Reader0, Data) + when is_function(Fun,2) -> + case Fun(write,{Handle,Data}) of + ok -> + {ok, Pos, Reader1} = do_position(Reader0, {cur,0}), + {ok, Reader1#reader{pos=Pos}}; + {error, _} = Err -> + Err + end. -posix_time(Time) -> - EpochStart = {{1970,1,1},{0,0,0}}, - {Days,{Hour,Min,Sec}} = calendar:time_difference(EpochStart, Time), - 86400*Days + 3600*Hour + 60*Min + Sec. 
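The do_write/do_position/do_read primitives above drive a user-supplied I/O fun. A sketch (not from the patch) of such a fun over a plain file descriptor, in the shape accepted by erl_tar:init/3; the variable names are illustrative:

    %% Illustrative only: a file-backed I/O wrapper fun.
    Fun = fun(write, {Fd, Data})   -> file:write(Fd, Data);
             (position, {Fd, Pos}) -> file:position(Fd, Pos);
             (read2, {Fd, Size})   -> file:read(Fd, Size);
             (close, Fd)           -> file:close(Fd)
          end.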
+do_copy(#reader{func=Fun}=Reader, Source, #add_opts{chunk_size=0}=Opts) + when is_function(Fun, 2) -> + do_copy(Reader, Source, Opts#add_opts{chunk_size=65536}); +do_copy(#reader{func=Fun}=Reader, Source, #add_opts{chunk_size=ChunkSize}) + when is_function(Fun, 2) -> + case file:open(Source, [read, binary]) of + {ok, SourceFd} -> + case copy_chunked(Reader, SourceFd, ChunkSize, 0) of + {ok, _Copied, _Reader2} = Ok-> + _ = file:close(SourceFd), + Ok; + Err -> + _ = file:close(SourceFd), + throw(Err) + end; + Err -> + throw(Err) + end. -posix_to_erlang_time(Sec) -> - OneMillion = 1000000, - Time = calendar:now_to_datetime({Sec div OneMillion, Sec rem OneMillion, 0}), - erlang:universaltime_to_localtime(Time). +copy_chunked(#reader{}=Reader, Source, ChunkSize, Copied) -> + case file:read(Source, ChunkSize) of + {ok, Bin} -> + {ok, Reader2} = do_write(Reader, Bin), + copy_chunked(Reader2, Source, ChunkSize, Copied+byte_size(Bin)); + eof -> + {ok, Copied, Reader}; + Other -> + Other + end. -read_file_and_info(Name, Opts) -> - ReadInfo = Opts#add_opts.read_info, - case ReadInfo(Name) of - {ok,Info} when Info#file_info.type =:= regular, - Opts#add_opts.chunk_size>0 -> - {ok,chunked,Info}; - {ok,Info} when Info#file_info.type =:= regular -> - case file:read_file(Name) of - {ok,Bin} -> - {ok,Bin,Info}; - Error -> - Error - end; - {ok,Info} when Info#file_info.type =:= symlink -> - case file:read_link(Name) of - {ok,PointsTo} -> - {ok,PointsTo,Info}; - Error -> - Error - end; - {ok, Info} -> - {ok,[],Info}; - Error -> - Error + +do_position(#reader{handle=Handle,func=Fun}=Reader, Pos) + when is_function(Fun,2)-> + case Fun(position, {Handle,Pos}) of + {ok, NewPos} -> + %% since Pos may not always be an absolute seek, + %% make sure we update the reader with the new absolute position + {ok, AbsPos} = Fun(position, {Handle, {cur, 0}}), + {ok, NewPos, Reader#reader{pos=AbsPos}}; + Other -> + Other end. -foreach_while_ok(Fun, [First|Rest]) -> - case Fun(First) of - ok -> foreach_while_ok(Fun, Rest); - Other -> Other +do_read(#reg_file_reader{handle=Handle,pos=Pos,size=Size}=Reader, Len) -> + NumBytes = Size - Pos, + ActualLen = if NumBytes - Len < 0 -> NumBytes; true -> Len end, + case do_read(Handle, ActualLen) of + {ok, Bin, Handle2} -> + NewPos = Pos + ActualLen, + NumBytes2 = Size - NewPos, + Reader1 = Reader#reg_file_reader{ + handle=Handle2, + pos=NewPos, + num_bytes=NumBytes2}, + {ok, Bin, Reader1}; + Other -> + Other end; -foreach_while_ok(_, []) -> ok. - -open_mode(Mode) -> - open_mode(Mode, false, [raw], []). +do_read(#sparse_file_reader{}=Reader, Len) -> + do_sparse_read(Reader, Len); +do_read(#reader{pos=Pos,handle=Handle,func=Fun}=Reader, Len) + when is_function(Fun,2)-> + %% Always convert to binary internally + case Fun(read2,{Handle,Len}) of + {ok, List} when is_list(List) -> + Bin = list_to_binary(List), + NewPos = Pos+byte_size(Bin), + {ok, Bin, Reader#reader{pos=NewPos}}; + {ok, Bin} when is_binary(Bin) -> + NewPos = Pos+byte_size(Bin), + {ok, Bin, Reader#reader{pos=NewPos}}; + Other -> + Other + end. 
-open_mode(read, _, Raw, _) -> - {ok, read, Raw, []}; -open_mode(write, _, Raw, _) -> - {ok, write, Raw, []}; -open_mode([read|Rest], false, Raw, Opts) -> - open_mode(Rest, read, Raw, Opts); -open_mode([write|Rest], false, Raw, Opts) -> - open_mode(Rest, write, Raw, Opts); -open_mode([compressed|Rest], Access, Raw, Opts) -> - open_mode(Rest, Access, Raw, [compressed|Opts]); -open_mode([cooked|Rest], Access, _Raw, Opts) -> - open_mode(Rest, Access, [], Opts); -open_mode([], Access, Raw, Opts) -> - {ok, Access, Raw, Opts}; -open_mode(_, _, _, _) -> - {error, einval}. -%%%================================================================ -do_write({tar_descriptor,UsrHandle,Fun}, Data) -> Fun(write,{UsrHandle,Data}). +do_sparse_read(Reader, Len) -> + do_sparse_read(Reader, Len, <<>>). + +do_sparse_read(#sparse_file_reader{sparse_map=[#sparse_entry{num_bytes=0}|Entries] + }=Reader0, Len, Acc) -> + %% skip all empty fragments + Reader1 = Reader0#sparse_file_reader{sparse_map=Entries}, + do_sparse_read(Reader1, Len, Acc); +do_sparse_read(#sparse_file_reader{sparse_map=[], + pos=Pos,size=Size}=Reader0, Len, Acc) + when Pos < Size -> + %% if there are no more fragments, it is possible that there is one last sparse hole + %% this behaviour matches the BSD tar utility + %% however, GNU tar stops returning data even if we haven't reached the end + {ok, Bin, Reader1} = read_sparse_hole(Reader0, Size, Len), + do_sparse_read(Reader1, Len-byte_size(Bin), <<Acc/binary,Bin/binary>>); +do_sparse_read(#sparse_file_reader{sparse_map=[]}=Reader, _Len, Acc) -> + {ok, Acc, Reader}; +do_sparse_read(#sparse_file_reader{}=Reader, 0, Acc) -> + {ok, Acc, Reader}; +do_sparse_read(#sparse_file_reader{sparse_map=[#sparse_entry{offset=Offset}|_], + pos=Pos}=Reader0, Len, Acc) + when Pos < Offset -> + {ok, Bin, Reader1} = read_sparse_hole(Reader0, Offset, Offset-Pos), + do_sparse_read(Reader1, Len-byte_size(Bin), <<Acc/binary,Bin/binary>>); +do_sparse_read(#sparse_file_reader{sparse_map=[Entry|Entries], + pos=Pos}=Reader0, Len, Acc) -> + %% we're in a data fragment, so read from it + %% end offset of fragment + EndPos = Entry#sparse_entry.offset + Entry#sparse_entry.num_bytes, + %% bytes left in fragment + NumBytes = EndPos - Pos, + ActualLen = if Len > NumBytes -> NumBytes; true -> Len end, + case do_read(Reader0#sparse_file_reader.handle, ActualLen) of + {ok, Bin, Handle} -> + BytesRead = byte_size(Bin), + ActualEndPos = Pos+BytesRead, + Reader1 = if ActualEndPos =:= EndPos -> + Reader0#sparse_file_reader{sparse_map=Entries}; + true -> + Reader0 + end, + Size = Reader1#sparse_file_reader.size, + NumBytes2 = Size - ActualEndPos, + Reader2 = Reader1#sparse_file_reader{ + handle=Handle, + pos=ActualEndPos, + num_bytes=NumBytes2}, + do_sparse_read(Reader2, Len-byte_size(Bin), <<Acc/binary,Bin/binary>>); + Other -> + Other + end. + +%% Reads a sparse hole ending at Offset +read_sparse_hole(#sparse_file_reader{pos=Pos}=Reader, Offset, Len) -> + N = Offset - Pos, + N2 = if N > Len -> + Len; + true -> + N + end, + Bin = <<0:N2/unit:8>>, + NumBytes = Reader#sparse_file_reader.size - (Pos+N2), + {ok, Bin, Reader#sparse_file_reader{ + num_bytes=NumBytes, + pos=Pos+N2}}. + +-spec do_close(reader()) -> ok | {error, term()}. +do_close(#reader{handle=Handle,func=Fun}) when is_function(Fun,2) -> + Fun(close,Handle). + +%%%%%%%%%%%%%%%%%% +%% Option parsing +%%%%%%%%%%%%%%%%%% -do_position({tar_descriptor,UsrHandle,Fun}, Pos) -> Fun(position,{UsrHandle,Pos}). +extract_opts(List) -> + extract_opts(List, default_options()). 
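To make the sparse reading above concrete, a small illustration (not part of the patch), assuming the records from erl_tar.hrl: a sparse map lists only the data fragments, and read_sparse_hole/3 serves every byte outside them as zeros.

    %% Illustrative only: an 8192-byte file whose only stored data is
    %% 512 bytes at offset 4096; reads in [0,4096) and [4608,8192) yield zeros.
    SparseMap = [#sparse_entry{offset = 4096, num_bytes = 512}].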
-do_read({tar_descriptor,UsrHandle,Fun}, Len) -> Fun(read2,{UsrHandle,Len}). +table_opts(List) -> + read_opts(List, default_options()). + +default_options() -> + {ok, Cwd} = file:get_cwd(), + #read_opts{cwd=Cwd}. -do_close({tar_descriptor,UsrHandle,Fun}) -> Fun(close,UsrHandle). +extract_opts([keep_old_files|Rest], Opts) -> + extract_opts(Rest, Opts#read_opts{keep_old_files=true}); +extract_opts([{cwd, Cwd}|Rest], Opts) -> + extract_opts(Rest, Opts#read_opts{cwd=Cwd}); +extract_opts([{files, Files}|Rest], Opts) -> + Set = ordsets:from_list(Files), + extract_opts(Rest, Opts#read_opts{files=Set}); +extract_opts([memory|Rest], Opts) -> + extract_opts(Rest, Opts#read_opts{output=memory}); +extract_opts([compressed|Rest], Opts=#read_opts{open_mode=OpenMode}) -> + extract_opts(Rest, Opts#read_opts{open_mode=[compressed|OpenMode]}); +extract_opts([cooked|Rest], Opts=#read_opts{open_mode=OpenMode}) -> + extract_opts(Rest, Opts#read_opts{open_mode=[cooked|OpenMode]}); +extract_opts([verbose|Rest], Opts) -> + extract_opts(Rest, Opts#read_opts{verbose=true}); +extract_opts([Other|Rest], Opts) -> + extract_opts(Rest, read_opts([Other], Opts)); +extract_opts([], Opts) -> + Opts. + +read_opts([compressed|Rest], Opts=#read_opts{open_mode=OpenMode}) -> + read_opts(Rest, Opts#read_opts{open_mode=[compressed|OpenMode]}); +read_opts([cooked|Rest], Opts=#read_opts{open_mode=OpenMode}) -> + read_opts(Rest, Opts#read_opts{open_mode=[cooked|OpenMode]}); +read_opts([verbose|Rest], Opts) -> + read_opts(Rest, Opts#read_opts{verbose=true}); +read_opts([_|Rest], Opts) -> + read_opts(Rest, Opts); +read_opts([], Opts) -> + Opts. diff --git a/lib/stdlib/src/erl_tar.hrl b/lib/stdlib/src/erl_tar.hrl new file mode 100644 index 0000000000..d646d02989 --- /dev/null +++ b/lib/stdlib/src/erl_tar.hrl @@ -0,0 +1,394 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2017. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% %CopyrightEnd% + +%% Options used when adding files to a tar archive. +-record(add_opts, { + read_info, %% Fun to use for read file/link info. + chunk_size = 0, %% For file reading when sending to sftp. 0=do not chunk + verbose = false}). %% Verbose on/off. +-type add_opts() :: #add_opts{}. + +%% Options used when reading a tar archive. +-record(read_opts, { + cwd :: string(), %% Current working directory. + keep_old_files = false :: boolean(), %% Owerwrite or not. + files = all, %% Set of files to extract (or all) + output = file :: 'file' | 'memory', + open_mode = [], %% Open mode options. + verbose = false :: boolean()}). %% Verbose on/off. +-type read_opts() :: #read_opts{}. + +-type add_opt() :: dereference | + verbose | + {chunks, pos_integer()}. + +-type extract_opt() :: {cwd, string()} | + {files, [string()]} | + compressed | + cooked | + memory | + keep_old_files | + verbose. + +-type create_opt() :: compressed | + cooked | + dereference | + verbose. + +-type filelist() :: [file:filename() | + {string(), binary()} | + {string(), file:filename()}]. 
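A usage sketch (not from the patch) of the option lists that extract_opts/1 and read_opts/1 above parse, as passed through the public erl_tar API; file names are hypothetical:

    %% Illustrative only: extract to disk, and extract to memory.
    ok = erl_tar:extract("release.tar.gz",
                         [compressed, {cwd, "/tmp/unpack"}, verbose, keep_old_files]),
    {ok, NameBinList} = erl_tar:extract("release.tar.gz", [compressed, memory]).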
+ +%% The tar header, once fully parsed. +-record(tar_header, { + name = "" :: string(), %% name of header file entry + mode = 8#100644 :: non_neg_integer(), %% permission and mode bits + uid = 0 :: non_neg_integer(), %% user id of owner + gid = 0 :: non_neg_integer(), %% group id of owner + size = 0 :: non_neg_integer(), %% length in bytes + mtime :: calendar:datetime(), %% modified time + typeflag :: char(), %% type of header entry + linkname = "" :: string(), %% target name of link + uname = "" :: string(), %% user name of owner + gname = "" :: string(), %% group name of owner + devmajor = 0 :: non_neg_integer(), %% major number of character or block device + devminor = 0 :: non_neg_integer(), %% minor number of character or block device + atime :: calendar:datetime(), %% access time + ctime :: calendar:datetime() %% status change time + }). +-type tar_header() :: #tar_header{}. + +%% Metadata for a sparse file fragment +-record(sparse_entry, { + offset = 0 :: non_neg_integer(), + num_bytes = 0 :: non_neg_integer()}). +-type sparse_entry() :: #sparse_entry{}. +%% Contains metadata about fragments of a sparse file +-record(sparse_array, { + entries = [] :: [sparse_entry()], + is_extended = false :: boolean(), + max_entries = 0 :: non_neg_integer()}). +-type sparse_array() :: #sparse_array{}. +%% A subset of tar header fields common to all tar implementations +-record(header_v7, { + name :: binary(), + mode :: binary(), %% octal + uid :: binary(), %% integer + gid :: binary(), %% integer + size :: binary(), %% integer + mtime :: binary(), %% integer + checksum :: binary(), %% integer + typeflag :: byte(), %% char + linkname :: binary()}). +-type header_v7() :: #header_v7{}. +%% The set of fields specific to GNU tar formatted archives +-record(header_gnu, { + header_v7 :: header_v7(), + magic :: binary(), + version :: binary(), + uname :: binary(), + gname :: binary(), + devmajor :: binary(), %% integer + devminor :: binary(), %% integer + atime :: binary(), %% integer + ctime :: binary(), %% integer + sparse :: sparse_array(), + real_size :: binary()}). %% integer +-type header_gnu() :: #header_gnu{}. +%% The set of fields specific to STAR-formatted archives +-record(header_star, { + header_v7 :: header_v7(), + magic :: binary(), + version :: binary(), + uname :: binary(), + gname :: binary(), + devmajor :: binary(), %% integer + devminor :: binary(), %% integer + prefix :: binary(), + atime :: binary(), %% integer + ctime :: binary(), %% integer + trailer :: binary()}). +-type header_star() :: #header_star{}. +%% The set of fields specific to USTAR-formatted archives +-record(header_ustar, { + header_v7 :: header_v7(), + magic :: binary(), + version :: binary(), + uname :: binary(), + gname :: binary(), + devmajor :: binary(), %% integer + devminor :: binary(), %% integer + prefix :: binary()}). +-type header_ustar() :: #header_ustar{}. + +-type header_fields() :: header_v7() | + header_gnu() | + header_star() | + header_ustar(). + +%% The overall tar reader, it holds the low-level file handle, +%% its access, position, and the I/O primitives wrapper. +-record(reader, { + handle :: file:io_device() | term(), + access :: read | write | ram, + pos = 0 :: non_neg_integer(), + func :: file_op() + }). +-type reader() :: #reader{}. +%% A reader for a regular file within the tar archive, +%% It tracks its current state relative to that file. +-record(reg_file_reader, { + handle :: reader(), + num_bytes = 0, + pos = 0, + size = 0 + }). +-type reg_file_reader() :: #reg_file_reader{}. 
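For illustration (not part of the patch), a tar_header value of the shape the reader produces for a regular file, using the defaults and types declared above; the field values are hypothetical:

    %% Illustrative only.
    #tar_header{name = "bin/start", mode = 8#755, uid = 0, gid = 0,
                size = 1024, mtime = {{2017,1,11},{14,37,1}},
                typeflag = $0, uname = "root", gname = "wheel"}.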
+%% A reader for a sparse file within the tar archive, +%% It tracks its current state relative to that file. +-record(sparse_file_reader, { + handle :: reader(), + num_bytes = 0, %% bytes remaining + pos = 0, %% pos + size = 0, %% total size of file + sparse_map = #sparse_array{} + }). +-type sparse_file_reader() :: #sparse_file_reader{}. + +%% Types for the readers +-type reader_type() :: reader() | reg_file_reader() | sparse_file_reader(). +-type handle() :: file:io_device() | term(). + +%% Type for the I/O primitive wrapper function +-type file_op() :: fun((write | close | read2 | position, + {handle(), iodata()} | handle() | {handle(), non_neg_integer()} + | {handle(), non_neg_integer()}) -> + ok | eof | {ok, string() | binary()} | {ok, non_neg_integer()} + | {error, term()}). + +%% These constants (except S_IFMT) are +%% used to determine what type of device +%% a file is. Namely, `S_IFMT band file_info.mode` +%% will equal one of these contants, and tells us +%% which type it is. The stdlib file_info record +%% does not differentiate between device types, and +%% will not allow us to differentiate between sockets +%% and named pipes. These constants are pulled from libc. +-define(S_IFMT, 61440). +-define(S_IFSOCK, 49152). %% socket +-define(S_FIFO, 4096). %% fifo/named pipe +-define(S_IFBLK, 24576). %% block device +-define(S_IFCHR, 8192). %% character device + +%% Typeflag constants for the tar header +-define(TYPE_REGULAR, $0). %% regular file +-define(TYPE_REGULAR_A, 0). %% regular file +-define(TYPE_LINK, $1). %% hard link +-define(TYPE_SYMLINK, $2). %% symbolic link +-define(TYPE_CHAR, $3). %% character device node +-define(TYPE_BLOCK, $4). %% block device node +-define(TYPE_DIR, $5). %% directory +-define(TYPE_FIFO, $6). %% fifo node +-define(TYPE_CONT, $7). %% reserved +-define(TYPE_X_HEADER, $x). %% extended header +-define(TYPE_X_GLOBAL_HEADER, $g). %% global extended header +-define(TYPE_GNU_LONGNAME, $L). %% next file has a long name +-define(TYPE_GNU_LONGLINK, $K). %% next file symlinks to a file with a long name +-define(TYPE_GNU_SPARSE, $S). %% sparse file + +%% Mode constants from tar spec +-define(MODE_ISUID, 4000). %% set uid +-define(MODE_ISGID, 2000). %% set gid +-define(MODE_ISVTX, 1000). %% save text (sticky bit) +-define(MODE_ISDIR, 40000). %% directory +-define(MODE_ISFIFO, 10000). %% fifo +-define(MODE_ISREG, 100000). %% regular file +-define(MODE_ISLNK, 120000). %% symbolic link +-define(MODE_ISBLK, 60000). %% block special file +-define(MODE_ISCHR, 20000). %% character special file +-define(MODE_ISSOCK, 140000). %% socket + +%% Keywords for PAX extended header +-define(PAX_ATIME, <<"atime">>). +-define(PAX_CHARSET, <<"charset">>). +-define(PAX_COMMENT, <<"comment">>). +-define(PAX_CTIME, <<"ctime">>). %% ctime is not a valid pax header +-define(PAX_GID, <<"gid">>). +-define(PAX_GNAME, <<"gname">>). +-define(PAX_LINKPATH, <<"linkpath">>). +-define(PAX_MTIME, <<"mtime">>). +-define(PAX_PATH, <<"path">>). +-define(PAX_SIZE, <<"size">>). +-define(PAX_UID, <<"uid">>). +-define(PAX_UNAME, <<"uname">>). +-define(PAX_XATTR, <<"SCHILY.xattr.">>). +-define(PAX_XATTR_STR, "SCHILY.xattr."). +-define(PAX_NONE, <<"">>). + +%% Tar format constants +%% Unknown format +-define(FORMAT_UNKNOWN, 0). +%% The format of the original Unix V7 tar tool prior to standardization +-define(FORMAT_V7, 1). +%% The old and new GNU formats, incompatible with USTAR. 
+%% This covers the old GNU sparse extension, but it does +%% not cover the GNU sparse extensions using PAX headers, +%% versions 0.0, 0.1, and 1.0; these fall under the PAX format. +-define(FORMAT_GNU, 2). +%% Schily's tar format, which is incompatible with USTAR. +%% This does not cover STAR extensions to the PAX format; these +%% fall under the PAX format. +-define(FORMAT_STAR, 3). +%% USTAR is the former standardization of tar defined in POSIX.1-1988, +%% it is incompatible with the GNU and STAR formats. +-define(FORMAT_USTAR, 4). +%% PAX is the latest standardization of tar defined in POSIX.1-2001. +%% This is an extension of USTAR and is "backwards compatible" with it. +%% +%% Some newer formats add their own extensions to PAX, such as GNU sparse +%% files and SCHILY extended attributes. Since they are backwards compatible +%% with PAX, they will be labelled as "PAX". +-define(FORMAT_PAX, 5). + +%% Magic constants +-define(MAGIC_GNU, <<"ustar ">>). +-define(VERSION_GNU, <<" \x00">>). +-define(MAGIC_USTAR, <<"ustar\x00">>). +-define(VERSION_USTAR, <<"00">>). +-define(TRAILER_STAR, <<"tar\x00">>). + +%% Size constants +-define(BLOCK_SIZE, 512). %% size of each block in a tar stream +-define(NAME_SIZE, 100). %% max length of the name field in USTAR format +-define(PREFIX_SIZE, 155). %% max length of the prefix field in USTAR format + +%% Maximum size of a nanosecond value as an integer +-define(MAX_NANO_INT_SIZE, 9). +%% Maximum size of a 64-bit signed integer +-define(MAX_INT64, (1 bsl 63 - 1)). + +-define(PAX_GNU_SPARSE_NUMBLOCKS, <<"GNU.sparse.numblocks">>). +-define(PAX_GNU_SPARSE_OFFSET, <<"GNU.sparse.offset">>). +-define(PAX_GNU_SPARSE_NUMBYTES, <<"GNU.sparse.numbytes">>). +-define(PAX_GNU_SPARSE_MAP, <<"GNU.sparse.map">>). +-define(PAX_GNU_SPARSE_NAME, <<"GNU.sparse.name">>). +-define(PAX_GNU_SPARSE_MAJOR, <<"GNU.sparse.major">>). +-define(PAX_GNU_SPARSE_MINOR, <<"GNU.sparse.minor">>). +-define(PAX_GNU_SPARSE_SIZE, <<"GNU.sparse.size">>). +-define(PAX_GNU_SPARSE_REALSIZE, <<"GNU.sparse.realsize">>). + +-define(V7_NAME, 0). +-define(V7_NAME_LEN, 100). +-define(V7_MODE, 100). +-define(V7_MODE_LEN, 8). +-define(V7_UID, 108). +-define(V7_UID_LEN, 8). +-define(V7_GID, 116). +-define(V7_GID_LEN, 8). +-define(V7_SIZE, 124). +-define(V7_SIZE_LEN, 12). +-define(V7_MTIME, 136). +-define(V7_MTIME_LEN, 12). +-define(V7_CHKSUM, 148). +-define(V7_CHKSUM_LEN, 8). +-define(V7_TYPE, 156). +-define(V7_TYPE_LEN, 1). +-define(V7_LINKNAME, 157). +-define(V7_LINKNAME_LEN, 100). + +-define(STAR_TRAILER, 508). +-define(STAR_TRAILER_LEN, 4). + +-define(USTAR_MAGIC, 257). +-define(USTAR_MAGIC_LEN, 6). +-define(USTAR_VERSION, 263). +-define(USTAR_VERSION_LEN, 2). +-define(USTAR_UNAME, 265). +-define(USTAR_UNAME_LEN, 32). +-define(USTAR_GNAME, 297). +-define(USTAR_GNAME_LEN, 32). +-define(USTAR_DEVMAJ, 329). +-define(USTAR_DEVMAJ_LEN, 8). +-define(USTAR_DEVMIN, 337). +-define(USTAR_DEVMIN_LEN, 8). +-define(USTAR_PREFIX, 345). +-define(USTAR_PREFIX_LEN, 155). + +-define(GNU_MAGIC, 257). +-define(GNU_MAGIC_LEN, 6). +-define(GNU_VERSION, 263). +-define(GNU_VERSION_LEN, 2). + +%% ?BLOCK_SIZE of zero-bytes. +%% Two of these in a row mark the end of an archive. 
+-define(ZERO_BLOCK, <<0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0>>). + +-define(BILLION, 1000000000). + +-define(EPOCH, {{1970,1,1}, {0,0,0}}). diff --git a/lib/stdlib/src/escript.erl b/lib/stdlib/src/escript.erl index c42ae981e7..6e8f780f7c 100644 --- a/lib/stdlib/src/escript.erl +++ b/lib/stdlib/src/escript.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2007-2016. All Rights Reserved. +%% Copyright Ericsson AB 2007-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -629,8 +629,7 @@ parse_source(S, File, Fd, StartLine, HeaderSz, CheckOnly) -> {error, _} -> epp_parse_file2(Epp, S2, [FileForm], OptModRes); {eof, LastLine} -> - Anno = anno(LastLine), - S#state{forms_or_bin = [FileForm, {eof, Anno}]} + S#state{forms_or_bin = [FileForm, {eof, LastLine}]} end, ok = epp:close(Epp), ok = file:close(Fd), @@ -728,8 +727,7 @@ epp_parse_file2(Epp, S, Forms, Parsed) -> [S#state.file,Ln,Mod:format_error(Args)]), epp_parse_file(Epp, S#state{n_errors = S#state.n_errors + 1}, [Form | Forms]); {eof, LastLine} -> - Anno = anno(LastLine), - S#state{forms_or_bin = lists:reverse([{eof, Anno} | Forms])} + S#state{forms_or_bin = lists:reverse([{eof, LastLine} | Forms])} end. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% diff --git a/lib/stdlib/src/ets.erl b/lib/stdlib/src/ets.erl index 20de06fd0b..d6fd1e3ea1 100644 --- a/lib/stdlib/src/ets.erl +++ b/lib/stdlib/src/ets.erl @@ -51,8 +51,8 @@ -type tab() :: atom() | tid(). -type type() :: set | ordered_set | bag | duplicate_bag. -type continuation() :: '$end_of_table' - | {tab(),integer(),integer(),binary(),list(),integer()} - | {tab(),_,_,integer(),binary(),list(),integer(),integer()}. + | {tab(),integer(),integer(),comp_match_spec(),list(),integer()} + | {tab(),_,_,integer(),comp_match_spec(),list(),integer(),integer()}. -opaque tid() :: integer(). @@ -488,7 +488,7 @@ update_element(_, _, _) -> %%% End of BIFs --opaque comp_match_spec() :: binary(). %% this one is REALLY opaque +-opaque comp_match_spec() :: reference(). 
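A hedged sketch (not from the patch) of how the magic/version constants defined above distinguish header formats, reading 8 bytes at the USTAR magic offset 257; the function is hypothetical and ignores the STAR trailer and other refinements:

    %% Illustrative only.
    guess_format(<<Header:512/binary>>) ->
        case binary_part(Header, 257, 8) of
            <<"ustar\0", "00">> -> ustar_or_pax;
            <<"ustar ", " \0">> -> gnu;
            _                   -> v7_or_unknown
        end.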
-spec match_spec_run(List, CompiledMatchSpec) -> list() when List :: [tuple()], @@ -505,28 +505,28 @@ match_spec_run(List, CompiledMS) -> repair_continuation('$end_of_table', _) -> '$end_of_table'; %% ordered_set -repair_continuation(Untouched = {Table,Lastkey,EndCondition,N2,Bin,L2,N3,N4}, MS) +repair_continuation(Untouched = {Table,Lastkey,EndCondition,N2,MSRef,L2,N3,N4}, MS) when %% (is_atom(Table) or is_integer(Table)), is_integer(N2), - byte_size(Bin) =:= 0, + %% is_reference(MSRef), is_list(L2), is_integer(N3), is_integer(N4) -> - case ets:is_compiled_ms(Bin) of + case ets:is_compiled_ms(MSRef) of true -> Untouched; false -> {Table,Lastkey,EndCondition,N2,ets:match_spec_compile(MS),L2,N3,N4} end; %% set/bag/duplicate_bag -repair_continuation(Untouched = {Table,N1,N2,Bin,L,N3}, MS) +repair_continuation(Untouched = {Table,N1,N2,MSRef,L,N3}, MS) when %% (is_atom(Table) or is_integer(Table)), is_integer(N1), is_integer(N2), - byte_size(Bin) =:= 0, + %% is_reference(MSRef), is_list(L), is_integer(N3) -> - case ets:is_compiled_ms(Bin) of + case ets:is_compiled_ms(MSRef) of true -> Untouched; false -> diff --git a/lib/stdlib/src/filelib.erl b/lib/stdlib/src/filelib.erl index 7029389e2f..daa18da9aa 100644 --- a/lib/stdlib/src/filelib.erl +++ b/lib/stdlib/src/filelib.erl @@ -24,6 +24,7 @@ -export([fold_files/5, last_modified/1, file_size/1, ensure_dir/1]). -export([wildcard/3, is_dir/2, is_file/2, is_regular/2]). -export([fold_files/6, last_modified/2, file_size/2]). +-export([find_file/2, find_file/3, find_source/1, find_source/2, find_source/3]). %% For debugging/testing. -export([compile_wildcard/1]). @@ -517,3 +518,124 @@ eval_list_dir(Dir, erl_prim_loader) -> end; eval_list_dir(Dir, Mod) -> Mod:list_dir(Dir). + +%% Getting the rules to use for file search + +keep_dir_search_rules(Rules) -> + [T || {_,_}=T <- Rules]. + +keep_suffix_search_rules(Rules) -> + [T || {_,_,_}=T <- Rules]. + +get_search_rules() -> + case application:get_env(kernel, source_search_rules) of + undefined -> default_search_rules(); + {ok, []} -> default_search_rules(); + {ok, R} when is_list(R) -> R + end. + +default_search_rules() -> + [%% suffix-speficic rules for source search + {".beam", ".erl", erl_source_search_rules()}, + {".erl", ".yrl", []}, + {"", ".src", erl_source_search_rules()}, + {".so", ".c", c_source_search_rules()}, + {".o", ".c", c_source_search_rules()}, + {"", ".c", c_source_search_rules()}, + {"", ".in", basic_source_search_rules()}, + %% plain old directory rules, backwards compatible + {"", ""}, + {"ebin","src"}, + {"ebin","esrc"} + ]. + +basic_source_search_rules() -> + (erl_source_search_rules() + ++ c_source_search_rules()). + +erl_source_search_rules() -> + [{"ebin","src"}, {"ebin","esrc"}]. + +c_source_search_rules() -> + [{"priv","c_src"}, {"priv","src"}, {"bin","c_src"}, {"bin","src"}, {"", "src"}]. + +%% Looks for a file relative to a given directory + +-type find_file_rule() :: {ObjDirSuffix::string(), SrcDirSuffix::string()}. + +-spec find_file(filename(), filename()) -> + {ok, filename()} | {error, not_found}. +find_file(Filename, Dir) -> + find_file(Filename, Dir, []). + +-spec find_file(filename(), filename(), [find_file_rule()]) -> + {ok, filename()} | {error, not_found}. +find_file(Filename, Dir, []) -> + find_file(Filename, Dir, get_search_rules()); +find_file(Filename, Dir, Rules) -> + try_dir_rules(keep_dir_search_rules(Rules), Filename, Dir). 
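The comp_match_spec() change above only affects the opaque type (a reference instead of a magic binary); the public match-spec API keeps its shape. A usage sketch, not from the patch:

    1> MS = ets:match_spec_compile([{{'$1','$2'},[],['$2']}]).
    2> ets:is_compiled_ms(MS).
    true
    3> ets:match_spec_run([{a,1},{b,2}], MS).
    [1,2]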
+ +%% Looks for a source file relative to the object file name and directory + +-type find_source_rule() :: {ObjExtension::string(), SrcExtension::string(), + [find_file_rule()]}. + +-spec find_source(filename()) -> + {ok, filename()} | {error, not_found}. +find_source(FilePath) -> + find_source(filename:basename(FilePath), filename:dirname(FilePath)). + +-spec find_source(filename(), filename()) -> + {ok, filename()} | {error, not_found}. +find_source(Filename, Dir) -> + find_source(Filename, Dir, []). + +-spec find_source(filename(), filename(), [find_source_rule()]) -> + {ok, filename()} | {error, not_found}. +find_source(Filename, Dir, []) -> + find_source(Filename, Dir, get_search_rules()); +find_source(Filename, Dir, Rules) -> + try_suffix_rules(keep_suffix_search_rules(Rules), Filename, Dir). + +try_suffix_rules(Rules, Filename, Dir) -> + Ext = filename:extension(Filename), + try_suffix_rules(Rules, filename:rootname(Filename, Ext), Dir, Ext). + +try_suffix_rules([{Ext,Src,Rules}|Rest], Root, Dir, Ext) + when is_list(Src), is_list(Rules) -> + case try_dir_rules(add_local_search(Rules), Root ++ Src, Dir) of + {ok, File} -> {ok, File}; + _Other -> + try_suffix_rules(Rest, Root, Dir, Ext) + end; +try_suffix_rules([_|Rest], Root, Dir, Ext) -> + try_suffix_rules(Rest, Root, Dir, Ext); +try_suffix_rules([], _Root, _Dir, _Ext) -> + {error, not_found}. + +%% ensuring we check the directory of the object file before any other directory +add_local_search(Rules) -> + Local = {"",""}, + [Local] ++ lists:filter(fun (X) -> X =/= Local end, Rules). + +try_dir_rules([{From, To}|Rest], Filename, Dir) + when is_list(From), is_list(To) -> + case try_dir_rule(Dir, Filename, From, To) of + {ok, File} -> {ok, File}; + error -> try_dir_rules(Rest, Filename, Dir) + end; +try_dir_rules([], _Filename, _Dir) -> + {error, not_found}. + +try_dir_rule(Dir, Filename, From, To) -> + case lists:suffix(From, Dir) of + true -> + NewDir = lists:sublist(Dir, 1, length(Dir)-length(From))++To, + Src = filename:join(NewDir, Filename), + case is_regular(Src) of + true -> {ok, Src}; + false -> error + end; + false -> + error + end. diff --git a/lib/stdlib/src/filename.erl b/lib/stdlib/src/filename.erl index c4586171ca..b5df5c9d37 100644 --- a/lib/stdlib/src/filename.erl +++ b/lib/stdlib/src/filename.erl @@ -19,6 +19,9 @@ %% -module(filename). +-deprecated({find_src,1,next_major_release}). +-deprecated({find_src,2,next_major_release}). + %% Purpose: Provides generic manipulation of filenames. %% %% Generally, these functions accept filenames in the native format @@ -34,8 +37,9 @@ -export([absname/1, absname/2, absname_join/2, basename/1, basename/2, dirname/1, extension/1, join/1, join/2, pathtype/1, - rootname/1, rootname/2, split/1, nativename/1]). --export([find_src/1, find_src/2, flatten/1]). + rootname/1, rootname/2, split/1, flatten/1, nativename/1, + safe_relative_path/1]). +-export([find_src/1, find_src/2]). % deprecated -export([basedir/2, basedir/3]). %% Undocumented and unsupported exports. @@ -750,7 +754,45 @@ separators() -> _ -> {false, false} end. +-spec safe_relative_path(Filename) -> 'unsafe' | SafeFilename when + Filename :: file:name_all(), + SafeFilename :: file:name_all(). + +safe_relative_path(Path) -> + case pathtype(Path) of + relative -> + Cs0 = split(Path), + safe_relative_path_1(Cs0, []); + _ -> + unsafe + end. 
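A usage sketch (not from the patch) of the new lookup functions, using the default ebin-to-src rules above; the paths are hypothetical and assume the source tree is present:

    1> filelib:find_file("filelib.erl", "/usr/local/otp/lib/stdlib-3.3/ebin").
    {ok,"/usr/local/otp/lib/stdlib-3.3/src/filelib.erl"}
    2> filelib:find_source("/usr/local/otp/lib/stdlib-3.3/ebin/filelib.beam").
    {ok,"/usr/local/otp/lib/stdlib-3.3/src/filelib.erl"}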
+safe_relative_path_1(["."|T], Acc) -> + safe_relative_path_1(T, Acc); +safe_relative_path_1([<<".">>|T], Acc) -> + safe_relative_path_1(T, Acc); +safe_relative_path_1([".."|T], Acc) -> + climb(T, Acc); +safe_relative_path_1([<<"..">>|T], Acc) -> + climb(T, Acc); +safe_relative_path_1([H|T], Acc) -> + safe_relative_path_1(T, [H|Acc]); +safe_relative_path_1([], []) -> + []; +safe_relative_path_1([], Acc) -> + join(lists:reverse(Acc)). + +climb(_, []) -> + unsafe; +climb(T, [_|Acc]) -> + safe_relative_path_1(T, Acc). + +%% NOTE: The find_src/1/2 functions are deprecated; they try to do too much +%% at once and are not a good fit for this module. Parts of the code have +%% been moved to filelib:find_file/2 instead. Only this part of this +%% module is allowed to call the filelib module; such mutual dependency +%% should otherwise be avoided! This code should eventually be removed. +%% %% find_src(Module) -- %% find_src(Module, Rules) -- @@ -793,14 +835,7 @@ separators() -> | {'d', atom()}, ErrorReason :: 'non_existing' | 'preloaded' | 'interpreted'. find_src(Mod) -> - Default = [{"", ""}, {"ebin", "src"}, {"ebin", "esrc"}], - Rules = - case application:get_env(kernel, source_search_rules) of - undefined -> Default; - {ok, []} -> Default; - {ok, R} when is_list(R) -> R - end, - find_src(Mod, Rules). + find_src(Mod, []). -spec find_src(Beam, Rules) -> {SourceFile, Options} | {error, {ErrorReason, Module}} when @@ -816,44 +851,47 @@ find_src(Mod) -> ErrorReason :: 'non_existing' | 'preloaded' | 'interpreted'. find_src(Mod, Rules) when is_atom(Mod) -> find_src(atom_to_list(Mod), Rules); -find_src(File0, Rules) when is_list(File0) -> - Mod = list_to_atom(basename(File0, ".erl")), - File = rootname(File0, ".erl"), - case readable_file(File++".erl") of - true -> - try_file(File, Mod, Rules); - false -> - try_file(undefined, Mod, Rules) - end. - -try_file(File, Mod, Rules) -> +find_src(ModOrFile, Rules) when is_list(ModOrFile) -> + Extension = ".erl", + Mod = list_to_atom(basename(ModOrFile, Extension)), case code:which(Mod) of Possibly_Rel_Path when is_list(Possibly_Rel_Path) -> - {ok, Cwd} = file:get_cwd(), - Path = join(Cwd, Possibly_Rel_Path), - try_file(File, Path, Mod, Rules); + {ok, Cwd} = file:get_cwd(), + ObjPath = make_abs_path(Cwd, Possibly_Rel_Path), + find_src_1(ModOrFile, ObjPath, Mod, Extension, Rules); Ecode when is_atom(Ecode) -> % Ecode :: ecode() {error, {Ecode, Mod}} end. %% At this point, the Mod is known to be valid. %% If the source name is not known, find it. -%% Then get the compilation options. -%% Returns: {SrcFile, Options} +find_src_1(ModOrFile, ObjPath, Mod, Extension, Rules) -> + %% The documentation says this function must return the found path + %% without extension in all cases. Also, ModOrFile could be given with + %% or without extension. Hence the calls to rootname below. + ModOrFileRoot = rootname(ModOrFile, Extension), + case filelib:is_regular(ModOrFileRoot++Extension) of + true -> + find_src_2(ModOrFileRoot, Mod); + false -> + SrcName = basename(ObjPath, code:objfile_extension()) ++ Extension, + case filelib:find_file(SrcName, dirname(ObjPath), Rules) of + {ok, SrcFile} -> + find_src_2(rootname(SrcFile, Extension), Mod); + Error -> + Error + end + end. 
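To illustrate the new safe_relative_path/1 (derived from the clauses above, not part of the patch): relative paths are normalized, and anything that climbs out of the starting directory, or is absolute, is rejected as unsafe.

    1> filename:safe_relative_path("dir/sub_dir/..").
    "dir"
    2> filename:safe_relative_path("dir/../..").
    unsafe
    3> filename:safe_relative_path("/etc/passwd").
    unsafe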
-try_file(undefined, ObjFilename, Mod, Rules) -> - case get_source_file(ObjFilename, Mod, Rules) of - {ok, File} -> try_file(File, ObjFilename, Mod, Rules); - Error -> Error - end; -try_file(Src, _ObjFilename, Mod, _Rules) -> +%% Get the compilation options and return {SrcFileRoot, Options} +find_src_2(SrcRoot, Mod) -> List = case Mod:module_info(compile) of none -> []; List0 -> List0 end, Options = proplists:get_value(options, List, []), {ok, Cwd} = file:get_cwd(), - AbsPath = make_abs_path(Cwd, Src), + AbsPath = make_abs_path(Cwd, SrcRoot), {AbsPath, filter_options(dirname(AbsPath), Options, [])}. %% Filters the options. @@ -884,42 +922,6 @@ filter_options(Base, [_|Rest], Result) -> filter_options(_Base, [], Result) -> Result. -%% Gets the source file given path of object code and module name. - -get_source_file(Obj, Mod, Rules) -> - source_by_rules(dirname(Obj), atom_to_list(Mod), Rules). - -source_by_rules(Dir, Base, [{From, To}|Rest]) -> - case try_rule(Dir, Base, From, To) of - {ok, File} -> {ok, File}; - error -> source_by_rules(Dir, Base, Rest) - end; -source_by_rules(_Dir, _Base, []) -> - {error, source_file_not_found}. - -try_rule(Dir, Base, From, To) -> - case lists:suffix(From, Dir) of - true -> - NewDir = lists:sublist(Dir, 1, length(Dir)-length(From))++To, - Src = join(NewDir, Base), - case readable_file(Src++".erl") of - true -> {ok, Src}; - false -> error - end; - false -> - error - end. - -readable_file(File) -> - case file:read_file_info(File) of - {ok, #file_info{type=regular, access=read}} -> - true; - {ok, #file_info{type=regular, access=read_write}} -> - true; - _Other -> - false - end. - make_abs_path(BasePath, Path) -> join(BasePath, Path). diff --git a/lib/stdlib/src/gen_event.erl b/lib/stdlib/src/gen_event.erl index 4839fe4f2c..0aebf1bdc5 100644 --- a/lib/stdlib/src/gen_event.erl +++ b/lib/stdlib/src/gen_event.erl @@ -778,7 +778,7 @@ stop_handlers([], _) -> []. %% Message from the release_handler. -%% The list of modules got to be a set ! +%% The list of modules got to be a set, i.e. no duplicate elements! get_modules(MSL) -> Mods = [Handler#handler.module || Handler <- MSL], ordsets:to_list(ordsets:from_list(Mods)). diff --git a/lib/stdlib/src/gen_fsm.erl b/lib/stdlib/src/gen_fsm.erl index 6e7528fd98..e925a75fe8 100644 --- a/lib/stdlib/src/gen_fsm.erl +++ b/lib/stdlib/src/gen_fsm.erl @@ -273,7 +273,7 @@ start_timer(Time, Msg) -> send_event_after(Time, Event) -> erlang:start_timer(Time, self(), {'$gen_event', Event}). -%% Returns the remaing time for the timer if Ref referred to +%% Returns the remaining time for the timer if Ref referred to %% an active timer/send_event_after, false otherwise. cancel_timer(Ref) -> case erlang:cancel_timer(Ref) of diff --git a/lib/stdlib/src/gen_statem.erl b/lib/stdlib/src/gen_statem.erl index 018aca90e6..cacc932ec4 100644 --- a/lib/stdlib/src/gen_statem.erl +++ b/lib/stdlib/src/gen_statem.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2016. All Rights Reserved. +%% Copyright Ericsson AB 2016-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -47,15 +47,17 @@ %% Type exports for templates and callback modules -export_type( [event_type/0, - init_result/0, callback_mode_result/0, - state_function_result/0, - handle_event_result/0, + init_result/1, state_enter_result/1, event_handler_result/1, reply_action/0, enter_action/0, action/0]). 
+%% Old types, not advertised +-export_type( + [state_function_result/0, + handle_event_result/0]). %% Type that is exported just to be documented -export_type([transition_option/0]). @@ -143,9 +145,10 @@ {'reply', % Reply to a caller From :: from(), Reply :: term()}. --type init_result() :: - {ok, state(), data()} | - {ok, state(), data(), [action()] | action()} | +-type init_result(StateType) :: + {ok, State :: StateType, Data :: data()} | + {ok, State :: StateType, Data :: data(), + Actions :: [action()] | action()} | 'ignore' | {'stop', Reason :: term()}. @@ -182,12 +185,23 @@ 'keep_state_and_data' | % {keep_state_and_data,[]} {'keep_state_and_data', % Keep state and data -> only actions Actions :: [ActionType] | ActionType} | + %% + {'repeat_state', % {repeat_state,NewData,[]} + NewData :: data()} | + {'repeat_state', % Repeat state, change data + NewData :: data(), + Actions :: [ActionType] | ActionType} | + 'repeat_state_and_data' | % {repeat_state_and_data,[]} + {'repeat_state_and_data', % Repeat state and data -> only actions + Actions :: [ActionType] | ActionType} | + %% 'stop' | % {stop,normal} {'stop', % Stop the server Reason :: term()} | {'stop', % Stop the server Reason :: term(), NewData :: data()} | + %% {'stop_and_reply', % Reply then stop the server Reason :: term(), Replies :: [reply_action()] | reply_action()} | @@ -201,7 +215,7 @@ %% the server is not running until this function has returned %% an {ok, ...} tuple. Thereafter the state callbacks are called %% for all events to this server. --callback init(Args :: term()) -> init_result(). +-callback init(Args :: term()) -> init_result(state()). %% This callback shall return the callback mode of the callback module. %% @@ -275,6 +289,8 @@ -optional_callbacks( [init/1, % One may use enter_loop/5,6,7 instead format_status/2, % Has got a default implementation + terminate/3, % Has got a default implementation + code_change/4, % Only needed by advanced soft upgrade %% state_name/3, % Example for callback_mode() =:= state_functions: %% there has to be a StateName/3 callback function @@ -304,12 +320,16 @@ event_type({call,From}) -> from(From); event_type(Type) -> case Type of + {call,From} -> + from(From); cast -> true; info -> true; timeout -> true; + state_timeout -> + true; internal -> true; _ -> @@ -588,6 +608,22 @@ enter(Module, Opts, State, Data, Server, Actions, Parent) -> true -> [Actions,{postpone,false}] end, + TimerRefs = #{}, + %% Key: timer ref + %% Value: the timer type i.e the timer's event type + %% + TimerTypes = #{}, + %% Key: timer type i.e the timer's event type + %% Value: timer ref + %% + %% We add a timer to both timer_refs and timer_types + %% when we start it. When we request an asynchronous + %% timer cancel we remove it from timer_types. When + %% the timer cancel message arrives we remove it from + %% timer_refs. 
+ %% + Hibernate = false, + CancelTimers = 0, S = #{ callback_mode => undefined, state_enter => false, @@ -596,25 +632,25 @@ enter(Module, Opts, State, Data, Server, Actions, Parent) -> state => State, data => Data, postponed => P, - %% The rest of the fields are set from to the arguments to - %% loop_event_actions/10 when it finally loops back to loop/3 - %% in loop_events/10 %% - %% Marker for initial state, cleared immediately when used - init_state => true + %% The following fields are finally set from to the arguments to + %% loop_event_actions/9 when it finally loops back to loop/3 + %% in loop_event_result/11 + timer_refs => TimerRefs, + timer_types => TimerTypes, + hibernate => Hibernate, + cancel_timers => CancelTimers }, NewDebug = sys_debug(Debug, S, State, {enter,Event,State}), case call_callback_mode(S) of {ok,NewS} -> - TimerRefs = #{}, - TimerTypes = #{}, loop_event_actions( - Parent, NewDebug, NewS, TimerRefs, TimerTypes, - Events, Event, State, Data, NewActions); + Parent, NewDebug, NewS, + Events, Event, State, Data, NewActions, true); {Class,Reason,Stacktrace} -> terminate( - Class, Reason, Stacktrace, - NewDebug, S, [Event|Events]) + Class, Reason, Stacktrace, NewDebug, + S, [Event|Events]) end. %%%========================================================================== @@ -683,9 +719,7 @@ system_continue(Parent, Debug, S) -> loop(Parent, Debug, S). system_terminate(Reason, _Parent, Debug, S) -> - terminate( - exit, Reason, ?STACKTRACE(), - Debug, S, []). + terminate(exit, Reason, ?STACKTRACE(), Debug, S, []). system_code_change( #{module := Module, @@ -796,23 +830,22 @@ wakeup_from_hibernate(Parent, Debug, S) -> %% and detours through sys:handle_system_message/7 and proc_lib:hibernate/3 %% Entry point for system_continue/3 -loop(Parent, Debug, #{hibernate := Hibernate} = S) -> - case Hibernate of - true -> - %% Does not return but restarts process at - %% wakeup_from_hibernate/3 that jumps to loop_receive/3 - proc_lib:hibernate( - ?MODULE, wakeup_from_hibernate, [Parent,Debug,S]), - error( - {should_not_have_arrived_here_but_instead_in, - {wakeup_from_hibernate,3}}); - false -> - loop_receive(Parent, Debug, S) - end. +loop(Parent, Debug, #{hibernate := true, cancel_timers := 0} = S) -> + loop_hibernate(Parent, Debug, S); +loop(Parent, Debug, S) -> + loop_receive(Parent, Debug, S). + +loop_hibernate(Parent, Debug, S) -> + %% Does not return but restarts process at + %% wakeup_from_hibernate/3 that jumps to loop_receive/3 + proc_lib:hibernate( + ?MODULE, wakeup_from_hibernate, [Parent,Debug,S]), + error( + {should_not_have_arrived_here_but_instead_in, + {wakeup_from_hibernate,3}}). %% Entry point for wakeup_from_hibernate/3 -loop_receive( - Parent, Debug, #{timer_refs := TimerRefs, timer_types := TimerTypes} = S) -> +loop_receive(Parent, Debug, S) -> receive Msg -> case Msg of @@ -821,30 +854,87 @@ loop_receive( %% Does not return but tail recursively calls %% system_continue/3 that jumps to loop/3 sys:handle_system_msg( - Req, Pid, Parent, ?MODULE, Debug, S, Hibernate); + Req, Pid, Parent, ?MODULE, Debug, S, + Hibernate); {'EXIT',Parent,Reason} = EXIT -> - %% EXIT is not a 2-tuple and therefore - %% not an event and has no event_type(), - %% but this will stand out in the crash report... - terminate( - exit, Reason, ?STACKTRACE(), Debug, S, [EXIT]); + %% EXIT is not a 2-tuple therefore + %% not an event but this will stand out + %% in the crash report... 
+ Q = [EXIT], + terminate(exit, Reason, ?STACKTRACE(), Debug, S, Q); {timeout,TimerRef,TimerMsg} -> + #{timer_refs := TimerRefs, + timer_types := TimerTypes, + hibernate := Hibernate} = S, case TimerRefs of #{TimerRef := TimerType} -> - Event = {TimerType,TimerMsg}, - %% Unregister the triggered timeout + %% We know of this timer; is it a running + %% timer or a timer being cancelled that + %% managed to send a late timeout message? + case TimerTypes of + #{TimerType := TimerRef} -> + %% The timer type maps back to this + %% timer ref, so it was a running timer + Event = {TimerType,TimerMsg}, + %% Unregister the triggered timeout + NewTimerRefs = + maps:remove(TimerRef, TimerRefs), + NewTimerTypes = + maps:remove(TimerType, TimerTypes), + loop_receive_result( + Parent, Debug, + S#{ + timer_refs := NewTimerRefs, + timer_types := NewTimerTypes}, + Hibernate, + Event); + _ -> + %% This was a late timeout message + %% from timer being cancelled, so + %% ignore it and expect a cancel_timer + %% msg shortly + loop_receive(Parent, Debug, S) + end; + _ -> + %% Not our timer; present it as an event + Event = {info,Msg}, loop_receive_result( - Parent, Debug, S, - maps:remove(TimerRef, TimerRefs), - maps:remove(TimerType, TimerTypes), - Event); + Parent, Debug, S, Hibernate, Event) + end; + {cancel_timer,TimerRef,_} -> + #{timer_refs := TimerRefs, + cancel_timers := CancelTimers, + hibernate := Hibernate} = S, + case TimerRefs of + #{TimerRef := _} -> + %% We must have requested a cancel + %% of this timer so it is already + %% removed from TimerTypes + NewTimerRefs = + maps:remove(TimerRef, TimerRefs), + NewCancelTimers = CancelTimers - 1, + NewS = + S#{ + timer_refs := NewTimerRefs, + cancel_timers := NewCancelTimers}, + if + Hibernate =:= true, NewCancelTimers =:= 0 -> + %% No more cancel_timer msgs to expect; + %% we can hibernate + loop_hibernate(Parent, Debug, NewS); + NewCancelTimers >= 0 -> % Assert + loop_receive(Parent, Debug, NewS) + end; _ -> + %% Not our cancel_timer msg; + %% present it as an event Event = {info,Msg}, loop_receive_result( - Parent, Debug, S, - TimerRefs, TimerTypes, Event) + Parent, Debug, S, Hibernate, Event) end; _ -> + %% External msg + #{hibernate := Hibernate} = S, Event = case Msg of {'$gen_call',From,Request} -> @@ -855,208 +945,212 @@ loop_receive( {info,Msg} end, loop_receive_result( - Parent, Debug, S, - TimerRefs, TimerTypes, Event) + Parent, Debug, S, Hibernate, Event) end end. loop_receive_result( - Parent, Debug, #{state := State} = S, - TimerRefs, TimerTypes, Event) -> - %% The fields 'timer_refs', 'timer_types' and 'hibernate' - %% are now invalid in state map S - they will be recalculated - %% and restored when we return to loop/3 - %% + Parent, Debug, + #{state := State, + timer_types := TimerTypes, cancel_timers := CancelTimers} = S, + Hibernate, Event) -> + %% From now the 'hibernate' field in S is invalid + %% and will be restored when looping back + %% in loop_event_result/11 NewDebug = sys_debug(Debug, S, State, {in,Event}), - %% Here the queue of not yet handled events is created + %% Here is the queue of not yet handled events created Events = [], - Hibernate = false, - loop_event( - Parent, NewDebug, S, TimerRefs, TimerTypes, Events, Event, Hibernate). 
+ %% Cancel any running event timer + case + cancel_timer_by_type(timeout, TimerTypes, CancelTimers) + of + {_,CancelTimers} -> + %% No timer cancelled + loop_event(Parent, NewDebug, S, Events, Event, Hibernate); + {NewTimerTypes,NewCancelTimers} -> + %% The timer is removed from NewTimerTypes but + %% remains in TimerRefs until we get + %% the cancel_timer msg + NewS = + S#{ + timer_types := NewTimerTypes, + cancel_timers := NewCancelTimers}, + loop_event(Parent, NewDebug, NewS, Events, Event, Hibernate) + end. %% Entry point for handling an event, received or enqueued loop_event( - Parent, Debug, #{state := State, data := Data} = S, TimerRefs, TimerTypes, + Parent, Debug, + #{state := State, data := Data} = S, Events, {Type,Content} = Event, Hibernate) -> %% - %% If Hibernate is true here it can only be + %% If (this old) Hibernate is true here it can only be %% because it was set from an event action - %% and we did not go into hibernation since there - %% were events in queue, so we do what the user + %% and we did not go into hibernation since there were + %% events in queue, so we do what the user %% might rely on i.e collect garbage which %% would have happened if we actually hibernated %% and immediately was awakened Hibernate andalso garbage_collect(), case call_state_function(S, Type, Content, State, Data) of {ok,Result,NewS} -> - %% Cancel event timeout - {NewTimerRefs,NewTimerTypes} = - cancel_timer_by_type( - timeout, TimerRefs, TimerTypes), - {NewData,NextState,Actions} = + {NextState,NewData,Actions,EnterCall} = parse_event_result( - true, Debug, NewS, Result, - Events, Event, State, Data), + true, Debug, NewS, + Events, Event, State, Data, Result), loop_event_actions( - Parent, Debug, S, NewTimerRefs, NewTimerTypes, - Events, Event, NextState, NewData, Actions); + Parent, Debug, NewS, + Events, Event, NextState, NewData, Actions, EnterCall); {Class,Reason,Stacktrace} -> terminate( - Class, Reason, Stacktrace, Debug, S, [Event|Events]) + Class, Reason, Stacktrace, Debug, S, + [Event|Events]) end. 
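The EnterCall flag returned by parse_event_result/8 is what lets the new repeat_state* return values rerun the state enter call even though the state does not change. A minimal illustrative callback module exercising this, with all names made up:

-module(door).
-behaviour(gen_statem).
-export([callback_mode/0, init/1, handle_event/4]).

callback_mode() ->
    [handle_event_function, state_enter].

init([]) ->
    {ok, locked, 0}.

%% The enter call runs on every transition to 'locked',
%% including the repeat_state transition below.
handle_event(enter, _OldState, locked, Attempts) ->
    io:format("locked, failed attempts: ~p~n", [Attempts]),
    keep_state_and_data;
handle_event(enter, _OldState, _State, _Attempts) ->
    keep_state_and_data;
handle_event(cast, wrong_code, locked, Attempts) ->
    %% Same state, but a fresh enter call; keep_state would skip the enter call.
    {repeat_state, Attempts + 1};
handle_event(cast, correct_code, locked, _Attempts) ->
    {next_state, open, 0}.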
loop_event_actions( Parent, Debug, - #{state := State, state_enter := StateEnter} = S, TimerRefs, TimerTypes, - Events, Event, NextState, NewData, Actions) -> + #{state := State, state_enter := StateEnter} = S, + Events, Event, NextState, NewData, + Actions, EnterCall) -> + %% Hibernate is reborn here as false being + %% the default value from parse_actions/4 case parse_actions(Debug, S, State, Actions) of {ok,NewDebug,Hibernate,TimeoutsR,Postpone,NextEventsR} -> if - StateEnter, NextState =/= State -> + StateEnter, EnterCall -> loop_event_enter( - Parent, NewDebug, S, TimerRefs, TimerTypes, + Parent, NewDebug, S, Events, Event, NextState, NewData, Hibernate, TimeoutsR, Postpone, NextEventsR); - StateEnter -> - case maps:is_key(init_state, S) of - true -> - %% Avoid infinite loop in initial state - %% with state entry events - NewS = maps:remove(init_state, S), - loop_event_enter( - Parent, NewDebug, NewS, TimerRefs, TimerTypes, - Events, Event, NextState, NewData, - Hibernate, TimeoutsR, Postpone, NextEventsR); - false -> - loop_event_result( - Parent, NewDebug, S, TimerRefs, TimerTypes, - Events, Event, NextState, NewData, - Hibernate, TimeoutsR, Postpone, NextEventsR) - end; true -> loop_event_result( - Parent, NewDebug, S, TimerRefs, TimerTypes, + Parent, NewDebug, S, Events, Event, NextState, NewData, Hibernate, TimeoutsR, Postpone, NextEventsR) end; {Class,Reason,Stacktrace} -> terminate( - Class, Reason, Stacktrace, - Debug, S#{data := NewData}, [Event|Events]) + Class, Reason, Stacktrace, Debug, S, + [Event|Events]) end. loop_event_enter( - Parent, Debug, #{state := State} = S, TimerRefs, TimerTypes, + Parent, Debug, #{state := State} = S, Events, Event, NextState, NewData, Hibernate, TimeoutsR, Postpone, NextEventsR) -> case call_state_function(S, enter, State, NextState, NewData) of {ok,Result,NewS} -> - {NewerData,_,Actions} = - parse_event_result( - false, Debug, NewS, Result, - Events, Event, NextState, NewData), - loop_event_enter_actions( - Parent, Debug, NewS, TimerRefs, TimerTypes, - Events, Event, NextState, NewerData, - Hibernate, TimeoutsR, Postpone, NextEventsR, Actions); + case parse_event_result( + false, Debug, NewS, + Events, Event, NextState, NewData, Result) of + {_,NewerData,Actions,EnterCall} -> + loop_event_enter_actions( + Parent, Debug, NewS, + Events, Event, NextState, NewerData, + Hibernate, TimeoutsR, Postpone, NextEventsR, + Actions, EnterCall) + end; {Class,Reason,Stacktrace} -> terminate( - Class, Reason, Stacktrace, - Debug, S#{state := NextState, data := NewData}, + Class, Reason, Stacktrace, Debug, + S#{ + state := NextState, + data := NewData, + hibernate := Hibernate}, [Event|Events]) end. 
loop_event_enter_actions( - Parent, Debug, S, TimerRefs, TimerTypes, + Parent, Debug, #{state_enter := StateEnter} = S, Events, Event, NextState, NewData, - Hibernate, TimeoutsR, Postpone, NextEventsR, Actions) -> + Hibernate, TimeoutsR, Postpone, NextEventsR, + Actions, EnterCall) -> case parse_enter_actions( - Debug, S, NextState, Actions, - Hibernate, TimeoutsR) + Debug, S, NextState, Actions, Hibernate, TimeoutsR) of {ok,NewDebug,NewHibernate,NewTimeoutsR,_,_} -> - loop_event_result( - Parent, NewDebug, S, TimerRefs, TimerTypes, - Events, Event, NextState, NewData, - NewHibernate, NewTimeoutsR, Postpone, NextEventsR); + if + StateEnter, EnterCall -> + loop_event_enter( + Parent, NewDebug, S, + Events, Event, NextState, NewData, + NewHibernate, NewTimeoutsR, Postpone, NextEventsR); + true -> + loop_event_result( + Parent, NewDebug, S, + Events, Event, NextState, NewData, + NewHibernate, NewTimeoutsR, Postpone, NextEventsR) + end; {Class,Reason,Stacktrace} -> terminate( - Class, Reason, Stacktrace, - Debug, S#{state := NextState, data := NewData}, + Class, Reason, Stacktrace, Debug, + S#{ + state := NextState, + data := NewData, + hibernate := Hibernate}, [Event|Events]) end. loop_event_result( - Parent, Debug, - #{state := State, postponed := P_0} = S, TimerRefs_0, TimerTypes_0, - Events, Event, NextState, NewData, + Parent, Debug_0, + #{state := State, postponed := P_0, + timer_refs := TimerRefs_0, timer_types := TimerTypes_0, + cancel_timers := CancelTimers_0} = S_0, + Events_0, Event_0, NextState, NewData, Hibernate, TimeoutsR, Postpone, NextEventsR) -> %% %% All options have been collected and next_events are buffered. %% Do the actual state transition. %% - {NewDebug,P_1} = % Move current event to postponed if Postpone + {Debug_1,P_1} = % Move current event to postponed if Postpone case Postpone of true -> - {sys_debug(Debug, S, State, {postpone,Event,State}), - [Event|P_0]}; + {sys_debug(Debug_0, S_0, State, {postpone,Event_0,State}), + [Event_0|P_0]}; false -> - {sys_debug(Debug, S, State, {consume,Event,State}), + {sys_debug(Debug_0, S_0, State, {consume,Event_0,State}), P_0} end, - {Events_1,NewP,{TimerRefs_1,TimerTypes_1}} = + {Events_1,P_2,{TimerTypes_1,CancelTimers_1}} = %% Move all postponed events to queue and cancel the %% state timeout if the state changes if NextState =:= State -> - {Events,P_1,{TimerRefs_0,TimerTypes_0}}; + {Events_0,P_1,{TimerTypes_0,CancelTimers_0}}; true -> - {lists:reverse(P_1, Events),[], + {lists:reverse(P_1, Events_0), + [], cancel_timer_by_type( - state_timeout, TimerRefs_0, TimerTypes_0)} + state_timeout, TimerTypes_0, CancelTimers_0)} + %% The state timer is removed from TimerTypes_1 + %% but remains in TimerRefs_0 until we get + %% the cancel_timer msg end, - {TimerRefs_2,TimerTypes_2,TimeoutEvents} = - %% Stop and start timers non-event timers - parse_timers(TimerRefs_1, TimerTypes_1, TimeoutsR), + {TimerRefs_2,TimerTypes_2,CancelTimers_2,TimeoutEvents} = + %% Stop and start non-event timers + parse_timers(TimerRefs_0, TimerTypes_1, CancelTimers_1, TimeoutsR), %% Place next events last in reversed queue Events_2R = lists:reverse(Events_1, NextEventsR), %% Enqueue immediate timeout events and start event timer - {NewTimerRefs,NewTimerTypes,Events_3R} = - process_timeout_events( - TimerRefs_2, TimerTypes_2, TimeoutEvents, Events_2R), - NewEvents = lists:reverse(Events_3R), - loop_events( - Parent, NewDebug, S, NewTimerRefs, NewTimerTypes, - NewEvents, Hibernate, NextState, NewData, NewP). 
- -%% Loop until out of enqueued events -%% -loop_events( - Parent, Debug, S, TimerRefs, TimerTypes, - [] = _Events, Hibernate, State, Data, P) -> - %% Update S and loop back to loop/3 to receive a new event - NewS = - S#{ - state := State, - data := Data, - postponed := P, - hibernate => Hibernate, - timer_refs => TimerRefs, - timer_types => TimerTypes}, - loop(Parent, Debug, NewS); -loop_events( - Parent, Debug, S, TimerRefs, TimerTypes, - [Event|Events], Hibernate, State, Data, P) -> - %% Update S and continue with enqueued events - NewS = - S#{ - state := State, - data := Data, - postponed := P}, - loop_event( - Parent, Debug, NewS, TimerRefs, TimerTypes, Events, Event, Hibernate). - + Events_3R = prepend_timeout_events(TimeoutEvents, Events_2R), + S_1 = + S_0#{ + state := NextState, + data := NewData, + postponed := P_2, + timer_refs := TimerRefs_2, + timer_types := TimerTypes_2, + cancel_timers := CancelTimers_2, + hibernate := Hibernate}, + case lists:reverse(Events_3R) of + [] -> + %% Get a new event + loop(Parent, Debug_1, S_1); + [Event|Events] -> + %% Loop until out of enqueued events + loop_event(Parent, Debug_1, S_1, Events, Event, Hibernate) + end. %%--------------------------------------------------------------------------- @@ -1069,19 +1163,6 @@ call_callback_mode(#{module := Module} = S) -> catch CallbackMode -> callback_mode_result(S, CallbackMode); - error:undef -> - %% Process undef to check for the simple mistake - %% of calling a nonexistent state function - %% to make the undef more precise - case erlang:get_stacktrace() of - [{Module,callback_mode,[]=Args,_} - |Stacktrace] -> - {error, - {undef_callback,{Module,callback_mode,Args}}, - Stacktrace}; - Stacktrace -> - {error,undef,Stacktrace} - end; Class:Reason -> {Class,Reason,erlang:get_stacktrace()} end. 
@@ -1126,8 +1207,7 @@ parse_callback_mode(_, _CBMode, StateEnter) -> call_state_function( - #{callback_mode := undefined} = S, - Type, Content, State, Data) -> + #{callback_mode := undefined} = S, Type, Content, State, Data) -> case call_callback_mode(S) of {ok,NewS} -> call_state_function(NewS, Type, Content, State, Data); @@ -1135,13 +1215,12 @@ call_state_function( Error end; call_state_function( - #{callback_mode := CallbackMode, - module := Module} = S, + #{callback_mode := CallbackMode, module := Module} = S, Type, Content, State, Data) -> try case CallbackMode of state_functions -> - erlang:apply(Module, State, [Type,Content,Data]); + Module:State(Type, Content, Data); handle_event_function -> Module:handle_event(Type, Content, State, Data) end @@ -1151,41 +1230,6 @@ call_state_function( catch Result -> {ok,Result,S}; - error:badarg -> - case erlang:get_stacktrace() of - [{erlang,apply, - [Module,State,[Type,Content,Data]=Args], - _} - |Stacktrace] - when CallbackMode =:= state_functions -> - %% We get here e.g if apply fails - %% due to State not being an atom - {error, - {undef_state_function,{Module,State,Args}}, - Stacktrace}; - Stacktrace -> - {error,badarg,Stacktrace} - end; - error:undef -> - %% Process undef to check for the simple mistake - %% of calling a nonexistent state function - %% to make the undef more precise - case erlang:get_stacktrace() of - [{Module,State,[Type,Content,Data]=Args,_} - |Stacktrace] - when CallbackMode =:= state_functions -> - {error, - {undef_state_function,{Module,State,Args}}, - Stacktrace}; - [{Module,handle_event,[Type,Content,State,Data]=Args,_} - |Stacktrace] - when CallbackMode =:= handle_event_function -> - {error, - {undef_state_function,{Module,handle_event,Args}}, - Stacktrace}; - Stacktrace -> - {error,undef,Stacktrace} - end; Class:Reason -> {Class,Reason,erlang:get_stacktrace()} end. 
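With the badarg/undef special-casing above removed, a call to a nonexistent state function now surfaces as an ordinary undef from the plain Module:State(Type, Content, Data) dispatch. For reference, an illustrative state_functions-mode module (names made up) where that dispatch lands in off/3 and on/3:

-module(blink).
-behaviour(gen_statem).
-export([callback_mode/0, init/1, on/3, off/3]).

callback_mode() -> state_functions.

init([]) -> {ok, off, undefined}.

%% Dispatched as blink:off(Type, Content, Data) and blink:on(Type, Content, Data).
off(cast, toggle, Data) -> {next_state, on, Data};
off(_Type, _Content, _Data) -> keep_state_and_data.

on(cast, toggle, Data) -> {next_state, off, Data};
on(_Type, _Content, _Data) -> keep_state_and_data.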
@@ -1193,65 +1237,83 @@ call_state_function( %% Interpret all callback return variants parse_event_result( - AllowStateChange, Debug, S, Result, Events, Event, State, Data) -> + AllowStateChange, Debug, S, + Events, Event, State, Data, Result) -> case Result of stop -> terminate( - exit, normal, ?STACKTRACE(), Debug, S, [Event|Events]); + exit, normal, ?STACKTRACE(), Debug, + S#{state := State, data := Data}, + [Event|Events]); {stop,Reason} -> terminate( - exit, Reason, ?STACKTRACE(), Debug, S, [Event|Events]); + exit, Reason, ?STACKTRACE(), Debug, + S#{state := State, data := Data}, + [Event|Events]); {stop,Reason,NewData} -> terminate( - exit, Reason, ?STACKTRACE(), - Debug, S#{data := NewData}, [Event|Events]); + exit, Reason, ?STACKTRACE(), Debug, + S#{state := State, data := NewData}, + [Event|Events]); + %% {stop_and_reply,Reason,Replies} -> - Q = [Event|Events], reply_then_terminate( - exit, Reason, ?STACKTRACE(), - Debug, S, Q, Replies); + exit, Reason, ?STACKTRACE(), Debug, + S#{state := State, data := Data}, + [Event|Events], Replies); {stop_and_reply,Reason,Replies,NewData} -> - Q = [Event|Events], reply_then_terminate( - exit, Reason, ?STACKTRACE(), - Debug, S#{data := NewData}, Q, Replies); + exit, Reason, ?STACKTRACE(), Debug, + S#{state := State, data := NewData}, + [Event|Events], Replies); + %% {next_state,State,NewData} -> - {NewData,State,[]}; + {State,NewData,[],false}; {next_state,NextState,NewData} when AllowStateChange -> - {NewData,NextState,[]}; + {NextState,NewData,[],true}; {next_state,State,NewData,Actions} -> - {NewData,State,Actions}; + {State,NewData,Actions,false}; {next_state,NextState,NewData,Actions} when AllowStateChange -> - {NewData,NextState,Actions}; + {NextState,NewData,Actions,true}; + %% {keep_state,NewData} -> - {NewData,State,[]}; + {State,NewData,[],false}; {keep_state,NewData,Actions} -> - {NewData,State,Actions}; + {State,NewData,Actions,false}; keep_state_and_data -> - {Data,State,[]}; + {State,Data,[],false}; {keep_state_and_data,Actions} -> - {Data,State,Actions}; + {State,Data,Actions,false}; + %% + {repeat_state,NewData} -> + {State,NewData,[],true}; + {repeat_state,NewData,Actions} -> + {State,NewData,Actions,true}; + repeat_state_and_data -> + {State,Data,[],true}; + {repeat_state_and_data,Actions} -> + {State,Data,Actions,true}; + %% _ -> terminate( error, {bad_return_from_state_function,Result}, - ?STACKTRACE(), - Debug, S, [Event|Events]) + ?STACKTRACE(), Debug, + S#{state := State, data := Data}, + [Event|Events]) end. -parse_enter_actions( - Debug, S, State, Actions, - Hibernate, TimeoutsR) -> +parse_enter_actions(Debug, S, State, Actions, Hibernate, TimeoutsR) -> Postpone = forbidden, NextEventsR = forbidden, parse_actions( Debug, S, State, listify(Actions), Hibernate, TimeoutsR, Postpone, NextEventsR). 
- + parse_actions(Debug, S, State, Actions) -> Hibernate = false, - TimeoutsR = [], + TimeoutsR = [{timeout,infinity,infinity}], %% Will cancel event timer Postpone = false, NextEventsR = [], parse_actions( @@ -1279,64 +1341,29 @@ parse_actions( {bad_action_from_state_function,Action}, ?STACKTRACE()} end; + %% %% Actions that set options {hibernate,NewHibernate} when is_boolean(NewHibernate) -> parse_actions( Debug, S, State, Actions, NewHibernate, TimeoutsR, Postpone, NextEventsR); - {hibernate,_} -> - {error, - {bad_action_from_state_function,Action}, - ?STACKTRACE()}; hibernate -> + NewHibernate = true, parse_actions( Debug, S, State, Actions, - true, TimeoutsR, Postpone, NextEventsR); - {state_timeout,Time,_} = StateTimeout - when is_integer(Time), Time >= 0; - Time =:= infinity -> - parse_actions( - Debug, S, State, Actions, - Hibernate, [StateTimeout|TimeoutsR], Postpone, NextEventsR); - {state_timeout,_,_} -> - {error, - {bad_action_from_state_function,Action}, - ?STACKTRACE()}; - {timeout,infinity,_} -> - %% Ignore - timeout will never happen and already cancelled - parse_actions( - Debug, S, State, Actions, - Hibernate, TimeoutsR, Postpone, NextEventsR); - {timeout,Time,_} = Timeout when is_integer(Time), Time >= 0 -> - parse_actions( - Debug, S, State, Actions, - Hibernate, [Timeout|TimeoutsR], Postpone, NextEventsR); - {timeout,_,_} -> - {error, - {bad_action_from_state_function,Action}, - ?STACKTRACE()}; - infinity -> % Ignore - timeout will never happen - parse_actions( - Debug, S, State, Actions, - Hibernate, TimeoutsR, Postpone, NextEventsR); - Time when is_integer(Time), Time >= 0 -> - Timeout = {timeout,Time,Time}, - parse_actions( - Debug, S, State, Actions, - Hibernate, [Timeout|TimeoutsR], Postpone, NextEventsR); + NewHibernate, TimeoutsR, Postpone, NextEventsR); + %% {postpone,NewPostpone} when is_boolean(NewPostpone), Postpone =/= forbidden -> parse_actions( Debug, S, State, Actions, Hibernate, TimeoutsR, NewPostpone, NextEventsR); - {postpone,_} -> - {error, - {bad_action_from_state_function,Action}, - ?STACKTRACE()}; postpone when Postpone =/= forbidden -> + NewPostpone = true, parse_actions( Debug, S, State, Actions, - Hibernate, TimeoutsR, true, NextEventsR); + Hibernate, TimeoutsR, NewPostpone, NextEventsR); + %% {next_event,Type,Content} -> case event_type(Type) of true when NextEventsR =/= forbidden -> @@ -1351,96 +1378,150 @@ parse_actions( {bad_action_from_state_function,Action}, ?STACKTRACE()} end; - _ -> + %% + {state_timeout,_,_} = Timeout -> + parse_actions_timeout( + Debug, S, State, Actions, + Hibernate, TimeoutsR, Postpone, NextEventsR, Timeout); + {timeout,_,_} = Timeout -> + parse_actions_timeout( + Debug, S, State, Actions, + Hibernate, TimeoutsR, Postpone, NextEventsR, Timeout); + Time -> + parse_actions_timeout( + Debug, S, State, Actions, + Hibernate, TimeoutsR, Postpone, NextEventsR, Time) + end. + +parse_actions_timeout( + Debug, S, State, Actions, + Hibernate, TimeoutsR, Postpone, NextEventsR, Timeout) -> + Time = + case Timeout of + {_,T,_} -> T; + T -> T + end, + case validate_time(Time) of + true -> + parse_actions( + Debug, S, State, Actions, + Hibernate, [Timeout|TimeoutsR], + Postpone, NextEventsR); + false -> {error, - {bad_action_from_state_function,Action}, + {bad_action_from_state_function,Timeout}, ?STACKTRACE()} end. +validate_time(Time) when is_integer(Time), Time >= 0 -> true; +validate_time(infinity) -> true; +validate_time(_) -> false. 
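All three timeout action shapes now funnel through parse_actions_timeout/9, so the time field is validated uniformly by validate_time/1: any non-negative integer or infinity passes, anything else becomes a bad_action_from_state_function error. Note also that parse_actions/4 now seeds TimeoutsR with {timeout,infinity,infinity}, so a pending event timeout is cancelled whenever an event is handled. An illustrative fragment of a hypothetical handle_event/4 clause (event names made up):

handle_event(cast, start_work, _State, Data) ->
    {keep_state, Data,
     [{state_timeout, 30000, give_up},   %% cancelled if the state changes
      {timeout, 5000, idle}]}.           %% a bare 5000 would mean {timeout,5000,5000}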
%% Stop and start timers as well as create timeout zero events %% and pending event timer %% %% Stop and start timers non-event timers -parse_timers(TimerRefs, TimerTypes, TimeoutsR) -> - parse_timers(TimerRefs, TimerTypes, TimeoutsR, #{}, []). +parse_timers(TimerRefs, TimerTypes, CancelTimers, TimeoutsR) -> + parse_timers(TimerRefs, TimerTypes, CancelTimers, TimeoutsR, #{}, []). %% -parse_timers(TimerRefs, TimerTypes, [], _Seen, TimeoutEvents) -> - {TimerRefs,TimerTypes,TimeoutEvents}; parse_timers( - TimerRefs, TimerTypes, [Timeout|TimeoutsR], Seen, TimeoutEvents) -> - {TimerType,Time,TimerMsg} = Timeout, + TimerRefs, TimerTypes, CancelTimers, [], _Seen, TimeoutEvents) -> + {TimerRefs,TimerTypes,CancelTimers,TimeoutEvents}; +parse_timers( + TimerRefs, TimerTypes, CancelTimers, [Timeout|TimeoutsR], + Seen, TimeoutEvents) -> + case Timeout of + {TimerType,Time,TimerMsg} -> + parse_timers( + TimerRefs, TimerTypes, CancelTimers, TimeoutsR, + Seen, TimeoutEvents, + TimerType, Time, TimerMsg); + Time -> + parse_timers( + TimerRefs, TimerTypes, CancelTimers, TimeoutsR, + Seen, TimeoutEvents, + timeout, Time, Time) + end. + +parse_timers( + TimerRefs, TimerTypes, CancelTimers, TimeoutsR, + Seen, TimeoutEvents, + TimerType, Time, TimerMsg) -> case Seen of #{TimerType := _} -> %% Type seen before - ignore parse_timers( - TimerRefs, TimerTypes, TimeoutsR, Seen, TimeoutEvents); + TimerRefs, TimerTypes, CancelTimers, TimeoutsR, + Seen, TimeoutEvents); #{} -> %% Unseen type - handle NewSeen = Seen#{TimerType => true}, - %% Cancel any running timer - {NewTimerRefs,NewTimerTypes} = - cancel_timer_by_type(TimerType, TimerRefs, TimerTypes), - if - Time =:= infinity -> - %% Ignore - timer will never fire + case Time of + infinity -> + %% Cancel any running timer + {NewTimerTypes,NewCancelTimers} = + cancel_timer_by_type( + TimerType, TimerTypes, CancelTimers), parse_timers( - NewTimerRefs, NewTimerTypes, TimeoutsR, + TimerRefs, NewTimerTypes, NewCancelTimers, TimeoutsR, NewSeen, TimeoutEvents); - TimerType =:= timeout -> - %% Handle event timer later - parse_timers( - NewTimerRefs, NewTimerTypes, TimeoutsR, - NewSeen, [Timeout|TimeoutEvents]); - Time =:= 0 -> + 0 -> + %% Cancel any running timer + {NewTimerTypes,NewCancelTimers} = + cancel_timer_by_type( + TimerType, TimerTypes, CancelTimers), %% Handle zero time timeouts later TimeoutEvent = {TimerType,TimerMsg}, parse_timers( - NewTimerRefs, NewTimerTypes, TimeoutsR, + TimerRefs, NewTimerTypes, NewCancelTimers, TimeoutsR, NewSeen, [TimeoutEvent|TimeoutEvents]); - true -> - %% Start a new timer - TimerRef = erlang:start_timer(Time, self(), TimerMsg), - parse_timers( - NewTimerRefs#{TimerRef => TimerType}, - NewTimerTypes#{TimerType => TimerRef}, - TimeoutsR, NewSeen, TimeoutEvents) + _ -> + %% (Re)start the timer + TimerRef = + erlang:start_timer(Time, self(), TimerMsg), + case TimerTypes of + #{TimerType := OldTimerRef} -> + %% Cancel the running timer + cancel_timer(OldTimerRef), + NewCancelTimers = CancelTimers + 1, + %% Insert the new timer into + %% both TimerRefs and TimerTypes + parse_timers( + TimerRefs#{TimerRef => TimerType}, + TimerTypes#{TimerType => TimerRef}, + NewCancelTimers, TimeoutsR, + NewSeen, TimeoutEvents); + #{} -> + parse_timers( + TimerRefs#{TimerRef => TimerType}, + TimerTypes#{TimerType => TimerRef}, + CancelTimers, TimeoutsR, + NewSeen, TimeoutEvents) + end end end. 
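A condensed sketch (not the module code itself) of the bookkeeping parse_timers/7 performs for one timer type: the new ref goes into both maps, and a replaced timer is cancelled asynchronously, which bumps the count of expected cancel_timer messages:

restart_state_timeout(TimerRefs0, TimerTypes0, CancelTimers0, Time, Msg) ->
    NewRef = erlang:start_timer(Time, self(), Msg),
    case TimerTypes0 of
        #{state_timeout := OldRef} ->
            %% Asynchronous cancel: a {cancel_timer,OldRef,_} message arrives
            %% later, so OldRef stays in TimerRefs0 until then.
            ok = erlang:cancel_timer(OldRef, [{async,true}]),
            {TimerRefs0#{NewRef => state_timeout},
             TimerTypes0#{state_timeout := NewRef},
             CancelTimers0 + 1};
        #{} ->
            {TimerRefs0#{NewRef => state_timeout},
             TimerTypes0#{state_timeout => NewRef},
             CancelTimers0}
    end.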
-%% Enqueue immediate timeout events and start event timer -process_timeout_events(TimerRefs, TimerTypes, [], EventsR) -> - {TimerRefs, TimerTypes, EventsR}; -process_timeout_events( - TimerRefs, TimerTypes, - [{timeout,0,TimerMsg}|TimeoutEvents], []) -> - %% No enqueued events - insert a timeout zero event - TimeoutEvent = {timeout,TimerMsg}, - process_timeout_events( - TimerRefs, TimerTypes, - TimeoutEvents, [TimeoutEvent]); -process_timeout_events( - TimerRefs, TimerTypes, - [{timeout,Time,TimerMsg}], []) -> - %% No enqueued events - start event timer - TimerRef = erlang:start_timer(Time, self(), TimerMsg), - process_timeout_events( - TimerRefs#{TimerRef => timeout}, TimerTypes#{timeout => TimerRef}, - [], []); -process_timeout_events( - TimerRefs, TimerTypes, - [{timeout,_Time,_TimerMsg}|TimeoutEvents], EventsR) -> - %% There will be some other event so optimize by not starting - %% an event timer to just have to cancel it again - process_timeout_events( - TimerRefs, TimerTypes, - TimeoutEvents, EventsR); -process_timeout_events( - TimerRefs, TimerTypes, - [{_TimeoutType,_TimeoutMsg} = TimeoutEvent|TimeoutEvents], EventsR) -> - process_timeout_events( - TimerRefs, TimerTypes, - TimeoutEvents, [TimeoutEvent|EventsR]). +%% Enqueue immediate timeout events (timeout 0 events) +%% +%% Event timer timeout 0 events gets special treatment since +%% an event timer is cancelled by any received event, +%% so if there are enqueued events before the event timer +%% timeout 0 event - the event timer is cancelled hence no event. +%% +%% Other (state_timeout) timeout 0 events that are after +%% the event timer timeout 0 events are considered to +%% belong to timers that were started after the event timer +%% timeout 0 event fired, so they do not cancel the event timer. +%% +prepend_timeout_events([], EventsR) -> + EventsR; +prepend_timeout_events([{timeout,_} = TimeoutEvent|TimeoutEvents], []) -> + prepend_timeout_events(TimeoutEvents, [TimeoutEvent]); +prepend_timeout_events([{timeout,_}|TimeoutEvents], EventsR) -> + prepend_timeout_events(TimeoutEvents, EventsR); +prepend_timeout_events([TimeoutEvent|TimeoutEvents], EventsR) -> + %% Just prepend all others + prepend_timeout_events(TimeoutEvents, [TimeoutEvent|EventsR]). @@ -1448,18 +1529,11 @@ process_timeout_events( %% Server helpers reply_then_terminate( - Class, Reason, Stacktrace, - Debug, #{state := State} = S, Q, Replies) -> - if - is_list(Replies) -> - do_reply_then_terminate( - Class, Reason, Stacktrace, - Debug, S, Q, Replies, State); - true -> - do_reply_then_terminate( - Class, Reason, Stacktrace, - Debug, S, Q, [Replies], State) - end. + Class, Reason, Stacktrace, Debug, + #{state := State} = S, Q, Replies) -> + do_reply_then_terminate( + Class, Reason, Stacktrace, Debug, + S, Q, listify(Replies), State). 
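As the comment above prepend_timeout_events/2 describes, an event timeout of zero only produces a timeout event when nothing else is queued, whereas a zero state_timeout always does. An illustrative clause (event names made up):

handle_event(cast, kick, _State, Data) ->
    {keep_state, Data,
     [{next_event, internal, do_work},  %% the queue is now non-empty ...
      {timeout, 0, idle},               %% ... so this one is dropped
      {state_timeout, 0, too_slow}]}.   %% delivered as a state_timeout event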
%% do_reply_then_terminate( Class, Reason, Stacktrace, Debug, S, Q, [], _State) -> @@ -1485,21 +1559,25 @@ do_reply(Debug, S, State, From, Reply) -> terminate( - Class, Reason, Stacktrace, - Debug, + Class, Reason, Stacktrace, Debug, #{module := Module, state := State, data := Data, postponed := P} = S, Q) -> - try Module:terminate(Reason, State, Data) of - _ -> ok - catch - _ -> ok; - C:R -> - ST = erlang:get_stacktrace(), - error_info( - C, R, ST, S, Q, P, - format_status(terminate, get(), S)), - sys:print_log(Debug), - erlang:raise(C, R, ST) + case erlang:function_exported(Module, terminate, 3) of + true -> + try Module:terminate(Reason, State, Data) of + _ -> ok + catch + _ -> ok; + C:R -> + ST = erlang:get_stacktrace(), + error_info( + C, R, ST, S, Q, P, + format_status(terminate, get(), S)), + sys:print_log(Debug), + erlang:raise(C, R, ST) + end; + false -> + ok end, _ = case Reason of @@ -1637,28 +1715,21 @@ listify(Item) -> [Item]. %% Cancel timer if running, otherwise no op -cancel_timer_by_type(TimerType, TimerRefs, TimerTypes) -> +%% +%% This is an asynchronous cancel so the timer is not really cancelled +%% until we get a cancel_timer msg i.e {cancel_timer,TimerRef,_}. +%% In the mean time we might get a timeout message. +%% +%% Remove the timer from TimerTypes. +%% When we get the cancel_timer msg we remove it from TimerRefs. +cancel_timer_by_type(TimerType, TimerTypes, CancelTimers) -> case TimerTypes of #{TimerType := TimerRef} -> cancel_timer(TimerRef), - {maps:remove(TimerRef, TimerRefs), - maps:remove(TimerType, TimerTypes)}; + {maps:remove(TimerType, TimerTypes),CancelTimers + 1}; #{} -> - {TimerRefs,TimerTypes} + {TimerTypes,CancelTimers} end. -%%cancel_timer(undefined) -> -%% ok; -cancel_timer(TRef) -> - case erlang:cancel_timer(TRef) of - false -> - %% We have to assume that TRef is the ref of a running timer - %% and if so the timer has expired - %% hence we must wait for the timeout message - receive - {timeout,TRef,_} -> - ok - end; - _TimeLeft -> - ok - end. +cancel_timer(TimerRef) -> + ok = erlang:cancel_timer(TimerRef, [{async,true}]). diff --git a/lib/stdlib/src/io_lib.erl b/lib/stdlib/src/io_lib.erl index ad98bc0420..a91143a764 100644 --- a/lib/stdlib/src/io_lib.erl +++ b/lib/stdlib/src/io_lib.erl @@ -28,7 +28,7 @@ %% Most of the code here is derived from the original prolog versions and %% from similar code written by Joe Armstrong and myself. 
%% -%% This module has been split into seperate modules: +%% This module has been split into separate modules: %% io_lib - basic write and utilities %% io_lib_format - formatted output %% io_lib_fread - formatted input diff --git a/lib/stdlib/src/io_lib_format.erl b/lib/stdlib/src/io_lib_format.erl index c7b75961cb..3113767614 100644 --- a/lib/stdlib/src/io_lib_format.erl +++ b/lib/stdlib/src/io_lib_format.erl @@ -265,7 +265,10 @@ control($W, [A,Depth], F, Adj, P, Pad, _Enc, _Str, _I) when is_integer(Depth) -> term(io_lib:write(A, Depth), F, Adj, P, Pad); control($P, [A,Depth], F, Adj, P, Pad, Enc, Str, I) when is_integer(Depth) -> print(A, Depth, F, Adj, P, Pad, Enc, Str, I); -control($s, [A], F, Adj, P, Pad, _Enc, _Str, _I) when is_atom(A) -> +control($s, [A], F, Adj, P, Pad, latin1, _Str, _I) when is_atom(A) -> + L = iolist_to_chars(atom_to_list(A)), + string(L, F, Adj, P, Pad); +control($s, [A], F, Adj, P, Pad, unicode, _Str, _I) when is_atom(A) -> string(atom_to_list(A), F, Adj, P, Pad); control($s, [L0], F, Adj, P, Pad, latin1, _Str, _I) -> L = iolist_to_chars(L0), diff --git a/lib/stdlib/src/io_lib_pretty.erl b/lib/stdlib/src/io_lib_pretty.erl index 94376408d1..aabccfc5d9 100644 --- a/lib/stdlib/src/io_lib_pretty.erl +++ b/lib/stdlib/src/io_lib_pretty.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2016. All Rights Reserved. +%% Copyright Ericsson AB 1996-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -139,6 +139,10 @@ max_cs(M, _Len) -> M. -define(ATM(T), is_list(element(1, T))). +-define(ATM_PAIR(Pair), + ?ATM(element(2, element(1, Pair))) % Key + andalso + ?ATM(element(3, element(1, Pair)))). % Value -define(ATM_FLD(Field), ?ATM(element(4, element(1, Field)))). pp({_S, Len} = If, Col, Ll, M, _TInd, _Ind, LD, W) @@ -151,9 +155,8 @@ pp({{tuple,true,L}, _Len}, Col, Ll, M, TInd, Ind, LD, W) -> pp({{tuple,false,L}, _Len}, Col, Ll, M, TInd, Ind, LD, W) -> [${, pp_list(L, Col + 1, Ll, M, TInd, indent(1, Ind), LD, $,, W + 1), $}]; pp({{map,Pairs},_Len}, Col, Ll, M, TInd, Ind, LD, W) -> - [$#,${, pp_list(Pairs, Col + 2, Ll, M, TInd, indent(2, Ind), LD, $,, W + 1), $}]; -pp({{map_pair,K,V},_Len}, Col, Ll, M, TInd, Ind, LD, W) -> - [pp(K, Col, Ll, M, TInd, Ind, LD, W), " => ", pp(V, Col, Ll, M, TInd, Ind, LD, W)]; + [$#, ${, pp_map(Pairs, Col + 2, Ll, M, TInd, indent(2, Ind), LD, W + 1), + $}]; pp({{record,[{Name,NLen} | L]}, _Len}, Col, Ll, M, TInd, Ind, LD, W) -> [Name, ${, pp_record(L, NLen, Col, Ll, M, TInd, Ind, LD, W + NLen+1), $}]; pp({{bin,S}, _Len}, Col, Ll, M, _TInd, Ind, LD, W) -> @@ -178,6 +181,46 @@ pp_tag_tuple([{Tag,Tlen} | L], Col, Ll, M, TInd, Ind, LD, W) -> [Tag, S | pp_list(L, Tcol, Ll, M, TInd, Indent, LD, S, W+Tlen+1)] end. +pp_map([], _Col, _Ll, _M, _TInd, _Ind, _LD, _W) -> + ""; +pp_map({dots, _}, _Col, _Ll, _M, _TInd, _Ind, _LD, _W) -> + "..."; +pp_map([P | Ps], Col, Ll, M, TInd, Ind, LD, W) -> + {PS, PW} = pp_pair(P, Col, Ll, M, TInd, Ind, last_depth(Ps, LD), W), + [PS | pp_pairs_tail(Ps, Col, Col + PW, Ll, M, TInd, Ind, LD, PW)]. 
+ +pp_pairs_tail([], _Col0, _Col, _Ll, _M, _TInd, _Ind, _LD, _W) -> + ""; +pp_pairs_tail({dots, _}, _Col0, _Col, _M, _Ll, _TInd, _Ind, _LD, _W) -> + ",..."; +pp_pairs_tail([{_, Len}=P | Ps], Col0, Col, Ll, M, TInd, Ind, LD, W) -> + LD1 = last_depth(Ps, LD), + ELen = 1 + Len, + if + LD1 =:= 0, ELen + 1 < Ll - Col, W + ELen + 1 =< M, ?ATM_PAIR(P); + LD1 > 0, ELen < Ll - Col - LD1, W + ELen + LD1 =< M, ?ATM_PAIR(P) -> + [$,, write_pair(P) | + pp_pairs_tail(Ps, Col0, Col+ELen, Ll, M, TInd, Ind, LD, W+ELen)]; + true -> + {PS, PW} = pp_pair(P, Col0, Ll, M, TInd, Ind, LD1, 0), + [$,, $\n, Ind, PS | + pp_pairs_tail(Ps, Col0, Col0 + PW, Ll, M, TInd, Ind, LD, PW)] + end. + +pp_pair({_, Len}=Pair, Col, Ll, M, _TInd, _Ind, LD, W) + when Len < Ll - Col - LD, Len + W + LD =< M -> + {write_pair(Pair), if + ?ATM_PAIR(Pair) -> + Len; + true -> + Ll % force nl + end}; +pp_pair({{map_pair, K, V}, _Len}, Col0, Ll, M, TInd, Ind0, LD, W) -> + I = map_value_indent(TInd), + Ind = indent(I, Ind0), + {[pp(K, Col0, Ll, M, TInd, Ind0, LD, W), " =>\n", + Ind | pp(V, Col0 + I, Ll, M, TInd, Ind, LD, 0)], Ll}. % force nl + pp_record([], _Nlen, _Col, _Ll, _M, _TInd, _Ind, _LD, _W) -> ""; pp_record({dots, _}, _Nlen, _Col, _Ll, _M, _TInd, _Ind, _LD, _W) -> @@ -216,7 +259,11 @@ pp_field({_, Len}=Fl, Col, Ll, M, _TInd, _Ind, LD, W) end}; pp_field({{field, Name, NameL, F}, _Len}, Col0, Ll, M, TInd, Ind0, LD, W0) -> {Col, Ind, S, W} = rec_indent(NameL, TInd, Col0, Ind0, W0 + NameL), - {[Name, " = ", S | pp(F, Col, Ll, M, TInd, Ind, LD, W)], Ll}. % force nl + Sep = case S of + [$\n | _] -> " ="; + _ -> " = " + end, + {[Name, Sep, S | pp(F, Col, Ll, M, TInd, Ind, LD, W)], Ll}. % force nl rec_indent(RInd, TInd, Col0, Ind0, W0) -> %% this uses TInd @@ -305,8 +352,8 @@ write({{list, L}, _}) -> [$[, write_list(L, $|), $]]; write({{map, Pairs}, _}) -> [$#,${, write_list(Pairs, $,), $}]; -write({{map_pair, K, V}, _}) -> - [write(K)," => ",write(V)]; +write({{map_pair, _K, _V}, _}=Pair) -> + write_pair(Pair); write({{record, [{Name,_} | L]}, _}) -> [Name, ${, write_fields(L), $}]; write({{bin, S}, _}) -> @@ -314,6 +361,9 @@ write({{bin, S}, _}) -> write({S, _}) -> S. +write_pair({{map_pair, K, V}, _}) -> + [write(K), " => ", write(V)]. + write_fields([]) -> ""; write_fields({dots, _}) -> @@ -347,7 +397,7 @@ write_tail(E, S) -> %% The depth (D) is used for extracting and counting the characters to %% print. The structure is kept so that the returned intermediate -%% format can be formatted. The separators (list, tuple, record) are +%% format can be formatted. The separators (list, tuple, record, map) are %% counted but need to be added later. %% D =/= 0 @@ -423,21 +473,22 @@ print_length(Term, _D, _RF, _Enc, _Str) -> print_length_map(_Map, 1, _RF, _Enc, _Str) -> {"#{...}", 6}; print_length_map(Map, D, RF, Enc, Str) when is_map(Map) -> - Pairs = print_length_map_pairs(maps:to_list(Map), D, RF, Enc, Str), + Pairs = print_length_map_pairs(erts_internal:maps_to_list(Map, D), D, RF, Enc, Str), {{map, Pairs}, list_length(Pairs, 3)}. print_length_map_pairs([], _D, _RF, _Enc, _Str) -> []; print_length_map_pairs(_Pairs, 1, _RF, _Enc, _Str) -> {dots, 3}; -print_length_map_pairs([{K,V}|Pairs], D, RF, Enc, Str) -> - [print_length_map_pair(K,V,D-1,RF,Enc,Str) | - print_length_map_pairs(Pairs,D-1,RF,Enc,Str)]. +print_length_map_pairs([{K, V} | Pairs], D, RF, Enc, Str) -> + [print_length_map_pair(K, V, D - 1, RF, Enc, Str) | + print_length_map_pairs(Pairs, D - 1, RF, Enc, Str)]. 
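Since the pairs are now fetched with erts_internal:maps_to_list(Map, D), a depth-limited print only walks roughly D pairs and the {dots,3} entry renders the rest as "...". A small usage sketch (which pairs appear depends on the map's internal order):

show_some(Map) ->
    %% With depth 3, a larger map is printed truncated, ending in ... .
    io:format("~P~n", [Map, 3]).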
print_length_map_pair(K, V, D, RF, Enc, Str) -> {KS, KL} = print_length(K, D, RF, Enc, Str), {VS, VL} = print_length(V, D, RF, Enc, Str), - {{map_pair, {KS,KL}, {VS,VL}}, KL + VL}. + KL1 = KL + 4, + {{map_pair, {KS, KL1}, {VS, VL}}, KL1 + VL}. print_length_tuple(_Tuple, 1, _RF, _Enc, _Str) -> {"{...}", 5}; @@ -630,6 +681,8 @@ cind({{tuple,true,L}, _Len}, Col, Ll, M, Ind, LD, W) -> cind_tag_tuple(L, Col, Ll, M, Ind, LD, W + 1); cind({{tuple,false,L}, _Len}, Col, Ll, M, Ind, LD, W) -> cind_list(L, Col + 1, Ll, M, Ind, LD, W + 1); +cind({{map,Pairs},_Len}, Col, Ll, M, Ind, LD, W) -> + cind_map(Pairs, Col + 2, Ll, M, Ind, LD, W + 2); cind({{record,[{_Name,NLen} | L]}, _Len}, Col, Ll, M, Ind, LD, W) -> cind_record(L, NLen, Col, Ll, M, Ind, LD, W + NLen + 1); cind({{bin,_S}, _Len}, _Col, _Ll, _M, Ind, _LD, _W) -> @@ -655,6 +708,48 @@ cind_tag_tuple([{_Tag,Tlen} | L], Col, Ll, M, Ind, LD, W) -> throw(no_good) end. +cind_map([P | Ps], Col, Ll, M, Ind, LD, W) -> + PW = cind_pair(P, Col, Ll, M, Ind, last_depth(Ps, LD), W), + cind_pairs_tail(Ps, Col, Col + PW, Ll, M, Ind, LD, W + PW); +cind_map(_, _Col, _Ll, _M, Ind, _LD, _W) -> + Ind. + +cind_pairs_tail([{_, Len}=P | Ps], Col0, Col, Ll, M, Ind, LD, W) -> + LD1 = last_depth(Ps, LD), + ELen = 1 + Len, + if + LD1 =:= 0, ELen + 1 < Ll - Col, W + ELen + 1 =< M, ?ATM_PAIR(P); + LD1 > 0, ELen < Ll - Col - LD1, W + ELen + LD1 =< M, ?ATM_PAIR(P) -> + cind_pairs_tail(Ps, Col0, Col + ELen, Ll, M, Ind, LD, W + ELen); + true -> + PW = cind_pair(P, Col0, Ll, M, Ind, LD1, 0), + cind_pairs_tail(Ps, Col0, Col0 + PW, Ll, M, Ind, LD, PW) + end; +cind_pairs_tail(_, _Col0, _Col, _Ll, _M, Ind, _LD, _W) -> + Ind. + +cind_pair({{map_pair, _Key, _Value}, Len}=Pair, Col, Ll, M, _Ind, LD, W) + when Len < Ll - Col - LD, Len + W + LD =< M -> + if + ?ATM_PAIR(Pair) -> + Len; + true -> + Ll + end; +cind_pair({{map_pair, K, V}, _Len}, Col0, Ll, M, Ind, LD, W0) -> + cind(K, Col0, Ll, M, Ind, LD, W0), + I = map_value_indent(Ind), + cind(V, Col0 + I, Ll, M, Ind, LD, 0), + Ll. + +map_value_indent(TInd) -> + case TInd > 0 of + true -> + TInd; + false -> + 4 + end. + cind_record([F | Fs], Nlen, Col0, Ll, M, Ind, LD, W0) -> Nind = Nlen + 1, {Col, W} = cind_rec(Nind, Col0, Ll, M, Ind, W0), diff --git a/lib/stdlib/src/otp_internal.erl b/lib/stdlib/src/otp_internal.erl index 5bf77a5160..fda7a2cd8a 100644 --- a/lib/stdlib/src/otp_internal.erl +++ b/lib/stdlib/src/otp_internal.erl @@ -548,7 +548,14 @@ obsolete_1(queue, lait, 1) -> obsolete_1(overload, _, _) -> {removed, "removed in OTP 19"}; obsolete_1(rpc, safe_multi_server_call, A) when A =:= 2; A =:= 3 -> - {removed, {rpc, multi_server_call, A}}; + {removed, {rpc, multi_server_call, A}, "removed in OTP 19"}; + +%% Added in OTP 20. + +obsolete_1(filename, find_src, 1) -> + {deprecated, "deprecated; use filelib:find_source/1 instead"}; +obsolete_1(filename, find_src, 2) -> + {deprecated, "deprecated; use filelib:find_source/3 instead"}; %% Removed in OTP 20. diff --git a/lib/stdlib/src/proplists.erl b/lib/stdlib/src/proplists.erl index 21de8c45c1..340dfdcac9 100644 --- a/lib/stdlib/src/proplists.erl +++ b/lib/stdlib/src/proplists.erl @@ -83,7 +83,7 @@ property(Key, Value) -> %% --------------------------------------------------------------------- -%% @doc Unfolds all occurences of atoms in <code>ListIn</code> to tuples +%% @doc Unfolds all occurrences of atoms in <code>ListIn</code> to tuples %% <code>{Atom, true}</code>. 
%% %% @see compact/1 diff --git a/lib/stdlib/src/qlc.erl b/lib/stdlib/src/qlc.erl index f3665824f2..8c4d835432 100644 --- a/lib/stdlib/src/qlc.erl +++ b/lib/stdlib/src/qlc.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2004-2016. All Rights Reserved. +%% Copyright Ericsson AB 2004-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -1292,6 +1292,10 @@ abstr_term(Fun, Line) when is_function(Fun) -> end; abstr_term(PPR, Line) when is_pid(PPR); is_port(PPR); is_reference(PPR) -> {special, Line, lists:flatten(io_lib:write(PPR))}; +abstr_term(Map, Line) when is_map(Map) -> + {map,Line, + [{map_field_assoc,Line,abstr_term(K, Line),abstr_term(V, Line)} || + {K,V} <- maps:to_list(Map)]}; abstr_term(Simple, Line) -> erl_parse:abstract(Simple, erl_anno:line(Line)). diff --git a/lib/stdlib/src/qlc_pt.erl b/lib/stdlib/src/qlc_pt.erl index 28221ea75f..4a39f8ae9d 100644 --- a/lib/stdlib/src/qlc_pt.erl +++ b/lib/stdlib/src/qlc_pt.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2004-2016. All Rights Reserved. +%% Copyright Ericsson AB 2004-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -439,7 +439,7 @@ compile_forms(Forms0, Options) -> (_) -> false end, Forms = ([F || F <- Forms0, not Exclude(element(1, F))] - ++ [{eof,anno0()}]), + ++ [{eof,0}]), try case compile:noenv_forms(Forms, compile_options(Options)) of {ok, _ModName, Ws0} -> diff --git a/lib/stdlib/src/shell_default.erl b/lib/stdlib/src/shell_default.erl index cd63ab28b5..a0c1d98513 100644 --- a/lib/stdlib/src/shell_default.erl +++ b/lib/stdlib/src/shell_default.erl @@ -23,7 +23,7 @@ -module(shell_default). --export([help/0,lc/1,c/1,c/2,nc/1,nl/1,l/1,i/0,pid/3,i/3,m/0,m/1,lm/0,mm/0, +-export([help/0,lc/1,c/1,c/2,c/3,nc/1,nl/1,l/1,i/0,pid/3,i/3,m/0,m/1,lm/0,mm/0, memory/0,memory/1,uptime/0, erlangrc/1,bi/1, regs/0, flush/0,pwd/0,ls/0,ls/1,cd/1, y/1, y/2, @@ -72,6 +72,7 @@ bi(I) -> c:bi(I). bt(Pid) -> c:bt(Pid). c(File) -> c:c(File). c(File, Opt) -> c:c(File, Opt). +c(File, Opt, Filter) -> c:c(File, Opt, Filter). cd(D) -> c:cd(D). erlangrc(X) -> c:erlangrc(X). flush() -> c:flush(). diff --git a/lib/stdlib/src/sofs.erl b/lib/stdlib/src/sofs.erl index c244e06ca4..cc50e1b52c 100644 --- a/lib/stdlib/src/sofs.erl +++ b/lib/stdlib/src/sofs.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2001-2016. All Rights Reserved. +%% Copyright Ericsson AB 2001-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -76,7 +76,7 @@ %% %% See also "Naive Set Theory" by Paul R. Halmos. %% -%% By convention, erlang:error/2 is called from exported functions. +%% By convention, erlang:error/1 is called from exported functions. -define(TAG, 'Set'). -define(ORDTAG, 'OrdSet'). @@ -87,12 +87,6 @@ -define(LIST(S), (S)#?TAG.data). -define(TYPE(S), (S)#?TAG.type). -%%-define(SET(L, T), -%% case is_type(T) of -%% true -> #?TAG{data = L, type = T}; -%% false -> erlang:error(badtype, [T]) -%% end -%% ). -define(SET(L, T), #?TAG{data = L, type = T}). -define(IS_SET(S), is_record(S, ?TAG)). -define(IS_UNTYPED_SET(S), ?TYPE(S) =:= ?ANYTYPE). 
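Code still calling filename:find_src/1,2, now flagged as deprecated in otp_internal above, can typically switch to filelib:find_source, which works on the object file name rather than the module. A rough replacement sketch (the wrapper name is made up; code:which/1 may also return atoms such as preloaded, which this sketch does not handle):

find_src(Mod) when is_atom(Mod) ->
    case filelib:find_source(code:which(Mod)) of
        {ok, SrcFile} -> SrcFile;
        {error, _} = Error -> Error
    end.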
@@ -154,11 +148,8 @@ from_term(T) -> _ when is_list(T) -> [?ANYTYPE]; _ -> ?ANYTYPE end, - case catch setify(T, Type) of - {'EXIT', _} -> - erlang:error(badarg, [T]); - Set -> - Set + try setify(T, Type) + catch _:_ -> erlang:error(badarg) end. -spec(from_term(Term, Type) -> AnySet when @@ -168,14 +159,11 @@ from_term(T) -> from_term(L, T) -> case is_type(T) of true -> - case catch setify(L, T) of - {'EXIT', _} -> - erlang:error(badarg, [L, T]); - Set -> - Set + try setify(L, T) + catch _:_ -> erlang:error(badarg) end; false -> - erlang:error(badarg, [L, T]) + erlang:error(badarg) end. -spec(from_external(ExternalSet, Type) -> AnySet when @@ -208,33 +196,26 @@ is_type(_T) -> Set :: a_set(), Terms :: [term()]). set(L) -> - case catch usort(L) of - {'EXIT', _} -> - erlang:error(badarg, [L]); - SL -> - ?SET(SL, ?ATOM_TYPE) + try usort(L) of + SL -> ?SET(SL, ?ATOM_TYPE) + catch _:_ -> erlang:error(badarg) end. -spec(set(Terms, Type) -> Set when Set :: a_set(), Terms :: [term()], Type :: type()). -set(L, ?SET_OF(Type) = T) when ?IS_ATOM_TYPE(Type), Type =/= ?ANYTYPE -> - case catch usort(L) of - {'EXIT', _} -> - erlang:error(badarg, [L, T]); - SL -> - ?SET(SL, Type) +set(L, ?SET_OF(Type)) when ?IS_ATOM_TYPE(Type), Type =/= ?ANYTYPE -> + try usort(L) of + SL -> ?SET(SL, Type) + catch _:_ -> erlang:error(badarg) end; set(L, ?SET_OF(_) = T) -> - case catch setify(L, T) of - {'EXIT', _} -> - erlang:error(badarg, [L, T]); - Set -> - Set + try setify(L, T) + catch _:_ -> erlang:error(badarg) end; -set(L, T) -> - erlang:error(badarg, [L, T]). +set(_, _) -> + erlang:error(badarg). -spec(from_sets(ListOfSets) -> Set when Set :: a_set(), @@ -245,19 +226,19 @@ set(L, T) -> from_sets(Ss) when is_list(Ss) -> case set_of_sets(Ss, [], ?ANYTYPE) of {error, Error} -> - erlang:error(Error, [Ss]); + erlang:error(Error); Set -> Set end; from_sets(Tuple) when is_tuple(Tuple) -> case ordset_of_sets(tuple_to_list(Tuple), [], []) of error -> - erlang:error(badarg, [Tuple]); + erlang:error(badarg); Set -> Set end; -from_sets(T) -> - erlang:error(badarg, [T]). +from_sets(_) -> + erlang:error(badarg). -spec(relation(Tuples) -> Relation when Relation :: relation(), @@ -265,14 +246,11 @@ from_sets(T) -> relation([]) -> ?SET([], ?BINREL(?ATOM_TYPE, ?ATOM_TYPE)); relation(Ts = [T | _]) when is_tuple(T) -> - case catch rel(Ts, tuple_size(T)) of - {'EXIT', _} -> - erlang:error(badarg, [Ts]); - Set -> - Set + try rel(Ts, tuple_size(T)) + catch _:_ -> erlang:error(badarg) end; -relation(E) -> - erlang:error(badarg, [E]). +relation(_) -> + erlang:error(badarg). -spec(relation(Tuples, Type) -> Relation when N :: integer(), @@ -280,24 +258,20 @@ relation(E) -> Relation :: relation(), Tuples :: [tuple()]). relation(Ts, TS) -> - case catch rel(Ts, TS) of - {'EXIT', _} -> - erlang:error(badarg, [Ts, TS]); - Set -> - Set + try rel(Ts, TS) + catch _:_ -> erlang:error(badarg) end. -spec(a_function(Tuples) -> Function when Function :: a_function(), Tuples :: [tuple()]). a_function(Ts) -> - case catch func(Ts, ?BINREL(?ATOM_TYPE, ?ATOM_TYPE)) of - {'EXIT', _} -> - erlang:error(badarg, [Ts]); + try func(Ts, ?BINREL(?ATOM_TYPE, ?ATOM_TYPE)) of Bad when is_atom(Bad) -> - erlang:error(Bad, [Ts]); - Set -> - Set + erlang:error(Bad); + Set -> + Set + catch _:_ -> erlang:error(badarg) end. -spec(a_function(Tuples, Type) -> Function when @@ -305,26 +279,24 @@ a_function(Ts) -> Tuples :: [tuple()], Type :: type()). 
a_function(Ts, T) -> - case catch a_func(Ts, T) of - {'EXIT', _} -> - erlang:error(badarg, [Ts, T]); + try a_func(Ts, T) of Bad when is_atom(Bad) -> - erlang:error(Bad, [Ts, T]); + erlang:error(Bad); Set -> Set + catch _:_ -> erlang:error(badarg) end. -spec(family(Tuples) -> Family when Family :: family(), Tuples :: [tuple()]). family(Ts) -> - case catch fam2(Ts, ?FAMILY(?ATOM_TYPE, ?ATOM_TYPE)) of - {'EXIT', _} -> - erlang:error(badarg, [Ts]); + try fam2(Ts, ?FAMILY(?ATOM_TYPE, ?ATOM_TYPE)) of Bad when is_atom(Bad) -> - erlang:error(Bad, [Ts]); + erlang:error(Bad); Set -> Set + catch _:_ -> erlang:error(badarg) end. -spec(family(Tuples, Type) -> Family when @@ -332,13 +304,12 @@ family(Ts) -> Tuples :: [tuple()], Type :: type()). family(Ts, T) -> - case catch fam(Ts, T) of - {'EXIT', _} -> - erlang:error(badarg, [Ts, T]); + try fam(Ts, T) of Bad when is_atom(Bad) -> - erlang:error(Bad, [Ts, T]); + erlang:error(Bad); Set -> Set + catch _:_ -> erlang:error(badarg) end. %%% @@ -373,7 +344,7 @@ to_sets(S) when ?IS_SET(S) -> to_sets(S) when ?IS_ORDSET(S), is_tuple(?ORDTYPE(S)) -> tuple_of_sets(tuple_to_list(?ORDDATA(S)), tuple_to_list(?ORDTYPE(S)), []); to_sets(S) when ?IS_ORDSET(S) -> - erlang:error(badarg, [S]). + erlang:error(badarg). -spec(no_elements(ASet) -> NoElements when ASet :: a_set() | ordset(), @@ -383,7 +354,7 @@ no_elements(S) when ?IS_SET(S) -> no_elements(S) when ?IS_ORDSET(S), is_tuple(?ORDTYPE(S)) -> tuple_size(?ORDDATA(S)); no_elements(S) when ?IS_ORDSET(S) -> - erlang:error(badarg, [S]). + erlang:error(badarg). -spec(specification(Fun, Set1) -> Set2 when Fun :: spec_fun(), @@ -401,7 +372,7 @@ specification(Fun, S) when ?IS_SET(S) -> SL when is_list(SL) -> ?SET(SL, Type); Bad -> - erlang:error(Bad, [Fun, S]) + erlang:error(Bad) end. -spec(union(Set1, Set2) -> Set3 when @@ -410,7 +381,7 @@ specification(Fun, S) when ?IS_SET(S) -> Set3 :: a_set()). union(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) -> case unify_types(?TYPE(S1), ?TYPE(S2)) of - [] -> erlang:error(type_mismatch, [S1, S2]); + [] -> erlang:error(type_mismatch); Type -> ?SET(umerge(?LIST(S1), ?LIST(S2)), Type) end. @@ -420,7 +391,7 @@ union(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) -> Set3 :: a_set()). intersection(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) -> case unify_types(?TYPE(S1), ?TYPE(S2)) of - [] -> erlang:error(type_mismatch, [S1, S2]); + [] -> erlang:error(type_mismatch); Type -> ?SET(intersection(?LIST(S1), ?LIST(S2), []), Type) end. @@ -430,7 +401,7 @@ intersection(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) -> Set3 :: a_set()). difference(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) -> case unify_types(?TYPE(S1), ?TYPE(S2)) of - [] -> erlang:error(type_mismatch, [S1, S2]); + [] -> erlang:error(type_mismatch); Type -> ?SET(difference(?LIST(S1), ?LIST(S2), []), Type) end. @@ -440,7 +411,7 @@ difference(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) -> Set3 :: a_set()). symdiff(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) -> case unify_types(?TYPE(S1), ?TYPE(S2)) of - [] -> erlang:error(type_mismatch, [S1, S2]); + [] -> erlang:error(type_mismatch); Type -> ?SET(symdiff(?LIST(S1), ?LIST(S2), []), Type) end. @@ -452,7 +423,7 @@ symdiff(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) -> Set5 :: a_set()). symmetric_partition(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) -> case unify_types(?TYPE(S1), ?TYPE(S2)) of - [] -> erlang:error(type_mismatch, [S1, S2]); + [] -> erlang:error(type_mismatch); Type -> sympart(?LIST(S1), ?LIST(S2), [], [], [], Type) end. 
@@ -477,11 +448,9 @@ product({S1, S2}) -> product(S1, S2); product(T) when is_tuple(T) -> Ss = tuple_to_list(T), - case catch sets_to_list(Ss) of - {'EXIT', _} -> - erlang:error(badarg, [T]); + try sets_to_list(Ss) of [] -> - erlang:error(badarg, [T]); + erlang:error(badarg); L -> Type = types(Ss, []), case member([], L) of @@ -490,6 +459,7 @@ product(T) when is_tuple(T) -> false -> ?SET(reverse(prod(L, [], [])), Type) end + catch _:_ -> erlang:error(badarg) end. -spec(constant_function(Set, AnySet) -> Function when @@ -502,10 +472,10 @@ constant_function(S, E) when ?IS_SET(S) -> {Type, true} -> NType = ?BINREL(Type, type(E)), ?SET(constant_function(?LIST(S), to_external(E), []), NType); - _ -> erlang:error(badarg, [S, E]) + _ -> erlang:error(badarg) end; -constant_function(S, E) when ?IS_ORDSET(S) -> - erlang:error(badarg, [S, E]). +constant_function(S, _) when ?IS_ORDSET(S) -> + erlang:error(badarg). -spec(is_equal(AnySet1, AnySet2) -> Bool when AnySet1 :: anyset(), @@ -514,17 +484,17 @@ constant_function(S, E) when ?IS_ORDSET(S) -> is_equal(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) -> case match_types(?TYPE(S1), ?TYPE(S2)) of true -> ?LIST(S1) == ?LIST(S2); - false -> erlang:error(type_mismatch, [S1, S2]) + false -> erlang:error(type_mismatch) end; is_equal(S1, S2) when ?IS_ORDSET(S1), ?IS_ORDSET(S2) -> case match_types(?ORDTYPE(S1), ?ORDTYPE(S2)) of true -> ?ORDDATA(S1) == ?ORDDATA(S2); - false -> erlang:error(type_mismatch, [S1, S2]) + false -> erlang:error(type_mismatch) end; is_equal(S1, S2) when ?IS_SET(S1), ?IS_ORDSET(S2) -> - erlang:error(type_mismatch, [S1, S2]); + erlang:error(type_mismatch); is_equal(S1, S2) when ?IS_ORDSET(S1), ?IS_SET(S2) -> - erlang:error(type_mismatch, [S1, S2]). + erlang:error(type_mismatch). -spec(is_subset(Set1, Set2) -> Bool when Bool :: boolean(), @@ -533,7 +503,7 @@ is_equal(S1, S2) when ?IS_ORDSET(S1), ?IS_SET(S2) -> is_subset(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) -> case match_types(?TYPE(S1), ?TYPE(S2)) of true -> subset(?LIST(S1), ?LIST(S2)); - false -> erlang:error(type_mismatch, [S1, S2]) + false -> erlang:error(type_mismatch) end. -spec(is_sofs_set(Term) -> Bool when @@ -573,7 +543,7 @@ is_disjoint(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) -> [] -> true; [A | As] -> disjoint(?LIST(S2), A, As) end; - false -> erlang:error(type_mismatch, [S1, S2]) + false -> erlang:error(type_mismatch) end. %%% @@ -587,7 +557,7 @@ union(Sets) when ?IS_SET(Sets) -> case ?TYPE(Sets) of ?SET_OF(Type) -> ?SET(lunion(?LIST(Sets)), Type); ?ANYTYPE -> Sets; - _ -> erlang:error(badarg, [Sets]) + _ -> erlang:error(badarg) end. -spec(intersection(SetOfSets) -> Set when @@ -595,12 +565,12 @@ union(Sets) when ?IS_SET(Sets) -> SetOfSets :: set_of_sets()). intersection(Sets) when ?IS_SET(Sets) -> case ?LIST(Sets) of - [] -> erlang:error(badarg, [Sets]); + [] -> erlang:error(badarg); [L | Ls] -> case ?TYPE(Sets) of ?SET_OF(Type) -> ?SET(lintersection(Ls, L), Type); - _ -> erlang:error(badarg, [Sets]) + _ -> erlang:error(badarg) end end. @@ -614,7 +584,7 @@ canonical_relation(Sets) when ?IS_SET(Sets) -> ?SET_OF(Type) -> ?SET(can_rel(?LIST(Sets), []), ?BINREL(Type, ST)); ?ANYTYPE -> Sets; - _ -> erlang:error(badarg, [Sets]) + _ -> erlang:error(badarg) end. %%% @@ -636,7 +606,7 @@ relation_to_family(R) when ?IS_SET(R) -> ?BINREL(DT, RT) -> ?SET(rel2family(?LIST(R)), ?FAMILY(DT, RT)); ?ANYTYPE -> R; - _Else -> erlang:error(badarg, [R]) + _Else -> erlang:error(badarg) end. 
-spec(domain(BinRel) -> Set when @@ -646,7 +616,7 @@ domain(R) when ?IS_SET(R) -> case ?TYPE(R) of ?BINREL(DT, _) -> ?SET(dom(?LIST(R)), DT); ?ANYTYPE -> R; - _Else -> erlang:error(badarg, [R]) + _Else -> erlang:error(badarg) end. -spec(range(BinRel) -> Set when @@ -656,7 +626,7 @@ range(R) when ?IS_SET(R) -> case ?TYPE(R) of ?BINREL(_, RT) -> ?SET(ran(?LIST(R), []), RT); ?ANYTYPE -> R; - _ -> erlang:error(badarg, [R]) + _ -> erlang:error(badarg) end. -spec(field(BinRel) -> Set when @@ -679,7 +649,7 @@ relative_product(RT) when is_tuple(RT) -> relative_product(RL) when is_list(RL) -> case relprod_n(RL, foo, false, false) of {error, Reason} -> - erlang:error(Reason, [RL]); + erlang:error(Reason); Reply -> Reply end. @@ -703,11 +673,11 @@ relative_product(RL, R) when is_list(RL), ?IS_SET(R) -> EmptyR = case ?TYPE(R) of ?BINREL(_, _) -> ?LIST(R) =:= []; ?ANYTYPE -> true; - _ -> erlang:error(badarg, [RL, R]) + _ -> erlang:error(badarg) end, case relprod_n(RL, R, EmptyR, true) of {error, Reason} -> - erlang:error(Reason, [RL, R]); + erlang:error(Reason); Reply -> Reply end. @@ -720,18 +690,18 @@ relative_product1(R1, R2) when ?IS_SET(R1), ?IS_SET(R2) -> {DTR1, RTR1} = case ?TYPE(R1) of ?BINREL(_, _) = R1T -> R1T; ?ANYTYPE -> {?ANYTYPE, ?ANYTYPE}; - _ -> erlang:error(badarg, [R1, R2]) + _ -> erlang:error(badarg) end, {DTR2, RTR2} = case ?TYPE(R2) of ?BINREL(_, _) = R2T -> R2T; ?ANYTYPE -> {?ANYTYPE, ?ANYTYPE}; - _ -> erlang:error(badarg, [R1, R2]) + _ -> erlang:error(badarg) end, case match_types(DTR1, DTR2) of true when DTR1 =:= ?ANYTYPE -> R1; true when DTR2 =:= ?ANYTYPE -> R2; true -> ?SET(relprod(?LIST(R1), ?LIST(R2)), ?BINREL(RTR1, RTR2)); - false -> erlang:error(type_mismatch, [R1, R2]) + false -> erlang:error(type_mismatch) end. -spec(converse(BinRel1) -> BinRel2 when @@ -741,7 +711,7 @@ converse(R) when ?IS_SET(R) -> case ?TYPE(R) of ?BINREL(DT, RT) -> ?SET(converse(?LIST(R), []), ?BINREL(RT, DT)); ?ANYTYPE -> R; - _ -> erlang:error(badarg, [R]) + _ -> erlang:error(badarg) end. -spec(image(BinRel, Set1) -> Set2 when @@ -755,10 +725,10 @@ image(R, S) when ?IS_SET(R), ?IS_SET(S) -> true -> ?SET(usort(restrict(?LIST(S), ?LIST(R))), RT); false -> - erlang:error(type_mismatch, [R, S]) + erlang:error(type_mismatch) end; ?ANYTYPE -> R; - _ -> erlang:error(badarg, [R, S]) + _ -> erlang:error(badarg) end. -spec(inverse_image(BinRel, Set1) -> Set2 when @@ -773,10 +743,10 @@ inverse_image(R, S) when ?IS_SET(R), ?IS_SET(S) -> NL = restrict(?LIST(S), converse(?LIST(R), [])), ?SET(usort(NL), DT); false -> - erlang:error(type_mismatch, [R, S]) + erlang:error(type_mismatch) end; ?ANYTYPE -> R; - _ -> erlang:error(badarg, [R, S]) + _ -> erlang:error(badarg) end. -spec(strict_relation(BinRel1) -> BinRel2 when @@ -787,7 +757,7 @@ strict_relation(R) when ?IS_SET(R) -> Type = ?BINREL(_, _) -> ?SET(strict(?LIST(R), []), Type); ?ANYTYPE -> R; - _ -> erlang:error(badarg, [R]) + _ -> erlang:error(badarg) end. -spec(weak_relation(BinRel1) -> BinRel2 when @@ -798,12 +768,12 @@ weak_relation(R) when ?IS_SET(R) -> ?BINREL(DT, RT) -> case unify_types(DT, RT) of [] -> - erlang:error(badarg, [R]); + erlang:error(badarg); Type -> ?SET(weak(?LIST(R)), ?BINREL(Type, Type)) end; ?ANYTYPE -> R; - _ -> erlang:error(badarg, [R]) + _ -> erlang:error(badarg) end. 
-spec(extension(BinRel1, Set, AnySet) -> BinRel2 when @@ -816,7 +786,7 @@ extension(R, S, E) when ?IS_SET(R), ?IS_SET(S) -> {T=?BINREL(DT, RT), ST, true} -> case match_types(DT, ST) and match_types(RT, type(E)) of false -> - erlang:error(type_mismatch, [R, S, E]); + erlang:error(type_mismatch); true -> RL = ?LIST(R), case extc([], ?LIST(S), to_external(E), RL) of @@ -836,7 +806,7 @@ extension(R, S, E) when ?IS_SET(R), ?IS_SET(S) -> ?SET([], ?BINREL(ST, ET)) end; {_, _, true} -> - erlang:error(badarg, [R, S, E]) + erlang:error(badarg) end. -spec(is_a_function(BinRel) -> Bool when @@ -850,7 +820,7 @@ is_a_function(R) when ?IS_SET(R) -> [{V,_} | Es] -> is_a_func(Es, V) end; ?ANYTYPE -> true; - _ -> erlang:error(badarg, [R]) + _ -> erlang:error(badarg) end. -spec(restriction(BinRel1, Set) -> BinRel2 when @@ -879,12 +849,12 @@ composite(Fn1, Fn2) when ?IS_SET(Fn1), ?IS_SET(Fn2) -> ?BINREL(DTF1, RTF1) = case ?TYPE(Fn1)of ?BINREL(_, _) = F1T -> F1T; ?ANYTYPE -> {?ANYTYPE, ?ANYTYPE}; - _ -> erlang:error(badarg, [Fn1, Fn2]) + _ -> erlang:error(badarg) end, ?BINREL(DTF2, RTF2) = case ?TYPE(Fn2) of ?BINREL(_, _) = F2T -> F2T; ?ANYTYPE -> {?ANYTYPE, ?ANYTYPE}; - _ -> erlang:error(badarg, [Fn1, Fn2]) + _ -> erlang:error(badarg) end, case match_types(RTF1, DTF2) of true when DTF1 =:= ?ANYTYPE -> Fn1; @@ -894,9 +864,9 @@ composite(Fn1, Fn2) when ?IS_SET(Fn1), ?IS_SET(Fn2) -> SL when is_list(SL) -> ?SET(sort(SL), ?BINREL(DTF1, RTF2)); Bad -> - erlang:error(Bad, [Fn1, Fn2]) + erlang:error(Bad) end; - false -> erlang:error(type_mismatch, [Fn1, Fn2]) + false -> erlang:error(type_mismatch) end. -spec(inverse(Function1) -> Function2 when @@ -909,10 +879,10 @@ inverse(Fn) when ?IS_SET(Fn) -> SL when is_list(SL) -> ?SET(SL, ?BINREL(RT, DT)); Bad -> - erlang:error(Bad, [Fn]) + erlang:error(Bad) end; ?ANYTYPE -> Fn; - _ -> erlang:error(badarg, [Fn]) + _ -> erlang:error(badarg) end. %%% @@ -932,7 +902,7 @@ restriction(I, R, S) when is_integer(I), ?IS_SET(R), ?IS_SET(S) -> empty -> R; error -> - erlang:error(badarg, [I, R, S]); + erlang:error(badarg); Sort -> RL = ?LIST(R), case {match_types(?REL_TYPE(I, RT), ST), ?LIST(S)} of @@ -945,7 +915,7 @@ restriction(I, R, S) when is_integer(I), ?IS_SET(R), ?IS_SET(S) -> {true, [E | Es]} -> ?SET(sort(restrict_n(I, keysort(I, RL), E, Es, [])), RT); {false, _SL} -> - erlang:error(type_mismatch, [I, R, S]) + erlang:error(type_mismatch) end end; restriction(SetFun, S1, S2) when ?IS_SET(S1), ?IS_SET(S2) -> @@ -963,28 +933,27 @@ restriction(SetFun, S1, S2) when ?IS_SET(S1), ?IS_SET(S2) -> NL = sort(restrict(?LIST(S2), converse(NSL, []))), ?SET(NL, Type1); false -> - erlang:error(type_mismatch, [SetFun, S1, S2]) + erlang:error(type_mismatch) end; Bad -> - erlang:error(Bad, [SetFun, S1, S2]) + erlang:error(Bad) end; _ when Type1 =:= ?ANYTYPE -> S1; _XFun when ?IS_SET_OF(Type1) -> - erlang:error(badarg, [SetFun, S1, S2]); + erlang:error(badarg); XFun -> FunT = XFun(Type1), - case catch check_fun(Type1, XFun, FunT) of - {'EXIT', _} -> - erlang:error(badarg, [SetFun, S1, S2]); + try check_fun(Type1, XFun, FunT) of Sort -> case match_types(FunT, Type2) of true -> R1 = inverse_substitution(SL1, XFun, Sort), ?SET(sort(Sort, restrict(?LIST(S2), R1)), Type1); false -> - erlang:error(type_mismatch, [SetFun, S1, S2]) + erlang:error(type_mismatch) end + catch _:_ -> erlang:error(badarg) end end. 
@@ -1000,7 +969,7 @@ drestriction(I, R, S) when is_integer(I), ?IS_SET(R), ?IS_SET(S) -> empty -> R; error -> - erlang:error(badarg, [I, R, S]); + erlang:error(badarg); Sort -> RL = ?LIST(R), case {match_types(?REL_TYPE(I, RT), ST), ?LIST(S)} of @@ -1013,7 +982,7 @@ drestriction(I, R, S) when is_integer(I), ?IS_SET(R), ?IS_SET(S) -> {true, [E | Es]} -> ?SET(diff_restrict_n(I, keysort(I, RL), E, Es, []), RT); {false, _SL} -> - erlang:error(type_mismatch, [I, R, S]) + erlang:error(type_mismatch) end end; drestriction(SetFun, S1, S2) when ?IS_SET(S1), ?IS_SET(S2) -> @@ -1032,20 +1001,18 @@ drestriction(SetFun, S1, S2) when ?IS_SET(S1), ?IS_SET(S2) -> NL = sort(diff_restrict(SL2, converse(NSL, []))), ?SET(NL, Type1); false -> - erlang:error(type_mismatch, [SetFun, S1, S2]) + erlang:error(type_mismatch) end; Bad -> - erlang:error(Bad, [SetFun, S1, S2]) + erlang:error(Bad) end; _ when Type1 =:= ?ANYTYPE -> S1; _XFun when ?IS_SET_OF(Type1) -> - erlang:error(badarg, [SetFun, S1, S2]); + erlang:error(badarg); XFun -> FunT = XFun(Type1), - case catch check_fun(Type1, XFun, FunT) of - {'EXIT', _} -> - erlang:error(badarg, [SetFun, S1, S2]); + try check_fun(Type1, XFun, FunT) of Sort -> case match_types(FunT, Type2) of true -> @@ -1053,8 +1020,9 @@ drestriction(SetFun, S1, S2) when ?IS_SET(S1), ?IS_SET(S2) -> SL2 = ?LIST(S2), ?SET(sort(Sort, diff_restrict(SL2, R1)), Type1); false -> - erlang:error(type_mismatch, [SetFun, S1, S2]) + erlang:error(type_mismatch) end + catch _:_ -> erlang:error(badarg) end end. @@ -1068,7 +1036,7 @@ projection(I, Set) when is_integer(I), ?IS_SET(Set) -> empty -> Set; error -> - erlang:error(badarg, [I, Set]); + erlang:error(badarg); _ when I =:= 1 -> ?SET(projection1(?LIST(Set)), ?REL_TYPE(I, Type)); _ -> @@ -1087,7 +1055,7 @@ substitution(I, Set) when is_integer(I), ?IS_SET(Set) -> empty -> Set; error -> - erlang:error(badarg, [I, Set]); + erlang:error(badarg); _Sort -> NType = ?REL_TYPE(I, Type), NSL = substitute_element(?LIST(Set), I, []), @@ -1102,22 +1070,21 @@ substitution(SetFun, Set) when ?IS_SET(Set) -> {SL, NewType} -> ?SET(reverse(SL), ?BINREL(Type, NewType)); Bad -> - erlang:error(Bad, [SetFun, Set]) + erlang:error(Bad) end; false -> empty_set(); _ when Type =:= ?ANYTYPE -> empty_set(); _XFun when ?IS_SET_OF(Type) -> - erlang:error(badarg, [SetFun, Set]); + erlang:error(badarg); XFun -> FunT = XFun(Type), - case catch check_fun(Type, XFun, FunT) of - {'EXIT', _} -> - erlang:error(badarg, [SetFun, Set]); + try check_fun(Type, XFun, FunT) of _Sort -> SL = substitute(L, XFun, []), ?SET(SL, ?BINREL(Type, FunT)) + catch _:_ -> erlang:error(badarg) end end. 
@@ -1139,7 +1106,7 @@ partition(I, Set) when is_integer(I), ?IS_SET(Set) -> empty -> Set; error -> - erlang:error(badarg, [I, Set]); + erlang:error(badarg); false -> % I =:= 1 ?SET(partition_n(I, ?LIST(Set)), ?SET_OF(Type)); true -> @@ -1161,7 +1128,7 @@ partition(I, R, S) when is_integer(I), ?IS_SET(R), ?IS_SET(S) -> empty -> {R, R}; error -> - erlang:error(badarg, [I, R, S]); + erlang:error(badarg); Sort -> RL = ?LIST(R), case {match_types(?REL_TYPE(I, RT), ST), ?LIST(S)} of @@ -1176,7 +1143,7 @@ partition(I, R, S) when is_integer(I), ?IS_SET(R), ?IS_SET(S) -> [L1 | L2] = partition3_n(I, keysort(I,RL), E, Es, [], []), {?SET(L1, RT), ?SET(L2, RT)}; {false, _SL} -> - erlang:error(type_mismatch, [I, R, S]) + erlang:error(type_mismatch) end end; partition(SetFun, S1, S2) when ?IS_SET(S1), ?IS_SET(S2) -> @@ -1195,20 +1162,18 @@ partition(SetFun, S1, S2) when ?IS_SET(S1), ?IS_SET(S2) -> [L1 | L2] = partition3(?LIST(S2), R1), {?SET(sort(L1), Type1), ?SET(sort(L2), Type1)}; false -> - erlang:error(type_mismatch, [SetFun, S1, S2]) + erlang:error(type_mismatch) end; Bad -> - erlang:error(Bad, [SetFun, S1, S2]) + erlang:error(Bad) end; _ when Type1 =:= ?ANYTYPE -> {S1, S1}; _XFun when ?IS_SET_OF(Type1) -> - erlang:error(badarg, [SetFun, S1, S2]); + erlang:error(badarg); XFun -> FunT = XFun(Type1), - case catch check_fun(Type1, XFun, FunT) of - {'EXIT', _} -> - erlang:error(badarg, [SetFun, S1, S2]); + try check_fun(Type1, XFun, FunT) of Sort -> case match_types(FunT, Type2) of true -> @@ -1216,8 +1181,9 @@ partition(SetFun, S1, S2) when ?IS_SET(S1), ?IS_SET(S2) -> [L1 | L2] = partition3(?LIST(S2), R1), {?SET(sort(L1), Type1), ?SET(sort(L2), Type1)}; false -> - erlang:error(type_mismatch, [SetFun, S1, S2]) + erlang:error(type_mismatch) end + catch _:_ -> erlang:error(badarg) end end. @@ -1234,7 +1200,7 @@ multiple_relative_product(T, R) when is_tuple(T), ?IS_SET(R) -> MProd = mul_relprod(tuple_to_list(T), 1, R), relative_product(MProd); false -> - erlang:error(badarg, [T, R]) + erlang:error(badarg) end. -spec(join(Relation1, I, Relation2, J) -> Relation3 when @@ -1246,8 +1212,7 @@ multiple_relative_product(T, R) when is_tuple(T), ?IS_SET(R) -> join(R1, I1, R2, I2) when ?IS_SET(R1), ?IS_SET(R2), is_integer(I1), is_integer(I2) -> case test_rel(R1, I1, lte) and test_rel(R2, I2, lte) of - false -> - erlang:error(badarg, [R1, I1, R2, I2]); + false -> erlang:error(badarg); true when ?TYPE(R1) =:= ?ANYTYPE -> R1; true when ?TYPE(R2) =:= ?ANYTYPE -> R2; true -> @@ -1294,7 +1259,7 @@ family_to_relation(F) when ?IS_SET(F) -> ?FAMILY(DT, RT) -> ?SET(family2rel(?LIST(F), []), ?BINREL(DT, RT)); ?ANYTYPE -> F; - _ -> erlang:error(badarg, [F]) + _ -> erlang:error(badarg) end. -spec(family_specification(Fun, Family1) -> Family2 when @@ -1314,10 +1279,10 @@ family_specification(Fun, F) when ?IS_SET(F) -> SL when is_list(SL) -> ?SET(SL, FType); Bad -> - erlang:error(Bad, [Fun, F]) + erlang:error(Bad) end; ?ANYTYPE -> F; - _ -> erlang:error(badarg, [Fun, F]) + _ -> erlang:error(badarg) end. -spec(union_of_family(Family) -> Set when @@ -1328,7 +1293,7 @@ union_of_family(F) when ?IS_SET(F) -> ?FAMILY(_DT, Type) -> ?SET(un_of_fam(?LIST(F), []), Type); ?ANYTYPE -> F; - _ -> erlang:error(badarg, [F]) + _ -> erlang:error(badarg) end. -spec(intersection_of_family(Family) -> Set when @@ -1341,9 +1306,9 @@ intersection_of_family(F) when ?IS_SET(F) -> FU when is_list(FU) -> ?SET(FU, Type); Bad -> - erlang:error(Bad, [F]) + erlang:error(Bad) end; - _ -> erlang:error(badarg, [F]) + _ -> erlang:error(badarg) end. 
-spec(family_union(Family1) -> Family2 when @@ -1354,7 +1319,7 @@ family_union(F) when ?IS_SET(F) -> ?FAMILY(DT, ?SET_OF(Type)) -> ?SET(fam_un(?LIST(F), []), ?FAMILY(DT, Type)); ?ANYTYPE -> F; - _ -> erlang:error(badarg, [F]) + _ -> erlang:error(badarg) end. -spec(family_intersection(Family1) -> Family2 when @@ -1367,10 +1332,10 @@ family_intersection(F) when ?IS_SET(F) -> FU when is_list(FU) -> ?SET(FU, ?FAMILY(DT, Type)); Bad -> - erlang:error(Bad, [F]) + erlang:error(Bad) end; ?ANYTYPE -> F; - _ -> erlang:error(badarg, [F]) + _ -> erlang:error(badarg) end. -spec(family_domain(Family1) -> Family2 when @@ -1382,7 +1347,7 @@ family_domain(F) when ?IS_SET(F) -> ?SET(fam_dom(?LIST(F), []), ?FAMILY(FDT, DT)); ?ANYTYPE -> F; ?FAMILY(_, ?ANYTYPE) -> F; - _ -> erlang:error(badarg, [F]) + _ -> erlang:error(badarg) end. -spec(family_range(Family1) -> Family2 when @@ -1394,7 +1359,7 @@ family_range(F) when ?IS_SET(F) -> ?SET(fam_ran(?LIST(F), []), ?FAMILY(DT, RT)); ?ANYTYPE -> F; ?FAMILY(_, ?ANYTYPE) -> F; - _ -> erlang:error(badarg, [F]) + _ -> erlang:error(badarg) end. -spec(family_field(Family1) -> Family2 when @@ -1428,12 +1393,12 @@ family_difference(F1, F2) -> fam_binop(F1, F2, FF) when ?IS_SET(F1), ?IS_SET(F2) -> case unify_types(?TYPE(F1), ?TYPE(F2)) of [] -> - erlang:error(type_mismatch, [F1, F2]); + erlang:error(type_mismatch); ?ANYTYPE -> F1; Type = ?FAMILY(_, _) -> ?SET(FF(?LIST(F1), ?LIST(F2), []), Type); - _ -> erlang:error(badarg, [F1, F2]) + _ -> erlang:error(badarg) end. -spec(partition_family(SetFun, Set) -> Family when @@ -1446,7 +1411,7 @@ partition_family(I, Set) when is_integer(I), ?IS_SET(Set) -> empty -> Set; error -> - erlang:error(badarg, [I, Set]); + erlang:error(badarg); false -> % when I =:= 1 ?SET(fam_partition_n(I, ?LIST(Set)), ?BINREL(?REL_TYPE(I, Type), ?SET_OF(Type))); @@ -1464,23 +1429,22 @@ partition_family(SetFun, Set) when ?IS_SET(Set) -> P = fam_partition(converse(NSL, []), true), ?SET(reverse(P), ?BINREL(NewType, ?SET_OF(Type))); Bad -> - erlang:error(Bad, [SetFun, Set]) + erlang:error(Bad) end; false -> empty_set(); _ when Type =:= ?ANYTYPE -> empty_set(); _XFun when ?IS_SET_OF(Type) -> - erlang:error(badarg, [SetFun, Set]); + erlang:error(badarg); XFun -> DType = XFun(Type), - case catch check_fun(Type, XFun, DType) of - {'EXIT', _} -> - erlang:error(badarg, [SetFun, Set]); + try check_fun(Type, XFun, DType) of Sort -> Ts = inverse_substitution(?LIST(Set), XFun, Sort), P = fam_partition(Ts, Sort), ?SET(reverse(P), ?BINREL(DType, ?SET_OF(Type))) + catch _:_ -> erlang:error(badarg) end end. @@ -1499,13 +1463,13 @@ family_projection(SetFun, F) when ?IS_SET(F) -> {SL, NewType} -> ?SET(SL, ?BINREL(DT, NewType)); Bad -> - erlang:error(Bad, [SetFun, F]) + erlang:error(Bad) end; _ -> - erlang:error(badarg, [SetFun, F]) + erlang:error(badarg) end; ?ANYTYPE -> F; - _ -> erlang:error(badarg, [SetFun, F]) + _ -> erlang:error(badarg) end. %%% @@ -1519,7 +1483,7 @@ family_to_digraph(F) when ?IS_SET(F) -> case ?TYPE(F) of ?FAMILY(_, _) -> fam2digraph(F, digraph:new()); ?ANYTYPE -> digraph:new(); - _Else -> erlang:error(badarg, [F]) + _Else -> erlang:error(badarg) end. 
-spec(family_to_digraph(Family, GraphType) -> Graph when @@ -1530,27 +1494,27 @@ family_to_digraph(F, Type) when ?IS_SET(F) -> case ?TYPE(F) of ?FAMILY(_, _) -> ok; ?ANYTYPE -> ok; - _Else -> erlang:error(badarg, [F, Type]) + _Else -> erlang:error(badarg) end, try digraph:new(Type) of G -> case catch fam2digraph(F, G) of {error, Reason} -> true = digraph:delete(G), - erlang:error(Reason, [F, Type]); + erlang:error(Reason); _ -> G end catch - error:badarg -> erlang:error(badarg, [F, Type]) + error:badarg -> erlang:error(badarg) end. -spec(digraph_to_family(Graph) -> Family when Graph :: digraph:graph(), Family :: family()). digraph_to_family(G) -> - case catch digraph_family(G) of - {'EXIT', _} -> erlang:error(badarg, [G]); + try digraph_family(G) of L -> ?SET(L, ?FAMILY(?ATOM_TYPE, ?ATOM_TYPE)) + catch _:_ -> erlang:error(badarg) end. -spec(digraph_to_family(Graph, Type) -> Family when @@ -1560,12 +1524,12 @@ digraph_to_family(G) -> digraph_to_family(G, T) -> case {is_type(T), T} of {true, ?SET_OF(?FAMILY(_,_) = Type)} -> - case catch digraph_family(G) of - {'EXIT', _} -> erlang:error(badarg, [G, T]); + try digraph_family(G) of L -> ?SET(L, Type) + catch _:_ -> erlang:error(badarg) end; _ -> - erlang:error(badarg, [G, T]) + erlang:error(badarg) end. %% @@ -1713,14 +1677,15 @@ func_type([], SL, Type, F) -> setify(L, ?SET_OF(Atom)) when ?IS_ATOM_TYPE(Atom), Atom =/= ?ANYTYPE -> ?SET(usort(L), Atom); setify(L, ?SET_OF(Type0)) -> - case catch is_no_lists(Type0) of - {'EXIT', _} -> - {?SET_OF(Type), Set} = create(L, Type0, Type0, []), - ?SET(Set, Type); + try is_no_lists(Type0) of N when is_integer(N) -> - rel(L, N, Type0); + rel(L, N, Type0); Sizes -> make_oset(L, Sizes, L, Type0) + catch + _:_ -> + {?SET_OF(Type), Set} = create(L, Type0, Type0, []), + ?SET(Set, Type) end; setify(E, Type0) -> {Type, OrdSet} = make_element(E, Type0, Type0), diff --git a/lib/stdlib/src/stdlib.appup.src b/lib/stdlib/src/stdlib.appup.src index 979161fef7..3c9e95e3a9 100644 --- a/lib/stdlib/src/stdlib.appup.src +++ b/lib/stdlib/src/stdlib.appup.src @@ -18,7 +18,7 @@ %% %CopyrightEnd% {"%VSN%", %% Up from - max one major revision back - [{<<"3\\.[0-1](\\.[0-9]+)*">>,[restart_new_emulator]}], % OTP-19.* + [{<<"3\\.[0-3](\\.[0-9]+)*">>,[restart_new_emulator]}], % OTP-19.* %% Down to - max one major revision back - [{<<"3\\.[0-1](\\.[0-9]+)*">>,[restart_new_emulator]}] % OTP-19.* + [{<<"3\\.[0-3](\\.[0-9]+)*">>,[restart_new_emulator]}] % OTP-19.* }. diff --git a/lib/stdlib/src/zip.erl b/lib/stdlib/src/zip.erl index 340cc21390..fadf96146e 100644 --- a/lib/stdlib/src/zip.erl +++ b/lib/stdlib/src/zip.erl @@ -179,19 +179,6 @@ external_attr, local_header_offset}). -%% Unix extra fields (not yet supported) --define(UNIX_EXTRA_FIELD_TAG, 16#000d). --record(unix_extra_field, {atime, - mtime, - uid, - gid}). - -%% extended timestamps (not yet supported) --define(EXTENDED_TIMESTAMP_TAG, 16#5455). -%% -record(extended_timestamp, {mtime, -%% atime, -%% ctime}). - -define(END_OF_CENTRAL_DIR_MAGIC, 16#06054b50). -define(END_OF_CENTRAL_DIR_SZ, (4+2+2+2+2+4+4+2)). @@ -381,9 +368,12 @@ do_unzip(F, Options) -> {Info, In1} = get_central_dir(In0, RawIterator, Input), %% get rid of zip-comment Z = zlib:open(), - Files = get_z_files(Info, Z, In1, Opts, []), - zlib:close(Z), - Input(close, In1), + Files = try + get_z_files(Info, Z, In1, Opts, []) + after + zlib:close(Z), + Input(close, In1) + end, {ok, Files}. 
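The do_unzip/2 hunk above and the do_zip/3 hunk that follows are resource-safety fixes: the zlib handle and the open input/output are now released even when the work inside throws, and do_zip/3 additionally re-raises the original exception with its saved stacktrace. A standalone sketch of both idioms, using the same pre-OTP-21 erlang:get_stacktrace/0 call that the patch itself uses; the module and function names are illustrative only:

    -module(zlib_cleanup).
    -export([compressed_size/1, compressed_size_reraise/1]).

    %% try ... after: the zlib handle is closed whether deflate succeeds or throws.
    compressed_size(Bin) when is_binary(Bin) ->
        Z = zlib:open(),
        try
            ok = zlib:deflateInit(Z),
            Compressed = zlib:deflate(Z, Bin, finish),
            ok = zlib:deflateEnd(Z),
            iolist_size(Compressed)
        after
            zlib:close(Z)
        end.

    %% try ... catch with an explicit re-raise: clean up, then rethrow the
    %% original class and reason together with the saved stacktrace.
    compressed_size_reraise(Bin) when is_binary(Bin) ->
        Z = zlib:open(),
        try
            ok = zlib:deflateInit(Z),
            Compressed = zlib:deflate(Z, Bin, finish),
            ok = zlib:deflateEnd(Z),
            zlib:close(Z),
            iolist_size(Compressed)
        catch
            C:R ->
                Stk = erlang:get_stacktrace(),
                zlib:close(Z),
                erlang:raise(C, R, Stk)
        end.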
%% Iterate over all files in a zip archive @@ -460,11 +450,20 @@ do_zip(F, Files, Options) -> #zip_opts{output = Output, open_opts = OpO} = Opts, Out0 = Output({open, F, OpO}, []), Z = zlib:open(), - {Out1, LHS, Pos} = put_z_files(Files, Z, Out0, 0, Opts, []), - zlib:close(Z), - Out2 = put_central_dir(LHS, Pos, Out1, Opts), - Out3 = Output({close, F}, Out2), - {ok, Out3}. + try + {Out1, LHS, Pos} = put_z_files(Files, Z, Out0, 0, Opts, []), + zlib:close(Z), + Out2 = put_central_dir(LHS, Pos, Out1, Opts), + Out3 = Output({close, F}, Out2), + {ok, Out3} + catch + C:R -> + Stk = erlang:get_stacktrace(), + zlib:close(Z), + Output({close, F}, Out0), + erlang:raise(C, R, Stk) + end. + %% List zip directory contents %% @@ -1379,12 +1378,7 @@ cd_file_header_to_file_info(FileName, gid = 0}, add_extra_info(FI, ExtraField). -%% add extra info to file (some day when we implement it) -add_extra_info(FI, <<?EXTENDED_TIMESTAMP_TAG:16/little, _Rest/binary>>) -> - FI; % not yet supported, some other day... -add_extra_info(FI, <<?UNIX_EXTRA_FIELD_TAG:16/little, Rest/binary>>) -> - _UnixExtra = unix_extra_field_and_var_from_bin(Rest), - FI; % not yet supported, and not widely used +%% Currently, we ignore all the extra fields. add_extra_info(FI, _) -> FI. @@ -1572,20 +1566,6 @@ dos_date_time_from_datetime({{Year, Month, Day}, {Hour, Min, Sec}}) -> <<DosDate:16>> = <<YearFrom1980:7, Month:4, Day:5>>, {DosDate, DosTime}. -unix_extra_field_and_var_from_bin(<<TSize:16/little, - ATime:32/little, - MTime:32/little, - UID:16/little, - GID:16/little, - Var:TSize/binary>>) -> - {#unix_extra_field{atime = ATime, - mtime = MTime, - uid = UID, - gid = GID}, - Var}; -unix_extra_field_and_var_from_bin(_) -> - throw(bad_unix_extra_field). - %% A pwrite-like function for iolists (used by memory-option) pwrite_binary(B, Pos, Bin) when byte_size(B) =:= Pos -> diff --git a/lib/stdlib/test/base64_SUITE.erl b/lib/stdlib/test/base64_SUITE.erl index d0abe5c961..6ddc67464c 100644 --- a/lib/stdlib/test/base64_SUITE.erl +++ b/lib/stdlib/test/base64_SUITE.erl @@ -82,7 +82,7 @@ base64_decode(Config) when is_list(Config) -> Alphabet = list_to_binary(lists:seq(0, 255)), Alphabet = base64:decode(base64:encode(Alphabet)), - %% Encoded base 64 strings may be devided by non base 64 chars. + %% Encoded base 64 strings may be divided by non base 64 chars. %% In this cases whitespaces. "0123456789!@#0^&*();:<>,. []{}" = base64:decode_to_string( diff --git a/lib/stdlib/test/beam_lib_SUITE.erl b/lib/stdlib/test/beam_lib_SUITE.erl index 4521ecc0ef..279e15f703 100644 --- a/lib/stdlib/test/beam_lib_SUITE.erl +++ b/lib/stdlib/test/beam_lib_SUITE.erl @@ -81,12 +81,8 @@ normal(Conf) when is_list(Conf) -> NoOfTables = length(ets:all()), P0 = pps(), - CompileFlags = [{outdir,PrivDir}, debug_info], - {ok,_} = compile:file(Source, CompileFlags), - {ok, Binary} = file:read_file(BeamFile), - - do_normal(BeamFile), - do_normal(Binary), + do_normal(Source, PrivDir, BeamFile, []), + do_normal(Source, PrivDir, BeamFile, [no_utf8_atoms]), {ok,_} = compile:file(Source, [{outdir,PrivDir}, no_debug_info]), {ok, {simple, [{abstract_code, no_abstract_code}]}} = @@ -101,7 +97,15 @@ normal(Conf) when is_list(Conf) -> true = (P0 == pps()), ok. -do_normal(BeamFile) -> +do_normal(Source, PrivDir, BeamFile, Opts) -> + CompileFlags = [{outdir,PrivDir}, debug_info | Opts], + {ok,_} = compile:file(Source, CompileFlags), + {ok, Binary} = file:read_file(BeamFile), + + do_normal(BeamFile, Opts), + do_normal(Binary, Opts). 
+ +do_normal(BeamFile, Opts) -> Imports = {imports, [{erlang, get_module_info, 1}, {erlang, get_module_info, 2}, {lists, member, 2}]}, @@ -130,20 +134,31 @@ do_normal(BeamFile) -> beam_lib:chunks(BeamFile, [abstract_code]), %% Test reading optional chunks. - All = ["Atom", "Code", "StrT", "ImpT", "ExpT", "FunT", "LitT"], + All = ["Atom", "Code", "StrT", "ImpT", "ExpT", "FunT", "LitT", "AtU8"], {ok,{simple,Chunks}} = beam_lib:chunks(BeamFile, All, [allow_missing_chunks]), - verify_simple(Chunks). + case {verify_simple(Chunks),Opts} of + {{missing_chunk, AtomBin}, []} when is_binary(AtomBin) -> ok; + {{AtomBin, missing_chunk}, [no_utf8_atoms]} when is_binary(AtomBin) -> ok + end, -verify_simple([{"Atom", AtomBin}, + %% Make sure that reading the atom chunk works when the 'allow_missing_chunks' + %% option is used. + Some = ["Code",atoms,"ExpT","LitT"], + {ok,{simple,SomeChunks}} = beam_lib:chunks(BeamFile, Some, [allow_missing_chunks]), + [{"Code",<<_/binary>>},{atoms,[_|_]},{"ExpT",<<_/binary>>},{"LitT",missing_chunk}] = + SomeChunks. + +verify_simple([{"Atom", PlainAtomChunk}, {"Code", CodeBin}, {"StrT", StrBin}, {"ImpT", ImpBin}, {"ExpT", ExpBin}, {"FunT", missing_chunk}, - {"LitT", missing_chunk}]) - when is_binary(AtomBin), is_binary(CodeBin), is_binary(StrBin), + {"LitT", missing_chunk}, + {"AtU8", AtU8Chunk}]) + when is_binary(CodeBin), is_binary(StrBin), is_binary(ImpBin), is_binary(ExpBin) -> - ok. + {PlainAtomChunk, AtU8Chunk}. %% Read invalid beam files. error(Conf) when is_list(Conf) -> @@ -211,7 +226,7 @@ last_chunk(Bin) -> do_error(BeamFile, ACopy) -> %% evil tests Chunks = chunk_info(BeamFile), - {value, {_, AtomStart, _}} = lists:keysearch("Atom", 1, Chunks), + {value, {_, AtomStart, _}} = lists:keysearch("AtU8", 1, Chunks), {value, {_, ImportStart, _}} = lists:keysearch("ImpT", 1, Chunks), {value, {_, AbstractStart, _}} = lists:keysearch("Abst", 1, Chunks), {value, {_, AttributesStart, _}} = @@ -234,7 +249,7 @@ do_error(BeamFile, ACopy) -> verify(not_a_beam_file, beam_lib:info(BF7)), BF8 = set_byte(ACopy, BeamFile, 13, 17), - verify(missing_chunk, beam_lib:chunks(BF8, ["Atom"])), + verify(missing_chunk, beam_lib:chunks(BF8, ["AtU8"])), BF9 = set_byte(ACopy, BeamFile, CompileInfoStart+10, 17), verify(invalid_chunk, beam_lib:chunks(BF9, [compile_info])). diff --git a/lib/stdlib/test/dets_SUITE.erl b/lib/stdlib/test/dets_SUITE.erl index aa31fdde5a..95c9b47465 100644 --- a/lib/stdlib/test/dets_SUITE.erl +++ b/lib/stdlib/test/dets_SUITE.erl @@ -3012,8 +3012,13 @@ repair_continuation(Config) -> MS = [{'_',[],[true]}], - {[true], C1} = dets:select(Tab, MS, 1), - C2 = binary_to_term(term_to_binary(C1)), + SRes = term_to_binary(dets:select(Tab, MS, 1)), + %% Get rid of compiled match spec + lists:foreach(fun (P) -> + garbage_collect(P) + end, processes()), + {[true], C2} = binary_to_term(SRes), + {'EXIT', {badarg, _}} = (catch dets:select(C2)), C3 = dets:repair_continuation(C2, MS), {[true], C4} = dets:select(C3), diff --git a/lib/stdlib/test/edlin_expand_SUITE.erl b/lib/stdlib/test/edlin_expand_SUITE.erl index 718d91c6a3..1f694ea549 100644 --- a/lib/stdlib/test/edlin_expand_SUITE.erl +++ b/lib/stdlib/test/edlin_expand_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% Copyright Ericsson AB 2010-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. 
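Further above, the beam_lib_SUITE hunks switch to the "AtU8" (UTF-8 atom) chunk and lean on the allow_missing_chunks option, which maps an absent chunk to the atom missing_chunk instead of returning an error. A small illustrative sketch of that call; the chunk name "XyzT" is invented so that it is guaranteed to be missing:

    -module(chunk_peek).
    -export([optional_chunks/1]).

    %% Returns, for example,
    %%   [{"AtU8", <<...>>}, {"XyzT", missing_chunk}]
    %% for a module compiled with UTF-8 atoms; older beams may instead carry
    %% a plain "Atom" chunk and report "AtU8" as missing.
    optional_chunks(Mod) ->
        Beam = code:which(Mod),
        {ok, {Mod, Chunks}} =
            beam_lib:chunks(Beam, ["AtU8", "XyzT"], [allow_missing_chunks]),
        Chunks.

For instance, optional_chunks(lists) should return the raw atom chunk of the lists module together with {"XyzT", missing_chunk}; without allow_missing_chunks the same call would fail with a missing_chunk error.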
@@ -21,7 +21,8 @@ -export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, init_per_testcase/2, end_per_testcase/2, init_per_group/2,end_per_group/2]). --export([normal/1, quoted_fun/1, quoted_module/1, quoted_both/1, erl_1152/1]). +-export([normal/1, quoted_fun/1, quoted_module/1, quoted_both/1, erl_1152/1, + erl_352/1]). -include_lib("common_test/include/ct.hrl"). @@ -36,7 +37,7 @@ suite() -> {timetrap,{minutes,1}}]. all() -> - [normal, quoted_fun, quoted_module, quoted_both, erl_1152]. + [normal, quoted_fun, quoted_module, quoted_both, erl_1152, erl_352]. groups() -> []. @@ -153,6 +154,78 @@ erl_1152(Config) when is_list(Config) -> "\n"++"foo"++" "++[1089]++_ = do_format(["foo",[1089]]), ok. +erl_352(Config) when is_list(Config) -> + erl_352_test(3, 3), + + erl_352_test(3, 75), + erl_352_test(3, 76, [trailing]), + erl_352_test(4, 74), + erl_352_test(4, 75, [leading]), + erl_352_test(4, 76, [leading, trailing]), + + erl_352_test(75, 3), + erl_352_test(76, 3, [leading]), + erl_352_test(74, 4), + erl_352_test(75, 4, [leading]), + erl_352_test(76, 4, [leading]), + + erl_352_test(74, 74, [leading]), + erl_352_test(74, 75, [leading]), + erl_352_test(74, 76, [leading, trailing]). + +erl_352_test(PrefixLen, SuffixLen) -> + erl_352_test(PrefixLen, SuffixLen, []). + +erl_352_test(PrefixLen, SuffixLen, Dots) -> + io:format("\nPrefixLen = ~w, SuffixLen = ~w\n", [PrefixLen, SuffixLen]), + + PrefixM = lists:duplicate(PrefixLen, $p), + SuffixM = lists:duplicate(SuffixLen, $s), + LM = [PrefixM ++ S ++ SuffixM || S <- ["1", "2"]], + StrM = do_format(LM), + check_leading(StrM, "", PrefixM, SuffixM, Dots), + + PrefixF = lists:duplicate(PrefixLen, $p), + SuffixF = lists:duplicate(SuffixLen-2, $s), + LF = [{PrefixF ++ S ++ SuffixF, 1} || S <- ["1", "2"]], + StrF = do_format(LF), + true = check_leading(StrF, "/1", PrefixF, SuffixF, Dots), + + ok. + +check_leading(FormStr, ArityStr, Prefix, Suffix, Dots) -> + List = string:tokens(FormStr, "\n "), + io:format("~p\n", [List]), + true = lists:all(fun(L) -> length(L) < 80 end, List), + case lists:member(leading, Dots) of + true -> + true = lists:all(fun(L) -> + {"...", Rest} = lists:split(3, L), + check_trailing(Rest, ArityStr, + Suffix, Dots) + end, List); + false -> + true = lists:all(fun(L) -> + {Prefix, Rest} = + lists:split(length(Prefix), L), + check_trailing(Rest, ArityStr, + Suffix, Dots) + end, List) + end. + +check_trailing([I|Str], ArityStr, Suffix, Dots) -> + true = lists:member(I, [$1, $2]), + case lists:member(trailing, Dots) of + true -> + {Rest, "..." ++ ArityStr} = + lists:split(length(Str) - (3 + length(ArityStr)), Str), + true = lists:prefix(Rest, Suffix); + false -> + {Rest, ArityStr} = + lists:split(length(Str) - length(ArityStr), Str), + Rest =:= Suffix + end. + do_expand(String) -> edlin_expand:expand(lists:reverse(String)). diff --git a/lib/stdlib/test/erl_lint_SUITE.erl b/lib/stdlib/test/erl_lint_SUITE.erl index c7dcd9ae16..df38edf393 100644 --- a/lib/stdlib/test/erl_lint_SUITE.erl +++ b/lib/stdlib/test/erl_lint_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1999-2016. All Rights Reserved. +%% Copyright Ericsson AB 1999-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. 
@@ -2549,7 +2549,7 @@ otp_5878(Config) when is_list(Config) -> {function,9,t,0,[{clause,9,[],[],[{record,10,r,[]}]}]}, {eof,11}], {error,[{"rec.erl",[{7,erl_lint,old_abstract_code}]}],[]} = - compile:forms(OldAbstract, [return, report]), + compile_forms(OldAbstract, [return, report]), ok. @@ -3848,9 +3848,13 @@ otp_11879(_Config) -> [{1,erl_lint,{spec_fun_undefined,{f,1}}}, {2,erl_lint,spec_wrong_arity}, {22,erl_lint,callback_wrong_arity}]}], - []} = compile:forms(Fs, [return,report]), + []} = compile_forms(Fs, [return,report]), ok. +compile_forms(Terms, Opts) -> + Forms = [erl_parse:anno_from_term(Term) || Term <- Terms], + compile:forms(Forms, Opts). + %% OTP-13230: -deprecated without -module. otp_13230(Config) when is_list(Config) -> Abstr = <<"-deprecated([{frutt,0,next_version}]).">>, diff --git a/lib/stdlib/test/erl_pp_SUITE.erl b/lib/stdlib/test/erl_pp_SUITE.erl index 31ea3210a8..1a028204b4 100644 --- a/lib/stdlib/test/erl_pp_SUITE.erl +++ b/lib/stdlib/test/erl_pp_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2006-2016. All Rights Reserved. +%% Copyright Ericsson AB 2006-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -1068,10 +1068,10 @@ otp_11100(Config) when is_list(Config) -> %% There are a few places where the added code ("options(none)") %% doesn't make a difference (pp:bit_elem_type/1 is an example). + A1 = erl_anno:new(1), %% Cannot trigger the use of the hook function with export/import. "-export([{fy,a}/b]).\n" = - pf({attribute,1,export,[{{fy,a},b}]}), - A1 = erl_anno:new(1), + pf({attribute,A1,export,[{{fy,a},b}]}), "-type foo() :: integer(INVALID-FORM:{foo,bar}:).\n" = pf({attribute,A1,type,{foo,{type,A1,integer,[{foo,bar}]},[]}}), pf({attribute,A1,type, @@ -1100,10 +1100,11 @@ otp_11100(Config) when is_list(Config) -> %% OTP-11861. behaviour_info() and -callback. otp_11861(Config) when is_list(Config) -> + A3 = erl_anno:new(3), "-optional_callbacks([bar/0]).\n" = - pf({attribute,3,optional_callbacks,[{bar,0}]}), + pf({attribute,A3,optional_callbacks,[{bar,0}]}), "-optional_callbacks([{bar,1,bad}]).\n" = - pf({attribute,4,optional_callbacks,[{bar,1,bad}]}), + pf({attribute,A3,optional_callbacks,[{bar,1,bad}]}), ok. pf(Form) -> diff --git a/lib/stdlib/test/erl_scan_SUITE.erl b/lib/stdlib/test/erl_scan_SUITE.erl index 4ae734eb65..aca5b1e54f 100644 --- a/lib/stdlib/test/erl_scan_SUITE.erl +++ b/lib/stdlib/test/erl_scan_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1998-2016. All Rights Reserved. +%% Copyright Ericsson AB 1998-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. 
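The compile_forms/2 helper added to erl_lint_SUITE maps erl_parse:anno_from_term/1 over hand-written abstract forms before handing them to compile:forms/2, and the erl_pp_SUITE hunks likewise build annotations with erl_anno:new/1 rather than passing bare integers. A minimal sketch of that conversion; the module name m and the forms are only an example:

    -module(forms_demo).
    -export([compile_raw_forms/0]).

    %% Wrap plain line numbers as erl_anno annotations, then compile the
    %% forms to a binary without touching the file system.
    compile_raw_forms() ->
        Forms0 = [{attribute,1,module,m},
                  {attribute,2,export,[]},
                  {eof,3}],
        Forms = [erl_parse:anno_from_term(F) || F <- Forms0],
        {ok, m, _Beam} = compile:forms(Forms, [binary]),
        ok.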
@@ -772,10 +772,9 @@ unicode() -> erl_scan:string([1089]), {error,{{1,1},erl_scan,{illegal,character}},{1,2}} = erl_scan:string([1089], {1,1}), - {error,{1,erl_scan,{illegal,atom}},1} = - erl_scan:string("'a"++[1089]++"b'", 1), - {error,{{1,1},erl_scan,{illegal,atom}},{1,6}} = - erl_scan:string("'a"++[1089]++"b'", {1,1}), + {error,{{1,3},erl_scan,{illegal,character}},{1,4}} = + erl_scan:string("'a" ++ [999999999] ++ "c'", {1,1}), + test("\"a"++[1089]++"b\""), {ok,[{char,1,1}],1} = erl_scan_string([$$,$\\,$^,1089], 1), @@ -786,8 +785,8 @@ unicode() -> erl_scan:format_error(Error), {error,{{1,1},erl_scan,_},{1,11}} = erl_scan:string("\"qa\\x{aaa}",{1,1}), - {error,{{1,1},erl_scan,{illegal,atom}},{1,12}} = - erl_scan:string("'qa\\x{aaa}'",{1,1}), + {error,{{1,1},erl_scan,_},{1,11}} = + erl_scan:string("'qa\\x{aaa}",{1,1}), {ok,[{char,1,1089}],1} = erl_scan_string([$$,1089], 1), @@ -904,10 +903,10 @@ more_chars() -> %% OTP-10302. Unicode characters scanner/parser. otp_10302(Config) when is_list(Config) -> %% From unicode(): - {error,{1,erl_scan,{illegal,atom}},1} = - erl_scan:string("'a"++[1089]++"b'", 1), - {error,{{1,1},erl_scan,{illegal,atom}},{1,12}} = - erl_scan:string("'qa\\x{aaa}'",{1,1}), + {ok,[{atom,1,'aсb'}],1} = + erl_scan_string("'a"++[1089]++"b'", 1), + {ok,[{atom,{1,1},'qaપ'}],{1,12}} = + erl_scan_string("'qa\\x{aaa}'",{1,1}), {ok,[{char,1,1089}],1} = erl_scan_string([$$,1089], 1), {ok,[{char,1,1089}],1} = erl_scan_string([$$,$\\,1089],1), diff --git a/lib/stdlib/test/ets_SUITE.erl b/lib/stdlib/test/ets_SUITE.erl index f68d5eca3f..8581440d58 100644 --- a/lib/stdlib/test/ets_SUITE.erl +++ b/lib/stdlib/test/ets_SUITE.erl @@ -22,7 +22,7 @@ -export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, init_per_group/2,end_per_group/2]). -export([default/1,setbag/1,badnew/1,verybadnew/1,named/1,keypos2/1, - privacy/1,privacy_owner/2]). + privacy/1]). -export([empty/1,badinsert/1]). -export([time_lookup/1,badlookup/1,lookup_order/1]). -export([delete_elem/1,delete_tab/1,delete_large_tab/1, @@ -82,27 +82,6 @@ %% Convenience for manual testing -export([random_test/0]). -%% internal exports --export([dont_make_worse_sub/0, make_better_sub1/0, make_better_sub2/0]). --export([t_repair_continuation_do/1, t_bucket_disappears_do/1, - select_fail_do/1, whitebox_1/1, whitebox_2/1, t_delete_all_objects_do/1, - t_delete_object_do/1, t_init_table_do/1, t_insert_list_do/1, - update_element_opts/1, update_element_opts/4, update_element/4, update_element_do/4, - update_element_neg/1, update_element_neg_do/1, update_counter_do/1, update_counter_neg/1, - evil_update_counter_do/1, fixtable_next_do/1, heir_do/1, give_away_do/1, setopts_do/1, - rename_do/1, rename_unnamed_do/1, interface_equality_do/1, ordered_match_do/1, - ordered_do/1, privacy_do/1, empty_do/1, badinsert_do/1, time_lookup_do/1, - lookup_order_do/1, lookup_element_mult_do/1, delete_tab_do/1, delete_elem_do/1, - match_delete_do/1, match_delete3_do/1, firstnext_do/1, - slot_do/1, match1_do/1, match2_do/1, match_object_do/1, match_object2_do/1, - misc1_do/1, safe_fixtable_do/1, info_do/1, dups_do/1, heavy_lookup_do/1, - heavy_lookup_element_do/1, member_do/1, otp_5340_do/1, otp_7665_do/1, meta_wb_do/1, - do_heavy_concurrent/1, tab2file2_do/2, exit_large_table_owner_do/2, - types_do/1, sleeper/0, memory_do/1, update_counter_with_default_do/1, - update_counter_table_growth_do/1, - ms_tracee_dummy/1, ms_tracee_dummy/2, ms_tracee_dummy/3, ms_tracee_dummy/4 - ]). - -export([t_select_reverse/1]). -include_lib("common_test/include/ct.hrl"). 
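The removed export list above pairs with the ets_SUITE hunks that follow: repeat_for_opts and the spawn helpers now take local funs (fun name/1 or a closure) instead of atoms and {M,F,A} triples that had to be exported and called through ?MODULE. A toy sketch of the two dispatch styles; repeat_for_opts_old/new here are stand-ins, not the real option expansion:

    -module(opt_dispatch).
    -export([run_old/0, run_new/0, show_opts/1]).

    %% Old style: the callback is named by an atom and must be exported so
    %% that ?MODULE:F(Opts) can find it.
    run_old() ->
        repeat_for_opts_old(show_opts, [[set], [bag]]).

    repeat_for_opts_old(F, OptGenList) when is_atom(F) ->
        [?MODULE:F(Opts) || Opts <- OptGenList].

    %% New style: pass the fun itself; nothing extra needs to be exported
    %% (show_opts/1 is exported here only so that run_old/0 still works).
    run_new() ->
        repeat_for_opts_new(fun show_opts/1, [[set], [bag]]).

    repeat_for_opts_new(F, OptGenList) when is_function(F, 1) ->
        [F(Opts) || Opts <- OptGenList].

    show_opts(Opts) ->
        io:format("running with options ~p~n", [Opts]).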
@@ -228,7 +207,7 @@ memory_check_summary(_Config) -> %% Test that a disappearing bucket during select of a non-fixed table works. t_bucket_disappears(Config) when is_list(Config) -> - repeat_for_opts(t_bucket_disappears_do). + repeat_for_opts(fun t_bucket_disappears_do/1). t_bucket_disappears_do(Opts) -> EtsMem = etsmem(), @@ -396,11 +375,16 @@ ms_tracer_collect(Tracee, Ref, Acc) -> ms_tracee(Parent, CallArgList) -> Parent ! {self(), ready}, receive start -> ok end, - lists:foreach(fun(Args) -> - erlang:apply(?MODULE, ms_tracee_dummy, tuple_to_list(Args)) - end, CallArgList). - - + F = fun({A1}) -> + ms_tracee_dummy(A1); + ({A1,A2}) -> + ms_tracee_dummy(A1, A2); + ({A1,A2,A3}) -> + ms_tracee_dummy(A1, A2, A3); + ({A1,A2,A3,A4}) -> + ms_tracee_dummy(A1, A2, A3, A4) + end, + lists:foreach(F, CallArgList). ms_tracee_dummy(_) -> ok. ms_tracee_dummy(_,_) -> ok. @@ -418,7 +402,7 @@ assert_eq(A,B) -> %% Test ets:repair_continuation/2. t_repair_continuation(Config) when is_list(Config) -> - repeat_for_opts(t_repair_continuation_do). + repeat_for_opts(fun t_repair_continuation_do/1). t_repair_continuation_do(Opts) -> @@ -564,7 +548,8 @@ default(Config) when is_list(Config) -> %% Test that select fails even if nothing can match. select_fail(Config) when is_list(Config) -> EtsMem = etsmem(), - repeat_for_opts(select_fail_do, [all_types,write_concurrency]), + repeat_for_opts(fun select_fail_do/1, + [all_types,write_concurrency]), verify_etsmem(EtsMem). select_fail_do(Opts) -> @@ -594,7 +579,7 @@ select_fail_do(Opts) -> %% Whitebox test of ets:info(X, memory). memory(Config) when is_list(Config) -> ok = chk_normal_tab_struct_size(), - repeat_for_opts(memory_do,[compressed]), + repeat_for_opts(fun memory_do/1, [compressed]), catch erts_debug:set_internal_state(available_internal_state, false). memory_do(Opts) -> @@ -704,12 +689,12 @@ adjust_xmem([_T1,_T2,_T3,_T4], {A0,B0,C0,D0} = _Mem0, EstCnt) -> %% Misc. whitebox tests t_whitebox(Config) when is_list(Config) -> EtsMem = etsmem(), - repeat_for_opts(whitebox_1), - repeat_for_opts(whitebox_1), - repeat_for_opts(whitebox_1), - repeat_for_opts(whitebox_2), - repeat_for_opts(whitebox_2), - repeat_for_opts(whitebox_2), + repeat_for_opts(fun whitebox_1/1), + repeat_for_opts(fun whitebox_1/1), + repeat_for_opts(fun whitebox_1/1), + repeat_for_opts(fun whitebox_2/1), + repeat_for_opts(fun whitebox_2/1), + repeat_for_opts(fun whitebox_2/1), verify_etsmem(EtsMem). whitebox_1(Opts) -> @@ -774,7 +759,7 @@ check_badarg({'EXIT', {badarg, [{M,F,A,_} | _]}}, M, F, Args) -> %% Test ets:delete_all_objects/1. t_delete_all_objects(Config) when is_list(Config) -> EtsMem = etsmem(), - repeat_for_opts(t_delete_all_objects_do), + repeat_for_opts(fun t_delete_all_objects_do/1), verify_etsmem(EtsMem). get_kept_objects(T) -> @@ -808,7 +793,7 @@ t_delete_all_objects_do(Opts) -> %% Test ets:delete_object/2. t_delete_object(Config) when is_list(Config) -> EtsMem = etsmem(), - repeat_for_opts(t_delete_object_do), + repeat_for_opts(fun t_delete_object_do/1), verify_etsmem(EtsMem). t_delete_object_do(Opts) -> @@ -881,7 +866,7 @@ make_init_fun(N) -> %% Test ets:init_table/2. t_init_table(Config) when is_list(Config)-> EtsMem = etsmem(), - repeat_for_opts(t_init_table_do), + repeat_for_opts(fun t_init_table_do/1), verify_etsmem(EtsMem). t_init_table_do(Opts) -> @@ -957,7 +942,7 @@ t_insert_new(Config) when is_list(Config) -> %% Test ets:insert/2 with list of objects. 
t_insert_list(Config) when is_list(Config) -> EtsMem = etsmem(), - repeat_for_opts(t_insert_list_do), + repeat_for_opts(fun t_insert_list_do/1), verify_etsmem(EtsMem). t_insert_list_do(Opts) -> @@ -1187,7 +1172,7 @@ partly_bound(Config) when is_list(Config) -> end. dont_make_worse() -> - seventyfive_percent_success({?MODULE,dont_make_worse_sub,[]},0,0,10). + seventyfive_percent_success(fun dont_make_worse_sub/0, 0, 0, 10). dont_make_worse_sub() -> T = build_table([a,b],[a,b],15000), @@ -1199,8 +1184,9 @@ dont_make_worse_sub() -> ok. make_better() -> - fifty_percent_success({?MODULE,make_better_sub2,[]},0,0,10), - fifty_percent_success({?MODULE,make_better_sub1,[]},0,0,10). + fifty_percent_success(fun make_better_sub2/0, 0, 0, 10), + fifty_percent_success(fun make_better_sub1/0, 0, 0, 10). + make_better_sub1() -> T = build_table2([a,b],[a,b],15000), T1 = time_match_object(T,{'_',1500,a,a}, [{{1500,a,a},1500,a,a}]), @@ -1485,7 +1471,7 @@ do_random_test() -> %% Ttest various variants of update_element. update_element(Config) when is_list(Config) -> EtsMem = etsmem(), - repeat_for_opts(update_element_opts), + repeat_for_opts(fun update_element_opts/1), verify_etsmem(EtsMem). update_element_opts(Opts) -> @@ -1647,7 +1633,7 @@ update_element_neg_do(T) -> %% test various variants of update_counter. update_counter(Config) when is_list(Config) -> EtsMem = etsmem(), - repeat_for_opts(update_counter_do), + repeat_for_opts(fun update_counter_do/1), verify_etsmem(EtsMem). update_counter_do(Opts) -> @@ -1868,7 +1854,7 @@ evil_update_counter(Config) when is_list(Config) -> ordsets:module_info(), rand:module_info(), - repeat_for_opts(evil_update_counter_do). + repeat_for_opts(fun evil_update_counter_do/1). evil_update_counter_do(Opts) -> EtsMem = etsmem(), @@ -1915,7 +1901,7 @@ evil_counter_1(Iter, T) -> evil_counter_1(Iter-1, T). update_counter_with_default(Config) when is_list(Config) -> - repeat_for_opts(update_counter_with_default_do). + repeat_for_opts(fun update_counter_with_default_do/1). update_counter_with_default_do(Opts) -> T1 = ets_new(a, [set | Opts]), @@ -1953,7 +1939,7 @@ update_counter_with_default_do(Opts) -> ok. update_counter_table_growth(_Config) -> - repeat_for_opts(update_counter_table_growth_do). + repeat_for_opts(fun update_counter_table_growth_do/1). update_counter_table_growth_do(Opts) -> Set = ets_new(b, [set | Opts]), @@ -1964,7 +1950,8 @@ update_counter_table_growth_do(Opts) -> %% Check that a first-next sequence always works on a fixed table. fixtable_next(Config) when is_list(Config) -> - repeat_for_opts(fixtable_next_do, [write_concurrency,all_types]). + repeat_for_opts(fun fixtable_next_do/1, + [write_concurrency,all_types]). fixtable_next_do(Opts) -> EtsMem = etsmem(), @@ -2104,7 +2091,7 @@ write_concurrency(Config) when is_list(Config) -> %% The 'heir' option. heir(Config) when is_list(Config) -> - repeat_for_opts(heir_do). + repeat_for_opts(fun heir_do/1). heir_do(Opts) -> EtsMem = etsmem(), @@ -2244,7 +2231,7 @@ heir_1(HeirData,Mode,Opts) -> %% Test ets:give_way/3. give_away(Config) when is_list(Config) -> - repeat_for_opts(give_away_do). + repeat_for_opts(fun give_away_do/1). give_away_do(Opts) -> T = ets_new(foo,[named_table, private | Opts]), @@ -2325,7 +2312,7 @@ give_away_receiver(T, Giver) -> %% Test ets:setopts/2. setopts(Config) when is_list(Config) -> - repeat_for_opts(setopts_do,[write_concurrency,all_types]). + repeat_for_opts(fun setopts_do/1, [write_concurrency,all_types]). 
setopts_do(Opts) -> Self = self(), @@ -2475,7 +2462,7 @@ bad_table_call(T,{F,Args,_,{return,Return}}) -> %% Check rename of ets tables. rename(Config) when is_list(Config) -> - repeat_for_opts(rename_do, [write_concurrency, all_types]). + repeat_for_opts(fun rename_do/1, [write_concurrency, all_types]). rename_do(Opts) -> EtsMem = etsmem(), @@ -2490,7 +2477,8 @@ rename_do(Opts) -> %% Check rename of unnamed ets table. rename_unnamed(Config) when is_list(Config) -> - repeat_for_opts(rename_unnamed_do,[write_concurrency,all_types]). + repeat_for_opts(fun rename_unnamed_do/1, + [write_concurrency,all_types]). rename_unnamed_do(Opts) -> EtsMem = etsmem(), @@ -2565,7 +2553,7 @@ evil_create_fixed_tab() -> %% Tests that the return values and errors are equal for set's and %% ordered_set's where applicable. interface_equality(Config) when is_list(Config) -> - repeat_for_opts(interface_equality_do). + repeat_for_opts(fun interface_equality_do/1). interface_equality_do(Opts) -> EtsMem = etsmem(), @@ -2629,7 +2617,7 @@ maybe_sort(Any) -> %% Test match, match_object and match_delete in ordered set's. ordered_match(Config) when is_list(Config)-> - repeat_for_opts(ordered_match_do). + repeat_for_opts(fun ordered_match_do/1). ordered_match_do(Opts) -> EtsMem = etsmem(), @@ -2675,7 +2663,7 @@ ordered_match_do(Opts) -> %% Test basic functionality in ordered_set's. ordered(Config) when is_list(Config) -> - repeat_for_opts(ordered_do). + repeat_for_opts(fun ordered_do/1). ordered_do(Opts) -> EtsMem = etsmem(), @@ -2801,12 +2789,13 @@ keypos2(Config) when is_list(Config) -> %% Privacy check. Check that a named(public/private/protected) table %% cannot be read by the wrong process(es). privacy(Config) when is_list(Config) -> - repeat_for_opts(privacy_do). + repeat_for_opts(fun privacy_do/1). privacy_do(Opts) -> EtsMem = etsmem(), process_flag(trap_exit,true), - Owner = my_spawn_link(?MODULE,privacy_owner,[self(),Opts]), + Parent = self(), + Owner = my_spawn_link(fun() -> privacy_owner(Parent, Opts) end), receive {'EXIT',Owner,Reason} -> exit({privacy_test,Reason}); @@ -2886,7 +2875,7 @@ rotate_tuple(Tuple, N) -> %% Check lookup in an empty table and lookup of a non-existing key. empty(Config) when is_list(Config) -> - repeat_for_opts(empty_do). + repeat_for_opts(fun empty_do/1). empty_do(Opts) -> EtsMem = etsmem(), @@ -2899,7 +2888,7 @@ empty_do(Opts) -> %% Check proper return values for illegal insert operations. badinsert(Config) when is_list(Config) -> - repeat_for_opts(badinsert_do). + repeat_for_opts(fun badinsert_do/1). badinsert_do(Opts) -> EtsMem = etsmem(), @@ -2923,7 +2912,7 @@ badinsert_do(Opts) -> time_lookup(Config) when is_list(Config) -> %% just for timing, really EtsMem = etsmem(), - Values = repeat_for_opts(time_lookup_do), + Values = repeat_for_opts(fun time_lookup_do/1), verify_etsmem(EtsMem), {comment,lists:flatten(io_lib:format( "~p ets lookups/s",[Values]))}. @@ -2957,7 +2946,8 @@ badlookup(Config) when is_list(Config) -> %% Test that lookup returns objects in order of insertion for bag and dbag. lookup_order(Config) when is_list(Config) -> EtsMem = etsmem(), - repeat_for_opts(lookup_order_do, [write_concurrency,[bag,duplicate_bag]]), + repeat_for_opts(fun lookup_order_do/1, + [write_concurrency,[bag,duplicate_bag]]), verify_etsmem(EtsMem), ok. @@ -3048,7 +3038,7 @@ fill_tab(Tab,Val) -> %% OTP-2386. Multiple return elements. lookup_element_mult(Config) when is_list(Config) -> - repeat_for_opts(lookup_element_mult_do). + repeat_for_opts(fun lookup_element_mult_do/1). 
lookup_element_mult_do(Opts) -> EtsMem = etsmem(), @@ -3086,7 +3076,8 @@ lem_crash_3(T) -> %% Check delete of an element inserted in a `filled' table. delete_elem(Config) when is_list(Config) -> - repeat_for_opts(delete_elem_do, [write_concurrency, all_types]). + repeat_for_opts(fun delete_elem_do/1, + [write_concurrency, all_types]). delete_elem_do(Opts) -> EtsMem = etsmem(), @@ -3103,7 +3094,8 @@ delete_elem_do(Opts) -> %% Check that ets:delete() works and releases the name of the %% deleted table. delete_tab(Config) when is_list(Config) -> - repeat_for_opts(delete_tab_do,[write_concurrency,all_types]). + repeat_for_opts(fun delete_tab_do/1, + [write_concurrency,all_types]). delete_tab_do(Opts) -> Name = foo, @@ -3301,10 +3293,14 @@ exit_large_table_owner(Config) when is_list(Config) -> end, 1) end, EtsMem = etsmem(), - repeat_for_opts({exit_large_table_owner_do,{FEData,Config}}), + repeat_for_opts(fun(Opts) -> + exit_large_table_owner_do(Opts, + FEData, + Config) + end), verify_etsmem(EtsMem). -exit_large_table_owner_do(Opts,{FEData,Config}) -> +exit_large_table_owner_do(Opts, FEData, Config) -> verify_rescheduling_exit(Config, FEData, [named_table | Opts], true, 1, 1), verify_rescheduling_exit(Config, FEData, Opts, false, 1, 1). @@ -3472,7 +3468,8 @@ baddelete(Config) when is_list(Config) -> %% Check that match_delete works. Also tests tab2list function. match_delete(Config) when is_list(Config) -> EtsMem = etsmem(), - repeat_for_opts(match_delete_do,[write_concurrency,all_types]), + repeat_for_opts(fun match_delete_do/1, + [write_concurrency,all_types]), verify_etsmem(EtsMem). match_delete_do(Opts) -> @@ -3489,7 +3486,7 @@ match_delete_do(Opts) -> %% OTP-3005: check match_delete with constant argument. match_delete3(Config) when is_list(Config) -> - repeat_for_opts(match_delete3_do). + repeat_for_opts(fun match_delete3_do/1). match_delete3_do(Opts) -> EtsMem = etsmem(), @@ -3514,7 +3511,7 @@ match_delete3_do(Opts) -> %% Test ets:first/1 & ets:next/2. firstnext(Config) when is_list(Config) -> - repeat_for_opts(firstnext_do). + repeat_for_opts(fun firstnext_do/1). firstnext_do(Opts) -> EtsMem = etsmem(), @@ -3572,7 +3569,7 @@ dyn_lookup(T, K) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% slot(Config) when is_list(Config) -> - repeat_for_opts(slot_do). + repeat_for_opts(fun slot_do/1). slot_do(Opts) -> EtsMem = etsmem(), @@ -3597,7 +3594,7 @@ slot_loop(Tab,SlotNo,EltsSoFar) -> match1(Config) when is_list(Config) -> - repeat_for_opts(match1_do). + repeat_for_opts(fun match1_do/1). match1_do(Opts) -> EtsMem = etsmem(), @@ -3633,7 +3630,7 @@ match1_do(Opts) -> %% Test match with specified keypos bag table. match2(Config) when is_list(Config) -> - repeat_for_opts(match2_do). + repeat_for_opts(fun match2_do/1). match2_do(Opts) -> EtsMem = etsmem(), @@ -3660,7 +3657,7 @@ match2_do(Opts) -> %% Some ets:match_object tests. match_object(Config) when is_list(Config) -> - repeat_for_opts(match_object_do). + repeat_for_opts(fun match_object_do/1). match_object_do(Opts) -> EtsMem = etsmem(), @@ -3760,7 +3757,7 @@ match_object_do(Opts) -> %% Tests that db_match_object does not generate a `badarg' when %% resuming a search with no previous matches. match_object2(Config) when is_list(Config) -> - repeat_for_opts(match_object2_do). + repeat_for_opts(fun match_object2_do/1). match_object2_do(Opts) -> EtsMem = etsmem(), @@ -3796,7 +3793,7 @@ tab2list(Config) when is_list(Config) -> %% Simple general small test. If this fails, ets is in really bad %% shape. 
misc1(Config) when is_list(Config) -> - repeat_for_opts(misc1_do). + repeat_for_opts(fun misc1_do/1). misc1_do(Opts) -> EtsMem = etsmem(), @@ -3814,7 +3811,7 @@ misc1_do(Opts) -> %% Check the safe_fixtable function. safe_fixtable(Config) when is_list(Config) -> - repeat_for_opts(safe_fixtable_do). + repeat_for_opts(fun safe_fixtable_do/1). safe_fixtable_do(Opts) -> EtsMem = etsmem(), @@ -3872,7 +3869,7 @@ safe_fixtable_do(Opts) -> %% Tests ets:info result for required tuples. info(Config) when is_list(Config) -> - repeat_for_opts(info_do). + repeat_for_opts(fun info_do/1). info_do(Opts) -> EtsMem = etsmem(), @@ -3904,7 +3901,7 @@ info_do(Opts) -> %% Test various duplicate_bags stuff. dups(Config) when is_list(Config) -> - repeat_for_opts(dups_do). + repeat_for_opts(fun dups_do/1). dups_do(Opts) -> EtsMem = etsmem(), @@ -3970,7 +3967,9 @@ tab2file_do(FName, Opts) -> %% Check the ets:tab2file function on a filled set/bag type ets table. tab2file2(Config) when is_list(Config) -> - repeat_for_opts({tab2file2_do,Config}, [[set,bag],compressed]). + repeat_for_opts(fun(Opts) -> + tab2file2_do(Opts, Config) + end, [[set,bag],compressed]). tab2file2_do(Opts, Config) -> EtsMem = etsmem(), @@ -4234,7 +4233,7 @@ make_sub_binary(List, Num) when is_list(List) -> %% Perform multiple lookups for every key in a large table. heavy_lookup(Config) when is_list(Config) -> - repeat_for_opts(heavy_lookup_do). + repeat_for_opts(fun heavy_lookup_do/1). heavy_lookup_do(Opts) -> EtsMem = etsmem(), @@ -4257,7 +4256,7 @@ do_lookup(Tab, N) -> %% Perform multiple lookups for every element in a large table. heavy_lookup_element(Config) when is_list(Config) -> - repeat_for_opts(heavy_lookup_element_do). + repeat_for_opts(fun heavy_lookup_element_do/1). heavy_lookup_element_do(Opts) -> EtsMem = etsmem(), @@ -4285,7 +4284,7 @@ do_lookup_element(Tab, N, M) -> heavy_concurrent(Config) when is_list(Config) -> ct:timetrap({minutes,30}), %% valgrind needs a lot of time - repeat_for_opts(do_heavy_concurrent). + repeat_for_opts(fun do_heavy_concurrent/1). do_heavy_concurrent(Opts) -> Size = 10000, @@ -4370,7 +4369,7 @@ foldr_ordered(Config) when is_list(Config) -> %% Test ets:member BIF. member(Config) when is_list(Config) -> - repeat_for_opts(member_do, [write_concurrency, all_types]). + repeat_for_opts(fun member_do/1, [write_concurrency, all_types]). member_do(Opts) -> EtsMem = etsmem(), @@ -4453,26 +4452,26 @@ time_match(Tab,Match) -> seventyfive_percent_success(_,S,Fa,0) -> true = (S > ((S + Fa) * 0.75)); -seventyfive_percent_success({M,F,A},S,Fa,N) -> - case (catch apply(M,F,A)) of - {'EXIT', _} -> - seventyfive_percent_success({M,F,A},S,Fa+1,N-1); - _ -> - seventyfive_percent_success({M,F,A},S+1,Fa,N-1) +seventyfive_percent_success(F, S, Fa, N) when is_function(F, 0) -> + try F() of + _ -> + seventyfive_percent_success(F, S+1, Fa, N-1) + catch error:_ -> + seventyfive_percent_success(F, S, Fa+1, N-1) end. fifty_percent_success(_,S,Fa,0) -> true = (S > ((S + Fa) * 0.5)); -fifty_percent_success({M,F,A},S,Fa,N) -> - case (catch apply(M,F,A)) of - {'EXIT', _} -> - fifty_percent_success({M,F,A},S,Fa+1,N-1); - _ -> - fifty_percent_success({M,F,A},S+1,Fa,N-1) +fifty_percent_success(F, S, Fa, N) when is_function(F, 0) -> + try F() of + _ -> + fifty_percent_success(F, S+1, Fa, N-1) + catch + error:_ -> + fifty_percent_success(F, S, Fa+1, N-1) end. - create_random_string(0) -> []; @@ -4811,7 +4810,7 @@ otp_6338(Config) when is_list(Config) -> %% Elements could come in the wrong order in a bag if a rehash occurred. 
otp_5340(Config) when is_list(Config) -> - repeat_for_opts(otp_5340_do). + repeat_for_opts(fun otp_5340_do/1). otp_5340_do(Opts) -> N = 3000, @@ -4847,7 +4846,7 @@ verify2(_Err, _) -> %% delete_object followed by delete on fixed bag failed to delete objects. otp_7665(Config) when is_list(Config) -> - repeat_for_opts(otp_7665_do). + repeat_for_opts(fun otp_7665_do/1). otp_7665_do(Opts) -> Tab = ets_new(otp_7665,[bag | Opts]), @@ -4877,7 +4876,7 @@ otp_7665_act(Tab,Min,Max,DelNr) -> %% Whitebox testing of meta name table hashing. meta_wb(Config) when is_list(Config) -> EtsMem = etsmem(), - repeat_for_opts(meta_wb_do), + repeat_for_opts(fun meta_wb_do/1), verify_etsmem(EtsMem). @@ -5446,7 +5445,7 @@ smp_select_delete(Config) when is_list(Config) -> %% Test different types. types(Config) when is_list(Config) -> init_externals(), - repeat_for_opts(types_do,[[set,ordered_set],compressed]). + repeat_for_opts(fun types_do/1, [[set,ordered_set],compressed]). types_do(Opts) -> EtsMem = etsmem(), @@ -5848,12 +5847,8 @@ log_test_proc(Proc) when is_pid(Proc) -> Proc. my_spawn(Fun) -> log_test_proc(spawn(Fun)). -%%my_spawn(M,F,A) -> log_test_proc(spawn(M,F,A)). -%%my_spawn(N,M,F,A) -> log_test_proc(spawn(N,M,F,A)). my_spawn_link(Fun) -> log_test_proc(spawn_link(Fun)). -my_spawn_link(M,F,A) -> log_test_proc(spawn_link(M,F,A)). -%%my_spawn_link(N,M,F,A) -> log_test_proc(spawn_link(N,M,F,A)). my_spawn_opt(Fun,Opts) -> case spawn_opt(Fun,Opts) of @@ -6096,7 +6091,7 @@ make_port() -> open_port({spawn, "efile"}, [eof]). make_pid() -> - spawn_link(?MODULE, sleeper, []). + spawn_link(fun sleeper/0). sleeper() -> receive after infinity -> ok end. @@ -6232,11 +6227,7 @@ make_unaligned_sub_binary(List) -> repeat_for_opts(F) -> repeat_for_opts(F, [write_concurrency, read_concurrency, compressed]). -repeat_for_opts(F, OptGenList) when is_atom(F) -> - repeat_for_opts(fun(Opts) -> ?MODULE:F(Opts) end, OptGenList); -repeat_for_opts({F,Args}, OptGenList) when is_atom(F) -> - repeat_for_opts(fun(Opts) -> ?MODULE:F(Opts,Args) end, OptGenList); -repeat_for_opts(F, OptGenList) -> +repeat_for_opts(F, OptGenList) when is_function(F, 1) -> repeat_for_opts(F, OptGenList, []). repeat_for_opts(F, [], Acc) -> diff --git a/lib/stdlib/test/ets_tough_SUITE.erl b/lib/stdlib/test/ets_tough_SUITE.erl index 49aba7a529..0abce3200f 100644 --- a/lib/stdlib/test/ets_tough_SUITE.erl +++ b/lib/stdlib/test/ets_tough_SUITE.erl @@ -19,10 +19,15 @@ %% -module(ets_tough_SUITE). -export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, - init_per_group/2,end_per_group/2,ex1/1]). --export([init/1,terminate/2,handle_call/3,handle_info/2]). + init_per_group/2,end_per_group/2, + ex1/1]). -export([init_per_testcase/2, end_per_testcase/2]). --compile([export_all]). + +%% gen_server behavior. +-behavior(gen_server). +-export([init/1,terminate/2,handle_call/3,handle_cast/2, + handle_info/2,code_change/3]). + -include_lib("common_test/include/ct.hrl"). suite() -> @@ -235,33 +240,6 @@ random_element(T) -> I = rand:uniform(tuple_size(T)), element(I,T). -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -show_table(N) -> - FileName = ["etsdump.",integer_to_list(N)], - case file:open(FileName,read) of - {ok,Fd} -> - show_entries(Fd); - _ -> - error - end. - -show_entries(Fd) -> - case phys_read_len(Fd) of - {ok,Len} -> - case phys_read_entry(Fd,Len) of - {ok,ok} -> - ok; - {ok,{Key,Val}} -> - io:format("~w\n",[{Key,Val}]), - show_entries(Fd); - _ -> - error - end; - _ -> - error - end. 
- %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% @@ -378,20 +356,6 @@ dget_class(ServerPid,Class,Condition) -> derase_class(ServerPid,Class) -> gen_server:call(ServerPid,{handle_delete_class,Class}, infinity). -%%% dmodify(ServerPid,Application) -> ok -%%% -%%% Applies a function on every instance in the database. -%%% The user provided function must always return one of the -%%% terms {ok,NewItem}, true, or false. -%%% Aug 96, this is only used to reset all timestamp values -%%% in the database. -%%% The function is supplied as Application = {Mod, Fun, ExtraArgs}, -%%% where the instance will be prepended to ExtraArgs before each -%%% call is made. - -dmodify(ServerPid,Application) -> - gen_server:call(ServerPid,{handle_dmodify,Application}, infinity). - %%% ddump_first(ServerPid,DumpDir) -> {dump_more,Ticket} | already_dumping %%% %%% Starts dumping the database. This call redirects all database updates @@ -643,9 +607,15 @@ handle_call(stop,_From,Admin) -> ?ets_delete(Admin), % Make sure table is gone before reply is sent. {stop, normal, ok, []}. +handle_cast(_Req, Admin) -> + {noreply, Admin}. + handle_info({'EXIT',_Pid,_Reason},Admin) -> {stop,normal,Admin}. +code_change(_OldVsn, StateData, _Extra) -> + {ok, StateData}. + handle_delete(Class, Key, Admin) -> handle_call({handle_delete,Class,Key},from,Admin). diff --git a/lib/stdlib/test/filelib_SUITE.erl b/lib/stdlib/test/filelib_SUITE.erl index 4f8936edbf..87fba815d2 100644 --- a/lib/stdlib/test/filelib_SUITE.erl +++ b/lib/stdlib/test/filelib_SUITE.erl @@ -25,7 +25,8 @@ init_per_testcase/2,end_per_testcase/2, wildcard_one/1,wildcard_two/1,wildcard_errors/1, fold_files/1,otp_5960/1,ensure_dir_eexist/1,ensure_dir_symlink/1, - wildcard_symlink/1, is_file_symlink/1, file_props_symlink/1]). + wildcard_symlink/1, is_file_symlink/1, file_props_symlink/1, + find_source/1]). -import(lists, [foreach/2]). @@ -45,7 +46,8 @@ suite() -> all() -> [wildcard_one, wildcard_two, wildcard_errors, fold_files, otp_5960, ensure_dir_eexist, ensure_dir_symlink, - wildcard_symlink, is_file_symlink, file_props_symlink]. + wildcard_symlink, is_file_symlink, file_props_symlink, + find_source]. groups() -> []. @@ -503,3 +505,52 @@ file_props_symlink(Config) -> FileSize = filelib:file_size(Alias, erl_prim_loader), FileSize = filelib:file_size(Alias, prim_file) end. 
+ +find_source(Config) when is_list(Config) -> + BeamFile = code:which(lists), + BeamName = filename:basename(BeamFile), + BeamDir = filename:dirname(BeamFile), + SrcName = filename:basename(BeamFile, ".beam") ++ ".erl", + + {ok, BeamFile} = filelib:find_file(BeamName, BeamDir), + {ok, BeamFile} = filelib:find_file(BeamName, BeamDir, []), + {ok, BeamFile} = filelib:find_file(BeamName, BeamDir, [{"",""},{"ebin","src"}]), + {error, not_found} = filelib:find_file(BeamName, BeamDir, [{"ebin","src"}]), + + {ok, SrcFile} = filelib:find_file(SrcName, BeamDir), + {ok, SrcFile} = filelib:find_file(SrcName, BeamDir, []), + {ok, SrcFile} = filelib:find_file(SrcName, BeamDir, [{"foo","bar"},{"ebin","src"}]), + {error, not_found} = filelib:find_file(SrcName, BeamDir, [{"",""}]), + + {ok, SrcFile} = filelib:find_source(BeamFile), + {ok, SrcFile} = filelib:find_source(BeamName, BeamDir), + {ok, SrcFile} = filelib:find_source(BeamName, BeamDir, + [{".erl",".yrl",[{"",""}]}, + {".beam",".erl",[{"ebin","src"}]}]), + {error, not_found} = filelib:find_source(BeamName, BeamDir, + [{".erl",".yrl",[{"",""}]}]), + + {ok, ParserErl} = filelib:find_source(code:which(erl_parse)), + {ok, ParserYrl} = filelib:find_source(ParserErl), + "lry." ++ _ = lists:reverse(ParserYrl), + {ok, ParserYrl} = filelib:find_source(ParserErl, + [{".beam",".erl",[{"ebin","src"}]}, + {".erl",".yrl",[{"",""}]}]), + + %% find_source automatically checks the local directory regardless of rules + {ok, ParserYrl} = filelib:find_source(ParserErl), + {ok, ParserYrl} = filelib:find_source(ParserErl, + [{".beam",".erl",[{"ebin","src"}]}]), + + %% find_file does not check the local directory unless in the rules + ParserYrlName = filename:basename(ParserYrl), + ParserYrlDir = filename:dirname(ParserYrl), + {ok, ParserYrl} = filelib:find_file(ParserYrlName, ParserYrlDir, + [{"",""}]), + {error, not_found} = filelib:find_file(ParserYrlName, ParserYrlDir, + [{"ebin","src"}]), + + %% local directory is in the default list for find_file + {ok, ParserYrl} = filelib:find_file(ParserYrlName, ParserYrlDir), + {ok, ParserYrl} = filelib:find_file(ParserYrlName, ParserYrlDir, []), + ok. diff --git a/lib/stdlib/test/filename_SUITE.erl b/lib/stdlib/test/filename_SUITE.erl index b7c4d3a6e5..dc3daa56c1 100644 --- a/lib/stdlib/test/filename_SUITE.erl +++ b/lib/stdlib/test/filename_SUITE.erl @@ -29,6 +29,7 @@ dirname_bin/1, extension_bin/1, join_bin/1, t_nativename_bin/1]). -export([pathtype_bin/1,rootname_bin/1,split_bin/1]). -export([t_basedir_api/1, t_basedir_xdg/1, t_basedir_windows/1]). +-export([safe_relative_path/1]). -include_lib("common_test/include/ct.hrl"). @@ -41,7 +42,8 @@ all() -> find_src, absname_bin, absname_bin_2, {group,p}, - t_basedir_xdg, t_basedir_windows]. + t_basedir_xdg, t_basedir_windows, + safe_relative_path]. groups() -> [{p, [parallel], @@ -421,8 +423,10 @@ t_nativename(Config) when is_list(Config) -> find_src(Config) when is_list(Config) -> {Source,_} = filename:find_src(file), ["file"|_] = lists:reverse(filename:split(Source)), - {_,_} = filename:find_src(init, [{".","."}, {"ebin","src"}]), - + {Source,_} = filename:find_src(file, [{"",""}, {"ebin","src"}]), + {Source,_} = filename:find_src(Source), + {Source,_} = filename:find_src(Source ++ ".erl"), + %% Try to find the source for a preloaded module. {error,{preloaded,init}} = filename:find_src(init), @@ -768,6 +772,71 @@ t_nativename_bin(Config) when is_list(Config) -> filename:nativename(<<"/usr/tmp//arne/">>) end. 
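The filelib_SUITE test above exercises the new filelib:find_file/2,3 and filelib:find_source/1,2,3 lookups. As a rough illustration of how a tool might call them, here is a minimal sketch; locate_src/1 is a hypothetical wrapper, not part of the suite, and the rules list mentioned in the comment is only one possible configuration.

    %% Resolve the source file of a loaded module via the new filelib API
    %% (hypothetical helper; relies on the default lookup rules).
    locate_src(Module) ->
        case code:which(Module) of
            Beam when is_list(Beam) ->
                %% An explicit rules list such as
                %% [{".beam", ".erl", [{"ebin", "src"}]}] may also be passed.
                filelib:find_source(Beam);
            _Other ->
                {error, not_loaded}
        end.

On success this returns {ok, SourcePath}, otherwise {error, not_found}, matching the assertions in the test above.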
+safe_relative_path(Config) -> + PrivDir = proplists:get_value(priv_dir, Config), + Root = filename:join(PrivDir, ?FUNCTION_NAME), + ok = file:make_dir(Root), + ok = file:set_cwd(Root), + + ok = file:make_dir("a"), + ok = file:set_cwd("a"), + ok = file:make_dir("b"), + ok = file:set_cwd("b"), + ok = file:make_dir("c"), + + ok = file:set_cwd(Root), + + "a" = test_srp("a"), + "a/b" = test_srp("a/b"), + "a/b" = test_srp("a/./b"), + "a/b" = test_srp("a/./b/."), + + "" = test_srp("a/.."), + "" = test_srp("a/./.."), + "" = test_srp("a/../."), + "a" = test_srp("a/b/.."), + "a" = test_srp("a/../a"), + "a" = test_srp("a/../a/../a"), + "a/b/c" = test_srp("a/../a/b/c"), + + unsafe = test_srp("a/../.."), + unsafe = test_srp("a/../../.."), + unsafe = test_srp("a/./../.."), + unsafe = test_srp("a/././../../.."), + unsafe = test_srp("a/b/././../../.."), + + unsafe = test_srp(PrivDir), %Absolute path. + + ok. + +test_srp(RelPath) -> + Res = do_test_srp(RelPath), + Res = case do_test_srp(list_to_binary(RelPath)) of + Bin when is_binary(Bin) -> + binary_to_list(Bin); + Other -> + Other + end. + +do_test_srp(RelPath) -> + {ok,Root} = file:get_cwd(), + ok = file:set_cwd(RelPath), + {ok,Cwd} = file:get_cwd(), + ok = file:set_cwd(Root), + case filename:safe_relative_path(RelPath) of + unsafe -> + true = length(Cwd) < length(Root), + unsafe; + "" -> + ""; + SafeRelPath -> + ok = file:set_cwd(SafeRelPath), + {ok,Cwd} = file:get_cwd(), + true = length(Cwd) >= length(Root), + ok = file:set_cwd(Root), + SafeRelPath + end. + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% basedirs t_basedir_api(Config) when is_list(Config) -> diff --git a/lib/stdlib/test/gen_statem_SUITE.erl b/lib/stdlib/test/gen_statem_SUITE.erl index 8f2ba0cab2..ac27c9fc79 100644 --- a/lib/stdlib/test/gen_statem_SUITE.erl +++ b/lib/stdlib/test/gen_statem_SUITE.erl @@ -38,7 +38,7 @@ all() -> {group, abnormal}, {group, abnormal_handle_event}, shutdown, stop_and_reply, state_enter, event_order, - state_timeout, code_change, + state_timeout, event_types, code_change, {group, sys}, hibernate, enter_loop]. @@ -600,15 +600,26 @@ state_enter(_Config) -> (internal, Prev, N) -> Self ! {internal,start,Prev,N}, {keep_state,N + 1}; + ({call,From}, repeat, N) -> + {repeat_state,N + 1, + [{reply,From,{repeat,start,N}}]}; ({call,From}, echo, N) -> - {next_state,wait,N + 1,{reply,From,{echo,start,N}}}; + {next_state,wait,N + 1, + {reply,From,{echo,start,N}}}; ({call,From}, {stop,Reason}, N) -> - {stop_and_reply,Reason,[{reply,From,{stop,N}}],N + 1} + {stop_and_reply,Reason, + [{reply,From,{stop,N}}],N + 1} end, wait => - fun (enter, Prev, N) -> + fun (enter, Prev, N) when N < 5 -> + {repeat_state,N + 1, + {reply,{Self,N},{enter,Prev}}}; + (enter, Prev, N) -> Self ! 
{enter,wait,Prev,N}, {keep_state,N + 1}; + ({call,From}, repeat, N) -> + {repeat_state_and_data, + [{reply,From,{repeat,wait,N}}]}; ({call,From}, echo, N) -> {next_state,start,N + 1, [{next_event,internal,wait}, @@ -620,11 +631,15 @@ state_enter(_Config) -> [{enter,start,start,1}] = flush(), {echo,start,2} = gen_statem:call(STM, echo), - [{enter,wait,start,3}] = flush(), - {wait,[4|_]} = sys:get_state(STM), - {echo,wait,4} = gen_statem:call(STM, echo), - [{enter,start,wait,5},{internal,start,wait,6}] = flush(), - {stop,7} = gen_statem:call(STM, {stop,bye}), + [{3,{enter,start}},{4,{enter,start}},{enter,wait,start,5}] = flush(), + {wait,[6|_]} = sys:get_state(STM), + {repeat,wait,6} = gen_statem:call(STM, repeat), + [{enter,wait,wait,6}] = flush(), + {echo,wait,7} = gen_statem:call(STM, echo), + [{enter,start,wait,8},{internal,start,wait,9}] = flush(), + {repeat,start,10} = gen_statem:call(STM, repeat), + [{enter,start,start,11}] = flush(), + {stop,12} = gen_statem:call(STM, {stop,bye}), [{'EXIT',STM,bye}] = flush(), {noproc,_} = @@ -801,6 +816,74 @@ state_timeout(_Config) -> +%% Test that all event types can be sent with {next_event,EventType,_} +event_types(_Config) -> + process_flag(trap_exit, true), + + Machine = + %% Abusing the internal format of From... + #{init => + fun () -> + {ok, start, undefined} + end, + start => + fun ({call,_} = Call, Req, undefined) -> + {next_state, state1, undefined, + [{next_event,internal,1}, + {next_event,state_timeout,2}, + {next_event,timeout,3}, + {next_event,info,4}, + {next_event,cast,5}, + {next_event,Call,Req}]} + end, + state1 => + fun (internal, 1, undefined) -> + {next_state, state2, undefined} + end, + state2 => + fun (state_timeout, 2, undefined) -> + {next_state, state3, undefined} + end, + state3 => + fun (timeout, 3, undefined) -> + {next_state, state4, undefined} + end, + state4 => + fun (info, 4, undefined) -> + {next_state, state5, undefined} + end, + state5 => + fun (cast, 5, undefined) -> + {next_state, state6, undefined} + end, + state6 => + fun ({call,From}, stop, undefined) -> + {stop_and_reply, shutdown, + [{reply,From,stopped}]} + end}, + {ok,STM} = + gen_statem:start_link( + ?MODULE, {map_statem,Machine,[]}, [{debug,[trace]}]), + + stopped = gen_statem:call(STM, stop), + receive + {'EXIT',STM,shutdown} -> + ok + after 500 -> + ct:fail(did_not_stop) + end, + + {noproc,_} = + ?EXPECT_FAILURE(gen_statem:call(STM, hej), Reason), + case flush() of + [] -> + ok; + Other2 -> + ct:fail({unexpected,Other2}) + end. + + + sys1(Config) -> {ok,Pid} = gen_statem:start(?MODULE, start_arg(Config, []), []), {status, Pid, {module,gen_statem}, _} = sys:get_status(Pid), @@ -1722,6 +1805,10 @@ handle_event( {keep_state,[NewData|Machine]}; {keep_state,NewData,Ops} -> {keep_state,[NewData|Machine],Ops}; + {repeat_state,NewData} -> + {repeat_state,[NewData|Machine]}; + {repeat_state,NewData,Ops} -> + {repeat_state,[NewData|Machine],Ops}; Other -> Other end; diff --git a/lib/stdlib/test/io_SUITE.erl b/lib/stdlib/test/io_SUITE.erl index 7d48cbc97c..d546e8fad2 100644 --- a/lib/stdlib/test/io_SUITE.erl +++ b/lib/stdlib/test/io_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1999-2016. All Rights Reserved. +%% Copyright Ericsson AB 1999-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. 
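Stepping back to the gen_statem_SUITE changes above: they exercise state enter calls together with the repeat_state and repeat_state_and_data return values, where repeat_state re-runs the enter call for the current state. A minimal self-contained sketch, with an invented module name and printout, assuming an OTP release that supports repeat_state:

    -module(knock_statem).
    -behaviour(gen_statem).
    -export([start_link/0, knock/1]).
    -export([callback_mode/0, init/1, locked/3]).

    start_link() -> gen_statem:start_link(?MODULE, [], []).
    knock(Pid)   -> gen_statem:call(Pid, knock).

    callback_mode() -> [state_functions, state_enter].

    init([]) -> {ok, locked, 0}.

    %% The enter call runs again on every repeat_state, unlike keep_state.
    locked(enter, _OldState, Knocks) ->
        io:format("entering locked after ~p knock(s)~n", [Knocks]),
        keep_state_and_data;
    locked({call, From}, knock, Knocks) ->
        {repeat_state, Knocks + 1, [{reply, From, {knocked, Knocks + 1}}]}.

Each knock_statem:knock/1 call then triggers a fresh enter callback even though the state stays locked, which is the behaviour the suite asserts via its flush of enter messages.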
@@ -30,7 +30,7 @@ io_lib_print_binary_depth_one/1, otp_10302/1, otp_10755/1, otp_10836/1, io_lib_width_too_small/1, io_with_huge_message_queue/1, format_string/1, - maps/1, coverage/1]). + maps/1, coverage/1, otp_14178_unicode_atoms/1, otp_14175/1]). -export([pretty/2]). @@ -61,7 +61,7 @@ all() -> printable_range, bad_printable_range, io_lib_print_binary_depth_one, otp_10302, otp_10755, otp_10836, io_lib_width_too_small, io_with_huge_message_queue, - format_string, maps, coverage]. + format_string, maps, coverage, otp_14178_unicode_atoms, otp_14175]. %% Error cases for output. error_1(Config) when is_list(Config) -> @@ -415,13 +415,13 @@ otp_6354(Config) when is_list(Config) -> bt(<<"#rrrrr{\n" " f1 = 1,\n" " f2 = #rrrrr{f1 = a,f2 = b,f3 = c},\n" - " f3 = \n" + " f3 =\n" " #rrrrr{\n" " f1 = h,f2 = i,\n" - " f3 = \n" + " f3 =\n" " #rrrrr{\n" " f1 = aa,\n" - " f2 = \n" + " f2 =\n" " #rrrrr{\n" " f1 = #rrrrr{f1 = a,f2 = b,f3 = c},\n" " f2 = 2,f3 = 3},\n" @@ -431,17 +431,17 @@ otp_6354(Config) when is_list(Config) -> 2,3},bb}}}, -1)), bt(<<"#d{aaaaaaaaaaaaaaaaaaaa = 1,\n" - " bbbbbbbbbbbbbbbbbbbb = \n" + " bbbbbbbbbbbbbbbbbbbb =\n" " #d{aaaaaaaaaaaaaaaaaaaa = a,bbbbbbbbbbbbbbbbbbbb = b,\n" " cccccccccccccccccccc = c,dddddddddddddddddddd = d,\n" " eeeeeeeeeeeeeeeeeeee = e},\n" " cccccccccccccccccccc = 3,\n" - " dddddddddddddddddddd = \n" + " dddddddddddddddddddd =\n" " #d{aaaaaaaaaaaaaaaaaaaa = h,bbbbbbbbbbbbbbbbbbbb = i,\n" - " cccccccccccccccccccc = \n" + " cccccccccccccccccccc =\n" " #d{aaaaaaaaaaaaaaaaaaaa = aa," "bbbbbbbbbbbbbbbbbbbb = bb,\n" - " cccccccccccccccccccc = \n" + " cccccccccccccccccccc =\n" " #d{aaaaaaaaaaaaaaaaaaaa = 1," "bbbbbbbbbbbbbbbbbbbb = 2,\n" " cccccccccccccccccccc = 3," @@ -534,21 +534,21 @@ otp_6354(Config) when is_list(Config) -> p({A,{A,{A,{A,{A,{A,{A, {g,{h,{i,{j,{k,{l,{m,{n,{o,{a}}}}}}}}}}}}}}}}}, 100)), bt(<<"#c{\n" - " f1 = \n" + " f1 =\n" " #c{\n" - " f1 = \n" + " f1 =\n" " #c{\n" - " f1 = \n" + " f1 =\n" " #c{\n" - " f1 = \n" + " f1 =\n" " #c{\n" - " f1 = \n" + " f1 =\n" " #c{\n" - " f1 = \n" + " f1 =\n" " #c{\n" - " f1 = \n" + " f1 =\n" " #c{\n" - " f1 = \n" + " f1 =\n" " #c{\n" " f1 = #c{f1 = #c{f1 = #c{f1 = a," "f2 = b},f2 = b},f2 = b},\n" @@ -564,13 +564,13 @@ otp_6354(Config) when is_list(Config) -> p({c,{c,{c,{c,{c,{c,{c,{c,{c,{c,{c,{c,a,b},b},b},b},b},b}, b},b},b},b},b},b}, -1)), bt(<<"#rrrrr{\n" - " f1 = \n" + " f1 =\n" " #rrrrr{\n" - " f1 = \n" + " f1 =\n" " #rrrrr{\n" - " f1 = \n" + " f1 =\n" " #rrrrr{\n" - " f1 = \n" + " f1 =\n" " {rrrrr,{rrrrr,a,#rrrrr{f1 = {rrrrr,1,2},f2 = a," "f3 = b}},b},\n" " f2 = {rrrrr,c,d},\n" @@ -2106,3 +2106,221 @@ coverage(_Config) -> io:format("~s\n", [S2]), ok. + +%% Test UTF-8 atoms. +otp_14178_unicode_atoms(_Config) -> + "atom" = fmt("~ts", ['atom']), + "кирилли́ческий атом" = fmt("~ts", ['кирилли́ческий атом']), + [16#10FFFF] = fmt("~ts", ['\x{10FFFF}']), + + %% ~s must not accept code points greater than 255. + bad_io_lib_format("~s", ['\x{100}']), + bad_io_lib_format("~s", ['кирилли́ческий атом']), + + ok. + +bad_io_lib_format(F, S) -> + try io_lib:format(F, S) of + _ -> + ct:fail({should_fail,F,S}) + catch + error:badarg -> + ok + end. + +otp_14175(_Config) -> + "..." 
= p(#{}, 0), + "#{}" = p(#{}, 1), + "#{...}" = p(#{a => 1}, 1), + "#{#{} => a}" = p(#{#{} => a}, 2), + "#{a => 1,...}" = p(#{a => 1, b => 2}, 2), + "#{a => 1,b => 2}" = p(#{a => 1, b => 2}, -1), + + M = #{kaaaaaaaaaaaaaaaaaaa => v1,kbbbbbbbbbbbbbbbbbbb => v2, + kccccccccccccccccccc => v3,kddddddddddddddddddd => v4, + keeeeeeeeeeeeeeeeeee => v5}, + "#{...}" = p(M, 1), + mt("#{kaaaaaaaaaaaaaaaaaaaa => v1,...}", p(M, 2)), + mt("#{kaaaaaaaaaaaaaaaaaaaa => 1,kbbbbbbbbbbbbbbbbbbbb => 2,...}", + p(M, 3)), + + mt("#{kaaaaaaaaaaaaaaaaaaa => v1,kbbbbbbbbbbbbbbbbbbb => v2,\n" + " kccccccccccccccccccc => v3,...}", p(M, 4)), + + mt("#{kaaaaaaaaaaaaaaaaaaa => v1,kbbbbbbbbbbbbbbbbbbb => v2,\n" + " kccccccccccccccccccc => v3,kddddddddddddddddddd => v4,...}", + p(M, 5)), + + mt("#{kaaaaaaaaaaaaaaaaaaa => v1,kbbbbbbbbbbbbbbbbbbb => v2,\n" + " kccccccccccccccccccc => v3,kddddddddddddddddddd => v4,\n" + " keeeeeeeeeeeeeeeeeee => v5}", p(M, 6)), + + weak("#{aaaaaaaaaaaaaaaaaaa => 1,bbbbbbbbbbbbbbbbbbbb => 2,\n" + " cccccccccccccccccccc => {3},\n" + " dddddddddddddddddddd => 4,eeeeeeeeeeeeeeeeeeee => 5}", + p(#{aaaaaaaaaaaaaaaaaaa => 1,bbbbbbbbbbbbbbbbbbbb => 2, + cccccccccccccccccccc => {3}, + dddddddddddddddddddd => 4,eeeeeeeeeeeeeeeeeeee => 5}, -1)), + + M2 = #{dddddddddddddddddddd => {1}, {aaaaaaaaaaaaaaaaaaaa} => 2, + {bbbbbbbbbbbbbbbbbbbb} => 3,{cccccccccccccccccccc} => 4, + {eeeeeeeeeeeeeeeeeeee} => 5}, + "#{...}" = p(M2, 1), + weak("#{dddddddddddddddddddd => {...},...}", p(M2, 2)), + weak("#{dddddddddddddddddddd => {1},{...} => 2,...}", p(M2, 3)), + + weak("#{dddddddddddddddddddd => {1},\n" + " {aaaaaaaaaaaaaaaaaaaa} => 2,\n" + " {...} => 3,...}", p(M2, 4)), + + weak("#{dddddddddddddddddddd => {1},\n" + " {aaaaaaaaaaaaaaaaaaaa} => 2,\n" + " {bbbbbbbbbbbbbbbbbbbb} => 3,\n" + " {...} => 4,...}", p(M2, 5)), + + weak("#{dddddddddddddddddddd => {1},\n" + " {aaaaaaaaaaaaaaaaaaaa} => 2,\n" + " {bbbbbbbbbbbbbbbbbbbb} => 3,\n" + " {cccccccccccccccccccc} => 4,\n" + " {...} => 5}", p(M2, 6)), + + weak("#{dddddddddddddddddddd => {1},\n" + " {aaaaaaaaaaaaaaaaaaaa} => 2,\n" + " {bbbbbbbbbbbbbbbbbbbb} => 3,\n" + " {cccccccccccccccccccc} => 4,\n" + " {eeeeeeeeeeeeeeeeeeee} => 5}", p(M2, 7)), + + M3 = #{kaaaaaaaaaaaaaaaaaaa => vuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuu, + kbbbbbbbbbbbbbbbbbbb => vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv, + kccccccccccccccccccc => vxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx, + kddddddddddddddddddd => vyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy, + keeeeeeeeeeeeeeeeeee => vzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz}, + + mt("#{aaaaaaaaaaaaaaaaaaaa =>\n" + " uuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuu,\n" + " bbbbbbbbbbbbbbbbbbbb =>\n" + " vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv,\n" + " cccccccccccccccccccc =>\n" + " xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,\n" + " dddddddddddddddddddd =>\n" + " yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy,\n" + " eeeeeeeeeeeeeeeeeeee =>\n" + " zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz}", p(M3, -1)), + + R4 = {c,{c,{c,{c,{c,{c,{c,{c,{c,{c,{c,{c,a,b},b},b},b},b},b}, + b},b},b},b},b},b}, + M4 = #{aaaaaaaaaaaaaaaaaaaa => R4, + bbbbbbbbbbbbbbbbbbbb => R4, + cccccccccccccccccccc => R4, + dddddddddddddddddddd => R4, + eeeeeeeeeeeeeeeeeeee => R4}, + + weak("#{aaaaaaaaaaaaaaaaaaaa =>\n" + " #c{f1 = #c{f1 = #c{...},f2 = b},f2 = b},\n" + " bbbbbbbbbbbbbbbbbbbb => #c{f1 = #c{f1 = {...},...},f2 = b},\n" + " cccccccccccccccccccc => #c{f1 = #c{...},f2 = b},\n" + " dddddddddddddddddddd => #c{f1 = {...},...},\n" + " eeeeeeeeeeeeeeeeeeee => #c{...}}", p(M4, 7)), + + M5 = 
#{aaaaaaaaaaaaaaaaaaaa => R4}, + mt("#{aaaaaaaaaaaaaaaaaaaa =>\n" + " #c{\n" + " f1 =\n" + " #c{\n" + " f1 =\n" + " #c{\n" + " f1 =\n" + " #c{\n" + " f1 =\n" + " #c{\n" + " f1 =\n" + " #c{\n" + " f1 =\n" + " #c{\n" + " f1 =\n" + " #c{\n" + " f1 =\n" + " #c{\n" + " f1 = #c{f1 = #c{f1 = #c{f1 = a,f2 = b},f2 = b}," + "f2 = b},\n" + " f2 = b},\n" + " f2 = b},\n" + " f2 = b},\n" + " f2 = b},\n" + " f2 = b},\n" + " f2 = b},\n" + " f2 = b},\n" + " f2 = b},\n" + " f2 = b}}", p(M5, -1)), + ok. + +%% Just check number of newlines and dots ('...'). +-define(WEAK, true). + +-ifdef(WEAK). + +weak(S, R) -> + (nl(S) =:= nl(R) andalso + dots(S) =:= dots(S)). + +nl(S) -> + [C || C <- S, C =:= $\n]. + +dots(S) -> + [C || C <- S, C =:= $\.]. + +-else. % WEAK + +weak(S, R) -> + mt(S, R). + +-endif. % WEAK + +%% If EXACT is defined: mt() matches strings exactly. +%% +%% if EXACT is not defined: do not match the strings exactly, but +%% compare them assuming that all map keys and all map values are +%% equal (by assuming all map keys and all map values have the same +%% length and begin with $k and $v respectively). + +%-define(EXACT, true). + +-ifdef(EXACT). + +mt(S, R) -> + S =:= R. + +-else. % EXACT + +mt(S, R) -> + anon(S) =:= anon(R). + +anon(S) -> + {ok, Ts0, _} = erl_scan:string(S, 1, [text]), + Ts = anon1(Ts0), + text(Ts). + +anon1([]) -> []; +anon1([{atom,Anno,Atom}=T|Ts]) -> + case erl_anno:text(Anno) of + "k" ++ _ -> + NewAnno = erl_anno:set_text("key", Anno), + [{atom,NewAnno,Atom}|anon1(Ts)]; + "v" ++ _ -> + NewAnno = erl_anno:set_text("val", Anno), + [{atom,NewAnno,Atom}|anon1(Ts)]; + _ -> + [T|anon1(Ts)] + end; +anon1([T|Ts]) -> + [T|anon1(Ts)]. + +text(Ts) -> + lists:append(text1(Ts)). + +text1([]) -> []; +text1([T|Ts]) -> + Anno = element(2, T), + [erl_anno:text(Anno) | text1(Ts)]. + +-endif. % EXACT diff --git a/lib/stdlib/test/lists_SUITE.erl b/lib/stdlib/test/lists_SUITE.erl index 531e97e8d6..5f2d8f0f4e 100644 --- a/lib/stdlib/test/lists_SUITE.erl +++ b/lib/stdlib/test/lists_SUITE.erl @@ -121,7 +121,7 @@ groups() -> {zip, [parallel], [zip_unzip, zip_unzip3, zipwith, zipwith3]}, {misc, [parallel], [reverse, member, dropwhile, takewhile, filter_partition, suffix, subtract, join, - hof]} + hof, droplast]} ]. init_per_suite(Config) -> diff --git a/lib/stdlib/test/qlc_SUITE.erl b/lib/stdlib/test/qlc_SUITE.erl index c08e138ad3..2b5d52287e 100644 --- a/lib/stdlib/test/qlc_SUITE.erl +++ b/lib/stdlib/test/qlc_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2004-2016. All Rights Reserved. +%% Copyright Ericsson AB 2004-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. 
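The io_SUITE additions above cover, among other things, Unicode atoms with the ~ts control sequence (otp_14178_unicode_atoms). A small sketch of the behaviour being asserted, suitable for the shell or any module; the particular atom is arbitrary:

    %% ~ts accepts atoms with code points above 255; ~s raises badarg for them.
    "atom" = lists:flatten(io_lib:format("~ts", [atom])),
    {'EXIT', {badarg, _}} = (catch io_lib:format("~s", ['кирилли́ческий атом'])),
    ok.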
@@ -886,11 +886,12 @@ eval_unique(Config) when is_list(Config) -> [a] = qlc:e(Q2, {unique_all, true}) ">>, - <<"Q = qlc:q([SQV || SQV <- qlc:q([X || X <- [1,2,1]],unique)], + <<"Q = qlc:q([SQV || SQV <- qlc:q([X || X <- [1,2,1,#{a => 1}]], + unique)], unique), {call,_,_,[{lc,_,{var,_,'X'},[{generate,_,{var,_,'X'},_}]},_]} = qlc:info(Q, [{format,abstract_code},unique_all]), - [1,2] = qlc:e(Q)">>, + [1,2,#{a := 1}] = qlc:e(Q)">>, <<"Q = qlc:q([X || X <- [1,2,1]]), {call,_,_,[{lc,_,{var,_,'X'},[{generate,_,{var,_,'X'},_}]},_]} = @@ -2637,7 +2638,16 @@ info(Config) when is_list(Config) -> {cons, _, _, _}]}, {nil,_}}]}]} = i(QH, {format, abstract_code}), [{5},{6}] = qlc:e(QH), - [{4},{5},{6}] = qlc:e(F(3))">> + [{4},{5},{6}] = qlc:e(F(3))">>, + + <<"Fun = fun ?MODULE:i/2, + L = [{#{k => #{v => Fun}}, Fun}], + H = qlc:q([Q || Q <- L, Q =:= {#{k => #{v => Fun}}, Fun}]), + L = qlc:e(H), + {call,_,_,[{lc,_,{var,_,'Q'}, + [{generate,_,_,_}, + {op,_,_,_,_}]}]} = + qlc:info(H, [{format,abstract_code}])">> ], run(Config, Ts), diff --git a/lib/stdlib/test/random_iolist.erl b/lib/stdlib/test/random_iolist.erl index 555f063e0a..b62cf5b82b 100644 --- a/lib/stdlib/test/random_iolist.erl +++ b/lib/stdlib/test/random_iolist.erl @@ -24,17 +24,13 @@ -module(random_iolist). --export([run/3, run2/3, standard_seed/0, compare/3, compare2/3, +-export([run/3, standard_seed/0, compare/3, random_iolist/1]). run(Iter,Fun1,Fun2) -> standard_seed(), compare(Iter,Fun1,Fun2). -run2(Iter,Fun1,Fun2) -> - standard_seed(), - compare2(Iter,Fun1,Fun2). - random_byte() -> rand:uniform(256) - 1. @@ -150,16 +146,6 @@ do_comp(List,F1,F2) -> _ -> true end. - -do_comp(List,List2,F1,F2) -> - X = F1(List,List2), - Y = F2(List,List2), - case X =:= Y of - false -> - exit({not_matching,List,List2,X,Y}); - _ -> - true - end. compare(0,Fun1,Fun2) -> do_comp(<<>>,Fun1,Fun2), @@ -172,25 +158,3 @@ compare(N,Fun1,Fun2) -> L = random_iolist(N), do_comp(L,Fun1,Fun2), compare(N-1,Fun1,Fun2). - -compare2(0,Fun1,Fun2) -> - L = random_iolist(100), - do_comp(<<>>,L,Fun1,Fun2), - do_comp(L,<<>>,Fun1,Fun2), - do_comp(<<>>,<<>>,Fun1,Fun2), - do_comp([],L,Fun1,Fun2), - do_comp(L,[],Fun1,Fun2), - do_comp([],[],Fun1,Fun2), - do_comp([[]|<<>>],L,Fun1,Fun2), - do_comp(L,[[]|<<>>],Fun1,Fun2), - do_comp([[]|<<>>],[[]|<<>>],Fun1,Fun2), - do_comp([<<>>,[]|<<>>],L,Fun1,Fun2), - do_comp(L,[<<>>,[]|<<>>],Fun1,Fun2), - do_comp([<<>>,[]|<<>>],[<<>>,[]|<<>>],Fun1,Fun2), - true; - -compare2(N,Fun1,Fun2) -> - L = random_iolist(N), - L2 = random_iolist(N), - do_comp(L,L2,Fun1,Fun2), - compare2(N-1,Fun1,Fun2). diff --git a/lib/stdlib/test/random_unicode_list.erl b/lib/stdlib/test/random_unicode_list.erl index 8db2fa8b56..2eeb28113d 100644 --- a/lib/stdlib/test/random_unicode_list.erl +++ b/lib/stdlib/test/random_unicode_list.erl @@ -24,7 +24,7 @@ -module(random_unicode_list). --export([run/3, run/4, run2/3, standard_seed/0, compare/4, compare2/3, +-export([run/3, run/4, standard_seed/0, compare/4, random_unicode_list/2]). run(I,F1,F2) -> @@ -33,10 +33,6 @@ run(Iter,Fun1,Fun2,Enc) -> standard_seed(), compare(Iter,Fun1,Fun2,Enc). -run2(Iter,Fun1,Fun2) -> - standard_seed(), - compare2(Iter,Fun1,Fun2). - int_to_utf8(I) when I =< 16#7F -> <<I>>; int_to_utf8(I) when I =< 16#7FF -> @@ -225,16 +221,6 @@ do_comp(List,F1,F2) -> _ -> true end. - -do_comp(List,List2,F1,F2) -> - X = F1(List,List2), - Y = F2(List,List2), - case X =:= Y of - false -> - exit({not_matching,List,List2,X,Y}); - _ -> - true - end. 
compare(0,Fun1,Fun2,_Enc) -> do_comp(<<>>,Fun1,Fun2), @@ -247,25 +233,3 @@ compare(N,Fun1,Fun2,Enc) -> L = random_unicode_list(N,Enc), do_comp(L,Fun1,Fun2), compare(N-1,Fun1,Fun2,Enc). - -compare2(0,Fun1,Fun2) -> - L = random_unicode_list(100,utf8), - do_comp(<<>>,L,Fun1,Fun2), - do_comp(L,<<>>,Fun1,Fun2), - do_comp(<<>>,<<>>,Fun1,Fun2), - do_comp([],L,Fun1,Fun2), - do_comp(L,[],Fun1,Fun2), - do_comp([],[],Fun1,Fun2), - do_comp([[]|<<>>],L,Fun1,Fun2), - do_comp(L,[[]|<<>>],Fun1,Fun2), - do_comp([[]|<<>>],[[]|<<>>],Fun1,Fun2), - do_comp([<<>>,[]|<<>>],L,Fun1,Fun2), - do_comp(L,[<<>>,[]|<<>>],Fun1,Fun2), - do_comp([<<>>,[]|<<>>],[<<>>,[]|<<>>],Fun1,Fun2), - true; - -compare2(N,Fun1,Fun2) -> - L = random_unicode_list(N,utf8), - L2 = random_unicode_list(N,utf8), - do_comp(L,L2,Fun1,Fun2), - compare2(N-1,Fun1,Fun2). diff --git a/lib/stdlib/test/re_testoutput1_replacement_test.erl b/lib/stdlib/test/re_testoutput1_replacement_test.erl index a40800d760..563e0001e4 100644 --- a/lib/stdlib/test/re_testoutput1_replacement_test.erl +++ b/lib/stdlib/test/re_testoutput1_replacement_test.erl @@ -18,7 +18,7 @@ %% %CopyrightEnd% %% -module(re_testoutput1_replacement_test). --compile(export_all). +-export([run/0]). -compile(no_native). %% This file is generated by running run_pcre_tests:gen_repl_test("re_SUITE_data/testoutput1") run() -> diff --git a/lib/stdlib/test/re_testoutput1_split_test.erl b/lib/stdlib/test/re_testoutput1_split_test.erl index 02987971fa..b39cb53a55 100644 --- a/lib/stdlib/test/re_testoutput1_split_test.erl +++ b/lib/stdlib/test/re_testoutput1_split_test.erl @@ -18,7 +18,7 @@ %% %CopyrightEnd% %% -module(re_testoutput1_split_test). --compile(export_all). +-export([run/0]). -compile(no_native). %% This file is generated by running run_pcre_tests:gen_split_test("re_SUITE_data/testoutput1") join([]) -> []; diff --git a/lib/stdlib/test/run_pcre_tests.erl b/lib/stdlib/test/run_pcre_tests.erl index ae56db59d6..b62674d6e0 100644 --- a/lib/stdlib/test/run_pcre_tests.erl +++ b/lib/stdlib/test/run_pcre_tests.erl @@ -18,8 +18,7 @@ %% %CopyrightEnd% %% -module(run_pcre_tests). - --compile(export_all). +-export([test/1,gen_split_test/1,gen_repl_test/1]). test(RootDir) -> put(verbose,false), @@ -119,49 +118,6 @@ test([{RE0,Line,Options0,Tests}|T],PreCompile,XMode,REAsList) -> end end. -loopexec(_,_,X,Y,_,_) when X > Y -> - {match,[]}; -loopexec(P,Chal,X,Y,Unicode,Xopt) -> - case re:run(Chal,P,[{offset,X}]++Xopt) of - nomatch -> - {match,[]}; - {match,[{A,B}|More]} -> - {match,Rest} = - case B>0 of - true -> - loopexec(P,Chal,A+B,Y,Unicode,Xopt); - false -> - {match,M} = case re:run(Chal,P,[{offset,X},notempty,anchored]++Xopt) of - nomatch -> - {match,[]}; - {match,Other} -> - {match,fixup(Chal,Other,0)} - end, - NewA = forward(Chal,A,1,Unicode), - {match,MM} = loopexec(P,Chal,NewA,Y,Unicode,Xopt), - {match,M ++ MM} - end, - {match,fixup(Chal,[{A,B}|More],0)++Rest} - end. - -forward(_Chal,A,0,_) -> - A; -forward(_Chal,A,N,false) -> - A+N; -forward(Chal,A,N,true) -> - <<_:A/binary,Tl/binary>> = Chal, - Forw = case Tl of - <<1:1,1:1,0:1,_:5,_/binary>> -> - 2; - <<1:1,1:1,1:1,0:1,_:4,_/binary>> -> - 3; - <<1:1,1:1,1:1,1:1,0:1,_:3,_/binary>> -> - 4; - _ -> - 1 - end, - forward(Chal,A+Forw,N-1,true). - contains_eightbit(<<>>) -> false; contains_eightbit(<<X:8,_/binary>>) when X >= 128 -> @@ -201,23 +157,6 @@ clean_duplicates([X|T],L) -> end. -global_fixup(_,nomatch) -> - nomatch; -global_fixup(P,{match,M}) -> - {match,lists:flatten(global_fixup2(P,M))}. 
- -global_fixup2(_,[]) -> - []; -global_fixup2(P,[H|T]) -> - [gfixup_one(P,0,H)|global_fixup2(P,T)]. - -gfixup_one(_,_,[]) -> - []; -gfixup_one(P,I,[{Start,Len}|T]) -> - <<_:Start/binary,R:Len/binary,_/binary>> = P, - [{I,R}|gfixup_one(P,I+1,T)]. - - press([]) -> []; press([H|T]) -> @@ -981,7 +920,7 @@ gen_split_test(OneFile) -> ErlFileName = ErlModule++".erl", {ok,F}= file:open(ErlFileName,[write]), io:format(F,"-module(~s).~n",[ErlModule]), - io:format(F,"-compile(export_all).~n",[]), + io:format(F,"-export([run/0]).~n",[]), io:format(F,"-compile(no_native).~n",[]), io:format(F,"%% This file is generated by running ~w:gen_split_test(~p)~n", [?MODULE,OneFile]), @@ -1024,7 +963,7 @@ dumponesplit(F,{RE,Line,O,TS}) -> "$x =~~ s/\\\\/\\\\\\\\/g; $x =~~ s/\\\"/\\\\\"/g; " "print \" <<\\\"$x\\\">> = " "iolist_to_binary(join(re:split(\\\"~s\\\"," - "\\\"~s\\\",~p))), \\n\";'~n", + "\\\"~s\\\",~p))),\\n\";'~n", [zsafe(safe(RE)), SSS, ysafe(safe(Str)), @@ -1035,7 +974,7 @@ dumponesplit(F,{RE,Line,O,TS}) -> "$x =~~ s/\\\\/\\\\\\\\/g; $x =~~ s/\\\"/\\\\\"/g; " "print \" <<\\\"$x\\\">> = " "iolist_to_binary(join(re:split(\\\"~s\\\"," - "\\\"~s\\\",~p))), \\n\";'~n", + "\\\"~s\\\",~p))),\\n\";'~n", [zsafe(safe(RE)), SSS, ysafe(safe(Str)), @@ -1046,7 +985,7 @@ dumponesplit(F,{RE,Line,O,TS}) -> "$x =~~ s/\\\\/\\\\\\\\/g; $x =~~ s/\\\"/\\\\\"/g; " "print \" <<\\\"$x\\\">> = " "iolist_to_binary(join(re:split(\\\"~s\\\"," - "\\\"~s\\\",~p))), \\n\";'~n", + "\\\"~s\\\",~p))),\\n\";'~n", [zsafe(safe(RE)), SSS, ysafe(safe(Str)), @@ -1071,7 +1010,7 @@ gen_repl_test(OneFile) -> ErlFileName = ErlModule++".erl", {ok,F}= file:open(ErlFileName,[write]), io:format(F,"-module(~s).~n",[ErlModule]), - io:format(F,"-compile(export_all).~n",[]), + io:format(F,"-export([run/0]).~n",[]), io:format(F,"-compile(no_native).~n",[]), io:format(F,"%% This file is generated by running ~w:gen_repl_test(~p)~n", [?MODULE,OneFile]), diff --git a/lib/stdlib/test/shell_SUITE.erl b/lib/stdlib/test/shell_SUITE.erl index 15ccdea284..4864bc3d72 100644 --- a/lib/stdlib/test/shell_SUITE.erl +++ b/lib/stdlib/test/shell_SUITE.erl @@ -282,7 +282,7 @@ restricted_local(Config) when is_list(Config) -> comm_err(<<"begin F=fun() -> hello end, foo(F) end.">>), "exception error: undefined shell command banan/1" = comm_err(<<"begin F=fun() -> hello end, banan(F) end.">>), - "{error,"++_ = t(<<"begin F=fun() -> hello end, c(F) end.">>), + "Recompiling "++_ = t(<<"c(shell_SUITE).">>), "exception exit: restricted shell does not allow l(" ++ _ = comm_err(<<"begin F=fun() -> hello end, l(F) end.">>), "exception error: variable 'F' is unbound" = diff --git a/lib/stdlib/test/sofs_SUITE.erl b/lib/stdlib/test/sofs_SUITE.erl index 13c12ad2f2..f67bf16f0f 100644 --- a/lib/stdlib/test/sofs_SUITE.erl +++ b/lib/stdlib/test/sofs_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2001-2016. All Rights Reserved. +%% Copyright Ericsson AB 2001-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -1837,11 +1837,8 @@ digraph(Conf) when is_list(Conf) -> ok. digraph_fail(ExitReason, Fail) -> - {'EXIT', {ExitReason, [{sofs,family_to_digraph,A,_}|_]}} = Fail, - case {test_server:is_native(sofs),A} of - {false,[_,_]} -> ok; - {true,2} -> ok - end. + {'EXIT', {ExitReason, [{sofs,family_to_digraph,2,_}|_]}} = Fail, + ok. 
constant_function(Conf) when is_list(Conf) -> E = empty_set(), diff --git a/lib/stdlib/test/tar_SUITE.erl b/lib/stdlib/test/tar_SUITE.erl index 6f3979bb77..2e1ae7bcff 100644 --- a/lib/stdlib/test/tar_SUITE.erl +++ b/lib/stdlib/test/tar_SUITE.erl @@ -20,11 +20,14 @@ -module(tar_SUITE). -export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, - init_per_group/2,end_per_group/2, borderline/1, atomic/1, long_names/1, + init_per_group/2, end_per_group/2, + init_per_testcase/2, + borderline/1, atomic/1, long_names/1, create_long_names/1, bad_tar/1, errors/1, extract_from_binary/1, - extract_from_binary_compressed/1, + extract_from_binary_compressed/1, extract_filtered/1, extract_from_open_file/1, symlinks/1, open_add_close/1, cooked_compressed/1, - memory/1,unicode/1]). + memory/1,unicode/1,read_other_implementations/1, + sparse/1, init/1]). -include_lib("common_test/include/ct.hrl"). -include_lib("kernel/include/file.hrl"). @@ -35,7 +38,10 @@ all() -> [borderline, atomic, long_names, create_long_names, bad_tar, errors, extract_from_binary, extract_from_binary_compressed, extract_from_open_file, - symlinks, open_add_close, cooked_compressed, memory, unicode]. + extract_filtered, + symlinks, open_add_close, cooked_compressed, memory, unicode, + read_other_implementations, + sparse,init]. groups() -> []. @@ -52,6 +58,9 @@ init_per_group(_GroupName, Config) -> end_per_group(_GroupName, Config) -> Config. +init_per_testcase(_Case, Config) -> + Ports = ordsets:from_list(erlang:ports()), + [{ports,Ports}|Config]. %% Test creating, listing and extracting one file from an archive, %% multiple times with different file sizes. Also check that the file @@ -81,20 +90,33 @@ borderline(Config) when is_list(Config) -> %% Clean up. delete_files([TempDir]), - ok. + verify_ports(Config). borderline_test(Size, TempDir) -> - Archive = filename:join(TempDir, "ar_"++integer_to_list(Size)++".tar"), - Name = filename:join(TempDir, "file_"++integer_to_list(Size)), io:format("Testing size ~p", [Size]), + borderline_test(Size, TempDir, true), + borderline_test(Size, TempDir, false), + ok. + +borderline_test(Size, TempDir, IsUstar) -> + Prefix = case IsUstar of + true -> + "file_"; + false -> + lists:duplicate(100, $f) ++ "ile_" + end, + SizeList = integer_to_list(Size), + Archive = filename:join(TempDir, "ar_"++ SizeList ++".tar"), + Name = filename:join(TempDir, Prefix++SizeList), %% Create a file and archive it. X0 = erlang:monotonic_time(), - file:write_file(Name, random_byte_list(X0, Size)), + ok = file:write_file(Name, random_byte_list(X0, Size)), ok = erl_tar:create(Archive, [Name]), ok = file:delete(Name), %% Verify listing and extracting. + IsUstar = is_ustar(Archive), {ok, [Name]} = erl_tar:table(Archive), ok = erl_tar:extract(Archive, [verbose]), @@ -103,7 +125,12 @@ borderline_test(Size, TempDir) -> true = match_byte_list(X0, binary_to_list(Bin)), %% Verify that Unix tar can read it. - tar_tf(Archive, Name), + case IsUstar of + true -> + tar_tf(Archive, Name); + false -> + ok + end, ok. @@ -248,7 +275,7 @@ atomic(Config) when is_list(Config) -> %% Clean up. delete_files([Tar1,Tar2,Tar3,Tar4|Names]), - ok. + verify_ports(Config). %% Returns a sequence of characters. @@ -282,7 +309,9 @@ long_names(Config) when is_list(Config) -> DataDir = proplists:get_value(data_dir, Config), Long = filename:join(DataDir, "long_names.tar"), run_in_short_tempdir(Config, - fun() -> do_long_names(Long) end). + fun() -> do_long_names(Long) end), + verify_ports(Config). 
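The tar_SUITE changes above snapshot the set of open ports in init_per_testcase/2 and let each case end with verify_ports/1, so a test that leaks an open file (a port) fails instead of going unnoticed. A generic sketch of that pattern, with illustrative helper names:

    %% Record the ports that exist before the operation under test ...
    ports_snapshot() ->
        ordsets:from_list(erlang:ports()).

    %% ... and fail if any new ports are still alive afterwards.
    assert_no_port_leak(Before) ->
        case ordsets:subtract(ports_snapshot(), Before) of
            []     -> ok;
            Leaked -> error({leaked_ports, Leaked})
        end.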
+ do_long_names(Long) -> %% Try table/2 and extract/2. @@ -314,7 +343,8 @@ do_long_names(Long) -> %% Creates a tar file from a deep directory structure (filenames are %% longer than 100 characters). create_long_names(Config) when is_list(Config) -> - run_in_short_tempdir(Config, fun create_long_names/0). + run_in_short_tempdir(Config, fun create_long_names/0), + verify_ports(Config). create_long_names() -> {ok,Dir} = file:get_cwd(), @@ -336,6 +366,7 @@ create_long_names() -> ok = erl_tar:tt(TarName), %% Extract and verify. + true = is_ustar(TarName), ExtractDir = "extract_dir", ok = file:make_dir(ExtractDir), ok = erl_tar:extract(TarName, [{cwd,ExtractDir}]), @@ -357,10 +388,10 @@ make_dirs([], Dir) -> %% Try erl_tar:table/2 and erl_tar:extract/2 on some corrupted tar files. bad_tar(Config) when is_list(Config) -> try_bad("bad_checksum", bad_header, Config), - try_bad("bad_octal", bad_header, Config), + try_bad("bad_octal", invalid_tar_checksum, Config), try_bad("bad_too_short", eof, Config), try_bad("bad_even_shorter", eof, Config), - ok. + verify_ports(Config). try_bad(Name0, Reason, Config) -> %% Intentionally no macros here. @@ -370,8 +401,10 @@ try_bad(Name0, Reason, Config) -> Name = Name0 ++ ".tar", io:format("~nTrying ~s", [Name]), Full = filename:join(DataDir, Name), - Opts = [verbose, {cwd, PrivDir}], + Dest = filename:join(PrivDir, Name0), + Opts = [verbose, {cwd, Dest}], Expected = {error, Reason}, + io:fwrite("Expected: ~p\n", [Expected]), case {erl_tar:table(Full, Opts), erl_tar:extract(Full, Opts)} of {Expected, Expected} -> io:format("Result: ~p", [Expected]), @@ -408,7 +441,7 @@ errors(Config) when is_list(Config) -> %% Clean up. delete_files([GoodTar,BadTar]), - ok. + verify_ports(Config). try_error(M, F, A, Error) -> io:format("Trying ~p:~p(~p)", [M, F, A]), @@ -458,7 +491,7 @@ extract_from_binary(Config) when is_list(Config) -> %% Clean up. delete_files([ExtractDir]), - ok. + verify_ports(Config). extract_from_binary_compressed(Config) when is_list(Config) -> %% Test extracting a compressed tar archive from a binary. @@ -491,7 +524,28 @@ extract_from_binary_compressed(Config) when is_list(Config) -> %% Clean up the rest. delete_files([ExtractDir]), - ok. + verify_ports(Config). + +%% Test extracting a tar archive from a binary. +extract_filtered(Config) when is_list(Config) -> + DataDir = proplists:get_value(data_dir, Config), + PrivDir = proplists:get_value(priv_dir, Config), + Long = filename:join(DataDir, "no_fancy_stuff.tar"), + ExtractDir = filename:join(PrivDir, "extract_from_binary"), + ok = file:make_dir(ExtractDir), + + ok = erl_tar:extract(Long, [{cwd,ExtractDir},{files,["no_fancy_stuff/EPLICENCE"]}]), + + %% Verify. + Dir = filename:join(ExtractDir, "no_fancy_stuff"), + true = filelib:is_dir(Dir), + false = filelib:is_file(filename:join(Dir, "a_dir_list")), + true = filelib:is_file(filename:join(Dir, "EPLICENCE")), + + %% Clean up. + delete_files([ExtractDir]), + + verify_ports(Config). %% Test extracting a tar archive from an open file. extract_from_open_file(Config) when is_list(Config) -> @@ -516,7 +570,7 @@ extract_from_open_file(Config) when is_list(Config) -> %% Clean up. delete_files([ExtractDir]), - ok. + verify_ports(Config). %% Test that archives containing symlinks can be created and extracted. symlinks(Config) when is_list(Config) -> @@ -535,6 +589,7 @@ symlinks(Config) when is_list(Config) -> %% Clean up. delete_files([Dir]), + verify_ports(Config), Res. 
make_symlink(Path, Link) -> @@ -573,6 +628,7 @@ symlinks(Dir, BadSymlink, PointsTo) -> ok = file:write_file(AFile, ALine), ok = file:make_symlink(AFile, GoodSymlink), ok = erl_tar:create(Tar, [BadSymlink, GoodSymlink, AFile], [verbose]), + true = is_ustar(Tar), %% List contents of tar file. @@ -581,6 +637,7 @@ symlinks(Dir, BadSymlink, PointsTo) -> %% Also create another archive with the dereference flag. ok = erl_tar:create(DerefTar, [AFile, GoodSymlink], [dereference, verbose]), + true = is_ustar(DerefTar), %% Extract files to a new directory. @@ -619,13 +676,51 @@ long_symlink(Dir) -> ok = file:set_cwd(Dir), AFile = "long_symlink", - FarTooLong = "/tmp/aarrghh/this/path/is/far/longer/than/one/hundred/characters/which/is/the/maximum/number/of/characters/allowed", - ok = file:make_symlink(FarTooLong, AFile), - {error,Error} = erl_tar:create(Tar, [AFile], [verbose]), - io:format("Error: ~s\n", [erl_tar:format_error(Error)]), - {FarTooLong,symbolic_link_too_long} = Error, + RequiresPAX = "/tmp/aarrghh/this/path/is/far/longer/than/one/hundred/characters/which/is/the/maximum/number/of/characters/allowed", + ok = file:make_symlink(RequiresPAX, AFile), + ok = erl_tar:create(Tar, [AFile], [verbose]), + false = is_ustar(Tar), + NewDir = filename:join(Dir, "extracted"), + _ = file:make_dir(NewDir), + ok = erl_tar:extract(Tar, [{cwd, NewDir}, verbose]), + ok = file:set_cwd(NewDir), + {ok, #file_info{type=symlink}} = file:read_link_info(AFile), + {ok, RequiresPAX} = file:read_link(AFile), ok. +init(Config) when is_list(Config) -> + PrivDir = proplists:get_value(priv_dir, Config), + ok = file:set_cwd(PrivDir), + Dir = filename:join(PrivDir, "init"), + ok = file:make_dir(Dir), + + [{FileOne,_,_}|_] = oac_files(), + TarOne = filename:join(Dir, "archive1.tar"), + {ok,Fd} = file:open(TarOne, [write]), + + %% If the arity of the fun is wrong, badarg should be returned + {error, badarg} = erl_tar:init(Fd, write, fun file_op_bad/1), + + %% Otherwise we should be good to go + {ok, Tar} = erl_tar:init(Fd, write, fun file_op/2), + ok = erl_tar:add(Tar, FileOne, []), + ok = erl_tar:close(Tar), + {ok, [FileOne]} = erl_tar:table(TarOne), + + verify_ports(Config). + +file_op_bad(_) -> + throw({error, should_never_be_called}). + +file_op(write, {Fd, Data}) -> + file:write(Fd, Data); +file_op(position, {Fd, Pos}) -> + file:position(Fd, Pos); +file_op(read2, {Fd, Size}) -> + file:read(Fd, Size); +file_op(close, Fd) -> + file:close(Fd). 
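The new init/1 test above drives erl_tar through an already-open raw file by way of erl_tar:init/3, whose third argument is an arity-2 callback handling write, position, read2 and close requests. A condensed sketch of the same idea; create_with_fd/2 is a hypothetical helper, not part of the suite:

    create_with_fd(TarPath, FileToAdd) ->
        {ok, Fd} = file:open(TarPath, [write]),
        FileOp = fun(write, {F, Data})   -> file:write(F, Data);
                    (position, {F, Pos}) -> file:position(F, Pos);
                    (read2, {F, Size})   -> file:read(F, Size);
                    (close, F)           -> file:close(F)
                 end,
        {ok, Tar} = erl_tar:init(Fd, write, FileOp),
        ok = erl_tar:add(Tar, FileToAdd, []),
        ok = erl_tar:close(Tar).

Passing a fun of the wrong arity makes erl_tar:init/3 return {error, badarg}, which the test above also checks.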
+ open_add_close(Config) when is_list(Config) -> PrivDir = proplists:get_value(priv_dir, Config), ok = file:set_cwd(PrivDir), @@ -643,21 +738,30 @@ open_add_close(Config) when is_list(Config) -> TarOne = filename:join(Dir, "archive1.tar"), {ok,AD} = erl_tar:open(TarOne, [write]), ok = erl_tar:add(AD, FileOne, []), - ok = erl_tar:add(AD, FileTwo, "second file", []), - ok = erl_tar:add(AD, FileThree, [verbose]), + + %% Add with {NameInArchive,Name} + ok = erl_tar:add(AD, {"second file", FileTwo}, []), + + %% Add with {binary, Bin} + {ok,FileThreeBin} = file:read_file(FileThree), + ok = erl_tar:add(AD, {FileThree, FileThreeBin}, [verbose]), + + %% Add with Name ok = erl_tar:add(AD, FileThree, "chunked", [{chunks,11411},verbose]), ok = erl_tar:add(AD, ADir, [verbose]), ok = erl_tar:add(AD, AnotherDir, [verbose]), ok = erl_tar:close(AD), + true = is_ustar(TarOne), ok = erl_tar:t(TarOne), ok = erl_tar:tt(TarOne), - {ok,[FileOne,"second file",FileThree,"chunked",ADir,SomeContent]} = erl_tar:table(TarOne), + Expected = {ok,[FileOne,"second file",FileThree,"chunked",ADir,SomeContent]}, + Expected = erl_tar:table(TarOne), delete_files(["oac_file","oac_small","oac_big",Dir,AnotherDir,ADir]), - ok. + verify_ports(Config). oac_files() -> Files = [{"oac_file", 1459, $x}, @@ -688,7 +792,8 @@ cooked_compressed(Config) when is_list(Config) -> %% Clean up. delete_files([filename:join(PrivDir, "ddll_SUITE_data")]), - ok. + + verify_ports(Config). %% Test that an archive can be created directly from binaries and %% that an archive can be extracted into binaries. @@ -716,7 +821,45 @@ memory(Config) when is_list(Config) -> %% Clean up. ok = delete_files([Name1,Name2]), - ok. + + verify_ports(Config). + +read_other_implementations(Config) when is_list(Config) -> + DataDir = proplists:get_value(data_dir, Config), + Files = ["v7.tar", "gnu.tar", "bsd.tar", + "star.tar", "pax_mtime.tar"], + do_read_other_implementations(Files, DataDir), + verify_ports(Config). + +do_read_other_implementations([], _DataDir) -> + ok; +do_read_other_implementations([File|Rest], DataDir) -> + io:format("~nTrying ~s", [File]), + Full = filename:join(DataDir, File), + {ok, _} = erl_tar:table(Full), + {ok, _} = erl_tar:extract(Full, [memory]), + do_read_other_implementations(Rest, DataDir). + + +%% Test handling of sparse files +sparse(Config) when is_list(Config) -> + DataDir = proplists:get_value(data_dir, Config), + PrivDir = proplists:get_value(priv_dir, Config), + Sparse01Empty = "sparse01_empty.tar", + Sparse01 = "sparse01.tar", + Sparse10Empty = "sparse10_empty.tar", + Sparse10 = "sparse10.tar", + do_sparse([Sparse01Empty, Sparse01, Sparse10Empty, Sparse10], DataDir, PrivDir), + verify_ports(Config). + +do_sparse([], _DataDir, _PrivDir) -> + ok; +do_sparse([Name|Rest], DataDir, PrivDir) -> + io:format("~nTrying sparse file ~s", [Name]), + Full = filename:join(DataDir, Name), + {ok, [_]} = erl_tar:table(Full), + {ok, _} = erl_tar:extract(Full, [memory]), + do_sparse(Rest, DataDir, PrivDir). %% Test filenames with characters outside the US ASCII range. unicode(Config) when is_list(Config) -> @@ -753,6 +896,9 @@ do_unicode(PrivDir) -> Names = lists:sort(unicode_create_files()), Tar = "unicöde.tar", ok = erl_tar:create(Tar, ["unicöde"], []), + + %% Unicode filenames require PAX format. + false = is_ustar(Tar), {ok,Names0} = erl_tar:table(Tar, []), Names = lists:sort(Names0), _ = [ok = file:delete(Name) || Name <- Names], @@ -850,3 +996,26 @@ start_node(Name, Args) -> ct:log("Node ~p started~n", [Node]), Node end. 
+ +%% Test that the given tar file is a plain USTAR archive, +%% without any PAX extensions. +is_ustar(File) -> + {ok,Bin} = file:read_file(File), + <<_:257/binary,"ustar",0,_/binary>> = Bin, + <<_:156/binary,Type:8,_/binary>> = Bin, + case Type of + $x -> false; + $g -> false; + _ -> true + end. + + +verify_ports(Config) -> + PortsBefore = proplists:get_value(ports, Config), + PortsAfter = ordsets:from_list(erlang:ports()), + case ordsets:subtract(PortsAfter, PortsBefore) of + [] -> + ok; + [_|_]=Rem -> + error({leaked_ports,Rem}) + end. diff --git a/lib/stdlib/test/tar_SUITE_data/bsd.tar b/lib/stdlib/test/tar_SUITE_data/bsd.tar Binary files differnew file mode 100644 index 0000000000..8c31864be0 --- /dev/null +++ b/lib/stdlib/test/tar_SUITE_data/bsd.tar diff --git a/lib/stdlib/test/tar_SUITE_data/gnu.tar b/lib/stdlib/test/tar_SUITE_data/gnu.tar Binary files differnew file mode 100644 index 0000000000..60268065c1 --- /dev/null +++ b/lib/stdlib/test/tar_SUITE_data/gnu.tar diff --git a/lib/stdlib/test/tar_SUITE_data/pax_mtime.tar b/lib/stdlib/test/tar_SUITE_data/pax_mtime.tar Binary files differnew file mode 100644 index 0000000000..1b6e80ffac --- /dev/null +++ b/lib/stdlib/test/tar_SUITE_data/pax_mtime.tar diff --git a/lib/stdlib/test/tar_SUITE_data/sparse00.tar b/lib/stdlib/test/tar_SUITE_data/sparse00.tar Binary files differnew file mode 100644 index 0000000000..61a04de90b --- /dev/null +++ b/lib/stdlib/test/tar_SUITE_data/sparse00.tar diff --git a/lib/stdlib/test/tar_SUITE_data/sparse01.tar b/lib/stdlib/test/tar_SUITE_data/sparse01.tar Binary files differnew file mode 100644 index 0000000000..61a04de90b --- /dev/null +++ b/lib/stdlib/test/tar_SUITE_data/sparse01.tar diff --git a/lib/stdlib/test/tar_SUITE_data/sparse01_empty.tar b/lib/stdlib/test/tar_SUITE_data/sparse01_empty.tar Binary files differnew file mode 100644 index 0000000000..efa6d060f4 --- /dev/null +++ b/lib/stdlib/test/tar_SUITE_data/sparse01_empty.tar diff --git a/lib/stdlib/test/tar_SUITE_data/sparse10.tar b/lib/stdlib/test/tar_SUITE_data/sparse10.tar Binary files differnew file mode 100644 index 0000000000..61a04de90b --- /dev/null +++ b/lib/stdlib/test/tar_SUITE_data/sparse10.tar diff --git a/lib/stdlib/test/tar_SUITE_data/sparse10_empty.tar b/lib/stdlib/test/tar_SUITE_data/sparse10_empty.tar Binary files differnew file mode 100644 index 0000000000..efa6d060f4 --- /dev/null +++ b/lib/stdlib/test/tar_SUITE_data/sparse10_empty.tar diff --git a/lib/stdlib/test/tar_SUITE_data/star.tar b/lib/stdlib/test/tar_SUITE_data/star.tar Binary files differnew file mode 100644 index 0000000000..b0631e3b13 --- /dev/null +++ b/lib/stdlib/test/tar_SUITE_data/star.tar diff --git a/lib/stdlib/test/tar_SUITE_data/v7.tar b/lib/stdlib/test/tar_SUITE_data/v7.tar Binary files differnew file mode 100644 index 0000000000..9918e006bb --- /dev/null +++ b/lib/stdlib/test/tar_SUITE_data/v7.tar diff --git a/lib/stdlib/test/zip_SUITE.erl b/lib/stdlib/test/zip_SUITE.erl index 7d90795c9e..f0feda217a 100644 --- a/lib/stdlib/test/zip_SUITE.erl +++ b/lib/stdlib/test/zip_SUITE.erl @@ -27,7 +27,7 @@ openzip_api/1, zip_api/1, open_leak/1, unzip_jar/1, unzip_traversal_exploit/1, compress_control/1, - foldl/1]). + foldl/1,fd_leak/1]). -include_lib("common_test/include/ct.hrl"). -include_lib("kernel/include/file.hrl"). @@ -40,7 +40,7 @@ all() -> unzip_to_binary, zip_to_binary, unzip_options, zip_options, list_dir_options, aliases, openzip_api, zip_api, open_leak, unzip_jar, compress_control, foldl, - unzip_traversal_exploit]. + unzip_traversal_exploit,fd_leak]. 
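The is_ustar/1 helper above decides whether an archive is plain USTAR by probing the raw header: the type flag sits at byte offset 156 and the NUL-terminated "ustar" magic at offset 257, with type $x or $g marking PAX extended headers. A variant that reports the detected format; tar_format/1 is illustrative only and assumes the archive fits in memory:

    tar_format(TarPath) ->
        {ok, Bin} = file:read_file(TarPath),
        case Bin of
            <<_:156/binary, T, _:100/binary, "ustar", 0, _/binary>>
              when T =:= $x; T =:= $g -> pax;
            <<_:257/binary, "ustar", 0, _/binary>> -> ustar;
            _ -> unknown
        end.

This matches the assertions above: long symlink targets and Unicode names cannot be stored in the fixed USTAR fields, so those archives are written with PAX headers and is_ustar/1 returns false for them.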
groups() -> []. @@ -882,3 +882,35 @@ foldl(Config) -> {error, enoent} = zip:foldl(ZipFun, [], File), ok. + +fd_leak(Config) -> + ok = file:set_cwd(proplists:get_value(priv_dir, Config)), + DataDir = proplists:get_value(data_dir, Config), + Name = filename:join(DataDir, "bad_file_header.zip"), + BadExtract = fun() -> + {error,bad_file_header} = zip:extract(Name), + ok + end, + do_fd_leak(BadExtract, 1), + + BadCreate = fun() -> + {error,enoent} = zip:zip("failed.zip", + ["none"]), + ok + end, + do_fd_leak(BadCreate, 1), + + ok. + +do_fd_leak(_Bad, 10000) -> + ok; +do_fd_leak(Bad, N) -> + try Bad() of + ok -> + do_fd_leak(Bad, N + 1) + catch + C:R -> + Stk = erlang:get_stacktrace(), + io:format("Bad error after ~p attempts\n", [N]), + erlang:raise(C, R, Stk) + end. diff --git a/lib/stdlib/vsn.mk b/lib/stdlib/vsn.mk index e67cb9b08d..f7bd21472c 100644 --- a/lib/stdlib/vsn.mk +++ b/lib/stdlib/vsn.mk @@ -1 +1 @@ -STDLIB_VSN = 3.2 +STDLIB_VSN = 3.3 diff --git a/lib/syntax_tools/src/igor.erl b/lib/syntax_tools/src/igor.erl index 72170ec5da..b92cd8d607 100644 --- a/lib/syntax_tools/src/igor.erl +++ b/lib/syntax_tools/src/igor.erl @@ -417,7 +417,7 @@ merge_files(Name, Files, Options) -> %% %% <dd>Specifies a list of rules for associating object files with %% source files, to be passed to the function -%% `filename:find_src/2'. This can be used to change the +%% `filelib:find_source/2'. This can be used to change the %% way Igor looks for source files. If this option is not specified, %% the default system rules are used. The first occurrence of this %% option completely overrides any later in the option list.</dd> @@ -462,7 +462,7 @@ merge_files(Name, Files, Options) -> %% @see merge/3 %% @see merge_files/3 %% @see merge_sources/3 -%% @see //stdlib/filename:find_src/2 +%% @see //stdlib/filelib:find_source/2 %% @see epp_dodger -spec merge_files(atom(), erl_syntax:forms(), [file:filename()], [option()]) -> @@ -2746,8 +2746,8 @@ read_module(Name, Options) -> %% It seems that we have no file - go on anyway, %% just to get a decent error message. read_module_1(Name, Options); - {Name1, _} -> - read_module_1(Name1 ++ ".erl", Options) + {ok, Name1} -> + read_module_1(Name1, Options) end end. @@ -2807,9 +2807,9 @@ check_forms([], _) -> ok. find_src(Name, undefined) -> - filename:find_src(filename(Name)); + filelib:find_source(filename(Name)); find_src(Name, Rules) -> - filename:find_src(filename(Name), Rules). + filelib:find_source(filename(Name), Rules). %% file_type(filename()) -> {value, Type} | none diff --git a/lib/tools/doc/src/make.xml b/lib/tools/doc/src/make.xml index fddf5ebd7b..6b878f72fb 100644 --- a/lib/tools/doc/src/make.xml +++ b/lib/tools/doc/src/make.xml @@ -43,15 +43,15 @@ <fsummary>Compile a set of modules.</fsummary> <type> <v>Options = [Option]</v> - <v> Option = noexec | load | netload | <compiler option></v> + <v> Option = noexec | load | netload | {emake, Emake} | <compiler option></v> </type> <desc> - <p>This function first looks in the current working directory - for a file named <c>Emakefile</c> (see below) specifying the - set of modules to compile and the compile options to use. If - no such file is found, the set of modules to compile - defaults to all modules in the current working - directory.</p> + <p>This function determines the set of modules to compile and the + compile options to use, by first looking for the <c>emake</c> make + option, if not present reads the configuration from a file named + <c>Emakefile</c> (see below). 
If no such file is found, the + set of modules to compile defaults to all modules in the + current working directory.</p> <p>Traversing the set of modules, it then recompiles every module for which at least one of the following conditions apply:</p> <list type="bulleted"> @@ -77,6 +77,9 @@ <item><c>netload</c> <br></br> Net load mode. Loads all recompiled modules on all known nodes.</item> + <item><c>{emake, Emake}</c> <br></br> + + Rather than reading the <c>Emakefile</c> specify configuration explicitly.</item> </list> <p>All items in <c>Options</c> that are not make options are assumed to be compiler options and are passed as-is to @@ -108,9 +111,10 @@ <section> <title>Emakefile</title> - <p><c>make:all/0,1</c> and <c>make:files/1,2</c> looks in the - current working directory for a file named <c>Emakefile</c>. If - it exists, <c>Emakefile</c> should contain elements like this:</p> + <p><c>make:all/0,1</c> and <c>make:files/1,2</c> first looks for + <c>{emake, Emake}</c> in options, then in the current working directory + for a file named <c>Emakefile</c>. If present <c>Emake</c> should + contain elements like this:</p> <code type="none"> Modules. {Modules,Options}. </code> diff --git a/lib/tools/doc/src/notes.xml b/lib/tools/doc/src/notes.xml index 415f1b8516..af20200d49 100644 --- a/lib/tools/doc/src/notes.xml +++ b/lib/tools/doc/src/notes.xml @@ -31,6 +31,21 @@ </header> <p>This document describes the changes made to the Tools application.</p> +<section><title>Tools 2.9.1</title> + + <section><title>Improvements and New Features</title> + <list> + <item> + <p> + Improved edoc support in emacs mode.</p> + <p> + Own Id: OTP-14217 Aux Id: PR-1282 </p> + </item> + </list> + </section> + +</section> + <section><title>Tools 2.9</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/tools/emacs/erlang-edoc.el b/lib/tools/emacs/erlang-edoc.el index 2801aa8ae7..d0dcc81028 100644 --- a/lib/tools/emacs/erlang-edoc.el +++ b/lib/tools/emacs/erlang-edoc.el @@ -36,7 +36,7 @@ "Tags that can be used anywhere within a module.") (defvar erlang-edoc-overview-tags - '("author" "copyright" "reference" "see" "since" "title" "version") + '("author" "copyright" "doc" "reference" "see" "since" "title" "version") "Tags that can be used in an overview file.") (defvar erlang-edoc-module-tags @@ -45,8 +45,8 @@ "Tags that can be used before a module declaration.") (defvar erlang-edoc-function-tags - '("deprecated" "doc" "equiv" "hidden" "private" "see" "since" "spec" - "throws" "type") + '("deprecated" "doc" "equiv" "hidden" "param" "private" "returns" + "see" "since" "spec" "throws" "type") "Tags that can be used before a function definition.") (defvar erlang-edoc-predefined-macros @@ -169,4 +169,10 @@ (jit-lock-refontify)) (provide 'erlang-edoc) + +;; Local variables: +;; coding: utf-8 +;; indent-tabs-mode: nil +;; End: + ;;; erlang-edoc.el ends here diff --git a/lib/tools/emacs/erlang-eunit.el b/lib/tools/emacs/erlang-eunit.el index 3b85e6680a..38c40927f4 100644 --- a/lib/tools/emacs/erlang-eunit.el +++ b/lib/tools/emacs/erlang-eunit.el @@ -68,7 +68,7 @@ buffer and vice versa" ;;; (defun erlang-eunit-open-src-file-other-window (test-file-path) "Open the src file which corresponds to the an EUnit test file" - (find-file-other-window (erlang-eunit-src-filename test-file-path))) + (find-file-other-window (erlang-eunit-src-filename test-file-path))) ;;; Return the name and path of the EUnit test file ;;, (input may be either the source filename itself or the EUnit test filename) @@ -154,7 
+154,7 @@ buffer and vice versa" ;;; Join filenames (defun filename-join (dir file) (if (or (= (elt file 0) ?/) - (= (car (last (append dir nil))) ?/)) + (= (car (last (append dir nil))) ?/)) (concat dir file) (concat dir "/" file))) @@ -299,7 +299,7 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set." ;;; Compile source and EUnit test file and finally run EUnit tests for ;;; the current module (defun erlang-eunit-compile-and-test (test-fun test-args &optional under-cover) - "Compile the source and test files and run the EUnit test suite. + "Compile the source and test files and run the EUnit test suite. If under-cover is set to t, the module under test is compile for code coverage analysis. If under-cover is left out or not set, @@ -311,7 +311,7 @@ and the number of times each line is covered). With prefix arg, compiles for debug and runs tests with the verbose flag set." (erlang-eunit-record-recent-compile under-cover) (let ((src-filename (erlang-eunit-src-filename buffer-file-name)) - (test-filename (erlang-eunit-test-filename buffer-file-name))) + (test-filename (erlang-eunit-test-filename buffer-file-name))) ;; The purpose of out-maneuvering `save-some-buffers', as is done ;; below, is to ask the question about saving buffers only once, @@ -326,9 +326,9 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set." ;; be placed in the source file instead. Any compilation error ;; will prevent the subsequent steps to be run (hence the `and') (and (erlang-eunit-compile-file src-filename under-cover) - (if (file-readable-p test-filename) - (erlang-eunit-compile-file test-filename) - t) + (if (file-readable-p test-filename) + (erlang-eunit-compile-file test-filename) + t) (apply test-fun test-args) (if under-cover (save-excursion @@ -381,16 +381,16 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set." (goto-char compilation-parsing-end) (erlang-eunit-all-list-elems-fulfill-p (lambda (re) (let ((continue t) - (result t)) - (while continue ; ignore warnings, stop at errors - (if (re-search-forward re (point-max) t) - (if (erlang-eunit-is-compilation-warning) - t - (setq result nil) - (setq continue nil)) - (setq result t) - (setq continue nil))) - result)) + (result t)) + (while continue ; ignore warnings, stop at errors + (if (re-search-forward re (point-max) t) + (if (erlang-eunit-is-compilation-warning) + t + (setq result nil) + (setq continue nil)) + (setq result t) + (setq continue nil))) + result)) (mapcar (lambda (e) (car e)) erlang-error-regexp-alist)))) (defun erlang-eunit-is-compilation-warning () @@ -402,7 +402,7 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set." (let ((matches-p t)) (while (and list matches-p) (if (not (funcall pred (car list))) - (setq matches-p nil)) + (setq matches-p nil)) (setq list (cdr list))) matches-p)) @@ -439,15 +439,21 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set." 
(defun erlang-eunit-ensure-keymap-for-key (key-seq) (let ((prefix-keys (butlast (append key-seq nil))) - (prefix-seq "")) + (prefix-seq "")) (while prefix-keys (setq prefix-seq (concat prefix-seq (make-string 1 (car prefix-keys)))) (setq prefix-keys (cdr prefix-keys)) (if (not (keymapp (lookup-key (current-local-map) prefix-seq))) - (local-set-key prefix-seq (make-sparse-keymap)))))) + (local-set-key prefix-seq (make-sparse-keymap)))))) (add-hook 'erlang-mode-hook 'erlang-eunit-add-key-bindings) (provide 'erlang-eunit) -;; erlang-eunit ends here + +;; Local variables: +;; coding: utf-8 +;; indent-tabs-mode: nil +;; End: + +;; erlang-eunit.el ends here diff --git a/lib/tools/emacs/erlang-pkg.el b/lib/tools/emacs/erlang-pkg.el index 4d0aa6fcd3..02d6bebbf4 100644 --- a/lib/tools/emacs/erlang-pkg.el +++ b/lib/tools/emacs/erlang-pkg.el @@ -1,3 +1,3 @@ (define-package "erlang" "2.7.0" - "Erlang major mode" - '()) + "Erlang major mode" + '((emacs "24.1"))) diff --git a/lib/tools/emacs/erlang-skels.el b/lib/tools/emacs/erlang-skels.el index eeba7f34e9..bdb3d9ad4a 100644 --- a/lib/tools/emacs/erlang-skels.el +++ b/lib/tools/emacs/erlang-skels.el @@ -1,7 +1,7 @@ ;; ;; %CopyrightBegin% ;; -;; Copyright Ericsson AB 2010-2016. All Rights Reserved. +;; Copyright Ericsson AB 2010-2017. All Rights Reserved. ;; ;; Licensed under the Apache License, Version 2.0 (the "License"); ;; you may not use this file except in compliance with the License. @@ -915,11 +915,7 @@ Please see the function `tempo-define-template'.") "%% process to initialize." n (erlang-skel-separator-end 2) "-spec init(Args :: term()) ->" n> - "{ok, State :: term(), Data :: term()} |" n> - "{ok, State :: term(), Data :: term()," n> - "[gen_statem:action()] | gen_statem:action()} |" n> - "ignore |" n> - "{stop, Reason :: term()}." n + "gen_statem:init_result(atom())." n "init([]) ->" n> "process_flag(trap_exit, true)," n> "{ok, state_name, #data{}}." n @@ -1028,11 +1024,7 @@ Please see the function `tempo-define-template'.") "%% process to initialize." n (erlang-skel-separator-end 2) "-spec init(Args :: term()) ->" n> - "{ok, State :: term(), Data :: term()} |" n> - "{ok, State :: term(), Data :: term()," n> - "[gen_statem:action()] | gen_statem:action()} |" n> - "ignore |" n> - "{stop, Reason :: term()}." n + "gen_statem:init_result(term())." n "init([]) ->" n> "process_flag(trap_exit, true)," n> "{ok, state_name, #data{}}." n diff --git a/lib/tools/emacs/erlang-start.el b/lib/tools/emacs/erlang-start.el index 160057e179..c35f280bf4 100644 --- a/lib/tools/emacs/erlang-start.el +++ b/lib/tools/emacs/erlang-start.el @@ -39,7 +39,7 @@ ;; ;; Please state as exactly as possible: ;; - Version number of Erlang Mode (see the menu), Emacs, Erlang, -;; and of any other relevant software. +;; and of any other relevant software. ;; - What the expected result was. ;; - What you did, preferably in a repeatable step-by-step form. ;; - A description of the unexpected result. @@ -60,7 +60,7 @@ ;; (autoload 'erlang-mode "erlang" "Major mode for editing Erlang code." t) -(autoload 'erlang-version "erlang" +(autoload 'erlang-version "erlang" "Return the current version of Erlang mode." t) (autoload 'erlang-shell "erlang" "Start a new Erlang shell." t) (autoload 'run-erlang "erlang" "Start a new Erlang shell." t) @@ -68,7 +68,7 @@ (autoload 'erlang-compile "erlang" "Compile Erlang module in current buffer." t) -(autoload 'erlang-man-module "erlang" +(autoload 'erlang-man-module "erlang" "Find manual page for MODULE." 
t) (autoload 'erlang-man-function "erlang" "Find manual page for NAME, where NAME is module:function." t) @@ -108,25 +108,22 @@ A function suitable for `eldoc-documentation-function'.\n\n(fn)" nil nil) ;; ;; Associate files using interpreter "escript" with Erlang mode. -;; +;; ;;;###autoload (add-to-list 'interpreter-mode-alist (cons "escript" 'erlang-mode)) ;; ;; Ignore files ending in ".jam", ".vee", and ".beam" when performing -;; file completion. +;; file completion and in dired omit mode. ;; ;;;###autoload (let ((erl-ext '(".jam" ".vee" ".beam"))) (while erl-ext - (let ((cie completion-ignored-extensions)) - (while (and cie (not (string-equal (car cie) (car erl-ext)))) - (setq cie (cdr cie))) - (if (null cie) - (setq completion-ignored-extensions - (cons (car erl-ext) completion-ignored-extensions)))) + (add-to-list 'completion-ignored-extensions (car erl-ext)) + (when (boundp 'dired-omit-extensions) + (add-to-list 'dired-omit-extensions (car erl-ext))) (setq erl-ext (cdr erl-ext)))) @@ -136,4 +133,9 @@ A function suitable for `eldoc-documentation-function'.\n\n(fn)" nil nil) (provide 'erlang-start) +;; Local variables: +;; coding: utf-8 +;; indent-tabs-mode: nil +;; End: + ;; erlang-start.el ends here. diff --git a/lib/tools/emacs/erlang-test.el b/lib/tools/emacs/erlang-test.el index ba6190d194..ea5d637199 100644 --- a/lib/tools/emacs/erlang-test.el +++ b/lib/tools/emacs/erlang-test.el @@ -2,7 +2,7 @@ ;;; Unit tests for erlang.el. -;; Author: Johan Claesson +;; Author: Johan Claesson ;; Created: 2016-05-07 ;; Keywords: erlang, languages @@ -28,6 +28,27 @@ ;;; Commentary: ;; This library require GNU Emacs 25 or later. +;; +;; There are two ways to run emacs unit tests. +;; +;; 1. Within a running emacs process. Load this file. Then to run +;; all defined test cases: +;; +;; M-x ert RET t RET +;; +;; To run only the erlang test cases: +;; +;; M-x ert RET "^erlang" RET +;; +;; +;; 2. In a new stand-alone emacs process. This process exits +;; when it executed the tests. For example: +;; +;; emacs -Q -batch -L . -l erlang.el -l erlang-test.el \ +;; -f ert-run-tests-batch-and-exit +;; +;; The -L option adds a directory to the load-path. It should be the +;; directory containing erlang.el and erlang-test.el. ;;; Code: @@ -59,11 +80,12 @@ concatenated to form an erlang file to test on.") tags-file-name tags-table-list tags-table-set-list + tags-add-tables + tags-completion-table erlang-buffer erlang-mode-hook prog-mode-hook - erlang-shell-mode-hook - tags-add-tables) + erlang-shell-mode-hook) (unwind-protect (progn (setq-default tags-file-name nil) @@ -71,11 +93,14 @@ concatenated to form an erlang file to test on.") (erlang-test-create-erlang-file erlang-file) (erlang-test-compile-tags erlang-file tags-file) (setq erlang-buffer (find-file-noselect erlang-file)) - (with-current-buffer erlang-buffer - (setq-local tags-file-name tags-file)) - ;; Setting global tags-file-name is a workaround for - ;; GNU Emacs bug#23164. - (setq tags-file-name tags-file) + (if (< emacs-major-version 26) + (progn + (with-current-buffer erlang-buffer + (setq-local tags-file-name tags-file)) + ;; Setting global tags-file-name is a workaround for + ;; GNU Emacs bug#23164. 
+ (setq tags-file-name tags-file)) + (visit-tags-table tags-file t)) (erlang-test-complete-at-point tags-file) (erlang-test-completion-table) (erlang-test-xref-find-definitions erlang-file erlang-buffer)) @@ -117,12 +142,20 @@ concatenated to form an erlang file to test on.") for line = 1 then (1+ line) do (when tagname (switch-to-buffer erlang-buffer) - (xref-find-definitions tagname) - (erlang-test-verify-pos erlang-file line) - (xref-find-definitions (concat "erlang_test:" tagname)) - (erlang-test-verify-pos erlang-file line))) - (xref-find-definitions "erlang_test:") - (erlang-test-verify-pos erlang-file 1)) + (erlang-test-xref-jump tagname erlang-file line) + (erlang-test-xref-jump (concat "erlang_test:" tagname) + erlang-file line))) + (erlang-test-xref-jump "erlang_test:" erlang-file 1)) + +(defun erlang-test-xref-jump (id expected-file expected-line) + (goto-char (point-max)) + (insert "\n%% " id) + (save-buffer) + (if (fboundp 'xref-find-definitions) + (xref-find-definitions (erlang-id-to-string + (erlang-get-identifier-at-point))) + (error "xref-find-definitions not defined (too old emacs?)")) + (erlang-test-verify-pos expected-file expected-line)) (defun erlang-test-verify-pos (expected-file expected-line) (should (string-equal (file-truename expected-file) @@ -136,13 +169,13 @@ concatenated to form an erlang file to test on.") (setq-local tags-file-name tags-file) (insert "\nerlang_test:fun") (erlang-complete-tag) - (should (looking-back "erlang_test:function")) + (should (looking-back "erlang_test:function" (point-at-bol))) (insert "\nfun") (erlang-complete-tag) - (should (looking-back "function")) + (should (looking-back "function" (point-at-bol))) (insert "\nerlang_") (erlang-complete-tag) - (should (looking-back "erlang_test:")))) + (should (looking-back "erlang_test:" (point-at-bol))))) (ert-deftest erlang-test-compile-options () @@ -179,6 +212,30 @@ concatenated to form an erlang file to test on.") erlang)) +(ert-deftest erlang-test-parse-id () + (cl-loop for id-string in '("fun/10" + "qualified-function module:fun/10" + "record reko" + "macro _SYMBOL" + "macro MACRO/10" + "module modula" + "macro" + nil) + for id-list in '((nil nil "fun" 10) + (qualified-function "module" "fun" 10) + (record nil "reko" nil) + (macro nil "_SYMBOL" nil) + (macro nil "MACRO" 10) + (module nil "modula" nil) + (nil nil "macro" nil) + nil) + for id-list2 = (erlang-id-to-list id-string) + do (should (equal id-list id-list2)) + for id-string2 = (erlang-id-to-string id-list) + do (should (equal id-string id-string2)) + collect id-list2)) + + (provide 'erlang-test) ;;; erlang-test.el ends here diff --git a/lib/tools/emacs/erlang.el b/lib/tools/emacs/erlang.el index 51f7e8e26c..59b20c552e 100644 --- a/lib/tools/emacs/erlang.el +++ b/lib/tools/emacs/erlang.el @@ -4,6 +4,8 @@ ;; Author: Anders Lindgren ;; Keywords: erlang, languages, processes ;; Date: 2011-12-11 +;; Version: 2.7.0 +;; Package-Requires: ((emacs "24.1")) ;; %CopyrightBegin% ;; @@ -24,7 +26,7 @@ ;; %CopyrightEnd% ;; -;; Lars Thors�n's modifications of 2000-06-07 included. +;; Lars Thorsén's modifications of 2000-06-07 included. ;; The original version of this package was written by Robert Virding. ;; ;;; Commentary: @@ -85,30 +87,15 @@ (defconst erlang-version "2.7" "The version number of Erlang mode.") -(defvar erlang-root-dir nil +(defcustom erlang-root-dir nil "The directory where the Erlang system is installed. The name should not contain the trailing slash. 
Should this variable be nil, no manual pages will show up in the -Erlang mode menu.") - -(eval-and-compile - (defconst erlang-emacs-major-version - (if (boundp 'emacs-major-version) - emacs-major-version - (string-match "\\([0-9]+\\)\\.\\([0-9]+\\)" emacs-version) - (erlang-string-to-int (substring emacs-version - (match-beginning 1) (match-end 1)))) - "Major version number of Emacs.")) - -(eval-and-compile - (defconst erlang-emacs-minor-version - (if (boundp 'emacs-minor-version) - emacs-minor-version - (string-match "\\([0-9]+\\)\\.\\([0-9]+\\)" emacs-version) - (erlang-string-to-int (substring emacs-version - (match-beginning 2) (match-end 2)))) - "Minor version number of Emacs.")) +Erlang mode menu." + :group 'erlang + :type '(restricted-sexp :match-alternatives (stringp 'nil)) + :safe (lambda (val) (or (eq nil val) (stringp val)))) (defconst erlang-xemacs-p (string-match "Lucid\\|XEmacs" emacs-version) "Non-nil when running under XEmacs or Lucid Emacs.") @@ -129,7 +116,7 @@ Never EVER set this variable!") erlang-menu-man-items erlang-menu-personal-items erlang-menu-version-items) - "*List of menu item list to combine to create Erlang mode menu. + "List of menu item list to combine to create Erlang mode menu. External programs which temporarily add menu items to the Erlang mode menu may use this variable. Please use the function `add-hook' to add @@ -238,7 +225,7 @@ This variable is added to the list of Erlang menus stored in The menu is in the form described by the variable `erlang-menu-base-items'.") (defvar erlang-mode-hook nil - "*Functions to run when Erlang mode is activated. + "Functions to run when Erlang mode is activated. This hook is used to change the behaviour of Erlang mode. It is normally used by the user to personalise the programming environment. @@ -272,7 +259,7 @@ To use the example, copy the following lines to your `~/.emacs' file: (imenu-add-to-menubar \"Imenu\")))") (defvar erlang-load-hook nil - "*Functions to run when Erlang mode is loaded. + "Functions to run when Erlang mode is loaded. This hook is used to change the behaviour of Erlang mode. It is normally used by the user to personalise the programming environment. @@ -304,17 +291,20 @@ manual pages can be retrieved (note that you must set the value of A useful function is `tempo-template-erlang-normal-header'. \(This function only exists when the `tempo' package is available.)") -(defvar erlang-check-module-name 'ask - "*Non-nil means check that module name and file name agrees when saving. +(defcustom erlang-check-module-name 'ask + "Non-nil means check that module name and file name agrees when saving. -If the value of this variable is the atom `ask', the user is -prompted. If the value is t the source is silently changed.") +If the value of this variable is the symbol `ask', the user is +prompted. If the value is t the source is silently changed." + :group 'erlang + :type '(choice (const :tag "Check on save" 'ask) + (const :tag "Don't check on save" t))) (defvar erlang-electric-commands '(erlang-electric-comma erlang-electric-semicolon erlang-electric-gt) - "*List of activated electric commands. + "List of activated electric commands. The list should contain the electric commands which should be active. Currently, the available electric commands are: @@ -328,8 +318,8 @@ are activated. To deactivate all electric commands, set this variable to nil.") -(defvar erlang-electric-newline-inhibit t - "*Set to non-nil to inhibit newline after electric command. 
+(defcustom erlang-electric-newline-inhibit t + "Set to non-nil to inhibit newline after electric command. This is useful since a lot of people press return after executing an electric command. @@ -339,28 +329,32 @@ list `erlang-electric-newline-inhibit-list'. Note that commands in this list are required to set the variable `erlang-electric-newline-inhibit' to nil when the newline shouldn't be -inhibited.") +inhibited." + :group 'erlang + :type 'boolean + :safe 'booleanp) (defvar erlang-electric-newline-inhibit-list '(erlang-electric-semicolon erlang-electric-comma erlang-electric-gt) - "*Commands which can inhibit the next newline.") + "Commands which can inhibit the next newline.") -(defvar erlang-electric-semicolon-insert-blank-lines nil - "*Number of blank lines inserted before header, or nil. +(defcustom erlang-electric-semicolon-insert-blank-lines nil + "Number of blank lines inserted before header, or nil. This variable controls the behaviour of `erlang-electric-semicolon' when a new function header is generated. When nil, no blank line is inserted between the current line and the new header. When bound to a number it represents the number of blank lines which should be -inserted.") +inserted." + :group 'erlang) (defvar erlang-electric-semicolon-criteria '(erlang-next-lines-empty-p erlang-at-keyword-end-p erlang-at-end-of-function-p) - "*List of functions controlling `erlang-electric-semicolon'. + "List of functions controlling `erlang-electric-semicolon'. The functions in this list are called, in order, whenever a semicolon is typed. Each function in the list is called with no arguments, and should return one of the following values: @@ -381,7 +375,7 @@ The test is performed by the function `erlang-test-criteria-list'.") erlang-at-keyword-end-p erlang-at-end-of-clause-p erlang-at-end-of-function-p) - "*List of functions controlling `erlang-electric-comma'. + "List of functions controlling `erlang-electric-comma'. The functions in this list are called, in order, whenever a comma is typed. Each function in the list is called with no arguments, and should return one of the following values: @@ -399,7 +393,7 @@ The test is performed by the function `erlang-test-criteria-list'.") '(erlang-stop-when-in-type-spec erlang-next-lines-empty-p erlang-at-end-of-function-p) - "*List of functions controlling the arrow aspect of `erlang-electric-gt'. + "List of functions controlling the arrow aspect of `erlang-electric-gt'. The functions in this list are called, in order, whenever a `>' is typed. Each function in the list is called with no arguments, and should return one of the following values: @@ -415,7 +409,7 @@ The test is performed by the function `erlang-test-criteria-list'.") (defvar erlang-electric-newline-criteria '(t) - "*List of functions controlling `erlang-electric-newline'. + "List of functions controlling `erlang-electric-newline'. The electric newline commands indents the next line. Should the current line begin with a comment the comment start is copied to @@ -435,8 +429,8 @@ list, it is treated as a function triggering the electric command. The test is performed by the function `erlang-test-criteria-list'.") -(defvar erlang-next-lines-empty-threshold 2 - "*Number of blank lines required to activate an electric command. +(defcustom erlang-next-lines-empty-threshold 2 + "Number of blank lines required to activate an electric command. 
Actually, this value controls the behaviour of the function `erlang-next-lines-empty-p' which normally is a member of the @@ -457,46 +451,67 @@ function `erlang-next-lines-empty-p' would be removed from the criteria lists. Note that even if `erlang-next-lines-empty-p' should not trigger an -electric command, other functions in the criteria list could.") +electric command, other functions in the criteria list could." + :group 'erlang + :type '(restricted-sexp :match-alternatives (integerp 'nil)) + :safe (lambda (val) (or (eq val nil) (integerp val)))) -(defvar erlang-new-clause-with-arguments nil - "*Non-nil means that the arguments are cloned when a clause is generated. +(defcustom erlang-new-clause-with-arguments nil + "Non-nil means that the arguments are cloned when a clause is generated. A new function header can be generated by calls to the function -`erlang-generate-new-clause' and by use of the electric semicolon.") +`erlang-generate-new-clause' and by use of the electric semicolon." + :group 'erlang + :type 'boolean + :safe 'booleanp) -(defvar erlang-compile-use-outdir t - "*When nil, go to the directory containing source file when compiling. +(defcustom erlang-compile-use-outdir t + "When nil, go to the directory containing source file when compiling. This is a workaround for a bug in the `outdir' option of compile. If the outdir is not in the current load path, Erlang doesn't load the object module after it has been compiled. To activate the workaround, place the following in your `~/.emacs' file: - (setq erlang-compile-use-outdir nil)") - -(defvar erlang-indent-level 4 - "*Indentation of Erlang calls/clauses within blocks.") -(put 'erlang-indent-level 'safe-local-variable 'integerp) - -(defvar erlang-icr-indent nil - "*Indentation of Erlang if/case/receive/ patterns. `nil' means - keeping default behavior. When non-nil, indent to th column of - if/case/receive.") - -(defvar erlang-indent-guard 2 - "*Indentation of Erlang guards.") -(put 'erlang-indent-guard 'safe-local-variable 'integerp) - -(defvar erlang-argument-indent 2 - "*Indentation of the first argument in a function call. + (setq erlang-compile-use-outdir nil)" + :group 'erlang + :type 'boolean + :safe 'booleanp) + +(defcustom erlang-indent-level 4 + "Indentation of Erlang calls/clauses within blocks." + :group 'erlang + :type 'integer + :safe 'integerp) + +(defcustom erlang-icr-indent nil + "Indentation of Erlang if/case/receive patterns. +nil means keeping default behavior. When non-nil, indent to the column of +if/case/receive." + :group 'erlang + :type 'boolean + :safe 'booleanp) + +(defcustom erlang-indent-guard 2 + "Indentation of Erlang guards." + :group 'erlang + :type 'integer + :safe 'integerp) + +(defcustom erlang-argument-indent 2 + "Indentation of the first argument in a function call. When nil, indent to the column after the `(' of the -function.") -(put 'erlang-argument-indent 'safe-local-variable '(lambda (val) (or (null val) (integerp val)))) - -(defvar erlang-tab-always-indent t - "*Non-nil means TAB in Erlang mode should always re-indent the current line, -regardless of where in the line point is when the TAB command is used.") +function." + :group 'erlang + :type '(restricted-sexp :match-alternatives (integerp 'nil)) + :safe (lambda (val) (or (eq val nil) (integerp val)))) + +(defcustom erlang-tab-always-indent t + "Non-nil means TAB in Erlang mode should always re-indent the current line, +regardless of where in the line point is when the TAB command is used." 
+ :group 'erlang + :type 'boolean + :safe 'booleanp) (defvar erlang-man-inhibit (eq system-type 'windows-nt) "Inhibit the creation of the Erlang Manual Pages menu. @@ -509,7 +524,7 @@ there is no attempt to create the menu.") ("Man - Modules" "/man/man3" t) ("Man - Files" "/man/man4" t) ("Man - Applications" "/man/man6" t)) - "*The man directories displayed in the Erlang menu. + "The man directories displayed in the Erlang menu. Each item in the list should be a list with three elements, the first the name of the menu, the second the directory, and the last a flag. @@ -517,17 +532,17 @@ Should the flag the nil, the directory is absolute, should it be non-nil the directory is relative to the variable `erlang-root-dir'.") (defvar erlang-man-max-menu-size 35 - "*The maximum number of menu items in one menu allowed.") + "The maximum number of menu items in one menu allowed.") (defvar erlang-man-display-function 'erlang-man-display - "*Function used to display man page. + "Function used to display man page. The function is called with one argument, the name of the file containing the man page. Use this variable when the default function, `erlang-man-display', does not work on your system.") (defvar erlang-compile-extra-opts '() - "*Additional options to the compilation command. + "Additional options to the compilation command. This is an elisp list of options. Each option can be either: - an atom - a dotted pair @@ -539,7 +554,7 @@ Example: '(bin_opt_info (i . \"/path1/include\") (i . \"/path2/include\"))") (".xrl\\'" . inferior-erlang-compute-leex-compile-command) (".yrl\\'" . inferior-erlang-compute-yecc-compile-command) ("." . inferior-erlang-compute-erl-compile-command)) - "*Alist of filename patterns vs corresponding compilation functions. + "Alist of filename patterns vs corresponding compilation functions. Each element looks like (REGEXP . FUNCTION). Compiling a file whose name matches REGEXP specifies FUNCTION to use to compute the compilation command. The FUNCTION will be called with two arguments: module name and @@ -547,14 +562,14 @@ default compilation options, like output directory. The FUNCTION is expected to return a string.") (defvar erlang-leex-compile-opts '() - "*Options to pass to leex when compiling xrl files. + "Options to pass to leex when compiling xrl files. This is an elisp list of options. Each option can be either: - an atom - a dotted pair - a string") (defvar erlang-yecc-compile-opts '() - "*Options to pass to yecc when compiling yrl files. + "Options to pass to yecc when compiling yrl files. This is an elisp list of options. Each option can be either: - an atom - a dotted pair @@ -562,7 +577,7 @@ This is an elisp list of options. Each option can be either: (eval-and-compile (defvar erlang-regexp-modern-p - (if (> erlang-emacs-major-version 21) t nil) + (if (> emacs-major-version 21) t nil) "Non-nil when this version of Emacs uses a modern version of regexp. 
Supporting \_< and \_> This is determined by checking the version of Emacs used.")) @@ -608,6 +623,24 @@ The regexp must be surrounded with a pair of regexp parentheses.")) This is used to determine matches in complex regexps which contains `erlang-variable-regexp'.")) +(defconst erlang-module-function-regexp + (eval-when-compile + (concat erlang-atom-regexp ":" erlang-atom-regexp)) + "Regexp matching an erlang module:function.") + +(defconst erlang-name-regexp + (concat "\\(" + "\\(?:\\sw\\|\\s_\\)+" + "\\|" + erlang-atom-quoted-regexp + "\\)") + "Matches a name of a function, macro or record") + +(defconst erlang-id-regexp + (concat "\\(?:\\(qualified-function\\|record\\|macro\\|module\\) \\)?" + "\\(?:" erlang-atom-regexp ":\\)?" + erlang-name-regexp "?" + "\\(?:/\\([0-9]+\\)\\)?")) (eval-and-compile (defun erlang-regexp-opt (strings &optional paren) @@ -983,7 +1016,7 @@ resulting regexp is surrounded by \\_< and \\_>." "Regexp which should match beginning of a clause.") (defvar erlang-file-name-extension-regexp "\\.erl$" - "*Regexp which should match an Erlang file name. + "Regexp which should match an Erlang file name. This regexp is used when an Erlang module name is extracted from the name of an Erlang source file. @@ -997,7 +1030,7 @@ tags system should interpret tags on the form `module:tag' for files written in other languages than Erlang.") (defvar erlang-inferior-shell-split-window t - "*If non-nil, when starting an inferior shell, split windows. + "If non-nil, when starting an inferior shell, split windows. If nil, the inferior shell replaces the window. This is the traditional behaviour.") @@ -1043,7 +1076,7 @@ behaviour.") (unless inferior-erlang-use-cmm (define-key map "\C-x`" 'erlang-next-error)) map) - "*Keymap used in Erlang mode.") + "Keymap used in Erlang mode.") (defvar erlang-mode-abbrev-table nil "Abbrev table in use in Erlang-mode buffers.") (defvar erlang-mode-syntax-table nil @@ -1310,29 +1343,6 @@ replaced by `erlang-etags-tags-completion-table'.") ;;; Avoid errors while compiling this file. -;; `eval-when-compile' is not defined in Emacs 18. We define it as a -;; no-op. -(or (fboundp 'eval-when-compile) - (defmacro eval-when-compile (&rest rest) nil)) - -;; These umm...functions are new in Emacs 20. And, yes, until version -;; 19.27 Emacs backquotes were this ugly. - -(or (fboundp 'unless) - (defmacro unless (condition &rest body) - "(unless CONDITION BODY...): If CONDITION is false, do BODY, else return nil." - `((if (, condition) nil ,@body)))) - -(or (fboundp 'when) - (defmacro when (condition &rest body) - "(when CONDITION BODY...): If CONDITION is true, do BODY, else return nil." - `((if (, condition) (progn ,@body) nil)))) - -(or (fboundp 'char-before) - (defmacro char-before (&optional pos) - "Return the character in the current buffer just before POS." - `( (char-after (1- (or ,pos (point))))))) - ;; defvar some obsolete variables, which we still support for ;; backwards compatibility reasons. (eval-when-compile @@ -1360,20 +1370,11 @@ replaced by `erlang-etags-tags-completion-table'.") (defun erlang-version () "Return the current version of Erlang mode." 
(interactive) - (if (erlang-interactive-p) + (if (called-interactively-p 'interactive) (message "Erlang mode version %s, written by Anders Lindgren" erlang-version)) erlang-version) -(defun erlang-interactive-p () - (if (fboundp 'called-interactively-p) - (called-interactively-p 'interactive) - (funcall (symbol-function 'interactive-p)))) - -(unless (fboundp 'prog-mode) - (defun prog-mode () - (use-local-map (make-keymap)))) - ;;;###autoload (define-derived-mode erlang-mode prog-mode "Erlang" "Major mode for editing Erlang source files in Emacs. @@ -1462,40 +1463,43 @@ Other commands: (add-to-list 'auto-mode-alist (cons r 'erlang-mode))) (defun erlang-syntax-table-init () - (if (null erlang-mode-syntax-table) - (let ((table (make-syntax-table))) - (modify-syntax-entry ?\n ">" table) - (modify-syntax-entry ?\" "\"" table) - (modify-syntax-entry ?# "." table) - ;; (modify-syntax-entry ?$ "\\" table) ;; Creates problems with indention afterwards - ;; (modify-syntax-entry ?$ "'" table) ;; Creates syntax highlighting and indention problems - (modify-syntax-entry ?$ "/" table) ;; Misses the corner case "string that ends with $" - ;; we have to live with that for now..it is the best alternative - ;; that can be worked around with "string hat ends with \$" - (modify-syntax-entry ?% "<" table) - (modify-syntax-entry ?& "." table) - (modify-syntax-entry ?\' "\"" table) - (modify-syntax-entry ?* "." table) - (modify-syntax-entry ?+ "." table) - (modify-syntax-entry ?- "." table) - (modify-syntax-entry ?/ "." table) - (modify-syntax-entry ?: "." table) - (modify-syntax-entry ?< "." table) - (modify-syntax-entry ?= "." table) - (modify-syntax-entry ?> "." table) - (modify-syntax-entry ?\\ "\\" table) - (modify-syntax-entry ?_ "_" table) - (modify-syntax-entry ?| "." table) - (modify-syntax-entry ?^ "'" table) - - ;; Pseudo bit-syntax: Latin1 double angle quotes as parens. - ;;(modify-syntax-entry ?\253 "(?\273" table) - ;;(modify-syntax-entry ?\273 ")?\253" table) - - (setq erlang-mode-syntax-table table))) - + (erlang-ensure-syntax-table-is-initialized) (set-syntax-table erlang-mode-syntax-table)) +(defun erlang-ensure-syntax-table-is-initialized () + (unless erlang-mode-syntax-table + (let ((table (make-syntax-table))) + (modify-syntax-entry ?\n ">" table) + (modify-syntax-entry ?\" "\"" table) + (modify-syntax-entry ?# "." table) + ;; (modify-syntax-entry ?$ "\\" table) ;; Creates problems with indention afterwards + ;; (modify-syntax-entry ?$ "'" table) ;; Creates syntax highlighting and indention problems + (modify-syntax-entry ?$ "/" table) ;; Misses the corner case "string that ends with $" + ;; we have to live with that for now..it is the best alternative + ;; that can be worked around with "string that ends with \$" + (modify-syntax-entry ?% "<" table) + (modify-syntax-entry ?& "." table) + (modify-syntax-entry ?\' "\"" table) + (modify-syntax-entry ?* "." table) + (modify-syntax-entry ?+ "." table) + (modify-syntax-entry ?- "." table) + (modify-syntax-entry ?/ "." table) + (modify-syntax-entry ?: "." table) + (modify-syntax-entry ?< "." table) + (modify-syntax-entry ?= "." table) + (modify-syntax-entry ?> "." table) + (modify-syntax-entry ?\\ "\\" table) + (modify-syntax-entry ?_ "_" table) + (modify-syntax-entry ?| "." table) + (modify-syntax-entry ?^ "'" table) + + ;; Pseudo bit-syntax: Latin1 double angle quotes as parens. 
+ ;;(modify-syntax-entry ?\253 "(?\273" table) + ;;(modify-syntax-entry ?\273 ")?\253" table) + + (setq erlang-mode-syntax-table table)))) + + (defun erlang-electric-init () ;; Set up electric character functions to work with @@ -1541,7 +1545,7 @@ Other commands: (make-local-variable 'indent-region-function) (setq indent-region-function 'erlang-indent-region) (set (make-local-variable 'comment-indent-function) 'erlang-comment-indent) - (if (<= erlang-emacs-major-version 18) + (if (<= emacs-major-version 18) (set (make-local-variable 'comment-indent-hook) 'erlang-comment-indent)) (set (make-local-variable 'parse-sexp-ignore-comments) t) (set (make-local-variable 'dabbrev-case-fold-search) nil) @@ -1778,7 +1782,7 @@ Please see the variable `erlang-menu-base-items'." (if (and popup (boundp 'mode-popup-menu)) (funcall (symbol-function 'set) 'mode-popup-menu erlang-xemacs-popup-menu)))) - ((>= erlang-emacs-major-version 19) + ((>= emacs-major-version 19) (define-key keymap (vector 'menu-bar (intern name)) (erlang-menu-make-keymap name items))) (t nil))) @@ -1961,7 +1965,9 @@ menu is left unchanged." The variable `erlang-man-dirs' contains entries describing the location of the manual pages." (interactive) - (if erlang-man-inhibit + (if (or erlang-man-inhibit + (and (boundp 'menu-bar-mode) + (not menu-bar-mode))) () (setq erlang-menu-man-items '(nil @@ -2000,7 +2006,7 @@ The format is described in the documentation of `erlang-man-dirs'." (setq dir (cond ((nth 2 (car dir-list)) ;; Relative to `erlang-root-dir'. (and (stringp erlang-root-dir) - (concat erlang-root-dir (nth 1 (car dir-list))))) + (erlang-man-dir (nth 1 (car dir-list))))) (t ;; Absolute (nth 1 (car dir-list))))) @@ -2018,6 +2024,8 @@ The format is described in the documentation of `erlang-man-dirs'." '(("Man Pages" (("Error! Why?" erlang-man-describe-error))))))) +(defun erlang-man-dir (subdir) + (concat erlang-root-dir "/lib/erlang/" subdir)) ;; Should the menu be to long, let's split it into a number of ;; smaller menus. Warning, this code contains beautiful @@ -2080,7 +2088,7 @@ menus is created." "Find manual page for MODULE, defaults to module of function under point. This function is aware of imported functions." (interactive - (list (let* ((mod (car-safe (erlang-get-function-under-point))) + (list (let* ((mod (erlang-default-module)) (input (read-string (format "Manual entry for module%s: " (if (or (null mod) (string= mod "")) @@ -2089,26 +2097,36 @@ This function is aware of imported functions." 
(if (string= input "") mod input)))) - (or module (setq module (car (erlang-get-function-under-point)))) - (if (or (null module) (string= module "")) - (error "No Erlang module name given")) + (setq module (or module + (erlang-default-module))) + (when (or (null module) (string= module "")) + (error "No Erlang module name given")) (let ((dir-list erlang-man-dirs) - (pat (concat "/" (regexp-quote module) "\\.\\([124-9]\\|3\\(erl\\)?\\)\\(\\.gz\\)?$")) + (pat (concat "/" (regexp-quote module) + "\\.\\([124-9]\\|3\\(erl\\)?\\)\\(\\.gz\\)?$")) (file nil) file-list) (while (and dir-list (null file)) - (setq file-list (erlang-man-get-files - (if (nth 2 (car dir-list)) - (concat erlang-root-dir (nth 1 (car dir-list))) - (nth 1 (car dir-list))))) - (while (and file-list (null file)) - (if (string-match pat (car file-list)) - (setq file (car file-list))) - (setq file-list (cdr file-list))) - (setq dir-list (cdr dir-list))) + (let ((dir (if (nth 2 (car dir-list)) + (erlang-man-dir (nth 1 (car dir-list))) + (nth 1 (car dir-list))))) + (when (file-directory-p dir) + (setq file-list (erlang-man-get-files dir)) + (while (and file-list (null file)) + (if (string-match pat (car file-list)) + (setq file (car file-list))) + (setq file-list (cdr file-list)))) + (setq dir-list (cdr dir-list)))) (if file (funcall erlang-man-display-function file) - (error "No manual page for module %s found" module)))) + ;; Did not find the manual file. Fall back to manual-entry. + (manual-entry module))))) + +(defun erlang-default-module () + (let ((id (erlang-get-identifier-at-point))) + (if (eq (erlang-id-kind id) 'qualified-function) + (erlang-id-module id) + (erlang-id-name id)))) ;; Warning, the function `erlang-man-function' is a hack! @@ -2128,37 +2146,28 @@ The entry for `function' is displayed. This function is aware of imported functions." (interactive - (list (let* ((mod-func (erlang-get-function-under-point)) - (mod (car-safe mod-func)) - (func (nth 1 mod-func)) + (list (let* ((default (erlang-default-function-or-module)) (input (read-string (format "Manual entry for `module:func' or `module'%s: " - (if (or (null mod) (string= mod "")) - "" - (format " (default %s:%s)" mod func)))))) + (if default + (format " (default %s)" default) + ""))))) (if (string= input "") - (if (and mod func) - (concat mod ":" func) - mod) + default input)))) - ;; Emacs 18 doesn't provide `man'... - (condition-case nil - (require 'man) - (error nil)) + (require 'man) + (setq name (or name + (erlang-default-function-or-module))) (let ((modname nil) (funcname nil)) - (cond ((null name) - (let ((mod-func (erlang-get-function-under-point))) - (setq modname (car-safe mod-func)) - (setq funcname (nth 1 mod-func)))) - ((string-match ":" name) + (cond ((string-match ":" name) (setq modname (substring name 0 (match-beginning 0))) (setq funcname (substring name (match-end 0) nil))) ((stringp name) (setq modname name))) - (if (or (null modname) (string= modname "")) - (error "No Erlang module name given")) + (when (or (null modname) (string= modname "")) + (error "No Erlang module name given")) (cond ((fboundp 'Man-notify-when-ready) ;; Emacs 19: The man command could possibly start an ;; asynchronous process, i.e. we must hook ourselves into
() (erlang-man-patch-notify) (setq erlang-man-function-name funcname)) - (condition-case nil + (condition-case err (erlang-man-module modname) - (error (setq erlang-man-function-name nil)))) + (error (setq erlang-man-function-name nil) + (signal (car err) (cdr err))))) (t (erlang-man-module modname) - (if funcname - (erlang-man-find-function - (or (get-buffer "*Manual Entry*") ; Emacs 18 - (current-buffer)) ; XEmacs - funcname)))))) + (when funcname + (erlang-man-find-function (current-buffer) funcname)))))) + +(defun erlang-default-function-or-module () + (let ((id (erlang-get-identifier-at-point))) + (if (eq (erlang-id-kind id) 'qualified-function) + (format "%s:%s" (erlang-id-module id) (erlang-id-name id)) + (erlang-id-name id)))) ;; Should the defadvice be at the top level, the package `advice' would @@ -2222,36 +2235,22 @@ command is executed asynchronously." (set-window-point win (point))) (message "Could not find function `%s'" func))))))) +(defvar erlang-man-file-regexp + "\\(.*\\)/man[^/]*/\\([^.]+\\)\\.\\([124-9]\\|3\\(erl\\)?\\)\\(\\.gz\\)?$") (defun erlang-man-display (file) "Display FILE as a `man' file. This is the default manual page display function. The variables `erlang-man-display-function' contains the function to be used." - ;; Emacs 18 doesn't `provide' man. - (condition-case nil - (require 'man) - (error nil)) + (require 'man) (if file (let ((process-environment (copy-sequence process-environment))) - (if (string-match "\\(.*\\)/man[^/]*/\\([^.]+\\)\\.\\([124-9]\\|3\\(erl\\)?\\)\\(\\.gz\\)?$" file) + (if (string-match erlang-man-file-regexp file) (let ((dir (substring file (match-beginning 1) (match-end 1))) (page (substring file (match-beginning 2) (match-end 2)))) - (if (fboundp 'setenv) - (setenv "MANPATH" dir) - ;; Emacs 18 - (setq process-environment (cons (concat "MANPATH=" dir) - process-environment))) - (cond ((not (and (not erlang-xemacs-p) - (= erlang-emacs-major-version 19) - (< erlang-emacs-minor-version 29))) - (manual-entry page)) - (t - ;; Emacs 19.28 and earlier versions of 19: - ;; The manual-entry command unconditionally prompts - ;; the user :-( - (funcall (symbol-function 'Man-getpage-in-background) - page)))) + (setenv "MANPATH" dir) + (manual-entry page)) (error "Can't find man page for %s\n" file))))) @@ -2394,7 +2393,7 @@ can contain other `tempo' attributes. Please see the function The first character of DD is space if the value is less than 10." (let ((date (current-time-string))) (format "%2d %s %s" - (erlang-string-to-int (substring date 8 10)) + (string-to-number (substring date 8 10)) (substring date 4 7) (substring date -4)))) @@ -2956,10 +2955,10 @@ Return nil if inside string, t if in a comment." ((eq (car stack-top) '->) ;; If in fun definition use standard indent level not double ;;(if (not (eq (car (car (cdr stack))) 'fun)) - ;; Removed it made multi clause fun's look to bad + ;; Removed it made multi clause fun's look too bad (setq off (+ erlang-indent-level (if (not erlang-icr-indent) erlang-indent-level - erlang-icr-indent))))) + erlang-icr-indent))))) (let ((base (erlang-indent-find-base stack indent-point off skip))) ;; Special cases (goto-char indent-point) @@ -3597,7 +3596,7 @@ corresponds to the order of the parsed Erlang list." 
(erlang-remove-quotes (erlang-buffer-substring (match-beginning 1) (match-end 1))) - (erlang-string-to-int + (string-to-number (erlang-buffer-substring (match-beginning (+ 1 erlang-atom-regexp-matches)) @@ -3696,34 +3695,50 @@ Normally used in conjunction with `erlang-beginning-of-clause', e.g.: (defun erlang-get-function-arity () "Return the number of arguments of function at point, or nil." - (and (looking-at (eval-when-compile - (concat "^" erlang-atom-regexp "\\s *("))) - (save-excursion - (goto-char (match-end 0)) - (condition-case nil - (let ((res 0) - (cont t)) - (while cont - (cond ((eobp) - (setq res nil) - (setq cont nil)) - ((looking-at "\\s *)") - (setq cont nil)) - ((looking-at "\\s *\\($\\|%\\)") - (forward-line 1)) - ((looking-at "\\s *<<[^>]*?>>") - (when (zerop res) - (setq res (+ 1 res))) - (goto-char (match-end 0))) - ((looking-at "\\s *,") - (setq res (+ 1 res)) - (goto-char (match-end 0))) - (t - (when (zerop res) - (setq res (+ 1 res))) - (forward-sexp 1)))) - res) - (error nil))))) + (erlang-get-arity-after-regexp (concat "^" erlang-atom-regexp "\\s *("))) + +(defun erlang-get-argument-list-arity () + "Return the number of arguments in argument list at point, or nil. +The point should be before the opening parenthesis of the +argument list before calling this function." + (erlang-get-arity-after-regexp "\\s *(")) + +(defun erlang-get-arity-after-regexp (regexp) + "Return the number of arguments in argument list after REGEXP, or nil." + (when (looking-at regexp) + (save-excursion + (goto-char (match-end 0)) + (erlang-get-arity)))) + +(defun erlang-get-arity () + "Return the number of arguments in argument list at point, or nil. +The point should be after the opening parenthesis of the argument +list before calling this function." + (condition-case nil + (let ((res 0) + (cont t)) + (while cont + (cond ((eobp) + (setq res nil) + (setq cont nil)) + ((looking-at "\\s *)") + (setq cont nil)) + ((looking-at "\\s *\\($\\|%\\)") + (forward-line 1)) + ((looking-at "\\s *<<[^>]*?>>") + (when (zerop res) + (setq res (+ 1 res))) + (goto-char (match-end 0))) + ((looking-at "\\s *,") + (setq res (+ 1 res)) + (goto-char (match-end 0))) + (t + (when (zerop res) + (setq res (+ 1 res))) + (forward-sexp 1)))) + res) + (error nil))) + (defun erlang-get-function-name-and-arity () "Return the name and arity of the function at point, or nil. @@ -3746,6 +3761,8 @@ The return value is a string of the form \"foo/1\"." (error nil))))) +;; Keeping erlang-get-function-under-point for backward compatibility. +;; It is used by erldoc.el and maybe other code out there. (defun erlang-get-function-under-point () "Return the module and function under the point, or nil. @@ -3755,44 +3772,141 @@ list of imported functions is searched. The following could be returned: (\"module\" \"function\") -- Both module and function name found. (nil \"function\") -- No module name was found. - nil -- No function name found + nil -- No function name found. + +See also `erlang-get-identifier-at-point'." + (let* ((id (erlang-get-identifier-at-point)) + (kind (erlang-id-kind id)) + (module (erlang-id-module id)) + (name (erlang-id-name id))) + (cond ((eq kind 'qualified-function) + (list module name)) + (name + (list nil name))))) + +(defun erlang-get-identifier-at-point () + "Return the erlang identifier at point, or nil. + +Should no explicit module name be present at the point, the +list of imported functions is searched. + +When an identifier is found return a list with 4 elements: + +1. 
Kind - One of the symbols qualified-function, record, macro, +module or nil. + +2. Module - Module name string or nil. In case of a +qualified-function a search fails if no entries with correct +module are found. For other kinds the module is just a +preference. If no matching entries are found the search will be +retried without regard to module. + +3. Name - String name of function, module, record or macro. -In the future the list may contain more elements." +4. Arity - Integer in case of functions and macros if the number +of arguments could be found, otherwise nil." (save-excursion - (let ((md (match-data)) - (res nil)) + (save-match-data (if (eq (char-syntax (following-char)) ? ) (skip-chars-backward " \t")) - (skip-chars-backward "a-zA-Z0-9_:'") - (cond ((looking-at (eval-when-compile - (concat erlang-atom-regexp ":" erlang-atom-regexp))) - (setq res (list - (erlang-remove-quotes - (erlang-buffer-substring - (match-beginning 1) (match-end 1))) - (erlang-remove-quotes - (erlang-buffer-substring - (match-beginning (1+ erlang-atom-regexp-matches)) - (match-end (1+ erlang-atom-regexp-matches))))))) - ((looking-at erlang-atom-regexp) - (let ((fk (erlang-remove-quotes - (erlang-buffer-substring - (match-beginning 0) (match-end 0)))) - (mod nil) - (imports (erlang-get-import))) - (while (and imports (null mod)) - (if (assoc fk (cdr (car imports))) - (setq mod (car (car imports))) - (setq imports (cdr imports)))) - (cond ((eq (preceding-char) ?#) - (setq fk (concat "-record(" fk))) - ((eq (preceding-char) ??) - (setq fk (concat "-define(" fk))) - ((and (null mod) (not (member fk erlang-int-bifs))) - (setq mod (erlang-get-module)))) - (setq res (list mod fk))))) - (store-match-data md) - res))) + (skip-chars-backward "[:word:]_:'") + (cond ((looking-at erlang-module-function-regexp) + (erlang-get-qualified-function-id-at-point)) + ((looking-at (concat erlang-atom-regexp ":")) + (erlang-get-module-id-at-point)) + ((looking-at erlang-name-regexp) + (erlang-get-some-other-id-at-point)))))) + +(defun erlang-get-qualified-function-id-at-point () + (let ((kind 'qualified-function) + (module (erlang-remove-quotes + (erlang-buffer-substring + (match-beginning 1) (match-end 1)))) + (name (erlang-remove-quotes + (erlang-buffer-substring + (match-beginning (1+ erlang-atom-regexp-matches)) + (match-end (1+ erlang-atom-regexp-matches))))) + (arity (progn + (goto-char (match-end 0)) + (erlang-get-argument-list-arity)))) + (list kind module name arity))) + +(defun erlang-get-module-id-at-point () + (let ((kind 'module) + (module nil) + (name (erlang-remove-quotes + (erlang-buffer-substring (match-beginning 1) + (match-end 1)))) + (arity nil)) + (list kind module name arity))) + +(defun erlang-get-some-other-id-at-point () + (let ((name (erlang-remove-quotes + (erlang-buffer-substring + (match-beginning 0) (match-end 0)))) + (imports (erlang-get-import)) + kind module arity) + (while (and imports (null module)) + (if (assoc name (cdr (car imports))) + (setq module (car (car imports))) + (setq imports (cdr imports)))) + (cond ((eq (preceding-char) ?#) + (setq kind 'record)) + ((eq (preceding-char) ??) 
+ (setq kind 'macro)) + ((and (null module) (not (member name erlang-int-bifs))) + (setq module (erlang-get-module)))) + (setq arity (progn + (goto-char (match-end 0)) + (erlang-get-argument-list-arity))) + (list kind module name arity))) + +(defmacro erlang-with-id (slots id-string &rest body) + (declare (indent 2)) + (let ((id-var (make-symbol "id"))) + `(let* ((,id-var (erlang-id-to-list ,id-string)) + ,@(mapcar (lambda (slot) + (list slot + (list (intern (format "erlang-id-%s" slot)) + id-var))) + slots)) + ,@body))) + +(defun erlang-id-to-string (id) + (when id + (erlang-with-id (kind module name arity) id + (format "%s%s%s%s" + (if kind (format "%s " kind) "") + (if module (format "%s:" module) "") + name + (if arity (format "/%s" arity) ""))))) + +(defun erlang-id-to-list (id) + (if (listp id) + id + (save-match-data + (erlang-ensure-syntax-table-is-initialized) + (with-syntax-table erlang-mode-syntax-table + (let (case-fold-search) + (when (string-match erlang-id-regexp id) + (list (when (match-string 1 id) + (intern (match-string 1 id))) + (match-string 2 id) + (match-string 3 id) + (when (match-string 4 id) + (string-to-number (match-string 4 id)))))))))) + +(defun erlang-id-kind (id) + (car (erlang-id-to-list id))) + +(defun erlang-id-module (id) + (nth 1 (erlang-id-to-list id))) + +(defun erlang-id-name (id) + (nth 2 (erlang-id-to-list id))) + +(defun erlang-id-arity (id) + (nth 3 (erlang-id-to-list id))) ;; TODO: Escape single quotes inside the string without @@ -3822,10 +3936,10 @@ In the future the list may contain more elements." "Returns non-nil if there is an exported function in the current buffer between point and MAX." (block nil - (while (and (not erlang-inhibit-exported-function-name-face) - (erlang-match-next-function max)) - (when (erlang-last-match-exported-p) - (return (match-data)))))) + (while (and (not erlang-inhibit-exported-function-name-face) + (erlang-match-next-function max)) + (when (erlang-last-match-exported-p) + (return (match-data)))))) (defun erlang-match-next-function (max) "Searches forward in current buffer for the next erlang function, @@ -4084,7 +4198,7 @@ non-whitespace characters following the point on the current line." nil))) -(defun erlang-electric-arrow\ off (&optional arg) +(defun erlang-electric-arrow (&optional arg) "Insert a '>'-sign and possibly a new indented line. This command is only `electric' when the `>' is part of an `->' arrow. @@ -4310,8 +4424,8 @@ This function is designed to be a member of a criteria list." (looking-at "end[^_a-zA-Z0-9]"))) -;; Erlang tags support which is aware of erlang modules. -;; +;;; Erlang tags support which is aware of erlang modules. + ;; Not yet implemented under XEmacs. (Hint: The Emacs 19 etags ;; package works under XEmacs.) @@ -4369,7 +4483,7 @@ This function only works under Emacs 18 and Emacs 19. Currently, It is not implemented under XEmacs. (Hint: The Emacs 19 etags module works under XEmacs.)" (interactive) - (cond ((= erlang-emacs-major-version 18) + (cond ((= emacs-major-version 18) (require 'tags) (erlang-tags-define-keys (current-local-map)) (setq erlang-tags-installed t)) @@ -4409,20 +4523,6 @@ works under XEmacs.)" (erlang-menu-substitute erlang-menu-base-items erlang-tags-function-alist) (erlang-menu-init)) - -(defun erlang-find-tag-default () - "Return the default tag. -Search `-import' list of imported functions. -Single quotes are been stripped away." 
- (let ((mod-func (erlang-get-function-under-point))) - (cond ((null mod-func) - nil) - ((null (car mod-func)) - (nth 1 mod-func)) - (t - (concat (car mod-func) ":" (nth 1 mod-func)))))) - - ;; Return `t' since it is used inside `tags-loop-form'. ;;;###autoload (defun erlang-find-tag (modtagname &optional next-p regexp-p) @@ -4609,7 +4709,7 @@ Tags can be given on the forms `tag', `module:', `module:tag'." (list nil (if (< (prefix-numeric-value current-prefix-arg) 0) '- t)) - (let* ((default (erlang-find-tag-default)) + (let* ((default (erlang-default-function-or-module)) (prompt (if default (format "%s(default %s) " prompt default) prompt)) @@ -4633,7 +4733,7 @@ Tags can be given on the forms `tag', `module:', `module:tag'." ;; Make sure our functions are installed in TAGS files loaded ;; into Emacs while searching. (cond - ((>= erlang-emacs-major-version 20) + ((>= emacs-major-version 20) (setq erlang-tags-orig-format-functions (symbol-value 'tags-table-format-functions)) (funcall (symbol-function 'set) 'tags-table-format-functions @@ -4711,7 +4811,7 @@ Tags can be given on the forms `tag', `module:', `module:tag'." (defun erlang-tags-remove-module-check () "Remove our own tags search functions." (cond - ((>= erlang-emacs-major-version 20) + ((>= emacs-major-version 20) (funcall (symbol-function 'set) 'tags-table-format-functions erlang-tags-orig-format-functions) @@ -4961,6 +5061,14 @@ about Erlang modules." ;; It adds awareness of the module:tag syntax in a similar way that is ;; done above for the old etags commands. +(defvar erlang-current-arity nil + "The arity of the function currently being searched. + +There is no information about arity in the TAGS file. +Consecutive functions with same name but different arity will +only get one entry in the TAGS file. Matching TAGS entries are +therefore selected without regarding arity. The arity is +considered first when it is time to jump to the definition.") (defun erlang-etags--xref-backend () 'erlang-etags) @@ -4970,13 +5078,14 @@ about Erlang modules." (and (erlang-soft-require 'xref) (erlang-soft-require 'cl-generic) + (erlang-soft-require 'eieio) ;; The purpose of using eval here is to avoid compilation - ;; warnings in emacsen without cl-defmethod. + ;; warnings in emacsen without cl-defmethod etc. (eval '(progn (cl-defmethod xref-backend-identifier-at-point ((_backend (eql erlang-etags))) - (erlang-find-tag-default)) + (erlang-id-to-string (erlang-get-identifier-at-point))) (cl-defmethod xref-backend-definitions ((_backend (eql erlang-etags)) identifier) @@ -4989,42 +5098,99 @@ about Erlang modules." (cl-defmethod xref-backend-identifier-completion-table ((_backend (eql erlang-etags))) (let ((erlang-replace-etags-tags-completion-table t)) - (tags-completion-table)))))) - - + (tags-completion-table))) + + (defclass erlang-xref-location (xref-etags-location) ()) + + (defun erlang-convert-xrefs (xrefs) + (mapcar (lambda (xref) + (oset xref location (erlang-make-location + (oref xref location))) + xref) + xrefs)) + + (defun erlang-make-location (etags-location) + (with-slots (tag-info file) etags-location + (make-instance 'erlang-xref-location :tag-info tag-info + :file file))) + + (cl-defmethod xref-location-marker ((locus erlang-xref-location)) + (with-slots (tag-info file) locus + (with-current-buffer (find-file-noselect file) + (save-excursion + (or (erlang-goto-tag-location-by-arity tag-info) + (etags-goto-tag-location tag-info)) + ;; Reset erlang-current-arity. We want to jump to + ;; correct arity in the first attempt. 
That is now + ;; done. Possible remaining jumps will be from + ;; entries in the *xref* buffer and then we want to + ;; ignore the arity. (Alternatively we could remove + ;; all but one xref entry per file when we know the + ;; arity). + (setq erlang-current-arity nil) + (point-marker))))) + + (defun erlang-xref-context (xref) + (with-slots (tag-info) (xref-item-location xref) + (car tag-info)))))) + + +(defun erlang-goto-tag-location-by-arity (tag-info) + (when erlang-current-arity + (let* ((tag-text (car tag-info)) + (tag-pos (cdr (cdr tag-info))) + (tag-line (car (cdr tag-info))) + (regexp (erlang-tag-info-regexp tag-text)) + (startpos (or tag-pos + (when tag-line + (goto-char (point-min)) + (forward-line (1- tag-line)) + (point)) + (point-min)))) + (setq startpos (max (- startpos 2000) + (point-min))) + (goto-char startpos) + (let ((pos (or (erlang-search-by-arity regexp) + (unless (eq startpos (point-min)) + (goto-char (point-min)) + (erlang-search-by-arity regexp))))) + (when pos + (goto-char pos) + t))))) + +(defun erlang-tag-info-regexp (tag-text) + (concat "^" + (regexp-quote tag-text) + ;; Erlang function entries in TAGS includes the opening + ;; parenthesis for the argument list. Erlang macro entries + ;; do not. Add it here in order to end up in correct + ;; position for erlang-get-arity. + (if (string-prefix-p "-define" tag-text) + "\\s-*(" + ""))) + +(defun erlang-search-by-arity (regexp) + (let (pos) + (while (and (null pos) + (re-search-forward regexp nil t)) + (when (eq erlang-current-arity (save-excursion (erlang-get-arity))) + (setq pos (point-at-bol)))) + pos)) (defun erlang-xref-find-definitions (identifier &optional is-regexp) - (let ((id-list (split-string identifier ":"))) - (cond - ;; Handle "tag" - ((null (cdr id-list)) - (erlang-xref-find-definitions-tag identifier is-regexp)) - ;; Handle "module:" - ((string-equal (cadr id-list) "") - (erlang-xref-find-definitions-module (car id-list))) - ;; Handle "module:tag" - (t - (erlang-xref-find-definitions-module-tag (car id-list) - (cadr id-list) - is-regexp))))) - -(defun erlang-xref-find-definitions-tag (tag is-regexp) - "Find all definitions of TAG and reorder them so that -definitions in the currently visited file comes first." - (when (fboundp 'etags--xref-find-definitions) - (let* ((current-file (and (buffer-file-name) - (file-truename (buffer-file-name)))) - (xrefs (etags--xref-find-definitions tag is-regexp)) - local-xrefs non-local-xrefs) - (while xrefs - (if (string-equal (erlang-xref-truename-file (car xrefs)) - current-file) - (push (car xrefs) local-xrefs) - (push (car xrefs) non-local-xrefs)) - (setq xrefs (cdr xrefs))) - (append (reverse local-xrefs) - (reverse non-local-xrefs))))) + (erlang-with-id (kind module name arity) identifier + (setq erlang-current-arity arity) + (cond ((eq kind 'module) + (erlang-xref-find-definitions-module name)) + (module + (erlang-xref-find-definitions-module-tag module + name + (eq kind + 'qualified-function) + is-regexp)) + (t + (erlang-xref-find-definitions-tag kind name is-regexp))))) (defun erlang-xref-find-definitions-module (module) (and (fboundp 'xref-make) @@ -5048,17 +5214,58 @@ definitions in the currently visited file comes first." (setq files (cdr files)))))) (nreverse xrefs)))) -(defun erlang-xref-find-definitions-module-tag (module tag is-regexp) - "Find all definitions of TAG and filter away definitions -outside of MODULE." 
- (when (fboundp 'etags--xref-find-definitions) - (let ((xrefs (etags--xref-find-definitions tag is-regexp)) - xrefs-in-module) - (while xrefs - (when (string-equal module (erlang-xref-module (car xrefs))) - (push (car xrefs) xrefs-in-module)) - (setq xrefs (cdr xrefs))) - xrefs-in-module))) + +(defun erlang-xref-find-definitions-module-tag (module + tag + is-qualified + is-regexp) + "Find definitions of TAG and filter away definitions outside of +MODULE. If IS-QUALIFIED is nil and no definitions was found inside +the MODULE then return any definitions found outside. If +IS-REGEXP is non-nil then TAG is a regexp." + (and (fboundp 'etags--xref-find-definitions) + (fboundp 'erlang-convert-xrefs) + (let ((xrefs (erlang-convert-xrefs + (etags--xref-find-definitions tag is-regexp))) + xrefs-in-module) + (dolist (xref xrefs) + (when (string-equal module (erlang-xref-module xref)) + (push xref xrefs-in-module))) + (cond (is-qualified xrefs-in-module) + (xrefs-in-module xrefs-in-module) + (t xrefs))))) + +(defun erlang-xref-find-definitions-tag (kind tag is-regexp) + "Find all definitions of TAG and reorder them so that +definitions in the currently visited file comes first." + (and (fboundp 'etags--xref-find-definitions) + (fboundp 'erlang-convert-xrefs) + (let* ((current-file (and (buffer-file-name) + (file-truename (buffer-file-name)))) + (regexp (erlang-etags-regexp kind tag is-regexp)) + (xrefs (erlang-convert-xrefs + (etags--xref-find-definitions regexp t))) + local-xrefs non-local-xrefs) + (while xrefs + (let ((xref (car xrefs))) + (if (string-equal (erlang-xref-truename-file xref) + current-file) + (push xref local-xrefs) + (push xref non-local-xrefs)) + (setq xrefs (cdr xrefs)))) + (append (reverse local-xrefs) + (reverse non-local-xrefs))))) + +(defun erlang-etags-regexp (kind tag is-regexp) + (let ((tag-regexp (if is-regexp + tag + (regexp-quote tag)))) + (cond ((eq kind 'record) + (concat "-record\\s-*(\\s-*" tag-regexp)) + ((eq kind 'macro) + (concat "-define\\s-*(\\s-*" tag-regexp)) + (t tag-regexp)))) + (defun erlang-xref-module (xref) (erlang-get-module-from-file-name (erlang-xref-file xref))) @@ -5174,7 +5381,7 @@ future, a new shell on an already running host will be started." (defvar erlang-shell-mode-hook nil - "*User functions to run when an Erlang shell is started. + "User functions to run when an Erlang shell is started. This hook is used to change the behaviour of Erlang mode. It is normally used by the user to personalise the programming environment. @@ -5190,7 +5397,7 @@ Erlang source file is loaded into Emacs.") (defvar erlang-input-ring-file-name "~/.erlang_history" - "*When non-nil, file name used to store Erlang shell history information.") + "When non-nil, file name used to store Erlang shell history information.") (defun erlang-shell-mode () @@ -5290,7 +5497,7 @@ Selects Comint or Compilation mode command as appropriate." ;;; (defvar inferior-erlang-display-buffer-any-frame nil - "*When nil, `inferior-erlang-display-buffer' use only selected frame. + "When nil, `inferior-erlang-display-buffer' use only selected frame. When t, all frames are searched. When 'raise, the frame is raised.") (defvar inferior-erlang-shell-type 'newshell @@ -5303,10 +5510,10 @@ nil, the default shell is used. This variable influence the setting of other variables.") (defvar inferior-erlang-machine "erl" - "*The name of the Erlang shell.") + "The name of the Erlang shell.") (defvar inferior-erlang-machine-options '() - "*The options used when activating the Erlang shell. 
+ "The options used when activating the Erlang shell. This must be a list of strings.") @@ -5317,7 +5524,7 @@ This must be a list of strings.") "The name of the inferior Erlang buffer.") (defvar inferior-erlang-prompt-timeout 60 - "*Number of seconds before `inferior-erlang-wait-prompt' timeouts. + "Number of seconds before `inferior-erlang-wait-prompt' timeouts. The time specified is waited after every output made by the inferior Erlang shell. When this variable is t, we assume that we always have @@ -5383,7 +5590,7 @@ editing control characters: (setq inferior-erlang-process (get-buffer-process inferior-erlang-buffer)) - (if (> 21 erlang-emacs-major-version) ; funcalls to avoid compiler warnings + (if (> 21 emacs-major-version) ; funcalls to avoid compiler warnings (funcall (symbol-function 'set-process-query-on-exit-flag) inferior-erlang-process nil) (funcall (symbol-function 'process-kill-without-query) inferior-erlang-process)) @@ -5454,7 +5661,7 @@ frame will become deselected before the next command." (defun inferior-erlang-window (&optional all-frames) "Return the window containing the inferior Erlang, or nil." (and (inferior-erlang-running-p) - (if (and all-frames (>= erlang-emacs-major-version 19)) + (if (and all-frames (>= emacs-major-version 19)) (get-buffer-window inferior-erlang-buffer t) (get-buffer-window inferior-erlang-buffer)))) @@ -5551,7 +5758,7 @@ Return the position after the newly inserted command." (boundp 'comint-last-output-start)) (save-excursion (goto-char - (if (erlang-interactive-p) + (if (called-interactively-p 'interactive) (symbol-value 'comint-last-input-end) (symbol-value 'comint-last-output-start))) (while (progn (skip-chars-forward "^\C-h") @@ -5570,7 +5777,7 @@ Return the position after the newly inserted command." (let ((pmark (process-mark (get-buffer-process (current-buffer))))) (save-excursion (goto-char - (if (erlang-interactive-p) + (if (called-interactively-p 'interactive) (symbol-value 'comint-last-input-end) (symbol-value 'comint-last-output-start))) (while (re-search-forward "\r+$" pmark t) @@ -5938,12 +6145,6 @@ it assumes that NEWDEF is loaded." (ad-unadvise 'Man-notify-when-ready) (ad-unadvise 'set-visited-file-name))))) - -(defun erlang-string-to-int (string) - (if (fboundp 'string-to-number) - (string-to-number string) - (funcall (symbol-function 'string-to-int) string))) - ;; The end... (provide 'erlang) @@ -5951,7 +6152,7 @@ it assumes that NEWDEF is loaded." (run-hooks 'erlang-load-hook) ;; Local variables: -;; coding: iso-8859-1 +;; coding: utf-8 ;; indent-tabs-mode: nil ;; End: diff --git a/lib/tools/emacs/erldoc.el b/lib/tools/emacs/erldoc.el index cb355374d9..348800f880 100644 --- a/lib/tools/emacs/erldoc.el +++ b/lib/tools/emacs/erldoc.el @@ -23,8 +23,8 @@ ;; Crawl Erlang/OTP HTML documentation and generate lookup tables. ;; ;; This package depends on `cl-lib', `pcase' and -;; `libxml-parse-html-region'; emacs 24+ compiled with libxml2 should -;; work. On emacs 24.1 and 24.2 do `M-x package-install RET cl-lib +;; `libxml-parse-html-region'. Emacs 24+ compiled with libxml2 should +;; work. On Emacs 24.1 and 24.2 do `M-x package-install RET cl-lib ;; RET' to install `cl-lib'. ;; ;; Please customise `erldoc-man-index' to point to your local OTP @@ -407,7 +407,7 @@ up the indexing." 
(defvar erldoc-user-guides nil) (defvar erldoc-missing-user-guides - '("compiler" "hipe" "kernel" "os_mon" "parsetools" "typer") + '("compiler" "hipe" "kernel" "os_mon" "parsetools") "List of standard Erlang applications with no user guides.") ;; Search in `code:lib_dir/0' using find LIB_DIR -type f -name @@ -417,7 +417,7 @@ up the indexing." "runtime_tools" "sasl" "snmp" "ssl" "test_server" ("ssh" . "SSH") ("stdlib" . "STDLIB") - ("hipe" . "HiPE") ("typer" . "TypEr")) + ("hipe" . "HiPE")) "List of applications that come with a manual.") (defun erldoc-user-guide-chapters (user-guide) @@ -505,4 +505,10 @@ up the indexing." (browse-url (cdr (assoc topic (erldoc-user-guides))))) (provide 'erldoc) + +;; Local variables: +;; coding: utf-8 +;; indent-tabs-mode: nil +;; End: + ;;; erldoc.el ends here diff --git a/lib/tools/examples/xref_examples.erl b/lib/tools/examples/xref_examples.erl index 4c082195a2..f7e71c9708 100644 --- a/lib/tools/examples/xref_examples.erl +++ b/lib/tools/examples/xref_examples.erl @@ -7,7 +7,7 @@ %% ${HOME}/unused_locals.txt. script() -> Root = code:root_dir(), - Dir = os:getenv("HOME"), + {ok,[[Dir]]} = init:get_argument(home), Server = s, xref:start(Server), {ok, _Relname} = xref:add_release(Server, code:lib_dir(), {name,otp}), diff --git a/lib/tools/src/make.erl b/lib/tools/src/make.erl index 37e67cbe34..60695febb4 100644 --- a/lib/tools/src/make.erl +++ b/lib/tools/src/make.erl @@ -29,7 +29,7 @@ -include_lib("kernel/include/file.hrl"). --define(MakeOpts,[noexec,load,netload,noload]). +-define(MakeOpts,[noexec,load,netload,noload,emake]). all_or_nothing() -> case all() of @@ -43,29 +43,30 @@ all() -> all([]). all(Options) -> - {MakeOpts,CompileOpts} = sort_options(Options,[],[]), - case read_emakefile('Emakefile',CompileOpts) of - Files when is_list(Files) -> - do_make_files(Files,MakeOpts); - error -> - error - end. + run_emake(undefined, Options). files(Fs) -> files(Fs, []). files(Fs0, Options) -> Fs = [filename:rootname(F,".erl") || F <- Fs0], + run_emake(Fs, Options). + +run_emake(Mods, Options) -> {MakeOpts,CompileOpts} = sort_options(Options,[],[]), - case get_opts_from_emakefile(Fs,'Emakefile',CompileOpts) of + Emake = get_emake(Options), + case normalize_emake(Emake, Mods, CompileOpts) of Files when is_list(Files) -> - do_make_files(Files,MakeOpts); - error -> error + do_make_files(Files,MakeOpts); + error -> + error end. do_make_files(Fs, Opts) -> process(Fs, lists:member(noexec, Opts), load_opt(Opts)). +sort_options([{emake, _}=H|T],Make,Comp) -> + sort_options(T,[H|Make],Comp); sort_options([H|T],Make,Comp) -> case lists:member(H,?MakeOpts) of @@ -89,20 +90,35 @@ sort_options([],Make,Comp) -> %%% %%% These elements are converted to [{ModList,OptList},...] 
%%% ModList is a list of modulenames (strings) -read_emakefile(Emakefile,Opts) -> - case file:consult(Emakefile) of - {ok,Emake} -> + +normalize_emake(EmakeRaw, Mods, Opts) -> + case EmakeRaw of + {ok, Emake} when Mods =:= undefined -> transform(Emake,Opts,[],[]); - {error,enoent} -> + {ok, Emake} when is_list(Mods) -> + ModsOpts = transform(Emake,Opts,[],[]), + ModStrings = [coerce_2_list(M) || M <- Mods], + get_opts_from_emakefile(ModsOpts,ModStrings,Opts,[]); + {error,enoent} when Mods =:= undefined -> %% No Emakefile found - return all modules in current %% directory and the options given at command line - Mods = [filename:rootname(F) || F <- filelib:wildcard("*.erl")], + CwdMods = [filename:rootname(F) || F <- filelib:wildcard("*.erl")], + [{CwdMods, Opts}]; + {error,enoent} when is_list(Mods) -> [{Mods, Opts}]; - {error,Other} -> - io:format("make: Trouble reading 'Emakefile':~n~tp~n",[Other]), + {error, Error} -> + io:format("make: Trouble reading 'Emakefile':~n~tp~n",[Error]), error end. +get_emake(Opts) -> + case proplists:get_value(emake, Opts, false) of + false -> + file:consult('Emakefile'); + OptsEmake -> + {ok, OptsEmake} + end. + transform([{Mod,ModOpts}|Emake],Opts,Files,Already) -> case expand(Mod,Already) of [] -> @@ -143,31 +159,19 @@ expand(Mod,Already) -> end end. -%%% Reads the given Emakefile to see if there are any specific compile +%%% Reads the given Emake to see if there are any specific compile %%% options given for the modules. -get_opts_from_emakefile(Mods,Emakefile,Opts) -> - case file:consult(Emakefile) of - {ok,Emake} -> - Modsandopts = transform(Emake,Opts,[],[]), - ModStrings = [coerce_2_list(M) || M <- Mods], - get_opts_from_emakefile2(Modsandopts,ModStrings,Opts,[]); - {error,enoent} -> - [{Mods, Opts}]; - {error,Other} -> - io:format("make: Trouble reading 'Emakefile':~n~tp~n",[Other]), - error - end. -get_opts_from_emakefile2([{MakefileMods,O}|Rest],Mods,Opts,Result) -> +get_opts_from_emakefile([{MakefileMods,O}|Rest],Mods,Opts,Result) -> case members(Mods,MakefileMods,[],Mods) of {[],_} -> - get_opts_from_emakefile2(Rest,Mods,Opts,Result); + get_opts_from_emakefile(Rest,Mods,Opts,Result); {I,RestOfMods} -> - get_opts_from_emakefile2(Rest,RestOfMods,Opts,[{I,O}|Result]) + get_opts_from_emakefile(Rest,RestOfMods,Opts,[{I,O}|Result]) end; -get_opts_from_emakefile2([],[],_Opts,Result) -> +get_opts_from_emakefile([],[],_Opts,Result) -> Result; -get_opts_from_emakefile2([],RestOfMods,Opts,Result) -> +get_opts_from_emakefile([],RestOfMods,Opts,Result) -> [{RestOfMods,Opts}|Result]. members([H|T],MakefileMods,I,Rest) -> diff --git a/lib/tools/test/Makefile b/lib/tools/test/Makefile index 84c4e56aff..fe65d1484d 100644 --- a/lib/tools/test/Makefile +++ b/lib/tools/test/Makefile @@ -52,8 +52,8 @@ RELSYSDIR = $(RELEASE_PATH)/tools_test # ---------------------------------------------------- # FLAGS # ---------------------------------------------------- -ERL_MAKE_FLAGS += -ERL_COMPILE_FLAGS += -I$(ERL_TOP)/lib/percept/include +ERL_MAKE_FLAGS += +ERL_COMPILE_FLAGS += EBIN = . diff --git a/lib/tools/test/make_SUITE.erl b/lib/tools/test/make_SUITE.erl index e6284db8b8..2a94ead329 100644 --- a/lib/tools/test/make_SUITE.erl +++ b/lib/tools/test/make_SUITE.erl @@ -20,7 +20,7 @@ -module(make_SUITE). -export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, - init_per_group/2,end_per_group/2, make_all/1, make_files/1]). + init_per_group/2,end_per_group/2, make_all/1, make_files/1, emake_opts/1]). 
-export([otp_6057_init/1, otp_6057_a/1, otp_6057_b/1, otp_6057_c/1, otp_6057_end/1]). @@ -40,7 +40,7 @@ suite() -> [{ct_hooks,[ts_install_cth]}]. all() -> - [make_all, make_files, {group, otp_6057}]. + [make_all, make_files, emake_opts, {group, otp_6057}]. groups() -> [{otp_6057,[],[otp_6057_a, otp_6057_b, @@ -86,6 +86,20 @@ make_files(Config) when is_list(Config) -> ensure_no_messages(), ok. +emake_opts(Config) when is_list(Config) -> + Current = prepare_data_dir(Config), + + %% prove that emake is used in opts instead of local Emakefile + Opts = [{emake, [test8, test9]}], + error = make:all(Opts), + error = make:files([test9], Opts), + "test8.beam" = ensure_exists([test8]), + "test9.beam" = ensure_exists([test9]), + "test5.S" = ensure_exists(["test5"],".S"), + + file:set_cwd(Current), + ensure_no_messages(), + ok. %% Moves to the data directory of this suite, clean it from any object %% files (*.jam for a JAM emulator). Returns the previous directory. diff --git a/lib/tools/vsn.mk b/lib/tools/vsn.mk index 07bc39f76e..f60da27c44 100644 --- a/lib/tools/vsn.mk +++ b/lib/tools/vsn.mk @@ -1 +1 @@ -TOOLS_VSN = 2.9 +TOOLS_VSN = 2.9.1 diff --git a/lib/typer/Makefile b/lib/typer/Makefile deleted file mode 100644 index bd1b6458a8..0000000000 --- a/lib/typer/Makefile +++ /dev/null @@ -1,44 +0,0 @@ -# -# %CopyrightBegin% -# -# Copyright Ericsson AB 2006-2016. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# %CopyrightEnd% -# -#============================================================================= -# -# File: lib/typer/Makefile -# Authors: Bingwen He, Tobias Lindahl, and Kostis Sagonas -# -#============================================================================= -include $(ERL_TOP)/make/target.mk -include $(ERL_TOP)/make/$(TARGET)/otp.mk - -# -# Macros -# - -SUB_DIRECTORIES = src doc/src - -include vsn.mk -VSN = $(TYPER_VSN) - -SPECIAL_TARGETS = - -# -# Default Subdir Targets -# -include $(ERL_TOP)/make/otp_subdir.mk - diff --git a/lib/typer/RELEASE_NOTES b/lib/typer/RELEASE_NOTES deleted file mode 100644 index d91a815ee9..0000000000 --- a/lib/typer/RELEASE_NOTES +++ /dev/null @@ -1,22 +0,0 @@ -============================================================================== - Major features, additions and changes between Typer versions - (in reversed chronological order) -============================================================================== - -Version 0.9 (in Erlang/OTP R14B02) ----------------------------------- - - Major rewrite; all code has been cleaned up and placed in one file. - The only reason why this is not version 1.0 yet is that there is no proper - documentation for typer which can be displayed in the www.erlang.org site. - - Added ability to receive the set of exported types and report unknown ones. - - Better handling of overloaded contracts; especially erroneous ones on which - typer does not crash anymore. - - Fixed problem that caused typer to hang when given a file whose module name - did not correspond to the file name. 
- - Added two undocumented options that may come very handy when trying to - understand why typer reports some particular set of types for the functions - in a module. These options are mainly for typer developers at this point, - but may become documented in some future version. - -Older versions --------------- diff --git a/lib/typer/doc/Makefile b/lib/typer/doc/Makefile deleted file mode 100644 index 1015ca78eb..0000000000 --- a/lib/typer/doc/Makefile +++ /dev/null @@ -1,40 +0,0 @@ -# -# %CopyrightBegin% -# -# Copyright Ericsson AB 2006-2016. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# %CopyrightEnd% -# -SHELL=/bin/sh - -include $(ERL_TOP)/make/target.mk -include $(ERL_TOP)/make/$(TARGET)/otp.mk - -clean: - -rm -f *.html edoc-info stylesheet.css erlang.png - -distclean: clean -realclean: clean - -# ---------------------------------------------------- -# Special Build Targets -# ---------------------------------------------------- - - - -# ---------------------------------------------------- -# Release Target -# ---------------------------------------------------- -include $(ERL_TOP)/make/otp_release_targets.mk diff --git a/lib/typer/doc/html/.gitignore b/lib/typer/doc/html/.gitignore deleted file mode 100644 index e69de29bb2..0000000000 --- a/lib/typer/doc/html/.gitignore +++ /dev/null diff --git a/lib/typer/doc/pdf/.gitignore b/lib/typer/doc/pdf/.gitignore deleted file mode 100644 index e69de29bb2..0000000000 --- a/lib/typer/doc/pdf/.gitignore +++ /dev/null diff --git a/lib/typer/doc/src/Makefile b/lib/typer/doc/src/Makefile deleted file mode 100644 index 3724a2e4d1..0000000000 --- a/lib/typer/doc/src/Makefile +++ /dev/null @@ -1,118 +0,0 @@ -# -# %CopyrightBegin% -# -# Copyright Ericsson AB 2006-2016. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# %CopyrightEnd% -# -include $(ERL_TOP)/make/target.mk -include $(ERL_TOP)/make/$(TARGET)/otp.mk - -# ---------------------------------------------------- -# Application version -# ---------------------------------------------------- -include ../../vsn.mk -VSN=$(TYPER_VSN) -APPLICATION=typer - -# ---------------------------------------------------- -# Release directory specification -# ---------------------------------------------------- -RELSYSDIR = $(RELEASE_PATH)/lib/$(APPLICATION)-$(VSN) - -# ---------------------------------------------------- -# Target Specs -# ---------------------------------------------------- -XML_APPLICATION_FILES = ref_man.xml -XML_REF3_FILES = - -XML_PART_FILES = part_notes.xml -XML_CHAPTER_FILES = notes.xml - -BOOK_FILES = book.xml - -XML_FILES = \ - $(BOOK_FILES) $(XML_CHAPTER_FILES) \ - $(XML_PART_FILES) $(XML_REF3_FILES) $(XML_APPLICATION_FILES) - -GIF_FILES = - -# ---------------------------------------------------- - -HTML_FILES = $(XML_APPLICATION_FILES:%.xml=$(HTMLDIR)/%.html) \ - $(XML_PART_FILES:%.xml=$(HTMLDIR)/%.html) - -INFO_FILE = ../../info -EXTRA_FILES = \ - $(DEFAULT_GIF_FILES) \ - $(DEFAULT_HTML_FILES) \ - $(XML_REF3_FILES:%.xml=$(HTMLDIR)/%.html) \ - $(XML_CHAPTER_FILES:%.xml=$(HTMLDIR)/%.html) - -MAN3_FILES = $(XML_REF3_FILES:%.xml=$(MAN3DIR)/%.3) - -HTML_REF_MAN_FILE = $(HTMLDIR)/index.html - -TOP_PDF_FILE = $(PDFDIR)/$(APPLICATION)-$(VSN).pdf - -# ---------------------------------------------------- -# FLAGS -# ---------------------------------------------------- -XML_FLAGS += - -# ---------------------------------------------------- -# Targets -# ---------------------------------------------------- -$(HTMLDIR)/%.gif: %.gif - $(INSTALL_DATA) $< $@ - -docs: pdf html man - -$(TOP_PDF_FILE): $(XML_FILES) - -pdf: $(TOP_PDF_FILE) - -html: gifs $(HTML_REF_MAN_FILE) - -man: $(MAN3_FILES) - -gifs: $(GIF_FILES:%=$(HTMLDIR)/%) - -debug opt: - -clean clean_docs: - rm -rf $(HTMLDIR)/* - rm -f $(MAN3DIR)/* - rm -f $(TOP_PDF_FILE) $(TOP_PDF_FILE:%.pdf=%.fo) - rm -f errs core *~ - -distclean: clean -realclean: clean - -# ---------------------------------------------------- -# Release Target -# ---------------------------------------------------- -include $(ERL_TOP)/make/otp_release_targets.mk - -release_docs_spec: docs - $(INSTALL_DIR) "$(RELSYSDIR)/doc/pdf" - $(INSTALL_DATA) $(TOP_PDF_FILE) "$(RELSYSDIR)/doc/pdf" - $(INSTALL_DIR) "$(RELSYSDIR)/doc/html" - $(INSTALL_DATA) $(HTMLDIR)/* \ - "$(RELSYSDIR)/doc/html" - $(INSTALL_DATA) $(INFO_FILE) "$(RELSYSDIR)" - - -release_spec: diff --git a/lib/typer/doc/src/book.xml b/lib/typer/doc/src/book.xml deleted file mode 100644 index 20da44ae04..0000000000 --- a/lib/typer/doc/src/book.xml +++ /dev/null @@ -1,42 +0,0 @@ -<?xml version="1.0" encoding="utf-8" ?> -<!DOCTYPE book SYSTEM "book.dtd"> - -<book xmlns:xi="http://www.w3.org/2001/XInclude"> - <header titlestyle="normal"> - <copyright> - <year>2006</year><year>2016</year> - <holder>Ericsson AB. All Rights Reserved.</holder> - </copyright> - <legalnotice> - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- See the License for the specific language governing permissions and - limitations under the License. - - </legalnotice> - - <title>TypEr</title> - <prepared></prepared> - <docno></docno> - <date></date> - <rev></rev> - </header> - <pagetext></pagetext> - <preamble> - </preamble> - <pagetext>TypEr</pagetext> - <applications> - <xi:include href="ref_man.xml"/> - </applications> - <releasenotes> - <xi:include href="notes.xml"/> - </releasenotes> -</book> - diff --git a/lib/typer/doc/src/fascicules.xml b/lib/typer/doc/src/fascicules.xml deleted file mode 100644 index b15610fa8b..0000000000 --- a/lib/typer/doc/src/fascicules.xml +++ /dev/null @@ -1,12 +0,0 @@ -<?xml version="1.0" encoding="utf-8" ?> -<!DOCTYPE fascicules SYSTEM "fascicules.dtd"> - -<fascicules> - <fascicule file="part_notes" href="part_notes_frame.html" entry="yes"> - Release Notes - </fascicule> - <fascicule file="" href="../../../../doc/print.html" entry="no"> - Off-Print - </fascicule> -</fascicules> - diff --git a/lib/typer/doc/src/notes.xml b/lib/typer/doc/src/notes.xml deleted file mode 100644 index 9ef5ca1c70..0000000000 --- a/lib/typer/doc/src/notes.xml +++ /dev/null @@ -1,111 +0,0 @@ -<?xml version="1.0" encoding="utf-8" ?> -<!DOCTYPE chapter SYSTEM "chapter.dtd"> - -<chapter> - <header> - <copyright> - <year>2014</year><year>2016</year> - <holder>Ericsson AB. All Rights Reserved.</holder> - </copyright> - <legalnotice> - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - </legalnotice> - - <title>TypEr Release Notes</title> - <prepared>otp_appnotes</prepared> - <docno>nil</docno> - <date>nil</date> - <rev>nil</rev> - <file>notes.xml</file> - </header> - <p>This document describes the changes made to TypEr.</p> - -<section><title>TypEr 0.9.11</title> - - <section><title>Improvements and New Features</title> - <list> - <item> - <p> - Internal changes</p> - <p> - Own Id: OTP-13551</p> - </item> - </list> - </section> - -</section> - -<section><title>TypEr 0.9.10</title> - - <section><title>Fixed Bugs and Malfunctions</title> - <list> - <item> - <p>Fix a bug that could result in a crash when printing - warnings onto standard error. </p> - <p> - Own Id: OTP-13010</p> - </item> - </list> - </section> - -</section> - -<section><title>TypEr 0.9.9</title> - - <section><title>Fixed Bugs and Malfunctions</title> - <list> - <item> - <p> Properly extract annotations from core code. </p> - <p> - Own Id: OTP-12727</p> - </item> - </list> - </section> - -</section> - -<section><title>TypEr 0.9.8</title> - - <section><title>Fixed Bugs and Malfunctions</title> - <list> - <item> - <p> The name of a compiler option has been fixed in the - Makefile. 
</p> - <p> - Own Id: OTP-11996</p> - </item> - </list> - </section> - -</section> - -<section><title>TypEr 0.9.7</title> - - <section><title>Fixed Bugs and Malfunctions</title> - <list> - <item> - <p> - Added initial documentation framework for TypEr.</p> - <p> - Own Id: OTP-11860</p> - </item> - </list> - </section> - -</section> - - - -</chapter> - diff --git a/lib/typer/doc/src/part_notes.xml b/lib/typer/doc/src/part_notes.xml deleted file mode 100644 index 3234f0903e..0000000000 --- a/lib/typer/doc/src/part_notes.xml +++ /dev/null @@ -1,36 +0,0 @@ -<?xml version="1.0" encoding="utf-8" ?> -<!DOCTYPE part SYSTEM "part.dtd"> - -<part xmlns:xi="http://www.w3.org/2001/XInclude"> - <header> - <copyright> - <year>2006</year><year>2016</year> - <holder>Ericsson AB. All Rights Reserved.</holder> - </copyright> - <legalnotice> - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - </legalnotice> - - <title>TypEr Release Notes</title> - <prepared></prepared> - <docno></docno> - <date></date> - <rev></rev> - </header> - <description> - <p><em>TypEr</em></p> - </description> - <xi:include href="notes.xml"/> -</part> - diff --git a/lib/typer/doc/src/ref_man.xml b/lib/typer/doc/src/ref_man.xml deleted file mode 100644 index c793207443..0000000000 --- a/lib/typer/doc/src/ref_man.xml +++ /dev/null @@ -1,36 +0,0 @@ -<?xml version="1.0" encoding="utf-8" ?> -<!DOCTYPE application SYSTEM "application.dtd"> - -<application xmlns:xi="http://www.w3.org/2001/XInclude"> - <header> - <copyright> - <year>2014</year><year>2016</year> - <holder>Ericsson AB. All Rights Reserved.</holder> - </copyright> - <legalnotice> - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - </legalnotice> - - <title>TypEr</title> - <prepared></prepared> - <docno></docno> - <date></date> - <rev></rev> - <file>ref_man.xml</file> - </header> - <description> - </description> - <xi:include href="typer_app.xml"/> -</application> - diff --git a/lib/typer/doc/src/typer_app.xml b/lib/typer/doc/src/typer_app.xml deleted file mode 100644 index d52df5d0da..0000000000 --- a/lib/typer/doc/src/typer_app.xml +++ /dev/null @@ -1,44 +0,0 @@ -<?xml version="1.0" encoding="utf-8" ?> -<!DOCTYPE appref SYSTEM "appref.dtd"> - -<appref> - <header> - <copyright> - <year>2014</year><year>2016</year> - <holder>Ericsson AB. All Rights Reserved.</holder> - </copyright> - <legalnotice> - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - </legalnotice> - - <title>TypEr</title> - <prepared></prepared> - <responsible></responsible> - <docno></docno> - <approved></approved> - <checked></checked> - <date></date> - <rev></rev> - <file>typer.xml</file> - </header> - <app>TypEr</app> - <appsummary>The TypEr Application</appsummary> - <description> - <p>An Erlang/OTP application that shows type information - for Erlang modules to the user. Additionally, it can - annotate the code of files with such type information.</p> - </description> - -</appref> - diff --git a/lib/typer/ebin/.gitignore b/lib/typer/ebin/.gitignore deleted file mode 100644 index e69de29bb2..0000000000 --- a/lib/typer/ebin/.gitignore +++ /dev/null diff --git a/lib/typer/info b/lib/typer/info deleted file mode 100644 index 5145fbcfff..0000000000 --- a/lib/typer/info +++ /dev/null @@ -1,2 +0,0 @@ -group: tools -short: TypEr diff --git a/lib/typer/src/Makefile b/lib/typer/src/Makefile deleted file mode 100644 index 6c5d8b0726..0000000000 --- a/lib/typer/src/Makefile +++ /dev/null @@ -1,111 +0,0 @@ -# -# %CopyrightBegin% -# -# Copyright Ericsson AB 2006-2016. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# %CopyrightEnd% -# -#============================================================================= -# -# File: lib/typer/src/Makefile -# Authors: Kostis Sagonas -# -#============================================================================= - -include $(ERL_TOP)/make/target.mk -include $(ERL_TOP)/make/$(TARGET)/otp.mk - -# ---------------------------------------------------- -# Application version -# ---------------------------------------------------- -include ../vsn.mk -VSN=$(TYPER_VSN) - -# ---------------------------------------------------- -# Release directory specification -# ---------------------------------------------------- -RELSYSDIR = $(RELEASE_PATH)/lib/typer-$(VSN) - -# ---------------------------------------------------- -# Orientation information -- find dialyzer's dir -# ---------------------------------------------------- -DIALYZER_DIR = $(ERL_TOP)/lib/dialyzer - -# ---------------------------------------------------- -# Target Specs -# ---------------------------------------------------- -MODULES = typer - -HRL_FILES= -ERL_FILES= $(MODULES:%=%.erl) -INSTALL_FILES= $(MODULES:%=$(EBIN)/%.$(EMULATOR)) $(APP_TARGET) $(APPUP_TARGET) -TARGET_FILES= $(INSTALL_FILES) - -APP_FILE= typer.app -APP_SRC= $(APP_FILE).src -APP_TARGET= $(EBIN)/$(APP_FILE) - -APPUP_FILE= typer.appup -APPUP_SRC= $(APPUP_FILE).src -APPUP_TARGET= $(EBIN)/$(APPUP_FILE) - -# ---------------------------------------------------- -# FLAGS -# ---------------------------------------------------- -ERL_COMPILE_FLAGS += +warn_export_vars +warn_untyped_record +warn_missing_spec - -# ---------------------------------------------------- -# Targets -# ---------------------------------------------------- - -debug opt: $(TARGET_FILES) - -docs: - -clean: - rm -f $(TARGET_FILES) - rm -f core - -# ---------------------------------------------------- -# Special Build Targets -# ---------------------------------------------------- - -$(EBIN)/typer.$(EMULATOR): typer.erl ../vsn.mk Makefile - $(erlc_verbose)erlc -W $(ERL_COMPILE_FLAGS) -DVSN="\"v$(VSN)\"" -o$(EBIN) typer.erl - -$(APP_TARGET): $(APP_SRC) ../vsn.mk - $(vsn_verbose)sed -e 's;%VSN%;$(VSN);' $< > $@ - -$(APPUP_TARGET): $(APPUP_SRC) ../vsn.mk - $(vsn_verbose)sed -e 's;%VSN%;$(VSN);' $< > $@ - -# --------------------------------------------------------------------- -# dependencies -# --------------------------------------------------------------------- - - -# ---------------------------------------------------- -# Release Target -# ---------------------------------------------------- -include $(ERL_TOP)/make/otp_release_targets.mk - -release_spec: opt - $(INSTALL_DIR) "$(RELSYSDIR)/src" - $(INSTALL_DATA) $(ERL_FILES) $(HRL_FILES) $(YRL_FILES) \ - "$(RELSYSDIR)/src" - $(INSTALL_DIR) "$(RELSYSDIR)/ebin" - $(INSTALL_DATA) $(INSTALL_FILES) "$(RELSYSDIR)/ebin" - -release_docs_spec: diff --git a/lib/typer/src/typer.app.src b/lib/typer/src/typer.app.src deleted file mode 100644 index 974091b44c..0000000000 --- a/lib/typer/src/typer.app.src +++ /dev/null @@ -1,11 +0,0 @@ -% This is an -*- erlang -*- file. - -{application, typer, - [{description, "TYPe annotator for ERlang programs, version %VSN%"}, - {vsn, "%VSN%"}, - {modules, [typer]}, - {registered, []}, - {applications, [compiler, dialyzer, hipe, kernel, stdlib]}, - {env, []}, - {runtime_dependencies, ["stdlib-2.0","kernel-3.0","hipe-3.10.3","erts-6.0", - "dialyzer-2.7","compiler-5.0"]}]}. 
diff --git a/lib/typer/src/typer.erl b/lib/typer/src/typer.erl deleted file mode 100644 index 3bff546243..0000000000 --- a/lib/typer/src/typer.erl +++ /dev/null @@ -1,1124 +0,0 @@ -%% -*- erlang-indent-level: 2 -*- -%% -%% Licensed under the Apache License, Version 2.0 (the "License"); -%% you may not use this file except in compliance with the License. -%% You may obtain a copy of the License at -%% -%% http://www.apache.org/licenses/LICENSE-2.0 -%% -%% Unless required by applicable law or agreed to in writing, software -%% distributed under the License is distributed on an "AS IS" BASIS, -%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -%% See the License for the specific language governing permissions and -%% limitations under the License. - -%%----------------------------------------------------------------------- -%% File : typer.erl -%% Author(s) : The first version of typer was written by Bingwen He -%% with guidance from Kostis Sagonas and Tobias Lindahl. -%% Since June 2008 typer is maintained by Kostis Sagonas. -%% Description : An Erlang/OTP application that shows type information -%% for Erlang modules to the user. Additionally, it can -%% annotate the code of files with such type information. -%%----------------------------------------------------------------------- - --module(typer). - --export([start/0]). - -%%----------------------------------------------------------------------- - --define(SHOW, show). --define(SHOW_EXPORTED, show_exported). --define(ANNOTATE, annotate). --define(ANNOTATE_INC_FILES, annotate_inc_files). - --type mode() :: ?SHOW | ?SHOW_EXPORTED | ?ANNOTATE | ?ANNOTATE_INC_FILES. - -%%----------------------------------------------------------------------- - --type files() :: [file:filename()]. --type callgraph() :: dialyzer_callgraph:callgraph(). --type codeserver() :: dialyzer_codeserver:codeserver(). --type plt() :: dialyzer_plt:plt(). - --record(analysis, - {mode :: mode() | 'undefined', - macros = [] :: [{atom(), term()}], - includes = [] :: files(), - codeserver = dialyzer_codeserver:new():: codeserver(), - callgraph = dialyzer_callgraph:new() :: callgraph(), - files = [] :: files(), % absolute names - plt = none :: 'none' | file:filename(), - no_spec = false :: boolean(), - show_succ = false :: boolean(), - %% For choosing between specs or edoc @spec comments - edoc = false :: boolean(), - %% Files in 'fms' are compilable with option 'to_pp'; we keep them - %% as {FileName, ModuleName} in case the ModuleName is different - fms = [] :: [{file:filename(), module()}], - ex_func = map__new() :: map_dict(), - record = map__new() :: map_dict(), - func = map__new() :: map_dict(), - inc_func = map__new() :: map_dict(), - trust_plt = dialyzer_plt:new() :: plt()}). --type analysis() :: #analysis{}. - --record(args, {files = [] :: files(), - files_r = [] :: files(), - trusted = [] :: files()}). --type args() :: #args{}. - -%%-------------------------------------------------------------------- - --spec start() -> no_return(). 
- -start() -> - {Args, Analysis} = process_cl_args(), - %% io:format("Args: ~p\n", [Args]), - %% io:format("Analysis: ~p\n", [Analysis]), - Timer = dialyzer_timing:init(false), - TrustedFiles = filter_fd(Args#args.trusted, [], fun is_erl_file/1), - Analysis2 = extract(Analysis, TrustedFiles), - All_Files = get_all_files(Args), - %% io:format("All_Files: ~p\n", [All_Files]), - Analysis3 = Analysis2#analysis{files = All_Files}, - Analysis4 = collect_info(Analysis3), - %% io:format("Final: ~p\n", [Analysis4#analysis.fms]), - TypeInfo = get_type_info(Analysis4), - dialyzer_timing:stop(Timer), - show_or_annotate(TypeInfo), - %% io:format("\nTyper analysis finished\n"), - erlang:halt(0). - -%%-------------------------------------------------------------------- - --spec extract(analysis(), files()) -> analysis(). - -extract(#analysis{macros = Macros, - includes = Includes, - trust_plt = TrustPLT} = Analysis, TrustedFiles) -> - %% io:format("--- Extracting trusted typer_info... "), - Ds = [{d, Name, Value} || {Name, Value} <- Macros], - CodeServer = dialyzer_codeserver:new(), - Fun = - fun(File, CS) -> - %% We include one more dir; the one above the one we are trusting - %% E.g, for /home/tests/typer_ann/test.ann.erl, we should include - %% /home/tests/ rather than /home/tests/typer_ann/ - AllIncludes = [filename:dirname(filename:dirname(File)) | Includes], - Is = [{i, Dir} || Dir <- AllIncludes], - CompOpts = dialyzer_utils:src_compiler_opts() ++ Is ++ Ds, - case dialyzer_utils:get_abstract_code_from_src(File, CompOpts) of - {ok, AbstractCode} -> - case dialyzer_utils:get_record_and_type_info(AbstractCode) of - {ok, RecDict} -> - Mod = list_to_atom(filename:basename(File, ".erl")), - case dialyzer_utils:get_spec_info(Mod, AbstractCode, RecDict) of - {ok, SpecDict, CbDict} -> - CS1 = dialyzer_codeserver:store_temp_records(Mod, RecDict, CS), - dialyzer_codeserver:store_temp_contracts(Mod, SpecDict, CbDict, CS1); - {error, Reason} -> compile_error([Reason]) - end; - {error, Reason} -> compile_error([Reason]) - end; - {error, Reason} -> compile_error(Reason) - end - end, - CodeServer1 = lists:foldl(Fun, CodeServer, TrustedFiles), - %% Process remote types - NewCodeServer = - try - NewRecords = dialyzer_codeserver:get_temp_records(CodeServer1), - NewExpTypes = dialyzer_codeserver:get_temp_exported_types(CodeServer1), - case sets:size(NewExpTypes) of 0 -> ok end, - OldRecords = dialyzer_plt:get_types(TrustPLT), % XXX change to the PLT? - MergedRecords = dialyzer_utils:merge_records(NewRecords, OldRecords), - CodeServer2 = dialyzer_codeserver:set_temp_records(MergedRecords, CodeServer1), - CodeServer3 = dialyzer_codeserver:finalize_exported_types(NewExpTypes, CodeServer2), - {CodeServer4, RecordDict} = - dialyzer_utils:process_record_remote_types(CodeServer3), - dialyzer_contracts:process_contract_remote_types(CodeServer4, RecordDict) - catch - throw:{error, ErrorMsg} -> - compile_error(ErrorMsg) - end, - %% Create TrustPLT - Contracts = dialyzer_codeserver:get_contracts(NewCodeServer), - Modules = dict:fetch_keys(Contracts), - FoldFun = - fun(Module, TmpPlt) -> - {ok, ModuleContracts} = dict:find(Module, Contracts), - SpecList = [{MFA, Contract} - || {MFA, {_FileLine, Contract}} <- maps:to_list(ModuleContracts)], - dialyzer_plt:insert_contract_list(TmpPlt, SpecList) - end, - NewTrustPLT = lists:foldl(FoldFun, TrustPLT, Modules), - Analysis#analysis{trust_plt = NewTrustPLT}. - -%%-------------------------------------------------------------------- - --spec get_type_info(analysis()) -> analysis(). 
- -get_type_info(#analysis{callgraph = CallGraph, - trust_plt = TrustPLT, - codeserver = CodeServer} = Analysis) -> - StrippedCallGraph = remove_external(CallGraph, TrustPLT), - %% io:format("--- Analyzing callgraph... "), - try - NewMiniPlt = dialyzer_succ_typings:analyze_callgraph(StrippedCallGraph, - TrustPLT, - CodeServer), - NewPlt = dialyzer_plt:restore_full_plt(NewMiniPlt), - Analysis#analysis{callgraph = StrippedCallGraph, trust_plt = NewPlt} - catch - error:What -> - fatal_error(io_lib:format("Analysis failed with message: ~p", - [{What, erlang:get_stacktrace()}])); - throw:{dialyzer_succ_typing_error, Msg} -> - fatal_error(io_lib:format("Analysis failed with message: ~s", [Msg])) - end. - --spec remove_external(callgraph(), plt()) -> callgraph(). - -remove_external(CallGraph, PLT) -> - {StrippedCG0, Ext} = dialyzer_callgraph:remove_external(CallGraph), - case get_external(Ext, PLT) of - [] -> ok; - Externals -> - msg(io_lib:format(" Unknown functions: ~p\n", [lists:usort(Externals)])), - ExtTypes = rcv_ext_types(), - case ExtTypes of - [] -> ok; - _ -> msg(io_lib:format(" Unknown types: ~p\n", [ExtTypes])) - end - end, - StrippedCG0. - --spec get_external([{mfa(), mfa()}], plt()) -> [mfa()]. - -get_external(Exts, Plt) -> - Fun = fun ({_From, To = {M, F, A}}, Acc) -> - case dialyzer_plt:contains_mfa(Plt, To) of - false -> - case erl_bif_types:is_known(M, F, A) of - true -> Acc; - false -> [To|Acc] - end; - true -> Acc - end - end, - lists:foldl(Fun, [], Exts). - -%%-------------------------------------------------------------------- -%% Showing type information or annotating files with such information. -%%-------------------------------------------------------------------- - --define(TYPER_ANN_DIR, "typer_ann"). - --type line() :: non_neg_integer(). --type fa() :: {atom(), arity()}. --type func_info() :: {line(), atom(), arity()}. - --record(info, {records = maps:new() :: erl_types:type_table(), - functions = [] :: [func_info()], - types = map__new() :: map_dict(), - edoc = false :: boolean()}). --record(inc, {map = map__new() :: map_dict(), filter = [] :: files()}). --type inc() :: #inc{}. - --spec show_or_annotate(analysis()) -> 'ok'. - -show_or_annotate(#analysis{mode = Mode, fms = Files} = Analysis) -> - case Mode of - ?SHOW -> show(Analysis); - ?SHOW_EXPORTED -> show(Analysis); - ?ANNOTATE -> - Fun = fun ({File, Module}) -> - Info = get_final_info(File, Module, Analysis), - write_typed_file(File, Info) - end, - lists:foreach(Fun, Files); - ?ANNOTATE_INC_FILES -> - IncInfo = write_and_collect_inc_info(Analysis), - write_inc_files(IncInfo) - end. - -write_and_collect_inc_info(Analysis) -> - Fun = fun ({File, Module}, Inc) -> - Info = get_final_info(File, Module, Analysis), - write_typed_file(File, Info), - IncFuns = get_functions(File, Analysis), - collect_imported_functions(IncFuns, Info#info.types, Inc) - end, - NewInc = lists:foldl(Fun, #inc{}, Analysis#analysis.fms), - clean_inc(NewInc). - -write_inc_files(Inc) -> - Fun = - fun (File) -> - Val = map__lookup(File, Inc#inc.map), - %% Val is function with its type info - %% in form [{{Line,F,A},Type}] - Functions = [Key || {Key, _} <- Val], - Val1 = [{{F,A},Type} || {{_Line,F,A},Type} <- Val], - Info = #info{types = map__from_list(Val1), - records = maps:new(), - %% Note we need to sort functions here! 
- functions = lists:keysort(1, Functions)}, - %% io:format("Types ~p\n", [Info#info.types]), - %% io:format("Functions ~p\n", [Info#info.functions]), - %% io:format("Records ~p\n", [Info#info.records]), - write_typed_file(File, Info) - end, - lists:foreach(Fun, dict:fetch_keys(Inc#inc.map)). - -show(Analysis) -> - Fun = fun ({File, Module}) -> - Info = get_final_info(File, Module, Analysis), - show_type_info(File, Info) - end, - lists:foreach(Fun, Analysis#analysis.fms). - -get_final_info(File, Module, Analysis) -> - Records = get_records(File, Analysis), - Types = get_types(Module, Analysis, Records), - Functions = get_functions(File, Analysis), - Edoc = Analysis#analysis.edoc, - #info{records = Records, functions = Functions, types = Types, edoc = Edoc}. - -collect_imported_functions(Functions, Types, Inc) -> - %% Coming from other sourses, including: - %% FIXME: How to deal with yecc-generated file???? - %% --.yrl (yecc-generated file)??? - %% -- yeccpre.hrl (yecc-generated file)??? - %% -- other cases - Fun = fun ({File, _} = Obj, I) -> - case is_yecc_gen(File, I) of - {true, NewI} -> NewI; - {false, NewI} -> - check_imported_functions(Obj, NewI, Types) - end - end, - lists:foldl(Fun, Inc, Functions). - --spec is_yecc_gen(file:filename(), inc()) -> {boolean(), inc()}. - -is_yecc_gen(File, #inc{filter = Fs} = Inc) -> - case lists:member(File, Fs) of - true -> {true, Inc}; - false -> - case filename:extension(File) of - ".yrl" -> - Rootname = filename:rootname(File, ".yrl"), - Obj = Rootname ++ ".erl", - case lists:member(Obj, Fs) of - true -> {true, Inc}; - false -> - NewInc = Inc#inc{filter = [Obj|Fs]}, - {true, NewInc} - end; - _ -> - case filename:basename(File) of - "yeccpre.hrl" -> {true, Inc}; - _ -> {false, Inc} - end - end - end. - -check_imported_functions({File, {Line, F, A}}, Inc, Types) -> - IncMap = Inc#inc.map, - FA = {F, A}, - Type = get_type_info(FA, Types), - case map__lookup(File, IncMap) of - none -> %% File is not added. Add it - Obj = {File,[{FA, {Line, Type}}]}, - NewMap = map__insert(Obj, IncMap), - Inc#inc{map = NewMap}; - Val -> %% File is already in. Check. - case lists:keyfind(FA, 1, Val) of - false -> - %% Function is not in; add it - Obj = {File, Val ++ [{FA, {Line, Type}}]}, - NewMap = map__insert(Obj, IncMap), - Inc#inc{map = NewMap}; - Type -> - %% Function is in and with same type - Inc; - _ -> - %% Function is in but with diff type - inc_warning(FA, File), - Elem = lists:keydelete(FA, 1, Val), - NewMap = case Elem of - [] -> map__remove(File, IncMap); - _ -> map__insert({File, Elem}, IncMap) - end, - Inc#inc{map = NewMap} - end - end. - -inc_warning({F, A}, File) -> - io:format(" ***Warning: Skip function ~p/~p ", [F, A]), - io:format("in file ~p because of inconsistent type\n", [File]). - -clean_inc(Inc) -> - Inc1 = remove_yecc_generated_file(Inc), - normalize_obj(Inc1). - -remove_yecc_generated_file(#inc{filter = Filter} = Inc) -> - Fun = fun (Key, #inc{map = Map} = I) -> - I#inc{map = map__remove(Key, Map)} - end, - lists:foldl(Fun, Inc, Filter). - -normalize_obj(TmpInc) -> - Fun = fun (Key, Val, Inc) -> - NewVal = [{{Line,F,A},Type} || {{F,A},{Line,Type}} <- Val], - map__insert({Key, NewVal}, Inc) - end, - TmpInc#inc{map = map__fold(Fun, map__new(), TmpInc#inc.map)}. - -get_records(File, Analysis) -> - map__lookup(File, Analysis#analysis.record). 
- -get_types(Module, Analysis, Records) -> - TypeInfoPlt = Analysis#analysis.trust_plt, - TypeInfo = - case dialyzer_plt:lookup_module(TypeInfoPlt, Module) of - none -> []; - {value, List} -> List - end, - CodeServer = Analysis#analysis.codeserver, - TypeInfoList = - case Analysis#analysis.show_succ of - true -> - [convert_type_info(I) || I <- TypeInfo]; - false -> - [get_type(I, CodeServer, Records) || I <- TypeInfo] - end, - map__from_list(TypeInfoList). - -convert_type_info({{_M, F, A}, Range, Arg}) -> - {{F, A}, {Range, Arg}}. - -get_type({{M, F, A} = MFA, Range, Arg}, CodeServer, Records) -> - case dialyzer_codeserver:lookup_mfa_contract(MFA, CodeServer) of - error -> - {{F, A}, {Range, Arg}}; - {ok, {_FileLine, Contract, _Xtra}} -> - Sig = erl_types:t_fun(Arg, Range), - case dialyzer_contracts:check_contract(Contract, Sig) of - ok -> {{F, A}, {contract, Contract}}; - {error, {extra_range, _, _}} -> - {{F, A}, {contract, Contract}}; - {error, {overlapping_contract, []}} -> - {{F, A}, {contract, Contract}}; - {error, invalid_contract} -> - CString = dialyzer_contracts:contract_to_string(Contract), - SigString = dialyzer_utils:format_sig(Sig, Records), - Msg = io_lib:format("Error in contract of function ~w:~w/~w\n" - "\t The contract is: " ++ CString ++ "\n" ++ - "\t but the inferred signature is: ~s", - [M, F, A, SigString]), - fatal_error(Msg); - {error, ErrorStr} when is_list(ErrorStr) -> % ErrorStr is a string() - Msg = io_lib:format("Error in contract of function ~w:~w/~w: ~s", - [M, F, A, ErrorStr]), - fatal_error(Msg) - end - end. - -get_functions(File, Analysis) -> - case Analysis#analysis.mode of - ?SHOW -> - Funcs = map__lookup(File, Analysis#analysis.func), - Inc_Funcs = map__lookup(File, Analysis#analysis.inc_func), - remove_module_info(Funcs) ++ normalize_incFuncs(Inc_Funcs); - ?SHOW_EXPORTED -> - Ex_Funcs = map__lookup(File, Analysis#analysis.ex_func), - remove_module_info(Ex_Funcs); - ?ANNOTATE -> - Funcs = map__lookup(File, Analysis#analysis.func), - remove_module_info(Funcs); - ?ANNOTATE_INC_FILES -> - map__lookup(File, Analysis#analysis.inc_func) - end. - -normalize_incFuncs(Functions) -> - [FunInfo || {_FileName, FunInfo} <- Functions]. - --spec remove_module_info([func_info()]) -> [func_info()]. - -remove_module_info(FunInfoList) -> - F = fun ({_,module_info,0}) -> false; - ({_,module_info,1}) -> false; - ({Line,F,A}) when is_integer(Line), is_atom(F), is_integer(A) -> true - end, - lists:filter(F, FunInfoList). 
- -write_typed_file(File, Info) -> - io:format(" Processing file: ~p\n", [File]), - Dir = filename:dirname(File), - RootName = filename:basename(filename:rootname(File)), - Ext = filename:extension(File), - TyperAnnDir = filename:join(Dir, ?TYPER_ANN_DIR), - TmpNewFilename = lists:concat([RootName, ".ann", Ext]), - NewFileName = filename:join(TyperAnnDir, TmpNewFilename), - case file:make_dir(TyperAnnDir) of - {error, Reason} -> - case Reason of - eexist -> %% TypEr dir exists; remove old typer files if they exist - case file:delete(NewFileName) of - ok -> ok; - {error, enoent} -> ok; - {error, _} -> - Msg = io_lib:format("Error in deleting file ~s\n", [NewFileName]), - fatal_error(Msg) - end, - write_typed_file(File, Info, NewFileName); - enospc -> - Msg = io_lib:format("Not enough space in ~p\n", [Dir]), - fatal_error(Msg); - eacces -> - Msg = io_lib:format("No write permission in ~p\n", [Dir]), - fatal_error(Msg); - _ -> - Msg = io_lib:format("Unhandled error ~s when writing ~p\n", - [Reason, Dir]), - fatal_error(Msg) - end; - ok -> %% Typer dir does NOT exist - write_typed_file(File, Info, NewFileName) - end. - -write_typed_file(File, Info, NewFileName) -> - {ok, Binary} = file:read_file(File), - Chars = binary_to_list(Binary), - write_typed_file(Chars, NewFileName, Info, 1, []), - io:format(" Saved as: ~p\n", [NewFileName]). - -write_typed_file(Chars, File, #info{functions = []}, _LNo, _Acc) -> - ok = file:write_file(File, list_to_binary(Chars), [append]); -write_typed_file([Ch|Chs] = Chars, File, Info, LineNo, Acc) -> - [{Line,F,A}|RestFuncs] = Info#info.functions, - case Line of - 1 -> %% This will happen only for inc files - ok = raw_write(F, A, Info, File, []), - NewInfo = Info#info{functions = RestFuncs}, - NewAcc = [], - write_typed_file(Chars, File, NewInfo, Line, NewAcc); - _ -> - case Ch of - 10 -> - NewLineNo = LineNo + 1, - {NewInfo, NewAcc} = - case NewLineNo of - Line -> - ok = raw_write(F, A, Info, File, [Ch|Acc]), - {Info#info{functions = RestFuncs}, []}; - _ -> - {Info, [Ch|Acc]} - end, - write_typed_file(Chs, File, NewInfo, NewLineNo, NewAcc); - _ -> - write_typed_file(Chs, File, Info, LineNo, [Ch|Acc]) - end - end. - -raw_write(F, A, Info, File, Content) -> - TypeInfo = get_type_string(F, A, Info, file), - ContentList = lists:reverse(Content) ++ TypeInfo ++ "\n", - ContentBin = list_to_binary(ContentList), - file:write_file(File, ContentBin, [append]). - -get_type_string(F, A, Info, Mode) -> - Type = get_type_info({F,A}, Info#info.types), - TypeStr = - case Type of - {contract, C} -> - dialyzer_contracts:contract_to_string(C); - {RetType, ArgType} -> - Sig = erl_types:t_fun(ArgType, RetType), - dialyzer_utils:format_sig(Sig, Info#info.records) - end, - case Info#info.edoc of - false -> - case {Mode, Type} of - {file, {contract, _}} -> ""; - _ -> - Prefix = lists:concat(["-spec ", erl_types:atom_to_string(F)]), - lists:concat([Prefix, TypeStr, "."]) - end; - true -> - Prefix = lists:concat(["%% @spec ", F]), - lists:concat([Prefix, TypeStr, "."]) - end. - -show_type_info(File, Info) -> - io:format("\n%% File: ~p\n%% ", [File]), - OutputString = lists:concat(["~.", length(File)+8, "c~n"]), - io:fwrite(OutputString, [$-]), - Fun = fun ({_LineNo, F, A}) -> - TypeInfo = get_type_string(F, A, Info, show), - io:format("~s\n", [TypeInfo]) - end, - lists:foreach(Fun, Info#info.functions). 
- -get_type_info(Func, Types) -> - case map__lookup(Func, Types) of - none -> - %% Note: Typeinfo of any function should exist in - %% the result offered by dialyzer, otherwise there - %% *must* be something wrong with the analysis - Msg = io_lib:format("No type info for function: ~p\n", [Func]), - fatal_error(Msg); - {contract, _Fun} = C -> C; - {_RetType, _ArgType} = RA -> RA - end. - -%%-------------------------------------------------------------------- -%% Processing of command-line options and arguments. -%%-------------------------------------------------------------------- - --spec process_cl_args() -> {args(), analysis()}. - -process_cl_args() -> - ArgList = init:get_plain_arguments(), - %% io:format("Args is ~p\n", [ArgList]), - {Args, Analysis} = analyze_args(ArgList, #args{}, #analysis{}), - %% if the mode has not been set, set it to the default mode (show) - {Args, case Analysis#analysis.mode of - undefined -> Analysis#analysis{mode = ?SHOW}; - Mode when is_atom(Mode) -> Analysis - end}. - -analyze_args([], Args, Analysis) -> - {Args, Analysis}; -analyze_args(ArgList, Args, Analysis) -> - {Result, Rest} = cl(ArgList), - {NewArgs, NewAnalysis} = analyze_result(Result, Args, Analysis), - analyze_args(Rest, NewArgs, NewAnalysis). - -cl(["-h"|_]) -> help_message(); -cl(["--help"|_]) -> help_message(); -cl(["-v"|_]) -> version_message(); -cl(["--version"|_]) -> version_message(); -cl(["--edoc"|Opts]) -> {edoc, Opts}; -cl(["--show"|Opts]) -> {{mode, ?SHOW}, Opts}; -cl(["--show_exported"|Opts]) -> {{mode, ?SHOW_EXPORTED}, Opts}; -cl(["--show-exported"|Opts]) -> {{mode, ?SHOW_EXPORTED}, Opts}; -cl(["--show_success_typings"|Opts]) -> {show_succ, Opts}; -cl(["--show-success-typings"|Opts]) -> {show_succ, Opts}; -cl(["--annotate"|Opts]) -> {{mode, ?ANNOTATE}, Opts}; -cl(["--annotate-inc-files"|Opts]) -> {{mode, ?ANNOTATE_INC_FILES}, Opts}; -cl(["--no_spec"|Opts]) -> {no_spec, Opts}; -cl(["--plt",Plt|Opts]) -> {{plt, Plt}, Opts}; -cl(["-D"++Def|Opts]) -> - case Def of - "" -> fatal_error("no variable name specified after -D"); - _ -> - DefPair = process_def_list(re:split(Def, "=", [{return, list}])), - {{def, DefPair}, Opts} - end; -cl(["-I",Dir|Opts]) -> {{inc, Dir}, Opts}; -cl(["-I"++Dir|Opts]) -> - case Dir of - "" -> fatal_error("no include directory specified after -I"); - _ -> {{inc, Dir}, Opts} - end; -cl(["-T"|Opts]) -> - {Files, RestOpts} = dialyzer_cl_parse:collect_args(Opts), - case Files of - [] -> fatal_error("no file or directory specified after -T"); - [_|_] -> {{trusted, Files}, RestOpts} - end; -cl(["-r"|Opts]) -> - {Files, RestOpts} = dialyzer_cl_parse:collect_args(Opts), - {{files_r, Files}, RestOpts}; -cl(["-pa",Dir|Opts]) -> {{pa,Dir}, Opts}; -cl(["-pz",Dir|Opts]) -> {{pz,Dir}, Opts}; -cl(["-"++H|_]) -> fatal_error("unknown option -"++H); -cl(Opts) -> - {Files, RestOpts} = dialyzer_cl_parse:collect_args(Opts), - {{files, Files}, RestOpts}. - -process_def_list(L) -> - case L of - [Name, Value] -> - {ok, Tokens, _} = erl_scan:string(Value ++ "."), - {ok, ErlValue} = erl_parse:parse_term(Tokens), - {list_to_atom(Name), ErlValue}; - [Name] -> - {list_to_atom(Name), true} - end. 
- -%% Get information about files that the user trusts and wants to analyze -analyze_result({files, Val}, Args, Analysis) -> - NewVal = Args#args.files ++ Val, - {Args#args{files = NewVal}, Analysis}; -analyze_result({files_r, Val}, Args, Analysis) -> - NewVal = Args#args.files_r ++ Val, - {Args#args{files_r = NewVal}, Analysis}; -analyze_result({trusted, Val}, Args, Analysis) -> - NewVal = Args#args.trusted ++ Val, - {Args#args{trusted = NewVal}, Analysis}; -analyze_result(edoc, Args, Analysis) -> - {Args, Analysis#analysis{edoc = true}}; -%% Get useful information for actual analysis -analyze_result({mode, Mode}, Args, Analysis) -> - case Analysis#analysis.mode of - undefined -> {Args, Analysis#analysis{mode = Mode}}; - OldMode -> mode_error(OldMode, Mode) - end; -analyze_result({def, Val}, Args, Analysis) -> - NewVal = Analysis#analysis.macros ++ [Val], - {Args, Analysis#analysis{macros = NewVal}}; -analyze_result({inc, Val}, Args, Analysis) -> - NewVal = Analysis#analysis.includes ++ [Val], - {Args, Analysis#analysis{includes = NewVal}}; -analyze_result({plt, Plt}, Args, Analysis) -> - {Args, Analysis#analysis{plt = Plt}}; -analyze_result(show_succ, Args, Analysis) -> - {Args, Analysis#analysis{show_succ = true}}; -analyze_result(no_spec, Args, Analysis) -> - {Args, Analysis#analysis{no_spec = true}}; -analyze_result({pa, Dir}, Args, Analysis) -> - true = code:add_patha(Dir), - {Args, Analysis}; -analyze_result({pz, Dir}, Args, Analysis) -> - true = code:add_pathz(Dir), - {Args, Analysis}. - -%%-------------------------------------------------------------------- -%% File processing. -%%-------------------------------------------------------------------- - --spec get_all_files(args()) -> [file:filename(),...]. - -get_all_files(#args{files = Fs, files_r = Ds}) -> - case filter_fd(Fs, Ds, fun test_erl_file_exclude_ann/1) of - [] -> fatal_error("no file(s) to analyze"); - AllFiles -> AllFiles - end. - --spec test_erl_file_exclude_ann(file:filename()) -> boolean(). - -test_erl_file_exclude_ann(File) -> - case is_erl_file(File) of - true -> %% Exclude files ending with ".ann.erl" - case re:run(File, "[\.]ann[\.]erl$") of - {match, _} -> false; - nomatch -> true - end; - false -> false - end. - --spec is_erl_file(file:filename()) -> boolean(). - -is_erl_file(File) -> - filename:extension(File) =:= ".erl". - --type test_file_fun() :: fun((file:filename()) -> boolean()). - --spec filter_fd(files(), files(), test_file_fun()) -> files(). - -filter_fd(File_Dir, Dir_R, Fun) -> - All_File_1 = process_file_and_dir(File_Dir, Fun), - All_File_2 = process_dir_rec(Dir_R, Fun), - remove_dup(All_File_1 ++ All_File_2). - --spec process_file_and_dir(files(), test_file_fun()) -> files(). - -process_file_and_dir(File_Dir, TestFun) -> - Fun = - fun (Elem, Acc) -> - case filelib:is_regular(Elem) of - true -> process_file(Elem, TestFun, Acc); - false -> check_dir(Elem, false, Acc, TestFun) - end - end, - lists:foldl(Fun, [], File_Dir). - --spec process_dir_rec(files(), test_file_fun()) -> files(). - -process_dir_rec(Dirs, TestFun) -> - Fun = fun (Dir, Acc) -> check_dir(Dir, true, Acc, TestFun) end, - lists:foldl(Fun, [], Dirs). - --spec check_dir(file:filename(), boolean(), files(), test_file_fun()) -> files(). 
- -check_dir(Dir, Recursive, Acc, Fun) -> - case file:list_dir(Dir) of - {ok, Files} -> - {TmpDirs, TmpFiles} = split_dirs_and_files(Files, Dir), - case Recursive of - false -> - FinalFiles = process_file_and_dir(TmpFiles, Fun), - Acc ++ FinalFiles; - true -> - TmpAcc1 = process_file_and_dir(TmpFiles, Fun), - TmpAcc2 = process_dir_rec(TmpDirs, Fun), - Acc ++ TmpAcc1 ++ TmpAcc2 - end; - {error, eacces} -> - fatal_error("no access permission to dir \""++Dir++"\""); - {error, enoent} -> - fatal_error("cannot access "++Dir++": No such file or directory"); - {error, _Reason} -> - fatal_error("error involving a use of file:list_dir/1") - end. - -%% Same order as the input list --spec process_file(file:filename(), test_file_fun(), files()) -> files(). - -process_file(File, TestFun, Acc) -> - case TestFun(File) of - true -> Acc ++ [File]; - false -> Acc - end. - -%% Same order as the input list --spec split_dirs_and_files(files(), file:filename()) -> {files(), files()}. - -split_dirs_and_files(Elems, Dir) -> - Test_Fun = - fun (Elem, {DirAcc, FileAcc}) -> - File = filename:join(Dir, Elem), - case filelib:is_regular(File) of - false -> {[File|DirAcc], FileAcc}; - true -> {DirAcc, [File|FileAcc]} - end - end, - {Dirs, Files} = lists:foldl(Test_Fun, {[], []}, Elems), - {lists:reverse(Dirs), lists:reverse(Files)}. - -%% Removes duplicate filenames but keeps the order of the input list --spec remove_dup(files()) -> files(). - -remove_dup(Files) -> - Test_Dup = fun (File, Acc) -> - case lists:member(File, Acc) of - true -> Acc; - false -> [File|Acc] - end - end, - Reversed_Elems = lists:foldl(Test_Dup, [], Files), - lists:reverse(Reversed_Elems). - -%%-------------------------------------------------------------------- -%% Collect information. -%%-------------------------------------------------------------------- - --type inc_file_info() :: {file:filename(), func_info()}. - --record(tmpAcc, {file :: file:filename(), - module :: atom(), - funcAcc = [] :: [func_info()], - incFuncAcc = [] :: [inc_file_info()], - dialyzerObj = [] :: [{mfa(), {_, _}}]}). - --spec collect_info(analysis()) -> analysis(). - -collect_info(Analysis) -> - NewPlt = - try get_dialyzer_plt(Analysis) of - DialyzerPlt -> - dialyzer_plt:merge_plts([Analysis#analysis.trust_plt, DialyzerPlt]) - catch - throw:{dialyzer_error,_Reason} -> - fatal_error("Dialyzer's PLT is missing or is not up-to-date; please (re)create it") - end, - NewAnalysis = lists:foldl(fun collect_one_file_info/2, - Analysis#analysis{trust_plt = NewPlt}, - Analysis#analysis.files), - %% Process Remote Types - TmpCServer = NewAnalysis#analysis.codeserver, - NewCServer = - try - NewRecords = dialyzer_codeserver:get_temp_records(TmpCServer), - NewExpTypes = dialyzer_codeserver:get_temp_exported_types(TmpCServer), - OldRecords = dialyzer_plt:get_types(NewPlt), - OldExpTypes = dialyzer_plt:get_exported_types(NewPlt), - MergedRecords = dialyzer_utils:merge_records(NewRecords, OldRecords), - MergedExpTypes = sets:union(NewExpTypes, OldExpTypes), - %% io:format("Merged Records ~p",[MergedRecords]), - TmpCServer1 = dialyzer_codeserver:set_temp_records(MergedRecords, TmpCServer), - TmpCServer2 = - dialyzer_codeserver:finalize_exported_types(MergedExpTypes, TmpCServer1), - {TmpCServer3, RecordDict} = - dialyzer_utils:process_record_remote_types(TmpCServer2), - dialyzer_contracts:process_contract_remote_types(TmpCServer3, RecordDict) - catch - throw:{error, ErrorMsg} -> - fatal_error(ErrorMsg) - end, - NewAnalysis#analysis{codeserver = NewCServer}. 
- -collect_one_file_info(File, Analysis) -> - Ds = [{d,Name,Val} || {Name,Val} <- Analysis#analysis.macros], - %% Current directory should also be included in "Includes". - Includes = [filename:dirname(File)|Analysis#analysis.includes], - Is = [{i,Dir} || Dir <- Includes], - Options = dialyzer_utils:src_compiler_opts() ++ Is ++ Ds, - case dialyzer_utils:get_abstract_code_from_src(File, Options) of - {error, Reason} -> - %% io:format("File=~p\n,Options=~p\n,Error=~p\n", [File,Options,Reason]), - compile_error(Reason); - {ok, AbstractCode} -> - case dialyzer_utils:get_core_from_abstract_code(AbstractCode, Options) of - error -> compile_error(["Could not get core erlang for "++File]); - {ok, Core} -> - case dialyzer_utils:get_record_and_type_info(AbstractCode) of - {error, Reason} -> compile_error([Reason]); - {ok, Records} -> - Mod = cerl:concrete(cerl:module_name(Core)), - case dialyzer_utils:get_spec_info(Mod, AbstractCode, Records) of - {error, Reason} -> compile_error([Reason]); - {ok, SpecInfo, CbInfo} -> - ExpTypes = get_exported_types_from_core(Core), - analyze_core_tree(Core, Records, SpecInfo, CbInfo, - ExpTypes, Analysis, File) - end - end - end - end. - -analyze_core_tree(Core, Records, SpecInfo, CbInfo, ExpTypes, Analysis, File) -> - Module = cerl:concrete(cerl:module_name(Core)), - TmpTree = cerl:from_records(Core), - CS1 = Analysis#analysis.codeserver, - NextLabel = dialyzer_codeserver:get_next_core_label(CS1), - {Tree, NewLabel} = cerl_trees:label(TmpTree, NextLabel), - CS2 = dialyzer_codeserver:insert(Module, Tree, CS1), - CS3 = dialyzer_codeserver:set_next_core_label(NewLabel, CS2), - CS4 = dialyzer_codeserver:store_temp_records(Module, Records, CS3), - CS5 = - case Analysis#analysis.no_spec of - true -> CS4; - false -> - dialyzer_codeserver:store_temp_contracts(Module, SpecInfo, CbInfo, CS4) - end, - OldExpTypes = dialyzer_codeserver:get_temp_exported_types(CS5), - MergedExpTypes = sets:union(ExpTypes, OldExpTypes), - CS6 = dialyzer_codeserver:insert_temp_exported_types(MergedExpTypes, CS5), - Ex_Funcs = [{0,F,A} || {_,_,{F,A}} <- cerl:module_exports(Tree)], - CG = Analysis#analysis.callgraph, - {V, E} = dialyzer_callgraph:scan_core_tree(Tree, CG), - dialyzer_callgraph:add_edges(E, V, CG), - Fun = fun analyze_one_function/2, - All_Defs = cerl:module_defs(Tree), - Acc = lists:foldl(Fun, #tmpAcc{file = File, module = Module}, All_Defs), - Exported_FuncMap = map__insert({File, Ex_Funcs}, Analysis#analysis.ex_func), - %% we must sort all functions in the file which - %% originate from this file by *numerical order* of lineNo - Sorted_Functions = lists:keysort(1, Acc#tmpAcc.funcAcc), - FuncMap = map__insert({File, Sorted_Functions}, Analysis#analysis.func), - %% we do not need to sort functions which are imported from included files - IncFuncMap = map__insert({File, Acc#tmpAcc.incFuncAcc}, - Analysis#analysis.inc_func), - FMs = Analysis#analysis.fms ++ [{File, Module}], - RecordMap = map__insert({File, Records}, Analysis#analysis.record), - Analysis#analysis{fms = FMs, - callgraph = CG, - codeserver = CS6, - ex_func = Exported_FuncMap, - inc_func = IncFuncMap, - record = RecordMap, - func = FuncMap}. 
- -analyze_one_function({Var, FunBody} = Function, Acc) -> - F = cerl:fname_id(Var), - A = cerl:fname_arity(Var), - TmpDialyzerObj = {{Acc#tmpAcc.module, F, A}, Function}, - NewDialyzerObj = Acc#tmpAcc.dialyzerObj ++ [TmpDialyzerObj], - Anno = cerl:get_ann(FunBody), - LineNo = get_line(Anno), - FileName = get_file(Anno), - BaseName = filename:basename(FileName), - FuncInfo = {LineNo, F, A}, - OriginalName = Acc#tmpAcc.file, - {FuncAcc, IncFuncAcc} = - case (FileName =:= OriginalName) orelse (BaseName =:= OriginalName) of - true -> %% Coming from original file - %% io:format("Added function ~p\n", [{LineNo, F, A}]), - {Acc#tmpAcc.funcAcc ++ [FuncInfo], Acc#tmpAcc.incFuncAcc}; - false -> - %% Coming from other sourses, including: - %% -- .yrl (yecc-generated file) - %% -- yeccpre.hrl (yecc-generated file) - %% -- other cases - {Acc#tmpAcc.funcAcc, Acc#tmpAcc.incFuncAcc ++ [{FileName, FuncInfo}]} - end, - Acc#tmpAcc{funcAcc = FuncAcc, - incFuncAcc = IncFuncAcc, - dialyzerObj = NewDialyzerObj}. - -get_line([Line|_]) when is_integer(Line) -> Line; -get_line([_|T]) -> get_line(T); -get_line([]) -> none. - -get_file([{file,File}|_]) -> File; -get_file([_|T]) -> get_file(T); -get_file([]) -> "no_file". % should not happen - --spec get_dialyzer_plt(analysis()) -> plt(). - -get_dialyzer_plt(#analysis{plt = PltFile0}) -> - PltFile = - case PltFile0 =:= none of - true -> dialyzer_plt:get_default_plt(); - false -> PltFile0 - end, - dialyzer_plt:from_file(PltFile). - -%% Exported Types - -get_exported_types_from_core(Core) -> - Attrs = cerl:module_attrs(Core), - ExpTypes1 = [cerl:concrete(L2) || {L1, L2} <- Attrs, - cerl:is_literal(L1), - cerl:is_literal(L2), - cerl:concrete(L1) =:= 'export_type'], - ExpTypes2 = lists:flatten(ExpTypes1), - M = cerl:atom_val(cerl:module_name(Core)), - sets:from_list([{M, F, A} || {F, A} <- ExpTypes2]). - -%%-------------------------------------------------------------------- -%% Utilities for error reporting. -%%-------------------------------------------------------------------- - --spec fatal_error(string()) -> no_return(). - -fatal_error(Slogan) -> - msg(io_lib:format("typer: ~s\n", [Slogan])), - erlang:halt(1). - --spec mode_error(mode(), mode()) -> no_return(). - -mode_error(OldMode, NewMode) -> - Msg = io_lib:format("Mode was previously set to '~s'; " - "can not set it to '~s' now", - [OldMode, NewMode]), - fatal_error(Msg). - --spec compile_error([string()]) -> no_return(). - -compile_error(Reason) -> - JoinedString = lists:flatten([X ++ "\n" || X <- Reason]), - Msg = "Analysis failed with error report:\n" ++ JoinedString, - fatal_error(Msg). - --spec msg(string()) -> 'ok'. - -msg(Msg) -> - io:format(standard_error, "~s", [Msg]). - -%%-------------------------------------------------------------------- -%% Version and help messages. -%%-------------------------------------------------------------------- - --spec version_message() -> no_return(). - -version_message() -> - io:format("TypEr version "++?VSN++"\n"), - erlang:halt(0). - --spec help_message() -> no_return(). - -help_message() -> - S = <<" Usage: typer [--help] [--version] [--plt PLT] [--edoc] - [--show | --show-exported | --annotate | --annotate-inc-files] - [-Ddefine]* [-I include_dir]* [-pa dir]* [-pz dir]* - [-T application]* [-r] file* - - Options: - -r dir* - search directories recursively for .erl files below them - --show - Prints type specifications for all functions on stdout. 
- (this is the default behaviour; this option is not really needed) - --show-exported (or --show_exported) - Same as --show, but prints specifications for exported functions only - Specs are displayed sorted alphabetically on the function's name - --annotate - Annotates the specified files with type specifications - --annotate-inc-files - Same as --annotate but annotates all -include() files as well as - all .erl files (use this option with caution - has not been tested much) - --edoc - Prints type information as Edoc @spec comments, not as type specs - --plt PLT - Use the specified dialyzer PLT file rather than the default one - -T file* - The specified file(s) already contain type specifications and these - are to be trusted in order to print specs for the rest of the files - (Multiple files or dirs, separated by spaces, can be specified.) - -Dname (or -Dname=value) - pass the defined name(s) to TypEr - (The syntax of defines is the same as that used by \"erlc\".) - -I include_dir - pass the include_dir to TypEr - (The syntax of includes is the same as that used by \"erlc\".) - -pa dir - -pz dir - Set code path options to TypEr - (This is useful for files that use parse tranforms.) - --version (or -v) - prints the Typer version and exits - --help (or -h) - prints this message and exits - - Note: - * denotes that multiple occurrences of these options are possible. -">>, - io:put_chars(S), - erlang:halt(0). - -%%-------------------------------------------------------------------- -%% Handle messages. -%%-------------------------------------------------------------------- - -rcv_ext_types() -> - Self = self(), - Self ! {Self, done}, - rcv_ext_types(Self, []). - -rcv_ext_types(Self, ExtTypes) -> - receive - {Self, ext_types, ExtType} -> - rcv_ext_types(Self, [ExtType|ExtTypes]); - {Self, done} -> - lists:usort(ExtTypes) - end. - -%%-------------------------------------------------------------------- -%% A convenient abstraction of a Key-Value mapping data structure -%% specialized for the uses in this module -%%-------------------------------------------------------------------- - --type map_dict() :: dict:dict(). - --spec map__new() -> map_dict(). -map__new() -> - dict:new(). - --spec map__insert({term(), term()}, map_dict()) -> map_dict(). -map__insert(Object, Map) -> - {Key, Value} = Object, - dict:store(Key, Value, Map). - --spec map__lookup(term(), map_dict()) -> term(). -map__lookup(Key, Map) -> - try dict:fetch(Key, Map) catch error:_ -> none end. - --spec map__from_list([{fa(), term()}]) -> map_dict(). -map__from_list(List) -> - dict:from_list(List). - --spec map__remove(term(), map_dict()) -> map_dict(). -map__remove(Key, Dict) -> - dict:erase(Key, Dict). - --spec map__fold(fun((term(), term(), term()) -> map_dict()), map_dict(), map_dict()) -> map_dict(). -map__fold(Fun, Acc0, Dict) -> - dict:fold(Fun, Acc0, Dict). 
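For context on the option handling in the removed module above: the -D and -I flags described in the help text were translated into ordinary compiler options before each source file was handed to the Dialyzer front end. A minimal sketch of that translation, mirroring the removed collect_one_file_info/2 (the function name build_options and the example values are invented for illustration):

%% Sketch only: -Dname=value becomes {d, Name, Value} and -I dir becomes {i, Dir};
%% the directory of the file being analyzed is always added as an include path.
build_options(Macros, Includes, File) ->
    Ds = [{d, Name, Val} || {Name, Val} <- Macros],
    Is = [{i, Dir} || Dir <- [filename:dirname(File) | Includes]],
    dialyzer_utils:src_compiler_opts() ++ Is ++ Ds.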
diff --git a/lib/typer/test/Makefile b/lib/typer/test/Makefile deleted file mode 100644 index fb5570d9f0..0000000000 --- a/lib/typer/test/Makefile +++ /dev/null @@ -1,65 +0,0 @@ -include $(ERL_TOP)/make/target.mk -include $(ERL_TOP)/make/$(TARGET)/otp.mk - -# ---------------------------------------------------- -# Target Specs -# ---------------------------------------------------- - -MODULES= \ - typer_SUITE - -ERL_FILES= $(MODULES:%=%.erl) - -TARGET_FILES= $(MODULES:%=$(EBIN)/%.$(EMULATOR)) -INSTALL_PROGS= $(TARGET_FILES) - -EMAKEFILE=Emakefile - -# ---------------------------------------------------- -# Release directory specification -# ---------------------------------------------------- -RELSYSDIR = $(RELEASE_PATH)/typer_test - -# ---------------------------------------------------- -# FLAGS -# ---------------------------------------------------- - -ERL_MAKE_FLAGS += -ERL_COMPILE_FLAGS += - -EBIN = . - -# ---------------------------------------------------- -# Targets -# ---------------------------------------------------- - -make_emakefile: - $(ERL_TOP)/make/make_emakefile $(ERL_COMPILE_FLAGS) -o$(EBIN) $(MODULES) \ - > $(EMAKEFILE) - $(ERL_TOP)/make/make_emakefile $(ERL_COMPILE_FLAGS) -o$(EBIN) '*_SUITE_make' \ - >> $(EMAKEFILE) - -tests debug opt: make_emakefile - erl $(ERL_MAKE_FLAGS) -make - -clean: - rm -f $(EMAKEFILE) - rm -f $(TARGET_FILES) $(GEN_FILES) - rm -f core - -docs: - -# ---------------------------------------------------- -# Release Target -# ---------------------------------------------------- -include $(ERL_TOP)/make/otp_release_targets.mk - -release_spec: opt - -release_tests_spec: make_emakefile - $(INSTALL_DIR) "$(RELSYSDIR)" - $(INSTALL_DATA) $(EMAKEFILE) $(ERL_FILES) "$(RELSYSDIR)" - $(INSTALL_DATA) typer.spec "$(RELSYSDIR)" - chmod -R u+w "$(RELSYSDIR)" - -release_docs_spec: diff --git a/lib/typer/test/typer.spec b/lib/typer/test/typer.spec deleted file mode 100644 index 79f51b6781..0000000000 --- a/lib/typer/test/typer.spec +++ /dev/null @@ -1 +0,0 @@ -{suites,"../typer_test",all}. diff --git a/lib/typer/test/typer_SUITE.erl b/lib/typer/test/typer_SUITE.erl deleted file mode 100644 index 25f0229640..0000000000 --- a/lib/typer/test/typer_SUITE.erl +++ /dev/null @@ -1,57 +0,0 @@ -%% ``Licensed under the Apache License, Version 2.0 (the "License"); -%% you may not use this file except in compliance with the License. -%% You may obtain a copy of the License at -%% -%% http://www.apache.org/licenses/LICENSE-2.0 -%% -%% Unless required by applicable law or agreed to in writing, software -%% distributed under the License is distributed on an "AS IS" BASIS, -%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -%% See the License for the specific language governing permissions and -%% limitations under the License. -%% -%% The Initial Developer of the Original Code is Ericsson Utvecklings AB. -%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings -%% AB. All Rights Reserved.'' -%% --module(typer_SUITE). - --compile([export_all]). --include_lib("common_test/include/ct.hrl"). - -suite() -> - [{ct_hooks, [ts_install_cth]}]. - -all() -> - case application:ensure_all_started(typer) of - {ok, Apps} -> - [application:stop(App) || App <- lists:reverse(Apps)], - [app, appup]; - _ -> - [appup] - end. - -groups() -> - []. - -init_per_suite(Config) -> - Config. - -end_per_suite(_Config) -> - ok. - -init_per_group(_GroupName, Config) -> - Config. - -end_per_group(_GroupName, Config) -> - Config. 
- -app() -> - [{doc, "Test that the typer app file is ok"}]. -app(Config) when is_list(Config) -> - ok = ?t:app_test(typer). - -appup() -> - [{doc, "Test that the typer appup file is ok"}]. -appup(Config) when is_list(Config) -> - ok = ?t:appup_test(typer). diff --git a/lib/typer/vsn.mk b/lib/typer/vsn.mk deleted file mode 100644 index ed12e067c1..0000000000 --- a/lib/typer/vsn.mk +++ /dev/null @@ -1 +0,0 @@ -TYPER_VSN = 0.9.11 diff --git a/lib/wx/api_gen/gen_util.erl b/lib/wx/api_gen/gen_util.erl index cd42ad2d96..49a3cb521e 100644 --- a/lib/wx/api_gen/gen_util.erl +++ b/lib/wx/api_gen/gen_util.erl @@ -203,7 +203,7 @@ replace_and_remove([$; | R], Acc) -> replace_and_remove([$@ | R], Acc) -> replace_and_remove(R, [directive|Acc]); -replace_and_remove([_E|R], Acc) -> %% Ignore everthing else +replace_and_remove([_E|R], Acc) -> %% Ignore everything else replace_and_remove(R, Acc); replace_and_remove([], Acc) -> Acc. diff --git a/lib/wx/api_gen/wx_gen_cpp.erl b/lib/wx/api_gen/wx_gen_cpp.erl index d4b6db8153..4b208001a0 100644 --- a/lib/wx/api_gen/wx_gen_cpp.erl +++ b/lib/wx/api_gen/wx_gen_cpp.erl @@ -627,7 +627,7 @@ decode_arg(N,#type{name="wxArrayString"},Place,A0) -> w(" int * ~sLen = (int *) bp; bp += 4;~n", [N]), case Place of arg -> w(" wxArrayString ~s;~n", [N]); - opt -> ignore %% Allready declared + opt -> ignore %% Already declared end, w(" int ~sASz = 0, * ~sTemp;~n", [N,N]), w(" for(int i=0; i < *~sLen; i++) {~n", [N]), diff --git a/lib/xmerl/doc/src/notes.xml b/lib/xmerl/doc/src/notes.xml index 12e64537ed..652560f60c 100644 --- a/lib/xmerl/doc/src/notes.xml +++ b/lib/xmerl/doc/src/notes.xml @@ -32,6 +32,61 @@ <p>This document describes the changes made to the Xmerl application.</p> +<section><title>Xmerl 1.3.13</title> + + <section><title>Fixed Bugs and Malfunctions</title> + <list> + <item> + <p> + The namespace_conformant option in xmerl_scan did not + work when parsing documents without explicit XML + namespace declaration.</p> + <p> + Own Id: OTP-14139</p> + </item> + <item> + <p> Fix a "well-formedness" bug in the XML Sax parser so + it returns an error if there are something more in the + file after the matching document. If one using the + xmerl_sax_parser:stream() a rest is allowed which then + can be sent to a new call of xmerl_sax_parser:stream() to + parse next document. </p> <p> This is done to be + compliant with XML conformance tests. </p> + <p> + Own Id: OTP-14211</p> + </item> + <item> + <p> Fixed compiler and dialyzer warnings in the XML SAX + parser. </p> + <p> + Own Id: OTP-14212</p> + </item> + <item> + <p> Change how to interpret end of document in the XML + SAX parser to comply with Tim Brays comment on the + standard. This makes it possible to handle more than one + doc on a stream, the standard makes it impossible to know + when the document is ended without waiting for the next + document (and not always even that). </p> <p> Tim Brays + comment: </p> <p> Trailing "Misc"<br/> The fact that + you're allowed some trailing junk after the root element, + I decided (but unfortunately too late) is a real design + error in XML. If I'm writing a network client, I'm + probably going to close the link as soon as a I see the + root element end-tag, and not depend on the other end + closing it down properly.<br/> Furthermore, if I want to + send a succession of XML documents over a network link, + if I find a processing instruction after a root element, + is it a trailer on the previous document, or part of the + prolog of the next? 
</p> + <p> + Own Id: OTP-14213</p> + </item> + </list> + </section> + +</section> + <section><title>Xmerl 1.3.12</title> <section><title>Fixed Bugs and Malfunctions</title> diff --git a/lib/xmerl/src/xmerl_regexp.erl b/lib/xmerl/src/xmerl_regexp.erl index fc89b80ff1..566b77725f 100644 --- a/lib/xmerl/src/xmerl_regexp.erl +++ b/lib/xmerl/src/xmerl_regexp.erl @@ -1154,7 +1154,7 @@ comp_crs([], Last) -> [{Last,maxchar}]. %% build_dfa(NFA, NfaStartState) -> {DFA,DfaStartState}. %% Build a DFA from an NFA using "subset construction". The major %% difference from the book is that we keep the marked and unmarked -%% DFA states in seperate lists. New DFA states are added to the +%% DFA states in separate lists. New DFA states are added to the %% unmarked list and states are marked by moving them to the marked %% list. We assume that the NFA accepting state numbers are in %% ascending order for the rules and use ordsets to keep this order. diff --git a/lib/xmerl/src/xmerl_sax_parser.erl b/lib/xmerl/src/xmerl_sax_parser.erl index 318a0cf7f4..1aef6c58c4 100644 --- a/lib/xmerl/src/xmerl_sax_parser.erl +++ b/lib/xmerl/src/xmerl_sax_parser.erl @@ -1,7 +1,7 @@ %%-------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2008-2016. All Rights Reserved. +%% Copyright Ericsson AB 2008-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -33,6 +33,7 @@ %% External exports %%---------------------------------------------------------------------- -export([file/2, + stream/3, stream/2]). %%---------------------------------------------------------------------- @@ -72,11 +73,12 @@ file(Name,Options) -> File = filename:basename(Name), ContinuationFun = fun default_continuation_cb/1, Res = stream(<<>>, - [{continuation_fun, ContinuationFun}, - {continuation_state, FD}, - {current_location, CL}, - {entity, File} - |Options]), + [{continuation_fun, ContinuationFun}, + {continuation_state, FD}, + {current_location, CL}, + {entity, File} + |Options], + file), ok = file:close(FD), Res end. @@ -92,19 +94,22 @@ file(Name,Options) -> %% EventState = term() %% Description: Parse a stream containing an XML document. %%---------------------------------------------------------------------- -stream(Xml, Options) when is_list(Xml), is_list(Options) -> +stream(Xml, Options) -> + stream(Xml, Options, stream). + +stream(Xml, Options, InputType) when is_list(Xml), is_list(Options) -> State = parse_options(Options, initial_state()), - case State#xmerl_sax_parser_state.file_type of + case State#xmerl_sax_parser_state.file_type of dtd -> xmerl_sax_parser_list:parse_dtd(Xml, State#xmerl_sax_parser_state{encoding = list, - input_type = stream}); + input_type = InputType}); normal -> xmerl_sax_parser_list:parse(Xml, State#xmerl_sax_parser_state{encoding = list, - input_type = stream}) + input_type = InputType}) end; -stream(Xml, Options) when is_binary(Xml), is_list(Options) -> +stream(Xml, Options, InputType) when is_binary(Xml), is_list(Options) -> case parse_options(Options, initial_state()) of {error, Reason} -> {error, Reason}; State -> @@ -127,7 +132,7 @@ stream(Xml, Options) when is_binary(Xml), is_list(Options) -> State#xmerl_sax_parser_state.event_state}; {Xml1, State1} -> parse_binary(Xml1, - State1#xmerl_sax_parser_state{input_type = stream}, + State1#xmerl_sax_parser_state{input_type = InputType}, ParseFunction) end end. 
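The multi-document stream behaviour referred to in the release notes above can be illustrated with a small, hedged sketch (the two concatenated documents and the function name are invented; the exact contents of the returned rest depend on any whitespace between the documents): xmerl_sax_parser:stream/2 stops after the root element of the first document and returns the remaining bytes, which can be passed to a new stream/2 call to parse the next document.

%% Sketch only: parse two XML documents that arrive on the same stream.
parse_two_documents() ->
    Bin = <<"<?xml version=\"1.0\"?><a>one</a>",
            "<?xml version=\"1.0\"?><a>two</a>">>,
    {ok, _Events1, Rest} = xmerl_sax_parser:stream(Bin, []),
    {ok, _Events2, _Rest2} = xmerl_sax_parser:stream(Rest, []),
    ok.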
@@ -226,12 +231,12 @@ check_encoding_option(E) -> %% Description: Detects which character set is used in a binary stream. %%---------------------------------------------------------------------- detect_charset(<<>>, #xmerl_sax_parser_state{continuation_fun = undefined} = _) -> - throw({error, "Can't detect character encoding due to no indata"}); + {error, "Can't detect character encoding due to no indata"}; detect_charset(<<>>, #xmerl_sax_parser_state{continuation_fun = CFun, continuation_state = CState} = State) -> case CFun(CState) of {<<>>, _} -> - throw({error, "Can't detect character encoding due to lack of indata"}); + {error, "Can't detect character encoding due to lack of indata"}; {NewBytes, NewContState} -> detect_charset(NewBytes, State#xmerl_sax_parser_state{continuation_state = NewContState}) end; diff --git a/lib/xmerl/src/xmerl_sax_parser.hrl b/lib/xmerl/src/xmerl_sax_parser.hrl index 932ab0cec5..7f9bf6c4d3 100644 --- a/lib/xmerl/src/xmerl_sax_parser.hrl +++ b/lib/xmerl/src/xmerl_sax_parser.hrl @@ -88,14 +88,7 @@ current_location, % Location of the currently parsed XML entity entity, % Parsed XML entity skip_external_dtd = false,% If true the external DTD is skipped during parsing - input_type % Source type: file | stream. - % This field is a preparation for an fix in R17 of a bug in - % the conformance against the standard. - % Today a file which contains two XML documents will be considered - % well-formed and the second is placed in the rest part of the - % return tuple, according to the conformance tests this should fail. - % In the future this will fail if xmerl_sax_aprser:file/2 is used but - % left to the user in the xmerl_sax_aprser:stream/2 case. + input_type % Source type: file | stream }). diff --git a/lib/xmerl/src/xmerl_sax_parser_base.erlsrc b/lib/xmerl/src/xmerl_sax_parser_base.erlsrc index 4d75805b9b..f3470b2809 100644 --- a/lib/xmerl/src/xmerl_sax_parser_base.erlsrc +++ b/lib/xmerl/src/xmerl_sax_parser_base.erlsrc @@ -1,7 +1,7 @@ %%-*-erlang-*- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2008-2016. All Rights Reserved. +%% Copyright Ericsson AB 2008-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -72,7 +72,12 @@ parse(Xml, State) -> {ok, Rest, State2} -> State3 = event_callback(endDocument, State2), ets:delete(RefTable), - {ok, State3#xmerl_sax_parser_state.event_state, Rest}; + case check_if_rest_ok(State3#xmerl_sax_parser_state.input_type, Rest) of + true -> + {ok, State3#xmerl_sax_parser_state.event_state, Rest}; + false -> + format_error(fatal_error, State3, "Input found after legal document") + end; {fatal_error, {State2, Reason}} -> State3 = event_callback(endDocument, State2), ets:delete(RefTable), @@ -81,10 +86,14 @@ parse(Xml, State) -> State3 = event_callback(endDocument, State2), ets:delete(RefTable), format_error(Tag, State3, Reason); + {endDocument, Rest, State2} -> + State3 = event_callback(endDocument, State2), + ets:delete(RefTable), + {ok, State3#xmerl_sax_parser_state.event_state, Rest}; Other -> _State2 = event_callback(endDocument, State1), ets:delete(RefTable), - throw(Other) + {fatal_error, Other} end. 
%%---------------------------------------------------------------------- @@ -111,7 +120,7 @@ parse_dtd(Xml, State) -> {Rest, State2} when is_record(State2, xmerl_sax_parser_state) -> State3 = event_callback(endDocument, State2), ets:delete(RefTable), - {ok, State3#xmerl_sax_parser_state.event_state, Rest}; + {ok, State3#xmerl_sax_parser_state.event_state, Rest}; {endDocument, Rest, State2} when is_record(State2, xmerl_sax_parser_state) -> State3 = event_callback(endDocument, State2), ets:delete(RefTable), @@ -119,7 +128,7 @@ parse_dtd(Xml, State) -> Other -> _State2 = event_callback(endDocument, State1), ets:delete(RefTable), - throw(Other) + {fatal_error, Other} end. @@ -136,10 +145,11 @@ parse_dtd(Xml, State) -> %% [1] document ::= prolog element Misc* %%---------------------------------------------------------------------- parse_document(Rest, State) when is_record(State, xmerl_sax_parser_state) -> - {Rest1, State1} = parse_xml_decl(Rest, State), + {Rest1, State1} = parse_byte_order_mark(Rest, State), {Rest2, State2} = parse_misc(Rest1, State1, true), {ok, Rest2, State2}. +?PARSE_BYTE_ORDER_MARK(Bytes, State). %%---------------------------------------------------------------------- %% Function: parse_xml_decl(Rest, State) -> Result @@ -150,15 +160,8 @@ parse_document(Rest, State) when is_record(State, xmerl_sax_parser_state) -> %% [22] prolog ::= XMLDecl? Misc* (doctypedecl Misc*)? %% [23] XMLDecl ::= '<?xml' VersionInfo EncodingDecl? SDDecl? S? '?>' %%---------------------------------------------------------------------- --dialyzer({[no_fail_call, no_match], parse_xml_decl/2}). parse_xml_decl(?STRING_EMPTY, State) -> cf(?STRING_EMPTY, State, fun parse_xml_decl/2); -parse_xml_decl(?BYTE_ORDER_MARK_1, State) -> - cf(?BYTE_ORDER_MARK_1, State, fun parse_xml_decl/2); -parse_xml_decl(?BYTE_ORDER_MARK_2, State) -> - cf(?BYTE_ORDER_MARK_2, State, fun parse_xml_decl/2); -parse_xml_decl(?BYTE_ORDER_MARK_REST(Rest), State) -> - cf(Rest, State, fun parse_xml_decl/2); parse_xml_decl(?STRING("<") = Bytes, State) -> cf(Bytes, State, fun parse_xml_decl/2); parse_xml_decl(?STRING("<?") = Bytes, State) -> @@ -170,31 +173,19 @@ parse_xml_decl(?STRING("<?xm") = Bytes, State) -> parse_xml_decl(?STRING("<?xml") = Bytes, State) -> cf(Bytes, State, fun parse_xml_decl/2); parse_xml_decl(?STRING_REST("<?xml", Rest1), State) -> - parse_xml_decl_1(Rest1, State); -parse_xml_decl(Bytes, #xmerl_sax_parser_state{encoding=Enc} = State) when is_binary(Bytes) -> - case unicode:characters_to_list(Bytes, Enc) of - {incomplete, _, _} -> - cf(Bytes, State, fun parse_xml_decl/2); - {error, _Encoded, _Rest} -> - ?fatal_error(State, lists:flatten(io_lib:format("Bad character, not in ~p\n", [Enc]))); - _ -> - parse_prolog(Bytes, State) - end; -parse_xml_decl(Bytes, State) -> - parse_prolog(Bytes, State). - + parse_xml_decl_rest(Rest1, State); +?PARSE_XML_DECL(Bytes, State). -parse_xml_decl_1(?STRING_UNBOUND_REST(C, Rest) = Bytes, State) -> +parse_xml_decl_rest(?STRING_UNBOUND_REST(C, Rest) = Bytes, State) -> if ?is_whitespace(C) -> {_XmlAttributes, Rest1, State1} = parse_version_info(Rest, State, []), - %State2 = event_callback({processingInstruction, "xml", XmlAttributes}, State1),% The XML decl. should not be reported as a PI parse_prolog(Rest1, State1); true -> parse_prolog(?STRING_REST("<?xml", Bytes), State) end; -parse_xml_decl_1(Bytes, State) -> - unicode_incomplete_check([Bytes, State, fun parse_xml_decl_1/2], undefined). 
+parse_xml_decl_rest(Bytes, State) -> + unicode_incomplete_check([Bytes, State, fun parse_xml_decl_rest/2], undefined). @@ -216,8 +207,6 @@ parse_prolog(?STRING_REST("<?", Rest), State) -> parse_prolog(Rest1, State1); {endDocument, Rest1, State1} -> parse_prolog(Rest1, State1) - % IValue = ?TO_INPUT_FORMAT("<?"), - % {?APPEND_STRING(IValue, Rest1), State1} end; parse_prolog(?STRING_REST("<!", Rest), State) -> parse_prolog_1(Rest, State); @@ -230,7 +219,6 @@ parse_prolog(Bytes, State) -> unicode_incomplete_check([Bytes, State, fun parse_prolog/2], "expecting < or whitespace"). - parse_prolog_1(?STRING_EMPTY, State) -> cf(?STRING_EMPTY, State, fun parse_prolog_1/2); parse_prolog_1(?STRING("D") = Bytes, State) -> @@ -442,6 +430,15 @@ check_if_new_doc_allowed(stream, []) -> check_if_new_doc_allowed(_, _) -> false. +check_if_rest_ok(file, []) -> + true; +check_if_rest_ok(file, <<>>) -> + true; +check_if_rest_ok(stream, _) -> + true; +check_if_rest_ok(_, _) -> + false. + %%---------------------------------------------------------------------- %% Function: parse_pi_1(Rest, State) -> Result %% Input: Rest = string() | binary() @@ -1024,16 +1021,21 @@ parse_etag(Bytes, State) -> unicode_incomplete_check([Bytes, State, fun parse_etag/2], undefined). - parse_etag_1(?STRING_REST(">", Rest), #xmerl_sax_parser_state{end_tags=[{_ETag, Uri, LocalName, QName, OldNsList, NewNsList} - |RestOfETags]} = State, _Tag) -> + |RestOfETags], + input_type=InputType} = State, _Tag) -> State1 = event_callback({endElement, Uri, LocalName, QName}, State), State2 = send_end_prefix_mapping_event(NewNsList, State1), - parse_content(Rest, - State2#xmerl_sax_parser_state{end_tags=RestOfETags, - ns = OldNsList}, - [], true); + case check_if_new_doc_allowed(InputType, RestOfETags) of + true -> + throw({endDocument, Rest, State2#xmerl_sax_parser_state{ns = OldNsList}}); + false -> + parse_content(Rest, + State2#xmerl_sax_parser_state{end_tags=RestOfETags, + ns = OldNsList}, + [], true) + end; parse_etag_1(?STRING_UNBOUND_REST(_C, _), State, Tag) -> {P,TN} = Tag, ?fatal_error(State, "Bad EndTag: " ++ P ++ ":" ++ TN); @@ -1051,21 +1053,26 @@ parse_etag_1(Bytes, State, Tag) -> %% Description: Parsing the content part of tags %% [43] content ::= (element | CharData | Reference | CDSect | PI | Comment)* %%---------------------------------------------------------------------- - parse_content(?STRING_EMPTY, State, Acc, IgnorableWS) -> - case catch cf(?STRING_EMPTY, State, Acc, IgnorableWS, fun parse_content/4) of - {Rest, State1} when is_record(State1, xmerl_sax_parser_state) -> - {Rest, State1}; - {fatal_error, {State1, Msg}} -> - case check_if_document_complete(State1, Msg) of - true -> - State2 = send_character_event(length(Acc), IgnorableWS, lists:reverse(Acc), State1), - {?STRING_EMPTY, State2}; - false -> - ?fatal_error(State1, Msg) - end; - Other -> - throw(Other) + case check_if_document_complete(State, "No more bytes") of + true -> + State1 = send_character_event(length(Acc), IgnorableWS, lists:reverse(Acc), State), + {?STRING_EMPTY, State1}; + false -> + case catch cf(?STRING_EMPTY, State, Acc, IgnorableWS, fun parse_content/4) of + {Rest, State1} when is_record(State1, xmerl_sax_parser_state) -> + {Rest, State1}; + {fatal_error, {State1, Msg}} -> + case check_if_document_complete(State1, Msg) of + true -> + State2 = send_character_event(length(Acc), IgnorableWS, lists:reverse(Acc), State1), + {?STRING_EMPTY, State2}; + false -> + ?fatal_error(State1, Msg) + end; + Other -> + throw(Other) + end end; 
parse_content(?STRING("\r") = Bytes, State, Acc, IgnorableWS) -> cf(Bytes, State, Acc, IgnorableWS, fun parse_content/4); @@ -1094,7 +1101,7 @@ parse_content(?STRING_REST("<?", Rest), State, Acc, IgnorableWS) -> parse_content(?STRING_REST("<!", Rest1) = Rest, #xmerl_sax_parser_state{end_tags = ET} = State, Acc, IgnorableWS) -> case ET of [] -> - {Rest, State}; %%LATH : Skicka ignorable WS ??? + {Rest, State}; %% Skicka ignorable WS ??? _ -> State1 = send_character_event(length(Acc), IgnorableWS, lists:reverse(Acc), State), parse_cdata(Rest1, State1) @@ -1102,7 +1109,7 @@ parse_content(?STRING_REST("<!", Rest1) = Rest, #xmerl_sax_parser_state{end_tags parse_content(?STRING_REST("<", Rest1) = Rest, #xmerl_sax_parser_state{end_tags = ET} = State, Acc, IgnorableWS) -> case ET of [] -> - {Rest, State}; %%LATH : Skicka ignorable WS ??? + {Rest, State}; %% Skicka ignorable WS ??? _ -> State1 = send_character_event(length(Acc), IgnorableWS, lists:reverse(Acc), State), parse_stag(Rest1, State1) @@ -1204,7 +1211,6 @@ send_character_event(_, true, String, State) -> %% Description: Parse whitespaces. %% [3] S ::= (#x20 | #x9 | #xD | #xA)+ %%---------------------------------------------------------------------- --dialyzer({no_fail_call, whitespace/3}). whitespace(?STRING_EMPTY, State, Acc) -> case cf(?STRING_EMPTY, State, Acc, fun whitespace/3) of {?STRING_EMPTY, State} -> @@ -1230,16 +1236,7 @@ whitespace(?STRING_REST("\r", Rest), State, Acc) -> whitespace(Rest, State#xmerl_sax_parser_state{line_no=N+1}, [?lf |Acc]); whitespace(?STRING_UNBOUND_REST(C, Rest), State, Acc) when ?is_whitespace(C) -> whitespace(Rest, State, [C|Acc]); -whitespace(?STRING_UNBOUND_REST(_C, _) = Bytes, State, Acc) -> - {lists:reverse(Acc), Bytes, State}; -whitespace(Bytes, #xmerl_sax_parser_state{encoding=Enc} = State, Acc) when is_binary(Bytes) -> - case unicode:characters_to_list(Bytes, Enc) of - {incomplete, _, _} -> - cf(Bytes, State, Acc, fun whitespace/3); - {error, _Encoded, _Rest} -> - ?fatal_error(State, lists:flatten(io_lib:format("Bad character, not in ~p\n", [Enc]))) - end. - +?WHITESPACE(Bytes, State, Acc). %%---------------------------------------------------------------------- %% Function: parse_reference(Rest, State, HaveToExist) -> Result @@ -1362,7 +1359,6 @@ parse_pe_reference_1(Bytes, State, Name) -> "missing ; after reference " ++ Name). - %%---------------------------------------------------------------------- %% Function: insert_reference(Reference, State) -> Result %% Parameters: Reference = string() @@ -1378,7 +1374,6 @@ insert_reference({Name, Type, Value}, Table) -> end. - %%---------------------------------------------------------------------- %% Function: look_up_reference(Reference, State) -> Result %% Parameters: Reference = string() @@ -1693,7 +1688,7 @@ handle_external_entity({http, Url}, State) -> ++ file:format_error(Reason)); {ok, FD} -> {?STRING_EMPTY, EntityState} = - parse_external_entity_1(<<>>, + parse_external_entity_byte_order_mark(<<>>, State#xmerl_sax_parser_state{continuation_state=FD, current_location=filename:dirname(Url), entity=filename:basename(Url), @@ -1709,6 +1704,8 @@ handle_external_entity({http, Url}, State) -> handle_external_entity({Tag, _Url}, State) -> ?fatal_error(State, "Unsupported URI type: " ++ atom_to_list(Tag)). +?PARSE_EXTERNAL_ENTITY_BYTE_ORDER_MARK(Bytes, State). 
+ %%---------------------------------------------------------------------- %% Function : parse_external_entity_1(Rest, State) -> Result %% Parameters: Rest = string() | binary() @@ -1716,7 +1713,6 @@ handle_external_entity({Tag, _Url}, State) -> %% Result : {Rest, State} %% Description: Parse the external entity. %%---------------------------------------------------------------------- --dialyzer({[no_fail_call, no_match], parse_external_entity_1/2}). parse_external_entity_1(?STRING_EMPTY, #xmerl_sax_parser_state{file_type=Type} = State) -> case catch cf(?STRING_EMPTY, State, fun parse_external_entity_1/2) of {Rest, State1} when is_record(State1, xmerl_sax_parser_state) -> @@ -1726,12 +1722,6 @@ parse_external_entity_1(?STRING_EMPTY, #xmerl_sax_parser_state{file_type=Type} = Other -> throw(Other) end; -parse_external_entity_1(?BYTE_ORDER_MARK_1, State) -> - cf(?BYTE_ORDER_MARK_1, State, fun parse_external_entity_1/2); -parse_external_entity_1(?BYTE_ORDER_MARK_2, State) -> - cf(?BYTE_ORDER_MARK_2, State, fun parse_external_entity_1/2); -parse_external_entity_1(?BYTE_ORDER_MARK_REST(Rest), State) -> - parse_external_entity_1(Rest, State); parse_external_entity_1(?STRING("<") = Bytes, State) -> cf(Bytes, State, fun parse_external_entity_1/2); parse_external_entity_1(?STRING("<?") = Bytes, State) -> @@ -3290,7 +3280,7 @@ cf(Rest, #xmerl_sax_parser_state{continuation_fun = CFun, continuation_state = C catch throw:ErrorTerm -> ?fatal_error(State, ErrorTerm); - exit:Reason -> + exit:Reason -> ?fatal_error(State, {'EXIT', Reason}) end, case Result of diff --git a/lib/xmerl/src/xmerl_sax_parser_latin1.erlsrc b/lib/xmerl/src/xmerl_sax_parser_latin1.erlsrc index 961806bf4c..6e59347fb8 100644 --- a/lib/xmerl/src/xmerl_sax_parser_latin1.erlsrc +++ b/lib/xmerl/src/xmerl_sax_parser_latin1.erlsrc @@ -2,7 +2,7 @@ %%-------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2008-2016. All Rights Reserved. +%% Copyright Ericsson AB 2008-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -34,8 +34,36 @@ -define(APPEND_STRING(Rest, New), <<Rest/binary, New/binary>>). -define(TO_INPUT_FORMAT(Val), unicode:characters_to_binary(Val, unicode, latin1)). -%% STRING_REST and STRING_UNBOUND_REST is only different in the list case -define(STRING_UNBOUND_REST(MatchChar, Rest), <<MatchChar, Rest/binary>>). --define(BYTE_ORDER_MARK_1, undefined_bom1). --define(BYTE_ORDER_MARK_2, undefined_bom2). --define(BYTE_ORDER_MARK_REST(Rest), <<undefined, Rest/binary>>). + +-define(PARSE_BYTE_ORDER_MARK(Bytes, State), + parse_byte_order_mark(Bytes, State) -> + parse_xml_decl(Bytes, State)). + +-define(PARSE_XML_DECL(Bytes, State), + parse_xml_decl(Bytes, #xmerl_sax_parser_state{encoding=Enc} = State) when is_binary(Bytes) -> + case unicode:characters_to_list(Bytes, Enc) of + {incomplete, _, _} -> + cf(Bytes, State, fun parse_xml_decl/2); + {error, _Encoded, _Rest} -> + ?fatal_error(State, lists:flatten(io_lib:format("Bad character, not in ~p\n", [Enc]))); + _ -> + parse_prolog(Bytes, State) + end; + parse_xml_decl(Bytes, State) -> + parse_prolog(Bytes, State)). 
+ +-define(WHITESPACE(Bytes, State, Acc), + whitespace(?STRING_UNBOUND_REST(_C, _) = Bytes, State, Acc) -> + {lists:reverse(Acc), Bytes, State}; + whitespace(Bytes, #xmerl_sax_parser_state{encoding=Enc} = State, Acc) when is_binary(Bytes) -> + case unicode:characters_to_list(Bytes, Enc) of + {incomplete, _, _} -> + cf(Bytes, State, Acc, fun whitespace/3); + {error, _Encoded, _Rest} -> + ?fatal_error(State, lists:flatten(io_lib:format("Bad character, not in ~p\n", [Enc]))) + end). + +-define(PARSE_EXTERNAL_ENTITY_BYTE_ORDER_MARK(Bytes, State), + parse_external_entity_byte_order_mark(Bytes, State) -> + parse_external_entity_1(Bytes, State)). diff --git a/lib/xmerl/src/xmerl_sax_parser_list.erlsrc b/lib/xmerl/src/xmerl_sax_parser_list.erlsrc index 624a621d92..6a4435b1d9 100644 --- a/lib/xmerl/src/xmerl_sax_parser_list.erlsrc +++ b/lib/xmerl/src/xmerl_sax_parser_list.erlsrc @@ -36,6 +36,19 @@ %% In the list case we can't use a '++' when matchin against an unbound variable -define(STRING_UNBOUND_REST(MatchChar, Rest), [MatchChar | Rest]). --define(BYTE_ORDER_MARK_1, undefined_bom1). --define(BYTE_ORDER_MARK_2, undefined_bom2). --define(BYTE_ORDER_MARK_REST(Rest), [undefined|Rest]). + +-define(PARSE_BYTE_ORDER_MARK(Bytes, State), + parse_byte_order_mark(Bytes, State) -> + parse_xml_decl(Bytes, State)). + +-define(PARSE_XML_DECL(Bytes, State), + parse_xml_decl(Bytes, State) -> + parse_prolog(Bytes, State)). + +-define(WHITESPACE(Bytes, State, Acc), + whitespace(?STRING_UNBOUND_REST(_C, _) = Bytes, State, Acc) -> + {lists:reverse(Acc), Bytes, State}). + +-define(PARSE_EXTERNAL_ENTITY_BYTE_ORDER_MARK(Bytes, State), + parse_external_entity_byte_order_mark(Bytes, State) -> + parse_external_entity_1(Bytes, State)). diff --git a/lib/xmerl/src/xmerl_sax_parser_utf16be.erlsrc b/lib/xmerl/src/xmerl_sax_parser_utf16be.erlsrc index ff84ece97a..ec89024729 100644 --- a/lib/xmerl/src/xmerl_sax_parser_utf16be.erlsrc +++ b/lib/xmerl/src/xmerl_sax_parser_utf16be.erlsrc @@ -2,7 +2,7 @@ %%-------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2008-2016. All Rights Reserved. +%% Copyright Ericsson AB 2008-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -34,8 +34,50 @@ -define(APPEND_STRING(Rest, New), <<Rest/binary, New/binary>>). -define(TO_INPUT_FORMAT(Val), unicode:characters_to_binary(Val, unicode, {utf16, big})). -%% STRING_REST and STRING_UNBOUND_REST is only different in the list case -define(STRING_UNBOUND_REST(MatchChar, Rest), <<MatchChar/big-utf16, Rest/binary>>). --define(BYTE_ORDER_MARK_1, undefined_bom1). --define(BYTE_ORDER_MARK_2, <<16#FE>>). +-define(BYTE_ORDER_MARK_1, <<16#FE>>). -define(BYTE_ORDER_MARK_REST(Rest), <<16#FE, 16#FF, Rest/binary>>). + +-define(PARSE_BYTE_ORDER_MARK(Bytes, State), + parse_byte_order_mark(?STRING_EMPTY, State) -> + cf(?STRING_EMPTY, State, fun parse_byte_order_mark/2); + parse_byte_order_mark(?BYTE_ORDER_MARK_1, State) -> + cf(?BYTE_ORDER_MARK_1, State, fun parse_byte_order_mark/2); + parse_byte_order_mark(?BYTE_ORDER_MARK_REST(Rest), State) -> + parse_xml_decl(Rest, State); + parse_byte_order_mark(Bytes, State) -> + parse_xml_decl(Bytes, State)). 
+ +-define(PARSE_XML_DECL(Bytes, State), + parse_xml_decl(Bytes, #xmerl_sax_parser_state{encoding=Enc} = State) when is_binary(Bytes) -> + case unicode:characters_to_list(Bytes, Enc) of + {incomplete, _, _} -> + cf(Bytes, State, fun parse_xml_decl/2); + {error, _Encoded, _Rest} -> + ?fatal_error(State, lists:flatten(io_lib:format("Bad character, not in ~p\n", [Enc]))); + _ -> + parse_prolog(Bytes, State) + end; + parse_xml_decl(Bytes, State) -> + parse_prolog(Bytes, State)). + +-define(WHITESPACE(Bytes, State, Acc), + whitespace(?STRING_UNBOUND_REST(_C, _) = Bytes, State, Acc) -> + {lists:reverse(Acc), Bytes, State}; + whitespace(Bytes, #xmerl_sax_parser_state{encoding=Enc} = State, Acc) when is_binary(Bytes) -> + case unicode:characters_to_list(Bytes, Enc) of + {incomplete, _, _} -> + cf(Bytes, State, Acc, fun whitespace/3); + {error, _Encoded, _Rest} -> + ?fatal_error(State, lists:flatten(io_lib:format("Bad character, not in ~p\n", [Enc]))) + end). + +-define(PARSE_EXTERNAL_ENTITY_BYTE_ORDER_MARK(Bytes, State), + parse_external_entity_byte_order_mark(?STRING_EMPTY, State) -> + cf(?STRING_EMPTY, State, fun parse_external_entity_byte_order_mark/2); + parse_external_entity_byte_order_mark(?BYTE_ORDER_MARK_1, State) -> + cf(?BYTE_ORDER_MARK_1, State, fun parse_external_entity_byte_order_mark/2); + parse_external_entity_byte_order_mark(?BYTE_ORDER_MARK_REST(Rest), State) -> + parse_external_entity_1(Rest, State); + parse_external_entity_byte_order_mark(Bytes, State) -> + parse_external_entity_1(Bytes, State)). diff --git a/lib/xmerl/src/xmerl_sax_parser_utf16le.erlsrc b/lib/xmerl/src/xmerl_sax_parser_utf16le.erlsrc index a330fce8d0..566333a045 100644 --- a/lib/xmerl/src/xmerl_sax_parser_utf16le.erlsrc +++ b/lib/xmerl/src/xmerl_sax_parser_utf16le.erlsrc @@ -2,7 +2,7 @@ %%-------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2008-2016. All Rights Reserved. +%% Copyright Ericsson AB 2008-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -34,8 +34,50 @@ -define(APPEND_STRING(Rest, New), <<Rest/binary, New/binary>>). -define(TO_INPUT_FORMAT(Val), unicode:characters_to_binary(Val, unicode, {utf16, little})). -%% STRING_REST and STRING_UNBOUND_REST is only different in the list case -define(STRING_UNBOUND_REST(MatchChar, Rest), <<MatchChar/little-utf16, Rest/binary>>). --define(BYTE_ORDER_MARK_1, undefined_bom1). --define(BYTE_ORDER_MARK_2, <<16#FF>>). +-define(BYTE_ORDER_MARK_1, <<16#FF>>). -define(BYTE_ORDER_MARK_REST(Rest), <<16#FF, 16#FE, Rest/binary>>). + +-define(PARSE_BYTE_ORDER_MARK(Bytes, State), + parse_byte_order_mark(?STRING_EMPTY, State) -> + cf(?STRING_EMPTY, State, fun parse_byte_order_mark/2); + parse_byte_order_mark(?BYTE_ORDER_MARK_1, State) -> + cf(?BYTE_ORDER_MARK_1, State, fun parse_byte_order_mark/2); + parse_byte_order_mark(?BYTE_ORDER_MARK_REST(Rest), State) -> + parse_xml_decl(Rest, State); + parse_byte_order_mark(Bytes, State) -> + parse_xml_decl(Bytes, State)). 
+ +-define(PARSE_XML_DECL(Bytes, State), + parse_xml_decl(Bytes, #xmerl_sax_parser_state{encoding=Enc} = State) when is_binary(Bytes) -> + case unicode:characters_to_list(Bytes, Enc) of + {incomplete, _, _} -> + cf(Bytes, State, fun parse_xml_decl/2); + {error, _Encoded, _Rest} -> + ?fatal_error(State, lists:flatten(io_lib:format("Bad character, not in ~p\n", [Enc]))); + _ -> + parse_prolog(Bytes, State) + end; + parse_xml_decl(Bytes, State) -> + parse_prolog(Bytes, State)). + +-define(WHITESPACE(Bytes, State, Acc), + whitespace(?STRING_UNBOUND_REST(_C, _) = Bytes, State, Acc) -> + {lists:reverse(Acc), Bytes, State}; + whitespace(Bytes, #xmerl_sax_parser_state{encoding=Enc} = State, Acc) when is_binary(Bytes) -> + case unicode:characters_to_list(Bytes, Enc) of + {incomplete, _, _} -> + cf(Bytes, State, Acc, fun whitespace/3); + {error, _Encoded, _Rest} -> + ?fatal_error(State, lists:flatten(io_lib:format("Bad character, not in ~p\n", [Enc]))) + end). + +-define(PARSE_EXTERNAL_ENTITY_BYTE_ORDER_MARK(Bytes, State), + parse_external_entity_byte_order_mark(?STRING_EMPTY, State) -> + cf(?STRING_EMPTY, State, fun parse_external_entity_byte_order_mark/2); + parse_external_entity_byte_order_mark(?BYTE_ORDER_MARK_1, State) -> + cf(?BYTE_ORDER_MARK_1, State, fun parse_external_entity_byte_order_mark/2); + parse_external_entity_byte_order_mark(?BYTE_ORDER_MARK_REST(Rest), State) -> + parse_external_entity_1(Rest, State); + parse_external_entity_byte_order_mark(Bytes, State) -> + parse_external_entity_1(Bytes, State)). diff --git a/lib/xmerl/src/xmerl_sax_parser_utf8.erlsrc b/lib/xmerl/src/xmerl_sax_parser_utf8.erlsrc index d46d60d237..f41d06d013 100644 --- a/lib/xmerl/src/xmerl_sax_parser_utf8.erlsrc +++ b/lib/xmerl/src/xmerl_sax_parser_utf8.erlsrc @@ -2,7 +2,7 @@ %%-------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2008-2016. All Rights Reserved. +%% Copyright Ericsson AB 2008-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -34,11 +34,55 @@ -define(APPEND_STRING(Rest, New), <<Rest/binary, New/binary>>). -define(TO_INPUT_FORMAT(Val), unicode:characters_to_binary(Val, unicode, utf8)). - -%% STRING_REST and STRING_UNBOUND_REST is only different in the list case -define(STRING_UNBOUND_REST(MatchChar, Rest), <<MatchChar/utf8, Rest/binary>>). -define(BYTE_ORDER_MARK_1, <<16#EF>>). -define(BYTE_ORDER_MARK_2, <<16#EF, 16#BB>>). -define(BYTE_ORDER_MARK_REST(Rest), <<16#EF, 16#BB, 16#BF, Rest/binary>>). +-define(PARSE_BYTE_ORDER_MARK(Bytes, State), + parse_byte_order_mark(?STRING_EMPTY, State) -> + cf(?STRING_EMPTY, State, fun parse_byte_order_mark/2); + parse_byte_order_mark(?BYTE_ORDER_MARK_1, State) -> + cf(?BYTE_ORDER_MARK_1, State, fun parse_byte_order_mark/2); + parse_byte_order_mark(?BYTE_ORDER_MARK_2, State) -> + cf(?BYTE_ORDER_MARK_2, State, fun parse_byte_order_mark/2); + parse_byte_order_mark(?BYTE_ORDER_MARK_REST(Rest), State) -> + parse_xml_decl(Rest, State); + parse_byte_order_mark(Bytes, State) -> + parse_xml_decl(Bytes, State)). 
+ +-define(PARSE_XML_DECL(Bytes, State), + parse_xml_decl(Bytes, #xmerl_sax_parser_state{encoding=Enc} = State) when is_binary(Bytes) -> + case unicode:characters_to_list(Bytes, Enc) of + {incomplete, _, _} -> + cf(Bytes, State, fun parse_xml_decl/2); + {error, _Encoded, _Rest} -> + ?fatal_error(State, lists:flatten(io_lib:format("Bad character, not in ~p\n", [Enc]))); + _ -> + parse_prolog(Bytes, State) + end; + parse_xml_decl(Bytes, State) -> + parse_prolog(Bytes, State)). + +-define(WHITESPACE(Bytes, State, Acc), + whitespace(?STRING_UNBOUND_REST(_C, _) = Bytes, State, Acc) -> + {lists:reverse(Acc), Bytes, State}; + whitespace(Bytes, #xmerl_sax_parser_state{encoding=Enc} = State, Acc) when is_binary(Bytes) -> + case unicode:characters_to_list(Bytes, Enc) of + {incomplete, _, _} -> + cf(Bytes, State, Acc, fun whitespace/3); + {error, _Encoded, _Rest} -> + ?fatal_error(State, lists:flatten(io_lib:format("Bad character, not in ~p\n", [Enc]))) + end). +-define(PARSE_EXTERNAL_ENTITY_BYTE_ORDER_MARK(Bytes, State), + parse_external_entity_byte_order_mark(?STRING_EMPTY, State) -> + cf(?STRING_EMPTY, State, fun parse_external_entity_byte_order_mark/2); + parse_external_entity_byte_order_mark(?BYTE_ORDER_MARK_1, State) -> + cf(?BYTE_ORDER_MARK_1, State, fun parse_external_entity_byte_order_mark/2); + parse_external_entity_byte_order_mark(?BYTE_ORDER_MARK_2, State) -> + cf(?BYTE_ORDER_MARK_2, State, fun parse_external_entity_byte_order_mark/2); + parse_external_entity_byte_order_mark(?BYTE_ORDER_MARK_REST(Rest), State) -> + parse_external_entity_1(Rest, State); + parse_external_entity_byte_order_mark(Bytes, State) -> + parse_external_entity_1(Bytes, State)). diff --git a/lib/xmerl/src/xmerl_scan.erl b/lib/xmerl/src/xmerl_scan.erl index 9f6b27113e..95dc82e5c9 100644 --- a/lib/xmerl/src/xmerl_scan.erl +++ b/lib/xmerl/src/xmerl_scan.erl @@ -2309,7 +2309,9 @@ expanded_name(Name, [], #xmlNamespace{default = URI}, S) -> expanded_name(Name, N = {"xmlns", Local}, #xmlNamespace{nodes = Ns}, S) -> {_, Value} = lists:keyfind(Local, 1, Ns), case Name of - 'xmlns:xml' when Value =/= 'http://www.w3.org/XML/1998/namespace' -> + 'xmlns:xml' when Value =:= 'http://www.w3.org/XML/1998/namespace' -> + N; + 'xmlns:xml' when Value =/= 'http://www.w3.org/XML/1998/namespace' -> ?fatal({xml_prefix_cannot_be_redeclared, Value}, S); 'xmlns:xmlns' -> ?fatal({xmlns_prefix_cannot_be_declared, Value}, S); @@ -2323,6 +2325,8 @@ expanded_name(Name, N = {"xmlns", Local}, #xmlNamespace{nodes = Ns}, S) -> N end end; +expanded_name(_Name, {"xml", Local}, _NS, _S) -> + {'http://www.w3.org/XML/1998/namespace', list_to_atom(Local)}; expanded_name(_Name, {Prefix, Local}, #xmlNamespace{nodes = Ns}, S) -> case lists:keysearch(Prefix, 1, Ns) of {value, {_, URI}} -> @@ -2333,9 +2337,6 @@ expanded_name(_Name, {Prefix, Local}, #xmlNamespace{nodes = Ns}, S) -> ?fatal({namespace_prefix_not_declared, Prefix}, S) end. 
- - - keyreplaceadd(K, Pos, [H|T], Obj) when K == element(Pos, H) -> [Obj|T]; keyreplaceadd(K, Pos, [H|T], Obj) -> diff --git a/lib/xmerl/test/Makefile b/lib/xmerl/test/Makefile index 7a326e334f..b13fee05b3 100644 --- a/lib/xmerl/test/Makefile +++ b/lib/xmerl/test/Makefile @@ -55,7 +55,8 @@ SUITE_FILES= \ xmerl_xsd_SUITE.erl \ xmerl_xsd_MS2002-01-16_SUITE.erl \ xmerl_xsd_NIST2002-01-16_SUITE.erl \ - xmerl_xsd_Sun2002-01-16_SUITE.erl + xmerl_xsd_Sun2002-01-16_SUITE.erl \ + xmerl_sax_stream_SUITE.erl XML_FILES= \ testcases.dtd \ @@ -125,4 +126,5 @@ release_tests_spec: opt @tar cfh - xmerl_xsd_MS2002-01-16_SUITE_data | (cd "$(RELSYSDIR)"; tar xf -) @tar cfh - xmerl_xsd_NIST2002-01-16_SUITE_data | (cd "$(RELSYSDIR)"; tar xf -) @tar cfh - xmerl_xsd_Sun2002-01-16_SUITE_data | (cd "$(RELSYSDIR)"; tar xf -) + @tar cfh - xmerl_sax_stream_SUITE_data | (cd "$(RELSYSDIR)"; tar xf -) chmod -R u+w "$(RELSYSDIR)" diff --git a/lib/xmerl/test/xmerl_SUITE.erl b/lib/xmerl/test/xmerl_SUITE.erl index cf7c0b7548..58c462483c 100644 --- a/lib/xmerl/test/xmerl_SUITE.erl +++ b/lib/xmerl/test/xmerl_SUITE.erl @@ -55,7 +55,7 @@ groups() -> {misc, [], [latin1_alias, syntax_bug1, syntax_bug2, syntax_bug3, pe_ref1, copyright, testXSEIF, export_simple1, export, - default_attrs_bug]}, + default_attrs_bug, xml_ns]}, {eventp_tests, [], [sax_parse_and_export]}, {ticket_tests, [], [ticket_5998, ticket_7211, ticket_7214, ticket_7430, @@ -237,7 +237,36 @@ default_attrs_bug(Config) -> {#xmlElement{attributes = [#xmlAttribute{name = b, value = "also explicit"}, #xmlAttribute{name = a, value = "explicit"}]}, [] - } = xmerl_scan:string(Doc2, [{default_attrs, true}]). + } = xmerl_scan:string(Doc2, [{default_attrs, true}]), + ok. + + +xml_ns(Config) -> + Doc = "<?xml version='1.0'?>\n" + "<doc xml:attr1=\"implicit xml ns\"/>", + {#xmlElement{namespace=#xmlNamespace{default = [], nodes = []}, + attributes = [#xmlAttribute{name = 'xml:attr1', + expanded_name = {'http://www.w3.org/XML/1998/namespace',attr1}, + nsinfo = {"xml","attr1"}, + namespace = #xmlNamespace{default = [], nodes = []}}]}, + [] + } = xmerl_scan:string(Doc, [{namespace_conformant, true}]), + Doc2 = "<?xml version='1.0'?>\n" + "<doc xmlns:xml=\"http://www.w3.org/XML/1998/namespace\" xml:attr1=\"explicit xml ns\"/>", + {#xmlElement{namespace=#xmlNamespace{default = [], nodes = [{"xml",'http://www.w3.org/XML/1998/namespace'}]}, + attributes = [#xmlAttribute{name = 'xmlns:xml', + expanded_name = {"xmlns","xml"}, + nsinfo = {"xmlns","xml"}, + namespace = #xmlNamespace{default = [], + nodes = [{"xml",'http://www.w3.org/XML/1998/namespace'}]}}, + #xmlAttribute{name = 'xml:attr1', + expanded_name = {'http://www.w3.org/XML/1998/namespace',attr1}, + nsinfo = {"xml","attr1"}, + namespace = #xmlNamespace{default = [], + nodes = [{"xml",'http://www.w3.org/XML/1998/namespace'}]}}]}, + [] + } = xmerl_scan:string(Doc2, [{namespace_conformant, true}]), + ok. 
pe_ref1(Config) -> file:set_cwd(datadir(Config)), diff --git a/lib/xmerl/test/xmerl_sax_SUITE.erl b/lib/xmerl/test/xmerl_sax_SUITE.erl index f5c0a783c4..7d1a70905c 100644 --- a/lib/xmerl/test/xmerl_sax_SUITE.erl +++ b/lib/xmerl/test/xmerl_sax_SUITE.erl @@ -85,17 +85,17 @@ ticket_11551(_Config) -> <a>hej</a> <?xml version=\"1.0\" encoding=\"utf-8\" ?> <a>hej</a>">>, - {ok, undefined, <<"<?xml", _/binary>>} = xmerl_sax_parser:stream(Stream1, []), + {ok, undefined, <<"\n<?xml", _/binary>>} = xmerl_sax_parser:stream(Stream1, []), Stream2= <<"<?xml version=\"1.0\" encoding=\"utf-8\" ?> <a>hej</a> <?xml version=\"1.0\" encoding=\"utf-8\" ?> <a>hej</a>">>, - {ok, undefined, <<"<?xml", _/binary>>} = xmerl_sax_parser:stream(Stream2, []), + {ok, undefined, <<"\n\n\n<?xml", _/binary>>} = xmerl_sax_parser:stream(Stream2, []), Stream3= <<"<a>hej</a> <?xml version=\"1.0\" encoding=\"utf-8\" ?> <a>hej</a>">>, - {ok, undefined, <<"<?xml", _/binary>>} = xmerl_sax_parser:stream(Stream3, []), + {ok, undefined, <<"\n\n<?xml", _/binary>>} = xmerl_sax_parser:stream(Stream3, []), ok. diff --git a/lib/xmerl/test/xmerl_sax_std_SUITE.erl b/lib/xmerl/test/xmerl_sax_std_SUITE.erl index 525a3b175a..b8412206cc 100644 --- a/lib/xmerl/test/xmerl_sax_std_SUITE.erl +++ b/lib/xmerl/test/xmerl_sax_std_SUITE.erl @@ -2,7 +2,7 @@ %%---------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2016. All Rights Reserved. +%% Copyright Ericsson AB 2010-2017. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -507,11 +507,8 @@ end_per_testcase(_Func,_Config) -> 'not-wf-sa-036'(Config) -> file:set_cwd(xmerl_test_lib:get_data_dir(Config)), Path = filename:join([xmerl_test_lib:get_data_dir(Config),"xmltest","not-wf/sa/036.xml"]), - %% Special case becase we returns everything after a legal document - %% as an rest instead of giving and error to let the user handle - %% multipple docs on a stream. - {ok,_,<<"Illegal data\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]). - %%check_result(R, "not-wf"). + R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]), + check_result(R, "not-wf"). %%---------------------------------------------------------------------- %% Test Case @@ -522,11 +519,8 @@ end_per_testcase(_Func,_Config) -> 'not-wf-sa-037'(Config) -> file:set_cwd(xmerl_test_lib:get_data_dir(Config)), Path = filename:join([xmerl_test_lib:get_data_dir(Config),"xmltest","not-wf/sa/037.xml"]), - %% Special case becase we returns everything after a legal document - %% as an rest instead of giving and error to let the user handle - %% multipple docs on a stream. - {ok,_,<<" \r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]). - %%check_result(R, "not-wf"). + R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]), + check_result(R, "not-wf"). %%---------------------------------------------------------------------- %% Test Case @@ -561,11 +555,8 @@ end_per_testcase(_Func,_Config) -> 'not-wf-sa-040'(Config) -> file:set_cwd(xmerl_test_lib:get_data_dir(Config)), Path = filename:join([xmerl_test_lib:get_data_dir(Config),"xmltest","not-wf/sa/040.xml"]), - %% Special case becase we returns everything after a legal document - %% as an rest instead of giving and error to let the user handle - %% multipple docs on a stream. 
- {ok,_,<<"<doc></doc>\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]). - %%check_result(R, "not-wf"). + R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]), + check_result(R, "not-wf"). %%---------------------------------------------------------------------- %% Test Case @@ -576,11 +567,8 @@ end_per_testcase(_Func,_Config) -> 'not-wf-sa-041'(Config) -> file:set_cwd(xmerl_test_lib:get_data_dir(Config)), Path = filename:join([xmerl_test_lib:get_data_dir(Config),"xmltest","not-wf/sa/041.xml"]), - %% Special case becase we returns everything after a legal document - %% as an rest instead of giving and error to let the user handle - %% multipple docs on a stream. - {ok,_,<<"<doc></doc>\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]). - %%check_result(R, "not-wf"). + R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]), + check_result(R, "not-wf"). %%---------------------------------------------------------------------- %% Test Case @@ -603,11 +591,8 @@ end_per_testcase(_Func,_Config) -> 'not-wf-sa-043'(Config) -> file:set_cwd(xmerl_test_lib:get_data_dir(Config)), Path = filename:join([xmerl_test_lib:get_data_dir(Config),"xmltest","not-wf/sa/043.xml"]), - %% Special case becase we returns everything after a legal document - %% as an rest instead of giving and error to let the user handle - %% multipple docs on a stream. - {ok,_,<<"Illegal data\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]). - %%check_result(R, "not-wf"). + R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]), + check_result(R, "not-wf"). %%---------------------------------------------------------------------- %% Test Case @@ -618,11 +603,8 @@ end_per_testcase(_Func,_Config) -> 'not-wf-sa-044'(Config) -> file:set_cwd(xmerl_test_lib:get_data_dir(Config)), Path = filename:join([xmerl_test_lib:get_data_dir(Config),"xmltest","not-wf/sa/044.xml"]), - %% Special case becase we returns everything after a legal document - %% as an rest instead of giving and error to let the user handle - %% multipple docs on a stream. - {ok,_,<<"<doc/>\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]). - %%check_result(R, "not-wf"). + R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]), + check_result(R, "not-wf"). %%---------------------------------------------------------------------- %% Test Case @@ -669,11 +651,8 @@ end_per_testcase(_Func,_Config) -> 'not-wf-sa-048'(Config) -> file:set_cwd(xmerl_test_lib:get_data_dir(Config)), Path = filename:join([xmerl_test_lib:get_data_dir(Config),"xmltest","not-wf/sa/048.xml"]), - %% Special case becase we returns everything after a legal document - %% as an rest instead of giving and error to let the user handle - %% multipple docs on a stream. - {ok,_,<<"<![CDATA[]]>\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]). - %%check_result(R, "not-wf"). + R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]), + check_result(R, "not-wf"). %%---------------------------------------------------------------------- %% Test Case @@ -1416,11 +1395,8 @@ end_per_testcase(_Func,_Config) -> 'not-wf-sa-110'(Config) -> file:set_cwd(xmerl_test_lib:get_data_dir(Config)), Path = filename:join([xmerl_test_lib:get_data_dir(Config),"xmltest","not-wf/sa/110.xml"]), - %% Special case becase we returns everything after a legal document - %% as an rest instead of giving and error to let the user handle - %% multipple docs on a stream. 
- {ok,_,<<"&e;\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]). - %%check_result(R, "not-wf"). + R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]), + check_result(R, "not-wf"). %%---------------------------------------------------------------------- %% Test Case @@ -1914,9 +1890,9 @@ end_per_testcase(_Func,_Config) -> %% Special case becase we returns everything after a legal document %% as an rest instead of giving and error to let the user handle %% multipple docs on a stream. - {ok,_,<<"<?xml version=\"1.0\"?>\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]). - % R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]), - % check_result(R, "not-wf"). + %{ok,_,<<"<?xml version=\"1.0\"?>\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]). + R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]), + check_result(R, "not-wf"). %%---------------------------------------------------------------------- %% Test Case @@ -7784,11 +7760,8 @@ end_per_testcase(_Func,_Config) -> 'o-p01fail3'(Config) -> file:set_cwd(xmerl_test_lib:get_data_dir(Config)), Path = filename:join([xmerl_test_lib:get_data_dir(Config),"oasis","p01fail3.xml"]), - %% Special case becase we returns everything after a legal document - %% as an rest instead of giving and error to let the user handle - %% multipple docs on a stream. - {ok,_, <<"<bad/>", _/binary>>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]). - %%check_result(R, "not-wf"). + R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]), + check_result(R, "not-wf"). %%---------------------------------------------------------------------- %% Test Case @@ -11417,12 +11390,8 @@ end_per_testcase(_Func,_Config) -> 'ibm-not-wf-P01-ibm01n02'(Config) -> file:set_cwd(xmerl_test_lib:get_data_dir(Config)), Path = filename:join([xmerl_test_lib:get_data_dir(Config),"ibm","not-wf/P01/ibm01n02.xml"]), - %% Special case becase we returns everything after a legal document - %% as an rest instead of giving and error to let the user handle - %% multipple docs on a stream. - {ok,_, <<"<?xml version=\"1.0\"?>", _/binary>>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]). - % R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]), - % check_result(R, "not-wf"). + R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]), + check_result(R, "not-wf"). %%---------------------------------------------------------------------- %% Test Case @@ -11433,11 +11402,8 @@ end_per_testcase(_Func,_Config) -> 'ibm-not-wf-P01-ibm01n03'(Config) -> file:set_cwd(xmerl_test_lib:get_data_dir(Config)), Path = filename:join([xmerl_test_lib:get_data_dir(Config),"ibm","not-wf/P01/ibm01n03.xml"]), - %% Special case becase we returns everything after a legal document - %% as an rest instead of giving and error to let the user handle - %% multipple docs on a stream. - {ok,_, <<"<title>Wrong combination!</title>", _/binary>>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]). - %%check_result(R, "not-wf"). + R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]), + check_result(R, "not-wf"). 
%%---------------------------------------------------------------------- %% Test Cases @@ -13027,11 +12993,8 @@ end_per_testcase(_Func,_Config) -> 'ibm-not-wf-P27-ibm27n01'(Config) -> file:set_cwd(xmerl_test_lib:get_data_dir(Config)), Path = filename:join([xmerl_test_lib:get_data_dir(Config),"ibm","not-wf/P27/ibm27n01.xml"]), - %% Special case becase we returns everything after a legal document - %% as an rest instead of giving and error to let the user handle - %% multipple docs on a stream. - {ok,_, <<"<!ELEMENT cat EMPTY>">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]). - %%check_result(R, "not-wf"). + R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]), + check_result(R, "not-wf"). %%---------------------------------------------------------------------- %% Test Cases @@ -13461,11 +13424,8 @@ end_per_testcase(_Func,_Config) -> 'ibm-not-wf-P39-ibm39n06'(Config) -> file:set_cwd(xmerl_test_lib:get_data_dir(Config)), Path = filename:join([xmerl_test_lib:get_data_dir(Config),"ibm","not-wf/P39/ibm39n06.xml"]), - %% Special case becase we returns everything after a legal document - %% as an rest instead of giving and error to let the user handle - %% multipple docs on a stream. - {ok,_,<<"content after end tag\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]). - %%check_result(R, "not-wf"). + R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]), + check_result(R, "not-wf"). %%---------------------------------------------------------------------- %% Test Cases diff --git a/lib/xmerl/test/xmerl_sax_stream_SUITE.erl b/lib/xmerl/test/xmerl_sax_stream_SUITE.erl new file mode 100644 index 0000000000..a306eb66a2 --- /dev/null +++ b/lib/xmerl/test/xmerl_sax_stream_SUITE.erl @@ -0,0 +1,245 @@ +%%-*-erlang-*- +%%---------------------------------------------------------------------- +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2017. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% %CopyrightEnd% +%%---------------------------------------------------------------------- +%% File : xmerl_sax_stream_SUITE.erl +%%---------------------------------------------------------------------- +-module(xmerl_sax_stream_SUITE). +-compile(export_all). + +%%---------------------------------------------------------------------- +%% Include files +%%---------------------------------------------------------------------- +-include_lib("common_test/include/ct.hrl"). +-include_lib("kernel/include/file.hrl"). + +%%====================================================================== +%% External functions +%%====================================================================== + +%%---------------------------------------------------------------------- +%% Initializations +%%---------------------------------------------------------------------- +all() -> + [ + one_document, + two_documents, + one_document_and_junk + ]. 
+ +%%---------------------------------------------------------------------- +%% Initializations +%%---------------------------------------------------------------------- + +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + ok. + +init_per_testcase(_TestCase, Config) -> + Config. + +end_per_testcase(_Func, _Config) -> + ok. + +%%---------------------------------------------------------------------- +%% Tests +%%---------------------------------------------------------------------- +one_document(Config) -> + Port = 11111, + + {ok, ListenSocket} = listen(Port), + Self = self(), + + spawn( + fun() -> + case catch gen_tcp:accept(ListenSocket) of + {ok, S} -> + Result = xmerl_sax_parser:stream(<<>>, + [{continuation_state, S}, + {continuation_fun, + fun(Sd) -> + io:format("Continuation called!!", []), + case gen_tcp:recv(Sd, 0) of + {ok, Packet} -> + io:format("Packet: ~p\n", [Packet]), + {Packet, Sd}; + {error, Reason} -> + throw({error, Reason}) + end + end}]), + Self ! {xmerl_sax, Result}, + close(S); + Error -> + Self ! {xmerl_sax, {error, {accept, Error}}} + end + end), + + {ok, SendSocket} = connect(localhost, Port), + + {ok, Binary} = file:read_file(filename:join([datadir(Config), "xmerl_sax_stream_one.xml"])), + + send_chunks(SendSocket, Binary), + + receive + {xmerl_sax, {ok, undefined, Rest}} -> + <<"\n">> = Rest, + io:format("Ok Rest: ~p\n", [Rest]) + after 5000 -> + ct:fail("Timeout") + end, + ok. + +two_documents(Config) -> + Port = 11111, + + {ok, ListenSocket} = listen(Port), + Self = self(), + + spawn( + fun() -> + case catch gen_tcp:accept(ListenSocket) of + {ok, S} -> + Result = xmerl_sax_parser:stream(<<>>, + [{continuation_state, S}, + {continuation_fun, + fun(Sd) -> + io:format("Continuation called!!", []), + case gen_tcp:recv(Sd, 0) of + {ok, Packet} -> + io:format("Packet: ~p\n", [Packet]), + {Packet, Sd}; + {error, Reason} -> + throw({error, Reason}) + end + end}]), + Self ! {xmerl_sax, Result}, + close(S); + Error -> + Self ! {xmerl_sax, {error, {accept, Error}}} + end + end), + + {ok, SendSocket} = connect(localhost, Port), + + {ok, Binary} = file:read_file(filename:join([datadir(Config), "xmerl_sax_stream_two.xml"])), + + send_chunks(SendSocket, Binary), + + receive + {xmerl_sax, {ok, undefined, Rest}} -> + <<"\n<?x", _R/binary>> = Rest, + io:format("Ok Rest: ~p\n", [Rest]) + after 5000 -> + ct:fail("Timeout") + end, + ok. + +one_document_and_junk(Config) -> + Port = 11111, + + {ok, ListenSocket} = listen(Port), + Self = self(), + + spawn( + fun() -> + case catch gen_tcp:accept(ListenSocket) of + {ok, S} -> + Result = xmerl_sax_parser:stream(<<>>, + [{continuation_state, S}, + {continuation_fun, + fun(Sd) -> + io:format("Continuation called!!", []), + case gen_tcp:recv(Sd, 0) of + {ok, Packet} -> + io:format("Packet: ~p\n", [Packet]), + {Packet, Sd}; + {error, Reason} -> + throw({error, Reason}) + end + end}]), + Self ! {xmerl_sax, Result}, + close(S); + Error -> + Self ! {xmerl_sax, {error, {accept, Error}}} + end + end), + + {ok, SendSocket} = connect(localhost, Port), + + {ok, Binary} = file:read_file(filename:join([datadir(Config), "xmerl_sax_stream_one_junk.xml"])), + + send_chunks(SendSocket, Binary), + + receive + {xmerl_sax, {ok, undefined, Rest}} -> + <<"\nth", _R/binary>> = Rest, + io:format("Ok Rest: ~p\n", [Rest]) + after 10000 -> + ct:fail("Timeout") + end, + ok. 
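%% All three test cases above drive xmerl_sax_parser:stream/2 through the same
%% continuation mechanism: when the parser runs out of input it calls the
%% continuation fun with the current continuation state, and the fun must
%% return {MoreBytes, NewState}. A condensed sketch of that contract, using
%% the same gen_tcp-based state as the tests (the function name is
%% illustrative only):
stream_from_socket(Socket) ->
    ContinuationFun =
        fun(Sd) ->
                case gen_tcp:recv(Sd, 0) of
                    {ok, Packet}    -> {Packet, Sd};           %% hand more bytes to the parser
                    {error, Reason} -> throw({error, Reason})  %% abort the parse
                end
        end,
    xmerl_sax_parser:stream(<<>>, [{continuation_state, Socket},
                                   {continuation_fun, ContinuationFun}]).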
+ +%%---------------------------------------------------------------------- +%% Utility functions +%%---------------------------------------------------------------------- +listen(Port) -> + case catch gen_tcp:listen(Port, [{active, false}, + binary, + {keepalive, true}, + {reuseaddr,true}]) of + {ok, ListenSocket} -> + {ok, ListenSocket}; + {error, Reason} -> + {error, {listen, Reason}} + end. + +close(Socket) -> + (catch gen_tcp:close(Socket)). + +connect(Host, Port) -> + Timeout = 5000, + % Options1 = check_options(Options), + Options = [binary], + case catch gen_tcp:connect(Host, Port, Options, Timeout) of + {ok, Socket} -> + {ok, Socket}; + {error, Reason} -> + {error, Reason} + end. + +send_chunks(Socket, Binary) -> + BSize = erlang:size(Binary), + if + BSize > 25 -> + <<Head:25/binary, Tail/binary>> = Binary, + case gen_tcp:send(Socket, Head) of + ok -> + timer:sleep(1000), + send_chunks(Socket, Tail); + {error,closed} -> + ok + end; + true -> + gen_tcp:send(Socket, Binary) + end. + +datadir(Config) -> + proplists:get_value(data_dir, Config). diff --git a/lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_one.xml b/lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_one.xml new file mode 100644 index 0000000000..30328bb188 --- /dev/null +++ b/lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_one.xml @@ -0,0 +1,17 @@ +<?xml version="1.0"?> +<person> +<name> +Arne Andersson +</name> +<address> +<street> + Old Road 456 +</street> +<zip> +12323 +</zip> +<city> +Small City +</city> +</address> +</person> diff --git a/lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_one_junk.xml b/lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_one_junk.xml new file mode 100644 index 0000000000..f730a95865 --- /dev/null +++ b/lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_one_junk.xml @@ -0,0 +1,18 @@ +<?xml version="1.0"?> +<person> +<name> +Arne Andersson +</name> +<address> +<street> + Old Road 456 +</street> +<zip> +12323 +</zip> +<city> +Small City +</city> +</address> +</person> +this is junk ...... diff --git a/lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_two.xml b/lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_two.xml new file mode 100644 index 0000000000..e241a02190 --- /dev/null +++ b/lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_two.xml @@ -0,0 +1,34 @@ +<?xml version="1.0"?> +<person> +<name> +Arne Andersson +</name> +<address> +<street> + Old Road 456 +</street> +<zip> +12323 +</zip> +<city> +Small City +</city> +</address> +</person> +<?xml version="1.0"?> +<person> +<name> +Bertil Bengtson +</name> +<address> +<street> + New Road 4 +</street> +<zip> +12328 +</zip> +<city> +Small City +</city> +</address> +</person> diff --git a/lib/xmerl/vsn.mk b/lib/xmerl/vsn.mk index 95adaa5bb0..1515a4e37d 100644 --- a/lib/xmerl/vsn.mk +++ b/lib/xmerl/vsn.mk @@ -1 +1 @@ -XMERL_VSN = 1.3.12 +XMERL_VSN = 1.3.13 |
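%% The new xmerl_sax_stream_two.xml data file above holds two documents back
%% to back, and the two_documents test case shows stream/2 returning the
%% second one in Rest. A caller expecting several documents on one connection
%% can therefore loop over Rest; a hedged sketch (parse_all/1 and its empty
%% option list are illustrative, and a real caller would also skip whitespace
%% or junk left in Rest before recursing):
parse_all(<<>>) ->
    [];
parse_all(Bytes) ->
    case xmerl_sax_parser:stream(Bytes, []) of
        {ok, EventState, Rest} ->
            [EventState | parse_all(Rest)];
        Error ->
            %% Trailing junk or a malformed document ends the loop.
            [Error]
    end.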