Diffstat (limited to 'lib')
594 files changed, 37566 insertions, 21099 deletions
diff --git a/lib/asn1/src/Makefile b/lib/asn1/src/Makefile index 6798da0072..40f440423d 100644 --- a/lib/asn1/src/Makefile +++ b/lib/asn1/src/Makefile @@ -206,6 +206,7 @@ $(EBIN)/asn1ct_constructed_per.beam: asn1ct_constructed_per.erl asn1_records.hrl $(EBIN)/asn1ct_func.beam: asn1ct_func.erl $(EBIN)/asn1ct_gen.beam: asn1ct_gen.erl asn1_records.hrl $(EBIN)/asn1ct_gen_ber_bin_v2.beam: asn1ct_gen_ber_bin_v2.erl asn1_records.hrl +$(EBIN)/asn1ct_gen_check.beam: asn1_records.hrl $(EBIN)/asn1ct_gen_per.beam: asn1ct_gen_per.erl asn1_records.hrl $(EBIN)/asn1ct_gen_per_rt2ct.beam: asn1ct_gen_per_rt2ct.erl asn1_records.hrl $(EBIN)/asn1ct_imm.beam: asn1ct_imm.erl diff --git a/lib/asn1/src/asn1_db.erl b/lib/asn1/src/asn1_db.erl index 48d9dd16d7..5577969727 100644 --- a/lib/asn1/src/asn1_db.erl +++ b/lib/asn1/src/asn1_db.erl @@ -19,7 +19,8 @@ %% -module(asn1_db). --export([dbstart/1,dbnew/2,dbload/1,dbload/3,dbsave/2,dbput/3,dbget/2]). +-export([dbstart/1,dbnew/2,dbload/1,dbload/3,dbsave/2,dbput/2, + dbput/3,dbget/2]). -export([dbstop/0]). -record(state, {parent, monitor, includes, table}). @@ -44,6 +45,7 @@ dbload(Module) -> dbnew(Module, Erule) -> req({new, Module, Erule}). dbsave(OutFile, Module) -> cast({save, OutFile, Module}). dbput(Module, K, V) -> cast({set, Module, K, V}). +dbput(Module, Kvs) -> cast({set, Module, Kvs}). dbget(Module, K) -> req({get, Module, K}). dbstop() -> Resp = req(stop), erase(?MODULE), Resp. @@ -82,6 +84,10 @@ loop(#state{parent = Parent, monitor = MRef, table = Table, [{_, Modtab}] = ets:lookup(Table, Mod), ets:insert(Modtab, {K2, V}), loop(State); + {set, Mod, Kvs} -> + [{_, Modtab}] = ets:lookup(Table, Mod), + ets:insert(Modtab, Kvs), + loop(State); {From, {get, Mod, K2}} -> %% XXX If there is no information for Mod, get_table/3 %% will attempt to load information from an .asn1db diff --git a/lib/asn1/src/asn1_records.hrl b/lib/asn1/src/asn1_records.hrl index 6c1cf1b12a..84435b2d21 100644 --- a/lib/asn1/src/asn1_records.hrl +++ b/lib/asn1/src/asn1_records.hrl @@ -81,9 +81,19 @@ module :: atom(), val :: atom()}). --record(state,{module,mname,type,tname,value,vname,erule,parameters=[], - inputmodules,abscomppath=[],recordtopname=[],options, - sourcedir}). +-record(state, + {module, + mname, + tname, + erule, + parameters=[], + inputmodules=[], + abscomppath=[], + recordtopname=[], + options, + sourcedir, + error_context %Top-level thingie (contains line numbers) + }). %% state record used by back-end at partial decode %% active is set to 'yes' when a partial decode function is generated. diff --git a/lib/asn1/src/asn1ct.erl b/lib/asn1/src/asn1ct.erl index df341e5aab..a26d63c97d 100644 --- a/lib/asn1/src/asn1ct.erl +++ b/lib/asn1/src/asn1ct.erl @@ -34,7 +34,8 @@ %% Application internal exports -export([compile_asn/3,compile_asn1/3,compile_py/3,compile/3, vsn/0, - get_name_of_def/1,get_pos_of_def/1]). + get_name_of_def/1,get_pos_of_def/1, + unset_pos_mod/1]). -export([read_config_data/1,get_gen_state_field/1, partial_inc_dec_toptype/1,update_gen_state/2, get_tobe_refed_func/1,reset_gen_state/0,is_function_generated/1, @@ -166,46 +167,26 @@ set_scan_parse_pass(#st{files=Files}=St) -> {error,St#st{error=Error}} end. 
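The asn1_db.erl hunk above adds a batch variant, dbput/2, next to the existing dbput/3: a whole list of key-value pairs is sent in one cast and handed straight to ets:insert/2, which accepts a list of objects as well as a single one. A minimal usage sketch (the module variable and key names here are hypothetical, not taken from the commit):

    %% Old style: one message per definition.
    asn1_db:dbput(Mod, 'T1', Def1),
    asn1_db:dbput(Mod, 'T2', Def2),

    %% New style: store all checked definitions in a single batch.
    asn1_db:dbput(Mod, [{'T1',Def1}, {'T2',Def2}]).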
-set_scan_parse_pass_1([F|Fs], St) -> +set_scan_parse_pass_1([F|Fs], #st{file=File}=St) -> case asn1ct_tok:file(F) of {error,Error} -> throw(Error); Tokens when is_list(Tokens) -> - case catch asn1ct_parser2:parse(Tokens) of + case asn1ct_parser2:parse(File, Tokens) of {ok,M} -> [M|set_scan_parse_pass_1(Fs, St)]; - {error,ErrorTerm} -> - throw(handle_parse_error(ErrorTerm, St)) + {error,Errors} -> + throw(Errors) end end; set_scan_parse_pass_1([], _) -> []. -parse_pass(#st{code=Tokens}=St) -> - case catch asn1ct_parser2:parse(Tokens) of +parse_pass(#st{file=File,code=Tokens}=St) -> + case asn1ct_parser2:parse(File, Tokens) of {ok,M} -> {ok,St#st{code=M}}; - {error,ErrorTerm} -> - {error,St#st{error=handle_parse_error(ErrorTerm, St)}} - end. - -handle_parse_error(ErrorTerm, #st{file=File,opts=Opts}) -> - case ErrorTerm of - {{Line,_Mod,Message},_TokTup} -> - if - is_integer(Line) -> - BaseName = filename:basename(File), - error("syntax error at line ~p in module ~s:~n", - [Line,BaseName], Opts); - true -> - error("syntax error in module ~p:~n", - [File], Opts) - end, - print_error_message(Message), - Message; - {Line,_Mod,[Message,Token]} -> - error("syntax error: ~p ~p at line ~p~n", - [Message,Token,Line], Opts), - {Line,[Message,Token]} + {error,Errors} -> + {error,St#st{error=Errors}} end. merge_pass(#st{file=Base,code=Code}=St) -> @@ -559,7 +540,10 @@ unset_pos_mod(Def) when is_record(Def,pvaluesetdef) -> unset_pos_mod(Def) when is_record(Def,pobjectdef) -> Def#pobjectdef{pos=undefined}; unset_pos_mod(Def) when is_record(Def,pobjectsetdef) -> - Def#pobjectsetdef{pos=undefined}. + Def#pobjectsetdef{pos=undefined}; +unset_pos_mod(#'ComponentType'{} = Def) -> + Def#'ComponentType'{pos=undefined}; +unset_pos_mod(Def) -> Def. get_pos_of_def(#typedef{pos=Pos}) -> Pos; @@ -1406,33 +1390,6 @@ prepare_bytes(Bytes) -> list_to_binary(Bytes). vsn() -> ?vsn. - - -print_error_message([got,H|T]) when is_list(H) -> - io:format(" got:"), - print_listing(H,"and"), - print_error_message(T); -print_error_message([expected,H|T]) when is_list(H) -> - io:format(" expected one of:"), - print_listing(H,"or"), - print_error_message(T); -print_error_message([H|T]) -> - io:format(" ~p",[H]), - print_error_message(T); -print_error_message([]) -> - io:format("~n"). - -print_listing([H1,H2|[]],AndOr) -> - io:format(" ~p ~s ~p",[H1,AndOr,H2]); -print_listing([H1,H2|T],AndOr) -> - io:format(" ~p,",[H1]), - print_listing([H2|T],AndOr); -print_listing([H],_AndOr) -> - io:format(" ~p",[H]); -print_listing([],_) -> - ok. - - specialized_decode_prepare(Erule,M,TsAndVs,Options) -> case lists:member(asn1config,Options) of true -> diff --git a/lib/asn1/src/asn1ct_check.erl b/lib/asn1/src/asn1ct_check.erl index 240f1cbb16..99392d6eaa 100644 --- a/lib/asn1/src/asn1ct_check.erl +++ b/lib/asn1/src/asn1ct_check.erl @@ -23,8 +23,6 @@ %% Main Module for ASN.1 compile time functions %-compile(export_all). -%% Avoid warning for local function error/1 clashing with autoimported BIF. --compile({no_auto_import,[error/1]}). -export([check/2,storeindb/2,format_error/1]). %-define(debug,1). -include("asn1_records.hrl"). @@ -60,17 +58,9 @@ -define(N_BMPString, 30). -define(TAG_PRIMITIVE(Num), - case S#state.erule of - ber -> - #tag{class='UNIVERSAL',number=Num,type='IMPLICIT',form=0}; - _ -> [] - end). + #tag{class='UNIVERSAL',number=Num,type='IMPLICIT',form=0}). -define(TAG_CONSTRUCTED(Num), - case S#state.erule of - ber -> - #tag{class='UNIVERSAL',number=Num,type='IMPLICIT',form=32}; - _ -> [] - end). 
+ #tag{class='UNIVERSAL',number=Num,type='IMPLICIT',form=32}). -record(newt,{type=unchanged,tag=unchanged,constraint=unchanged,inlined=no}). % used in check_type to update type and tag @@ -249,26 +239,18 @@ check_exports(S,Module = #module{}) -> {exports,all} -> []; {exports,ExportList} when is_list(ExportList) -> - IsNotDefined = + IsNotDefined = fun(X) -> - case catch get_referenced_type(S,X) of - {error,{asn1,_}} -> - true; - _ -> false + try + _ = get_referenced_type(S,X), + false + catch {error,_} -> + true end end, - case lists:filter(IsNotDefined,ExportList) of - [] -> - []; - NoDefExp -> - GetName = - fun(T = #'Externaltypereference'{type=N})-> - %%{exported,undefined,entity,N} - NewS=S#state{type=T,tname=N}, - error({export,"exported undefined entity",NewS}) - end, - lists:map(GetName,NoDefExp) - end + [return_asn1_error(S, Ext, {undefined_export, Undef}) || + Ext = #'Externaltypereference'{type=Undef} <- ExportList, + IsNotDefined(Ext)] end. check_imports(S, #module{imports={imports,Imports}}) -> @@ -276,53 +258,18 @@ check_imports(S, #module{imports={imports,Imports}}) -> check_imports_1(_S, [], Acc) -> Acc; -check_imports_1(S, [#'SymbolsFromModule'{symbols=Imports,module=ModuleRef}|SFMs], Acc0) -> +check_imports_1(S, [#'SymbolsFromModule'{symbols=Imports,module=ModuleRef}|SFMs], Acc) -> Module = name_of_def(ModuleRef), - Refs0 = [{catch get_referenced_type(S, Ref),Ref} || Ref <- Imports], - Refs = [{M,R} || {{M,_},R} <- Refs0], - {Illegal,Other} = lists:splitwith(fun({error,_}) -> true; - (_) -> false - end, Refs), - ChainedRefs = [R || {M,R} <- Other, M =/= Module], - IllegalRefs = [R || {error,R} <- Illegal] ++ - [R || {M,R} <- ChainedRefs, - ok =/= chained_import(S, Module, M, name_of_def(R))], - Acc = [return_asn1_error(S, Ref, {undefined_import,name_of_def(Ref),Module}) || - Ref <- IllegalRefs] ++ Acc0, - check_imports_1(S, SFMs, Acc). - -chained_import(S,ImpMod,DefMod,Name) -> - %% Name is a referenced structure that is not defined in ImpMod, - %% but must be present in the Imports list of ImpMod. The chain of - %% imports of Name must end in DefMod. - GetImports = - fun(_M_) -> - case asn1_db:dbget(_M_,'MODULE') of - #module{imports={imports,ImportList}} -> - ImportList; - _ -> [] - end - end, - FindNameInImports = - fun([],N,_) -> {no_mod,N}; - ([#'SymbolsFromModule'{symbols=Imports,module=ModuleRef}|SFMs],N,F) -> - case [name_of_def(X) || X <- Imports, name_of_def(X) =:= N] of - [] -> F(SFMs,N,F); - [N] -> {name_of_def(ModuleRef),N} - end - end, - case GetImports(ImpMod) of - [] -> - error; - Imps -> - case FindNameInImports(Imps,Name,FindNameInImports) of - {no_mod,_} -> - error; - {DefMod,_} -> ok; - {OtherMod,_} -> - chained_import(S,OtherMod,DefMod,Name) - end - end. + Refs = [{try get_referenced_type(S, Ref) + catch throw:Error -> Error end, + Ref} + || Ref <- Imports], + CreateError = fun(Ref) -> + Error = {undefined_import,name_of_def(Ref),Module}, + return_asn1_error(S, Ref, Error) + end, + Errors = [CreateError(Ref) || {{error, _}, Ref} <- Refs], + check_imports_1(S, SFMs, Errors ++ Acc). checkt(S0, Names) -> Check = fun do_checkt/3, @@ -335,7 +282,7 @@ checkt(S0, Names) -> check_fold(S0, lists:reverse(CtxtSwitch), Check) ++ Types. 
do_checkt(S, Name, #typedef{typespec=TypeSpec}=Type0) -> - NewS = S#state{type=Type0,tname=Name}, + NewS = S#state{tname=Name}, try check_type(NewS, Type0, TypeSpec) of #type{}=Ts -> case Type0#typedef.checked of @@ -350,7 +297,7 @@ do_checkt(S, Name, #typedef{typespec=TypeSpec}=Type0) -> end catch {error,Reason} -> - error({type,Reason,NewS}); + Reason; {asn1_class,_ClassDef} -> {asn1_class,Name}; pobjectsetdef -> @@ -384,33 +331,32 @@ do_checkv(S, Name, Value) is_record(Value, typedef); %Value set may be parsed as object set. is_record(Value, pvaluedef); is_record(Value, pvaluesetdef) -> - NewS = S#state{value=Value}, - try check_value(NewS, Value) of + try check_value(S, Value) of {valueset,VSet} -> Pos = asn1ct:get_pos_of_def(Value), CheckedVSDef = #typedef{checked=true,pos=Pos, name=Name,typespec=VSet}, - asn1_db:dbput(NewS#state.mname, Name, CheckedVSDef), + asn1_db:dbput(S#state.mname, Name, CheckedVSDef), {valueset,Name}; V -> %% update the valuedef - asn1_db:dbput(NewS#state.mname, Name, V), + asn1_db:dbput(S#state.mname, Name, V), ok catch {error,Reason} -> - error({value,Reason,NewS}); + Reason; {pobjectsetdef} -> {pobjectsetdef,Name}; {objectsetdef} -> {objectsetdef,Name}; - {objectdef} -> + {asn1_class, _} -> %% this is an object, save as typedef #valuedef{checked=C,pos=Pos,name=N,type=Type, value=Def} = Value, ClassName = Type#type.def, NewSpec = #'Object'{classname=ClassName,def=Def}, NewDef = #typedef{checked=C,pos=Pos,name=N,typespec=NewSpec}, - asn1_db:dbput(NewS#state.mname, Name, NewDef), + asn1_db:dbput(S#state.mname, Name, NewDef), {objectdef,Name} end. @@ -419,7 +365,7 @@ checkp(S, Names) -> check_fold(S, Names, fun do_checkp/3). do_checkp(S0, Name, #ptypedef{typespec=TypeSpec}=Type0) -> - S = S0#state{type=Type0,tname=Name}, + S = S0#state{tname=Name}, try check_ptype(S, Type0, TypeSpec) of #type{}=Ts -> Type = Type0#ptypedef{checked=true,typespec=Ts}, @@ -427,7 +373,7 @@ do_checkp(S0, Name, #ptypedef{typespec=TypeSpec}=Type0) -> ok catch {error,Reason} -> - error({type,Reason,S}); + Reason; {asn1_class,_ClassDef} -> {asn1_class,Name}; {asn1_param_class,_} -> @@ -438,100 +384,81 @@ do_checkp(S0, Name, #ptypedef{typespec=TypeSpec}=Type0) -> checkc(S, Names) -> check_fold(S, Names, fun do_checkc/3). -do_checkc(S0, Name, Class0) -> - {Class1,ClassSpec} = - case Class0 of - #classdef{} -> - {Class0,Class0}; - #typedef{} -> - {#classdef{name=Name},Class0#typedef.typespec} - end, - S = S0#state{type=Class0,tname=Name}, - try check_class(S, ClassSpec) of - C -> - Class = Class1#classdef{checked=true,typespec=C}, - asn1_db:dbput(S#state.mname, Name, Class), - ok - catch - {error,Reason} -> - error({class,Reason,S}) - end. +do_checkc(S, Name, Class) -> + try + case is_classname(Name) of + false -> + asn1_error(S, {illegal_class_name,Name}); + true -> + do_checkc_1(S, Name, Class) + end + catch {error,Reason} -> Reason + end. + +do_checkc_1(S, Name, #classdef{}=Class) -> + C = check_class(S, Class), + store_class(S, true, Class#classdef{typespec=C}, Name), + ok; +do_checkc_1(S, Name, #typedef{typespec=#type{def=Def}=TS}) -> + C = check_class(S, TS), + {Mod,Pos} = case Def of + #'Externaltypereference'{module=M, pos=P} -> + {M,P}; + {pt, #'Externaltypereference'{module=M, pos=P}, _} -> + {M,P} + end, + Class = #classdef{name=Name, typespec=C, pos=Pos, module=Mod}, + store_class(S, true, Class, Name), + ok. + +%% is_classname(Atom) -> true|false. 
+is_classname(Name) when is_atom(Name) -> + lists:all(fun($-) -> true; + (D) when $0 =< D, D =< $9 -> true; + (UC) when $A =< UC, UC =< $Z -> true; + (_) -> false + end, atom_to_list(Name)). -checko(S,[Name|Os],Acc,ExclO,ExclOS) -> - ?dbg("Checking object ~p~n",[Name]), - Result = - case asn1_db:dbget(S#state.mname,Name) of - undefined -> - error({type,{internal_error,'???'},S}); - Object when is_record(Object,typedef) -> - NewS = S#state{type=Object,tname=Name}, - case catch(check_object(NewS,Object,Object#typedef.typespec)) of - {error,Reason} -> - error({type,Reason,NewS}); - {'EXIT',Reason} -> - error({type,{internal_error,Reason},NewS}); - {asn1,Reason} -> - error({type,Reason,NewS}); - O -> - NewObj = Object#typedef{checked=true,typespec=O}, - asn1_db:dbput(NewS#state.mname,Name,NewObj), - if - is_record(O,'Object') -> - case O#'Object'.gen of - true -> - {ok,ExclO,ExclOS}; - false -> - {ok,[Name|ExclO],ExclOS} - end; - is_record(O,'ObjectSet') -> - case O#'ObjectSet'.gen of - true -> - {ok,ExclO,ExclOS}; - false -> - {ok,ExclO,[Name|ExclOS]} - end - end - end; - PObject when is_record(PObject,pobjectdef) -> - NewS = S#state{type=PObject,tname=Name}, - case (catch check_pobject(NewS,PObject)) of - {error,Reason} -> - error({type,Reason,NewS}); - {'EXIT',Reason} -> - error({type,{internal_error,Reason},NewS}); - {asn1,Reason} -> - error({type,Reason,NewS}); - PO -> - NewPObj = PObject#pobjectdef{def=PO}, - asn1_db:dbput(NewS#state.mname,Name,NewPObj), - {ok,[Name|ExclO],ExclOS} - end; - PObjSet when is_record(PObjSet,pvaluesetdef) -> - %% this is a parameterized object set. Might be a parameterized - %% value set, couldn't it? - NewS = S#state{type=PObjSet,tname=Name}, - case (catch check_pobjectset(NewS,PObjSet)) of - {error,Reason} -> - error({type,Reason,NewS}); - {'EXIT',Reason} -> - error({type,{internal_error,Reason},NewS}); - {asn1,Reason} -> - error({type,Reason,NewS}); - POS -> - %%NewPObjSet = PObjSet#pvaluesetdef{valueset=POS}, - asn1_db:dbput(NewS#state.mname,Name,POS), - {ok,ExclO,[Name|ExclOS]} - end - end, - case Result of - {ok,NewExclO,NewExclOS} -> - checko(S,Os,Acc,NewExclO,NewExclOS); - _ -> - checko(S,Os,[Result|Acc],ExclO,ExclOS) +checko(S0,[Name|Os],Acc,ExclO,ExclOS) -> + Item = asn1_db:dbget(S0#state.mname, Name), + S = S0#state{error_context=Item}, + try checko_1(S, Item, Name, ExclO, ExclOS) of + {NewExclO,NewExclOS} -> + checko(S, Os, Acc, NewExclO, NewExclOS) + catch + throw:{error, Error} -> + checko(S, Os, [Error|Acc], ExclO, ExclOS) end; checko(_S,[],Acc,ExclO,ExclOS) -> {lists:reverse(Acc),lists:reverse(ExclO),lists:reverse(ExclOS)}. +checko_1(S, #typedef{typespec=TS}=Object, Name, ExclO, ExclOS) -> + NewS = S#state{tname=Name}, + O = check_object(NewS, Object, TS), + NewObj = Object#typedef{checked=true,typespec=O}, + asn1_db:dbput(NewS#state.mname, Name, NewObj), + case O of + #'Object'{gen=true} -> + {ExclO,ExclOS}; + #'Object'{gen=false} -> + {[Name|ExclO],ExclOS}; + #'ObjectSet'{gen=true} -> + {ExclO,ExclOS}; + #'ObjectSet'{gen=false} -> + {ExclO,[Name|ExclOS]} + end; +checko_1(S, #pobjectdef{}=PObject, Name, ExclO, ExclOS) -> + NewS = S#state{tname=Name}, + PO = check_pobject(NewS, PObject), + NewPObj = PObject#pobjectdef{def=PO}, + asn1_db:dbput(NewS#state.mname, Name, NewPObj), + {[Name|ExclO],ExclOS}; +checko_1(S, #pvaluesetdef{}=PObjSet, Name, ExclO, ExclOS) -> + NewS = S#state{tname=Name}, + POS = check_pobjectset(NewS, PObjSet), + asn1_db:dbput(NewS#state.mname, Name, POS), + {ExclO,[Name|ExclOS]}. 
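The new is_classname/1 helper above enforces the ASN.1 spelling rules for object class references: only upper-case letters, digits and hyphens are accepted, and do_checkc reports illegal_class_name otherwise. A rough illustration, using made-up names:

    is_classname('ERROR-CLASS').   %% true
    is_classname('My-Class').      %% false, lower-case letters are rejected
    is_classname('MY_CLASS').      %% false, '_' is not in the accepted character set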
+ check_class(S,CDef=#classdef{checked=Ch,name=Name,typespec=TS}) -> case Ch of true -> TS; @@ -551,22 +478,16 @@ check_class(S = #state{mname=M,tname=T},ClassSpec) Tref = #'Externaltypereference'{type=TName} -> {MName,RefType} = get_referenced_type(S,Tref), #classdef{} = CD = get_class_def(S, RefType), - NewState = update_state(S#state{type=RefType, - tname=TName}, MName), + NewState = update_state(S#state{tname=TName}, MName), check_class(NewState, CD); {pt,ClassRef,Params} -> %% parameterized class {_,PClassDef} = get_referenced_type(S,ClassRef), - NewParaList = - [match_parameters(S,TmpParam,S#state.parameters)|| - TmpParam <- Params], + NewParaList = match_parameters(S, Params), instantiate_pclass(S,PClassDef,NewParaList) end; -check_class(S,C) when is_record(C,objectclass) -> - NewFieldSpec = check_class_fields(S,C#objectclass.fields), - C#objectclass{fields=NewFieldSpec}; -check_class(_S,{poc,_ObjSet,_Params}) -> - 'fix this later'; +check_class(S, #objectclass{}=C) -> + check_objectclass(S, C); check_class(S,ClassName) -> {RefMod,Def} = get_referenced_type(S,ClassName), case Def of @@ -579,8 +500,7 @@ check_class(S,ClassName) -> false -> Name=ClassName#'Externaltypereference'.type, store_class(S,idle,ClassDef,Name), -% NewS = S#state{mname=RefMod,type=Def,tname=Name}, - NewS = update_state(S#state{type=Def,tname=Name},RefMod), + NewS = update_state(S#state{tname=Name}, RefMod), CheckedTS = check_class(NewS,ClassDef#classdef.typespec), store_class(S,true,ClassDef#classdef{typespec=CheckedTS},Name), CheckedTS @@ -594,11 +514,20 @@ check_class(S,ClassName) -> end end. +check_objectclass(S, #objectclass{fields=Fs0,syntax=Syntax0}=C) -> + Fs = check_class_fields(S, Fs0), + case Syntax0 of + {'WITH SYNTAX',Syntax1} -> + Syntax = preprocess_syntax(S, Syntax1, Fs), + C#objectclass{fields=Fs,syntax={preprocessed_syntax,Syntax}}; + _ -> + C#objectclass{fields=Fs} + end. + instantiate_pclass(S=#state{parameters=_OldArgs},PClassDef,Params) -> #ptypedef{args=Args,typespec=Type} = PClassDef, MatchedArgs = match_args(S,Args, Params, []), -% NewS = S#state{type=Type,parameters=MatchedArgs++OldArgs,abscomppath=[]}, - NewS = S#state{type=Type,parameters=MatchedArgs,abscomppath=[]}, + NewS = S#state{parameters=MatchedArgs,abscomppath=[]}, check_class(NewS,#classdef{name=S#state.tname,typespec=Type}). 
store_class(S,Mode,ClassDef,ClassName) -> @@ -613,6 +542,12 @@ check_class_fields(S,[F|Fields],Acc) -> case element(1,F) of fixedtypevaluefield -> {_,Name,Type,Unique,OSpec} = F, + case {Unique,OSpec} of + {'UNIQUE',{'DEFAULT',_}} -> + asn1_error(S, {unique_and_default,Name}); + {_,_} -> + ok + end, RefType = check_type(S,#typedef{typespec=Type},Type), {fixedtypevaluefield,Name,RefType,Unique,OSpec}; object_or_fixedtypevalue_field -> @@ -621,7 +556,7 @@ check_class_fields(S,[F|Fields],Acc) -> Cat = case asn1ct_gen:type(asn1ct_gen:get_inner(Type2#type.def)) of Def when is_record(Def,'Externaltypereference') -> - {_,D} = get_referenced_type(S,Def), + {_,D} = get_referenced_type(S, Def, true), D; {undefined,user} -> %% neither of {primitive,bif} or {constructed,bif} @@ -644,18 +579,14 @@ check_class_fields(S,[F|Fields],Acc) -> objectset_or_fixedtypevalueset_field -> {_,Name,Type,OSpec} = F, RefType = - case (catch check_type(S,#typedef{typespec=Type},Type)) of - {asn1_class,_ClassDef} -> + try check_type(S,#typedef{typespec=Type},Type) of + #type{} = CheckedType -> + CheckedType + catch {asn1_class,_ClassDef} -> case if_current_checked_type(S,Type) of - true -> - Type#type.def; - _ -> - check_class(S,Type) - end; - CheckedType when is_record(CheckedType,type) -> - CheckedType; - _ -> - error({class,"internal error, check_class_fields",S}) + true -> Type#type.def; + _ -> check_class(S,Type) + end end, if is_record(RefType,'Externaltypereference') -> @@ -733,38 +664,34 @@ check_pobjectset(S,PObjSet) -> PObjSet end. +-record(osi, %Object set information. + {st, + classref, + uniq, + ext + }). + check_object(_S,ObjDef,ObjSpec) when (ObjDef#typedef.checked == true) -> ObjSpec; check_object(S,_ObjDef,#'Object'{classname=ClassRef,def=ObjectDef}) -> ?dbg("check_object ~p~n",[ObjectDef]), -%% io:format("check_object,object: ~p~n",[ObjectDef]), -% {MName,_ClassDef} = get_referenced_type(S,ClassRef), - NewClassRef = check_externaltypereference(S,ClassRef), - ClassDef = - case get_referenced_type(S,ClassRef) of - {MName,ClDef=#classdef{checked=false}} -> - NewState = update_state(S#state{type=ClDef, - tname=ClassRef#'Externaltypereference'.type},MName), - ObjClass= - check_class(NewState,ClDef), - #classdef{checked=true, - typespec=ObjClass}; - {_,_ClDef} when is_record(_ClDef,classdef) -> - _ClDef; - {MName,_TDef=#typedef{checked=false,pos=Pos, - name=_TName,typespec=TS}} -> - ClDef = #classdef{pos=Pos,name=_TName,typespec=TS}, - NewState = update_state(S#state{type=_TDef, - tname=ClassRef#'Externaltypereference'.type},MName), - ObjClass = - check_class(NewState,ClDef), - ClDef#classdef{checked=true,typespec=ObjClass}; - {_,_ClDef} -> - _ClDef + _ = check_externaltypereference(S,ClassRef), + {ClassDef, NewClassRef} = + case get_referenced_type(S, ClassRef, true) of + {MName,#classdef{checked=false, name=CLName}=ClDef} -> + Type = ClassRef#'Externaltypereference'.type, + NewState = update_state(S#state{tname=Type}, MName), + ObjClass = check_class(NewState, ClDef), + {ClDef#classdef{checked=true, typespec=ObjClass}, + #'Externaltypereference'{module=MName, type=CLName}}; + {MName,#classdef{name=CLName}=ClDef} -> + {ClDef, #'Externaltypereference'{module=MName, type=CLName}}; + _ -> + asn1_error(S, illegal_object) end, NewObj = case ObjectDef of - Def when is_tuple(Def), (element(1,Def)==object) -> + {object,_,_}=Def -> NewSettingList = check_objectdefn(S,Def,ClassDef), #'Object'{def=NewSettingList}; {po,{object,DefObj},ArgsList} -> @@ -778,425 +705,287 @@ 
check_object(S,_ObjDef,#'Object'{classname=ClassRef,def=ObjectDef}) -> instantiate_po(S,ClassDef,Object,ArgList); #'Externalvaluereference'{} -> {_,Object} = get_referenced_type(S,ObjectDef), - check_object(S,Object,Object#typedef.typespec); + check_object(S, Object, object_to_check(S, Object)); [] -> - %% An object with no fields. All class fields must be - %% optional or default. Check that all fields in - %% class are 'OPTIONAL' or 'DEFAULT' - class_fields_optional_check(S,ClassDef), - #'Object'{def={object,defaultsyntax,[]}}; - _ -> - exit({error,{no_object,ObjectDef},S}) + %% An object with no fields (parsed as a value). + Def = {object,defaultsyntax,[]}, + NewSettingList = check_objectdefn(S, Def, ClassDef), + #'Object'{def=NewSettingList}; + _ -> + asn1_error(S, illegal_object) end, - Gen = gen_incl(S,NewObj#'Object'.def, - (ClassDef#classdef.typespec)#objectclass.fields), + Fields = (ClassDef#classdef.typespec)#objectclass.fields, + Gen = gen_incl(S,NewObj#'Object'.def, Fields), NewObj#'Object'{classname=NewClassRef,gen=Gen}; - - -check_object(S, - _ObjSetDef, - ObjSet=#'ObjectSet'{class=ClassRef}) -> -%% io:format("check_object,SET: ~p~n",[ObjSet#'ObjectSet'.set]), - ?dbg("check_object set: ~p~n",[ObjSet#'ObjectSet'.set]), - {_,ClassDef} = get_referenced_type(S,ClassRef), - NewClassRef = check_externaltypereference(S,ClassRef), - {UniqueFieldName,UniqueInfo} = - case (catch get_unique_fieldname(S,ClassDef)) of - {error,'__undefined_',_} -> - {{unique,undefined},{unique,undefined}}; - {asn1,Msg,_} -> error({class,Msg,S}); - {'EXIT',Msg} -> error({class,{internal_error,Msg},S}); +check_object(S, _, #'ObjectSet'{class=ClassRef0,set=Set0}=ObjSet0) -> + {_,ClassDef} = get_referenced_type(S, ClassRef0), + ClassRef = check_externaltypereference(S, ClassRef0), + {UniqueFieldName,UniqueInfo} = + case get_unique_fieldname(S, ClassDef) of + no_unique -> {{unique,undefined},{unique,undefined}}; Other -> {element(1,Other),Other} end, - NewObjSet= - case prepare_objset(ObjSet#'ObjectSet'.set) of - {set,SET,EXT} -> - CheckedSet = check_object_list(S,NewClassRef,SET), - NewSet = get_unique_valuelist(S,CheckedSet,UniqueInfo), - ObjSet#'ObjectSet'{uniquefname=UniqueFieldName, - set=extensionmark(NewSet,EXT)}; - - {'SingleValue',ERef = #'Externalvaluereference'{}} -> - {RefedMod,ObjDef} = get_referenced_type(S,ERef), - #'Object'{def=CheckedObj} = - check_object(S,ObjDef,ObjDef#typedef.typespec), - - NewSet = get_unique_valuelist(S,[{{RefedMod,get_datastr_name(ObjDef)}, - CheckedObj}], - UniqueInfo), - ObjSet#'ObjectSet'{uniquefname=UniqueFieldName, - set=NewSet}; - ['EXTENSIONMARK'] -> - ObjSet#'ObjectSet'{uniquefname=UniqueFieldName, - set=['EXTENSIONMARK']}; - - OSref when is_record(OSref,'Externaltypereference') -> - {_,OS=#typedef{typespec=OSdef}} = get_referenced_type(S,OSref), - check_object(S,OS,OSdef); - - {Type,{'EXCEPT',Exclusion}} when is_record(Type,type) -> - {_,TDef} = get_referenced_type(S,Type#type.def), - OS = TDef#typedef.typespec, - NewSet = reduce_objectset(OS#'ObjectSet'.set,Exclusion), - NewOS = OS#'ObjectSet'{set=NewSet}, - check_object(S,TDef#typedef{typespec=NewOS}, - NewOS); - #type{def={pt,DefinedObjSet,ParamList}} -> - {_,PObjSetDef} = get_referenced_type(S,DefinedObjSet), - NewParamList = - [match_parameters(S,TmpParam,S#state.parameters)|| - TmpParam <- ParamList], - instantiate_pos(S,ClassRef,PObjSetDef,NewParamList); - - %% actually this is an ObjectSetFromObjects construct, it - %% is when the object set is retrieved from an object - %% field. 
- #type{def=#'ObjectClassFieldType'{classname=ObjName, - fieldname=FieldName}} -> - {RefedObjMod,TDef} = get_referenced_type(S,ObjName), - OS=TDef#typedef.typespec, - %% should get the right object set here. Get the field - %% FieldName out of the object set OS of class - %% OS#'ObjectSet'.class - OS2=check_object(S,TDef,OS), - NewSet=object_set_from_objects(S,RefedObjMod,FieldName,OS2), - ObjSet#'ObjectSet'{uniquefname=UniqueFieldName, - set=NewSet}; - {'ObjectSetFromObjects',{_,_,ObjName},FieldName} -> - {RefedObjMod,TDef} = get_referenced_type(S,ObjName), - OS=TDef#typedef.typespec, - %% should get the right object set here. Get the field - %% FieldName out of the object set OS of class - %% OS#'ObjectSet'.class - OS2=check_object(S,TDef,OS), - NewSet=object_set_from_objects(S,RefedObjMod,FieldName,OS2), - ObjSet#'ObjectSet'{uniquefname=UniqueFieldName, - set=NewSet}; - {'ObjectSetFromObjects',{_,ObjName},FieldName} -> - %% This is a ObjectSetFromObjects, i.e. - %% ObjectSetFromObjects ::= ReferencedObjects "." FieldName - %% with a defined object as ReferencedObjects. And - %% the FieldName of the Class (object) contains an object set. - {RefedObjMod,TDef} = get_referenced_type(S,ObjName), - O1 = TDef#typedef.typespec, - O2 = check_object(S,TDef,O1), - NewSet = object_set_from_objects(S,RefedObjMod,FieldName,O2), - OS2=ObjSet#'ObjectSet'{uniquefname=UniqueFieldName, - set=NewSet}, - %%io:format("ObjectSet: ~p~n",[OS2]), - OS2; - {pos,{objectset,_,DefinedObjSet},Params} -> - {_,PObjSetDef} = get_referenced_type(S,DefinedObjSet), - NewParamList = - [match_parameters(S,TmpParam,S#state.parameters)|| - TmpParam <- Params], - instantiate_pos(S,ClassRef,PObjSetDef,NewParamList); - Unknown -> - exit({error,{unknown_object_set,Unknown},S}) - end, - NewSet2 = remove_duplicate_objects(NewObjSet#'ObjectSet'.set), - NewObjSet2 = NewObjSet#'ObjectSet'{set=NewSet2}, - Gen = gen_incl_set(S,NewObjSet2#'ObjectSet'.set, - ClassDef), - ?dbg("check_object done~n",[]), - NewObjSet2#'ObjectSet'{class=NewClassRef,gen=Gen}. + OSI0 = #osi{st=S,classref=ClassRef,uniq=UniqueInfo,ext=false}, + {Set1,OSI1} = if + is_list(Set0) -> + check_object_set_list(Set0, OSI0); + true -> + check_object_set(Set0, OSI0) + end, + Ext = case Set1 of + [] -> + %% FIXME: X420 does not compile unless we force + %% empty sets to be extensible. There should be + %% a better way. + true; + [_|_] -> + OSI1#osi.ext + end, + Set2 = remove_duplicate_objects(S, Set1), + Set = case Ext of + false -> Set2; + true -> Set2 ++ ['EXTENSIONMARK'] + end, + ObjSet = ObjSet0#'ObjectSet'{uniquefname=UniqueFieldName,set=Set}, + Gen = gen_incl_set(S, Set, ClassDef), + ObjSet#'ObjectSet'{class=ClassRef,gen=Gen}. 
+ +check_object_set({element_set,Root0,Ext0}, OSI0) -> + OSI = case Ext0 of + none -> OSI0; + _ -> OSI0#osi{ext=true} + end, + case {Root0,Ext0} of + {empty,empty} -> {[],OSI}; + {empty,Ext} -> check_object_set(Ext, OSI); + {Root,none} -> check_object_set(Root, OSI); + {Root,empty} -> check_object_set(Root, OSI); + {Root,Ext} -> check_object_set_list([Root,Ext], OSI) + end; +check_object_set(#'Externaltypereference'{}=Ref, #osi{st=S}=OSI) -> + {_,#typedef{typespec=OSdef}=OS} = get_referenced_type(S, Ref), + ObjectSet = check_object(S, OS, OSdef), + check_object_set_objset(ObjectSet, OSI); +check_object_set(#'Externalvaluereference'{}=Ref, #osi{st=S}=OSI) -> + {RefedMod,ObjName,#'Object'{def=Def}} = check_referenced_object(S, Ref), + ObjList = check_object_set_mk(RefedMod, ObjName, Def, OSI), + {ObjList,OSI}; +check_object_set({'EXCEPT',Incl0,Excl0}, OSI) -> + {Incl1,_} = check_object_set(Incl0, OSI), + {Excl1,_} = check_object_set(Excl0, OSI), + Exclude = sofs:set([N || {N,_} <- Excl1], [name]), + Incl2 = [{Name,Obj} || {Name,_,_}=Obj <- Incl1], + Incl3 = sofs:relation(Incl2, [{name,object}]), + Incl4 = sofs:drestriction(Incl3, Exclude), + Incl5 = sofs:to_external(Incl4), + Incl = [Obj || {_,Obj} <- Incl5], + {Incl,OSI}; +check_object_set({object,_,_}=Obj0, OSI) -> + #osi{st=S,classref=ClassRef} = OSI, + #'Object'{def=Def} = + check_object(S, #typedef{typespec=Obj0}, + #'Object'{classname=ClassRef,def=Obj0}), + ObjList = check_object_set_mk(Def, OSI), + {ObjList,OSI}; +check_object_set(#'ObjectClassFieldType'{classname=ObjName, + fieldname=FieldNames}, + #osi{st=S}=OSI) -> + Set = check_ObjectSetFromObjects(S, ObjName, FieldNames), + check_object_set_objset_list(Set, OSI); +check_object_set({'ObjectSetFromObjects',Obj,FieldNames}, #osi{st=S}=OSI) -> + ObjName = element(tuple_size(Obj), Obj), + Set = check_ObjectSetFromObjects(S, ObjName, FieldNames), + check_object_set_objset_list(Set, OSI); +check_object_set({pt,DefinedObjSet,ParamList0}, OSI) -> + #osi{st=S,classref=ClassRef} = OSI, + {_,PObjSetDef} = get_referenced_type(S, DefinedObjSet), + ParamList = match_parameters(S, ParamList0), + ObjectSet = instantiate_pos(S, ClassRef, PObjSetDef, ParamList), + check_object_set_objset(ObjectSet, OSI); +check_object_set({pos,{objectset,_,DefinedObjSet},Params0}, OSI) -> + #osi{st=S,classref=ClassRef} = OSI, + {_,PObjSetDef} = get_referenced_type(S, DefinedObjSet), + Params = match_parameters(S, Params0), + ObjectSet = instantiate_pos(S, ClassRef, PObjSetDef, Params), + check_object_set_objset(ObjectSet, OSI); +check_object_set({pv,{simpledefinedvalue,DefinedObject},Params}=PV, OSI) -> + #osi{st=S,classref=ClassRef} = OSI, + Args = match_parameters(S, Params), + #'Object'{def=Def} = + check_object(S, PV, + #'Object'{classname=ClassRef , + def={po,{object,DefinedObject},Args}}), + ObjList = check_object_set_mk(Def, OSI), + {ObjList,OSI}; +check_object_set({'SingleValue',Val}, OSI) -> + check_object_set(Val, OSI); +check_object_set({'ValueFromObject',{object,Object},FieldNames}, OSI) -> + #osi{st=S} = OSI, + case extract_field(S, Object, FieldNames) of + #'Object'{def=Def} -> + ObjList = check_object_set_mk(Def, OSI), + {ObjList,OSI}; + _ -> + asn1_error(S, illegal_object) + end; +check_object_set(#type{def=Def}, OSI) -> + check_object_set(Def, OSI); +check_object_set({union,A0,B0}, OSI0) -> + {A,OSI1} = check_object_set(A0, OSI0), + {B,OSI} = check_object_set(B0, OSI1), + {A++B,OSI}. 
+ +check_object_set_list([H|T], OSI0) -> + {Set0,OSI1} = check_object_set(H, OSI0), + {Set1,OSI2} = check_object_set_list(T, OSI1), + {Set0++Set1,OSI2}; +check_object_set_list([], OSI) -> + {[],OSI}. + +check_object_set_objset(#'ObjectSet'{set=Set}, OSI) -> + check_object_set_objset_list(Set, OSI). + +check_object_set_objset_list(Set, OSI) -> + check_object_set_objset_list_1(Set, OSI, []). + +check_object_set_objset_list_1(['EXTENSIONMARK'|T], OSI, Acc) -> + check_object_set_objset_list_1(T, OSI#osi{ext=true}, Acc); +check_object_set_objset_list_1([H|T], OSI, Acc) -> + check_object_set_objset_list_1(T, OSI, [H|Acc]); +check_object_set_objset_list_1([], OSI, Acc) -> + {Acc,OSI}. + +check_object_set_mk(Fields, OSI) -> + check_object_set_mk(no_mod, no_name, Fields, OSI). + +check_object_set_mk(M, N, Def, #osi{uniq={unique,undefined}}) -> + {_,_,Fields} = Def, + [{{M,N},no_unique_value,Fields}]; +check_object_set_mk(M, N, Def, #osi{uniq={UniqField,_}}) -> + {_,_,Fields} = Def, + case lists:keyfind(UniqField, 1, Fields) of + {UniqField,#valuedef{value=Val}} -> + [{{M,N},Val,Fields}]; + false -> + case Fields of + [{_,#typedef{typespec=#'ObjectSet'{set=['EXTENSIONMARK']}}}] -> + %% FIXME: If object is missing the unique field and + %% only contains a reference to an empty object set, + %% we will remove the entire object as a workaround + %% to get X420 to compile. There should be a better + %% way. + []; + _ -> + [{{M,N},no_unique_value,Fields}] + end + end. %% remove_duplicate_objects/1 remove duplicates of objects. %% For instance may Set contain objects of same class from %% different object sets that in fact might be duplicates. -remove_duplicate_objects(Set) when is_list(Set) -> - Pred = fun({A,B,_},{A,C,_}) when B =< C -> true; - ({A,_,_},{B,_,_}) when A < B -> true; - ('EXTENSIONMARK','EXTENSIONMARK') -> true; - (T,A) when is_tuple(T),is_atom(A) -> true;% EXTENSIONMARK last in list - (_,_) -> false - end, - lists:usort(Pred,Set). +remove_duplicate_objects(S, Set0) when is_list(Set0) -> + Set1 = [{Id,Orig} || {_,Id,_}=Orig <- Set0], + Set2 = sofs:relation(Set1), + Set3 = sofs:relation_to_family(Set2), + Set = sofs:to_external(Set3), + remove_duplicate_objects_1(S, Set). + +remove_duplicate_objects_1(S, [{no_unique_value,Objs}|T]) -> + Objs ++ remove_duplicate_objects_1(S, T); +remove_duplicate_objects_1(S, [{_,[_]=Objs}|T]) -> + Objs ++ remove_duplicate_objects_1(S, T); +remove_duplicate_objects_1(S, [{Id,[_|_]=Objs}|T]) -> + MakeSortable = fun(What) -> sortable_type(S, What) end, + Tagged = order_tag_set(Objs, MakeSortable), + case lists:ukeysort(1, Tagged) of + [{_,Obj}] -> + [Obj|remove_duplicate_objects_1(S, T)]; + [_|_] -> + asn1_error(S, {non_unique_object,Id}) + end; +remove_duplicate_objects_1(_, []) -> + []. -%% -extensionmark(L,true) -> - case lists:member('EXTENSIONMARK',L) of - true -> L; - _ -> L ++ ['EXTENSIONMARK'] +order_tag_set([{_, _, Fields}=Orig|Fs], Fun) -> + Pair = {[{FId, traverse(F, Fun)} || {FId, F} <- Fields], Orig}, + [Pair|order_tag_set(Fs, Fun)]; +order_tag_set([], _) -> []. + +sortable_type(S, #'Externaltypereference'{}=ERef) -> + try get_referenced_type(S, ERef) of + {_,#typedef{}=OI} -> + OI#typedef{pos=undefined,name=undefined} + catch + _:_ -> + ERef end; -extensionmark(L,_) -> - L. +sortable_type(_, #typedef{}=TD) -> + asn1ct:unset_pos_mod(TD#typedef{name=undefined}); +sortable_type(_, Type) -> + asn1ct:unset_pos_mod(Type). + +traverse(Structure0, Fun) -> + Structure = Fun(Structure0), + traverse_1(Structure, Fun). 
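remove_duplicate_objects/2 above now groups the object set with the sofs module: each object is keyed on its UNIQUE value, sofs:relation_to_family/1 collects all objects that share a key, and only genuinely conflicting objects for the same key end up as a {non_unique_object,Id} error. The grouping step on its own behaves roughly like this (toy data, not compiler records):

    R = sofs:relation([{1,objA}, {2,objB}, {1,objC}]),
    sofs:to_external(sofs:relation_to_family(R)).
    %% [{1,[objA,objC]},{2,[objB]}]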
+ +traverse_1(#typedef{typespec=TS0} = TD, Fun) -> + TS = traverse(TS0, Fun), + TD#typedef{typespec=TS}; +traverse_1(#valuedef{type=TS0} = VD, Fun) -> + TS = traverse(TS0, Fun), + VD#valuedef{type=TS}; +traverse_1(#type{def=TS0} = TD, Fun) -> + TS = traverse(TS0, Fun), + TD#type{def=TS}; +traverse_1(#'SEQUENCE'{components=Cs0} = Seq, Fun) -> + Cs = traverse_seq_set(Cs0, Fun), + Seq#'SEQUENCE'{components=Cs}; +traverse_1({'SEQUENCE OF',Type0}, Fun) -> + Type = traverse(Type0, Fun), + {'SEQUENCE OF',Type}; +traverse_1({'SET OF',Type0}, Fun) -> + Type = traverse(Type0, Fun), + {'SET OF',Type}; +traverse_1(#'SET'{components=Cs0} = Set, Fun) -> + Cs = traverse_seq_set(Cs0, Fun), + Set#'SET'{components=Cs}; +traverse_1({'CHOICE', Cs0}, Fun) -> + Cs = traverse_seq_set(Cs0, Fun), + {'CHOICE', Cs}; +traverse_1(Leaf, _) -> + Leaf. + +traverse_seq_set(List, Fun) when is_list(List) -> + traverse_seq_set_1(List, Fun); +traverse_seq_set({Set, Ext}, Fun) -> + {traverse_seq_set_1(Set, Fun), traverse_seq_set_1(Ext, Fun)}; +traverse_seq_set({Set1, Set2, Set3}, Fun) -> + {traverse_seq_set_1(Set1, Fun), + traverse_seq_set_1(Set2, Fun), + traverse_seq_set_1(Set3, Fun)}. + +traverse_seq_set_1([#'ComponentType'{} = CT0|Cs], Fun) -> + CT = #'ComponentType'{typespec=TS0} = Fun(CT0), + TS = traverse(TS0, Fun), + [CT#'ComponentType'{typespec=TS}|traverse_seq_set_1(Cs, Fun)]; +traverse_seq_set_1([{'COMPONENTS OF', _} = CO0|Cs], Fun) -> + {'COMPONENTS OF', TS0} = Fun(CO0), + TS = traverse(TS0, Fun), + [{'COMPONENTS OF', TS}|traverse_seq_set_1(Cs, Fun)]; +traverse_seq_set_1([], _) -> + []. -object_to_check(#typedef{typespec=ObjDef}) -> +object_to_check(_, #typedef{typespec=ObjDef}) -> ObjDef; -object_to_check(#valuedef{type=ClassName,value=ObjectRef}) -> +object_to_check(S, #valuedef{type=Class,value=ObjectRef}) -> %% If the object definition is parsed as an object the ClassName - %% is parsed as a type - #'Object'{classname=ClassName#type.def,def=ObjectRef}. - -prepare_objset({'SingleValue',Set}) when is_list(Set) -> - {set,Set,false}; -prepare_objset(L=['EXTENSIONMARK']) -> - L; -prepare_objset(Set) when is_list(Set) -> - {set,Set,false}; -prepare_objset({{'SingleValue',Set},Ext}) -> - {set,merge_sets(Set,Ext),true}; -%%prepare_objset({Set,Ext}) when is_list(Set),is_list(Ext) -> -%% {set,lists:append([Set,Ext]),true}; -prepare_objset({Set,Ext}) when is_list(Set) -> - {set,merge_sets(Set,Ext),true}; -prepare_objset({{object,definedsyntax,_ObjFields}=Set,Ext}) -> - {set,merge_sets(Set, Ext),true}; -prepare_objset(ObjDef={object,definedsyntax,_ObjFields}) -> - {set,[ObjDef],false}; -prepare_objset({ObjDef=#type{},Ext}) when is_list(Ext) -> - {set,[ObjDef|Ext],true}; -prepare_objset({#type{}=Type,#type{}=Ext}) -> - {set,[Type,Ext],true}; -prepare_objset(Ret) -> - Ret. - -class_fields_optional_check(S,#classdef{typespec=ClassSpec}) -> - Fields = ClassSpec#objectclass.fields, - class_fields_optional_check1(S,Fields). 
- -class_fields_optional_check1(_S,[]) -> - ok; -class_fields_optional_check1(S,[{typefield,_,'OPTIONAL'}|Rest]) -> - class_fields_optional_check1(S,Rest); -class_fields_optional_check1(S,[{fixedtypevaluefield,_,_,_,'OPTIONAL'}|Rest]) -> - class_fields_optional_check1(S,Rest); -class_fields_optional_check1(S,[{fixedtypevaluesetfield,_,_,'OPTIONAL'}|Rest]) -> - class_fields_optional_check1(S,Rest); -class_fields_optional_check1(S,[{objectfield,_,_,_,'OPTIONAL'}|Rest]) -> - class_fields_optional_check1(S,Rest); -class_fields_optional_check1(S,[{objectsetfield,_,_,'OPTIONAL'}|Rest]) -> - class_fields_optional_check1(S,Rest). - -%% ObjectSetFromObjects functionality - -%% The fieldname is a list of field names.They may be objects or -%% object sets. If ObjectSet is an object set the resulting object set -%% is the union of object sets if the last field name is an object -%% set. If the last field is an object the resulting object set is -%% the set of objects in ObjectSet. -object_set_from_objects(S,RefedObjMod,FieldName,ObjectSet) -> - object_set_from_objects(S,RefedObjMod,FieldName,ObjectSet,[]). -object_set_from_objects(S,RefedObjMod,FieldName,ObjectSet,InterSect) - when is_record(ObjectSet,'ObjectSet') -> - #'ObjectSet'{class=Cl,set=Set} = ObjectSet, - {_,ClassDef} = get_referenced_type(S,Cl), - object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,Set,InterSect,[]); -object_set_from_objects(S,RefedObjMod,FieldName,Object,InterSect) - when is_record(Object,'Object') -> - #'Object'{classname=Cl,def=Def}=Object, - object_set_from_objects(S,RefedObjMod,Cl,FieldName,[Def],InterSect,[]). -object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,['EXTENSIONMARK'|Os], - InterSect,Acc) -> - object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,Os,InterSect,%%Acc); - ['EXTENSIONMARK'|Acc]); -object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,[O|Os],InterSect,Acc) -> - case object_set_from_objects2(S,mod_of_obj(RefedObjMod,element(1,O)), - ClassDef,FieldName,element(3,O),InterSect) of - ObjS when is_list(ObjS) -> - object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,Os,InterSect,ObjS++Acc); - Obj -> - object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,Os,InterSect,[Obj|Acc]) - end; -object_set_from_objects(_S,_RefedObjMod,_ClassDef,_FieldName,[],InterSect,Acc) -> - %% For instance may Acc contain objects of same class from - %% different object sets that in fact might be duplicates. - remove_duplicate_objects(osfo_intersection(InterSect,Acc)). -%% Acc. 
-object_set_from_objects2(S,RefedObjMod,ClassDef,[{valuefieldreference,OName}], - Fields,_InterSect) -> - %% this is an object - case lists:keysearch(OName,1,Fields) of - {value,{_,TDef}} -> - mk_object_set_from_object(S,RefedObjMod,TDef,ClassDef); - _ -> - [] % it may be an absent optional field - end; -object_set_from_objects2(S,RefedObjMod,ClassDef,[{typefieldreference,OSName}], - Fields,_InterSect) -> - %% this is an object set - case lists:keysearch(OSName,1,Fields) of - {value,{_,TDef}} -> - case TDef#typedef.typespec of - #'ObjectSet'{class=_NextClName,set=NextSet} ->%% = TDef#typedef.typespec, - NextSet; - #'Object'{def=_ObjDef} -> - mk_object_set_from_object(S,RefedObjMod,TDef,ClassDef) -%% ObjDef - %% error({error,{internal,unexpected_object,TDef}}) - end; - _ -> - [] % it may be an absent optional field - end; -object_set_from_objects2(S,RefedObjMod,_ClassDef,[{valuefieldreference,OName}|Rest], - Fields,InterSect) -> - %% this is an object - case lists:keysearch(OName,1,Fields) of - {value,{_,TDef}} -> - #'Object'{classname=NextClName,def=ODef}=TDef#typedef.typespec, - {_,_,NextFields}=ODef, - {_,NextClass} = get_referenced_type(S,NextClName), - object_set_from_objects2(S,RefedObjMod,NextClass,Rest,NextFields,InterSect); - _ -> - [] - end; -object_set_from_objects2(S,RefedObjMod,_ClassDef,[{typefieldreference,OSName}|Rest], - Fields,InterSect) -> - %% this is an object set - Next = {NextClName,NextSet} = - case lists:keysearch(OSName,1,Fields) of - {value,{_,TDef}} when is_record(TDef,'ObjectSet') -> - #'ObjectSet'{class=NextClN,set=NextS} = TDef, - {NextClN,NextS}; - {value,{_,#typedef{typespec=OS}}} -> - %% objectsets in defined syntax will come here as typedef{} - %% #'ObjectSet'{class=NextClN,set=NextS} = OS, - case OS of - #'ObjectSet'{class=NextClN,set=NextS} -> - {NextClN,NextS}; - #'Object'{classname=NextClN,def=NextDef} -> - {NextClN,[NextDef]} - end; + %% is parsed as a type. + case Class of + #type{def=#'Externaltypereference'{}=Def} -> + #'Object'{classname=Def,def=ObjectRef}; _ -> - {[],[]} - end, - case Next of - {[],[]} -> - []; - _ -> - {_,NextClass} = get_referenced_type(S,NextClName), - object_set_from_objects(S,RefedObjMod,NextClass,Rest,NextSet,InterSect,[]) - end. - -mk_object_set_from_object(S,RefedObjMod,TDef,Class) -> - #'Object'{classname=_NextClName,def=ODef} = TDef#typedef.typespec, - {_,_,NextFields}=ODef, - - UniqueFieldName = - case (catch get_unique_fieldname(S,Class)) of - {error,'__undefined_',_} -> {unique,undefined}; - {asn1,Msg,_} -> error({class,Msg,S}); - {'EXIT',Msg} -> error({class,{internal_error,Msg},S}); - {Other,_} -> Other - end, - VDef = get_unique_value(S,NextFields,UniqueFieldName), - %% XXXXXXXXXXX - case VDef of - [] -> - ['EXTENSIONMARK']; - _ -> - {{RefedObjMod,get_datastr_name(TDef)},VDef,NextFields} + asn1_error(S, illegal_object) end. - - -mod_of_obj(_RefedObjMod,{NewMod,ObjName}) - when is_atom(NewMod),is_atom(ObjName) -> - NewMod; -mod_of_obj(RefedObjMod,_) -> - RefedObjMod. - - -merge_sets(Root,{'SingleValue',Ext}) -> - merge_sets(Root,Ext); -merge_sets(Root,Ext) when is_list(Root),is_list(Ext) -> - Root ++ Ext; -merge_sets(Root,Ext) when is_list(Ext) -> - [Root|Ext]; -merge_sets(Root,Ext) when is_list(Root) -> - Root++[Ext]; -merge_sets(Root,Ext) -> - [Root]++[Ext]. - -reduce_objectset(ObjectSet,Exclusion) -> - case Exclusion of - {'SingleValue',#'Externalvaluereference'{value=Name}} -> - case lists:keysearch(Name,1,ObjectSet) of - {value,El} -> - lists:subtract(ObjectSet,[El]); - _ -> - ObjectSet - end - end. 
- -%% Checks a list of objects or object sets and returns a list of selected -%% information for the code generation. -check_object_list(S,ClassRef,ObjectList) -> - check_object_list(S,ClassRef,ObjectList,[]). - -check_object_list(S,ClassRef,[ObjOrSet|Objs],Acc) -> - ?dbg("check_object_list: ~p~n",[ObjOrSet]), - case ObjOrSet of - ObjDef when is_tuple(ObjDef),(element(1,ObjDef)==object) -> - Def = - check_object(S,#typedef{typespec=ObjDef}, -% #'Object'{classname={objectclassname,ClassRef}, - #'Object'{classname=ClassRef, - def=ObjDef}), - check_object_list(S,ClassRef,Objs,[{{no_mod,no_name},Def#'Object'.def}|Acc]); - {'SingleValue',Ref = #'Externalvaluereference'{}} -> - ?dbg("{SingleValue,Externalvaluereference}~n",[]), - {RefedMod,ObjName, - #'Object'{def=Def}} = check_referenced_object(S,Ref), - check_object_list(S,ClassRef,Objs,[{{RefedMod,ObjName},Def}|Acc]); - ObjRef when is_record(ObjRef,'Externalvaluereference') -> - ?dbg("Externalvaluereference~n",[]), - {RefedMod,ObjName, - #'Object'{def=Def}} = check_referenced_object(S,ObjRef), - check_object_list(S,ClassRef,Objs,[{{RefedMod,ObjName},Def}|Acc]); - {'ValueFromObject',{_,Object},FieldName} -> - {_,Def} = get_referenced_type(S,Object), - TypeDef = get_fieldname_element(S,Def,FieldName), - (TypeDef#typedef.typespec)#'ObjectSet'.set; - ObjSet when is_record(ObjSet,type) -> - ObjSetDef = - case ObjSet#type.def of - Ref when is_record(Ref,'Externaltypereference') -> - {_,D} = get_referenced_type(S,ObjSet#type.def), - D; - Other -> - throw({asn1_error,{'unknown objecset',Other,S}}) - end, - #'ObjectSet'{set=ObjectsInSet} = - check_object(S,ObjSetDef,ObjSetDef#typedef.typespec), - AccList = transform_set_to_object_list(ObjectsInSet,[]), - check_object_list(S,ClassRef,Objs,AccList++Acc); - union -> - check_object_list(S,ClassRef,Objs,Acc); - {pos,{objectset,_,DefinedObjectSet},Params} -> - OSDef = #type{def={pt,DefinedObjectSet,Params}}, - #'ObjectSet'{set=Set} = - check_object(S,ObjOrSet,#'ObjectSet'{class=ClassRef, - set=OSDef}), - check_object_list(S,ClassRef,Objs,Set ++ Acc); - {pv,{simpledefinedvalue,DefinedObject},Params} -> - Args = [match_parameters(S,Param,S#state.parameters)|| - Param<-Params], - #'Object'{def=Def} = - check_object(S,ObjOrSet, - #'Object'{classname=ClassRef , - def={po,{object,DefinedObject}, - Args}}), - check_object_list(S,ClassRef,Objs,[{{no_mod,no_name},Def}|Acc]); - {'ObjectSetFromObjects',Os,FieldName} when is_tuple(Os) -> - NewSet = - check_ObjectSetFromObjects(S, element(tuple_size(Os), Os), - FieldName,[]), - check_object_list(S,ClassRef,Objs,NewSet++Acc); - {{'ObjectSetFromObjects',Os,FieldName},InterSection} - when is_tuple(Os) -> - NewSet = - check_ObjectSetFromObjects(S, element(tuple_size(Os), Os), - FieldName,InterSection), - check_object_list(S,ClassRef,Objs,NewSet++Acc); - Other -> - exit({error,{'unknown object',Other},S}) - end; -%% Finally reverse the accumulated list and if there are any extension -%% marks in the object set put one indicator of that in the end of the -%% list. -check_object_list(_,_,[],Acc) -> - lists:reverse(Acc). check_referenced_object(S,ObjRef) when is_record(ObjRef,'Externalvaluereference')-> @@ -1213,195 +1002,134 @@ check_referenced_object(S,ObjRef) check_object(update_state(S,RefedMod),ObjectDef,ObjectDef#typedef.typespec)} end. 
-check_ObjectSetFromObjects(S,ObjName,FieldName,InterSection) -> - {RefedMod,TDef} = get_referenced_type(S,ObjName), - ObjOrSet = check_object(update_state(S,RefedMod),TDef,TDef#typedef.typespec), - InterSec = prepare_intersection(S,InterSection), - _NewSet = object_set_from_objects(S,RefedMod,FieldName,ObjOrSet,InterSec). +check_ObjectSetFromObjects(S, ObjName, Fields) -> + {_,Obj0} = get_referenced_type(S, ObjName), + case check_object(S, Obj0, Obj0#typedef.typespec) of + #'ObjectSet'{}=Obj1 -> + get_fieldname_set(S, Obj1, Fields); + #'Object'{classname=Class, + def={object,_,ObjFs}} -> + ObjSet = #'ObjectSet'{class=Class, + set=[{'_','_',ObjFs}]}, + get_fieldname_set(S, ObjSet, Fields) + end. -prepare_intersection(_S,[]) -> - []; -prepare_intersection(S,{'EXCEPT',ObjRef}) -> - except_names(S,ObjRef); -prepare_intersection(_S,T) -> - exit({error,{internal_error,not_implemented,object_set_from_objects,T}}). -except_names(_S,{'SingleValue',#'Externalvaluereference'{value=ObjName}}) -> - [{except,ObjName}]; -except_names(_,T) -> - exit({error,{internal_error,not_implemented,object_set_from_objects,T}}). - -osfo_intersection(InterSect,ObjList) -> - Res = [X|| X = {{_,N},_,_} <- ObjList, - lists:member({except,N},InterSect) == false], - case lists:member('EXTENSIONMARK',ObjList) of - true -> - Res ++ ['EXTENSIONMARK']; +%% get_type_from_object(State, ObjectOrObjectSet, [{RefType,FieldName}]) -> +%% Type +get_type_from_object(S, Object, FieldNames) + when is_record(Object, 'Externaltypereference'); + is_record(Object, 'Externalvaluereference') -> + extract_field(S, Object, FieldNames). + +%% get_value_from_object(State, ObjectOrObjectSet, [{RefType,FieldName}]) -> +%% UntaggedValue +get_value_from_object(S, Def, FieldNames) -> + case extract_field(S, Def, FieldNames) of + #valuedef{value=Val} -> + Val; + {valueset,_}=Val -> + Val; _ -> - Res + asn1_error(S, illegal_value) end. -%% get_fieldname_element/3 -%% gets the type/value/object/... of the referenced element in FieldName -%% FieldName is a list and may have more than one element. -%% Each element in FieldName can be either {typefieldreference,AnyFieldName} -%% or {valuefieldreference,AnyFieldName} -%% Def is the def of the first object referenced by FieldName -get_fieldname_element(S,Def,[{_RefType,FieldName}]) when is_record(Def,typedef) -> - {_,_,ObjComps} = (Def#typedef.typespec)#'Object'.def, - check_fieldname_element(S,lists:keysearch(FieldName,1,ObjComps)); -get_fieldname_element(S,Def,[{_RefType,FieldName}|Rest]) - when is_record(Def,typedef) -> - %% As FieldName is followd by other FieldNames it has to be an - %% object or objectset. - {_,_,ObjComps} = (Def#typedef.typespec)#'Object'.def, - NewDef = check_fieldname_element(S,lists:keysearch(FieldName,1,ObjComps)), - ObjDef = fun(#'Object'{def=D}) -> D; - (#'ObjectSet'{set=Set}) -> Set - end - (NewDef), - case ObjDef of +%% extract_field(State, ObjectOrObjectSet, [{RefType,FieldName}]) +%% RefType = typefieldreference | valuefieldreference +%% +%% Get the type, value, object, object set, or value set from the +%% referenced object or object set. The list of field name tuples +%% may have more than one element. All field names but the last +%% refers to either an object or object set. + +extract_field(S, Def0, FieldNames) -> + {_,Def1} = get_referenced_type(S, Def0), + Def2 = check_object(S, Def1, Def1#typedef.typespec), + Def = Def1#typedef{typespec=Def2}, + get_fieldname_element(S, Def, FieldNames). 
+ +%% get_fieldname_element(State, Element, [{RefType,FieldName}] +%% RefType = typefieldreference | valuefieldreference +%% +%% Get the type, value, object, object set, or value set from the referenced +%% element. The list of field name tuples may have more than one element. +%% All field names but the last refers to either an object or object set. + +get_fieldname_element(S, Object0, [{_RefType,FieldName}|Fields]) -> + Object = case Object0 of + #typedef{typespec=#'Object'{def=Obj}} -> Obj; + {_,_,_}=Obj -> Obj + end, + case check_fieldname_element(S, FieldName, Object) of + #'Object'{def=D} when Fields =/= [] -> + get_fieldname_element(S, D, Fields); + #'ObjectSet'{}=Set -> + get_fieldname_set(S, Set, Fields); + Result when Fields =:= [] -> + Result + end; +get_fieldname_element(_S, Def, []) -> + Def. + +get_fieldname_set(S, #'ObjectSet'{set=Set0}, T) -> + get_fieldname_set_1(S, Set0, T, []). + +get_fieldname_set_1(S, ['EXTENSIONMARK'=Ext|T], Fields, Acc) -> + get_fieldname_set_1(S, T, Fields, [Ext|Acc]); +get_fieldname_set_1(S, [H|T], Fields, Acc) -> + try get_fieldname_element(S, H, Fields) of L when is_list(L) -> - [get_fieldname_element(S,X,Rest) || X <- L]; - _ -> - get_fieldname_element(S,ObjDef,Rest) + get_fieldname_set_1(S, T, Fields, L++Acc); + {valueset,L} -> + get_fieldname_set_1(S, T, Fields, L++Acc); + Other -> + get_fieldname_set_1(S, T, Fields, [Other|Acc]) + catch + throw:{error,_} -> + get_fieldname_set_1(S, T, Fields, Acc) end; -get_fieldname_element(S,{object,_,Fields},[{_RefType,FieldName}|Rest]) -> - NewDef = check_fieldname_element(S,lists:keysearch(FieldName,1,Fields)), - get_fieldname_element(S,NewDef,Rest); -get_fieldname_element(_S,Def,[]) -> - Def; -get_fieldname_element(_S,Def,[{_RefType,_FieldName}|_RestFName]) - when is_record(Def,typedef) -> - ok. +get_fieldname_set_1(_, [], _Fields, Acc) -> + case Acc of + [#valuedef{}|_] -> + {valueset,Acc}; + _ -> + Acc + end. -check_fieldname_element(S,{value,{_,Def}}) -> - check_fieldname_element(S,Def); -check_fieldname_element(S, #typedef{typespec=Ts}=TDef) -> +check_fieldname_element(S, Name, {_,_,Fields}) -> + case lists:keyfind(Name, 1, Fields) of + {Name,Def} -> + check_fieldname_element_1(S, Def); + false -> + asn1_error(S, {undefined_field,Name}) + end. + +check_fieldname_element_1(S, #typedef{typespec=Ts}=TDef) -> case Ts of #'Object'{} -> check_object(S, TDef, Ts); _ -> check_type(S, TDef, Ts) end; -check_fieldname_element(S, #valuedef{}=VDef) -> +check_fieldname_element_1(S, #valuedef{}=VDef) -> try check_value(S, VDef) catch - throw:{objectdef} -> + throw:{asn1_class, _} -> #valuedef{checked=C,pos=Pos,name=N,type=Type, value=Def} = VDef, ClassName = Type#type.def, NewSpec = #'Object'{classname=ClassName,def=Def}, NewDef = #typedef{checked=C,pos=Pos,name=N,typespec=NewSpec}, - check_fieldname_element(S, NewDef) + check_fieldname_element_1(S, NewDef) end; -check_fieldname_element(S,Eref) - when is_record(Eref,'Externaltypereference'); - is_record(Eref,'Externalvaluereference') -> - {_,TDef}=get_referenced_type(S,Eref), - check_fieldname_element(S,TDef); -check_fieldname_element(S,Other) -> - throw({error,{assigned_object_error,"not_assigned_object",Other,S}}). +check_fieldname_element_1(_S, {value_tag,Val}) -> + #valuedef{value=Val}; +check_fieldname_element_1(S, Eref) + when is_record(Eref, 'Externaltypereference'); + is_record(Eref, 'Externalvaluereference') -> + {_,TDef} = get_referenced_type(S, Eref), + check_fieldname_element_1(S, TDef). 
-transform_set_to_object_list([{Name,_UVal,Fields}|Objs],Acc) -> - transform_set_to_object_list(Objs,[{Name,{object,generatesyntax,Fields}}|Acc]); -transform_set_to_object_list(['EXTENSIONMARK'|Objs],Acc) -> -%% transform_set_to_object_list(Objs,['EXTENSIONMARK'|Acc]); - transform_set_to_object_list(Objs,Acc); -transform_set_to_object_list([],Acc) -> - Acc. - -get_unique_valuelist(_S,ObjSet,{unique,undefined}) -> % no unique field in object - lists:map(fun({N,{_,_,F}})->{N,no_unique_value,F}; - (V={_,_,_}) ->V; - ({A,B}) -> {A,no_unique_value,B} - end, ObjSet); -get_unique_valuelist(S,ObjSet,{UFN,Opt}) -> - get_unique_vlist(S,ObjSet,UFN,Opt,[]). - - -get_unique_vlist(_S,[],_,_,[]) -> - ['EXTENSIONMARK']; -get_unique_vlist(S,[],_,Opt,Acc) -> - case catch check_uniqueness(remove_duplicate_objects(Acc)) of - {asn1_error,_} when Opt =/= 'OPTIONAL' -> - error({'ObjectSet',"not unique objects in object set",S}); - {asn1_error,_} -> - lists:reverse(Acc); - _ -> - lists:reverse(Acc) - end; -get_unique_vlist(S,['EXTENSIONMARK'|Rest],UniqueFieldName,Opt,Acc) -> - get_unique_vlist(S,Rest,UniqueFieldName,Opt,Acc); -get_unique_vlist(S,[{ObjName,Obj}|Rest],UniqueFieldName,Opt,Acc) -> - {_,_,Fields} = Obj, - NewObjInf = - case get_unique_value(S,Fields,UniqueFieldName) of - #valuedef{value=V} -> [{ObjName,V,Fields}]; - [] -> []; % maybe the object only was a reference to an - % empty object set. - no_unique_value -> [{ObjName,no_unique_value,Fields}] - end, - get_unique_vlist(S,Rest,UniqueFieldName,Opt,NewObjInf++Acc); - -get_unique_vlist(S,[V={_,_,_}|Rest],UniqueFieldName,Opt,Acc) -> - get_unique_vlist(S,Rest,UniqueFieldName,Opt,[V|Acc]). - -get_unique_value(S,Fields,UniqueFieldName) -> - Module = S#state.mname, - case lists:keysearch(UniqueFieldName,1,Fields) of - {value,Field} -> - case element(2,Field) of - VDef when is_record(VDef,valuedef) -> - VDef; - {'ValueFromObject',Object,Name} -> - case Object of - {object,Ext} when is_record(Ext,'Externaltypereference') -> - OtherModule = Ext#'Externaltypereference'.module, - ExtObjName = Ext#'Externaltypereference'.type, - ObjDef = asn1_db:dbget(OtherModule,ExtObjName), - ObjSpec = ObjDef#typedef.typespec, - get_unique_value(OtherModule,element(3,ObjSpec),Name); - {object,{_,_,ObjName}} -> - ObjDef = asn1_db:dbget(Module,ObjName), - ObjSpec = ObjDef#typedef.typespec, - get_unique_value(Module,element(3,ObjSpec),Name); - {po,Object,_Params} -> - exit({error,{'parameterized object not implemented yet', - Object},S}) - end; - Value when is_atom(Value);is_number(Value) -> - #valuedef{value=Value,module=Module}; - {'CHOICE',{C,Value}} when is_atom(C) -> - %% #valuedef{value=normalize_value(S,element(3,Field),VDef,[])} - case Value of - Scalar when is_atom(Scalar);is_number(Scalar) -> - #valuedef{value=Value,module=Module}; - Eref = #'Externalvaluereference'{} -> - element(2,get_referenced_type(S,Eref)) - end - end; - false -> - case Fields of - [{_,#typedef{typespec=#'ObjectSet'{set=['EXTENSIONMARK']}}}] -> - []; - _ -> - no_unique_value - end - end. - -check_uniqueness(NameValueList) -> - check_uniqueness1(lists:keysort(2,NameValueList)). - -check_uniqueness1([]) -> - true; -check_uniqueness1([_]) -> - true; -check_uniqueness1([{_,N,_},{_,N,_}|_Rest]) -> - throw({asn1_error,{'objects in set must have unique values in UNIQUE fields',N}}); -check_uniqueness1([_|Rest]) -> - check_uniqueness1(Rest). 
- %% instantiate_po/4 %% ClassDef is the class of Object, %% Object is the Parameterized object, which is referenced, @@ -1410,8 +1138,7 @@ check_uniqueness1([_|Rest]) -> instantiate_po(S=#state{parameters=_OldArgs},_ClassDef,Object,ArgsList) when is_record(Object,pobjectdef) -> FormalParams = get_pt_args(Object), MatchedArgs = match_args(S,FormalParams,ArgsList,[]), -% NewS = S#state{type=Object,parameters=MatchedArgs++OldArgs}, - NewS = S#state{type=Object,parameters=MatchedArgs}, + NewS = S#state{parameters=MatchedArgs}, check_object(NewS,Object,#'Object'{classname=Object#pobjectdef.class, def=Object#pobjectdef.def}). @@ -1421,20 +1148,14 @@ instantiate_po(S=#state{parameters=_OldArgs},_ClassDef,Object,ArgsList) when is_ %% on the right side of the assignment, %% ArgsList is the list of actual parameters, i.e. real objects instantiate_pos(S=#state{parameters=_OldArgs},ClassRef,ObjectSetDef,ArgsList) -> -% ClassName = ClassDef#classdef.name, FormalParams = get_pt_args(ObjectSetDef), OSet = case get_pt_spec(ObjectSetDef) of - {valueset,Set} -> -% #'ObjectSet'{class=name2Extref(S#state.mname, -% ClassName),set=Set}; - #'ObjectSet'{class=ClassRef,set=Set}; - Set when is_record(Set,'ObjectSet') -> Set; - _ -> - error({type,"parameterized object set failure",S}) + {valueset,Set} -> #'ObjectSet'{class=ClassRef,set=Set}; + Set when is_record(Set,'ObjectSet') -> Set; + _ -> asn1_error(S, invalid_objectset) end, MatchedArgs = match_args(S,FormalParams,ArgsList,[]), -% NewS = S#state{type=ObjectSetDef,parameters=MatchedArgs++OldArgs}, - NewS = S#state{type=ObjectSetDef,parameters=MatchedArgs}, + NewS = S#state{parameters=MatchedArgs}, check_object(NewS,ObjectSetDef,OSet). @@ -1468,7 +1189,7 @@ gen_incl1(S,Fields,[C|CFields]) -> check_object(S,TDef,TDef#typedef.typespec); ERef -> {_,T} = get_referenced_type(S,ERef), - check_object(S,T,object_to_check(T)) + check_object(S, T, object_to_check(S, T)) end, case gen_incl(S,ObjDef#'Object'.def, ClassFields) of @@ -1485,7 +1206,7 @@ gen_incl1(S,Fields,[C|CFields]) -> end. get_objclass_fields(S,Eref=#'Externaltypereference'{}) -> - {_,ClassDef} = get_referenced_type(S,Eref), + {_,ClassDef} = get_referenced_type(S,Eref, true), get_objclass_fields(S,ClassDef); get_objclass_fields(S,CD=#classdef{typespec=#'Externaltypereference'{}}) -> get_objclass_fields(S,CD#classdef.typespec); @@ -1501,10 +1222,10 @@ gen_incl_set(S,Fields,#typedef{typespec=#type{def=Eref}}) {_,CDef} = get_referenced_type(S,Eref), gen_incl_set(S,Fields,CDef); gen_incl_set(S,Fields,ClassDef) -> - case catch get_unique_fieldname(S,ClassDef) of - Tuple when tuple_size(Tuple) =:= 3 -> + case get_unique_fieldname(S, ClassDef) of + no_unique -> false; - _ -> + {_, _} -> gen_incl_set1(S,Fields, (ClassDef#classdef.typespec)#objectclass.fields) end. @@ -1529,475 +1250,390 @@ gen_incl_set1(S,[Object|Rest],CFields)-> gen_incl_set1(S,Rest,CFields) end. -check_objectdefn(S,Def,CDef) when is_record(CDef,classdef) -> - WithSyntax = (CDef#classdef.typespec)#objectclass.syntax, - ClassFields = (CDef#classdef.typespec)#objectclass.fields, + +%%% +%%% Check an object definition. 
+%%% + +check_objectdefn(S, Def, #classdef{typespec=ObjClass}) -> + #objectclass{syntax=Syntax0,fields=ClassFields} = ObjClass, case Def of {object,defaultsyntax,Fields} -> - check_defaultfields(S,Fields,ClassFields); + check_defaultfields(S, Fields, ClassFields); {object,definedsyntax,Fields} -> - {_,WSSpec} = WithSyntax, - NewFields = - case catch( convert_definedsyntax(S,Fields,WSSpec, - ClassFields,[])) of - {asn1,{_ErrorType,ObjToken,ClassToken}} -> - throw({asn1,{'match error in object',ObjToken, - 'found in object',ClassToken,'found in class'}}); - Err={asn1,_} -> throw(Err); - Err={'EXIT',_} -> throw(Err); - DefaultFields when is_list(DefaultFields) -> - DefaultFields - end, - {object,defaultsyntax,NewFields}; - {object,_ObjectId} -> % This is a DefinedObject - fixa; - Other -> - exit({error,{objectdefn,Other}}) + Syntax = get_syntax(S, Syntax0, ClassFields), + case match_syntax(S, Syntax, Fields, []) of + {match,NewFields,[]} -> + {object,defaultsyntax,NewFields}; + {match,_,[What|_]} -> + syntax_match_error(S, What); + {nomatch,[What|_]} -> + syntax_match_error(S, What); + {nomatch,[]} -> + syntax_match_error(S) + end + end. + + +%%% +%%% Pre-process the simplified syntax so that it can be more +%%% easily matched. +%%% + +get_syntax(_, {preprocessed_syntax,Syntax}, _) -> + Syntax; +get_syntax(S, {'WITH SYNTAX',Syntax}, ClassFields) -> + preprocess_syntax(S, Syntax, ClassFields). + +preprocess_syntax(S, Syntax0, Cs) -> + Syntax = preprocess_syntax_1(S, Syntax0, Cs, true), + Present0 = preprocess_get_fields(Syntax, []), + Present1 = lists:sort(Present0), + Present = ordsets:from_list(Present1), + case Present =:= Present1 of + false -> + Dupl = Present1 -- Present, + asn1_error(S, {syntax_duplicated_fields,Dupl}); + true -> + ok + end, + Mandatory0 = get_mandatory_class_fields(Cs), + Mandatory = ordsets:from_list(Mandatory0), + case ordsets:subtract(Mandatory, Present) of + [] -> + Syntax; + [_|_]=Missing -> + asn1_error(S, {syntax_missing_mandatory_fields,Missing}) end. +preprocess_syntax_1(S, [H|T], Cs, Mandatory) when is_list(H) -> + [{optional,preprocess_syntax_1(S, H, Cs, false)}| + preprocess_syntax_1(S, T, Cs, Mandatory)]; +preprocess_syntax_1(S, [{valuefieldreference,Name}|T], Cs, Mandatory) -> + F = preprocess_check_field(S, Name, Cs, Mandatory), + [F|preprocess_syntax_1(S, T, Cs, Mandatory)]; +preprocess_syntax_1(S, [{typefieldreference,Name}|T], Cs, Mandatory) -> + F = preprocess_check_field(S, Name, Cs, Mandatory), + [F|preprocess_syntax_1(S, T, Cs, Mandatory)]; +preprocess_syntax_1(S,[{Token,_}|T], Cs, Mandatory) when is_atom(Token) -> + [{token,Token}|preprocess_syntax_1(S, T, Cs, Mandatory)]; +preprocess_syntax_1(S, [Token|T], Cs, Mandatory) when is_atom(Token) -> + [{token,Token}|preprocess_syntax_1(S, T, Cs, Mandatory)]; +preprocess_syntax_1(_, [], _, _) -> []. + +preprocess_check_field(S, Name, Cs, Mandatory) -> + case lists:keyfind(Name, 2, Cs) of + Tuple when is_tuple(Tuple) -> + case not Mandatory andalso is_mandatory_class_field(Tuple) of + true -> + asn1_error(S, {syntax_mandatory_in_optional_group,Name}); + false -> + {field,Tuple} + end; + false -> + asn1_error(S, {syntax_undefined_field,Name}) + end. + +preprocess_get_fields([{field,F}|T], Acc) -> + Name = element(2, F), + preprocess_get_fields(T, [Name|Acc]); +preprocess_get_fields([{optional,L}|T], Acc) -> + preprocess_get_fields(T, preprocess_get_fields(L, Acc)); +preprocess_get_fields([_|T], Acc) -> + preprocess_get_fields(T, Acc); +preprocess_get_fields([], Acc) -> + Acc. 
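preprocess_syntax/3 above flattens the WITH SYNTAX specification into {token,...}, {field,...} and {optional,[...]} items and then uses ordsets to reject duplicated field names and to demand that every mandatory class field is mentioned. The presence check on its own boils down to something like this (standalone sketch; names invented):

-module(syntax_presence).
-export([check/2]).

%% PresentNames: field names mentioned in the WITH SYNTAX clause.
%% MandatoryNames: the mandatory fields of the class.
%% Returns {DuplicatedNames, MissingMandatoryNames}.
check(PresentNames, MandatoryNames) ->
    Sorted = lists:sort(PresentNames),
    Present = ordsets:from_list(Sorted),
    Dupl = Sorted -- Present,        %% whatever deduplication dropped
    Missing = ordsets:subtract(ordsets:from_list(MandatoryNames), Present),
    {Dupl, Missing}.

syntax_presence:check(['id','Type','id'], ['id','code']) gives {['id'],['code']}, the two situations reported above as syntax_duplicated_fields and syntax_missing_mandatory_fields.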
+ +%%% +%%% Match the actual fields in the object definition to +%%% the pre-processed simplified syntax. +%%% + +match_syntax(S, [{token,Token}|T], [A|As]=Args, Acc) -> + case A of + {word_or_setting,_,#'Externaltypereference'{type=Token}} -> + match_syntax(S, T, As, Acc); + {Token,Line} when is_integer(Line) -> + match_syntax(S, T, As, Acc); + _ -> + {nomatch,Args} + end; +match_syntax(S, [{field,Field}|T]=Fs, [A|As0]=Args0, Acc) -> + try match_syntax_type(S, Field, A) of + {match,Match} -> + match_syntax(S, T, As0, lists:reverse(Match)++Acc); + {params,_Name,#ptypedef{args=Params}=P,Ref} -> + {Args,As} = lists:split(length(Params), As0), + Val = match_syntax_params(S, P, Ref, Args), + match_syntax(S, Fs, [Val|As], Acc) + catch + _:_ -> + {nomatch,Args0} + end; +match_syntax(S, [{optional,L}|T], As0, Acc) -> + case match_syntax(S, L, As0, []) of + {match,Match,As} -> + match_syntax(S, T, As, lists:reverse(Match)++Acc); + {nomatch,As0} -> + match_syntax(S, T, As0, Acc); + {nomatch,_}=NoMatch -> + NoMatch + end; +match_syntax(_, [_|_], [], _Acc) -> + {nomatch,[]}; +match_syntax(_, [], As, Acc) -> + {match,Acc,As}. + +match_syntax_type(S, Type, {value_tag,Val}) -> + match_syntax_type(S, Type, Val); +match_syntax_type(S, Type, {setting,_,Val}) -> + match_syntax_type(S, Type, Val); +match_syntax_type(S, Type, {word_or_setting,_,Val}) -> + match_syntax_type(S, Type, Val); +match_syntax_type(_S, _Type, {Atom,Line}) + when is_atom(Atom), is_integer(Line) -> + throw(nomatch); +match_syntax_type(S, {fixedtypevaluefield,Name,#type{}=T,_,_}=Type, + #'Externalvaluereference'{}=ValRef0) -> + try get_referenced_type(S, ValRef0) of + {M,#valuedef{}=ValDef} -> + match_syntax_type(update_state(S, M), Type, ValDef) + catch + throw:{error,_} -> + ValRef = #valuedef{name=Name, + type=T, + value=ValRef0, + module=S#state.mname}, + match_syntax_type(S, Type, ValRef) + end; +match_syntax_type(S, {fixedtypevaluefield,Name,#type{},_,_}, #valuedef{}=Val0) -> + Val = check_value(S, Val0), + {match,[{Name,Val}]}; +match_syntax_type(S, {fixedtypevaluefield,Name,#type{},_,_}, + {'ValueFromObject',{object,Object},FieldNames}) -> + Val = extract_field(S, Object, FieldNames), + {match,[{Name,Val}]}; +match_syntax_type(S, {fixedtypevaluefield,Name,#type{}=T,_,_}=Type, Any) -> + ValDef = #valuedef{name=Name,type=T,value=Any,module=S#state.mname}, + match_syntax_type(S, Type, ValDef); +match_syntax_type(_S, {fixedtypevaluesetfield,Name,#type{},_}, Any) -> + {match,[{Name,Any}]}; +match_syntax_type(S, {objectfield,Name,_,_,_}, #'Externalvaluereference'{}=Ref) -> + {M,Obj} = get_referenced_type(S, Ref), + check_object(S, Obj, object_to_check(S, Obj)), + {match,[{Name,Ref#'Externalvaluereference'{module=M}}]}; +match_syntax_type(S, {objectfield,Name,Class,_,_}, {object,_,_}=ObjDef) -> + InlinedObjName = list_to_atom(lists:concat([S#state.tname, + '_',Name])), + ObjSpec = #'Object'{classname=Class,def=ObjDef}, + CheckedObj = check_object(S, #typedef{typespec=ObjSpec}, ObjSpec), + InlObj = #typedef{checked=true,name=InlinedObjName,typespec=CheckedObj}, + ObjKey = {InlinedObjName, InlinedObjName}, + insert_once(S, inlined_objects, ObjKey), + %% Which module to use here? Could it be other than top_module? 
+ asn1_db:dbput(get(top_module), InlinedObjName, InlObj), + {match,[{Name,InlObj}]}; +match_syntax_type(_S, {objectfield,Name,_,_,_}, Any) -> + {match,[{Name,Any}]}; +match_syntax_type(S, {objectsetfield,Name,CDef0,_}, Any) -> + CDef = case CDef0 of + #type{def=CDef1} -> CDef1; + CDef1 -> CDef1 + end, + case match_syntax_objset(S, Any, CDef) of + #typedef{typespec=#'ObjectSet'{}=Ts0}=Def -> + Ts = check_object(S, Def, Ts0), + {match,[{Name,Def#typedef{checked=true,typespec=Ts}}]}; + _ -> + syntax_match_error(S, Any) + end; +match_syntax_type(S, {typefield,Name0,_}, #type{def={pt,_,_}=Def}=Actual) -> + %% This is an inlined type. If constructed type, save in data base. + T = check_type(S, #typedef{typespec=Actual}, Actual), + #'Externaltypereference'{type=PtName} = element(2, Def), + NameList = [PtName,S#state.tname], + Name = list_to_atom(asn1ct_gen:list2name(NameList)), + NewTDef = #typedef{checked=true,name=Name,typespec=T}, + asn1_db:dbput(S#state.mname, Name, NewTDef), + insert_once(S, parameterized_objects, {Name,type,NewTDef}), + {match,[{Name0,NewTDef}]}; +match_syntax_type(S, {typefield,Name,_}, #type{def=#'ObjectClassFieldType'{}}=Actual) -> + T = check_type(S, #typedef{typespec=Actual}, Actual), + {match,[{Name,ocft_def(T)}]}; +match_syntax_type(S, {typefield,Name,_}, #type{def=#'Externaltypereference'{}=Ref}) -> + match_syntax_external(S, Name, Ref); +match_syntax_type(S, {typefield,Name,_}, #type{def=Def}=Actual) -> + T = check_type(S, #typedef{typespec=Actual}, Actual), + TypeName = asn1ct_gen:type(asn1ct_gen:get_inner(Def)), + {match,[{Name,#typedef{checked=true,name=TypeName,typespec=T}}]}; +match_syntax_type(S, {typefield,Name,_}, #'Externaltypereference'{}=Ref) -> + match_syntax_external(S, Name, Ref); +match_syntax_type(_S, {variabletypevaluefield,Name,_,_}, Any) -> + {match,[{Name,Any}]}; +match_syntax_type(_S, {variabletypevaluesetfield,Name,_,_}, Any) -> + {match,[{Name,Any}]}; +match_syntax_type(_S, _Type, _Actual) -> + throw(nomatch). + +match_syntax_params(S0, #ptypedef{name=Name}=PtDef, + #'Externaltypereference'{module=M,type=N}=ERef0, Args) -> + S = S0#state{mname=M,module=load_asn1_module(S0, M),tname=Name}, + Type = check_type(S, PtDef, #type{def={pt,ERef0,Args}}), + ERefName = new_reference_name(N), + ERef = #'Externaltypereference'{type=ERefName,module=S0#state.mname}, + TDef = #typedef{checked=true,name=ERefName,typespec=Type}, + insert_once(S0, parameterized_objects, {ERefName,type,TDef}), + asn1_db:dbput(S0#state.mname, ERef#'Externaltypereference'.type, TDef), + ERef. + +match_syntax_external(#state{mname=Mname}=S0, Name, Ref0) -> + {M,T0} = get_referenced_type(S0, Ref0), + Ref1 = Ref0#'Externaltypereference'{module=M}, + case T0 of + #ptypedef{} -> + {params,Name,T0,Ref1}; + #typedef{checked=false}=TDef0 when Mname =/= M -> + %% This typedef is an imported type (or maybe a set.asn + %% compilation). + S = S0#state{mname=M,module=load_asn1_module(S0, M), + tname=get_datastr_name(TDef0)}, + Type = check_type(S, TDef0, TDef0#typedef.typespec), + TDef = TDef0#typedef{checked=true,typespec=Type}, + asn1_db:dbput(M, get_datastr_name(TDef), TDef), + {match,[{Name,merged_name(S, Ref1)}]}; + TDef -> + %% This might be a renamed type in a set of specs, + %% so rename the ref. + Type = asn1ct:get_name_of_def(TDef), + Ref = Ref1#'Externaltypereference'{type=Type}, + {match,[{Name,Ref}]} + end. 
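match_syntax/4 above consumes the words of the object definition against the pre-processed syntax: a {token,...} item must match literally, a {field,...} item captures a setting via match_syntax_type/3, and an {optional,[...]} group is skipped when it consumes nothing. The token/optional control flow can be sketched in isolation like this (simplified, no field handling; names invented):

-module(syntax_sketch).
-export([match/2]).

%% Syntax items are {token,Atom} or {optional,[Item]}; Args is the list of
%% words from the object definition.
match([{token,T} | Syntax], [T | Args]) ->
    match(Syntax, Args);
match([{token,_} | _], Args) ->
    {nomatch, Args};
match([{optional,Group} | Syntax], Args0) ->
    case match(Group, Args0) of
        {match, Args} -> match(Syntax, Args);      %% group matched; carry on
        {nomatch, Args0} -> match(Syntax, Args0);  %% consumed nothing; skip group
        {nomatch, _} = NoMatch -> NoMatch          %% failed after consuming words
    end;
match([], Args) ->
    {match, Args}.

syntax_sketch:match([{token,'IDENTIFIED'},{token,'BY'},{optional,[{token,'AND'}]}], ['IDENTIFIED','BY']) returns {match,[]}; any leftover words come back in the second element, much as the unmatched part above ends up in a syntax_nomatch error.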
+ +match_syntax_objset(_S, {element_set,_,_}=Set, ClassDef) -> + make_objset(ClassDef, Set); +match_syntax_objset(S, #'Externaltypereference'{}=Ref, _) -> + {_,T} = get_referenced_type(S, Ref), + T; +match_syntax_objset(S, #'Externalvaluereference'{}=Ref, _) -> + {_,T} = get_referenced_type(S, Ref), + T; +match_syntax_objset(_, [_|_]=Set, ClassDef) -> + make_objset(ClassDef, Set); +match_syntax_objset(S, {object,definedsyntax,Words}, ClassDef) -> + case Words of + [Word] -> + match_syntax_objset_1(S, Word, ClassDef); + [_|_] -> + %% More than one word does not make sense. + none + end; +match_syntax_objset(S, #type{def=#'Externaltypereference'{}=Set}, ClassDef) -> + match_syntax_objset(S, Set, ClassDef); +match_syntax_objset(_, #type{}, _) -> + none. + +match_syntax_objset_1(S, {setting,_,Set}, ClassDef) -> + %% Word that starts with an uppercase letter. + match_syntax_objset(S, Set, ClassDef); +match_syntax_objset_1(S, {word_or_setting,_,Set}, ClassDef) -> + %% Word in uppercase/hyphens only. + match_syntax_objset(S, Set, ClassDef); +match_syntax_objset_1(S, #type{def={'TypeFromObject', {object,Object}, FNs}}, + ClassDef) -> + Set = extract_field(S, Object, FNs), + [_|_] = Set, + #typedef{checked=true,typespec=#'ObjectSet'{class=ClassDef,set=Set}}; +match_syntax_objset_1(_, #type{def=#'ObjectClassFieldType'{}}=Set, ClassDef) -> + make_objset(ClassDef, Set); +match_syntax_objset_1(_, {object,_,_}=Object, ClassDef) -> + make_objset(ClassDef, [Object]). + +make_objset(ClassDef, Set) -> + #typedef{typespec=#'ObjectSet'{class=ClassDef,set=Set}}. + +syntax_match_error(S) -> + asn1_error(S, syntax_nomatch). + +syntax_match_error(S, What0) -> + What = printable_string(What0), + asn1_error(S, {syntax_nomatch,What}). + +printable_string(Def) -> + printable_string_1(Def). + +printable_string_1({word_or_setting,_,Def}) -> + printable_string_1(Def); +printable_string_1({value_tag,V}) -> + printable_string_1(V); +printable_string_1({#seqtag{val=Val1},Val2}) -> + atom_to_list(Val1) ++ " " ++ printable_string_1(Val2); +printable_string_1(#type{def=Def}) -> + atom_to_list(asn1ct_gen:get_inner(Def)); +printable_string_1(#'Externaltypereference'{type=Type}) -> + atom_to_list(Type); +printable_string_1(#'Externalvaluereference'{value=Type}) -> + atom_to_list(Type); +printable_string_1({Atom,Line}) when is_atom(Atom), is_integer(Line) -> + q(Atom); +printable_string_1({object,definedsyntax,L}) -> + q(string:join([printable_string_1(Item) || Item <- L], " ")); +printable_string_1([_|_]=Def) -> + case lists:all(fun is_integer/1, Def) of + true -> + lists:flatten(io_lib:format("~p", [Def])); + false -> + q(string:join([printable_string_1(Item) || Item <- Def], " ")) + end; +printable_string_1(Def) -> + lists:flatten(io_lib:format("~p", [Def])). + +q(S) -> + lists:concat(["\"",S,"\""]). + check_defaultfields(S, Fields, ClassFields) -> Present = ordsets:from_list([F || {F,_} <- Fields]), Mandatory0 = get_mandatory_class_fields(ClassFields), Mandatory = ordsets:from_list(Mandatory0), All = ordsets:from_list([element(2, F) || F <- ClassFields]), - #state{type=T,tname=Obj} = S, + #state{tname=Obj} = S, case ordsets:subtract(Present, All) of [] -> ok; [_|_]=Invalid -> - asn1_error(S, T, {invalid_fields,Invalid,Obj}) + asn1_error(S, {invalid_fields,Invalid,Obj}) end, case ordsets:subtract(Mandatory, Present) of [] -> check_defaultfields_1(S, Fields, ClassFields, []); [_|_]=Missing -> - asn1_error(S, T, {missing_mandatory_fields,Missing,Obj}) + asn1_error(S, {missing_mandatory_fields,Missing,Obj}) end. 
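printable_string_1/1 above renders whatever part of the object definition failed to match as a short quoted string for the syntax_nomatch report; the quoting itself is just q/1 around string:join/2 (the words below are invented examples):

1> Words = ["IDENTIFIED", "BY", "id-something"].
["IDENTIFIED","BY","id-something"]
2> lists:concat(["\"", string:join(Words, " "), "\""]).
"\"IDENTIFIED BY id-something\""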
check_defaultfields_1(_S, [], _ClassFields, Acc) -> {object,defaultsyntax,lists:reverse(Acc)}; check_defaultfields_1(S, [{FName,Spec}|Fields], ClassFields, Acc) -> CField = lists:keyfind(FName, 2, ClassFields), - {NewField,RestFields} = - convert_to_defaultfield(S, FName, [Spec|Fields], CField), - check_defaultfields_1(S, RestFields, ClassFields, [NewField|Acc]). + {match,Match} = match_syntax_type(S, CField, Spec), + check_defaultfields_1(S, Fields, ClassFields, Match++Acc). -convert_definedsyntax(_S,[],[],_ClassFields,Acc) -> - lists:reverse(Acc); -convert_definedsyntax(S,Fields,WithSyntax,ClassFields,Acc) -> - {MatchedField,RestFields,RestWS} = - match_field(S,Fields,WithSyntax,ClassFields), - if - is_list(MatchedField) -> - convert_definedsyntax(S,RestFields,RestWS,ClassFields, - lists:append(MatchedField,Acc)); - true -> - convert_definedsyntax(S,RestFields,RestWS,ClassFields, - [MatchedField|Acc]) - end. +get_mandatory_class_fields(ClassFields) -> + [element(2, F) || F <- ClassFields, + is_mandatory_class_field(F)]. -get_mandatory_class_fields([{fixedtypevaluefield,Name,_,_,'MANDATORY'}|T]) -> - [Name|get_mandatory_class_fields(T)]; -get_mandatory_class_fields([{objectfield,Name,_,_,'MANDATORY'}|T]) -> - [Name|get_mandatory_class_fields(T)]; -get_mandatory_class_fields([{objectsetfield,Name,_,'MANDATORY'}|T]) -> - [Name|get_mandatory_class_fields(T)]; -get_mandatory_class_fields([{typefield,Name,'MANDATORY'}|T]) -> - [Name|get_mandatory_class_fields(T)]; -get_mandatory_class_fields([{variabletypevaluefield,Name,_,'MANDATORY'}|T]) -> - [Name|get_mandatory_class_fields(T)]; -get_mandatory_class_fields([{variabletypevaluesetfield, - Name,_,'MANDATORY'}|T]) -> - [Name|get_mandatory_class_fields(T)]; -get_mandatory_class_fields([_|T]) -> - get_mandatory_class_fields(T); -get_mandatory_class_fields([]) -> []. - -match_field(S,Fields,WithSyntax,ClassFields) -> - match_field(S,Fields,WithSyntax,ClassFields,[]). - -match_field(S,Fields,[W|Ws],ClassFields,Acc) when is_list(W) -> - case catch(match_optional_field(S,Fields,W,ClassFields,[])) of - {'EXIT',_} -> - match_field(Fields,Ws,ClassFields,Acc); %% add S -%% {[Result],RestFields} -> -%% {Result,RestFields,Ws}; - {Result,RestFields} when is_list(Result) -> - {Result,RestFields,Ws}; - _ -> - match_field(S,Fields,Ws,ClassFields,Acc) - end; -match_field(S,Fields,WithSyntax,ClassFields,_Acc) -> - match_mandatory_field(S,Fields,WithSyntax,ClassFields,[]). 
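The rewrite above replaces the old one-clause-per-field-kind version of get_mandatory_class_fields/1 with a single list comprehension filtered by the is_mandatory_class_field/1 predicate (defined further down in this patch). The shape of that refactoring, on a deliberately simplified field representation:

-module(mandatory_filter).
-export([old_style/1, new_style/1]).

%% Fields here are {Kind, Name, Prop} triples; the real class-field tuples
%% also carry types and defaults.

old_style([{typefield, Name, 'MANDATORY'} | T])   -> [Name | old_style(T)];
old_style([{objectfield, Name, 'MANDATORY'} | T]) -> [Name | old_style(T)];
old_style([_ | T]) -> old_style(T);
old_style([]) -> [].

new_style(Fields) ->
    [Name || {_Kind, Name, Prop} <- Fields, Prop =:= 'MANDATORY'].

Both return [a] for [{typefield,a,'MANDATORY'},{objectfield,b,'OPTIONAL'}]; the comprehension simply covers every field kind without repeating clauses.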
- -match_optional_field(_S,RestFields,[],_,Ret) -> - {Ret,RestFields}; -%% An additional optional field within an optional field -match_optional_field(S,Fields,[W|Ws],ClassFields,Ret) when is_list(W) -> - case catch match_optional_field(S,Fields,W,ClassFields,[]) of - {'EXIT',_} when length(Ws) > 0 -> - match_optional_field(S,Fields,Ws,ClassFields,Ret); - {'EXIT',_} -> - {Ret,Fields}; - {asn1,{optional_matcherror,_,_}} when length(Ws) > 0 -> - match_optional_field(S,Fields,Ws,ClassFields,Ret); - {asn1,{optional_matcherror,_,_}} -> - {Ret,Fields}; - {OptionalField,RestFields} -> - match_optional_field(S,RestFields,Ws,ClassFields, - lists:append(OptionalField,Ret)) - end; -%% identify and skip word -match_optional_field(S,[{_,_,#'Externaltypereference'{type=WorS}}|Rest], - [WorS|Ws],ClassFields,Ret) -> - match_optional_field(S,Rest,Ws,ClassFields,Ret); -match_optional_field(S,[],_,ClassFields,Ret) -> - match_optional_field(S,[],[],ClassFields,Ret); -%% identify and skip comma -match_optional_field(S,[{WorS,_}|Rest],[{WorS,_}|Ws],ClassFields,Ret) -> - match_optional_field(S,Rest,Ws,ClassFields,Ret); -%% am optional setting inside another optional setting may be "double-listed" -match_optional_field(S,[Setting],DefinedSyntax,ClassFields,Ret) - when is_list(Setting) -> - match_optional_field(S,Setting,DefinedSyntax,ClassFields,Ret); -%% identify and save field data -match_optional_field(S,[Setting|Rest],[{_,W}|Ws],ClassFields,Ret) -> - ?dbg("matching optional field setting: ~p with user friendly syntax: ~p~n",[Setting,W]), - WorS = - case Setting of - Type when is_record(Type,type) -> Type; - {'ValueFromObject',_,_} -> Setting; - {object,_,_} -> Setting; - {_,_,WordOrSetting} -> WordOrSetting; - Other -> Other - end, - case lists:keysearch(W,2,ClassFields) of - false -> - throw({asn1,{optional_matcherror,WorS,W}}); - {value,CField} -> - {NewField,RestFields} = - convert_to_defaultfield(S,W,[WorS|Rest],CField), - match_optional_field(S,RestFields,Ws,ClassFields,[NewField|Ret]) - end; -match_optional_field(_S,[WorS|_Rest],[W|_Ws],_ClassFields,_Ret) -> - throw({asn1,{optional_matcherror,WorS,W}}). 
- -match_mandatory_field(_S,[],[],_,[Acc]) -> - {Acc,[],[]}; -match_mandatory_field(_S,[],[],_,Acc) -> - {Acc,[],[]}; -match_mandatory_field(S,[],[H|T],CF,Acc) when is_list(H) -> - match_mandatory_field(S,[],T,CF,Acc); -match_mandatory_field(_S,[],WithSyntax,_,_Acc) -> - throw({asn1,{mandatory_matcherror,[],WithSyntax}}); -%match_mandatory_field(_S,Fields,WithSyntax=[W|_Ws],_ClassFields,[Acc]) when is_list(W) -> -match_mandatory_field(_S,Fields,WithSyntax=[W|_Ws],_ClassFields,Acc) when is_list(W), length(Acc) >= 1 -> - {Acc,Fields,WithSyntax}; -%% identify and skip word -%%match_mandatory_field(S,[{_,_,WorS}|Rest], -match_mandatory_field(S,[{_,_,#'Externaltypereference'{type=WorS}}|Rest], - [WorS|Ws],ClassFields,Acc) -> - match_mandatory_field(S,Rest,Ws,ClassFields,Acc); -%% identify and skip comma -match_mandatory_field(S,[{WorS,_}|Rest],[{WorS,_}|Ws],ClassFields,Ret) -> - match_mandatory_field(S,Rest,Ws,ClassFields,Ret); -%% identify and save field data -match_mandatory_field(S,[Setting|Rest],[{_,W}|Ws],ClassFields,Acc) -> - ?dbg("matching field setting: ~p with user friendly syntax: ~p~n",[Setting,W]), - WorS = - case Setting of - {object,_,_} -> Setting; - {_,_,WordOrSetting} -> WordOrSetting; - Type when is_record(Type,type) -> Type; - Other -> Other - end, - case lists:keysearch(W,2,ClassFields) of - false -> - throw({asn1,{mandatory_matcherror,WorS,W}}); - {value,CField} -> - {NewField,RestFields} = - convert_to_defaultfield(S,W,[WorS|Rest],CField), - match_mandatory_field(S,RestFields,Ws,ClassFields,[NewField|Acc]) - end; - -match_mandatory_field(_S,[WorS|_Rest],[W|_Ws],_ClassFields,_Acc) -> - throw({asn1,{mandatory_matcherror,WorS,W}}). - -%% Converts a field of an object from defined syntax to default syntax -%% A field may be a type, a fixed type value, an object, an objectset, -%% -convert_to_defaultfield(S,ObjFieldName,[OFS|RestOFS],CField)-> - ?dbg("convert field: ~p of type: ~p~n",[ObjFieldName,element(1,CField)]), - CurrMod = S#state.mname, - Strip_value_tag = - fun({value_tag,ValueSetting}) -> ValueSetting; - (VS) -> VS - end, - ObjFieldSetting = Strip_value_tag(OFS), - RestSettings = [Strip_value_tag(X)||X <- RestOFS], - case element(1,CField) of - typefield -> - TypeDef= - case ObjFieldSetting of - TypeRec when is_record(TypeRec,type) -> TypeRec#type.def; - TDef when is_record(TDef,typedef) -> - TDef#typedef{checked=true, - typespec=check_type(S,TDef, - TDef#typedef.typespec)}; - _ -> ObjFieldSetting - end, - {Type,SettingsLeft} = - if - is_record(TypeDef,typedef) -> {TypeDef,RestSettings}; - is_record(TypeDef,'ObjectClassFieldType') -> - T=check_type(S,#typedef{typespec=ObjFieldSetting},ObjFieldSetting), - {oCFT_def(S,T),RestSettings}; -% #typedef{checked=true,name=Name,typespec=IT}; - is_tuple(TypeDef), element(1,TypeDef) == pt -> - %% this is an inlined type. 
If constructed - %% type save in data base - T=check_type(S,#typedef{typespec=ObjFieldSetting},ObjFieldSetting), - #'Externaltypereference'{type=PtName} = - element(2,TypeDef), - NameList = [PtName,S#state.tname], - NewName = list_to_atom(asn1ct_gen:list2name(NameList)), - NewTDef=#typedef{checked=true,name=NewName, - typespec=T}, - asn1_db:dbput(S#state.mname,NewName,NewTDef), - %%asn1ct_gen:insert_once(parameterized_objects,{NewName,type,NewTDef}), - insert_once(S,parameterized_objects, - {NewName,type,NewTDef}), - {NewTDef,RestSettings}; - is_tuple(TypeDef), element(1,TypeDef)=='SelectionType' -> - T=check_type(S,#typedef{typespec=ObjFieldSetting}, - ObjFieldSetting), - Name = type_name(S,T), - {#typedef{checked=true,name=Name,typespec=T},RestSettings}; - true -> - case asn1ct_gen:type(asn1ct_gen:get_inner(TypeDef)) of - ERef = #'Externaltypereference'{module=CurrMod} -> - {RefMod,T} = get_referenced_type(S,ERef), - check_and_save(S,ERef#'Externaltypereference'{module=RefMod},T,RestSettings); - - ERef = #'Externaltypereference'{} -> - {RefMod,T} = get_referenced_type(S,ERef), - check_and_save(S,ERef#'Externaltypereference'{module=RefMod},T,RestSettings); - Bif when Bif=={primitive,bif};Bif=={constructed,bif} -> - T = check_type(S,#typedef{typespec=ObjFieldSetting}, - ObjFieldSetting), - {#typedef{checked=true,name=Bif,typespec=T},RestSettings}; - _ -> - %this case should not happen any more - {Mod,T} = - get_referenced_type(S,#'Externaltypereference'{module=S#state.mname,type=ObjFieldSetting}), - case Mod of - CurrMod -> - {T,RestSettings}; - ExtMod -> - #typedef{name=Name} = T, - {T#typedef{name={ExtMod,Name}},RestSettings} - end - end - end, - {{ObjFieldName,Type},SettingsLeft}; - fixedtypevaluefield -> - case ObjFieldName of - Val when is_atom(Val) -> - %% ObjFieldSetting can be a value,an objectidentifiervalue, - %% an element in an enumeration or namednumberlist etc. - ValRef = - case ObjFieldSetting of - ValSetting=#'Externalvaluereference'{} -> - ValSetting; - {'ValueFromObject',{_,ObjRef},FieldName} -> - {_,Object} = get_referenced_type(S,ObjRef), - ChObject = check_object(S,Object, - Object#typedef.typespec), - get_fieldname_element(S,Object#typedef{typespec=ChObject}, - FieldName); - ValSetting = #valuedef{} -> - ValSetting; - ValSetting -> - #valuedef{type=element(3,CField), - value=ValSetting, - module=S#state.mname} - end, - ?dbg("fixedtypevaluefield ValRef: ~p~n",[ValRef]), - case ValRef of - #valuedef{} -> - {{ObjFieldName,check_value(S,ValRef)},RestSettings}; - _ -> - ValDef = - case catch get_referenced_type(S,ValRef) of - {error,_} -> - NewValDef = - #valuedef{name=Val, - type=element(3,CField), - value=ObjFieldSetting, - module=S#state.mname}, - check_value(S,NewValDef); - {M,VDef} when is_record(VDef,valuedef) -> - check_value(update_state(S,M), - %%S#state{mname=M}, - VDef);%% XXX - {M,VDef} -> - check_value(update_state(S,M), - %%S#state{mname=M}, - #valuedef{name=Val, - type=element(3,CField), - value=VDef, - module=M}) - end, - {{ObjFieldName,ValDef},RestSettings} - end; - Val -> - {{ObjFieldName,Val},RestSettings} - end; - fixedtypevaluesetfield -> - {{ObjFieldName,ObjFieldSetting},RestSettings}; - objectfield -> - CheckObject = - fun(O) -> - O#typedef{checked=true,typespec= - check_object(S,O,O#typedef.typespec)} - end, - ObjectSpec = - case ObjFieldSetting of - Ref when is_record(Ref,'Externalvaluereference') -> - %% The object O might be a #valuedef{} if - %% e.g. 
the definition looks like - %% myobj SOMECLASS ::= referencedObject - {M,O} = get_referenced_type(S,Ref), - check_object(S,O,object_to_check(O)), - Ref#'Externalvaluereference'{module=M}; - - {'ValueFromObject',{_,ObjRef},FieldName} -> - %% This is an ObjectFromObject - {_,Object} = get_referenced_type(S,ObjRef), - ChObject = check_object(S,Object, - Object#typedef.typespec), - ObjFromObj= - get_fieldname_element(S,Object#typedef{ - typespec=ChObject}, - FieldName), - CheckObject(ObjFromObj); - ObjDef={object,_,_} -> - %% An object defined inlined in another object - %% class is an objectfield, that implies that - %% {objectsetfield,TypeFieldName,DefinedObjecClass, - %% OptionalitySpec} - %% DefinedObjecClass = #'Externaltypereference'{}| - %% 'TYPE-IDENTIFIER' | 'ABSTRACT-SYNTAX' - ClassName = element(3,CField), - InlinedObjName= - list_to_atom(lists:concat([S#state.tname]++ - ['_',ObjFieldName])), - - ObjSpec = #'Object'{classname=ClassName, - def=ObjDef}, - CheckedObj= - check_object(S,#typedef{typespec=ObjSpec},ObjSpec), - InlObj = #typedef{checked=true,name=InlinedObjName, - typespec=CheckedObj}, - ObjKey = {InlinedObjName,InlinedObjName}, - %% asn1ct_gen:insert_once(inlined_objects,ObjKey), - insert_once(S,inlined_objects,ObjKey), - %% Which module to use here? Could it be other than top_module ? - %% asn1_db:dbput(S#state.mname,InlinedObjName,InlObj), - asn1_db:dbput(get(top_module),InlinedObjName,InlObj), - InlObj; - #type{def=Eref} when is_record(Eref,'Externaltypereference') -> - {_,O} = get_referenced_type(S,Eref), - CheckObject(O); - Other -> - {_,O} = get_referenced_type(S,#'Externaltypereference'{module=S#state.mname,type=Other}), - CheckObject(O) - end, - {{ObjFieldName,ObjectSpec},RestSettings}; - variabletypevaluefield -> - {{ObjFieldName,ObjFieldSetting},RestSettings}; - variabletypevaluesetfield -> - {{ObjFieldName,ObjFieldSetting},RestSettings}; -%% objectset_or_fixedtypevalueset_field -> -%% ok; - objectsetfield -> - ObjSetSpec = get_objectset_def(S,ObjFieldSetting,CField), - ?dbg("objectsetfield, ObjSetSpec:~p~n",[ObjSetSpec]), - {{ObjFieldName, - ObjSetSpec#typedef{checked=true, - typespec=check_object(S,ObjSetSpec, - ObjSetSpec#typedef.typespec)}},RestSettings} - end. 
- -get_objectset_def(S,Ref,CField) - when is_record(Ref,'Externaltypereference'); - is_record(Ref,'Externalvaluereference') -> - {_M,T}=get_referenced_type(S,Ref), - get_objectset_def2(S,T,CField); -get_objectset_def(S,ObjectList,CField) when is_list(ObjectList) -> - %% an objctset defined in the object,though maybe - %% parsed as a SequenceOfValue - %% The ObjectList may be a list of references to - %% objects, a ValueFromObject - ?dbg("objectsetfield: ~p~n",[CField]), - get_objectset_def2(S,ObjectList,CField); -get_objectset_def(S,'EXTENSIONMARK',CField) -> - ?dbg("objectsetfield: ~p~n",[CField]), - get_objectset_def2(S,['EXTENSIONMARK'],CField); -get_objectset_def(_S,ObjFieldSetting={'SingleValue',_},CField) -> - %% a Union of defined objects - ?dbg("objectsetfield, SingleValue~n",[]), - union_of_defed_objs(CField,ObjFieldSetting); -get_objectset_def(_S,ObjFieldSetting={{'SingleValue',_},_},CField) -> - %% a Union of defined objects - ?dbg("objectsetfield, SingleValue~n",[]), - union_of_defed_objs(CField,ObjFieldSetting); -get_objectset_def(S,{object,_,[#type{def={'TypeFromObject', - {object,RefedObj}, - FieldName}}]},_CField) -> - %% This case occurs when an ObjectSetFromObjects - %% production is used - {_M,Def} = get_referenced_type(S,RefedObj), - get_fieldname_element(S,Def,FieldName); -get_objectset_def(S,{object,_,[{setting,_,ERef}]},CField) - when is_record(ERef,'Externaltypereference') -> - {_,T} = get_referenced_type(S,ERef), - get_objectset_def2(S,T,CField); -get_objectset_def(S,#type{def=ERef},_CField) - when is_record(ERef,'Externaltypereference') -> - {_,T} = get_referenced_type(S,ERef), - T; -get_objectset_def(S,ObjFieldSetting,CField) - when is_atom(ObjFieldSetting) -> - ERef = #'Externaltypereference'{module=S#state.mname, - type=ObjFieldSetting}, - {_,T} = get_referenced_type(S,ERef), - get_objectset_def2(S,T,CField). - -get_objectset_def2(_S,T = #typedef{typespec=#'Object'{}},_CField) -> - #typedef{typespec=#'Object'{classname=Class,def=Def}} = T, - T#typedef{typespec=#'ObjectSet'{class=Class,set=[Def]}}; -get_objectset_def2(_S,Set,CField) when is_list(Set) -> - {_,_,Type,_} = CField, - ClassDef = Type#type.def, - #typedef{typespec=#'ObjectSet'{class=ClassDef, - set=Set}}; -get_objectset_def2(_S,T = #typedef{typespec=#'ObjectSet'{}},_CField) -> - T; -get_objectset_def2(S,T,_CField) -> - asn1ct:warning("get_objectset_def2: uncontrolled object set structure:~n~p~n", - [T],S,"get_objectset_def2: uncontrolled object set structure"). - -type_name(S,#type{def=Def}) -> - CurrMod = S#state.mname, - case asn1ct_gen:type(asn1ct_gen:get_inner(Def)) of - #'Externaltypereference'{module=CurrMod,type=Name} -> - Name; - #'Externaltypereference'{module=Mod,type=Name} -> - {Mod,Name}; - Bif when Bif=={primitive,bif};Bif=={constructed,bif} -> - Bif - end. +is_mandatory_class_field({fixedtypevaluefield,_,_,_,'MANDATORY'}) -> + true; +is_mandatory_class_field({objectfield,_,_,_,'MANDATORY'}) -> + true; +is_mandatory_class_field({objectsetfield,_,_,'MANDATORY'}) -> + true; +is_mandatory_class_field({typefield,_,'MANDATORY'}) -> + true; +is_mandatory_class_field({variabletypevaluefield,_,_,'MANDATORY'}) -> + true; +is_mandatory_class_field({variabletypevaluesetfield,_,_,'MANDATORY'}) -> + true; +is_mandatory_class_field(_) -> + false. merged_name(#state{inputmodules=[]},ERef) -> ERef; @@ -2013,38 +1649,18 @@ merged_name(S,ERef=#'Externaltypereference'{module=M}) -> ERef end. 
-oCFT_def(S,T) -> - case get_OCFT_inner(S,T) of - ERef=#'Externaltypereference'{} -> ERef; - {Name,Type} -> #typedef{checked=true,name=Name,typespec=Type}; - 'ASN1_OPEN_TYPE' -> - #typedef{checked=true,typespec=T#type{def='ASN1_OPEN_TYPE'}} - end. - -get_OCFT_inner(_S,T) -> -% Module=S#state.mname, - Def = T#type.def, - case Def#'ObjectClassFieldType'.type of +ocft_def(#type{def=#'ObjectClassFieldType'{type=OCFT}}=T) -> + case OCFT of {fixedtypevaluefield,_,InnerType} -> case asn1ct_gen:type(asn1ct_gen:get_inner(InnerType#type.def)) of - Bif when Bif=={primitive,bif};Bif=={constructed,bif} -> - {Bif,InnerType}; - ERef = #'Externaltypereference'{} -> - ERef + Bif when Bif =:= {primitive,bif}; Bif =:= {constructed,bif} -> + #typedef{checked=true,name=Bif,typespec=InnerType}; + #'Externaltypereference'{}=Ref -> + Ref end; - 'ASN1_OPEN_TYPE' -> 'ASN1_OPEN_TYPE' + 'ASN1_OPEN_TYPE' -> + #typedef{checked=true,typespec=T#type{def='ASN1_OPEN_TYPE'}} end. - - - -union_of_defed_objs({_,_,_ObjClass=#type{def=ClassDef},_},ObjFieldSetting) -> - #typedef{typespec=#'ObjectSet'{class = ClassDef, - set = ObjFieldSetting}}; -union_of_defed_objs({_,_,DefObjClassRef,_},ObjFieldSetting) - when is_record(DefObjClassRef,'Externaltypereference') -> - #typedef{typespec=#'ObjectSet'{class = DefObjClassRef, - set = ObjFieldSetting}}. - check_value(OldS,V) when is_record(V,pvaluesetdef) -> #pvaluesetdef{checked=Checked,type=Type} = V, @@ -2068,8 +1684,7 @@ check_value(OldS,V) when is_record(V,typedef) -> #typedef{typespec=TS} = V, case TS of #'ObjectSet'{class=ClassRef} -> - {RefM,TSDef} = get_referenced_type(OldS,ClassRef), - %%IsObjectSet(TSDef); + {_RefM,TSDef} = get_referenced_type(OldS, ClassRef), case TSDef of #classdef{} -> throw({objectsetdef}); #typedef{typespec=#type{def=Eref}} when @@ -2077,14 +1692,12 @@ check_value(OldS,V) when is_record(V,typedef) -> %% This case if the class reference is a defined %% reference to class check_value(OldS,V#typedef{typespec=TS#'ObjectSet'{class=Eref}}); - #typedef{} -> + #typedef{typespec=HostType} -> % an ordinary value set with a type in #typedef.typespec - ValueSet = TS#'ObjectSet'.set, - Type=check_type(OldS,TSDef,TSDef#typedef.typespec), - Value = check_value(OldS,#valuedef{type=Type, - value=ValueSet, - module=RefM}), - {valueset,Type#type{constraint=Value#valuedef.value}} + ValueSet0 = TS#'ObjectSet'.set, + Constr = check_constraints(OldS, HostType, [ValueSet0]), + Type = check_type(OldS,TSDef,TSDef#typedef.typespec), + {valueset,Type#type{constraint=Constr}} end; _ -> throw({objectsetdef}) @@ -2104,11 +1717,11 @@ check_value(S, #valuedef{}=V) -> end. 
check_valuedef(#state{recordtopname=TopName}=S0, V0) -> - #valuedef{name=Name,type=Vtype,value=Value,module=ModName} = V0, + #valuedef{name=Name,type=Vtype0,value=Value,module=ModName} = V0, V = V0#valuedef{checked=true}, + Vtype = check_type(S0, #typedef{name=Name,typespec=Vtype0},Vtype0), Def = Vtype#type.def, - Constr = Vtype#type.constraint, - S1 = S0#state{type=Vtype,tname=Def,value=V0,vname=Name}, + S1 = S0#state{tname=Def}, SVal = update_state(S1, ModName), case Def of #'Externaltypereference'{type=RecName}=Ext -> @@ -2116,9 +1729,8 @@ check_valuedef(#state{recordtopname=TopName}=S0, V0) -> %% If V isn't a value but an object Type is a #classdef{} S2 = update_state(S1, RefM), case Type of - #classdef{} -> - throw({objectdef}); - #typedef{typespec=TypeSpec} -> + #typedef{typespec=TypeSpec0}=TypeDef -> + TypeSpec = check_type(S2, TypeDef, TypeSpec0), S3 = case is_contextswitchtype(Type) of true -> S2; @@ -2135,7 +1747,7 @@ check_valuedef(#state{recordtopname=TopName}=S0, V0) -> V#valuedef{type=Type}), V#valuedef{value=CheckedVal} end; - 'ANY' -> + 'ASN1_OPEN_TYPE' -> {opentypefieldvalue,ANYType,ANYValue} = Value, CheckedV = check_value(SVal,#valuedef{name=Name, type=ANYType, @@ -2143,19 +1755,12 @@ check_valuedef(#state{recordtopname=TopName}=S0, V0) -> module=ModName}), V#valuedef{value=CheckedV#valuedef.value}; 'INTEGER' -> - ok = validate_integer(SVal, Value, [], Constr), V#valuedef{value=normalize_value(SVal, Vtype, Value, [])}; - {'INTEGER',NamedNumberList} -> - ok = validate_integer(SVal, Value, NamedNumberList, Constr), + {'INTEGER',_NamedNumberList} -> V#valuedef{value=normalize_value(SVal, Vtype, Value, [])}; #'SEQUENCE'{} -> - {ok,SeqVal} = convert_external(SVal, Value), + {ok,SeqVal} = convert_external(SVal, Vtype, Value), V#valuedef{value=normalize_value(SVal, Vtype, SeqVal, TopName)}; - {'SelectionType',SelName,SelT} -> - CheckedT = check_selectiontype(SVal, SelName, SelT), - NewV = V#valuedef{type=CheckedT}, - SelVDef = check_value(S1#state{value=NewV}, NewV), - V#valuedef{value=SelVDef#valuedef.value}; _ -> V#valuedef{value=normalize_value(SVal, Vtype, Value, TopName)} end. @@ -2169,179 +1774,97 @@ is_contextswitchtype(#typedef{name='CHARACTER STRING'}) -> is_contextswitchtype(_) -> false. -% validate_integer(S,{identifier,Pos,Id},NamedNumberList,Constr) -> -% case lists:keysearch(Id,1,NamedNumberList) of -% {value,_} -> ok; -% false -> error({value,"unknown NamedNumber",S}) -% end; -%% This case occurs when there is a valuereference -%% validate_integer(S=#state{mname=M}, -%% #'Externalvaluereference'{module=M,value=Id}=Ref, -validate_integer(S,#'Externalvaluereference'{value=Id}=Ref, - NamedNumberList,Constr) -> - case lists:keysearch(Id,1,NamedNumberList) of - {value,_} -> ok; - false -> validate_integer_ref(S,Ref,NamedNumberList,Constr) - %%error({value,"unknown NamedNumber",S}) - end; -validate_integer(S,Id,NamedNumberList,Constr) when is_atom(Id) -> - case lists:keysearch(Id,1,NamedNumberList) of - {value,_} -> ok; - false -> validate_integer_ref(S,Id,NamedNumberList,Constr) - %error({value,"unknown NamedNumber",S}) +%%% +%%% Start of OBJECT IDENTFIER/RELATIVE-OID validation. +%%% + +validate_objectidentifier(S, OidType, #'Externalvaluereference'{}=Id) -> + %% Must be an OBJECT IDENTIFIER or RELATIVE-OID depending on OidType. + get_oid_value(S, OidType, false, Id); +validate_objectidentifier(S, OidType, {'ValueFromObject',{object,Obj},Fields}) -> + %% Must be an OBJECT IDENTIFIER/RELATIVE-OID depending on OidType. 
+ case extract_field(S, Obj, Fields) of + #valuedef{checked=true,value=Value,type=Type} when is_tuple(Value) -> + _ = get_oid_type(S, OidType, Type), + Value; + _ -> + asn1_error(S, {illegal_oid,OidType}) end; -validate_integer(_S,Value,_NamedNumberList,Constr) when is_integer(Value) -> - check_integer_range(Value,Constr). - -validate_integer_ref(S,Id,_,_) when is_atom(Id) -> - error({value,"unknown integer referens",S}); -validate_integer_ref(S,Ref,NamedNumberList,Constr) -> - case get_referenced_type(S,Ref) of - {M,V} when is_record(V,valuedef) -> - NewS = update_state(S,M), - case check_value(NewS,V) of - #valuedef{type=#type{def='INTEGER'},value=Value} -> - validate_integer(NewS,Value,NamedNumberList,Constr); - _Err -> error({value,"unknown integer referens",S}) +validate_objectidentifier(S, OidType, + [{#seqtag{module=Mod,pos=Pos,val=Atom},Val}]) -> + %% This case is when an OBJECT IDENTIFIER value has been parsed as a + %% SEQUENCE value. + Rec = #'Externalvaluereference'{pos=Pos, + module=Mod, + value=Atom}, + validate_oid(S, OidType, [Rec,Val], []); +validate_objectidentifier(S, OidType, [_|_]=L0) -> + validate_oid(S, OidType, L0, []); +validate_objectidentifier(S, OidType, _) -> + asn1_error(S, {illegal_oid,OidType}). + +get_oid_value(S, OidType, AllowInteger, #'Externalvaluereference'{}=Id) -> + case get_referenced_type(S, Id) of + {_,#valuedef{checked=Checked,type=Type,value=V}} -> + case get_oid_type(S, OidType, Type) of + 'INTEGER' when not AllowInteger -> + asn1_error(S, {illegal_oid,OidType}); + _ when Checked -> + V; + 'INTEGER' -> + V; + _ -> + validate_objectidentifier(S, OidType, V) end; _ -> - error({value,"unknown integer referens",S}) + asn1_error(S, {illegal_oid,OidType}) end. - - - -check_integer_range(_Int, Constr) when is_list(Constr) -> - ok. -%%------------ -%% This can be removed when the old parser is removed -%% The function removes 'space' atoms from the list - -is_space_list([H],Acc) -> - lists:reverse([H|Acc]); -is_space_list([H,space|T],Acc) -> - is_space_list(T,[H|Acc]); -is_space_list([],Acc) -> - lists:reverse(Acc); -is_space_list([H|T],Acc) -> - is_space_list(T,[H|Acc]). - -validate_objectidentifier(S,OID,ERef,C) - when is_record(ERef,'Externalvaluereference') -> - validate_objectidentifier(S,OID,[ERef],C); -validate_objectidentifier(S,OID,Tup,C) when is_tuple(Tup) -> - validate_objectidentifier(S,OID,tuple_to_list(Tup),C); -validate_objectidentifier(S,OID,L,_) -> - NewL = is_space_list(L,[]), - case validate_objectidentifier1(S,OID,NewL) of - NewL2 when is_list(NewL2) ->{ok,list_to_tuple(NewL2)}; - Other -> {ok,Other} - end. - -validate_objectidentifier1(S, OID, [Id|T]) - when is_record(Id,'Externalvaluereference') -> - case catch get_referenced_type(S,Id) of - {M,V} when is_record(V,valuedef) -> - NewS = update_state(S,M), - case check_value(NewS,V) of - #valuedef{type=#type{def=ERef},checked=true, - value=Value} when is_tuple(Value) -> - case is_object_id(OID,NewS,ERef) of - true -> - %% T must be a RELATIVE-OID - validate_oid(true,NewS, rel_oid, T, lists:reverse(tuple_to_list(Value))); - _ -> - error({value, {"illegal "++to_string(OID),[Id|T]}, S}) - end; - _ -> - error({value, {"illegal "++to_string(OID),[Id|T]}, S}) - end; - _ -> - validate_oid(true,S, OID, [Id|T], []) - end; -validate_objectidentifier1(S,OID,V) -> - validate_oid(true,S,OID,V,[]). 
- -validate_oid(false, S, OID, V, Acc) -> - error({value, {"illegal "++to_string(OID), V,Acc}, S}); -validate_oid(_,_, _, [], Acc) -> - lists:reverse(Acc); -validate_oid(_, S, OID, [Value|Vrest], Acc) when is_integer(Value) -> - validate_oid(valid_objectid(OID,Value,Acc),S, OID, Vrest, [Value|Acc]); -validate_oid(_, S, OID, [{'NamedNumber',_Name,Value}|Vrest], Acc) +validate_oid(S, OidType, [], Acc) -> + Oid = lists:reverse(Acc), + validate_oid_path(S, OidType, Oid), + list_to_tuple(Oid); +validate_oid(S, OidType, [Value|Vrest], Acc) when is_integer(Value) -> + validate_oid(S, OidType, Vrest, [Value|Acc]); +validate_oid(S, OidType, [{'NamedNumber',_Name,Value}|Vrest], Acc) when is_integer(Value) -> - validate_oid(valid_objectid(OID,Value,Acc), S, OID, Vrest, [Value|Acc]); -validate_oid(_, S, OID, [Id|Vrest], Acc) - when is_record(Id,'Externalvaluereference') -> - case catch get_referenced_type(S, Id) of - {M,V} when is_record(V,valuedef) -> - NewS = update_state(S,M), - NewVal = case check_value(NewS, V) of - #valuedef{checked=true,value=Value} -> - fun(Int) when is_integer(Int) -> [Int]; - (L) when is_list(L) -> L; - (T) when is_tuple(T) -> tuple_to_list(T) - end (Value); - _ -> - error({value, {"illegal "++to_string(OID), - [Id|Vrest],Acc}, S}) - end, - case NewVal of - List when is_list(List) -> - validate_oid(valid_objectid(OID,NewVal,Acc), NewS, - OID, Vrest,lists:reverse(NewVal)++Acc); - _ -> - NewVal - end; - _ -> + validate_oid(S, OidType, Vrest, [Value|Acc]); +validate_oid(S, OidType, [#'Externalvaluereference'{}=Id|Vrest], Acc) -> + NeededOidType = case Acc of + [] -> o_id; + [_|_] -> rel_oid + end, + try get_oid_value(S, NeededOidType, true, Id) of + Val when is_integer(Val) -> + validate_oid(S, OidType, Vrest, [Val|Acc]); + Val when is_tuple(Val) -> + L = tuple_to_list(Val), + validate_oid(S, OidType, Vrest, lists:reverse(L, Acc)) + catch + _:_ -> case reserved_objectid(Id#'Externalvaluereference'.value, Acc) of Value when is_integer(Value) -> - validate_oid(valid_objectid(OID,Value,Acc), - S, OID,Vrest, [Value|Acc]); + validate_oid(S, OidType,Vrest, [Value|Acc]); false -> - error({value, {"illegal "++to_string(OID),[Id,Vrest],Acc}, S}) + asn1_error(S, {illegal_oid,OidType}) end end; -validate_oid(_, S, OID, [{#seqtag{module=Mod,val=Atom},Value}], []) - when is_atom(Atom),is_integer(Value) -> - %% this case when an OBJECT IDENTIFIER value has been parsed as a - %% SEQUENCE value - Rec = #'Externalvaluereference'{module=Mod, - value=Atom}, - validate_objectidentifier1(S, OID, [Rec,Value]); -validate_oid(_, S, OID, [{#seqtag{module=Mod,val=Atom},EVRef}], []) - when is_atom(Atom),is_record(EVRef,'Externalvaluereference') -> - %% this case when an OBJECT IDENTIFIER value has been parsed as a - %% SEQUENCE value OTP-4354 - Rec = #'Externalvaluereference'{module=Mod, - value=Atom}, - validate_objectidentifier1(S, OID, [Rec,EVRef]); -validate_oid(_, S, OID, [#seqtag{module=Mod,val=Atom}|Rest], Acc) - when is_atom(Atom) -> - Rec = #'Externalvaluereference'{module=Mod, - value=Atom}, - validate_oid(true,S, OID, [Rec|Rest],Acc); -validate_oid(_, S, OID, V, Acc) -> - error({value, {"illegal "++to_string(OID),V,Acc},S}). 
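When an arc of the value is itself a reference to another OID or RELATIVE-OID value, the clause above splices the referenced arcs into the (reversed) accumulator with tuple_to_list/1 and lists:reverse/2:

1> Prefix = {1, 3, 6}.    %% arcs taken from the referenced value
{1,3,6}
2> Acc = [0].             %% arcs collected so far, most recent first
[0]
3> lists:reverse(tuple_to_list(Prefix), Acc).
[6,3,1,0]

The final validate_oid/4 clause then reverses the whole accumulator, so the arcs come out in their original order before validate_oid_path/3 is applied.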
- -is_object_id(OID,S,ERef=#'Externaltypereference'{}) -> - {_,OI} = get_referenced_type(S,ERef), - is_object_id(OID,S,OI#typedef.typespec); -is_object_id(o_id,_S,'OBJECT IDENTIFIER') -> - true; -is_object_id(rel_oid,_S,'RELATIVE-OID') -> - true; -is_object_id(_,_S,'INTEGER') -> - true; -is_object_id(OID,S,#type{def=Def}) -> - is_object_id(OID,S,Def); -is_object_id(_,_S,_) -> - false. - -to_string(o_id) -> - "OBJECT IDENTIFIER"; -to_string(rel_oid) -> - "RELATIVE-OID". +validate_oid(S, OidType, _V, _Acc) -> + asn1_error(S, {illegal_oid,OidType}). + +get_oid_type(S, OidType, #type{def=Def}) -> + get_oid_type(S, OidType, Def); +get_oid_type(S, OidType, #'Externaltypereference'{}=Id) -> + {_,OI} = get_referenced_type(S, Id), + get_oid_type(S, OidType, OI#typedef.typespec); +get_oid_type(_S, o_id, 'OBJECT IDENTIFIER'=T) -> + T; +get_oid_type(_S, rel_oid, 'RELATIVE-OID'=T) -> + T; +get_oid_type(_S, _, 'INTEGER'=T) -> + T; +get_oid_type(S, OidType, _) -> + asn1_error(S, {illegal_oid,OidType}). %% ITU-T Rec. X.680 Annex B - D reserved_objectid('itu-t',[]) -> 0; @@ -2380,7 +1903,6 @@ reserved_objectid('x',[0,0]) -> 24; reserved_objectid('y',[0,0]) -> 25; reserved_objectid('z',[0,0]) -> 26; - reserved_objectid(iso,[]) -> 1; %% arcs below "iso", note that number 1 is not used reserved_objectid('standard',[1]) -> 0; @@ -2392,25 +1914,22 @@ reserved_objectid('joint-iso-ccitt',[]) -> 2; reserved_objectid(_,_) -> false. -valid_objectid(_OID,[],_Acc) -> - true; -valid_objectid(OID,[H|T],Acc) -> - case valid_objectid(OID, H, Acc) of - true -> - valid_objectid(OID,T,[H|Acc]); - _ -> - false - end; -valid_objectid(o_id,I,[]) when I =:= 0; I =:= 1; I =:= 2 -> true; -valid_objectid(o_id,_I,[]) -> false; -valid_objectid(o_id,I,[0]) when I >= 0; I =< 4 -> true; -valid_objectid(o_id,_I,[0]) -> false; -valid_objectid(o_id,I,[1]) when I =:= 0; I =:= 2; I =:= 3 -> true; -valid_objectid(o_id,_I,[1]) -> false; -valid_objectid(o_id,_I,[2]) -> true; -valid_objectid(_,_,_) -> true. - -convert_external(S=#state{type=Vtype}, Value) -> +validate_oid_path(_, rel_oid, _) -> + ok; +validate_oid_path(_, o_id, [0,I|_]) when 0 =< I, I =< 9 -> + ok; +validate_oid_path(_, o_id, [1,I|_]) when 0 =< I, I =< 3 -> + ok; +validate_oid_path(_, o_id, [2|_]) -> + ok; +validate_oid_path(S, o_id=OidType, _) -> + asn1_error(S, {illegal_oid,OidType}). + +%%% +%%% End of OBJECT IDENTFIER/RELATIVE-OID validation. +%%% + +convert_external(S, Vtype, Value) -> case Vtype of #type{tag=[{tag,'UNIVERSAL',8,'IMPLICIT',32}]} -> %% this is an 'EXTERNAL' (or INSTANCE OF) @@ -2435,7 +1954,7 @@ to_EXTERNAL1990(S, [{#seqtag{val=identification}=T, to_EXTERNAL1990(S, Rest, [{T#seqtag{val='indirect-reference'},PCid}, {T#seqtag{val='direct-reference'},TrStx}]); to_EXTERNAL1990(S, _) -> - error({value,"illegal value in EXTERNAL type",S}). + asn1_error(S, illegal_external_value). to_EXTERNAL1990(S, [V={#seqtag{val='data-value-descriptor'},_}|Rest], Acc) -> to_EXTERNAL1990(S, Rest, [V|Acc]); @@ -2443,7 +1962,7 @@ to_EXTERNAL1990(_S, [{#seqtag{val='data-value'}=T,Val}], Acc) -> Encoding = {T#seqtag{val=encoding},{'CHOICE',{'octet-aligned',Val}}}, lists:reverse([Encoding|Acc]); to_EXTERNAL1990(S, _, _) -> - error({value,"illegal value in EXTERNAL type",S}). + asn1_error(S, illegal_external_value). 
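validate_oid_path/3 above is the final check on a complete OBJECT IDENTIFIER: the first arc must be 0, 1 or 2, and under roots 0 and 1 the second arc is limited to the ranges actually assigned in reserved_objectid/2 (0..9 and 0..3 respectively); RELATIVE-OID values are not restricted. Restated as a standalone predicate (module name invented):

-module(oid_path).
-export([valid/2]).

valid(rel_oid, _Arcs) -> true;                        %% RELATIVE-OID: no restriction
valid(o_id, [0, I | _]) when 0 =< I, I =< 9 -> true;  %% under itu-t(0): second arc 0..9
valid(o_id, [1, I | _]) when 0 =< I, I =< 3 -> true;  %% under iso(1): second arc 0..3
valid(o_id, [2 | _]) -> true;                         %% joint-iso-itu-t(2): anything
valid(o_id, _) -> false.

oid_path:valid(o_id, [1,2,840,113549]) is true, while oid_path:valid(o_id, [3,1]) is false; the latter is the case reported above as {illegal_oid,o_id}.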
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% Functions to normalize the default values of SEQUENCE @@ -2453,17 +1972,16 @@ normalize_value(_,_,mandatory,_) -> mandatory; normalize_value(_,_,'OPTIONAL',_) -> 'OPTIONAL'; -normalize_value(S0, Type, {'DEFAULT',Value}, NameList) -> - S = S0#state{value=Value}, +normalize_value(S, Type, {'DEFAULT',Value}, NameList) -> case catch get_canonic_type(S,Type,NameList) of {'BOOLEAN',CType,_} -> normalize_boolean(S,Value,CType); {'INTEGER',CType,_} -> - normalize_integer(S,Value,CType); + normalize_integer(S, Value, CType); {'BIT STRING',CType,_} -> normalize_bitstring(S,Value,CType); - {'OCTET STRING',CType,_} -> - normalize_octetstring(S0, Value, CType); + {'OCTET STRING',_,_} -> + normalize_octetstring(S, Value); {'NULL',_CType,_} -> %%normalize_null(Value); 'NULL'; @@ -2499,39 +2017,41 @@ normalize_value(S0, Type, {'DEFAULT',Value}, NameList) -> normalize_value(S,Type,Val,NameList) -> normalize_value(S,Type,{'DEFAULT',Val},NameList). -normalize_boolean(S,{Name,Bool},CType) when is_atom(Name) -> - normalize_boolean(S,Bool,CType); normalize_boolean(_,true,_) -> true; normalize_boolean(_,false,_) -> false; normalize_boolean(S,Bool=#'Externalvaluereference'{},CType) -> get_normalized_value(S,Bool,CType,fun normalize_boolean/3,[]); -normalize_boolean(_,Other,_) -> - throw({error,{asn1,{'invalid default value',Other}}}). +normalize_boolean(S, _, _) -> + asn1_error(S, {illegal_value, "BOOLEAN"}). -normalize_integer(_S,Int,_) when is_integer(Int) -> +normalize_integer(_S, Int, _) when is_integer(Int) -> Int; -normalize_integer(_S,{Name,Int},_) when is_atom(Name),is_integer(Int) -> - Int; -normalize_integer(S,{Name,Int=#'Externalvaluereference'{}}, - Type) when is_atom(Name) -> - normalize_integer(S,Int,Type); -normalize_integer(S,Int=#'Externalvaluereference'{value=Name},Type) -> - case Type of - NNL when is_list(NNL) -> - case lists:keysearch(Name,1,NNL) of - {value,{Name,Val}} -> +normalize_integer(S, #'Externalvaluereference'{value=Name}=Ref, NNL) -> + case lists:keyfind(Name, 1, NNL) of + {Name,Val} -> + Val; + false -> + try get_referenced_value(S, Ref) of + Val when is_integer(Val) -> Val; - false -> - get_normalized_value(S,Int,Type, - fun normalize_integer/3,[]) - end; + _ -> + asn1_error(S, illegal_integer_value) + catch + throw:_ -> + asn1_error(S, illegal_integer_value) + end + end; +normalize_integer(S, {'ValueFromObject',{object,Obj},FieldNames}, _) -> + case extract_field(S, Obj, FieldNames) of + #valuedef{value=Val} when is_integer(Val) -> + Val; _ -> - get_normalized_value(S,Int,Type,fun normalize_integer/3,[]) + asn1_error(S, illegal_integer_value) end; -normalize_integer(_,Int,_) -> - exit({'Unknown INTEGER value',Int}). +normalize_integer(S, _, _) -> + asn1_error(S, illegal_integer_value). %% normalize_bitstring(S, Value, Type) -> bitstring() %% Convert a literal value for a BIT STRING to an Erlang bit string. @@ -2543,36 +2063,34 @@ normalize_bitstring(S, Value, Type)-> {bstring,String} when is_list(String) -> bstring_to_bitstring(String); #'Externalvaluereference'{} -> - get_normalized_value(S, Value, Type, - fun normalize_bitstring/3, []); - RecList when is_list(RecList) -> - F = fun(#'Externalvaluereference'{value=Name}) -> - case lists:keymember(Name, 1, Type) of - true -> Name; - false -> throw({error,false}) - end; - (Name) when is_atom(Name) -> - %% Already normalized. 
- Name; - (Other) -> - throw({error,Other}) - end, - try - lists:map(F, RecList) - catch - throw:{error,Reason} -> - asn1ct:warning("default value not " - "compatible with type definition ~p~n", - [Reason],S, - "default value not " - "compatible with type definition"), - Value + Val = get_referenced_value(S, Value), + normalize_bitstring(S, Val, Type); + {'ValueFromObject',{object,Obj},FieldNames} -> + case extract_field(S, Obj, FieldNames) of + #valuedef{value=Val} -> + normalize_bitstring(S, Val, Type); + _ -> + asn1_error(S, {illegal_value, "BIT STRING"}) end; + RecList when is_list(RecList) -> + [normalize_bs_item(S, Item, Type) || Item <- RecList]; Bs when is_bitstring(Bs) -> %% Already normalized. - Bs + Bs; + _ -> + asn1_error(S, {illegal_value, "BIT STRING"}) end. +normalize_bs_item(S, #'Externalvaluereference'{value=Name}, Type) -> + case lists:keymember(Name, 1, Type) of + true -> Name; + false -> asn1_error(S, {illegal_value, "BIT STRING"}) + end; +normalize_bs_item(_, Atom, _) when is_atom(Atom) -> + Atom; +normalize_bs_item(S, _, _) -> + asn1_error(S, {illegal_value, "BIT STRING"}). + hstring_to_binary(L) -> byte_align(hstring_to_bitstring(L)). @@ -2600,29 +2118,35 @@ hex_to_int(D) when $A =< D, D =< $F -> D - ($A - 10). %% {bstring,String} each element in String corresponds to one bit in an octet %% {hstring,String} each element in String corresponds to one byte in an octet %% #'Externalvaluereference' -normalize_octetstring(S,Value,CType) -> +normalize_octetstring(S, Value) -> case Value of {bstring,String} -> bstring_to_binary(String); {hstring,String} -> hstring_to_binary(String); - Rec when is_record(Rec,'Externalvaluereference') -> - get_normalized_value(S,Value,CType, - fun normalize_octetstring/3,[]); - {Name,String} when is_atom(Name) -> - normalize_octetstring(S,String,CType); + #'Externalvaluereference'{} -> + case get_referenced_value(S, Value) of + String when is_binary(String) -> + String; + Other -> + normalize_octetstring(S, Other) + end; + {'ValueFromObject',{object,Obj},FieldNames} -> + case extract_field(S, Obj, FieldNames) of + #valuedef{value=Val} when is_binary(Val) -> + Val; + _ -> + asn1_error(S, illegal_octet_string_value) + end; _ -> - Item = S#state.value, - asn1_error(S, Item, illegal_octet_string_value) + asn1_error(S, illegal_octet_string_value) end. normalize_objectidentifier(S, Value) -> - {ok,Val} = validate_objectidentifier(S, o_id, Value, []), - Val. + validate_objectidentifier(S, o_id, Value). -normalize_relative_oid(S,Value) -> - {ok,Val} = validate_objectidentifier(S, rel_oid, Value, []), - Val. +normalize_relative_oid(S, Value) -> + validate_objectidentifier(S, rel_oid, Value). normalize_objectdescriptor(Value) -> Value. @@ -2644,40 +2168,22 @@ lookup_enum_value(S, Id, NNL) when is_atom(Id) -> {_,_}=Ret -> Ret; false -> - asn1_error(S, S#state.value, {undefined,Id}) + asn1_error(S, {undefined,Id}) end. 
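normalize_bitstring/3 and normalize_octetstring/2 above turn the parsed literals {bstring,String} and {hstring,String} into Erlang bit strings and binaries through bstring_to_bitstring/1, hstring_to_bitstring/1 and hex_to_int/1. Stripped of byte alignment and error handling, the two conversions amount to this standalone sketch:

-module(asn1_literals).
-export([bstring_to_bits/1, hstring_to_bits/1]).

%% 'B literal: one bit per character, e.g. "0101" -> <<5:4>>.
bstring_to_bits(Cs) ->
    << <<(C - $0):1>> || C <- Cs >>.

%% 'H literal: four bits per character, e.g. "1F" -> <<31>>.
hstring_to_bits(Cs) ->
    << <<(hex_to_int(C)):4>> || C <- Cs >>.

hex_to_int(D) when $0 =< D, D =< $9 -> D - $0;
hex_to_int(D) when $A =< D, D =< $F -> D - ($A - 10).

For OCTET STRING defaults the real code additionally pads the result of an hstring to a whole number of octets via byte_align/1, since those values must end up as binaries.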
-normalize_choice(S,{'CHOICE',{C,V}},CType,NameList) when is_atom(C) -> - case catch lists:keysearch(C,#'ComponentType'.name,CType) of - {value,#'ComponentType'{typespec=CT,name=Name}} -> - {C,normalize_value(S,CT,{'DEFAULT',V}, - [Name|NameList])}; - Other -> - asn1ct:warning("Wrong format of type/value ~p/~p~n",[Other,V],S, - "Wrong format of type/value"), - {C,V} +normalize_choice(S, {'CHOICE',{C,V}}, CType, NameList) + when is_atom(C) -> + case lists:keyfind(C, #'ComponentType'.name, CType) of + #'ComponentType'{typespec=CT,name=Name} -> + {C,normalize_value(S, CT, {'DEFAULT',V}, [Name|NameList])}; + false -> + asn1_error(S, {illegal_id,C}) end; -normalize_choice(S,{'DEFAULT',ValueList},CType,NameList) when is_list(ValueList) -> - lists:map(fun(X)-> normalize_choice(S,X,CType,NameList) end, ValueList); -normalize_choice(S,Val=#'Externalvaluereference'{},CType,NameList) -> - {M,#valuedef{value=V}}=get_referenced_type(S,Val), - normalize_choice(update_state(S,M),{'CHOICE',V},CType,NameList); -% get_normalized_value(S,Val,CType,fun normalize_choice/4,[NameList]); -normalize_choice(S,CV={Name,_ChoiceVal},CType,NameList) +normalize_choice(S,CV={Name,_ChoiceVal},CType,NameList) when is_atom(Name) -> -% normalize_choice(S,ChoiceVal,CType,NameList). normalize_choice(S,{'CHOICE',CV},CType,NameList); -normalize_choice(_S,V,_CType,_NameList) -> - exit({error,{bad_choice_value,V}}). - -%% normalize_choice(NameList,S,CVal = {'CHOICE',{_,_}},CType,_) -> -%% normalize_choice(S,CVal,CType,NameList); -%% normalize_choice(NameList,S,CVal={'DEFAULT',VL},CType,_) when is_list(VL)-> -%% normalize_choice(S,CVal,CType,NameList); -%% normalize_choice(NameList,S,CV={Name,_CV},CType,_) when is_atom(Name)-> -%% normalize_choice(S,{'CHOICE',CV},CType,NameList); -%% normalize_choice(_,_S,V,_,_) -> -%% V. +normalize_choice(S, V, _CType, _NameList) -> + asn1_error(S, {illegal_id, error_value(V)}). normalize_sequence(S,Value,Components,NameList) when is_tuple(Components) -> @@ -2732,12 +2238,9 @@ normalized_record(SorS,S,Value,Components,NameList) -> Value; _ -> NoComps = length(Components), - case normalize_seq_or_set(SorS,S,Value,Components,NameList,[]) of - ListOfVals when length(ListOfVals) == NoComps -> - list_to_tuple([NewName|ListOfVals]); - _ -> - error({type,{illegal,default,value,Value},S}) - end + ListOfVals = normalize_seq_or_set(SorS,S,Value,Components,NameList,[]), + NoComps = length(ListOfVals), %% Assert + list_to_tuple([NewName|ListOfVals]) end. is_record_normalized(S,Name,V = #'Externalvaluereference'{},NumComps) -> case get_referenced_type(S,V) of @@ -2750,10 +2253,11 @@ is_record_normalized(_S,Name,Value,NumComps) when is_tuple(Value) -> is_record_normalized(_,_,_,_) -> false. 
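In normalized_record/5 above, the old length comparison plus explicit error tuple is replaced by the bound-variable match NoComps = length(ListOfVals) (note the %% Assert comment): the usual Erlang idiom for turning an invariant into an assertion, which either succeeds silently or crashes with badmatch. For example:

1> NoComps = 3.
3
2> NoComps = length([a, b, c]).   %% invariant holds; nothing happens
3
3> NoComps = length([a, b]).      %% invariant broken
** exception error: no match of right hand side value 2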
-normalize_seq_or_set(SorS, S, [{#seqtag{val=Cname},V}|Vs], +normalize_seq_or_set(SorS, S, + [{#seqtag{val=Cname},V}|Vs], [#'ComponentType'{name=Cname,typespec=TS}|Cs], NameList, Acc) -> - NewNameList = + NewNameList = case TS#type.def of #'Externaltypereference'{type=TName} -> [TName]; @@ -2761,24 +2265,26 @@ normalize_seq_or_set(SorS, S, [{#seqtag{val=Cname},V}|Vs], end, NVal = normalize_value(S,TS,{'DEFAULT',V},NewNameList), normalize_seq_or_set(SorS,S,Vs,Cs,NameList,[NVal|Acc]); -normalize_seq_or_set(SorS,S,Values=[{_Cname1,_V}|_Vs], +normalize_seq_or_set(SorS, S, + Values=[{#seqtag{val=Cname0},_V}|_Vs], [#'ComponentType'{prop='OPTIONAL'}|Cs], - NameList,Acc) -> + NameList, Acc) -> + verify_valid_component(S, Cname0, Cs), normalize_seq_or_set(SorS,S,Values,Cs,NameList,[asn1_NOVALUE|Acc]); -normalize_seq_or_set(SorS,S,Values=[{_Cname1,_V}|_Vs], - [#'ComponentType'{name=Cname2,typespec=TS, - prop={'DEFAULT',Value}}|Cs], - NameList,Acc) -> - NewNameList = +normalize_seq_or_set(SorS, S, + Values=[{#seqtag{val=Cname0},_V}|_Vs], + [#'ComponentType'{name=Cname,typespec=TS, + prop={'DEFAULT',Value}}|Cs], + NameList, Acc) -> + verify_valid_component(S, Cname0, Cs), + NewNameList = case TS#type.def of #'Externaltypereference'{type=TName} -> [TName]; - _ -> [Cname2|NameList] + _ -> [Cname|NameList] end, NVal = normalize_value(S,TS,{'DEFAULT',Value},NewNameList), normalize_seq_or_set(SorS,S,Values,Cs,NameList,[NVal|Acc]); -normalize_seq_or_set(_SorS,_S,[],[],_,Acc) -> - lists:reverse(Acc); %% If default value is {} ComponentTypes in SEQUENCE are marked DEFAULT %% or OPTIONAL (or the type is defined SEQUENCE{}, which is handled by %% the previous case). @@ -2801,9 +2307,23 @@ normalize_seq_or_set(SorS,S,Value=#'Externalvaluereference'{}, Cs,NameList,Acc) -> get_normalized_value(S,Value,Cs,fun normalize_seq_or_set/6, [SorS,NameList,Acc]); -normalize_seq_or_set(_SorS,S,V,_,_,_) -> - error({type,{illegal,default,value,V},S}). - +normalize_seq_or_set(_SorS, _S, [], [], _, Acc) -> + lists:reverse(Acc); +normalize_seq_or_set(_SorS, S, V, Cs, _, _) -> + case V of + [{#seqtag{val=Name},_}|_] -> + asn1_error(S, {illegal_id,error_value(Name)}); + [] -> + [#'ComponentType'{name=Name}|_] = Cs, + asn1_error(S, {missing_id,error_value(Name)}) + end. + +verify_valid_component(S, Name, Cs) -> + case lists:keyfind(Name, #'ComponentType'.name, Cs) of + false -> asn1_error(S, {illegal_id,error_value(Name)}); + #'ComponentType'{} -> ok + end. + normalize_seqof(S,Value,Type,NameList) -> normalize_s_of('SEQUENCE OF',S,Value,Type,NameList). @@ -2859,10 +2379,7 @@ normalize_restrictedstring(_S,CString,_) when is_list(CString) -> %% definedvalue case or argument in a parameterized type normalize_restrictedstring(S,ERef,CType) when is_record(ERef,'Externalvaluereference') -> get_normalized_value(S,ERef,CType, - fun normalize_restrictedstring/3,[]); -%% -normalize_restrictedstring(S,{Name,Val},CType) when is_atom(Name) -> - normalize_restrictedstring(S,Val,CType). + fun normalize_restrictedstring/3,[]). normalize_objectclassfieldvalue(S,{opentypefieldvalue,Type,Value},NameList) -> %% An open type has per definition no type. 
Thus should the type @@ -2910,6 +2427,8 @@ call_Func(S,Val,Type,Func,ArgList) -> get_canonic_type(S,Type,NameList) -> {InnerType,NewType,NewNameList} = case Type#type.def of + 'INTEGER'=Name -> + {Name,[],NameList}; Name when is_atom(Name) -> {Name,Type,NameList}; Ref when is_record(Ref,'Externaltypereference') -> @@ -2964,8 +2483,8 @@ check_formal_parameter(_, {_,_}) -> ok; check_formal_parameter(_, #'Externaltypereference'{}) -> ok; -check_formal_parameter(S, #'Externalvaluereference'{value=Name}=Ref) -> - asn1_error(S, Ref, {illegal_typereference,Name}). +check_formal_parameter(S, #'Externalvaluereference'{value=Name}) -> + asn1_error(S, {illegal_typereference,Name}). % check_type(S,Type,ObjSpec={{objectclassname,_},_}) -> % check_class(S,ObjSpec); @@ -2977,7 +2496,7 @@ check_type(_S,Type,Ts) when is_record(Type,typedef), Ts; check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) -> {Def,Tag,Constr,IsInlined} = - case match_parameters(S,Ts#type.def,S#state.parameters) of + case match_parameter(S, Ts#type.def) of #type{tag=PTag,constraint=_Ctmp,def=Dtmp,inlined=Inl} -> {Dtmp,merge_tags(Ts#type.tag,PTag),Ts#type.constraint,Inl}; #typedef{typespec=#type{tag=PTag,def=Dtmp,inlined=Inl}} -> @@ -2989,16 +2508,16 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) -> inlined=IsInlined}, TestFun = fun(Tref) -> - MaybeChoice = get_non_typedef(S, Tref), + {_, MaybeChoice} = get_referenced_type(S, Tref, true), case catch((MaybeChoice#typedef.typespec)#type.def) of {'CHOICE',_} -> - maybe_illicit_implicit_tag(choice,Tag); + maybe_illicit_implicit_tag(S, choice, Tag); 'ANY' -> - maybe_illicit_implicit_tag(open_type,Tag); + maybe_illicit_implicit_tag(S, open_type, Tag); 'ANY DEFINED BY' -> - maybe_illicit_implicit_tag(open_type,Tag); + maybe_illicit_implicit_tag(S, open_type, Tag); 'ASN1_OPEN_TYPE' -> - maybe_illicit_implicit_tag(open_type,Tag); + maybe_illicit_implicit_tag(S, open_type, Tag); _ -> Tag end @@ -3007,7 +2526,7 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) -> case Def of Ext when is_record(Ext,'Externaltypereference') -> {RefMod,RefTypeDef,IsParamDef} = - case get_referenced_type(S,Ext) of + case get_referenced_type(S, Ext) of {undefined,TmpTDef} -> %% A parameter {get(top_module),TmpTDef,true}; {TmpRefMod,TmpRefDef} -> @@ -3031,7 +2550,6 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) -> NewS = S#state{mname=RefMod, module=load_asn1_module(S,RefMod), tname=get_datastr_name(NewRefTypeDef1), - type=NewRefTypeDef1, abscomppath=[],recordtopname=[]}, RefType1 = check_type(NewS,RefTypeDef,RefTypeDef#typedef.typespec), @@ -3051,18 +2569,17 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) -> Key); _ -> ok end, + Pos = Ext#'Externaltypereference'.pos, {RefType1,#'Externaltypereference'{module=RefMod, + pos=Pos, type=TmpName}} end, case asn1ct_gen:prim_bif(asn1ct_gen:get_inner(RefType#type.def)) of true -> %% Here we expand to a built in type and inline it - NewS2 = S#state{type=#typedef{typespec=RefType}}, - NewC = - constraint_merge(NewS2, - check_constraints(NewS2,Constr)++ - RefType#type.constraint), + NewC = check_constraints(S, RefType, Constr ++ + RefType#type.constraint), TempNewDef#newt{ type = RefType#type.def, tag = merge_tags(Ct,RefType#type.tag), @@ -3073,19 +2590,13 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) -> NewExt = ExtRef#'Externaltypereference'{module=merged_mod(S,RefMod,Ext)}, TempNewDef#newt{ type = 
check_externaltypereference(S,NewExt), - tag = case S#state.erule of - ber -> - merge_tags(Ct,RefType#type.tag); - _ -> - Ct - end - } + tag = merge_tags(Ct,RefType#type.tag)} end; 'ANY' -> - Ct=maybe_illicit_implicit_tag(open_type,Tag), + Ct = maybe_illicit_implicit_tag(S, open_type, Tag), TempNewDef#newt{type='ASN1_OPEN_TYPE',tag=Ct}; {'ANY_DEFINED_BY',_} -> - Ct=maybe_illicit_implicit_tag(open_type,Tag), + Ct = maybe_illicit_implicit_tag(S, open_type, Tag), TempNewDef#newt{type='ASN1_OPEN_TYPE',tag=Ct}; 'INTEGER' -> TempNewDef#newt{tag= @@ -3132,7 +2643,7 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) -> {'ENUMERATED',NamedNumberList} -> TempNewDef#newt{type= {'ENUMERATED', - check_enumerated(S,NamedNumberList,Constr)}, + check_enumerated(S, NamedNumberList)}, tag= merge_tags(Tag,?TAG_PRIMITIVE(?N_ENUMERATED)), constraint=[]}; @@ -3235,7 +2746,7 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) -> tag= merge_tags(Tag,?TAG_CONSTRUCTED(?N_SEQUENCE))}; {'CHOICE',Components} -> - Ct = maybe_illicit_implicit_tag(choice,Tag), + Ct = maybe_illicit_implicit_tag(S, choice, Tag), TempNewDef#newt{type={'CHOICE',check_choice(S,Type,Components)},tag=Ct}; Set when is_record(Set,'SET') -> RecordName= @@ -3258,12 +2769,6 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) -> TempNewDef#newt{type={'SET OF',check_setof(S,Type,Components)}, tag= merge_tags(Tag,?TAG_CONSTRUCTED(?N_SET))}; - %% This is a temporary hack until the full Information Obj Spec - %% in X.681 is supported - {#'Externaltypereference'{type='TYPE-IDENTIFIER'}, - [{typefieldreference,_,'Type'}]} -> - Ct=maybe_illicit_implicit_tag(open_type,Tag), - TempNewDef#newt{type='ASN1_OPEN_TYPE',tag=Ct}; {pt,Ptype,ParaList} -> %% Ptype might be a parameterized - type, object set or @@ -3271,18 +2776,18 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) -> %% calling function. {_RefMod,Ptypedef} = get_referenced_type(S,Ptype), notify_if_not_ptype(S,Ptypedef), - NewParaList = - [match_parameters(S,TmpParam,S#state.parameters)|| - TmpParam <- ParaList], + NewParaList = match_parameters(S, ParaList), Instance = instantiate_ptype(S,Ptypedef,NewParaList), TempNewDef#newt{type=Instance#type.def, tag=merge_tags(Tag,Instance#type.tag), constraint=Instance#type.constraint, inlined=yes}; - OCFT=#'ObjectClassFieldType'{classname=ClRef} -> + #'ObjectClassFieldType'{classname=ClRef0}=OCFT0 -> %% this case occures in a SEQUENCE when %% the type of the component is a ObjectClassFieldType + ClRef = match_parameter(S, ClRef0), + OCFT = OCFT0#'ObjectClassFieldType'{classname=ClRef}, ClassSpec = check_class(S,ClRef), NewTypeDef = maybe_open_type(S,ClassSpec, @@ -3292,16 +2797,18 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) -> Ct = case is_open_type(NewTypeDef) of true -> - maybe_illicit_implicit_tag(open_type,MergedTag); + maybe_illicit_implicit_tag(S, open_type, MergedTag); _ -> MergedTag end, case TopName of [] when Type#typedef.name =/= undefined -> %% This is a top-level type. 
- #type{def=Simplified} = - simplify_type(#type{def=NewTypeDef}), - TempNewDef#newt{type=Simplified,tag=Ct}; + #type{constraint=C,def=Simplified} = + simplify_type(#type{def=NewTypeDef, + constraint=Constr}), + TempNewDef#newt{type=Simplified,tag=Ct, + constraint=C}; _ -> TempNewDef#newt{type=NewTypeDef,tag=Ct} end; @@ -3311,33 +2818,21 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) -> TempNewDef#newt{tag=merge_tags(Tag,CheckedT#type.tag), type=CheckedT#type.def}; - {valueset,Vtype} -> - TempNewDef#newt{type={valueset,check_type(S,Type,Vtype)}}; {'SelectionType',Name,T} -> CheckedT = check_selectiontype(S,Name,T), TempNewDef#newt{tag=merge_tags(Tag,CheckedT#type.tag), type=CheckedT#type.def}; - Other -> - exit({'cant check' ,Other}) + 'ASN1_OPEN_TYPE' -> + TempNewDef end, #newt{type=TDef,tag=NewTags,constraint=NewConstr,inlined=Inlined} = NewDef, Ts#type{def=TDef, inlined=Inlined, - constraint=check_constraints(S, NewConstr), + constraint=check_constraints(S, #type{def=TDef}, NewConstr), tag=lists:map(fun(#tag{type={default,TTx}}=TempTag) -> TempTag#tag{type=TTx}; (Other) -> Other - end, NewTags)}; -check_type(_S,Type,Ts) -> - exit({error,{asn1,internal_error,Type,Ts}}). - -get_non_typedef(S, Tref0) -> - case get_referenced_type(S, Tref0) of - {_,#typedef{typespec=#type{def=#'Externaltypereference'{}=Tref}}} -> - get_non_typedef(S, Tref); - {_,Type} -> - Type - end. + end, NewTags)}. %% @@ -3353,10 +2848,11 @@ simplify_comp(#'ComponentType'{typespec=Type0}=C) -> C#'ComponentType'{typespec=Type}; simplify_comp(Other) -> Other. -simplify_type(#type{tag=Tag,def=Inner}=T) -> +simplify_type(#type{tag=Tag,def=Inner,constraint=Constr0}=T) -> case Inner of - #'ObjectClassFieldType'{type={fixedtypevaluefield,_,Type}} -> - Type#type{tag=Tag}; + #'ObjectClassFieldType'{type={fixedtypevaluefield,_,Type}}=OCFT -> + Constr = [{ocft,OCFT}|Type#type.constraint++Constr0], + Type#type{tag=Tag,constraint=Constr}; _ -> T end. @@ -3389,29 +2885,22 @@ get_innertag(_S,#'ObjectClassFieldType'{type=Type}) -> _ -> [] end. -get_type_from_object(S,Object,TypeField) - when is_record(Object,'Externaltypereference'); - is_record(Object,'Externalvaluereference') -> - {_,ObjectDef} = get_referenced_type(S,Object), - ObjSpec = check_object(S,ObjectDef,ObjectDef#typedef.typespec), - get_fieldname_element(S,ObjectDef#typedef{typespec=ObjSpec},TypeField). - %% get_class_def(S, Type) -> #classdef{} | 'none'. get_class_def(S, #typedef{typespec=#type{def=#'Externaltypereference'{}=Eref}}) -> - {_,NextDef} = get_referenced_type(S, Eref), + {_,NextDef} = get_referenced_type(S, Eref, true), get_class_def(S, NextDef); get_class_def(S, #'Externaltypereference'{}=Eref) -> - {_,NextDef} = get_referenced_type(S, Eref), + {_,NextDef} = get_referenced_type(S, Eref, true), get_class_def(S, NextDef); get_class_def(_S, #classdef{}=CD) -> CD; get_class_def(_S, _) -> none. -maybe_illicit_implicit_tag(Kind,Tag) -> +maybe_illicit_implicit_tag(S, Kind, Tag) -> case Tag of [#tag{type='IMPLICIT'}|_T] -> - throw({error,{asn1,{implicit_tag_before,Kind}}}); + asn1_error(S, {implicit_tag_before,Kind}); [ChTag = #tag{type={default,_}}|T] -> case Kind of open_type -> @@ -3438,19 +2927,24 @@ merged_mod(S,RefMod,Ext) -> %% any UNIQUE field, so that a component relation constraint cannot specify %% the type of a typefield, return 'ASN1_OPEN_TYPE'. 
%% -maybe_open_type(S,ClassSpec=#objectclass{fields=Fs}, - OCFT=#'ObjectClassFieldType'{fieldname=FieldRefList}, +maybe_open_type(_, _, #'ObjectClassFieldType'{fieldname={_,_}}=OCFT, _) -> + %% Already converted. + OCFT; +maybe_open_type(S, #objectclass{fields=Fs}=ClassSpec, + #'ObjectClassFieldType'{fieldname=FieldRefList}=OCFT, Constr) -> - Type = get_ObjectClassFieldType(S,Fs,FieldRefList), - FieldNames=get_referenced_fieldname(FieldRefList), - case last_fieldname(FieldRefList) of + Type = get_OCFType(S, Fs, FieldRefList), + FieldNames = get_referenced_fieldname(FieldRefList), + case lists:last(FieldRefList) of {valuefieldreference,_} -> OCFT#'ObjectClassFieldType'{fieldname=FieldNames, type=Type}; {typefieldreference,_} -> - case {catch get_unique_fieldname(S,#classdef{typespec=ClassSpec}), - asn1ct_gen:get_constraint(Constr,componentrelation)}of - {Tuple,_} when tuple_size(Tuple) =:= 3 -> + %% Note: The constraints have not been checked yet, + %% so we must use a special lookup routine. + case {get_unique_fieldname(S, #classdef{typespec=ClassSpec}), + get_componentrelation(Constr)} of + {no_unique,_} -> OCFT#'ObjectClassFieldType'{fieldname=FieldNames, type='ASN1_OPEN_TYPE'}; {_,no} -> @@ -3462,16 +2956,12 @@ maybe_open_type(S,ClassSpec=#objectclass{fields=Fs}, end end. -last_fieldname(FieldRefList) when is_list(FieldRefList) -> - lists:last(FieldRefList); -last_fieldname({FieldName,_}) when is_atom(FieldName) -> - [A|_] = atom_to_list(FieldName), - case is_lowercase(A) of - true -> - {valuefieldreference,FieldName}; - _ -> - {typefieldreference,FieldName} - end. +get_componentrelation([{element_set,{componentrelation,_,_}=Cr,none}|_]) -> + Cr; +get_componentrelation([_|T]) -> + get_componentrelation(T); +get_componentrelation([]) -> + no. is_open_type(#'ObjectClassFieldType'{type='ASN1_OPEN_TYPE'}) -> true; @@ -3510,35 +3000,19 @@ notify_if_not_ptype(S,#pobjectsetdef{class=Cl}) -> _ -> throw(pobjectsetdef) end; -notify_if_not_ptype(_S,PT) -> - throw({error,{"supposed to be a parameterized type",PT}}). -% fix me +notify_if_not_ptype(S, PT) -> + asn1_error(S, {param_bad_type, error_value(PT)}). + instantiate_ptype(S,Ptypedef,ParaList) -> #ptypedef{args=Args,typespec=Type} = Ptypedef, NewType = check_ptype(S,Ptypedef,Type#type{inlined=yes}), MatchedArgs = match_args(S,Args, ParaList, []), OldArgs = S#state.parameters, - NewS = S#state{type=NewType,parameters=MatchedArgs++OldArgs,abscomppath=[]}, -%% NewS = S#state{type=NewType,parameters=MatchedArgs,abscomppath=[]}, + NewS = S#state{parameters=MatchedArgs++OldArgs,abscomppath=[]}, check_type(NewS, Ptypedef#ptypedef{typespec=NewType}, NewType). -get_datastr_name(#typedef{name=N}) -> - N; -get_datastr_name(#classdef{name=N}) -> - N; -get_datastr_name(#valuedef{name=N}) -> - N; -get_datastr_name(#ptypedef{name=N}) -> - N; -get_datastr_name(#pvaluedef{name=N}) -> - N; -get_datastr_name(#pvaluesetdef{name=N}) -> - N; -get_datastr_name(#pobjectdef{name=N}) -> - N; -get_datastr_name(#pobjectsetdef{name=N}) -> - N. - +get_datastr_name(Type) -> + asn1ct:get_name_of_def(Type). get_pt_args(#ptypedef{args=Args}) -> Args; @@ -3606,8 +3080,8 @@ match_args(S,FA = [FormArg|Ft], AA = [ActArg|At], Acc) -> end; match_args(_S,[], [], Acc) -> lists:reverse(Acc); -match_args(_,_, _, _) -> - throw({error,{asn1,{wrong_number_of_arguments}}}). +match_args(S, _, _, _) -> + asn1_error(S, param_wrong_number_of_arguments). 
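For a type field reference, maybe_open_type/4 now reads as a small decision table: the component keeps the looked-up field type only when the class has a UNIQUE field and a componentrelation constraint is attached (the case handled by the unchanged fall-through branch of this hunk); if get_unique_fieldname/2 reports no_unique or get_componentrelation/1 returns no, the component degenerates to ASN1_OPEN_TYPE. A condensed, hypothetical helper expressing just that decision (not part of the module):

-module(open_type_sketch).
-export([field_type/3]).

%% Unique: result of get_unique_fieldname/2 ('no_unique' or a field name).
%% CompRel: result of get_componentrelation/1 ('no' or the constraint).
%% FieldType: the type looked up for the referenced field.
field_type(no_unique, _CompRel, _FieldType) -> 'ASN1_OPEN_TYPE';
field_type(_Unique,   no,       _FieldType) -> 'ASN1_OPEN_TYPE';
field_type(_Unique,   _CompRel,  FieldType) -> FieldType.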
%%%%%%%%%%%%%%%%% %% categorize_arg(S,FormalArg,ActualArg) -> {FormalArg,CatgorizedActualArg} @@ -3652,11 +3126,6 @@ parameter_name_style(#'Externaltypereference'{}) -> parameter_name_style(#'Externalvaluereference'{}) -> beginning_lowercase. -is_lowercase(X) when X >= $A,X =< $W -> - false; -is_lowercase(_) -> - true. - %% categorize(Parameter) -> CategorizedParameter %% If Parameter has an abstract syntax of another category than %% Category, transform it to a known syntax. @@ -3705,725 +3174,503 @@ parse_objectset(Set) -> Set. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%% check_constraints/2 -%% -check_constraints(S,C) when is_list(C) -> - check_constraints(S, C, []). - -resolv_tuple_or_list(S,List) when is_list(List) -> - lists:map(fun(X)->resolv_value(S,X) end, List); -resolv_tuple_or_list(S,{Lb,Ub}) -> - {resolv_value(S,Lb),resolv_value(S,Ub)}. - -%%%----------------------------------------- -%% If the constraint value is a defined value the valuename -%% is replaced by the actual value %% -resolv_value(S,Val) -> - Id = match_parameters(S,Val, S#state.parameters), - resolv_value1(S,Id). +%% Check and simplify constraints. +%% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -resolv_value1(S, ERef = #'Externalvaluereference'{value=Name}) -> - case catch resolve_namednumber(S, S#state.type, Name) of - V when is_integer(V) -> - V; - _ -> - case get_referenced_type(S,ERef) of - {Err,_Reason} when Err == error; Err == 'EXIT' -> - throw({error,{asn1,{undefined_type_or_value, - Name}}}); - {_M,VDef} -> - resolv_value1(S,VDef) - end - end; -resolv_value1(S, {gt,V}) -> - case resolv_value1(S, V) of - Int when is_integer(Int) -> - Int + 1; - Other -> - throw({error,{asn1,{not_integer_value,Other}}}) - end; -resolv_value1(S, {lt,V}) -> - case resolv_value1(S, V) of - Int when is_integer(Int) -> - Int - 1; - Other -> - throw({error,{asn1,{not_integer_value,Other}}}) - end; -resolv_value1(S,{'ValueFromObject',{object,Object},[{valuefieldreference, - FieldName}]}) -> - %% FieldName can hold either a fixed-type value or a variable-type value - %% Object is a DefinedObject, i.e. a #'Externaltypereference' - resolve_value_from_object(S,Object,FieldName); -resolv_value1(_,#valuedef{checked=true,value=V}) -> - V; -resolv_value1(S,#valuedef{type=_T, - value={'ValueFromObject',{object,Object}, - [{valuefieldreference, - FieldName}]}}) -> - resolve_value_from_object(S,Object,FieldName); -resolv_value1(S,VDef = #valuedef{}) -> - #valuedef{value=Val} = check_value(S,VDef), - Val; -resolv_value1(_,V) -> - V. -resolve_value_from_object(S,Object,FieldName) -> - {_,ObjTDef} = get_referenced_type(S,Object), - TS = check_object(S,ObjTDef,ObjTDef#typedef.typespec), - {_,_,Components} = TS#'Object'.def, - case lists:keysearch(FieldName,1,Components) of - {value,{_,#valuedef{value=Val}}} -> - Val; - _ -> - error({value,"illegal value in constraint",S}) +check_constraints(_S, _HostType, []) -> + []; +check_constraints(S, HostType0, [_|_]=Cs0) -> + HostType = get_real_host_type(HostType0, Cs0), + Cs1 = top_level_intersections(Cs0), + Cs2 = [coalesce_constraints(C) || C <- Cs1], + {_,Cs3} = filter_extensions(Cs2), + Cs = simplify_element_sets(S, HostType, Cs3), + finish_constraints(Cs). + +get_real_host_type(HostType, Cs) -> + case lists:keyfind(ocft, 1, Cs) of + false -> HostType; + {_,OCFT} -> HostType#type{def=OCFT} end. +top_level_intersections([{element_set,{intersection,_,_}=C,none}]) -> + top_level_intersections_1(C); +top_level_intersections(Cs) -> + Cs. 
+ +top_level_intersections_1({intersection,A,B}) -> + [{element_set,A,none}|top_level_intersections_1(B)]; +top_level_intersections_1(Other) -> + [{element_set,Other,none}]. + +coalesce_constraints({element_set, + {Tag,{element_set,A,_}}, + {Tag,{element_set,B,_}}}) -> + %% (SIZE (C1), ..., (SIZE (C2)) => (SIZE (C1, ..., C2)) + {element_set,{Tag,{element_set,A,B}},none}; +coalesce_constraints(Other) -> + Other. + +%% Remove all outermost extensions except the last. + +filter_extensions([H0|T0]) -> + case filter_extensions(T0) of + {true,T} -> + H = remove_extension(H0), + {true,[H|T]}; + {false,T} -> + {any_extension(H0),[H0|T]} + end; +filter_extensions([]) -> + {false,[]}. -resolve_namednumber(S,#typedef{typespec=Type},Name) -> - case Type#type.def of - {'ENUMERATED',NameList} -> - resolve_namednumber_1(S, Name, NameList, Type); - {'INTEGER',NameList} -> - resolve_namednumber_1(S, Name, NameList, Type); +remove_extension({element_set,Root,_}) -> + {element_set,remove_extension(Root),none}; +remove_extension(Tuple) when is_tuple(Tuple) -> + L = [remove_extension(El) || El <- tuple_to_list(Tuple)], + list_to_tuple(L); +remove_extension(Other) -> Other. + +any_extension({element_set,_,Ext}) when Ext =/= none -> + true; +any_extension(Tuple) when is_tuple(Tuple) -> + any_extension_tuple(1, Tuple); +any_extension(_) -> false. + +any_extension_tuple(I, T) when I =< tuple_size(T) -> + any_extension(element(I, T)) orelse any_extension_tuple(I+1, T); +any_extension_tuple(_, _) -> false. + +simplify_element_sets(S, HostType, [{element_set,R0,E0}|T0]) -> + R1 = simplify_element_set(S, HostType, R0), + E1 = simplify_element_set(S, HostType, E0), + case simplify_element_sets(S, HostType, T0) of + [{element_set,R2,E2}] -> + [{element_set,cs_intersection(S, R1, R2), + cs_intersection(S, E1, E2)}]; + L when is_list(L) -> + [{element_set,R1,E1}|L] + end; +simplify_element_sets(S, HostType, [H|T]) -> + [H|simplify_element_sets(S, HostType, T)]; +simplify_element_sets(_, _, []) -> + []. 
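Two of the new preprocessing steps are easiest to see on concrete terms: top_level_intersections/1 flattens a single top-level intersection into a list of element sets, and filter_extensions/1 keeps only the last extension part, stripping the others from their roots. Hand-evaluated from the clauses above, with placeholder atoms (a, b, c, r1, r2, e1, e2) in place of real element sets:

%% Flattening a top-level intersection:
%%   top_level_intersections([{element_set,{intersection,a,{intersection,b,c}},none}])
%%     -> [{element_set,a,none},{element_set,b,none},{element_set,c,none}]
%%
%% Dropping every outermost extension except the last one:
%%   filter_extensions([{element_set,r1,e1},{element_set,r2,e2}])
%%     -> {true,[{element_set,r1,none},{element_set,r2,e2}]}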
+ +simplify_element_set(_S, _HostType, empty) -> + {set,[]}; +simplify_element_set(S, HostType, {'SingleValue',Vs0}) when is_list(Vs0) -> + Vs1 = [resolve_value(S, HostType, V) || V <- Vs0], + Vs = make_constr_set_vs(Vs1), + simplify_element_set(S, HostType, Vs); +simplify_element_set(S, HostType, {'SingleValue',V0}) -> + V1 = resolve_value(S, HostType, V0), + V = {set,[{range,V1,V1}]}, + simplify_element_set(S, HostType, V); +simplify_element_set(S, HostType, {'ValueRange',{Lb0,Ub0}}) -> + Lb = resolve_value(S, HostType, Lb0), + Ub = resolve_value(S, HostType, Ub0), + V = make_constr_set(S, Lb, Ub), + simplify_element_set(S, HostType, V); +simplify_element_set(S, HostType, {'ALL-EXCEPT',Set0}) -> + Set = simplify_element_set(S, HostType, Set0), + {'ALL-EXCEPT',Set}; +simplify_element_set(S, HostType, {intersection,A0,B0}) -> + A = simplify_element_set(S, HostType, A0), + B = simplify_element_set(S, HostType, B0), + cs_intersection(S, A, B); +simplify_element_set(S, HostType, {union,A0,B0}) -> + A = simplify_element_set(S, HostType, A0), + B = simplify_element_set(S, HostType, B0), + cs_union(S, A, B); +simplify_element_set(S, HostType, {simpletable,{element_set,Type,_}}) -> + check_simpletable(S, HostType, Type); +simplify_element_set(S, _, {componentrelation,R,Id}) -> + check_componentrelation(S, R, Id); +simplify_element_set(S, HostType, {Tag,{element_set,_,_}=El0}) -> + [El1] = simplify_element_sets(S, HostType, [El0]), + {Tag,El1}; +simplify_element_set(S, HostType, #type{}=Type) -> + simplify_element_set_type(S, HostType, Type); +simplify_element_set(_, _, C) -> + C. + +simplify_element_set_type(S, HostType, #type{def=Def0}=Type0) -> + #'Externaltypereference'{} = Def0, %Assertion. + case get_referenced_type(S, Def0) of + {_,#valuedef{checked=false,value={valueset,Vs0}}} -> + [Vs1] = simplify_element_sets(S, HostType, [Vs0]), + case Vs1 of + {element_set,Set,none} -> + Set; + {element_set,Set,{set,[]}} -> + Set + end; + {_,{valueset,#type{def=#'Externaltypereference'{}}=Type}} -> + simplify_element_set_type(S, HostType, Type); _ -> - not_enumerated + case HostType of + #type{def=#'ObjectClassFieldType'{}} -> + %% Open type. + #type{def=Def} = check_type(S, HostType, Type0), + Def; + _ -> + #type{constraint=Cs} = check_type(S, HostType, Type0), + C = convert_back(Cs), + simplify_element_set(S, HostType, C) + end end. -resolve_namednumber_1(S, Name, NameList, Type) -> - NamedNumberList = check_enumerated(S, NameList, Type#type.constraint), - {_,N} = lookup_enum_value(S, Name, NamedNumberList), - N. - -check_constraints(S,[{'ContainedSubtype',Type} | Rest], Acc) -> - {RefMod,CTDef} = get_referenced_type(S,Type#type.def), - NewS = S#state{module=load_asn1_module(S,RefMod),mname=RefMod, - type=CTDef,tname=get_datastr_name(CTDef)}, - CType = check_type(NewS,S#state.tname,CTDef#typedef.typespec), - check_constraints(S,Rest,CType#type.constraint ++ Acc); -check_constraints(S,[C | Rest], Acc) -> - check_constraints(S,Rest,[check_constraint(S,C) | Acc]); -check_constraints(S,[],Acc) -> - constraint_merge(S,Acc). - - -range_check(F={FixV,FixV}) -> -% FixV; - F; -range_check(VR={Lb,Ub}) when Lb < Ub -> - VR; -range_check(Err={_,_}) -> - throw({error,{asn1,{illegal_size_constraint,Err}}}); -range_check(Value) -> - Value. 
- -check_constraint(S,Ext) when is_record(Ext,'Externaltypereference') -> - check_externaltypereference(S,Ext); - - -check_constraint(S,{'SizeConstraint',{Lb,Ub}}) - when is_list(Lb); tuple_size(Lb) =:= 2 -> - NewLb = range_check(resolv_tuple_or_list(S,Lb)), - NewUb = range_check(resolv_tuple_or_list(S,Ub)), - {'SizeConstraint',{NewLb,NewUb}}; -check_constraint(S,{'SizeConstraint',{Lb,Ub}}) -> - case {resolv_value(S,Lb),resolv_value(S,Ub)} of - {FixV,FixV} -> - {'SizeConstraint',FixV}; - {Low,High} when Low < High -> - {'SizeConstraint',{Low,High}}; - Err -> - throw({error,{asn1,{illegal_size_constraint,Err}}}) - end; -check_constraint(S,{'SizeConstraint',Lb}) -> - {'SizeConstraint',resolv_value(S,Lb)}; +convert_back([H1,H2|T]) -> + {intersection,H1,convert_back([H2|T])}; +convert_back([H]) -> + H; +convert_back([]) -> + none. -check_constraint(S,{'SingleValue', L}) when is_list(L) -> - F = fun(A) -> resolv_value(S,A) end, - {'SingleValue',lists:sort(lists:map(F,L))}; - -check_constraint(S,{'SingleValue', V}) when is_integer(V) -> - Val = resolv_value(S,V), -%% [{'SingleValue',Val},{'ValueRange',{Val,Val}}]; % Why adding value range? - {'SingleValue',Val}; -check_constraint(S,{'SingleValue', V}) -> - {'SingleValue',resolv_value(S,V)}; - -check_constraint(S,{'ValueRange', {Lb, Ub}}) -> - {'ValueRange',{resolv_value(S,Lb),resolv_value(S,Ub)}}; -%% In case of a constraint with extension marks like (1..Ub,...) -check_constraint(S,{VR={'ValueRange', {_Lb, _Ub}},Rest}) -> - {check_constraint(S,VR),Rest}; -check_constraint(_S,{'PermittedAlphabet',PA}) -> - {'PermittedAlphabet',permitted_alphabet_cnstr(PA)}; - -check_constraint(S,{valueset,Type}) -> - {valueset,check_type(S,S#state.tname,Type)}; - -check_constraint(_S,ST={simpletable,Type}) when is_atom(Type) -> - %% An already checked constraint - ST; -check_constraint(S,{simpletable,Type}) -> +check_simpletable(S, HostType, Type) -> + case HostType of + #type{def=#'ObjectClassFieldType'{}} -> + ok; + _ -> + %% Table constraints may only be applied to + %% CLASS.&field constructs. 
+ asn1_error(S, illegal_table_constraint) + end, Def = case Type of #type{def=D} -> D; - {'SingleValue',ObjRef = #'Externalvaluereference'{}} -> - ObjRef + {'SingleValue',#'Externalvaluereference'{}=ObjRef} -> + ObjRef; + _ -> + asn1_error(S, invalid_table_constraint) end, - C = match_parameters(S,Def,S#state.parameters), + C = match_parameter(S, Def), case C of #'Externaltypereference'{} -> - ERef = check_externaltypereference(S,C), - {simpletable,ERef#'Externaltypereference'.type}; - #type{def=#'Externaltypereference'{}=ExtTypeRef} -> - ERef = check_externaltypereference(S, ExtTypeRef), + ERef = check_externaltypereference(S, C), {simpletable,ERef#'Externaltypereference'.type}; - {valueset,#type{def=ERef=#'Externaltypereference'{}}} -> % this is an object set - {_,TDef} = get_referenced_type(S,ERef), - case TDef#typedef.typespec of - #'ObjectSet'{} -> - check_object(S,TDef,TDef#typedef.typespec), - {simpletable,ERef#'Externaltypereference'.type}; - Err -> - exit({error,{internal_error,Err}}) - end; #'Externalvaluereference'{} -> %% This is an object set with a referenced object - {_,TorVDef} = get_referenced_type(S,C), - GetObjectSet = - fun(#typedef{typespec=O}) when is_record(O,'Object') -> - #'ObjectSet'{class=O#'Object'.classname, - set={'SingleValue',C}}; - (#valuedef{type=Cl,value=O}) - when is_record(O,'Externalvaluereference'), - is_record(Cl,type) -> - %% an object might reference another object - #'ObjectSet'{class=Cl#type.def, - set={'SingleValue',O}}; - (Err) -> - exit({error,{internal_error,simpletable_constraint,Err}}) - end, - ObjSet = GetObjectSet(TorVDef), - {simpletable,check_object(S,Type,ObjSet)}; - #'ObjectSet'{} -> - io:format("ALERT: simpletable forbidden case!~n",[]), - {simpletable,check_object(S,Type,C)}; - {'ValueFromObject',{_,ORef},FieldName} -> - %% This is an ObjectFromObject - {_,Object} = get_referenced_type(S,ORef), - ChObject = check_object(S,Object, - Object#typedef.typespec), - ObjFromObj= - get_fieldname_element(S,Object#typedef{ - typespec=ChObject}, - FieldName), - {simpletable,ObjFromObj}; -%% ObjFromObj#typedef{checked=true,typespec= -%% check_object(S,ObjFromObj, -%% ObjFromObj#typedef.typespec)}}; - _ -> - check_type(S,S#state.tname,Type),%% this seems stupid. - OSName = Def#'Externaltypereference'.type, - {simpletable,OSName} - end; + {_,TorVDef} = get_referenced_type(S, C), + Set = case TorVDef of + #typedef{typespec=#'Object'{classname=ClassName}} -> + #'ObjectSet'{class=ClassName, + set={'SingleValue',C}}; + #valuedef{type=#type{def=ClassDef}, + value=#'Externalvaluereference'{}=Obj} -> + %% an object might reference another object + #'ObjectSet'{class=ClassDef, + set={'SingleValue',Obj}} + end, + {simpletable,check_object(S, Type, Set)}; + {'ValueFromObject',{_,Object},FieldNames} -> + %% This is an ObjectFromObject. + {simpletable,extract_field(S, Object, FieldNames)} + end. -check_constraint(S,{componentrelation,{objectset,Opos,Objset},Id}) -> +check_componentrelation(S, {objectset,Opos,Objset0}, Id) -> %% Objset is an 'Externaltypereference' record, since Objset is %% a DefinedObjectSet. 
- RealObjset = match_parameters(S,Objset,S#state.parameters), - ObjSetRef = - case RealObjset of - #'Externaltypereference'{} -> RealObjset; - #type{def=#'Externaltypereference'{}} -> RealObjset#type.def; - {valueset,OS = #type{def=#'Externaltypereference'{}}} -> OS#type.def - end, - Ext = check_externaltypereference(S,ObjSetRef), - {componentrelation,{objectset,Opos,Ext},Id}; + ObjSet = match_parameter(S, Objset0), + Ext = check_externaltypereference(S, ObjSet), + {componentrelation,{objectset,Opos,Ext},Id}. + +%%% +%%% Internal set representation. +%%% +%%% We represent sets as a union of strictly disjoint ranges: +%%% +%%% {set,[Range]} +%%% +%%% A range is represented as: +%%% +%%% Range = {a_range,UpperBound} | {range,LowerBound,UpperBound} +%%% +%%% We don't use the atom 'MIN' to represent MIN, because atoms +%%% compare higher than integer. Instead we use {a_range,UpperBound} +%%% to represent MIN..UpperBound. We represent MAX as 'MAX' because +%%% 'MAX' compares higher than any integer. +%%% +%%% The ranges are sorted in term order. The ranges must not overlap +%%% or be adjacent to each other. This invariant is established when +%%% creating sets, and maintained by the intersection and union +%%% operators. +%%% +%%% Example of invalid set representaions: +%%% +%%% [{range,0,10},{range,5,10}] %Overlapping ranges +%%% [{range,0,5},{range,6,10}] %Adjancent ranges +%%% [{range,10,20},{a_range,100}] %Not sorted +%%% + +make_constr_set(_, 'MIN', Ub) -> + {set,[{a_range,make_constr_set_val(Ub)}]}; +make_constr_set(_, Lb, Ub) when Lb =< Ub -> + {set,[{range,make_constr_set_val(Lb), + make_constr_set_val(Ub)}]}; +make_constr_set(S, _, _) -> + asn1_error(S, reversed_range). + +make_constr_set_val([C]) when is_integer(C) -> C; +make_constr_set_val(Val) -> Val. + +make_constr_set_vs(Vs) -> + {set,make_constr_set_vs_1(Vs)}. + +make_constr_set_vs_1([]) -> + []; +make_constr_set_vs_1([V]) -> + [{range,V,V}]; +make_constr_set_vs_1([V0|Vs]) -> + V1 = make_constr_set_vs_1(Vs), + range_union([{range,V0,V0}], V1). + +%%% +%%% Set operators. +%%% + +cs_intersection(_S, Other, none) -> + Other; +cs_intersection(_S, none, Other) -> + Other; +cs_intersection(_S, {set,SetA}, {set,SetB}) -> + {set,range_intersection(SetA, SetB)}; +cs_intersection(_S, A, B) -> + {intersection,A,B}. + +range_intersection([], []) -> + []; +range_intersection([_|_], []) -> + []; +range_intersection([], [_|_]) -> + []; +range_intersection([H1|_]=A, [H2|_]=B) when H1 > H2 -> + range_intersection(B, A); +range_intersection([H1|T1], [H2|T2]=B) -> + %% Now H1 =< H2. + case {H1,H2} of + {{a_range,Ub0},{a_range,Ub1}} when Ub0 < Ub1 -> + %% Ub0 =/= 'MAX' + [H1|range_intersection(T1, [{range,Ub0+1,Ub1}|T2])]; + {{a_range,_},{a_range,_}} -> + %% Must be equal. + [H1|range_intersection(T1, T2)]; + {{a_range,Ub0},{range,Lb1,_Ub1}} when Ub0 < Lb1 -> + %% No intersection. + range_intersection(T1, B); + {{a_range,Ub0},{range,Lb1,Ub1}} when Ub0 < Ub1 -> + %% Ub0 =/= 'MAX' + [{range,Lb1,Ub0}|range_intersection(T1, [{range,Ub0+1,Ub1}|T2])]; + {{a_range,Ub},{range,_Lb1,Ub}} -> + %% The first range covers the second range, but does not + %% go beyond. We handle this case specially because Ub may + %% be 'MAX', and evaluating 'MAX'+1 will fail. + [H2|range_intersection(T1, T2)]; + {{a_range,Ub0},{range,_Lb1,Ub1}} -> + %% Ub0 > Ub1, Ub1 =/= 'MAX'. The first range completely + %% covers and extends beyond the second range. + [H2|range_intersection([{range,Ub1+1,Ub0}|T1], T2)]; + {{range,_Lb0,Ub0},{range,Lb1,_Ub1}} when Ub0 < Lb1 -> + %% Lb0 < Lb1. 
No intersection. + range_intersection(T1, B); + {{range,_Lb0,Ub0},{range,Lb1,Ub1}} when Ub0 < Ub1 -> + %% Ub0 >= Lb1, Ub0 =/= 'MAX'. Partial overlap. + [{range,Lb1,Ub0}|range_intersection(T1, [{range,Ub0+1,Ub1}|T2])]; + {{range,_Lb0,Ub},{range,_Lb1,Ub}} -> + %% The first range covers the second range, but does not + %% go beyond. We handle this case specially because Ub may + %% be 'MAX', and evaluating 'MAX'+1 will fail. + [H2|range_intersection(T1, T2)]; + {{range,_Lb0,Ub0},{range,_Lb1,Ub1}} -> + %% Ub1 =/= MAX. The first range completely covers and + %% extends beyond the second. + [H2|range_intersection([{range,Ub1+1,Ub0}|T1], T2)] + end. -check_constraint(S,Type) when is_record(Type,type) -> - #type{def=Def} = check_type(S,S#state.tname,Type), - Def; +cs_union(_S, {set,SetA}, {set,SetB}) -> + {set,range_union(SetA, SetB)}; +cs_union(_S, A, B) -> + {union,A,B}. + +range_union(A, B) -> + range_union_1(lists:merge(A, B)). + +range_union_1([{a_range,Ub0},{a_range,Ub1}|T]) -> + range_union_1([{a_range,max(Ub0, Ub1)}|T]); +range_union_1([{a_range,Ub0},{range,Lb1,Ub1}|T]) when Lb1-1 =< Ub0 -> + range_union_1([{a_range,max(Ub0, Ub1)}|T]); +range_union_1([{a_range,_}=H|T]) -> + %% Ranges are disjoint. + [H|range_union_1(T)]; +range_union_1([{range,Lb0,Ub0},{range,Lb1,Ub1}|T]) when Lb1-1 =< Ub0 -> + range_union_1([{range,Lb0,max(Ub0, Ub1)}|T]); +range_union_1([{range,_,_}=H|T]) -> + %% Ranges are disjoint. + [H|range_union_1(T)]; +range_union_1([]) -> + []. -check_constraint(S,C) when is_list(C) -> - lists:map(fun(X)->check_constraint(S,X) end,C); -% else keep the constraint unchanged -check_constraint(_S,Any) -> -% io:format("Constraint = ~p~n",[Any]), - Any. - -permitted_alphabet_cnstr(T) when is_tuple(T) -> - permitted_alphabet_cnstr([T]); -permitted_alphabet_cnstr(L) when is_list(L) -> - VRexpand = fun({'ValueRange',{A,B}}) -> - {'SingleValue',expand_valuerange(A,B)}; - (Other) -> - Other - end, - L2 = lists:map(VRexpand,L), - %% first perform intersection - L3 = permitted_alphabet_intersection(L2), - [Res] = permitted_alphabet_union(L3), - Res. +%%% +%%% Finish up constrains, making them suitable for the back-ends. +%%% +%%% A 'PermittedAlphabet' (FROM) constraint will be reduced to: +%%% +%%% {'SingleValue',[integer()]} +%%% +%%% A 'SizeConstraint' (SIZE) constraint will be reduced to: +%%% +%%% {Lb,Ub} +%%% +%%% All other constraints will be reduced to: +%%% +%%% {'SingleValue',[integer()]} | {'ValueRange',Lb,Ub} +%%% + +finish_constraints(Cs) -> + finish_constraints_1(Cs, fun smart_collapse/1). + +finish_constraints_1([{element_set,{Tag,{element_set,_,_}=Set0},none}|T], + Collapse0) -> + Collapse = collapse_fun(Tag), + case finish_constraints_1([Set0], Collapse) of + [] -> + finish_constraints_1(T, Collapse0); + [Set] -> + [{Tag,Set}|finish_constraints_1(T, Collapse0)] + end; +finish_constraints_1([{element_set,{set,[{a_range,'MAX'}]},_}|T], Collapse) -> + finish_constraints_1(T, Collapse); +finish_constraints_1([{element_set,{intersection,A0,B0},none}|T], Collapse) -> + A = {element_set,A0,none}, + B = {element_set,B0,none}, + finish_constraints_1([A,B|T], Collapse); +finish_constraints_1([{element_set,Root,Ext}|T], Collapse) -> + case finish_constraint(Root, Ext, Collapse) of + none -> + finish_constraints_1(T, Collapse); + Constr -> + [Constr|finish_constraints_1(T, Collapse)] + end; +finish_constraints_1([H|T], Collapse) -> + [H|finish_constraints_1(T, Collapse)]; +finish_constraints_1([], _) -> + []. 
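Hand-evaluating the pipeline for a small subtype constraint such as INTEGER (1..4 | 8) shows how the internal {set,[Range]} representation and finish_constraints/1 cooperate. The parsed input shape is an assumption about what asn1ct_parser2 delivers; the intermediate and final terms follow from the clauses above (smart_collapse/1, the default collapse function, is defined further down):

%% Parsed constraint (assumed shape):
%%   [{element_set,{union,{'ValueRange',{1,4}},{'SingleValue',8}},none}]
%%
%% simplify_element_sets/3 turns both branches into internal sets and
%% takes their union:
%%   range_union([{range,1,4}], [{range,8,8}]) -> [{range,1,4},{range,8,8}]
%%   giving [{element_set,{set,[{range,1,4},{range,8,8}]},none}]
%%
%% finish_constraints/1 then collapses the set for the back-ends:
%%   smart_collapse([{range,1,4},{range,8,8}]) -> {'SingleValue',[1,2,3,4,8]}
%%   giving [{'SingleValue',[1,2,3,4,8]}]
%%
%% Intersections, by contrast, shrink the set representation directly:
%%   range_intersection([{a_range,10}], [{range,5,20}]) -> [{range,5,10}]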
-expand_valuerange([A],[A]) -> - [A]; -expand_valuerange([A],[B]) when A < B -> - [A|expand_valuerange([A+1],[B])]. +finish_constraint({set,Root0}, Ext, Collapse) -> + case Collapse(Root0) of + none -> none; + Root -> finish_constraint(Root, Ext, Collapse) + end; +finish_constraint(Root, Ext, _Collapse) -> + case Ext of + none -> Root; + _ -> {Root,[]} + end. -permitted_alphabet_intersection(C) -> - permitted_alphabet_merge(C,intersection, []). +collapse_fun('SizeConstraint') -> + fun size_constraint_collapse/1; +collapse_fun('PermittedAlphabet') -> + fun single_value_collapse/1. -permitted_alphabet_union(C) -> - permitted_alphabet_merge(C,union, []). +single_value_collapse(V) -> + {'SingleValue',ordsets:from_list(single_value_collapse_1(V))}. -permitted_alphabet_merge([],_,Acc) -> - lists:reverse(Acc); -permitted_alphabet_merge([{'SingleValue',L1}, - UorI, - {'SingleValue',L2}|Rest],UorI,Acc) - when is_list(L1),is_list(L2) -> - UI = ordsets:UorI([ordsets:from_list(L1),ordsets:from_list(L2)]), - permitted_alphabet_merge([{'SingleValue',UI}|Rest],UorI,Acc); -permitted_alphabet_merge([C1|Rest],UorI,Acc) -> - permitted_alphabet_merge(Rest,UorI,[C1|Acc]). - - -%% constraint_merge/2 -%% Compute the intersection of the outermost level of the constraint list. -%% See Dubuisson second paragraph and fotnote on page 285. -%% If constraints with extension are included in combined constraints. The -%% resulting combination will have the extension of the last constraint. Thus, -%% there will be no extension if the last constraint is without extension. -%% The rootset of all constraints are considered in the "outermoust -%% intersection". See section 13.1.2 in Dubuisson. -constraint_merge(St, Cs0) -> - Cs = constraint_merge_1(St, Cs0), - normalize_cs(Cs). - -normalize_cs([{'SingleValue',[V]}|Cs]) -> - [{'SingleValue',V}|normalize_cs(Cs)]; -normalize_cs([{'SingleValue',[_|_]=L0}|Cs]) -> - [H|T] = L = lists:usort(L0), - [case is_range(H, T) of - false -> {'SingleValue',L}; - true -> {'ValueRange',{H,lists:last(T)}} - end|normalize_cs(Cs)]; -normalize_cs([{'ValueRange',{Sv,Sv}}|Cs]) -> - [{'SingleValue',Sv}|normalize_cs(Cs)]; -normalize_cs([{'ValueRange',{'MIN','MAX'}}|Cs]) -> - normalize_cs(Cs); -normalize_cs([{'SizeConstraint',C0}|Cs]) -> - case normalize_size_constraint(C0) of - none -> - normalize_cs(Cs); - C -> - [{'SizeConstraint',C}|normalize_cs(Cs)] - end; -normalize_cs([H|T]) -> - [H|normalize_cs(T)]; -normalize_cs([]) -> []. +single_value_collapse_1([{range,Lb,Ub}|T]) when is_integer(Lb), + is_integer(Ub) -> + lists:seq(Lb, Ub) ++ single_value_collapse_1(T); +single_value_collapse_1([]) -> + []. -%% Normalize a size constraint to make it non-ambiguous and -%% easy to interpret for the backends. 
-%% -%% Returns one of the following terms: -%% {LowerBound,UpperBound} -%% {{LowerBound,UpperBound},[]} % Extensible -%% none % Remove size constraint from list -%% -%% where: -%% LowerBound = integer() -%% UpperBound = integer() | 'MAX' - -normalize_size_constraint(Sv) when is_integer(Sv) -> - {Sv,Sv}; -normalize_size_constraint({Root,Ext}) when is_list(Ext) -> - {normalize_size_constraint(Root),[]}; -normalize_size_constraint({{_,_},Ext}) when is_integer(Ext) -> - normalize_size_constraint(Ext); -normalize_size_constraint([H|T]) -> - {H,lists:last(T)}; -normalize_size_constraint({0,'MAX'}) -> +smart_collapse([{a_range,Ub}]) -> + {'ValueRange',{'MIN',Ub}}; +smart_collapse([{a_range,_}|T]) -> + {range,_,Ub} = lists:last(T), + {'ValueRange',{'MIN',Ub}}; +smart_collapse([{range,Lb,Ub}]) -> + {'ValueRange',{Lb,Ub}}; +smart_collapse([_|_]=L) -> + V = lists:foldr(fun({range,Lb,Ub}, A) -> + seq(Lb, Ub) ++ A + end, [], L), + {'SingleValue',V}. + +size_constraint_collapse([{range,0,'MAX'}]) -> none; -normalize_size_constraint({Lb,Ub}=Range) - when is_integer(Lb), is_integer(Ub) orelse Ub =:= 'MAX' -> - Range. +size_constraint_collapse(Root) -> + [{range,Lb,_}|_] = Root, + {range,_,Ub} = lists:last(Root), + {Lb,Ub}. -is_range(Prev, [H|T]) when Prev =:= H - 1 -> is_range(H, T); -is_range(_, [_|_]) -> false; -is_range(_, []) -> true. +seq(Same, Same) -> + [Same]; +seq(Lb, Ub) when is_integer(Lb), is_integer(Ub) -> + lists:seq(Lb, Ub). -constraint_merge_1(_S, [H]=C) when is_tuple(H) -> - C; -constraint_merge_1(_S, []) -> - []; -constraint_merge_1(S, C) -> - %% skip all extension but the last extension - C1 = filter_extensions(C), - %% perform all internal level intersections, intersections first - %% since they have precedence over unions - C2 = lists:map(fun(X)when is_list(X)->constraint_intersection(S,X); - (X) -> X end, - C1), - %% perform all internal level unions - C3 = lists:map(fun(X)when is_list(X)->constraint_union(S,X); - (X) -> X end, - C2), - - %% now get intersection of the outermost level - %% get the least common single value constraint - SVs = get_constraints(C3,'SingleValue'), - CombSV = intersection_of_sv(S,SVs), - %% get the least common value range constraint - VRs = get_constraints(C3,'ValueRange'), - CombVR = intersection_of_vr(S,VRs), - %% get the least common size constraint - SZs = get_constraints(C3,'SizeConstraint'), - CombSZ = intersection_of_size(S,SZs), - RestC = ordsets:subtract(ordsets:from_list(C3), - ordsets:from_list(SZs ++ VRs ++ SVs)), - %% get the least common combined constraint. That is the union of each - %% deep constraint and merge of single value and value range constraints. - %% FIXME: Removing 'intersection' from the flattened list essentially - %% means that intersections are converted to unions! - Cs = combine_constraints(S, CombSV, CombVR, CombSZ++RestC), - [X || X <- lists:flatten(Cs), - X =/= intersection, - X =/= union]. - -%% constraint_union(S,C) takes a list of constraints as input and -%% merge them to a union. Unions are performed when two -%% constraints is found with an atom union between. -%% The list may be nested. Fix that later !!! 
-constraint_union(_S,[]) -> - []; -constraint_union(_S,C=[_E]) -> - C; -constraint_union(S,C) when is_list(C) -> - case lists:member(union,C) of - true -> - constraint_union1(S,C,[]); - _ -> - C - end; -% SV = get_constraints(C,'SingleValue'), -% SV1 = constraint_union_sv(S,SV), -% VR = get_constraints(C,'ValueRange'), -% VR1 = constraint_union_vr(VR), -% RestC = ordsets:filter(fun({'SingleValue',_})->false; -% ({'ValueRange',_})->false; -% (_) -> true end,ordsets:from_list(C)), -% SV1++VR1++RestC; -constraint_union(_S,C) -> - [C]. - -constraint_union1(S, [{'ValueRange',{Lb1,Ub1}},union, - {'ValueRange',{Lb2,Ub2}}|Rest], Acc) -> - AunionB = {'ValueRange',{c_min(Lb1, Lb2),max(Ub1, Ub2)}}, - constraint_union1(S, [AunionB|Rest], Acc); -constraint_union1(S,[A={'SingleValue',_},union,B={'SingleValue',_}|Rest],Acc) -> - AunionB = constraint_union_sv(S,[A,B]), - constraint_union1(S,Rest,Acc ++ AunionB); -constraint_union1(S,[A={'SingleValue',_},union,B={'ValueRange',_}|Rest],Acc) -> - AunionB = union_sv_vr(S,A,B), - constraint_union1(S, AunionB++Rest, Acc); -constraint_union1(S,[A={'ValueRange',_},union,B={'SingleValue',_}|Rest],Acc) -> - AunionB = union_sv_vr(S,B,A), - constraint_union1(S, AunionB++Rest, Acc); -constraint_union1(S,[union|Rest],Acc) -> %skip when unsupported constraints - constraint_union1(S,Rest,Acc); -constraint_union1(S,[A|Rest],Acc) -> - constraint_union1(S,Rest,[A|Acc]); -constraint_union1(_S,[],Acc) -> - Acc. +%%%----------------------------------------- +%% If the constraint value is a defined value the valuename +%% is replaced by the actual value +%% +resolve_value(S, HostType, Val) -> + Id = match_parameter(S, Val), + resolve_value1(S, HostType, Id). -constraint_union_sv(_S,SV) -> - Values=lists:map(fun({_,V})->V end,SV), - case ordsets:from_list(Values) of - [] -> []; - [N] -> [{'SingleValue',N}]; - L -> [{'SingleValue',L}] - end. -c_min('MIN', _) -> 'MIN'; -c_min(_, 'MIN') -> 'MIN'; -c_min(A, B) -> min(A, B). - -union_sv_vr(_S,{'SingleValue',SV},VR) - when is_integer(SV) -> - union_sv_vr(_S,{'SingleValue',[SV]},VR); -union_sv_vr(_S,{'SingleValue',SV},{'ValueRange',{VLb,VUb}}) - when is_list(SV) -> - L = lists:sort(SV++[VLb,VUb]), - {Lb,L1} = case lists:member('MIN',L) of - true -> {'MIN',L--['MIN']}; % remove 'MIN' so it does not disturb - false -> {hd(L),tl(L)} - end, - Ub = case lists:member('MAX',L1) of - true -> 'MAX'; - false -> lists:last(L1) - end, - case SV of - [H] -> H; - _ -> SV - end, - %% for now we through away the Singlevalues so that they don't disturb - %% in the code generating phase (the effective Valuerange is already - %% calculated. If we want to keep the Singlevalues as well for - %% use in code gen phases we need to introduce a new representation - %% like {'ValueRange',{Lb,Ub},[ListOfRanges|AntiValues|Singlevalues] - %% These could be used to generate guards which allows only the specific - %% values , not the full range - [{'ValueRange',{Lb,Ub}}]. - - -%% get_constraints/2 -%% Arguments are a list of constraints, which has the format {key,value}, -%% and a constraint type -%% Returns a list of constraints only of the requested type or the atom -%% 'no' if no such constraints were found -get_constraints(L=[{CType,_}],CType) -> - L; -get_constraints(C,CType) -> - keysearch_allwithkey(CType,1,C). - -%% keysearch_allwithkey(Key,Ix,L) -%% Types: -%% Key = is_atom() -%% Ix = integer() -%% L = [TwoTuple] -%% TwoTuple = [{atom(),term()}|...] 
-%% Returns a List that contains all -%% elements from L that has a key Key as element Ix -keysearch_allwithkey(Key,Ix,L) -> - lists:filter(fun(X) when is_tuple(X) -> - case element(Ix,X) of - Key -> true; - _ -> false - end; - (_) -> false - end, L). - - -%% filter_extensions(C) -%% takes a list of constraints as input and returns a list with the -%% constraints and all extensions but the last are removed. -filter_extensions([L]) when is_list(L) -> - [filter_extensions(L)]; -filter_extensions(C=[_H]) -> - C; -filter_extensions(C) when is_list(C) -> - filter_extensions(C,[], []). - -filter_extensions([],Acc,[]) -> - Acc; -filter_extensions([],Acc,[EC|ExtAcc]) -> - CwoExt = remove_extension(ExtAcc,[]), - CwoExt ++ [EC|Acc]; -filter_extensions([C={A,_E}|T],Acc,ExtAcc) when is_tuple(A) -> - filter_extensions(T,Acc,[C|ExtAcc]); -filter_extensions([C={'SizeConstraint',{A,_B}}|T],Acc,ExtAcc) - when is_list(A);is_tuple(A) -> - filter_extensions(T,Acc,[C|ExtAcc]); -filter_extensions([C={'PermittedAlphabet',{{'SingleValue',_},E}}|T],Acc,ExtAcc) - when is_tuple(E); is_list(E) -> - filter_extensions(T,Acc,[C|ExtAcc]); -filter_extensions([H|T],Acc,ExtAcc) -> - filter_extensions(T,[H|Acc],ExtAcc). - -remove_extension([],Acc) -> - Acc; -remove_extension([{'SizeConstraint',{A,_B}}|R],Acc) -> - remove_extension(R,[{'SizeConstraint',A}|Acc]); -remove_extension([{C,_E}|R],Acc) when is_tuple(C) -> - remove_extension(R,[C|Acc]); -remove_extension([{'PermittedAlphabet',{A={'SingleValue',_}, - E}}|R],Acc) - when is_tuple(E);is_list(E) -> - remove_extension(R,[{'PermittedAlphabet',A}|Acc]). - -%% constraint_intersection(S,C) takes a list of constraints as input and -%% performs intersections. Intersecions are performed when an -%% atom intersection is found between two constraints. -%% The list may be nested. Fix that later !!! -constraint_intersection(_S,[]) -> - []; -constraint_intersection(_S,C=[_E]) -> - C; -constraint_intersection(S,C) when is_list(C) -> -% io:format("constraint_intersection: ~p~n",[C]), - case lists:member(intersection,C) of - true -> - constraint_intersection1(S,C,[]); - _ -> - C +resolve_value1(S, HostType, #'Externalvaluereference'{value=Name}=ERef) -> + case resolve_namednumber(S, HostType, Name) of + V when is_integer(V) -> + V; + not_named -> + resolve_value1(S, HostType, get_referenced_value(S, ERef)) end; -constraint_intersection(_S,C) -> - [C]. - -constraint_intersection1(S,[A,intersection,B|Rest],Acc) -> - AisecB = c_intersect(S,A,B), - constraint_intersection1(S, AisecB++Rest, Acc); -constraint_intersection1(S,[A|Rest],Acc) -> - constraint_intersection1(S,Rest,[A|Acc]); -constraint_intersection1(_, [], [C]) -> - C; -constraint_intersection1(_,[],Acc) -> - lists:reverse(Acc). - -c_intersect(S,C1={'SingleValue',_},C2={'SingleValue',_}) -> - intersection_of_sv(S,[C1,C2]); -c_intersect(S,C1={'ValueRange',_},C2={'ValueRange',_}) -> - intersection_of_vr(S,[C1,C2]); -c_intersect(S,C1={'ValueRange',_},C2={'SingleValue',_}) -> - intersection_sv_vr(S,[C2],[C1]); -c_intersect(S,C1={'SingleValue',_},C2={'ValueRange',_}) -> - intersection_sv_vr(S,[C1],[C2]); -c_intersect(_S,C1,C2) -> - [C1,C2]. 
- -%% combine_constraints(S,SV,VR,CComb) -%% Types: -%% S = is_record(state,S) -%% SV = [] | [SVC] -%% VR = [] | [VRC] -%% CComb = [] | [Lists] -%% SVC = {'SingleValue',integer()} | {'SingleValue',[integer(),...]} -%% VRC = {'ValueRange',{Lb,Ub}} -%% Lists = List of lists containing any constraint combination -%% Lb = 'MIN' | integer() -%% Ub = 'MAX' | integer() -%% Returns a combination of the least common constraint among SV,VR and all -%% elements in CComb -combine_constraints(_S,[],VR,CComb) -> - VR ++ CComb; -% combine_combined_cnstr(S,VR,CComb); -combine_constraints(_S,SV,[],CComb) -> - SV ++ CComb; -% combine_combined_cnstr(S,SV,CComb); -combine_constraints(S,SV,VR,CComb) -> - C=intersection_sv_vr(S,SV,VR), - C ++ CComb. -% combine_combined_cnstr(S,C,CComb). - -intersection_sv_vr(_S,[C1={'SingleValue',SV}],[C2={'ValueRange',{_Lb,_Ub}}]) - when is_integer(SV) -> - case is_int_in_vr(SV,C2) of - true -> [C1]; - _ -> %%error({type,{"asn1 illegal constraint",C1,C2},S}) - %throw({error,{"asn1 illegal constraint",C1,C2}}) - %io:format("warning: could not analyze constraint ~p~n",[[C1,C2]]), - [C1,C2] +resolve_value1(S, HostType, {gt,V}) -> + case resolve_value1(S, HostType, V) of + Int when is_integer(Int) -> + Int + 1; + _Other -> + asn1_error(S, illegal_integer_value) end; -intersection_sv_vr(_S,[C1={'SingleValue',SV}],[C2]) - when is_list(SV) -> - case lists:filter(fun(X)->is_int_in_vr(X,C2) end,SV) of - [] -> - %%error({type,{"asn1 illegal constraint",C1,C2},S}); - %throw({error,{"asn1 illegal constraint",C1,C2}}); - %io:format("warning: could not analyze constraint ~p~n",[[C1,C2]]), - [C1,C2]; - [V] -> [{'SingleValue',V}]; - L -> [{'SingleValue',L}] - end. - - -%% Size constraint [{'SizeConstraint',1},{'SizeConstraint',{{1,64},[]}}] - -intersection_of_size(_,[]) -> - []; -intersection_of_size(_,C=[_SZ]) -> - C; -intersection_of_size(S,[SZ,SZ|Rest]) -> - intersection_of_size(S,[SZ|Rest]); -intersection_of_size(S,C=[C1={_,Int},{_,Range}|Rest]) - when is_integer(Int),is_tuple(Range) -> - case Range of - {Lb,Ub} when Int >= Lb, - Int =< Ub -> - intersection_of_size(S,[C1|Rest]); - {{Lb,Ub},Ext} when is_list(Ext),Int >= Lb,Int =< Ub -> - intersection_of_size(S,[C1|Rest]); - _ -> - throw({error,{asn1,{illegal_size_constraint,C}}}) +resolve_value1(S, HostType, {lt,V}) -> + case resolve_value1(S, HostType, V) of + Int when is_integer(Int) -> + Int - 1; + _Other -> + asn1_error(S, illegal_integer_value) end; -intersection_of_size(S,[C1={_,Range},C2={_,Int}|Rest]) - when is_integer(Int),is_tuple(Range) -> - intersection_of_size(S,[C2,C1|Rest]); -intersection_of_size(S,[{_,{Lb1,Ub1}},{_,{Lb2,Ub2}}|Rest]) -> - Lb=greatest_LB(ordsets:from_list([Lb1,Lb2])), - Ub=smallest_UB(ordsets:from_list([Ub1,Ub2])), - intersection_of_size(S,[{'SizeConstraint',{Lb,Ub}}|Rest]); -intersection_of_size(_,SZ) -> - throw({error,{asn1,{illegal_size_constraint,SZ}}}). - -intersection_of_vr(_,[]) -> - []; -intersection_of_vr(_,VR=[_C]) -> - VR; -intersection_of_vr(S,[{_,{Lb1,Ub1}},{_,{Lb2,Ub2}}|Rest]) -> - Lb=greatest_LB(ordsets:from_list([Lb1,Lb2])), - Ub=smallest_UB(ordsets:from_list([Ub1,Ub2])), - intersection_of_vr(S,[{'ValueRange',{Lb,Ub}}|Rest]); -intersection_of_vr(_S,VR) -> - %%error({type,{asn1,{illegal_value_range_constraint,VR}},S}); - throw({error,{asn1,{illegal_value_range_constraint,VR}}}). 
- -intersection_of_sv(_,[]) -> - []; -intersection_of_sv(_,SV=[_C]) -> - SV; -intersection_of_sv(S,[SV,SV|Rest]) -> - intersection_of_sv(S,[SV|Rest]); -intersection_of_sv(S,[{_,Int},{_,SV}|Rest]) when is_integer(Int), - is_list(SV) -> - SV2=intersection_of_sv1(S,Int,SV), - intersection_of_sv(S,[SV2|Rest]); -intersection_of_sv(S,[{_,SV},{_,Int}|Rest]) when is_integer(Int), - is_list(SV) -> - SV2=intersection_of_sv1(S,Int,SV), - intersection_of_sv(S,[SV2|Rest]); -intersection_of_sv(S,[{_,SV1},{_,SV2}|Rest]) when is_list(SV1), - is_list(SV2) -> - SV3=common_set(SV1,SV2), - intersection_of_sv(S,[SV3|Rest]); -intersection_of_sv(_S,SV) -> - %%error({type,{asn1,{illegal_single_value_constraint,SV}},S}). - throw({error,{asn1,{illegal_single_value_constraint,SV}}}). - -intersection_of_sv1(_S,Int,SV) when is_integer(Int),is_list(SV) -> - case lists:member(Int,SV) of - true -> {'SingleValue',Int}; +resolve_value1(S, _HostType, {'ValueFromObject',{object,Object},FieldName}) -> + get_value_from_object(S, Object, FieldName); +resolve_value1(_, _, #valuedef{checked=true,value=V}) -> + V; +resolve_value1(S, _, #valuedef{value={'ValueFromObject', + {object,Object},FieldName}}) -> + get_value_from_object(S, Object, FieldName); +resolve_value1(S, _HostType, #valuedef{}=VDef) -> + #valuedef{value=Val} = check_value(S,VDef), + Val; +resolve_value1(_, _, V) -> + V. + +resolve_namednumber(S, #type{def=Def}, Name) -> + case Def of + {'ENUMERATED',NameList} -> + resolve_namednumber_1(S, Name, NameList); + {'INTEGER',NameList} -> + resolve_namednumber_1(S, Name, NameList); _ -> - %%error({type,{asn1,{illegal_single_value_constraint,Int,SV}},S}) - throw({error,{asn1,{illegal_single_value_constraint,Int,SV}}}) - end; -intersection_of_sv1(_S,SV1,SV2) -> - %%error({type,{asn1,{illegal_single_value_constraint,SV1,SV2}},S}). - throw({error,{asn1,{illegal_single_value_constraint,SV1,SV2}}}). + not_named + end. -greatest_LB([H]) -> - H; -greatest_LB(L) -> - greatest_LB1(lists:reverse(L)). -greatest_LB1(['MIN',H2|_T])-> - H2; -greatest_LB1([H|_T]) -> - H. -smallest_UB(L) -> - hd(L). - -common_set(SV1,SV2) -> - lists:filter(fun(X)->lists:member(X,SV1) end,SV2). - -is_int_in_vr(Int,{_,{'MIN','MAX'}}) when is_integer(Int) -> - true; -is_int_in_vr(Int,{_,{'MIN',Ub}}) when is_integer(Int),Int =< Ub -> - true; -is_int_in_vr(Int,{_,{Lb,'MAX'}}) when is_integer(Int),Int >= Lb -> - true; -is_int_in_vr(Int,{_,{Lb,Ub}}) when is_integer(Int),Int >= Lb,Int =< Ub -> - true; -is_int_in_vr(_,_) -> - false. - +resolve_namednumber_1(S, Name, NameList) -> + try + NamedNumberList = check_enumerated(S, NameList), + {_,N} = lookup_enum_value(S, Name, NamedNumberList), + N + catch _:_ -> + not_named + end. + +%%% +%%% End of constraint handling. +%%% check_imported(S,Imodule,Name) -> check_imported(S,Imodule,Name,false). @@ -4510,18 +3757,28 @@ check_reference(S,#'Externaltypereference'{pos=Pos,module=Emod,type=Name}) -> #'Externaltypereference'{pos=Pos,module=ModName,type=Name} end. +get_referenced_value(S, T) -> + case get_referenced_type(S, T) of + {ExtMod,#valuedef{value=#'Externalvaluereference'{}=Ref}} -> + get_referenced_value(update_state(S, ExtMod), Ref); + {_,#valuedef{value=Val}} -> + Val + end. + get_referenced_type(S, T) -> + get_referenced_type(S, T, false). 
+ +get_referenced_type(S, T, Recurse) -> case do_get_referenced_type(S, T) of - {_,#type{def=#'Externaltypereference'{}=ERef}} -> - get_referenced_type(S, ERef); - {_,#type{def=#'Externalvaluereference'{}=VRef}} -> - get_referenced_type(S, VRef); + {_,#typedef{typespec=#type{def=#'Externaltypereference'{}=ERef}}} + when Recurse -> + get_referenced_type(S, ERef, Recurse); {_,_}=Res -> Res end. -do_get_referenced_type(#state{parameters=Ps}=S, T0) -> - case match_parameters(S, T0, Ps) of +do_get_referenced_type(S, T0) -> + case match_parameter(S, T0) of T0 -> do_get_ref_type_1(S, T0); T -> @@ -4563,7 +3820,7 @@ get_referenced(S,Emod,Ename,Pos) -> %% May be an imported entity in module Emod or Emod may not exist case asn1_db:dbget(Emod,'MODULE') of undefined -> - throw({error,{asn1,{module_not_found,Emod}}}); + asn1_error(S, {undefined_import, Ename, Emod}); _ -> NewS = update_state(S,Emod), get_imported(NewS,Ename,Emod,Pos) @@ -4593,12 +3850,11 @@ get_imported(S,Name,Module,Pos) -> parse_and_save(S,Imodule), case asn1_db:dbget(Imodule,'MODULE') of undefined -> - throw({error,{asn1,{module_not_found,Imodule}}}); + asn1_error(S, {undefined_import, Name, Module}); Im when is_record(Im,module) -> case is_exported(Im,Name) of false -> - throw({error, - {asn1,{not_exported,{Im,Name}}}}); + asn1_error(S, {undefined_export, Name}); _ -> ?dbg("get_imported, is_exported ~p, ~p~n",[Imodule,Name]), get_referenced_type(S, @@ -4611,37 +3867,6 @@ get_imported(S,Name,Module,Pos) -> get_renamed_reference(S,Name,Module) end. -check_and_save(S,#'Externaltypereference'{module=M}=ERef,#typedef{checked=false}=TDef,Settings) - when S#state.mname /= M -> - %% This ERef is an imported type (or maybe a set.asn compilation) - NewS = S#state{mname=M,module=load_asn1_module(S,M), - type=TDef,tname=get_datastr_name(TDef)}, - Type=check_type(NewS,TDef,TDef#typedef.typespec),%XXX - CheckedTDef = TDef#typedef{checked=true, - typespec=Type}, - asn1_db:dbput(M,get_datastr_name(TDef),CheckedTDef), - {merged_name(S,ERef),Settings}; -check_and_save(S,#'Externaltypereference'{module=M,type=N}=Eref, - #ptypedef{name=Name,args=Params} = PTDef,Settings) -> - %% instantiate a parameterized type - %% The parameterized type should be saved as a type in the module - %% it was instantiated. - NewS = S#state{mname=M,module=load_asn1_module(S,M), - type=PTDef,tname=Name}, - {Args,RestSettings} = lists:split(length(Params),Settings), - Type = check_type(NewS,PTDef,#type{def={pt,Eref,Args}}), - ERefName = new_reference_name(N), - ERefNew = #'Externaltypereference'{type=ERefName,module=S#state.mname}, - NewTDef=#typedef{checked=true,name=ERefName, - typespec=Type}, - insert_once(S,parameterized_objects,{ERefName,type,NewTDef}), - asn1_db:dbput(S#state.mname,ERefNew#'Externaltypereference'.type, - NewTDef), - {ERefNew,RestSettings}; -check_and_save(_S,ERef,TDef,Settings) -> - %% This might be a renamed type in a set of specs, so rename the ERef - {ERef#'Externaltypereference'{type=asn1ct:get_name_of_def(TDef)},Settings}. 
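get_referenced_value/2 (introduced just above) and the Recurse flag on get_referenced_type/3 make the reference chasing explicit: value references are followed, updating the state when they cross module boundaries, until a concrete value is reached, while type references are only unwrapped when the caller asks for it. A minimal standalone sketch of the value-chasing idea, with a map standing in for the symbol table (module and names are illustrative):

-module(ref_chase_sketch).
-export([resolve/2]).

%% Follow {ref,Name} chains in Env until a terminal value is reached.
resolve({ref,Name}, Env) ->
    resolve(maps:get(Name, Env), Env);
resolve(Value, _Env) ->
    Value.

%% Hand-evaluated:
%%   Env = #{v1 => {ref,v2}, v2 => 42},
%%   resolve({ref,v1}, Env) -> 42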
- save_object_set_instance(S,Name,ObjSetSpec) when is_record(ObjSetSpec,'ObjectSet') -> NewObjSet = #typedef{checked=true,name=Name,typespec=ObjSetSpec}, @@ -4708,18 +3933,14 @@ update_state(S,ModuleName) -> S; _ -> parse_and_save(S,ModuleName), - case asn1_db:dbget(ModuleName,'MODULE') of - RefedMod when is_record(RefedMod,module) -> - S#state{mname=ModuleName,module=RefedMod}; - _ -> throw({error,{asn1,{module_does_not_exist,ModuleName}}}) - end + Mod = #module{} = asn1_db:dbget(ModuleName,'MODULE'), + S#state{mname=ModuleName,module=Mod} end. - get_renamed_reference(S,Name,Module) -> case renamed_reference(S,Name,Module) of undefined -> - throw({error,{asn1,{undefined_type,Name}}}); + asn1_error(S, {undefined, Name}); NewTypeName when NewTypeName =/= Name -> get_referenced1(S,Module,NewTypeName,undefined) end. @@ -4770,37 +3991,49 @@ get_importmoduleoftype([I|Is],Name) -> get_importmoduleoftype([],_) -> undefined. +match_parameters(S, Names) -> + [match_parameter(S, Name) || Name <- Names]. -match_parameters(_S,Name,[]) -> - Name; +match_parameter(#state{parameters=Ps}=S, Name) -> + match_parameter(S, Name, Ps). -match_parameters(_S,#'Externaltypereference'{type=Name},[{#'Externaltypereference'{type=Name},NewName}|_T]) -> +match_parameter(_S, Name, []) -> + Name; +match_parameter(S, {valueset,{element_set,#type{}=Ts,none}}, Ps) -> + match_parameter(S, {valueset,Ts}, Ps); +match_parameter(_S, #'Externaltypereference'{type=Name}, + [{#'Externaltypereference'{type=Name},NewName}|_T]) -> NewName; -match_parameters(_S,#'Externaltypereference'{type=Name},[{{_,#'Externaltypereference'{type=Name}},NewName}|_T]) -> +match_parameter(_S, #'Externaltypereference'{type=Name}, + [{{_,#'Externaltypereference'{type=Name}},NewName}|_T]) -> NewName; -match_parameters(_S,#'Externalvaluereference'{value=Name},[{#'Externalvaluereference'{value=Name},NewName}|_T]) -> +match_parameter(_S, #'Externalvaluereference'{value=Name}, + [{#'Externalvaluereference'{value=Name},NewName}|_T]) -> NewName; -match_parameters(_S,#'Externalvaluereference'{value=Name},[{{_,#'Externalvaluereference'{value=Name}},NewName}|_T]) -> +match_parameter(_S, #'Externalvaluereference'{value=Name}, + [{{_,#'Externalvaluereference'{value=Name}},NewName}|_T]) -> NewName; -match_parameters(_S,#type{def=#'Externaltypereference'{module=M,type=Name}}, - [{#'Externaltypereference'{module=M,type=Name},Type}]) -> +match_parameter(_S, #type{def=#'Externaltypereference'{module=M,type=Name}}, + [{#'Externaltypereference'{module=M,type=Name},Type}]) -> Type; -match_parameters(_S,{valueset,#type{def=#'Externaltypereference'{type=Name}}}, - [{{_,#'Externaltypereference'{type=Name}},{valueset,#type{def=NewName}}}|_T]) -> +match_parameter(_S, {valueset,#type{def=#'Externaltypereference'{type=Name}}}, + [{{_,#'Externaltypereference'{type=Name}}, + {valueset,#type{def=NewName}}}|_T]) -> NewName; -match_parameters(_S,{valueset,#type{def=#'Externaltypereference'{type=Name}}}, - [{{_,#'Externaltypereference'{type=Name}}, - NewName=#type{def=#'Externaltypereference'{}}}|_T]) -> +match_parameter(_S, {valueset,#type{def=#'Externaltypereference'{type=Name}}}, + [{{_,#'Externaltypereference'{type=Name}}, + NewName=#type{def=#'Externaltypereference'{}}}|_T]) -> NewName#type.def; -match_parameters(_S,{valueset,#type{def=#'Externaltypereference'{type=Name}}}, - [{{_,#'Externaltypereference'{type=Name}},NewName}|_T]) -> +match_parameter(_S, {valueset,#type{def=#'Externaltypereference'{type=Name}}}, + [{{_,#'Externaltypereference'{type=Name}},NewName}|_T]) -> NewName; 
%% When a parameter is a parameterized element it has to be %% instantiated now! -match_parameters(S,{valueset,T=#type{def={pt,_,_Args}}},_Parameters) -> - case catch check_type(S,#typedef{name=S#state.tname,typespec=T},T) of - pobjectsetdef -> - +match_parameter(S, {valueset,T=#type{def={pt,_,_Args}}}, _Ps) -> + try check_type(S,#typedef{name=S#state.tname,typespec=T},T) of + #type{def=Ts} -> + Ts + catch pobjectsetdef -> {_,ObjRef,_Params} = T#type.def, {_,ObjDef}=get_referenced_type(S,ObjRef), %%ObjDef is a pvaluesetdef where the type field holds the class @@ -4818,17 +4051,15 @@ match_parameters(S,{valueset,T=#type{def={pt,_,_Args}}},_Parameters) -> ObjectSet = #'ObjectSet'{class=RightClassRef,set=T}, ObjSpec = check_object(S,#typedef{typespec=ObjectSet},ObjectSet), Name = list_to_atom(asn1ct_gen:list2name([get_datastr_name(ObjDef)|S#state.recordtopname])), - save_object_set_instance(S,Name,ObjSpec); - pvaluesetdef -> error({pvaluesetdef,"parameterized valueset",S}); - {error,_Reason} -> error({type,"error in parameter",S}); - Ts when is_record(Ts,type) -> Ts#type.def + save_object_set_instance(S,Name,ObjSpec) end; + %% same as previous, only depends on order of parsing -match_parameters(S,{valueset,{pos,{objectset,_,POSref},Args}},Parameters) -> - match_parameters(S,{valueset,#type{def={pt,POSref,Args}}},Parameters); -match_parameters(S,Name, [_H|T]) -> - %%io:format("match_parameters(~p,~p)~n",[Name,[H|T]]), - match_parameters(S,Name,T). +match_parameter(S, {valueset,{pos,{objectset,_,POSref},Args}}, Ps) -> + match_parameter(S, {valueset,#type{def={pt,POSref,Args}}}, Ps); +match_parameter(S, Name, [_H|T]) -> + %%io:format("match_parameter(~p,~p)~n",[Name,[H|T]]), + match_parameter(S, Name, T). imported(S,Name) -> {imports,Ilist} = (S#state.module)#module.imports, @@ -4854,7 +4085,6 @@ check_named_number_list(_S, [{_,_}|_]=NNL) -> NNL; check_named_number_list(S, NNL0) -> %% Check that the names are unique. - T = S#state.type, case check_unique(NNL0, 2) of [] -> NNL1 = [{Id,resolve_valueref(S, Val)} || {'NamedNumber',Id,Val} <- NNL0], @@ -4863,14 +4093,14 @@ check_named_number_list(S, NNL0) -> [] -> NNL; [Val|_] -> - asn1_error(S, T, {value_reused,Val}) + asn1_error(S, {value_reused,Val}) end; [H|_] -> - asn1_error(S, T, {namelist_redefinition,H}) + asn1_error(S, {namelist_redefinition,H}) end. -resolve_valueref(S, #'Externalvaluereference'{module=Mod,value=Name}) -> - dbget_ex(S, Mod, Name); +resolve_valueref(S, #'Externalvaluereference'{} = T) -> + get_referenced_value(S, T); resolve_valueref(_, Val) when is_integer(Val) -> Val. @@ -4879,7 +4109,7 @@ check_integer(S, NNL) -> check_bitstring(S, NNL0) -> NNL = check_named_number_list(S, NNL0), - _ = [asn1_error(S, S#state.type, {invalid_bit_number,Bit}) || + _ = [asn1_error(S, {invalid_bit_number,Bit}) || {_,Bit} <- NNL, Bit < 0], NNL. @@ -4904,7 +4134,7 @@ check_type_identifier(S, Eref=#'Externaltypereference'{type=Class}) -> {_,TD=#typedef{typespec=#type{def=#'Externaltypereference'{}}}} -> check_type_identifier(S, (TD#typedef.typespec)#type.def); _ -> - asn1_error(S, S#state.type, {illegal_instance_of,Class}) + asn1_error(S, {illegal_instance_of,Class}) end. 
iof_associated_type(S,[]) -> @@ -4913,12 +4143,7 @@ iof_associated_type(S,[]) -> case get(instance_of) of undefined -> AssociateSeq = iof_associated_type1(S,[]), - Tag = - case S#state.erule of - ber -> - [?TAG_CONSTRUCTED(?N_INSTANCE_OF)]; - _ -> [] - end, + Tag = [?TAG_CONSTRUCTED(?N_INSTANCE_OF)], TypeDef=#typedef{checked=true, name='INSTANCE OF', typespec=#type{tag=Tag, @@ -4944,16 +4169,11 @@ iof_associated_type1(S,C) -> [] -> 'ASN1_OPEN_TYPE'; _ -> {typefield,'Type'} end, - {ObjIdTag,C1TypeTag}= - case S#state.erule of - ber -> - {[{'UNIVERSAL',8}], - [#tag{class='UNIVERSAL', - number=6, - type='IMPLICIT', - form=0}]}; - _ -> {[{'UNIVERSAL','INTEGER'}],[]} - end, + ObjIdTag = [{'UNIVERSAL',8}], + C1TypeTag = [#tag{class='UNIVERSAL', + number=6, + type='IMPLICIT', + form=0}], TypeIdentifierRef=#'Externaltypereference'{module=ModuleName, type='TYPE-IDENTIFIER'}, ObjectIdentifier = @@ -4992,9 +4212,13 @@ iof_associated_type1(S,C) -> %% returns the leading attribute, the constraint of the components and %% the tablecinf value for the second component. -instance_of_constraints(_,[]) -> +instance_of_constraints(_, []) -> {false,[],[],[]}; -instance_of_constraints(S, [{simpletable,Type}]) -> +instance_of_constraints(S, [{element_set,{simpletable,C},none}]) -> + {element_set,Type,none} = C, + instance_of_constraints_1(S, Type). + +instance_of_constraints_1(S, Type) -> #type{def=#'Externaltypereference'{type=Name}} = Type, ModuleName = S#state.mname, ObjectSetRef=#'Externaltypereference'{module=ModuleName, @@ -5014,93 +4238,100 @@ instance_of_constraints(S, [{simpletable,Type}]) -> valueindex=[]}, {TableCInf,[{simpletable,Name}],CRel,[{objfun,ObjectSetRef}]}. -%% Check ENUMERATED -%% **************************************** -%% Check that all values are unique -%% assign values to un-numbered identifiers -%% check that the constraints are allowed and correct -%% put the updated info back into database -check_enumerated(_S,[{Name,Number}|_Rest]= NNList,_Constr) when is_atom(Name), is_integer(Number)-> - %% already checked , just return the same list - NNList; -check_enumerated(_S,{[{Name,Number}|_Rest],L}= NNList,_Constr) when is_atom(Name), is_integer(Number), is_list(L)-> - %% already checked , contains extension marker, just return the same lists - NNList; -check_enumerated(S,NamedNumberList,_Constr) -> - check_enum(S,NamedNumberList,[],[],[]). - -%% identifiers are put in Acc2 -%% returns either [{Name,Number}] or {[{Name,Number}],[{ExtName,ExtNumber}]} -%% the latter is returned if the ENUMERATION contains EXTENSIONMARK -check_enum(S,[{'NamedNumber',Id,Num}|T],Acc1,Acc2,Root) when is_integer(Num) -> - check_enum(S,T,[{Id,Num}|Acc1],Acc2,Root); -check_enum(S,['EXTENSIONMARK'|T],Acc1,Acc2,_Root) -> - NewAcc2 = lists:keysort(2,Acc1), - NewList = enum_number(lists:reverse(Acc2),NewAcc2,0,[],[]), - { NewList, check_enum(S,T,[],[],enum_counts(NewList))}; -check_enum(S,[Id|T],Acc1,Acc2,Root) when is_atom(Id) -> - check_enum(S,T,Acc1,[Id|Acc2],Root); -check_enum(_S,[],Acc1,Acc2,Root) -> - NewAcc2 = lists:keysort(2,Acc1), - enum_number(lists:reverse(Acc2),NewAcc2,0,[],Root). - - -% assign numbers to identifiers , numbers from 0 ... but must not -% be the same as already assigned to NamedNumbers -enum_number(Identifiers,NamedNumbers,Cnt,Acc,[]) -> - enum_number(Identifiers,NamedNumbers,Cnt,Acc); -enum_number(Identifiers,NamedNumbers,_Cnt,Acc,CountL) -> - enum_extnumber(Identifiers,NamedNumbers,Acc,CountL). 
- -enum_number([H|T],[{Id,Num}|T2],Cnt,Acc) when Num > Cnt -> - enum_number(T,[{Id,Num}|T2],Cnt+1,[{H,Cnt}|Acc]); -enum_number([H|T],[{Id,Num}|T2],Cnt,Acc) when Num < Cnt -> % negative Num - enum_number(T,T2,Cnt+1,[{H,Cnt},{Id,Num}|Acc]); -enum_number([],L2,_Cnt,Acc) -> - lists:append([lists:reverse(Acc),L2]); -enum_number(L,[{Id,Num}|T2],Cnt,Acc) -> % Num == Cnt - enum_number(L,T2,Cnt+1,[{Id,Num}|Acc]); -enum_number([H|T],[],Cnt,Acc) -> - enum_number(T,[],Cnt+1,[{H,Cnt}|Acc]). - -enum_extnumber(Identifiers,NamedNumbers,Acc,[C]) -> - check_add_enum_numbers(NamedNumbers,[C]), - enum_number(Identifiers,NamedNumbers,C,Acc); -enum_extnumber([H|T],[{Id,Num}|T2],Acc,[C|Counts]) when Num > C -> - enum_extnumber(T,[{Id,Num}|T2],[{H,C}|Acc],Counts); -enum_extnumber([],L2,Acc,Cnt) -> - check_add_enum_numbers(L2, Cnt), - lists:concat([lists:reverse(Acc),L2]); -enum_extnumber(_Identifiers,[{Id,Num}|_T2],_Acc,[C|_]) when Num < C -> -%% enum_extnumber(Identifiers,T2,[{Id,Num}|Acc],Counts); - exit({error,{asn1,"AdditionalEnumeration element with same number as root element",{Id,Num}}}); -enum_extnumber(Identifiers,[{Id,Num}|T2],Acc,[_C|Counts]) -> % Num =:= C - enum_extnumber(Identifiers,T2,[{Id,Num}|Acc],Counts); -enum_extnumber([H|T],[],Acc,[C|Counts]) -> - enum_extnumber(T,[],[{H,C}|Acc],Counts). - -enum_counts([]) -> - [0]; -enum_counts(L) -> - Used=[I||{_,I}<-L], - AddEnumLb = lists:max(Used) + 1, - lists:foldl(fun(El,AccIn)->lists:delete(El,AccIn) end, - lists:seq(0,AddEnumLb), - Used). -check_add_enum_numbers(L, Cnt) -> - Max = lists:max(Cnt), - Fun = fun({_,N}=El) when N < Max -> - case lists:member(N,Cnt) of - false -> - exit({error,{asn1,"AdditionalEnumeration element with same number as root element",El}}); - _ -> - ok - end; - (_) -> - ok - end, - lists:foreach(Fun,L). +%%% +%%% Check ENUMERATED. +%%% +check_enumerated(_S, [{Name,Number}|_]=NNL) + when is_atom(Name), is_integer(Number) -> + %% Already checked. + NNL; +check_enumerated(_S, {[{Name,Number}|_],L}=NNL) + when is_atom(Name), is_integer(Number), is_list(L) -> + %% Already checked (with extension). + NNL; +check_enumerated(S, NNL) -> + check_enum_ids(S, NNL, gb_sets:empty()), + check_enum(S, NNL, gb_sets:empty(), []). + +check_enum_ids(S, [{'NamedNumber',Id,_}|T], Ids0) -> + Ids = check_enum_update_ids(S, Id, Ids0), + check_enum_ids(S, T, Ids); +check_enum_ids(S, ['EXTENSIONMARK'|T], Ids) -> + check_enum_ids(S, T, Ids); +check_enum_ids(S, [Id|T], Ids0) when is_atom(Id) -> + Ids = check_enum_update_ids(S, Id, Ids0), + check_enum_ids(S, T, Ids); +check_enum_ids(_, [], _) -> + ok. + +check_enum(S, [{'NamedNumber',Id,N}|T], Used0, Acc) -> + Used = check_enum_update_used(S, Id, N, Used0), + check_enum(S, T, Used, [{Id,N}|Acc]); +check_enum(S, ['EXTENSIONMARK'|Ext0], Used0, Acc0) -> + Acc = lists:reverse(Acc0), + {Root,Used,Cnt} = check_enum_number_root(Acc, Used0, 0, []), + Ext = check_enum_ext(S, Ext0, Used, Cnt, []), + {Root,Ext}; +check_enum(S, [Id|T], Used, Acc) when is_atom(Id) -> + check_enum(S, T, Used, [Id|Acc]); +check_enum(_, [], Used, Acc0) -> + Acc = lists:reverse(Acc0), + {Root,_,_} = check_enum_number_root(Acc, Used, 0, []), + lists:keysort(2, Root). 
+ +check_enum_number_root([Id|T]=T0, Used0, Cnt, Acc) when is_atom(Id) -> + case gb_sets:is_element(Cnt, Used0) of + false -> + Used = gb_sets:insert(Cnt, Used0), + check_enum_number_root(T, Used, Cnt+1, [{Id,Cnt}|Acc]); + true -> + check_enum_number_root(T0, Used0, Cnt+1, Acc) + end; +check_enum_number_root([H|T], Used, Cnt, Acc) -> + check_enum_number_root(T, Used, Cnt, [H|Acc]); +check_enum_number_root([], Used, Cnt, Acc) -> + {lists:keysort(2, Acc),Used,Cnt}. + +check_enum_ext(S, [{'NamedNumber',Id,N}|T], Used0, C, Acc) -> + Used = check_enum_update_used(S, Id, N, Used0), + if + N < C -> + asn1_error(S, {enum_not_ascending,Id,N,C-1}); + true -> + ok + end, + check_enum_ext(S, T, Used, N+1, [{Id,N}|Acc]); +check_enum_ext(S, [Id|T]=T0, Used0, C, Acc) when is_atom(Id) -> + case gb_sets:is_element(C, Used0) of + true -> + check_enum_ext(S, T0, Used0, C+1, Acc); + false -> + Used = gb_sets:insert(C, Used0), + check_enum_ext(S, T, Used, C+1, [{Id,C}|Acc]) + end; +check_enum_ext(_, [], _, _, Acc) -> + lists:keysort(2, Acc). + +check_enum_update_ids(S, Id, Ids) -> + case gb_sets:is_element(Id, Ids) of + false -> + gb_sets:insert(Id, Ids); + true -> + asn1_error(S, {enum_illegal_redefinition,Id}) + end. + +check_enum_update_used(S, Id, N, Used) -> + case gb_sets:is_element(N, Used) of + false -> + gb_sets:insert(N, Used); + true -> + asn1_error(S, {enum_reused_value,Id,N}) + end. + +%%% +%%% End of ENUMERATED checking. +%%% check_boolean(_S,_Constr) -> ok. @@ -5145,7 +4376,7 @@ check_sequence(S,Type,Comps) -> CompListTuple = complist_as_tuple(NewComps4), {CRelInf,CompListTuple}; Dupl -> - throw({error,{asn1,{duplicate_components,Dupl}}}) + asn1_error(S, {duplicate_identifier, error_value(hd(Dupl))}) end. complist_as_tuple(CompList) -> @@ -5155,8 +4386,6 @@ complist_as_tuple([#'EXTENSIONMARK'{}|T], Acc, Ext, Acc2, root) -> complist_as_tuple(T, Acc, Ext, Acc2, ext); complist_as_tuple([#'EXTENSIONMARK'{}|T], Acc, Ext, Acc2, ext) -> complist_as_tuple(T, Acc, Ext, Acc2, root2); -complist_as_tuple([#'EXTENSIONMARK'{}|_T], _Acc, _Ext, _Acc2, root2) -> - throw({error,{asn1,{too_many_extension_marks}}}); complist_as_tuple([C|T], Acc, Ext, Acc2, root) -> complist_as_tuple(T, [C|Acc], Ext, Acc2, root); complist_as_tuple([C|T], Acc, Ext, Acc2, ext) -> @@ -5199,11 +4428,11 @@ expand_components2(S,{_,PT={pt,_,_}}) -> expand_components2(S,{_,OCFT = #'ObjectClassFieldType'{}}) -> UncheckedType = #type{def=OCFT}, Type = check_type(S,#typedef{typespec=UncheckedType},UncheckedType), - expand_components2(S,{undefined,oCFT_def(S,Type)}); + expand_components2(S, {undefined,ocft_def(Type)}); expand_components2(S,{_,ERef}) when is_record(ERef,'Externaltypereference') -> expand_components2(S,get_referenced_type(S,ERef)); -expand_components2(_S,Err) -> - throw({error,{asn1,{illegal_COMPONENTS_OF,Err}}}). +expand_components2(S,{_, What}) -> + asn1_error(S, {illegal_COMPONENTS_OF, error_value(What)}). take_only_rootset([])-> []; @@ -5252,7 +4481,7 @@ check_sequenceof(S,Type,Component) when is_record(Component,type) -> check_set(S,Type,Components) -> {TableCInf,NewComponents} = check_sequence(S,Type,Components), - check_distinct_tags(NewComponents,[]), + check_unique_tags(S, collect_components(NewComponents), []), case {lists:member(der,S#state.options),S#state.erule} of {true,_} -> {Sorted,SortedComponents} = sort_components(der,S,NewComponents), @@ -5264,35 +4493,21 @@ check_set(S,Type,Components) -> {false,TableCInf,NewComponents} end. 
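The reworked ENUMERATED checking above tracks used values in a gb_set: explicitly numbered identifiers reserve their values, and each bare identifier then receives the lowest counter value not yet reserved, with the root list finally sorted by value. The following is a minimal standalone sketch of that numbering strategy, not part of the patch; the module and function names are illustrative only.

-module(enum_number_sketch).
-export([number/1]).

%% Assign values to a mixed list of bare identifiers and {Id,Value} pairs,
%% mimicking the strategy in check_enum/check_enum_number_root above.
number(Items) ->
    %% Explicitly given values are reserved up front.
    Used = gb_sets:from_list([N || {_,N} <- Items]),
    number(Items, Used, 0, []).

number([{_,_}=Named|T], Used, Cnt, Acc) ->
    %% Explicitly numbered identifier: keep it as is.
    number(T, Used, Cnt, [Named|Acc]);
number([Id|T]=T0, Used, Cnt, Acc) when is_atom(Id) ->
    case gb_sets:is_element(Cnt, Used) of
        true ->
            %% This value is already taken; try the next one for the same Id.
            number(T0, Used, Cnt+1, Acc);
        false ->
            number(T, gb_sets:insert(Cnt, Used), Cnt+1, [{Id,Cnt}|Acc])
    end;
number([], _, _, Acc) ->
    %% Sort by value, as is done for the root list above.
    lists:keysort(2, Acc).

For example, enum_number_sketch:number([a, {b,0}, c]) yields [{b,0},{a,1},{c,2}]: 'a' skips 0 because it is reserved by 'b', and 'c' takes the next free value.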
- -%% check that all tags are distinct according to X.680 26.3 -check_distinct_tags({C1,C2,C3},Acc) when is_list(C1),is_list(C2),is_list(C3) -> - check_distinct_tags(C1++C2++C3,Acc); -check_distinct_tags({C1,C2},Acc) when is_list(C1),is_list(C2) -> - check_distinct_tags(C1++C2,Acc); -check_distinct_tags([#'ComponentType'{tags=[T]}|Cs],Acc) -> - check_distinct(T,Acc), - check_distinct_tags(Cs,[T|Acc]); -check_distinct_tags([C=#'ComponentType'{tags=[T|Ts]}|Cs],Acc) -> - check_distinct(T,Acc), - check_distinct_tags([C#'ComponentType'{tags=Ts}|Cs],[T|Acc]); -check_distinct_tags([#'ComponentType'{tags=[]}|_Cs],_Acc) -> - throw({error,"Not distinct tags in SET"}); -check_distinct_tags([],_) -> - ok. -check_distinct(T,Acc) -> - case lists:member(T,Acc) of - true -> - throw({error,"Not distinct tags in SET"}); - _ -> ok - end. +collect_components({C1,C2,C3}) -> + collect_components(C1++C2++C3); +collect_components({C1,C2}) -> + collect_components(C1++C2); +collect_components(Cs) -> + %% Assert that tags are not empty + [] = [EmptyTag || EmptyTag = #'ComponentType'{tags=[]} <- Cs], + Cs. %% sorting in canonical order according to X.680 8.6, X.691 9.2 %% DER: all components shall be sorted in canonical order. %% PER: only root components shall be sorted in canonical order. The %% extension components shall remain in textual order. %% -sort_components(der,S=#state{tname=TypeName},Components) -> +sort_components(der, S, Components) -> {R1,Ext,R2} = extension(textual_order(Components)), CompsList = case Ext of noext -> R1; @@ -5300,88 +4515,34 @@ sort_components(der,S=#state{tname=TypeName},Components) -> end, case {untagged_choice(S,CompsList),Ext} of {false,noext} -> - {true,sort_components1(S,TypeName,CompsList,[],[],[],[])}; + {true,sort_components1(CompsList)}; {false,_} -> - {true,{sort_components1(S,TypeName,CompsList,[],[],[],[]), []}}; + {true,{sort_components1(CompsList),[]}}; {true,noext} -> %% sort in run-time {dynamic,R1}; _ -> {dynamic,{R1, Ext, R2}} end; -sort_components(per,S=#state{tname=TypeName},Components) -> +sort_components(per, S, Components) -> {R1,Ext,R2} = extension(textual_order(Components)), Root = tag_untagged_choice(S,R1++R2), case Ext of noext -> - {true,sort_components1(S,TypeName,Root,[],[],[],[])}; + {true,sort_components1(Root)}; _ -> - {true,{sort_components1(S,TypeName,Root,[],[],[],[]), - Ext}} + {true,{sort_components1(Root),Ext}} end. -sort_components1(S,TypeName,[C=#'ComponentType'{tags=[{'UNIVERSAL',_}|_R]}|Cs], - UnivAcc,ApplAcc,ContAcc,PrivAcc) -> - sort_components1(S,TypeName,Cs,[C|UnivAcc],ApplAcc,ContAcc,PrivAcc); -sort_components1(S,TypeName,[C=#'ComponentType'{tags=[{'APPLICATION',_}|_R]}|Cs], - UnivAcc,ApplAcc,ContAcc,PrivAcc) -> - sort_components1(S,TypeName,Cs,UnivAcc,[C|ApplAcc],ContAcc,PrivAcc); -sort_components1(S,TypeName,[C=#'ComponentType'{tags=[{'CONTEXT',_}|_R]}|Cs], - UnivAcc,ApplAcc,ContAcc,PrivAcc) -> - sort_components1(S,TypeName,Cs,UnivAcc,ApplAcc,[C|ContAcc],PrivAcc); -sort_components1(S,TypeName,[C=#'ComponentType'{tags=[{'PRIVATE',_}|_R]}|Cs], - UnivAcc,ApplAcc,ContAcc,PrivAcc) -> - sort_components1(S,TypeName,Cs,UnivAcc,ApplAcc,ContAcc,[C|PrivAcc]); -sort_components1(S,TypeName,[],UnivAcc,ApplAcc,ContAcc,PrivAcc) -> - I = #'ComponentType'.tags, - ascending_order_check(S,TypeName,sort_universal_type(UnivAcc)) ++ - ascending_order_check(S,TypeName,lists:keysort(I,ApplAcc)) ++ - ascending_order_check(S,TypeName,lists:keysort(I,ContAcc)) ++ - ascending_order_check(S,TypeName,lists:keysort(I,PrivAcc)). 
- -ascending_order_check(S,TypeName,Components) -> - ascending_order_check1(S,TypeName,Components), - Components. - -ascending_order_check1(S,TypeName, - [C1 = #'ComponentType'{tags=[{_,T}|_]}, - C2 = #'ComponentType'{tags=[{_,T}|_]}|Rest]) -> - asn1ct:warning("Indistinct tag ~p in SET ~p, components ~p and ~p~n", - [T,TypeName,C1#'ComponentType'.name,C2#'ComponentType'.name],S, - "Indistinct tag in SET"), - ascending_order_check1(S,TypeName,[C2|Rest]); -ascending_order_check1(S,TypeName, - [C1 = #'ComponentType'{tags=[{'UNIVERSAL',T1}|_]}, - C2 = #'ComponentType'{tags=[{'UNIVERSAL',T2}|_]}|Rest]) -> - case (decode_type(T1) == decode_type(T2)) of - true -> - asn1ct:warning("Indistinct tags ~p and ~p in" - " SET ~p, components ~p and ~p~n", - [T1,T2,TypeName,C1#'ComponentType'.name, - C2#'ComponentType'.name],S, - "Indistinct tags and in SET"), - ascending_order_check1(S,TypeName,[C2|Rest]); - _ -> - ascending_order_check1(S,TypeName,[C2|Rest]) - end; -ascending_order_check1(S,N,[_|Rest]) -> - ascending_order_check1(S,N,Rest); -ascending_order_check1(_,_,[]) -> - ok. - -sort_universal_type(Components) -> - List = lists:map(fun(C) -> - #'ComponentType'{tags=[{_,T}|_]} = C, - {decode_type(T),C} - end, - Components), - SortedList = lists:keysort(1,List), - lists:map(fun(X)->element(2,X) end,SortedList). - -decode_type(I) when is_integer(I) -> - I; -decode_type(T) -> - asn1ct_gen_ber_bin_v2:decode_type(T). +sort_components1(Cs0) -> + Cs1 = [{tag_key(Tag),C} || #'ComponentType'{tags=[Tag|_]}=C <- Cs0], + Cs = lists:sort(Cs1), + [C || {_,C} <- Cs]. + +tag_key({'UNIVERSAL',Tag}) -> {0,Tag}; +tag_key({'APPLICATION',Tag}) -> {1,Tag}; +tag_key({'CONTEXT',Tag}) -> {2,Tag}; +tag_key({'PRIVATE',Tag}) -> {3,Tag}. untagged_choice(_S,[#'ComponentType'{typespec=#type{tag=[],def={'CHOICE',_}}}|_Rest]) -> true; @@ -5477,35 +4638,43 @@ check_selectiontype(S,Name,#type{def=Eref}) {RefMod,TypeDef} = get_referenced_type(S,Eref), NewS = S#state{module=load_asn1_module(S,RefMod), mname=RefMod, - type=TypeDef, tname=get_datastr_name(TypeDef)}, check_selectiontype2(NewS,Name,TypeDef); check_selectiontype(S,Name,Type=#type{def={pt,_,_}}) -> - TName = - case S#state.recordtopname of - [] -> - S#state.tname; - N -> N - end, + TName = case S#state.recordtopname of + [] -> S#state.tname; + N -> N + end, TDef = #typedef{name=TName,typespec=Type}, check_selectiontype2(S,Name,TDef); -check_selectiontype(S,Name,Type) -> - Msg = lists:flatten(io_lib:format("SelectionType error: ~w < ~w must be a reference to a CHOICE.",[Name,Type])), - error({type,Msg,S}). +check_selectiontype(S, _Name, Type) -> + asn1_error(S, {illegal_choice_type, error_value(Type)}). check_selectiontype2(S,Name,TypeDef) -> NewS = S#state{recordtopname=get_datastr_name(TypeDef)}, - CheckedType = check_type(NewS,TypeDef,TypeDef#typedef.typespec), - Components = get_choice_components(S,CheckedType#type.def), - case lists:keysearch(Name,#'ComponentType'.name,Components) of - {value,C} -> - %% The selected type will have the tag of the selected type. 
- _T = C#'ComponentType'.typespec; -% T#type{tag=def_to_tag(NewS,T#type.def)}; - _ -> - Msg = lists:flatten(io_lib:format("error checking SelectionType: ~w~n",[Name])), - error({type,Msg,S}) + Components = + try + CheckedType = check_type(NewS,TypeDef,TypeDef#typedef.typespec), + get_choice_components(S,CheckedType#type.def) + catch error:_ -> + asn1_error(S, {illegal_choice_type, error_value(TypeDef)}) + end, + case lists:keyfind(Name, #'ComponentType'.name, Components) of + #'ComponentType'{typespec=TS} -> TS; + false -> asn1_error(S, {illegal_id, error_value(Name)}) end. + + +get_choice_components(_S,{'CHOICE',Components}) when is_list(Components)-> + Components; +get_choice_components(_S,{'CHOICE',{C1,C2}}) when is_list(C1),is_list(C2) -> + C1++C2; +get_choice_components(S,ERef=#'Externaltypereference'{}) -> + {_RefMod,TypeDef}=get_referenced_type(S,ERef), + #typedef{typespec=TS} = TypeDef, + get_choice_components(S,TS#type.def). + + check_restrictedstring(_S,_Def,_Constr) -> ok. @@ -5538,7 +4707,7 @@ check_choice(S,Type,Components) when is_list(Components) -> check_unique_tags(S, NewComps3), complist_as_tuple(NewComps3); Dupl -> - throw({error,{asn1,{duplicate_choice_alternatives,Dupl}}}) + asn1_error(S, {duplicate_identifier,error_value(hd(Dupl))}) end; check_choice(_S,_,[]) -> []. @@ -5635,25 +4804,30 @@ check_unique_tags(S,C) -> case (S#state.module)#module.tagdefault of 'AUTOMATIC' -> case any_manual_tag(C) of - false -> true; - _ -> collect_and_sort_tags(C,[]) + false -> + true; + true -> + check_unique_tags(S, C, []) end; _ -> - collect_and_sort_tags(C,[]) + check_unique_tags(S, C, []) end. -collect_and_sort_tags([C|Rest],Acc) when is_record(C,'ComponentType') -> - collect_and_sort_tags(Rest,C#'ComponentType'.tags ++ Acc); -collect_and_sort_tags([_|Rest],Acc) -> - collect_and_sort_tags(Rest,Acc); -collect_and_sort_tags([],Acc) -> - {Dupl,_}= lists:mapfoldl(fun(El,El)->{{dup,El},El};(El,_Prev)-> {El,El} end,notag,lists:sort(Acc)), - Dupl2 = [Dup|| {dup,Dup} <- Dupl], - if - length(Dupl2) > 0 -> - throw({error,{asn1,{duplicates_of_the_tags,Dupl2}}}); - true -> - true +check_unique_tags(S, [#'ComponentType'{name=Name,tags=Tags0}|T], Acc) -> + Tags = [{Tag,Name} || Tag <- Tags0], + check_unique_tags(S, T, Tags ++ Acc); +check_unique_tags(S, [_|T], Acc) -> + check_unique_tags(S, T, Acc); +check_unique_tags(S, [], Acc) -> + R0 = sofs:relation(Acc), + R1 = sofs:relation_to_family(R0), + R2 = sofs:to_external(R1), + Dup = [Els || {_,[_,_|_]=Els} <- R2], + case Dup of + [] -> + ok; + [FirstDupl|_] -> + asn1_error(S, {duplicate_tags,FirstDupl}) end. check_unique(L,Pos) -> @@ -5795,28 +4969,18 @@ componentrelation_leadingattr(S,[C= #'ComponentType'{}|Cs],CompList,Acc,CompAcc) {[],C}; [{ObjSet,Attr,N,ClassDef,_Path,ValueIndex}|_NewRest] -> OS = object_set_mod_name(S,ObjSet), - UniqueFieldName = - case (catch get_unique_fieldname(S,#classdef{typespec=ClassDef})) of - {error,'__undefined_',_} -> - no_unique; - {asn1,Msg,_} -> - error({type,Msg,S}); - {'EXIT',Msg} -> - error({type,{internal_error,Msg},S}); - {Other,_} -> Other - end, -% UsedFieldName = get_used_fieldname(S,Attr,STList), + UniqFN = get_unique_fieldname(S, + #classdef{typespec=ClassDef}), %% Res should be done differently: even though %% a unique field name exists it is not %% certain that the ObjectClassFieldType of %% the simple table constraint picks that %% class field. 
Res = #simpletableattributes{objectsetname=OS, -%% c_name=asn1ct_gen:un_hyphen_var(Attr), c_name=Attr, c_index=N, - usedclassfield=UniqueFieldName, - uniqueclassfield=UniqueFieldName, + usedclassfield=UniqFN, + uniqueclassfield=UniqFN, valueindex=ValueIndex}, {[Res],C#'ComponentType'{typespec=NewTSpec}} end; @@ -5869,7 +5033,7 @@ remove_doubles1(El,L) -> NewL -> remove_doubles1(El,NewL) end. -%% get_simple_table_info searches the commponents Cs by the path from +%% get_simple_table_info searches the components Cs by the path from %% an at-list (third argument), and follows into a component of it if %% necessary, to get information needed for code generating. %% @@ -5884,32 +5048,35 @@ remove_doubles1(El,L) -> % %% at least one step below the outermost level, i.e. the leading % %% information shall be on a sub level. 2) They don't have any common % %% path. -get_simple_table_info(S,Cs,[AtList|Rest]) -> - [get_simple_table_info1(S,Cs,AtList,[])|get_simple_table_info(S,Cs,Rest)]; -get_simple_table_info(_,_,[]) -> - []. -get_simple_table_info1(S,Cs,[Cname|Cnames],Path) when is_list(Cs) -> - case lists:keysearch(Cname,#'ComponentType'.name,Cs) of - {value,C} -> - get_simple_table_info1(S,C,Cnames,[Cname|Path]); - _ -> - error({type,"Missing expected simple table constraint",S}) - end; -get_simple_table_info1(S,#'ComponentType'{typespec=TS},[],Path) -> - %% In this component there must be a simple table constraint - %% o.w. the asn1 code is wrong. - #type{def=OCFT,constraint=Cnstr} = TS, - case constraint_member(simpletable,Cnstr) of - {true,{simpletable,_OSRef}} -> - simple_table_info(S,OCFT,Path); - _ -> - error({type,{"missing expected simple table constraint", - Cnstr},S}) +get_simple_table_info(S, Cs, AtLists) -> + [get_simple_table_info1(S, Cs, AtList, []) || AtList <- AtLists]. + +get_simple_table_info1(S, Cs, [Cname|Cnames], Path) -> + #'ComponentType'{} = C = + lists:keyfind(Cname, #'ComponentType'.name, Cs), + get_simple_table_info2(S, C, Cnames, [Cname|Path]). + +get_simple_table_info2(S, #'ComponentType'{name=Name,typespec=TS}, [], Path) -> + OCFT = simple_table_get_ocft(S, Name, TS), + case lists:keymember(simpletable, 1, TS#type.constraint) of + true -> + simple_table_info(S, OCFT, Path); + false -> + asn1_error(S, {missing_table_constraint,Name}) end; -get_simple_table_info1(S,#'ComponentType'{typespec=TS},Cnames,Path) -> +get_simple_table_info2(S, #'ComponentType'{typespec=TS}, Cnames, Path) -> Components = get_atlist_components(TS#type.def), - get_simple_table_info1(S,Components,Cnames,Path). - + get_simple_table_info1(S, Components, Cnames, Path). + +simple_table_get_ocft(_, _, #type{def=#'ObjectClassFieldType'{}=OCFT}) -> + OCFT; +simple_table_get_ocft(S, Component, #type{constraint=Constr}) -> + case lists:keyfind(ocft, 1, Constr) of + {ocft,OCFT} -> + OCFT; + false -> + asn1_error(S, {missing_ocft,Component}) + end. simple_table_info(S,#'ObjectClassFieldType'{classname=ClRef, class=ObjectClass, @@ -5932,19 +5099,8 @@ simple_table_info(S,#'ObjectClassFieldType'{classname=ClRef, CDef; _ -> #classdef{typespec=ObjectClass} end, - UniqueName = - case (catch get_unique_fieldname(S,ClassDef)) of - {error,'__undefined_',_} -> no_unique; - {asn1,Msg,_} -> - error({type,Msg,S}); - {'EXIT',Msg} -> - error({type,{internal_error,Msg},S}); - {Other,_} -> Other - end, - {lists:reverse(Path),ObjectClassFieldName,UniqueName}; -simple_table_info(S,Type,_) -> - error({type,{"the type referenced by a componentrelation constraint must be a ObjectClassFieldType",Type},S}). 
- + UniqueName = get_unique_fieldname(S, ClassDef), + {lists:reverse(Path),ObjectClassFieldName,UniqueName}. %% any_component_relation searches for all component relation %% constraints that refers to the actual level and returns a list of @@ -5958,9 +5114,8 @@ simple_table_info(S,Type,_) -> %% is found to check the validity of the at-list. any_component_relation(S,[#'ComponentType'{name=CName,typespec=Type}|Cs],CNames,NamePath,Acc) -> CRelPath = - case constraint_member(componentrelation,Type#type.constraint) of -%% [{componentrelation,_,AtNotation}] -> - {true,{_,_,AtNotation}} -> + case lists:keyfind(componentrelation, 1, Type#type.constraint) of + {_,_,AtNotation} -> %% Found component relation constraint, now check %% whether this constraint is relevant for the level %% where the search started @@ -5969,7 +5124,7 @@ any_component_relation(S,[#'ComponentType'{name=CName,typespec=Type}|Cs],CNames, %% simple table constraint from where the component %% relation is found. evaluate_atpath(S,NamePath,CNames,AtNot); - _ -> + false -> [] end, InnerAcc = @@ -5991,11 +5146,11 @@ any_component_relation(S,[#'ComponentType'{name=CName,typespec=Type}|Cs],CNames, any_component_relation(S,Cs,CNames,NamePath,InnerAcc++CRelPath++Acc); any_component_relation(S,Type,CNames,NamePath,Acc) when is_record(Type,type) -> CRelPath = - case constraint_member(componentrelation,Type#type.constraint) of - {true,{_,_,AtNotation}} -> + case lists:keyfind(componentrelation, 1, Type#type.constraint) of + {_,_,AtNotation} -> AtNot = extract_at_notation(AtNotation), evaluate_atpath(S,NamePath,CNames,AtNot); - _ -> + false -> [] end, InnerAcc = @@ -6017,15 +5172,6 @@ any_component_relation(S,['ExtensionAdditionGroupEnd'|Cs],CNames,NamePath,Acc) - any_component_relation(_,[],_,_,Acc) -> Acc. -constraint_member(componentrelation,[CRel={componentrelation,_,_}|_Rest]) -> - {true,CRel}; -constraint_member(simpletable,[ST={simpletable,_}|_Rest]) -> - {true,ST}; -constraint_member(Key,[_H|T]) -> - constraint_member(Key,T); -constraint_member(_,[]) -> - false. - %% evaluate_atpath/4 finds out whether the at notation refers to the %% search level. The list of referenced names in the AtNot list shall %% begin with a name that exists on the level it refers to. If the @@ -6059,9 +5205,7 @@ evaluate_atpath(S=#state{abscomppath=TopPath},NamePath,Cnames,{outermost,AtPath= {_,[H|_T]} -> case lists:member(H,Cnames) of true -> [AtPathBelowTop]; - _ -> - %% error({type,{asn1,"failed to analyze at-path",AtPath},S}) - throw({type,{asn1,"failed to analyze at-path",AtPath},S}) + _ -> asn1_error(S, {invalid_at_path, AtPath}) end end; evaluate_atpath(_,_,_,_) -> @@ -6098,23 +5242,8 @@ tuple2complist({R1,E,R2}) -> tuple2complist(List) when is_list(List) -> List. -get_choice_components(_S,{'CHOICE',Components}) when is_list(Components)-> - Components; -get_choice_components(_S,{'CHOICE',{C1,C2}}) when is_list(C1),is_list(C2) -> - C1++C2; -get_choice_components(S,ERef=#'Externaltypereference'{}) -> - {_RefMod,TypeDef}=get_referenced_type(S,ERef), - #typedef{typespec=TS} = TypeDef, - get_choice_components(S,TS#type.def). - -extract_at_notation([{Level,[#'Externalvaluereference'{value=Name}|Rest]}]) -> - {Level,[Name|extract_at_notation1(Rest)]}; -extract_at_notation(At) -> - exit({error,{asn1,{at_notation,At}}}). -extract_at_notation1([#'Externalvaluereference'{value=Name}|Rest]) -> - [Name|extract_at_notation1(Rest)]; -extract_at_notation1([]) -> - []. 
+extract_at_notation([{Level,ValueRefs}]) -> + {Level,[Name || #'Externalvaluereference'{value=Name} <- ValueRefs]}. %% componentrelation1/1 identifies all componentrelation constraints %% that exist in C or in the substructure of C. Info about the found @@ -6133,8 +5262,8 @@ componentrelation1(S,C = #type{def=Def,constraint=Constraint,tablecinf=TCI}, Ret = % case Constraint of % [{componentrelation,{_,_,ObjectSet},AtList}|_Rest] -> - case constraint_member(componentrelation,Constraint) of - {true,{_,{_,_,ObjectSet},AtList}} -> + case lists:keyfind(componentrelation, 1, Constraint) of + {_,{_,_,ObjectSet},AtList} -> [{_,AL=[#'Externalvaluereference'{}|_R1]}|_R2] = AtList, %% Note: if Path is longer than one,i.e. it is within %% an inner type of the actual level, then the only @@ -6145,7 +5274,7 @@ componentrelation1(S,C = #type{def=Def,constraint=Constraint,tablecinf=TCI}, lists:map(fun(#'Externalvaluereference'{value=V})->V end, AL), {[{ObjectSet,AtPath,ClassDef,Path}],Def}; - _ -> + false -> %% check the inner type of component innertype_comprel(S,Def,Path) end, @@ -6219,10 +5348,8 @@ componentlist_comprel(_,[],Acc,_,NewCL) -> innertype_comprel1(S,T = #type{def=Def,constraint=Cons,tablecinf=TCI},Path) -> Ret = -% case Cons of -% [{componentrelation,{_,_,ObjectSet},AtList}|_Rest] -> - case constraint_member(componentrelation,Cons) of - {true,{_,{_,_,ObjectSet},AtList}} -> + case lists:keyfind(componentrelation, 1, Cons) of + {_,{_,_,ObjectSet},AtList} -> %% This AtList must have an "outermost" at sign to be %% relevent here. [{_,AL=[#'Externalvaluereference'{value=_Attr}|_R1]}|_R2] @@ -6233,7 +5360,7 @@ innertype_comprel1(S,T = #type{def=Def,constraint=Cons,tablecinf=TCI},Path) -> lists:map(fun(#'Externalvaluereference'{value=V})->V end, AL), [{ObjectSet,AtPath,ClassDef,Path}]; - _ -> + false -> innertype_comprel(S,Def,Path) end, case Ret of @@ -6301,8 +5428,7 @@ value_match(S,#'ComponentType'{typespec=Type},Name,[At|Ats],Acc) -> InnerType = asn1ct_gen:get_inner(Type#type.def), Components = case get_atlist_components(Type#type.def) of - [] -> error({type,{asn1,"element in at list must be a " - "SEQUENCE, SET or CHOICE.",Name},S}); + [] -> asn1_error(S, {invalid_element, Name}); Comps -> Comps end, {Index,ValueIndex} = component_value_index(S,InnerType,At,Components), @@ -6322,29 +5448,27 @@ component_index1(_S,Name,[#'ComponentType'{name=Name}|_Cs],N) -> component_index1(S,Name,[_C|Cs],N) -> component_index1(S,Name,Cs,N+1); component_index1(S,Name,[],_) -> - error({type,{asn1,"component of at-list was not" - " found in substructure",Name},S}). + asn1_error(S, {invalid_at_list, Name}). -get_unique_fieldname(_S,ClassDef) when is_record(ClassDef,classdef) -> -%% {_,Fields,_} = ClassDef#classdef.typespec, - Fields = (ClassDef#classdef.typespec)#objectclass.fields, - get_unique_fieldname1(Fields,[]); +get_unique_fieldname(S, #classdef{typespec=TS}) -> + Fields = TS#objectclass.fields, + get_unique_fieldname1(S, Fields, []); get_unique_fieldname(S,#typedef{typespec=#type{def=ClassRef}}) -> %% A class definition may be referenced as %% REFED-CLASS ::= DEFINED-CLASS and then REFED-CLASS is a typedef {_M,ClassDef} = get_referenced_type(S,ClassRef), get_unique_fieldname(S,ClassDef). 
-get_unique_fieldname1([],[]) -> - throw({error,'__undefined_',[]}); -get_unique_fieldname1([],[Name]) -> - Name; -get_unique_fieldname1([],Acc) -> - throw({asn1,'only one UNIQUE field is allowed in CLASS',Acc}); -get_unique_fieldname1([{fixedtypevaluefield,Name,_,'UNIQUE',Opt}|Rest],Acc) -> - get_unique_fieldname1(Rest,[{Name,Opt}|Acc]); -get_unique_fieldname1([_H|T],Acc) -> - get_unique_fieldname1(T,Acc). +get_unique_fieldname1(S, [{fixedtypevaluefield,Name,_,'UNIQUE',Opt}|T], Acc) -> + get_unique_fieldname1(S, T, [{Name,Opt}|Acc]); +get_unique_fieldname1(S, [_|T], Acc) -> + get_unique_fieldname1(S, T, Acc); +get_unique_fieldname1(S, [], Acc) -> + case Acc of + [] -> no_unique; + [Name] -> Name; + [_|_] -> asn1_error(S, multiple_uniqs) + end. get_tableconstraint_info(S,Type,{CheckedTs,EComps,CheckedTs2}) -> {get_tableconstraint_info(S,Type,CheckedTs,[]), @@ -6400,31 +5524,8 @@ get_tableconstraint_info(S,Type,[C|Cs],Acc) -> get_referenced_fieldname([{_,FirstFieldname}]) -> {FirstFieldname,[]}; -get_referenced_fieldname([{_,FirstFieldname}|Rest]) -> - {FirstFieldname,lists:map(fun(X)->element(2,X) end,Rest)}; -get_referenced_fieldname(Def={FieldName,RestFieldName}) when is_atom(FieldName),is_list(RestFieldName)-> - Def; -get_referenced_fieldname(Def) -> - {no_type,Def}. - -%% get_ObjectClassFieldType extracts the type from the chain of -%% objects that leads to a final type. -get_ObjectClassFieldType(S,ERef,PrimFieldNameList) when - is_record(ERef,'Externaltypereference') -> - {MName,Type} = get_referenced_type(S,ERef), - NewS = update_state(S#state{type=Type, - tname=ERef#'Externaltypereference'.type},MName), - ClassSpec = check_class(NewS,Type), - Fields = ClassSpec#objectclass.fields, - get_ObjectClassFieldType(S,Fields,PrimFieldNameList); -get_ObjectClassFieldType(S,Fields,L=[_PrimFieldName1|_Rest]) -> - check_PrimitiveFieldNames(S,Fields,L), - get_OCFType(S,Fields,L); -get_ObjectClassFieldType(S,ERef,{FieldName,Rest}) -> - get_ObjectClassFieldType(S,ERef,Rest ++ [FieldName]). - -check_PrimitiveFieldNames(_S,_Fields,_) -> - ok. +get_referenced_fieldname([{_,FirstFieldname}|T]) -> + {FirstFieldname,[element(2, X) || X <- T]}. %% get_ObjectClassFieldType_classdef gets the def of the class of the %% ObjectClassFieldType, i.e. the objectclass record. If the type has @@ -6445,15 +5546,13 @@ get_OCFType(S,Fields,[PrimFieldName|Rest]) -> {fixedtypevaluefield,PrimFieldName,Type}; {value,{objectfield,_,ClassRef,_Unique,_OptSpec}} -> {MName,ClassDef} = get_referenced_type(S,ClassRef), - NewS = update_state(S#state{type=ClassDef, - tname=get_datastr_name(ClassDef)}, + NewS = update_state(S#state{tname=get_datastr_name(ClassDef)}, MName), CheckedCDef = check_class(NewS,ClassDef), get_OCFType(S,CheckedCDef#objectclass.fields,Rest); {value,{objectsetfield,_,Type,_OptSpec}} -> {MName,ClassDef} = get_referenced_type(S,Type#type.def), - NewS = update_state(S#state{type=ClassDef, - tname=get_datastr_name(ClassDef)}, + NewS = update_state(S#state{tname=get_datastr_name(ClassDef)}, MName), CheckedCDef = check_class(NewS,ClassDef), get_OCFType(S,CheckedCDef#objectclass.fields,Rest); @@ -6461,7 +5560,7 @@ get_OCFType(S,Fields,[PrimFieldName|Rest]) -> {value,Other} -> {element(1,Other),PrimFieldName}; _ -> - throw({error,lists:flatten(io_lib:format("undefined FieldName in ObjectClassFieldType: ~w",[PrimFieldName]))}) + asn1_error(S, {illegal_object_field, PrimFieldName}) end. 
get_taglist(S,Ext) when is_record(Ext,'Externaltypereference') -> @@ -6485,30 +5584,8 @@ get_taglist(_S,#'ObjectClassFieldType'{type={typefield,_}}) -> []; get_taglist(S,#'ObjectClassFieldType'{type={fixedtypevaluefield,_,Type}}) -> get_taglist(S,Type); -get_taglist(S,{ERef=#'Externaltypereference'{},FieldNameList}) - when is_list(FieldNameList) -> - case get_ObjectClassFieldType(S,ERef,FieldNameList) of - {fixedtypevaluefield,_,Type} -> get_taglist(S,Type); - {TypeFieldName,_} when is_atom(TypeFieldName) -> []%should check if allowed - end; -get_taglist(S,{ObjCl,FieldNameList}) when is_record(ObjCl,objectclass), - is_list(FieldNameList) -> - case get_ObjectClassFieldType(S,ObjCl#objectclass.fields,FieldNameList) of - {fixedtypevaluefield,_,Type} -> get_taglist(S,Type); - {TypeFieldName,_} when is_atom(TypeFieldName) -> []%should check if allowed - end; -get_taglist(S,Def) -> - case S#state.erule of - ber -> - []; - _ -> - case Def of - 'ASN1_OPEN_TYPE' -> % open_type has no UNIVERSAL tag as such - []; - _ -> - [asn1ct_gen:def_to_tag(Def)] - end - end. +get_taglist(_, _) -> + []. get_taglist1(S,[#'ComponentType'{name=_Cname,tags=TagL}|Rest]) when is_list(TagL) -> %% tag_list has been here , just return TagL and continue with next alternative @@ -6565,15 +5642,6 @@ get_taglist1(_S,[]) -> %% tag_number('CHARACTER STRING') -> 29; %% tag_number('BMPString') -> 30. - -dbget_ex(_S,Module,Key) -> - case asn1_db:dbget(Module,Key) of - undefined -> - - throw({error,{asn1,{undefined,{Module,Key}}}}); % this is catched on toplevel type or value - T -> T - end. - merge_tags(T1, T2) when is_list(T2) -> merge_tags2(T1 ++ T2, []); merge_tags(T1, T2) -> @@ -6590,75 +5658,46 @@ merge_tags2([H|T],Acc) -> merge_tags2([], Acc) -> lists:reverse(Acc). -%% merge_constraints(C1, []) -> -%% C1; -%% merge_constraints([], C2) -> -%% C2; -%% merge_constraints(C1, C2) -> -%% {SList,VList,PAList,Rest} = splitlist(C1++C2,[],[],[],[]), -%% SizeC = merge_constraints(SList), -%% ValueC = merge_constraints(VList), -%% PermAlphaC = merge_constraints(PAList), -%% case Rest of -%% [] -> -%% SizeC ++ ValueC ++ PermAlphaC; -%% _ -> -%% throw({error,{asn1,{not_implemented,{merge_constraints,Rest}}}}) -%% end. - -%% merge_constraints([]) -> []; -%% merge_constraints([C1 = {_,{Low1,High1}},{_,{Low2,High2}}|Rest]) when Low1 >= Low2, -%% High1 =< High2 -> -%% merge_constraints([C1|Rest]); -%% merge_constraints([C1={'PermittedAlphabet',_},C2|Rest]) -> -%% [C1|merge_constraints([C2|Rest])]; -%% merge_constraints([C1 = {_,{_Low1,_High1}},C2 = {_,{_Low2,_High2}}|_Rest]) -> -%% throw({error,asn1,{conflicting_constraints,{C1,C2}}}); -%% merge_constraints([C]) -> -%% [C]. - -%% splitlist([C={'SizeConstraint',_}|Rest],Sacc,Vacc,PAacc,Restacc) -> -%% splitlist(Rest,[C|Sacc],Vacc,PAacc,Restacc); -%% splitlist([C={'ValueRange',_}|Rest],Sacc,Vacc,PAacc,Restacc) -> -%% splitlist(Rest,Sacc,[C|Vacc],PAacc,Restacc); -%% splitlist([C={'PermittedAlphabet',_}|Rest],Sacc,Vacc,PAacc,Restacc) -> -%% splitlist(Rest,Sacc,Vacc,[C|PAacc],Restacc); -%% splitlist([C|Rest],Sacc,Vacc,PAacc,Restacc) -> -%% splitlist(Rest,Sacc,Vacc,PAacc,[C|Restacc]); -%% splitlist([],Sacc,Vacc,PAacc,Restacc) -> -%% {lists:reverse(Sacc), -%% lists:reverse(Vacc), -%% lists:reverse(PAacc), -%% lists:reverse(Restacc)}. 
- - - -storeindb(S,M) when is_record(M,module) -> - TVlist = M#module.typeorval, - NewM = M#module{typeorval=findtypes_and_values(TVlist)}, - asn1_db:dbnew(NewM#module.name, S#state.erule), - asn1_db:dbput(NewM#module.name,'MODULE', NewM), - Res = storeindb(#state{mname=NewM#module.name}, TVlist, []), - include_default_class(S,NewM#module.name), +storeindb(S0, #module{name=ModName,typeorval=TVlist0}=M) -> + S = S0#state{mname=ModName}, + TVlist1 = [{asn1ct:get_name_of_def(Def),Def} || Def <- TVlist0], + case check_duplicate_defs(S, TVlist1) of + ok -> + storeindb_1(S, M, TVlist0, TVlist1); + {error,_}=Error -> + Error + end. + +storeindb_1(S, #module{name=ModName}=M, TVlist0, TVlist) -> + NewM = M#module{typeorval=findtypes_and_values(TVlist0)}, + asn1_db:dbnew(ModName, S#state.erule), + asn1_db:dbput(ModName, 'MODULE', NewM), + asn1_db:dbput(ModName, TVlist), + include_default_class(S, NewM#module.name), include_default_type(NewM#module.name), - Res. + ok. -storeindb(#state{mname=Module}=S, [H|T], Errors) -> - Name = asn1ct:get_name_of_def(H), - case asn1_db:dbget(Module, Name) of - undefined -> - asn1_db:dbput(Module, Name, H), - storeindb(S, T, Errors); - Prev -> - PrevLine = asn1ct:get_pos_of_def(Prev), - Error = return_asn1_error(S, H, {already_defined,Name,PrevLine}), - storeindb(S, T, [Error|Errors]) - end; -storeindb(_, [], []) -> - ok; -storeindb(_, [], [_|_]=Errors) -> - {error,Errors}. +check_duplicate_defs(S, Defs) -> + Set0 = sofs:relation(Defs), + Set1 = sofs:relation_to_family(Set0), + Set = sofs:to_external(Set1), + case [duplicate_def(S, N, Dup) || {N,[_,_|_]=Dup} <- Set] of + [] -> + ok; + [_|_]=E -> + {error,lists:append(E)} + end. + +duplicate_def(S, Name, Dups0) -> + Dups1 = [{asn1ct:get_pos_of_def(Def),Def} || Def <- Dups0], + [{Prev,_}|Dups] = lists:sort(Dups1), + duplicate_def_1(S, Dups, Name, Prev). +duplicate_def_1(S, [{_,Def}|T], Name, Prev) -> + E = return_asn1_error(S, Def, {already_defined,Name,Prev}), + [E|duplicate_def_1(S, T, Name, Prev)]; +duplicate_def_1(_, [], _, _) -> + []. findtypes_and_values(TVList) -> findtypes_and_values(TVList,[],[],[],[],[],[]).%% Types,Values, @@ -6698,99 +5737,146 @@ findtypes_and_values([],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc) -> {lists:reverse(Tacc),lists:reverse(Vacc),lists:reverse(Pacc), lists:reverse(Cacc),lists:reverse(Oacc),lists:reverse(OSacc)}. +return_asn1_error(#state{error_context=Context}=S, Error) -> + return_asn1_error(S, Context, Error). + return_asn1_error(#state{mname=Where}, Item, Error) -> Pos = asn1ct:get_pos_of_def(Item), {structured_error,{Where,Pos},?MODULE,Error}. -asn1_error(S, Item, Error) -> - throw({error,return_asn1_error(S, Item, Error)}). +asn1_error(S, Error) -> + throw({error,return_asn1_error(S, Error)}). format_error({already_defined,Name,PrevLine}) -> io_lib:format("the name ~p has already been defined at line ~p", [Name,PrevLine]); +format_error({duplicate_identifier,Ids}) -> + io_lib:format("the identifier '~p' has already been used", [Ids]); +format_error({duplicate_tags,Elements}) -> + io_lib:format("duplicate tags in the elements: ~s", + [format_elements(Elements)]); +format_error({enum_illegal_redefinition,Id}) -> + io_lib:format("'~s' must not be redefined", [Id]); +format_error({enum_not_ascending,Id,N,Prev}) -> + io_lib:format("the values for enumerations which follow '...' 
must " + "be in ascending order, but '~p(~p)' is less than the " + "previous value '~p'", [Id,N,Prev]); +format_error({enum_reused_value,Id,Val}) -> + io_lib:format("'~s' has the value '~p' which is used more than once", + [Id,Val]); +format_error({illegal_id, Id}) -> + io_lib:format("illegal identifier: ~p", [Id]); +format_error({illegal_choice_type, Ref}) -> + io_lib:format("expecting a CHOICE type: ~p", [Ref]); +format_error({illegal_class_name,Class}) -> + io_lib:format("the class name '~s' is illegal (it must start with an uppercase letter and only contain uppercase letters, digits, or hyphens)", [Class]); +format_error({illegal_COMPONENTS_OF, Ref}) -> + io_lib:format("expected a SEQUENCE or SET got: ~p", [Ref]); +format_error(illegal_external_value) -> + "illegal value in EXTERNAL type"; format_error({illegal_instance_of,Class}) -> io_lib:format("using INSTANCE OF on class '~s' is illegal, " - "because INSTANCE OF may only be used on the class TYPE-IDENTFIER", + "because INSTANCE OF may only be used on the class TYPE-IDENTIFIER", [Class]); +format_error(illegal_integer_value) -> + "expecting an integer value"; +format_error(illegal_object) -> + "expecting an object"; +format_error({illegal_object_field, Id}) -> + io_lib:format("expecting a class field: ~p",[Id]); +format_error({illegal_oid,o_id}) -> + "illegal OBJECT IDENTIFIER"; +format_error({illegal_oid,rel_oid}) -> + "illegal RELATIVE-OID"; format_error(illegal_octet_string_value) -> "expecting a bstring or an hstring as value for an OCTET STRING"; format_error({illegal_typereference,Name}) -> io_lib:format("'~p' is used as a typereference, but does not start with an uppercase letter", [Name]); +format_error(illegal_table_constraint) -> + "table constraints may only be applied to CLASS.&field constructs"; +format_error(illegal_value) -> + "expecting a value"; +format_error({illegal_value, TYPE}) -> + io_lib:format("expecting a ~s value", [TYPE]); format_error({invalid_fields,Fields,Obj}) -> io_lib:format("invalid ~s in ~p", [format_fields(Fields),Obj]); format_error({invalid_bit_number,Bit}) -> io_lib:format("the bit number '~p' is invalid", [Bit]); +format_error(invalid_table_constraint) -> + "the table constraint is not an object set"; +format_error(invalid_objectset) -> + "expecting an object set"; +format_error({implicit_tag_before,Kind}) -> + "illegal implicit tag before " ++ + case Kind of + choice -> "'CHOICE'"; + open_type -> "open type" + end; format_error({missing_mandatory_fields,Fields,Obj}) -> io_lib:format("missing mandatory ~s in ~p", [format_fields(Fields),Obj]); +format_error({missing_table_constraint,Component}) -> + io_lib:format("the component '~s' is referenced by a component relation constraint using the '@field-name' notation, but does not have a table constraint", + [Component]); +format_error({missing_id,Id}) -> + io_lib:format("expected the mandatory component '~p'", [Id]); +format_error({missing_ocft,Component}) -> + io_lib:format("the component '~s' must be an ObjectClassFieldType (CLASSNAME.&field-name)", [Component]); +format_error(multiple_uniqs) -> + "implementation limitation: only one UNIQUE field is allowed in CLASS"; format_error({namelist_redefinition,Name}) -> io_lib:format("the name '~s' can not be redefined", [Name]); +format_error({param_bad_type, Ref}) -> + io_lib:format("'~p' is not a parameterized type", [Ref]); +format_error(param_wrong_number_of_arguments) -> + "wrong number of arguments"; +format_error(reversed_range) -> + "ranges must be given in increasing order"; 
+format_error({syntax_duplicated_fields,Fields}) -> + io_lib:format("~s must only occur once in the syntax list", + [format_fields(Fields)]); +format_error(syntax_nomatch) -> + "unexpected end of object definition"; +format_error({syntax_mandatory_in_optional_group,Name}) -> + io_lib:format("the field '&~s' must not be within an optional group since it is not optional", + [Name]); +format_error({syntax_missing_mandatory_fields,Fields}) -> + io_lib:format("missing mandatory ~s in the syntax list", + [format_fields(Fields)]); +format_error({syntax_nomatch,Actual}) -> + io_lib:format("~s is not the next item allowed according to the defined syntax", + [Actual]); +format_error({syntax_undefined_field,Field}) -> + io_lib:format("'&~s' is not a field of the class being defined", + [Field]); format_error({undefined,Name}) -> io_lib:format("'~s' is referenced, but is not defined", [Name]); +format_error({undefined_export,Ref}) -> + io_lib:format("'~s' is exported but is not defined", [Ref]); +format_error({undefined_field,FieldName}) -> + io_lib:format("the field '&~s' is undefined", [FieldName]); format_error({undefined_import,Ref,Module}) -> io_lib:format("'~s' is not exported from ~s", [Ref,Module]); +format_error({unique_and_default,Field}) -> + io_lib:format("the field '&~s' must not have both 'UNIQUE' and 'DEFAULT'", + [Field]); format_error({value_reused,Val}) -> io_lib:format("the value '~p' is used more than once", [Val]); +format_error({non_unique_object,Id}) -> + io_lib:format("object set with a UNIQUE field value of '~p' is used more than once", [Id]); format_error(Other) -> io_lib:format("~p", [Other]). format_fields([F]) -> - io_lib:format("field &~s", [F]); + io_lib:format("field '&~s'", [F]); format_fields([H|T]) -> - [io_lib:format("fields &~s", [H])| - [io_lib:format(", &~s", [F]) || F <- T]]. 
- -error({_,{structured_error,_,_,_}=SE,_}) -> - SE; -error({export,Msg,#state{mname=Mname,type=Ref,tname=Typename}}) -> - Pos = Ref#'Externaltypereference'.pos, - io:format("asn1error:~p:~p:~p~n~p~n",[Pos,Mname,Typename,Msg]), - {error,{export,Pos,Mname,Typename,Msg}}; -% error({type,{Msg1,Msg2},#state{mname=Mname,type=Type,tname=Typename}}) -% when is_record(Type,typedef) -> -% io:format("asn1error:~p:~p:~p ~p~n", -% [Type#typedef.pos,Mname,Typename,Msg1]), -% {error,{type,Type#typedef.pos,Mname,Typename,Msg1,Msg2}}; -error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}}) - when is_record(Type,type) -> - io:format("asn1error:~p:~p~n~p~n", - [Mname,Typename,Msg]), - {error,{type,Mname,Typename,Msg}}; -error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}}) - when is_record(Type,typedef) -> - io:format("asn1error:~p:~p:~p~n~p~n", - [Type#typedef.pos,Mname,Typename,Msg]), - {error,{type,Type#typedef.pos,Mname,Typename,Msg}}; -error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}}) - when is_record(Type,ptypedef) -> - io:format("asn1error:~p:~p:~p~n~p~n", - [Type#ptypedef.pos,Mname,Typename,Msg]), - {error,{type,Type#ptypedef.pos,Mname,Typename,Msg}}; -error({type,Msg,#state{mname=Mname,value=Value,vname=Valuename}}) - when is_record(Value,valuedef) -> - io:format("asn1error:~p:~p:~p~n~p~n",[Value#valuedef.pos,Mname,Valuename,Msg]), - {error,{type,Value#valuedef.pos,Mname,Valuename,Msg}}; -error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}}) - when is_record(Type,pobjectdef) -> - io:format("asn1error:~p:~p:~p~n~p~n", - [Type#pobjectdef.pos,Mname,Typename,Msg]), - {error,{type,Type#pobjectdef.pos,Mname,Typename,Msg}}; -error({value,Msg,#state{mname=Mname,value=Value,vname=Valuename}}) - when is_record(Value,valuedef) -> - io:format("asn1error:~p:~p:~p~n~p~n",[Value#valuedef.pos,Mname,Valuename,Msg]), - {error,{value,Value#valuedef.pos,Mname,Valuename,Msg}}; -error({Other,Msg,#state{mname=Mname,value=#valuedef{pos=Pos},vname=Valuename}}) -> - io:format("asn1error:~p:~p:~p~n~p~n",[Pos,Mname,Valuename,Msg]), - {error,{Other,Pos,Mname,Valuename,Msg}}; -error({Other,Msg,#state{mname=Mname,type=#typedef{pos=Pos},tname=Typename}}) -> - io:format("asn1error:~p:~p:~p~n~p~n",[Pos,Mname,Typename,Msg]), - {error,{Other,Pos,Mname,Typename,Msg}}; -error({Other,Msg,#state{mname=Mname,type=#classdef{pos=Pos},tname=Typename}}) -> - io:format("asn1error:~p:~p:~p~n~p~n",[Pos,Mname,Typename,Msg]), - {error,{Other,Pos,Mname,Typename,Msg}}; -error({Other,Msg,#state{mname=Mname,type=Type,tname=Typename}}) -> - io:format("asn1error:~p:~p:~p~n~p~n",[asn1ct:get_pos_of_def(Type),Mname,Typename,Msg]), - {error,{Other,asn1ct:get_pos_of_def(Type),Mname,Typename,Msg}}. + [io_lib:format("fields '&~s'", [H])| + [io_lib:format(", '&~s'", [F]) || F <- T]]. + +format_elements([H1,H2|T]) -> + [io_lib:format("~p, ", [H1])|format_elements([H2|T])]; +format_elements([H]) -> + io_lib:format("~p", [H]). include_default_type(Module) -> NameAbsList = default_type_list(), @@ -6953,62 +6039,62 @@ default_type_list() -> ]. -include_default_class(S,Module) -> - NameAbsList = default_class_list(S), - include_default_class1(Module,NameAbsList). +include_default_class(S, Module) -> + _ = [include_default_class1(S, Module, ClassDef) || + ClassDef <- default_class_list()], + ok. 
-include_default_class1(_,[]) -> - ok; -include_default_class1(Module,[{Name,TS}|Rest]) -> - case asn1_db:dbget(Module,Name) of +include_default_class1(S, Module, {Name,Ts0}) -> + case asn1_db:dbget(Module, Name) of undefined -> - C = #classdef{checked=true,module=Module,name=Name, - typespec=TS}, - asn1_db:dbput(Module,Name,C); - _ -> ok - end, - include_default_class1(Module,Rest). + #objectclass{fields=Fields, + syntax={'WITH SYNTAX',Syntax0}} = Ts0, + Syntax = preprocess_syntax(S, Syntax0, Fields), + Ts = Ts0#objectclass{syntax={preprocessed_syntax,Syntax}}, + C = #classdef{checked=true,module=Module, + name=Name,typespec=Ts}, + asn1_db:dbput(Module, Name, C); + _ -> + ok + end. -default_class_list(S) -> +default_class_list() -> [{'TYPE-IDENTIFIER', - {objectclass, - [{fixedtypevaluefield, - id, - #type{tag=?TAG_PRIMITIVE(?N_OBJECT_IDENTIFIER), - def='OBJECT IDENTIFIER'}, - 'UNIQUE', - 'MANDATORY'}, - {typefield,'Type','MANDATORY'}], - {'WITH SYNTAX', - [{typefieldreference,'Type'}, - 'IDENTIFIED', - 'BY', - {valuefieldreference,id}]}}}, + #objectclass{fields=[{fixedtypevaluefield, + id, + #type{tag=[?TAG_PRIMITIVE(?N_OBJECT_IDENTIFIER)], + def='OBJECT IDENTIFIER'}, + 'UNIQUE', + 'MANDATORY'}, + {typefield,'Type','MANDATORY'}], + syntax={'WITH SYNTAX', + [{typefieldreference,'Type'}, + 'IDENTIFIED', + 'BY', + {valuefieldreference,id}]}}}, {'ABSTRACT-SYNTAX', - {objectclass, - [{fixedtypevaluefield, - id, - #type{tag=?TAG_PRIMITIVE(?N_OBJECT_IDENTIFIER), - def='OBJECT IDENTIFIER'}, - 'UNIQUE', - 'MANDATORY'}, - {typefield,'Type','MANDATORY'}, - {fixedtypevaluefield, - property, - #type{tag=?TAG_PRIMITIVE(?N_BIT_STRING), - def={'BIT STRING',[]}}, - undefined, - {'DEFAULT', - [0,1,0]}}], - {'WITH SYNTAX', - [{typefieldreference,'Type'}, - 'IDENTIFIED', - 'BY', - {valuefieldreference,id}, - ['HAS', - 'PROPERTY', - {valuefieldreference,property}]]}}}]. - + #objectclass{fields=[{fixedtypevaluefield, + id, + #type{tag=[?TAG_PRIMITIVE(?N_OBJECT_IDENTIFIER)], + def='OBJECT IDENTIFIER'}, + 'UNIQUE', + 'MANDATORY'}, + {typefield,'Type','MANDATORY'}, + {fixedtypevaluefield, + property, + #type{tag=[?TAG_PRIMITIVE(?N_BIT_STRING)], + def={'BIT STRING',[]}}, + undefined, + {'DEFAULT', + [0,1,0]}}], + syntax={'WITH SYNTAX', + [{typefieldreference,'Type'}, + 'IDENTIFIED', + 'BY', + {valuefieldreference,id}, + ['HAS', + 'PROPERTY', + {valuefieldreference,property}]]}}}]. new_reference_name(Name) -> case get(asn1_reference) of @@ -7037,8 +6123,9 @@ insert_once(S,Tab,Key) -> skipped end. -check_fold(S, [H|T], Check) -> - Type = asn1_db:dbget(S#state.mname, H), +check_fold(S0, [H|T], Check) -> + Type = asn1_db:dbget(S0#state.mname, H), + S = S0#state{error_context=Type}, case Check(S, H, Type) of ok -> check_fold(S, T, Check); @@ -7047,5 +6134,19 @@ check_fold(S, [H|T], Check) -> end; check_fold(_, [], Check) when is_function(Check, 3) -> []. +error_value(Value) when is_integer(Value) -> Value; +error_value(Value) when is_atom(Value) -> Value; +error_value(#type{def=Value}) when is_atom(Value) -> Value; +error_value(#type{def=Value}) -> error_value(Value); +error_value(RefOrType) -> + try name_of_def(RefOrType) of + Name -> Name + catch _:_ -> + case get_datastr_name(RefOrType) of + undefined -> RefOrType; + Name -> Name + end + end. + name_of_def(#'Externaltypereference'{type=N}) -> N; name_of_def(#'Externalvaluereference'{value=N}) -> N. 
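Several of the new checks in this file (check_duplicate_defs, check_unique_tags) reduce duplicate detection to the same pattern: build a sofs relation from {Key,Value} pairs, convert it to a family, and report every key that maps to more than one element. Below is a minimal standalone sketch of that pattern; it is illustrative only and not part of the patch.

-module(dup_sketch).
-export([duplicates/1]).

%% Group a list of {Key,Value} pairs by Key and report the keys that
%% occur more than once, the way the sofs-based checks above do.
duplicates(Pairs) ->
    R0 = sofs:relation(Pairs),
    R1 = sofs:relation_to_family(R0),
    %% Each external entry is {Key,[Value,...]}; two or more values means
    %% the key was defined (or tagged) more than once.
    [{Key,Vals} || {Key,[_,_|_]=Vals} <- sofs:to_external(R1)].

For example, dup_sketch:duplicates([{a,1},{b,2},{a,3}]) returns [{a,[1,3]}].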
diff --git a/lib/asn1/src/asn1ct_constructed_per.erl b/lib/asn1/src/asn1ct_constructed_per.erl index a91404ed54..0bc6688a49 100644 --- a/lib/asn1/src/asn1ct_constructed_per.erl +++ b/lib/asn1/src/asn1ct_constructed_per.erl @@ -410,12 +410,11 @@ gen_dec_open_type(Erule, Val, {Xmod,Xtype}, LeadingAttr, #classdef{typespec=ClassDef} = asn1_db:dbget(ClMod, ClType), #objectclass{fields=ClassFields} = ClassDef, Extensible = lists:member('EXTENSIONMARK', ObjSet1), - ObjSet2 = [{Key,fix_object_code(Name, Code, ClassFields)} || - {_,Key,Code} <- ObjSet1], - ObjSet = lists:sort([P || {_,B}=P <- ObjSet2, B =/= none]), + Typename = [Name,ClType], + ObjSet = index_object_set(Erule, ClType, Name, + ObjSet1, ClassFields), Key = erlang:md5(term_to_binary({decode,ObjSet,RestFieldNames, Prop,Extensible})), - Typename = [Name,ClType], Gen = fun(_Fd, N) -> dec_objset_optional(N, Prop), dec_objset(Erule, N, ObjSet, RestFieldNames, Typename), @@ -467,46 +466,15 @@ dec_objset_2(Erule, Obj, RestFields0, Typename) -> Imm = asn1ct_gen_per:gen_dec_imm(Erule, Type), {Term,_} = asn1ct_imm:dec_slim_cg(Imm, 'Bytes'), emit([com,nl,Term]); - #typedef{name={constructed,bif},typespec=Def} -> - InnerType = asn1ct_gen:get_inner(Def#type.def), - case InnerType of - 'CHOICE' -> - asn1ct_name:start(), - asn1ct_name:new(bytes), - {'CHOICE',CompList} = Def#type.def, - Ext = extensible_enc(CompList), - emit(["{Result,_} = begin",nl]), - gen_dec_choice(Erule, Typename, CompList, Ext), - emit([nl, - "end",com,nl, - "Result"]); - 'SET' -> - Imm0 = gen_dec_constructed_imm(Erule, Typename, Def), - Imm = opt_imm(Imm0), - asn1ct_name:start(), - emit(["{Result,_} = begin",nl]), - emit_gen_dec_imm(Imm), - emit([nl, - "end",com,nl, - "Result"]); - 'SET OF' -> - asn1ct_name:start(), - do_gen_decode_sof(Erule, Typename, 'SET OF', - Def, false); - 'SEQUENCE' -> - Imm0 = gen_dec_constructed_imm(Erule, Typename, Def), - Imm = opt_imm(Imm0), - asn1ct_name:start(), - emit(["{Result,_} = begin",nl]), - emit_gen_dec_imm(Imm), - emit([nl, - "end",com,nl, - "Result"]); - 'SEQUENCE OF' -> - asn1ct_name:start(), - do_gen_decode_sof(Erule, Typename, 'SEQUENCE OF', - Def, false) - end; + #typedef{name={constructed,bif},typespec=Type}=Def -> + Prefix = "dec_outlined_", + Key = {dec_outlined,Def}, + Gen = fun(_Fd, Name) -> + gen_dec_obj(Erule, Name, Typename, Type) + end, + Func = asn1ct_func:call_gen(Prefix, Key, Gen), + emit(["{Term,_} = ",{asis,Func},"(Bytes)",com,nl, + "Term"]); #typedef{name=Type} -> emit(["{Result,_} = ",{asis,enc_func("dec_", Type)},"(Bytes),",nl, "Result"]); @@ -531,6 +499,12 @@ dec_objset_2(Erule, Obj, RestFields0, Typename) -> end end. +gen_dec_obj(Erules, Name, Typename, Type) -> + emit([{asis,Name},"(Bytes) ->",nl]), + InnerType = asn1ct_gen:get_inner(Type#type.def), + asn1ct_gen:gen_decode_constructed(Erules, Typename, + InnerType, Type). + gen_encode_choice(Erule, TopType, D) -> asn1ct_name:start(), Imm = gen_encode_choice_imm(Erule, TopType, D), @@ -595,10 +569,10 @@ gen_encode_sof_imm(Erule, Typename, SeqOrSetOf, #type{}=D) -> gen_decode_sof(Erules, Typename, SeqOrSetOf, #type{}=D) -> asn1ct_name:start(), - do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D, true), + do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D), emit([".",nl,nl]). 
-do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D, NeedRest) -> +do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D) -> {_SeqOrSetOf,ComponentType} = D#type.def, SizeConstraint = asn1ct_imm:effective_constraint(bitstring, D#type.constraint), @@ -610,12 +584,11 @@ do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D, NeedRest) -> "" end, {Num,Buf} = gen_decode_length(SizeConstraint, Erules), - Key = erlang:md5(term_to_binary({Typename,SeqOrSetOf, - ComponentType,NeedRest})), + Key = erlang:md5(term_to_binary({Typename,SeqOrSetOf,ComponentType})), Gen = fun(_Fd, Name) -> gen_decode_sof_components(Erules, Name, Typename, SeqOrSetOf, - ComponentType, NeedRest) + ComponentType) end, F = asn1ct_func:call_gen("dec_components", Key, Gen), emit([",",nl, @@ -629,7 +602,7 @@ gen_decode_length(Constraint, Erule) -> Imm = asn1ct_imm:per_dec_length(Constraint, true, is_aligned(Erule)), asn1ct_imm:dec_slim_cg(Imm, "Bytes"). -gen_decode_sof_components(Erule, Name, Typename, SeqOrSetOf, Cont, NeedRest) -> +gen_decode_sof_components(Erule, Name, Typename, SeqOrSetOf, Cont) -> {ObjFun,ObjFun_Var} = case Cont#type.tablecinf of [{objfun,_}|_R] -> @@ -637,14 +610,8 @@ gen_decode_sof_components(Erule, Name, Typename, SeqOrSetOf, Cont, NeedRest) -> _ -> {"",""} end, - case NeedRest of - false -> - emit([{asis,Name},"(0, _Bytes",ObjFun_Var,", Acc) ->",nl, - "lists:reverse(Acc);",nl]); - true -> - emit([{asis,Name},"(0, Bytes",ObjFun_Var,", Acc) ->",nl, - "{lists:reverse(Acc),Bytes};",nl]) - end, + emit([{asis,Name},"(0, Bytes",ObjFun_Var,", Acc) ->",nl, + "{lists:reverse(Acc),Bytes};",nl]), emit([{asis,Name},"(Num, Bytes",ObjFun,", Acc) ->",nl, "{Term,Remain} = "]), Constructed_Suffix = asn1ct_gen:constructed_suffix(SeqOrSetOf, @@ -1024,11 +991,12 @@ enc_var_type_call(Erule, Name, RestFieldNames, #classdef{typespec=ClassDef} = asn1_db:dbget(ClMod, ClType), #objectclass{fields=ClassFields} = ClassDef, Extensible = lists:member('EXTENSIONMARK', ObjSet1), - ObjSet2 = [{Key,fix_object_code(Name, Code, ClassFields)} || - {_,Key,Code} <- ObjSet1], - ObjSet = lists:sort([P || {_,B}=P <- ObjSet2, B =/= none]), + ObjSet = index_object_set(Erule, ClType, Name, + ObjSet1, ClassFields), Key = erlang:md5(term_to_binary({encode,ObjSet,RestFieldNames,Extensible})), - Imm = enc_objset_imm(Erule, Name, ObjSet, RestFieldNames, Extensible), + TypeName = [ClType,Name], + Imm = enc_objset_imm(Erule, TypeName, Name, ObjSet, + RestFieldNames, Extensible), Lambda = {lambda,[{var,"Val"},{var,"Id"}],Imm}, Gen = fun(_Fd, N) -> Aligned = is_aligned(Erule), @@ -1039,11 +1007,27 @@ enc_var_type_call(Erule, Name, RestFieldNames, Prefix = lists:concat(["enc_os_",Name]), [{call_gen,Prefix,Key,Gen,Lambda,[Val,Fun]}]. -fix_object_code(Name, [{Name,B}|_], _ClassFields) -> - B; -fix_object_code(Name, [_|T], ClassFields) -> - fix_object_code(Name, T, ClassFields); -fix_object_code(Name, [], ClassFields) -> +index_object_set(_Erules, _ClType, Name, Set0, ClassFields) -> + Set = index_object_set_1(Name, Set0, ClassFields), + lists:sort(Set). + +index_object_set_1(Name, [{_,Key,Code}|T], ClassFields) -> + case index_object_set_2(Name, Code, ClassFields) of + none -> + index_object_set_1(Name, T, ClassFields); + Type -> + [{Key,Type}|index_object_set_1(Name, T, ClassFields)] + end; +index_object_set_1(Name, [_|T], ClassFields) -> + index_object_set_1(Name, T, ClassFields); +index_object_set_1(_, [], _) -> + []. 
+ +index_object_set_2(Name, [{Name,Type}|_], _ClassFields) -> + Type; +index_object_set_2(Name, [_|T], ClassFields) -> + index_object_set_2(Name, T, ClassFields); +index_object_set_2(Name, [], ClassFields) -> case lists:keyfind(Name, 2, ClassFields) of {typefield,Name,'OPTIONAL'} -> none; @@ -1059,7 +1043,8 @@ fix_object_code(Name, [], ClassFields) -> end end. -enc_objset_imm(Erule, Component, ObjSet, RestFieldNames, Extensible) -> +enc_objset_imm(Erule, TypeName, Component, ObjSet, + RestFieldNames, Extensible) -> Aligned = is_aligned(Erule), E = {error, fun() -> @@ -1070,7 +1055,7 @@ enc_objset_imm(Erule, Component, ObjSet, RestFieldNames, Extensible) -> end}, [{'cond', [[{eq,{var,"Id"},Key}| - enc_obj(Erule, Obj, RestFieldNames, Aligned)] || + enc_obj(Erule, Obj, TypeName, RestFieldNames, Aligned)] || {Key,Obj} <- ObjSet] ++ [['_',case Extensible of false -> @@ -1086,24 +1071,18 @@ enc_objset_imm(Erule, Component, ObjSet, RestFieldNames, Extensible) -> end end]]}]. -enc_obj(Erule, Obj, RestFieldNames0, Aligned) -> +enc_obj(Erule, Obj, TypeName, RestFieldNames0, Aligned) -> + Val = {var,"Val"}, case Obj of + #typedef{name={constructed,bif},typespec=Type}=Def -> + Prefix = "enc_outlined_", + Key = {enc_outlined,Def}, + Gen = fun(_Fd, Name) -> + gen_enc_obj(Erule, Name, TypeName, Type) + end, + [{call_gen,Prefix,Key,Gen,undefined,[Val]}]; #typedef{name={primitive,bif},typespec=Def} -> asn1ct_gen_per:gen_encode_prim_imm({var,"Val"}, Def, Aligned); - #typedef{name={constructed,bif},typespec=Def} -> - InnerType = asn1ct_gen:get_inner(Def#type.def), - case InnerType of - 'CHOICE' -> - gen_encode_choice_imm(Erule, name, Def); - 'SET' -> - gen_encode_constructed_imm(Erule, name, Def); - 'SET OF' -> - gen_encode_sof_imm(Erule, name, InnerType, Def); - 'SEQUENCE' -> - gen_encode_constructed_imm(Erule, name, Def); - 'SEQUENCE OF' -> - gen_encode_sof_imm(Erule, name, InnerType, Def) - end; #typedef{name=Type} -> [{apply,{local,enc_func(Type),Type},[{var,"Val"}]}]; #'Externalvaluereference'{module=Mod,value=Value} -> @@ -1112,7 +1091,8 @@ enc_obj(Erule, Obj, RestFieldNames0, Aligned) -> {object,_,Fields} = Def, [NextField|RestFieldNames] = RestFieldNames0, {NextField,Typedef} = lists:keyfind(NextField, 1, Fields), - enc_obj(Erule, Typedef, RestFieldNames, Aligned) + enc_obj(Erule, Typedef, TypeName, + RestFieldNames, Aligned) end; #'Externaltypereference'{module=Mod,type=Type} -> Func = enc_func(Type), @@ -1124,6 +1104,11 @@ enc_obj(Erule, Obj, RestFieldNames0, Aligned) -> end end. +gen_enc_obj(Erules, Name, Typename, Type) -> + emit([{asis,Name},"(Val) ->",nl]), + InnerType = asn1ct_gen:get_inner(Type#type.def), + asn1ct_gen:gen_encode_constructed(Erules, Typename, + InnerType, Type). gen_dec_components_call(Erule, TopType, {Root,ExtList}, DecInfObj, Ext, NumberOfOptionals) -> diff --git a/lib/asn1/src/asn1ct_gen.erl b/lib/asn1/src/asn1ct_gen.erl index 2ef8466309..0e41aa1a7a 100644 --- a/lib/asn1/src/asn1ct_gen.erl +++ b/lib/asn1/src/asn1ct_gen.erl @@ -531,34 +531,30 @@ gen_part_decode_funcs({primitive,bif},_TypeName, gen_part_decode_funcs(WhatKind,_TypeName,{_,Directive,_,_}) -> throw({error,{asn1,{"Not implemented yet",WhatKind," partial incomplete directive:",Directive}}}). 
- -gen_types(Erules,Tname,{RootL1,ExtList,RootL2}) +%% EncDec = 'gen_encode' | 'gen_decode' +gen_types(Erules, Tname, {RootL1,ExtList,RootL2}, EncDec) when is_list(RootL1), is_list(RootL2) -> - gen_types(Erules,Tname,RootL1), - Rtmod = ct_gen_module(Erules), - gen_types(Erules,Tname,Rtmod:extaddgroup2sequence(ExtList)), - gen_types(Erules,Tname,RootL2); -gen_types(Erules,Tname,{RootList,ExtList}) when is_list(RootList) -> - gen_types(Erules,Tname,RootList), + gen_types(Erules, Tname, RootL1, EncDec), Rtmod = ct_gen_module(Erules), - gen_types(Erules,Tname,Rtmod:extaddgroup2sequence(ExtList)); -gen_types(Erules,Tname,[{'EXTENSIONMARK',_,_}|Rest]) -> - gen_types(Erules,Tname,Rest); -gen_types(Erules,Tname,[ComponentType|Rest]) -> + gen_types(Erules, Tname, Rtmod:extaddgroup2sequence(ExtList), EncDec), + gen_types(Erules, Tname, RootL2, EncDec); +gen_types(Erules, Tname, {RootList,ExtList}, EncDec) when is_list(RootList) -> + gen_types(Erules, Tname, RootList, EncDec), Rtmod = ct_gen_module(Erules), + gen_types(Erules, Tname, Rtmod:extaddgroup2sequence(ExtList), EncDec); +gen_types(Erules, Tname, [{'EXTENSIONMARK',_,_}|T], EncDec) -> + gen_types(Erules, Tname, T, EncDec); +gen_types(Erules, Tname, [ComponentType|T], EncDec) -> asn1ct_name:clear(), - Rtmod:gen_encode(Erules,Tname,ComponentType), - asn1ct_name:clear(), - Rtmod:gen_decode(Erules,Tname,ComponentType), - gen_types(Erules,Tname,Rest); -gen_types(_,_,[]) -> - true; -gen_types(Erules,Tname,Type) when is_record(Type,type) -> Rtmod = ct_gen_module(Erules), + Rtmod:EncDec(Erules, Tname, ComponentType), + gen_types(Erules, Tname, T, EncDec); +gen_types(_, _, [], _) -> + ok; +gen_types(Erules, Tname, #type{}=Type, EncDec) -> asn1ct_name:clear(), - Rtmod:gen_encode(Erules,Tname,Type), - asn1ct_name:clear(), - Rtmod:gen_decode(Erules,Tname,Type). + Rtmod = ct_gen_module(Erules), + Rtmod:EncDec(Erules, Tname, Type). 
%% VARIOUS GENERATOR STUFF %% ************************************************* @@ -599,25 +595,25 @@ gen_encode_constructed(Erules,Typename,InnerType,D) when is_record(D,type) -> 'SET' -> Rtmod:gen_encode_set(Erules,Typename,D), #'SET'{components=Components} = D#type.def, - gen_types(Erules,Typename,Components); + gen_types(Erules, Typename, Components, gen_encode); 'SEQUENCE' -> Rtmod:gen_encode_sequence(Erules,Typename,D), #'SEQUENCE'{components=Components} = D#type.def, - gen_types(Erules,Typename,Components); + gen_types(Erules, Typename, Components, gen_encode); 'CHOICE' -> Rtmod:gen_encode_choice(Erules,Typename,D), {_,Components} = D#type.def, - gen_types(Erules,Typename,Components); + gen_types(Erules, Typename, Components, gen_encode); 'SEQUENCE OF' -> Rtmod:gen_encode_sof(Erules,Typename,InnerType,D), {_,Type} = D#type.def, NameSuffix = asn1ct_gen:constructed_suffix(InnerType,Type#type.def), - gen_types(Erules,[NameSuffix|Typename],Type); + gen_types(Erules, [NameSuffix|Typename], Type, gen_encode); 'SET OF' -> Rtmod:gen_encode_sof(Erules,Typename,InnerType,D), {_,Type} = D#type.def, NameSuffix = asn1ct_gen:constructed_suffix(InnerType,Type#type.def), - gen_types(Erules,[NameSuffix|Typename],Type); + gen_types(Erules, [NameSuffix|Typename], Type, gen_encode); _ -> exit({nyi,InnerType}) end; @@ -630,20 +626,29 @@ gen_decode_constructed(Erules,Typename,InnerType,D) when is_record(D,type) -> asn1ct:step_in_constructed(), %% updates namelist for exclusive decode case InnerType of 'SET' -> - Rtmod:gen_decode_set(Erules,Typename,D); + Rtmod:gen_decode_set(Erules,Typename,D), + #'SET'{components=Components} = D#type.def, + gen_types(Erules, Typename, Components, gen_decode); 'SEQUENCE' -> - Rtmod:gen_decode_sequence(Erules,Typename,D); + Rtmod:gen_decode_sequence(Erules,Typename,D), + #'SEQUENCE'{components=Components} = D#type.def, + gen_types(Erules, Typename, Components, gen_decode); 'CHOICE' -> - Rtmod:gen_decode_choice(Erules,Typename,D); + Rtmod:gen_decode_choice(Erules,Typename,D), + {_,Components} = D#type.def, + gen_types(Erules, Typename, Components, gen_decode); 'SEQUENCE OF' -> - Rtmod:gen_decode_sof(Erules,Typename,InnerType,D); + Rtmod:gen_decode_sof(Erules,Typename,InnerType,D), + {_,#type{def=Def}=Type} = D#type.def, + NameSuffix = asn1ct_gen:constructed_suffix(InnerType, Def), + gen_types(Erules, [NameSuffix|Typename], Type, gen_decode); 'SET OF' -> - Rtmod:gen_decode_sof(Erules,Typename,InnerType,D); - _ -> - exit({nyi,InnerType}) + Rtmod:gen_decode_sof(Erules,Typename,InnerType,D), + {_,#type{def=Def}=Type} = D#type.def, + NameSuffix = asn1ct_gen:constructed_suffix(InnerType, Def), + gen_types(Erules, [NameSuffix|Typename], Type, gen_decode) end; - gen_decode_constructed(Erules,Typename,InnerType,D) when is_record(D,typedef) -> gen_decode_constructed(Erules,Typename,InnerType,D#typedef.typespec). diff --git a/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl b/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl index e51b0898be..37413298a7 100644 --- a/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl +++ b/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2002-2013. All Rights Reserved. +%% Copyright Ericsson AB 2002-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -24,7 +24,7 @@ -include("asn1_records.hrl"). --export([decode_class/1, decode_type/1]). +-export([decode_class/1]). 
-export([gen_encode/2,gen_encode/3,gen_decode/2,gen_decode/3]). -export([gen_encode_prim/4]). -export([gen_dec_prim/3]). @@ -278,8 +278,7 @@ emit_enc_enumerated_cases(L, Tags) -> emit_enc_enumerated_cases(L, Tags, noext). emit_enc_enumerated_cases([{EnumName,EnumVal}|T], Tags, Ext) -> - Bytes = encode_pos_integer(EnumVal, []), - Len = length(Bytes), + {Bytes,Len} = encode_integer(EnumVal), emit([{asis,EnumName}," -> ", {call,ber,encode_tags,[Tags,{asis,Bytes},Len]},";",nl]), emit_enc_enumerated_cases(T, Tags, Ext); @@ -288,10 +287,25 @@ emit_enc_enumerated_cases([], _Tags, _Ext) -> emit([{curr,enumval}," -> exit({error,{asn1, {enumerated_not_in_range,",{curr, enumval},"}}})"]), emit([nl,"end"]). -encode_pos_integer(0, [B|_Acc] = L) when B < 128 -> +encode_integer(Val) -> + Bytes = + if + Val >= 0 -> + encode_integer_pos(Val, []); + true -> + encode_integer_neg(Val, []) + end, + {Bytes,length(Bytes)}. + +encode_integer_pos(0, [B|_Acc]=L) when B < 128 -> L; -encode_pos_integer(N, Acc) -> - encode_pos_integer(N bsr 8, [N band 255|Acc]). +encode_integer_pos(N, Acc) -> + encode_integer_pos((N bsr 8), [N band 16#ff| Acc]). + +encode_integer_neg(-1, [B1|_T]=L) when B1 > 127 -> + L; +encode_integer_neg(N, Acc) -> + encode_integer_neg(N bsr 8, [N band 16#ff|Acc]). %%=============================================================================== %%=============================================================================== @@ -1179,23 +1193,25 @@ gen_objset_enc(_,_,{unique,undefined},_,_,_,_,_) -> gen_objset_enc(Erules, ObjSetName, UniqueName, [{ObjName,Val,Fields}|T], ClName, ClFields, NthObj,Acc)-> - emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl]), CurrMod = get(currmod), {InternalFunc,NewNthObj}= case ObjName of {no_mod,no_name} -> - gen_inlined_enc_funs(Fields,ClFields,ObjSetName,NthObj); + gen_inlined_enc_funs(Fields, ClFields, ObjSetName, Val, NthObj); {CurrMod,Name} -> - emit({" fun 'enc_",Name,"'/3"}), + emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl, + " fun 'enc_",Name,"'/3;",nl]), {[],NthObj}; {ModuleName,Name} -> + emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl]), emit_ext_fun(enc,ModuleName,Name), + emit([";",nl]), {[],NthObj}; _ -> - emit({" fun 'enc_",ObjName,"'/3"}), + emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl, + " fun 'enc_",ObjName,"'/3;",nl]), {[],NthObj} end, - emit({";",nl}), gen_objset_enc(Erules, ObjSetName, UniqueName, T, ClName, ClFields, NewNthObj, InternalFunc ++ Acc); %% See X.681 Annex E for the following case @@ -1223,13 +1239,14 @@ emit_default_getenc(ObjSetName,UniqueName) -> %% gen_inlined_enc_funs for each object iterates over all fields of a %% class, and for each typefield it checks if the object has that %% field and emits the proper code. -gen_inlined_enc_funs(Fields, [{typefield,_,_}|_]=T, ObjSetName, NthObj) -> - emit([indent(3),"fun(Type, Val, _RestPrimFieldName) ->",nl, +gen_inlined_enc_funs(Fields, [{typefield,_,_}|_]=T, ObjSetName, Val, NthObj) -> + emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl, + indent(3),"fun(Type, Val, _RestPrimFieldName) ->",nl, indent(6),"case Type of",nl]), gen_inlined_enc_funs1(Fields, T, ObjSetName, [], NthObj, []); -gen_inlined_enc_funs(Fields,[_|Rest],ObjSetName,NthObj) -> - gen_inlined_enc_funs(Fields,Rest,ObjSetName,NthObj); -gen_inlined_enc_funs(_,[],_,NthObj) -> +gen_inlined_enc_funs(Fields, [_|Rest], ObjSetName, Val, NthObj) -> + gen_inlined_enc_funs(Fields, Rest, ObjSetName, Val, NthObj); +gen_inlined_enc_funs(_, [], _, _, NthObj) -> {[],NthObj}. 
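%% The encode_integer/1 helper introduced above turns an ENUMERATED value into
%% the shortest big-endian two's-complement byte list plus its length, now
%% covering negative values as well. A stand-alone sketch with the same logic;
%% enc_pos/enc_neg are names used only in this sketch, and the clauses are
%% meant to be pasted into a scratch module to try out.
encode_integer_sketch(Val) when is_integer(Val) ->
    Bytes = if
                Val >= 0 -> enc_pos(Val, []);
                true -> enc_neg(Val, [])
            end,
    {Bytes,length(Bytes)}.

%% Stop once the leading byte no longer needs a sign-extension octet.
enc_pos(0, [B|_]=L) when B < 128 -> L;
enc_pos(N, Acc) -> enc_pos(N bsr 8, [N band 16#ff|Acc]).

enc_neg(-1, [B|_]=L) when B > 127 -> L;
enc_neg(N, Acc) -> enc_neg(N bsr 8, [N band 16#ff|Acc]).

%% encode_integer_sketch(300) gives {[1,44],2}; encode_integer_sketch(-129)
%% gives {[255,127],2}, i.e. 16#FF7F read as a signed 16-bit value.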
gen_inlined_enc_funs1(Fields, [{typefield,Name,_}|Rest], ObjSetName, @@ -1276,7 +1293,7 @@ gen_inlined_enc_funs1(Fields,[_|Rest], ObjSetName, Sep, NthObj, Acc)-> gen_inlined_enc_funs1(Fields, Rest, ObjSetName, Sep, NthObj, Acc); gen_inlined_enc_funs1(_, [], _, _, NthObj, Acc) -> emit([nl,indent(6),"end",nl, - indent(3),"end"]), + indent(3),"end;",nl]), {Acc,NthObj}. emit_enc_open_type(I) -> @@ -1358,23 +1375,25 @@ gen_objset_dec(_,_,{unique,undefined},_,_,_,_) -> ok; gen_objset_dec(Erules, ObjSName, UniqueName, [{ObjName,Val,Fields}|T], ClName, ClFields, NthObj)-> - emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl]), CurrMod = get(currmod), NewNthObj= case ObjName of {no_mod,no_name} -> - gen_inlined_dec_funs(Fields,ClFields,ObjSName,NthObj); + gen_inlined_dec_funs(Fields,ClFields,ObjSName,Val,NthObj); {CurrMod,Name} -> - emit([" fun 'dec_",Name,"'/3"]), + emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl, + " fun 'dec_",Name,"'/3;", nl]), NthObj; {ModuleName,Name} -> + emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl]), emit_ext_fun(dec,ModuleName,Name), + emit([";",nl]), NthObj; _ -> - emit([" fun 'dec_",ObjName,"'/3"]), + emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl, + " fun 'dec_",ObjName,"'/3;", nl]), NthObj end, - emit([";",nl]), gen_objset_dec(Erules, ObjSName, UniqueName, T, ClName, ClFields, NewNthObj); gen_objset_dec(_,ObjSetName,_UniqueName,['EXTENSIONMARK'],_ClName, @@ -1394,10 +1413,15 @@ emit_default_getdec(ObjSetName,UniqueName) -> emit(["'getdec_",ObjSetName,"'(ErrV) ->",nl]), emit([indent(2), "fun(C,V,_) -> exit({{component,C},{value,V},{unique_name_and_value,",{asis,UniqueName},", ErrV}}) end"]). -gen_inlined_dec_funs(Fields, ClFields, ObjSetName, NthObj) -> +gen_inlined_dec_funs(Fields, [{typefield,_,_}|_]=ClFields, ObjSetName, Val, NthObj) -> + emit(["'getdec_",ObjSetName,"'(",{asis,Val},") ->",nl]), emit([indent(3),"fun(Type, Bytes, _RestPrimFieldName) ->",nl, indent(6),"case Type of",nl]), - gen_inlined_dec_funs1(Fields, ClFields, ObjSetName, "", NthObj). + gen_inlined_dec_funs1(Fields, ClFields, ObjSetName, "", NthObj); +gen_inlined_dec_funs(Fields, [_|ClFields], ObjSetName, Val, NthObj) -> + gen_inlined_dec_funs(Fields, ClFields, ObjSetName, Val, NthObj); +gen_inlined_dec_funs(_, _, _, _,NthObj) -> + NthObj. gen_inlined_dec_funs1(Fields, [{typefield,Name,Prop}|Rest], ObjSetName, Sep0, NthObj) -> @@ -1439,7 +1463,7 @@ gen_inlined_dec_funs1(Fields, [_|Rest], ObjSetName, Sep, NthObj)-> gen_inlined_dec_funs1(Fields, Rest, ObjSetName, Sep, NthObj); gen_inlined_dec_funs1(_, [], _, _, NthObj) -> emit([nl,indent(6),"end",nl, - indent(3),"end"]), + indent(3),"end;",nl]), NthObj. emit_dec_open_type(I) -> @@ -1534,39 +1558,6 @@ decode_class('CONTEXT') -> decode_class('PRIVATE') -> ?PRIVATE. 
-decode_type('BOOLEAN') -> 1; -decode_type('INTEGER') -> 2; -decode_type('BIT STRING') -> 3; -decode_type('OCTET STRING') -> 4; -decode_type('NULL') -> 5; -decode_type('OBJECT IDENTIFIER') -> 6; -decode_type('ObjectDescriptor') -> 7; -decode_type('EXTERNAL') -> 8; -decode_type('REAL') -> 9; -decode_type('ENUMERATED') -> 10; -decode_type('EMBEDDED_PDV') -> 11; -decode_type('UTF8String') -> 12; -decode_type('RELATIVE-OID') -> 13; -decode_type('SEQUENCE') -> 16; -decode_type('SEQUENCE OF') -> 16; -decode_type('SET') -> 17; -decode_type('SET OF') -> 17; -decode_type('NumericString') -> 18; -decode_type('PrintableString') -> 19; -decode_type('TeletexString') -> 20; -decode_type('T61String') -> 20; -decode_type('VideotexString') -> 21; -decode_type('IA5String') -> 22; -decode_type('UTCTime') -> 23; -decode_type('GeneralizedTime') -> 24; -decode_type('GraphicString') -> 25; -decode_type('VisibleString') -> 26; -decode_type('GeneralString') -> 27; -decode_type('UniversalString') -> 28; -decode_type('BMPString') -> 30; -decode_type('CHOICE') -> 'CHOICE'; % choice gets the tag from the actual alternative -decode_type(Else) -> exit({error,{asn1,{unrecognized_type,Else}}}). - mkfuncname(#'Externaltypereference'{module=Mod,type=EType}, DecOrEnc) -> CurrMod = get(currmod), case CurrMod of diff --git a/lib/asn1/src/asn1ct_imm.erl b/lib/asn1/src/asn1ct_imm.erl index bdd14871d1..91820e08de 100644 --- a/lib/asn1/src/asn1ct_imm.erl +++ b/lib/asn1/src/asn1ct_imm.erl @@ -499,6 +499,8 @@ per_dec_enumerated_fix_list([], Tail, _) -> Tail. per_dec_integer_1([{'SingleValue',Value}], _Aligned) -> {value,Value}; +per_dec_integer_1([{'ValueRange',{'MIN',_}}], Aligned) -> + per_dec_unconstrained(Aligned); per_dec_integer_1([{'ValueRange',{Lb,'MAX'}}], Aligned) when is_integer(Lb) -> per_decode_semi_constrained(Lb, Aligned); per_dec_integer_1([{'ValueRange',{Lb,Ub}}], Aligned) when is_integer(Lb), @@ -1094,6 +1096,9 @@ per_enc_integer_1(Val0, [Constr], Aligned) -> per_enc_integer_2(Val, {'SingleValue',Sv}, Aligned) when is_integer(Sv) -> per_enc_constrained(Val, Sv, Sv, Aligned); +per_enc_integer_2(Val, {'ValueRange',{'MIN',Ub}}, Aligned) + when is_integer(Ub) -> + {[],{lt,Val,Ub+1},per_enc_unconstrained(Val, Aligned)}; per_enc_integer_2(Val0, {'ValueRange',{Lb,'MAX'}}, Aligned) when is_integer(Lb) -> {Prefix,Val} = sub_lb(Val0, Lb), @@ -1580,7 +1585,7 @@ do_combine_put_bits(_, _, _) -> throw(impossible). debit(Budget0, Alternatives) -> - case Budget0 - log2(Alternatives) of + case Budget0 - math:log2(Alternatives) of Budget when Budget > 0.0 -> Budget; _ -> @@ -1593,8 +1598,6 @@ num_clauses([_|T], N) -> num_clauses(T, N+1); num_clauses([], N) -> N. -log2(N) -> - math:log(N) / math:log(2.0). collect_put_bits(Imm) -> lists:splitwith(fun({put_bits,V,_,_}) when is_integer(V) -> true; diff --git a/lib/asn1/src/asn1ct_parser.yrl b/lib/asn1/src/asn1ct_parser.yrl deleted file mode 100644 index 083162f191..0000000000 --- a/lib/asn1/src/asn1ct_parser.yrl +++ /dev/null @@ -1,1177 +0,0 @@ -%%<copyright> -%% <year>1997-2008</year> -%% <holder>Ericsson AB, All Rights Reserved</holder> -%%</copyright> -%%<legalnotice> -%% The contents of this file are subject to the Erlang Public License, -%% Version 1.1, (the "License"); you may not use this file except in -%% compliance with the License. You should have received a copy of the -%% Erlang Public License along with this software. If not, it can be -%% retrieved online at http://www.erlang.org/. 
-%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and limitations -%% under the License. -%% -%% The Initial Developer of the Original Code is Ericsson AB. -%%</legalnotice>
-%%
-Nonterminals
-ModuleDefinition ModuleIdentifier DefinitiveIdentifier DefinitiveObjIdComponentList
-DefinitiveObjIdComponent TagDefault ExtensionDefault
-ModuleBody Exports SymbolsExported Imports SymbolsImported
-SymbolsFromModuleList SymbolsFromModule GlobalModuleReference AssignedIdentifier SymbolList
-Symbol Reference AssignmentList Assignment
-ExtensionAndException
-ComponentTypeLists
-Externaltypereference Externalvaluereference DefinedType DefinedValue
-AbsoluteReference ItemSpec ItemId ComponentId TypeAssignment
-ValueAssignment
-% ValueSetTypeAssignment
-ValueSet
-Type BuiltinType NamedType ReferencedType
-Value ValueNotNull BuiltinValue ReferencedValue NamedValue
-% BooleanType
-BooleanValue IntegerType NamedNumberList NamedNumber SignedNumber
-% inlined IntegerValue
-EnumeratedType
-% inlined Enumerations
-Enumeration EnumerationItem
-% inlined EnumeratedValue
-% RealType
-RealValue NumericRealValue SpecialRealValue BitStringType
-% inlined BitStringValue
-IdentifierList
-% OctetStringType
-% inlined OctetStringValue
-% NullType NullValue
-SequenceType ComponentTypeList ComponentType
-% SequenceValue SequenceOfValue
-ComponentValueList SequenceOfType
-SAndSOfValue ValueList SetType
-% SetValue SetOfValue
-SetOfType
-ChoiceType
-% AlternativeTypeList made common with ComponentTypeList
-ChoiceValue
-AnyValue
-AnyDefBy
-SelectionType
-TaggedType Tag ClassNumber Class
-% redundant TaggedValue
-% EmbeddedPDVType EmbeddedPDVValue ExternalType ExternalValue ObjectIdentifierType
-ObjectIdentifierValue ObjIdComponentList ObjIdComponent
-% NameForm NumberForm NameAndNumberForm
-CharacterStringType
-RestrictedCharacterStringValue CharacterStringList
-% CharSyms CharsDefn
-Quadruple
-% Group Plane Row Cell
-Tuple
-% TableColumn TableRow
-% UnrestrictedCharacterString
-CharacterStringValue
-% UnrestrictedCharacterStringValue
-ConstrainedType Constraint ConstraintSpec TypeWithConstraint
-ElementSetSpecs ElementSetSpec
-%GeneralConstraint
-UserDefinedConstraint UserDefinedConstraintParameter
-UserDefinedConstraintParameters
-ExceptionSpec
-ExceptionIdentification
-Unions
-UnionMark
-UElems
-Intersections
-IntersectionElements
-IntersectionMark
-IElems
-Elements
-Elems
-SubTypeElements
-Exclusions
-LowerEndpoint
-UpperEndpoint
-LowerEndValue
-UpperEndValue
-TypeConstraints NamedConstraint PresenceConstraint
-
-ParameterizedTypeAssignment
-ParameterList
-Parameters
-Parameter
-ParameterizedType
-
-% X.681
-ObjectClassAssignment ObjectClass ObjectClassDefn
-FieldSpecs FieldSpec OptionalitySpec WithSyntaxSpec
-TokenOrGroupSpecs TokenOrGroupSpec
-SyntaxList OptionalGroup RequiredToken Word
-TypeOptionalitySpec
-ValueOrObjectOptSpec
-VSetOrOSetOptSpec
-ValueOptionalitySpec
-ObjectOptionalitySpec
-ValueSetOptionalitySpec
-ObjectSetOptionalitySpec
-% X.681 chapter 15
-InformationFromObjects
-ValueFromObject
-%ValueSetFromObjects
-TypeFromObject
-%ObjectFromObject
-%ObjectSetFromObjects
-ReferencedObjects
-FieldName
-PrimitiveFieldName
-
-ObjectAssignment
-ObjectSetAssignment
-ObjectSet
-ObjectSetElements
-Object
-ObjectDefn
-DefaultSyntax
-DefinedSyntax
-FieldSettings
-FieldSetting
-DefinedSyntaxTokens
-DefinedSyntaxToken
-Setting
-DefinedObject
-ObjectFromObject
-ObjectSetFromObjects
-ParameterizedObject
-ExternalObjectReference
-DefinedObjectSet
-DefinedObjectClass
-ExternalObjectClassReference
-
-% X.682
-TableConstraint
-ComponentRelationConstraint
-ComponentIdList
-
-% X.683
-ActualParameter
-.
-
-%UsefulType.
-
-Terminals
-'ABSENT' 'ABSTRACT-SYNTAX' 'ALL' 'ANY'
-'APPLICATION' 'AUTOMATIC' 'BEGIN' 'BIT'
-'BOOLEAN' 'BY' 'CHARACTER' 'CHOICE' 'CLASS' 'COMPONENT'
-'COMPONENTS' 'CONSTRAINED' 'DEFAULT' 'DEFINED' 'DEFINITIONS'
-'EMBEDDED' 'END' 'ENUMERATED' 'EXCEPT' 'EXPLICIT'
-'EXPORTS' 'EXTENSIBILITY' 'EXTERNAL' 'FALSE' 'FROM' 'GeneralizedTime'
-'TYPE-IDENTIFIER'
-'IDENTIFIER' 'IMPLICIT' 'IMPLIED' 'IMPORTS'
-'INCLUDES' 'INSTANCE' 'INTEGER' 'INTERSECTION'
-'MAX' 'MIN' 'MINUS-INFINITY' 'NULL'
-'OBJECT' 'ObjectDescriptor' 'OCTET' 'OF' 'OPTIONAL' 'PDV' 'PLUS-INFINITY'
-'PRESENT' 'PRIVATE' 'REAL' 'SEQUENCE' 'SET' 'SIZE'
-'STRING' 'SYNTAX' 'TAGS' 'TRUE' 'UNION'
-'UNIQUE' 'UNIVERSAL' 'UTCTime' 'WITH'
-'{' '}' '(' ')' '.' '::=' ';' ',' '@' '*' '-' '[' ']'
-'!' '..' '...' '|' '<' ':' '^'
-number identifier typereference restrictedcharacterstringtype
-bstring hstring cstring typefieldreference valuefieldreference
-objectclassreference word.
-
-Rootsymbol ModuleDefinition.
-Endsymbol '$end'.
-
-Left 300 'EXCEPT'.
-Left 200 '^'.
-Left 200 'INTERSECTION'.
-Left 100 '|'.
-Left 100 'UNION'.
-
-
-ModuleDefinition -> ModuleIdentifier
- 'DEFINITIONS'
- TagDefault
- ExtensionDefault
- '::='
- 'BEGIN'
- ModuleBody
- 'END' :
- {'ModuleBody',Ex,Im,Types} = '$7',
- {{typereference,Pos,Name},Defid} = '$1',
- #module{
- pos= Pos,
- name= Name,
- defid= Defid,
- tagdefault='$3',
- extensiondefault='$4',
- exports=Ex,
- imports=Im,
- typeorval=Types}.
-% {module, '$1','$3','$6'}.
-% Results always in a record of type module defined in asn1_records.hrl
-
-ModuleIdentifier -> typereference DefinitiveIdentifier :
- put(asn1_module,'$1'#typereference.val),
- {'$1','$2'}.
-
-DefinitiveIdentifier -> '{' DefinitiveObjIdComponentList '}' : '$2' .
-DefinitiveIdentifier -> '$empty': [].
-
-DefinitiveObjIdComponentList -> DefinitiveObjIdComponent : ['$1'].
-DefinitiveObjIdComponentList -> DefinitiveObjIdComponent DefinitiveObjIdComponentList : ['$1'|'$2'].
-
-DefinitiveObjIdComponent -> identifier : '$1' . %expanded->
-% DefinitiveObjIdComponent -> NameForm : '$1' .
-DefinitiveObjIdComponent -> number : '$1' . %expanded->
-% DefinitiveObjIdComponent -> DefinitiveNumberForm : 'fix' .
-DefinitiveObjIdComponent -> identifier '(' number ')' : {'$1','$3'} . %expanded->
-% DefinitiveObjIdComponent -> DefinitiveNameAndNumberForm : {'$1','$3'} .
-
-% DefinitiveNumberForm -> number : 'fix' .
-
-% DefinitiveNameAndNumberForm -> identifier '(' DefinitiveNumberForm ')' : 'fix' .
-
-TagDefault -> 'EXPLICIT' 'TAGS' : put(tagdefault,'EXPLICIT'),'EXPLICIT' .
-TagDefault -> 'IMPLICIT' 'TAGS' : put(tagdefault,'IMPLICIT'),'IMPLICIT' .
-TagDefault -> 'AUTOMATIC' 'TAGS' : put(tagdefault,'AUTOMATIC'),'AUTOMATIC' .
-TagDefault -> '$empty': put(tagdefault,'EXPLICIT'),'EXPLICIT'. % because this is the default
-
-ExtensionDefault -> 'EXTENSIBILITY' 'IMPLIED' : 'IMPLIED'.
-ExtensionDefault -> '$empty' : 'false'. % because this is the default
-
-ModuleBody -> Exports Imports AssignmentList : {'ModuleBody','$1','$2','$3'}.
-ModuleBody -> '$empty' : {'ModuleBody',nil,nil,[]}.
-
-Exports -> 'EXPORTS' SymbolList ';' : {exports,'$2'}.
-Exports -> 'EXPORTS' ';' : {exports,[]}.
-Exports -> '$empty' : {exports,all} .
-
-% inlined above SymbolsExported -> SymbolList : '$1'.
-% inlined above SymbolsExported -> '$empty' : [].
-
-Imports -> 'IMPORTS' SymbolsFromModuleList ';' : {imports,'$2'}.
-Imports -> 'IMPORTS' ';' : {imports,[]}.
-Imports -> '$empty' : {imports,[]} .
-
-% inlined above SymbolsImported -> SymbolsFromModuleList : '$1'.
-% inlined above SymbolsImported -> '$empty' : [].
-
-SymbolsFromModuleList -> SymbolsFromModule :['$1'].
-% SymbolsFromModuleList -> SymbolsFromModuleList SymbolsFromModule :$1.%changed
-SymbolsFromModuleList -> SymbolsFromModule SymbolsFromModuleList :['$1'|'$2'].
-
-% expanded SymbolsFromModule -> SymbolList 'FROM' GlobalModuleReference : #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-SymbolsFromModule -> SymbolList 'FROM' typereference : #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-SymbolsFromModule -> SymbolList 'FROM' typereference '{' ValueList '}': #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-%SymbolsFromModule -> SymbolList 'FROM' typereference identifier: #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-%SymbolsFromModule -> SymbolList 'FROM' typereference Externalvaluereference: #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-%SymbolsFromModule -> SymbolList 'FROM' typereference DefinedValue: #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-
-% inlined GlobalModuleReference -> typereference AssignedIdentifier : {'$1','$2'} .
-
-% inlined above AssignedIdentifier -> '{' ValueList '}' : '$2'.
-% replaced AssignedIdentifier -> '{' DefinedValue ObjIdComponentList '}' :{'$2','$3'}.
-% not necessary , replaced by SAndSOfValue AssignedIdentifier -> ObjectIdentifierValue :'$1'.
-% AssignedIdentifier -> DefinedValue : '$1'.
-% inlined AssignedIdentifier -> '$empty' : undefined.
-
-SymbolList -> Symbol : ['$1'].
-SymbolList -> Symbol ',' SymbolList :['$1'|'$3'].
-
-Symbol -> Reference :'$1'.
-% later Symbol -> ParameterizedReference :'$1'.
-
-Reference -> typereference :'$1'.
-Reference -> identifier:'$1'.
-Reference -> typereference '{' '}':'$1'.
-Reference -> Externaltypereference '{' '}':'$1'.
-
-% later Reference -> objectclassreference :'$1'.
-% later Reference -> objectreference :'$1'.
-% later Reference -> objectsetreference :'$1'.
-
-AssignmentList -> Assignment : ['$1'].
-% modified AssignmentList -> AssignmentList Assignment : '$1'.
-AssignmentList -> Assignment AssignmentList : ['$1'|'$2'].
-
-Assignment -> TypeAssignment : '$1'.
-Assignment -> ValueAssignment : '$1'.
-% later Assignment -> ValueSetTypeAssignment : '$1'.
-Assignment -> ObjectClassAssignment : '$1'.
-% later Assignment -> ObjectAssignment : '$1'.
-% combined with ValueAssignment Assignment -> ObjectAssignment : '$1'.
-Assignment -> ObjectSetAssignment : '$1'.
-Assignment -> ParameterizedTypeAssignment : '$1'.
-%Assignment -> ParameterizedValueAssignment : '$1'.
-%Assignment -> ParameterizedValueSetTypeAssignment : '$1'.
-%Assignment -> ParameterizedObjectClassAssignment : '$1'.
-
-ObjectClassAssignment -> typereference '::=' 'CLASS' '{' FieldSpecs '}' :
-%ObjectClassAssignment -> objectclassreference '::=' 'CLASS' '{' FieldSpecs '}' :
- #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec={'CLASS','$5',[]}}.
-ObjectClassAssignment -> typereference '::=' 'CLASS' '{' FieldSpecs '}' WithSyntaxSpec :
-%ObjectClassAssignment -> objectclassreference '::=' 'CLASS' '{' FieldSpecs '}' WithSyntaxSpec :
- #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec={'CLASS','$5','$7'}}.
-
-FieldSpecs -> FieldSpec : ['$1'].
-FieldSpecs -> FieldSpec ',' FieldSpecs : ['$1'|'$3'].
-
-FieldSpec -> typefieldreference TypeOptionalitySpec : {typefield,'$1','$2'}.
-
-FieldSpec -> valuefieldreference Type 'UNIQUE' ValueOrObjectOptSpec :
- {fixedtypevaluefield,'$1','$2','UNIQUE','$4'}.
-FieldSpec -> valuefieldreference Type ValueOrObjectOptSpec :
- {fixedtypevaluefield,'$1','$2',undefined,'$3'}.
-
-FieldSpec -> valuefieldreference typefieldreference ValueOrObjectOptSpec :
- {variabletypevaluefield, '$1','$2','$3'}.
-
-FieldSpec -> typefieldreference typefieldreference VSetOrOSetOptSpec :
- {variabletypevaluesetfield, '$1','$2','$3'}.
-
-FieldSpec -> typefieldreference Type VSetOrOSetOptSpec :
- {fixedtypevaluesetfield, '$1','$2','$3'}.
-
-TypeOptionalitySpec -> 'DEFAULT' Type : {'DEFAULT','$2'}.
-TypeOptionalitySpec -> 'OPTIONAL' : 'OPTIONAL'.
-TypeOptionalitySpec -> '$empty' : 'MANDATORY'.
-
-ValueOrObjectOptSpec -> ValueOptionalitySpec : '$1'.
-ValueOrObjectOptSpec -> ObjectOptionalitySpec : '$1'.
-ValueOrObjectOptSpec -> 'OPTIONAL' : 'OPTIONAL'.
-ValueOrObjectOptSpec -> '$empty' : 'MANDATORY'.
-
-ValueOptionalitySpec -> 'DEFAULT' Value :
- case '$2' of
- {identifier,_,Id} -> {'DEFAULT',Id};
- _ -> {'DEFAULT','$2'}
- end.
-
-%ObjectOptionalitySpec -> 'DEFAULT' Object :{'DEFAULT','$1'}.
-ObjectOptionalitySpec -> 'DEFAULT' '{' FieldSetting ',' FieldSettings '}' :
- {'DEFAULT',{object,['$2'|'$4']}}.
-ObjectOptionalitySpec -> 'DEFAULT' '{' FieldSetting '}' :
- {'DEFAULT',{object, ['$2']}}.
-%ObjectOptionalitySpec -> 'DEFAULT' '{' DefinedSyntaxTokens '}' :
-% {'DEFAULT',{object, '$2'}}.
-ObjectOptionalitySpec -> 'DEFAULT' ObjectFromObject :
- {'DEFAULT',{object, '$2'}}.
-
-
-VSetOrOSetOptSpec -> ValueSetOptionalitySpec : '$1'.
-%VSetOrOSetOptSpec -> ObjectSetOptionalitySpec : '$1'.
-VSetOrOSetOptSpec -> 'OPTIONAL' : 'OPTIONAL'.
-VSetOrOSetOptSpec -> '$empty' : 'MANDATORY'.
-
-ValueSetOptionalitySpec -> 'DEFAULT' ValueSet : {'DEFAULT','$1'}.
-
-%ObjectSetOptionalitySpec -> 'DEFAULT' ObjectSet : {'DEFAULT','$1'}.
-
-OptionalitySpec -> 'DEFAULT' Type : {'DEFAULT','$2'}.
-OptionalitySpec -> 'DEFAULT' ValueNotNull :
- case '$2' of
- {identifier,_,Id} -> {'DEFAULT',Id};
- _ -> {'DEFAULT','$2'}
- end.
-OptionalitySpec -> 'OPTIONAL' : 'OPTIONAL'.
-OptionalitySpec -> '$empty' : 'MANDATORY'.
-
-WithSyntaxSpec -> 'WITH' 'SYNTAX' SyntaxList : {'WITH SYNTAX','$3'}.
-
-SyntaxList -> '{' TokenOrGroupSpecs '}' : '$2'.
-SyntaxList -> '{' '}' : [].
-
-TokenOrGroupSpecs -> TokenOrGroupSpec : ['$1'].
-TokenOrGroupSpecs -> TokenOrGroupSpec TokenOrGroupSpecs : ['$1'|'$2'].
-
-TokenOrGroupSpec -> RequiredToken : '$1'.
-TokenOrGroupSpec -> OptionalGroup : '$1'.
-
-OptionalGroup -> '[' TokenOrGroupSpecs ']' : '$2'.
-
-RequiredToken -> typereference : '$1'.
-RequiredToken -> Word : '$1'.
-RequiredToken -> ',' : '$1'.
-RequiredToken -> PrimitiveFieldName : '$1'.
-
-Word -> 'BY' : 'BY'.
-
-ParameterizedTypeAssignment -> typereference ParameterList '::=' Type :
- #ptypedef{pos=element(2,'$1'),name=element(3,'$1'),
- args='$2', typespec='$4'}.
-
-ParameterList -> '{' Parameters '}':'$2'.
-
-Parameters -> Parameter: ['$1'].
-Parameters -> Parameter ',' Parameters: ['$1'|'$3'].
-
-Parameter -> typereference: '$1'.
-Parameter -> Value: '$1'.
-Parameter -> Type ':' typereference: {'$1','$3'}.
-Parameter -> Type ':' Value: {'$1','$3'}.
-Parameter -> '{' typereference '}': {objectset,'$2'}.
-
-
-% Externaltypereference -> modulereference '.' typereference : {'$1','$3'} .
-Externaltypereference -> typereference '.' typereference : #'Externaltypereference'{pos=element(2,'$1'),module=element(3,'$1'),type=element(3,'$3')}.
-
-% Externalvaluereference -> modulereference '.' valuereference : {'$1','$3'} .
-% inlined Externalvaluereference -> typereference '.' identifier : #'Externalvaluereference'{pos=element(2,'$1'),module=element(3,'$1'),value=element(3,'$3')}.
-
-
-DefinedType -> Externaltypereference : '$1' .
-DefinedType -> typereference :
- #'Externaltypereference'{pos='$1'#typereference.pos,
- module= get(asn1_module),
- type= '$1'#typereference.val} .
-DefinedType -> typereference ParameterList : {pt,'$1','$2'}.
-DefinedType -> Externaltypereference ParameterList : {pt,'$1','$2'}.
-
-% ActualParameterList -> '{' ActualParameters '}' : '$1'.
-
-% ActualParameters -> ActualParameter : ['$1'].
-% ActualParameters -> ActualParameter ',' ActualParameters : ['$1'|'$3'].
-
-ActualParameter -> Type : '$1'.
-ActualParameter -> ValueNotNull : '$1'.
-ActualParameter -> ValueSet : '$1'.
-% later DefinedType -> ParameterizedType : '$1' .
-% later DefinedType -> ParameterizedValueSetType : '$1' .
-
-% inlined DefinedValue -> Externalvaluereference :'$1'.
-% inlined DefinedValue -> identifier :'$1'.
-% later DefinedValue -> ParameterizedValue :'$1'.
-
-% not referenced yet AbsoluteReference -> '@' GlobalModuleReference '.' ItemSpec :{'$2','$4'}.
-
-% not referenced yet ItemSpec -> typereference :'$1'.
-% not referenced yet ItemSpec -> ItemId '.' ComponentId : {'$1','$3'}.
-
-% not referenced yet ItemId -> ItemSpec : '$1'.
-
-% not referenced yet ComponentId -> identifier :'$1'.
-% not referenced yet ComponentId -> number :'$1'.
-% not referenced yet ComponentId -> '*' :'$1'.
-
-TypeAssignment -> typereference '::=' Type :
- #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec='$3'}.
-
-ValueAssignment -> identifier Type '::=' Value :
- #valuedef{pos=element(2,'$1'),name=element(3,'$1'),type='$2',value='$4'}.
-
-% later ValueSetTypeAssignment -> typereference Type '::=' ValueSet :{'ValueSetTypeAssignment','$1','$2','$4'}.
-
-
-ValueSet -> '{' ElementSetSpec '}' : {valueset,'$2'}.
-
-% record(type,{tag,def,constraint}).
-Type -> BuiltinType :#type{def='$1'}.
-Type -> 'NULL' :#type{def='NULL'}.
-Type -> TaggedType:'$1'.
-Type -> ReferencedType:#type{def='$1'}. % change notag later
-Type -> ConstrainedType:'$1'.
-
-%ANY is here for compatibility with the old ASN.1 standard from 1988
-BuiltinType -> 'ANY' AnyDefBy:
- case '$2' of
- [] -> 'ANY';
- _ -> {'ANY DEFINED BY','$2'}
- end.
-BuiltinType -> BitStringType :'$1'.
-BuiltinType -> 'BOOLEAN' :element(1,'$1').
-BuiltinType -> CharacterStringType :'$1'.
-BuiltinType -> ChoiceType :'$1'.
-BuiltinType -> 'EMBEDDED' 'PDV' :'EMBEDDED PDV'.
-BuiltinType -> EnumeratedType :'$1'.
-BuiltinType -> 'EXTERNAL' :element(1,'$1').
-% later BuiltinType -> InstanceOfType :'$1'.
-BuiltinType -> IntegerType :'$1'.
-% BuiltinType -> 'NULL' :element(1,'$1').
-% later BuiltinType -> ObjectClassFieldType :'$1'.
-BuiltinType -> 'OBJECT' 'IDENTIFIER' :'OBJECT IDENTIFIER'.
-BuiltinType -> 'OCTET' 'STRING' :'OCTET STRING'.
-BuiltinType -> 'REAL' :element(1,'$1').
-BuiltinType -> SequenceType :'$1'.
-BuiltinType -> SequenceOfType :'$1'.
-BuiltinType -> SetType :'$1'.
-BuiltinType -> SetOfType :'$1'.
-% The so called Useful types
-BuiltinType -> 'GeneralizedTime': 'GeneralizedTime'.
-BuiltinType -> 'UTCTime' :'UTCTime'.
-BuiltinType -> 'ObjectDescriptor' : 'ObjectDescriptor'.
-
-% moved BuiltinType -> TaggedType :'$1'.
-
-
-AnyDefBy -> 'DEFINED' 'BY' identifier: '$3'.
-AnyDefBy -> '$empty': [].
-
-NamedType -> identifier Type :
-%{_,Pos,Val} = '$1',
-%{'NamedType',Pos,{Val,'$2'}}.
-V1 = '$1',
-{'NamedType',V1#identifier.pos,{V1#identifier.val,'$2'}}.
-NamedType -> SelectionType :'$1'.
-
-ReferencedType -> DefinedType : '$1'.
-% redundant ReferencedType -> UsefulType : 'fix'.
-ReferencedType -> SelectionType : '$1'.
-ReferencedType -> TypeFromObject : '$1'.
-% later ReferencedType -> ValueSetFromObjects : 'fix'.
-
-% too many conflicts Value -> AnyValue :'$1'.
-Value -> ValueNotNull : '$1'.
-Value -> 'NULL' :element(1,'$1').
-
-ValueNotNull -> BuiltinValue :'$1'.
-% inlined Value -> DefinedValue :'$1'. % DefinedValue , identifier
-% inlined Externalvaluereference -> Externalvaluereference :'$1'.
-ValueNotNull -> typereference '.' identifier :
- #'Externalvaluereference'{pos=element(2,'$1'),module=element(3,'$1'),
- value=element(3,'$3')}.
-ValueNotNull -> identifier :'$1'.
-
-
-%tmp Value -> NamedNumber: '$1'. % not a value but part of ObjIdC
-% redundant BuiltinValue -> BitStringValue :'$1'.
-BuiltinValue -> BooleanValue :'$1'.
-BuiltinValue -> CharacterStringValue :'$1'.
-BuiltinValue -> ChoiceValue :'$1'.
-% BuiltinValue -> EmbeddedPDVValue :'$1'. ==SequenceValue
-% BuiltinValue -> EnumeratedValue :'$1'. identifier
-% BuiltinValue -> ExternalValue :'$1'. ==SequenceValue
-% later BuiltinValue -> InstanceOfValue :'$1'.
-BuiltinValue -> SignedNumber :'$1'.
-% BuiltinValue -> 'NULL' :'$1'.
-% later BuiltinValue -> ObjectClassFieldValue :'$1'.
-% replaced by SAndSOfValue BuiltinValue -> ObjectIdentifierValue :'$1'.
-BuiltinValue -> bstring :element(3,'$1').
-BuiltinValue -> hstring :element(3,'$1').
-% conflict BuiltinValue -> RealValue :'$1'.
-BuiltinValue -> SAndSOfValue :'$1'.
-% replaced BuiltinValue -> SequenceOfValue :'$1'.
-% replaced BuiltinValue -> SequenceValue :'$1'.
-% replaced BuiltinValue -> SetValue :'$1'.
-% replaced BuiltinValue -> SetOfValue :'$1'.
-% conflict redundant BuiltinValue -> TaggedValue :'$1'.
-
-% inlined ReferencedValue -> DefinedValue:'$1'.
-% ReferencedValue -> Externalvaluereference:'$1'.
-% ReferencedValue -> identifier :'$1'.
-% later ReferencedValue -> ValueFromObject:'$1'.
-
-% inlined BooleanType -> BOOLEAN :'BOOLEAN'.
-
-% too many conflicts AnyValue -> Type ':' Value : {'ANYVALUE',{'$1','$3'}}.
-
-BooleanValue -> TRUE :true.
-BooleanValue -> FALSE :false.
-
-IntegerType -> 'INTEGER' : 'INTEGER'.
-IntegerType -> 'INTEGER' '{' NamedNumberList '}' : {'INTEGER','$3'}.
-
-NamedNumberList -> NamedNumber :['$1'].
-% modified NamedNumberList -> NamedNumberList ',' NamedNumber :'fix'.
-NamedNumberList -> NamedNumber ',' NamedNumberList :['$1'|'$3'].
-
-NamedNumber -> identifier '(' SignedNumber ')' : {'NamedNumber',element(3,'$1'),'$3'}.
-NamedNumber -> identifier '(' typereference '.' identifier ')' : {'NamedNumber',element(3,'$1'),{'ExternalValue',element(3,'$3'),element(3,'$5')}}.
-NamedNumber -> identifier '(' identifier ')' : {'NamedNumber',element(3,'$1'),element(3,'$3')}.
-
-%NamedValue -> identifier Value :
-% {'NamedValue',element(2,'$1'),element(3,'$1'),'$2'}.
-
-
-SignedNumber -> number : element(3,'$1').
-SignedNumber -> '-' number : - element(3,'$1').
-
-% inlined IntegerValue -> SignedNumber :'$1'.
-% conflict moved to Value IntegerValue -> identifier:'$1'.
-
-EnumeratedType -> ENUMERATED '{' Enumeration '}' :{'ENUMERATED','$3'}.
-
-% inlined Enumerations -> Enumeration :{'$1','false',[]}.
-% inlined Enumerations -> Enumeration ',' '...' : {'$1','true',[]}.
-% inlined Enumerations -> Enumeration ',' '...' ',' Enumeration : {'$1','true','$5'}.
-
-Enumeration -> EnumerationItem :['$1'].
-% modified Enumeration -> EnumerationItem ',' Enumeration :'fix'.
-Enumeration -> EnumerationItem ',' Enumeration :['$1'|'$3'].
-
-EnumerationItem -> identifier:element(3,'$1').
-EnumerationItem -> NamedNumber :'$1'.
-EnumerationItem -> '...' :'EXTENSIONMARK'.
-
-% conflict moved to Value EnumeratedValue -> identifier:'$1'.
-
-% inlined RealType -> REAL:'REAL'.
-
-RealValue -> NumericRealValue :'$1'.
-RealValue -> SpecialRealValue:'$1'.
-
-% ?? NumericRealValue -> number:'$1'. % number MUST BE '0'
-NumericRealValue -> SAndSOfValue : '$1'. % Value of the associated sequence type
-
-SpecialRealValue -> 'PLUS-INFINITY' :'$1'.
-SpecialRealValue -> 'MINUS-INFINITY' :'$1'.
-
-BitStringType -> 'BIT' 'STRING' :{'BIT STRING',[]}.
-BitStringType -> 'BIT' 'STRING' '{' NamedNumberList '}' :{'BIT STRING','$4'}.
-% NamedBitList replaced by NamedNumberList to reduce the grammar
-% Must check later that all "numbers" are positive
-
-% inlined BitStringValue -> bstring:'$1'.
-% inlined BitStringValue -> hstring:'$1'.
-% redundant use SequenceValue BitStringValue -> '{' IdentifierList '}' :$2.
-% redundant use SequenceValue BitStringValue -> '{' '}' :'fix'.
-
-IdentifierList -> identifier :[element(3,'$1')].
-% modified IdentifierList -> IdentifierList ',' identifier :'$1'.
-IdentifierList -> identifier ',' IdentifierList :[element(3,'$1')|'$3'].
-
-% inlined OctetStringType -> 'OCTET' 'STRING' :'OCTET STRING'.
-
-% inlined OctetStringValue -> bstring:'$1'.
-% inlined OctetStringValue -> hstring:'$1'.
-
-% inlined NullType -> 'NULL':'NULL'.
-
-% inlined NullValue -> NULL:'NULL'.
-
-% result is {'SEQUENCE',Optionals,Extensionmark,Componenttypelist}.
-SequenceType -> SEQUENCE '{' ComponentTypeList '}' :{'SEQUENCE','$3'}.
-% SequenceType -> SEQUENCE '{' ComponentTypeLists '}' :{'SEQUENCE','$3'}.
-% SequenceType -> SEQUENCE '{' ExtensionAndException '}' :{'SEQUENCE','$3'}.
-SequenceType -> SEQUENCE '{' '}' :{'SEQUENCE',[]}.
-
-% result is {RootComponentList,ExtensionAndException,AdditionalComponentTypeList}.
-%ComponentTypeLists -> ComponentTypeList ',' ExtensionAndException :{'$1','$3',[]}.
-%ComponentTypeLists -> ComponentTypeList :{'$1','false',[]}.
-%ComponentTypeLists -> ComponentTypeList ',' ExtensionAndException
-% ',' ComponentTypeList :{'$1','$3', '$5'}.
-%ComponentTypeLists -> ExtensionAndException ',' ComponentTypeList :{[],'$1','$3'}.
-
-ComponentTypeList -> ComponentType :['$1'].
-% modified below ComponentTypeList -> ComponentTypeList ',' ComponentType :'$1'.
-ComponentTypeList -> ComponentType ',' ComponentTypeList :['$1'|'$3'].
-
-% -record('ComponentType',{pos,name,type,attrib}).
-ComponentType -> '...' ExceptionSpec :{'EXTENSIONMARK',element(2,'$1'),'$2'}.
-ComponentType -> NamedType :
- {'NamedType',Pos,{Name,Type}} = '$1',
- #'ComponentType'{pos=Pos,name=Name,typespec=Type,prop=mandatory}.
-ComponentType -> NamedType 'OPTIONAL' :
- {'NamedType',Pos,{Name,Type}} = '$1',
- #'ComponentType'{pos=Pos,name=Name,typespec=Type,prop='OPTIONAL'}.
-ComponentType -> NamedType 'DEFAULT' Value:
- {'NamedType',Pos,{Name,Type}} = '$1',
- #'ComponentType'{pos=Pos,name=Name,typespec=Type,prop={'DEFAULT','$3'}}.
-ComponentType -> 'COMPONENTS' 'OF' Type :{'COMPONENTS OF','$3'}.
-
-% redundant ExtensionAndException -> '...' : extensionmark.
-% ExtensionAndException -> '...' ExceptionSpec : {extensionmark,'$2'}.
-
-% replaced SequenceValue -> '{' ComponentValueList '}':'$2'.
-% replaced SequenceValue -> '{' '}':[].
-
-ValueList -> Value :['$1'].
-ValueList -> NamedNumber :['$1'].
-% modified ValueList -> ValueList ',' Value :'$1'.
-ValueList -> Value ',' ValueList :['$1'|'$3'].
-ValueList -> Value ',' '...' :['$1' |[]].
-ValueList -> Value ValueList : ['$1',space|'$2'].
-ValueList -> NamedNumber ValueList: ['$1',space|'$2'].
-
-%ComponentValueList -> identifier ObjIdComponent:[{'NamedValue','$1','$2'}].
-%ComponentValueList -> NamedValue :['$1'].
-%ComponentValueList -> NamedValue ',' ComponentValueList:['$1'|'$3'].
-%ComponentValueList -> identifier ObjIdComponent ',' ComponentValueList :[{'NamedValue', '$1','$2'}|'$4'].
-
-SequenceOfType -> SEQUENCE OF Type : {'SEQUENCE OF','$3'}.
-
-% replaced SequenceOfValue with SAndSOfValue
-
-SAndSOfValue -> '{' ValueList '}' :'$2'.
-%SAndSOfValue -> '{' ComponentValueList '}' :'$2'.
-SAndSOfValue -> '{' '}' :[].
-
-% save for later SetType ->
-% result is {'SET',Optionals,Extensionmark,Componenttypelist}.
-SetType -> SET '{' ComponentTypeList '}' :{'SET','$3'}.
-% SetType -> SET '{' ExtensionAndException '}' :{'SET','$3'}.
-SetType -> SET '{' '}' :{'SET',[]}.
-
-% replaced SetValue with SAndSOfValue
-
-SetOfType -> SET OF Type : {'SET OF','$3'}.
-
-% replaced SetOfValue with SAndSOfValue
-
-ChoiceType -> 'CHOICE' '{' ComponentTypeList '}' :{'CHOICE','$3'}.
-% AlternativeTypeList is replaced by ComponentTypeList
-ChoiceValue -> identifier ':' Value : {'ChoiceValue',element(3,'$1'),'$3'}.
-% save for later SelectionType ->
-
-TaggedType -> Tag Type : '$2'#type{tag=['$1'#tag{type={default,get(tagdefault)}}]}.
-TaggedType -> Tag IMPLICIT Type :'$3'#type{tag=['$1'#tag{type='IMPLICIT'}]}.
-TaggedType -> Tag EXPLICIT Type :'$3'#type{tag=['$1'#tag{type='EXPLICIT'}]}.
-
-Tag -> '[' Class ClassNumber ']': #tag{class='$2',number='$3'}.
-Tag -> '[' Class typereference '.' identifier ']':
- #tag{class='$2',number=#'Externalvaluereference'{pos=element(2,'$3'),module=element(3,'$3'),
- value=element(3,'$5')}}.
-Tag -> '[' Class number ']': #tag{class='$2',number=element(3,'$3')}.
-Tag -> '[' Class identifier ']': #tag{class='$2',number=element(3,'$3')}.
-
-ClassNumber -> number :element(3,'$1').
-% inlined above ClassNumber -> typereference '.' identifier :{'Externalvaluereference',element(3,'$1'),element(3,'$3')}.
-ClassNumber -> identifier :element(3,'$1').
-
-Class -> 'UNIVERSAL' :element(1,'$1').
-Class -> 'APPLICATION' :element(1,'$1').
-Class -> 'PRIVATE' :element(1,'$1').
-Class -> '$empty' :'CONTEXT'.
-
-% conflict redundant TaggedValue -> Value:'$1'.
-
-% inlined EmbeddedPDVType -> 'EMBEDDED' 'PDV' :'EMBEDDED PDV'.
-
-% inlined EmbeddedPDVValue -> SequenceValue:'$1'.
-
-% inlined ExternalType -> 'EXTERNAL' :'EXTERNAL'.
-
-% inlined ExternalValue -> SequenceValue :'$1'.
-
-% inlined ObjectIdentifierType -> 'OBJECT' 'IDENTIFIER' :'OBJECT IDENTIFIER'.
-
-ObjectIdentifierValue -> '{' ObjIdComponentList '}' :'$2'.
-% inlined ObjectIdentifierValue -> SequenceAndSequenceOfValue :'$1'.
-% ObjectIdentifierValue -> '{' identifier ObjIdComponentList '}' :{'ObjectIdentifierValue','$2','$3'}.
-% ObjectIdentifierValue -> '{' typereference '.' identifier ObjIdComponentList '}' :{'ObjectIdentifierValue',{'$2','$4'},'$5'}.
-
-ObjIdComponentList -> Value:'$1'.
-ObjIdComponentList -> Value ObjIdComponentList :['$1'|'$2'].
-%ObjIdComponentList -> DefinedValue:'$1'.
-%ObjIdComponentList -> number:'$1'.
-%ObjIdComponentList -> DefinedValue ObjIdComponentList :['$1'|'$2'].
-%ObjIdComponentList -> number ObjIdComponentList :['$1'|'$2'].
-%ObjIdComponentList -> ObjIdComponent ObjIdComponentList :['$1'|'$2'].
-%ObjIdComponentList -> ObjIdComponent ObjIdComponentList :['$1'|'$2'].
-
-% redundant ObjIdComponent -> NameForm :'$1'. % expanded
-% replaced by 2 ObjIdComponent -> NumberForm :'$1'.
-% ObjIdComponent -> number :'$1'.
-% ObjIdComponent -> DefinedValue :'$1'. % means DefinedValue
-% ObjIdComponent -> NameAndNumberForm :'$1'.
-% ObjIdComponent -> NamedNumber :'$1'.
-% NamedBit replaced by NamedNumber to reduce grammar
-% must check later that "number" is positive
-
-% NameForm -> identifier:'$1'.
-
-% inlined NumberForm -> number :'$1'.
-% inlined NumberForm -> DefinedValue :'$1'.
-
-% replaced by NamedBit NameAndNumberForm -> identifier '(' NumberForm ')'.
-% NameAndNumberForm -> NamedBit:'$1'.
-
-
-CharacterStringType -> restrictedcharacterstringtype :element(3,'$1').
-CharacterStringType -> 'CHARACTER' 'STRING' :'CHARACTER STRING'.
-
-RestrictedCharacterStringValue -> cstring :element(3, '$1').
-% modified below RestrictedCharacterStringValue -> CharacterStringList :'$1'.
-% conflict vs BuiltinValue RestrictedCharacterStringValue -> SequenceAndSequenceOfValue :'$1'.
-RestrictedCharacterStringValue -> Quadruple :'$1'.
-RestrictedCharacterStringValue -> Tuple :'$1'.
-
-% redundant CharacterStringList -> '{' ValueList '}' :'$2'. % modified
-
-% redundant CharSyms -> CharsDefn :'$1'.
-% redundant CharSyms -> CharSyms ',' CharsDefn :['$1'|'$3'].
-
-% redundant CharsDefn -> cstring :'$1'.
-% temporarily replaced, see below CharsDefn -> DefinedValue :'$1'.
-% redundant CharsDefn -> Value :'$1'.
-
-Quadruple -> '{' number ',' number ',' number ',' number '}' :{'Quadruple','$2','$4','$6','$8'}.
-% {Group,Plane,Row,Cell}
-
-Tuple -> '{' number ',' number '}' :{'Tuple', '$2','$4'}.
-% {TableColumn,TableRow}
-
-% inlined UnrestrictedCharacterString -> 'CHARACTER' 'STRING' :'CHARACTER STRING'.
-
-CharacterStringValue -> RestrictedCharacterStringValue :'$1'.
-% conflict vs BuiltinValue CharacterStringValue -> SequenceValue :'$1'. % UnrestrictedCharacterStringValue
-
-% inlined UsefulType -> typereference :'$1'.
-
-SelectionType -> identifier '<' Type : {'SelectionType',element(3,'$1'),'$3'}.
-
-ConstrainedType -> Type Constraint :
- '$1'#type{constraint=merge_constraints(['$2'])}.
-ConstrainedType -> Type Constraint Constraint :
- '$1'#type{constraint=merge_constraints(['$2','$3'])}.
-ConstrainedType -> Type Constraint Constraint Constraint:
- '$1'#type{constraint=merge_constraints(['$2','$3','$4'])}.
-ConstrainedType -> Type Constraint Constraint Constraint Constraint:
- '$1'#type{constraint=merge_constraints(['$2','$3','$4','$5'])}.
-%ConstrainedType -> Type Constraint :'$1'#type{constraint='$2'}.
-%ConstrainedType -> Type Constraint :'$1'#type{constraint='$2'}.
-ConstrainedType -> TypeWithConstraint :'$1'.
-
-TypeWithConstraint -> 'SET' Constraint 'OF' Type :
- #type{def = {'SET OF','$4'},constraint=merge_constraints(['$2'])}.
-TypeWithConstraint -> 'SET' 'SIZE' Constraint 'OF' Type :
- #type{def = {'SET OF','$5'},constraint = merge_constraints([#constraint{c={'SizeConstraint','$3'#constraint.c}}])}.
-TypeWithConstraint -> 'SEQUENCE' Constraint 'OF' Type :
- #type{def = {'SEQUENCE OF','$4'},constraint =
- merge_constraints(['$2'])}.
-TypeWithConstraint -> 'SEQUENCE' 'SIZE' Constraint 'OF' Type :
- #type{def = {'SEQUENCE OF','$5'},constraint = merge_constraints([#constraint{c={'SizeConstraint','$3'#constraint.c}}])}.
-
-
-Constraint -> '(' ConstraintSpec ExceptionSpec ')' :
- #constraint{c='$2',e='$3'}.
-
-% inlined Constraint -> SubTypeConstraint :'$1'.
-ConstraintSpec -> ElementSetSpecs :'$1'.
-ConstraintSpec -> UserDefinedConstraint :'$1'.
-ConstraintSpec -> TableConstraint :'$1'.
-
-TableConstraint -> ComponentRelationConstraint : '$1'.
-TableConstraint -> ObjectSet : '$1'.
-%TableConstraint -> '{' typereference '}' :tableconstraint.
-
-ComponentRelationConstraint -> '{' typereference '}' '{' '@' ComponentIdList '}' : componentrelation.
-ComponentRelationConstraint -> '{' typereference '}' '{' '@' '.' ComponentIdList '}' : componentrelation.
-
-ComponentIdList -> identifier: ['$1'].
-ComponentIdList -> identifier '.' ComponentIdList: ['$1'| '$3'].
-
-
-% later ConstraintSpec -> GeneralConstraint :'$1'.
-
-% from X.682
-UserDefinedConstraint -> 'CONSTRAINED' 'BY' '{' '}' : {constrained_by,[]}.
-UserDefinedConstraint -> 'CONSTRAINED' 'BY'
- '{' UserDefinedConstraintParameters '}' : {constrained_by,'$4'}.
-
-UserDefinedConstraintParameters -> UserDefinedConstraintParameter : ['$1'].
-UserDefinedConstraintParameters ->
- UserDefinedConstraintParameter ','
- UserDefinedConstraintParameters: ['$1'|'$3'].
-
-UserDefinedConstraintParameter -> Type '.' ActualParameter : {'$1','$3'}.
-UserDefinedConstraintParameter -> ActualParameter : '$1'.
-
-
-
-ExceptionSpec -> '!' ExceptionIdentification : '$1'.
-ExceptionSpec -> '$empty' : undefined.
-
-ExceptionIdentification -> SignedNumber : '$1'.
-% inlined ExceptionIdentification -> DefinedValue : '$1'.
-ExceptionIdentification -> typereference '.' identifier :
- #'Externalvaluereference'{pos=element(2,'$1'),module=element(3,'$1'),
- value=element(3,'$1')}.
-ExceptionIdentification -> identifier :'$1'.
-ExceptionIdentification -> Type ':' Value : {'$1','$3'}.
-
-% inlined SubTypeConstraint -> ElementSetSpec
-
-ElementSetSpecs -> ElementSetSpec : '$1'.
-ElementSetSpecs -> ElementSetSpec ',' '...': {'$1',[]}.
-ElementSetSpecs -> '...' ',' ElementSetSpec : {[],'$3'}.
-ElementSetSpecs -> ElementSetSpec ',' '...' ',' ElementSetSpec : {'$1','$5'}.
-
-ElementSetSpec -> Unions : '$1'.
-ElementSetSpec -> 'ALL' Exclusions : {'ALL','$2'}.
-
-Unions -> Intersections : '$1'.
-Unions -> UElems UnionMark IntersectionElements :
- case {'$1','$3'} of
- {{'SingleValue',V1},{'SingleValue',V2}} ->
- {'SingleValue',ordsets:union(to_set(V1),to_set(V2))}
- end.
-
-UElems -> Unions :'$1'.
-
-Intersections -> IntersectionElements :'$1'.
-Intersections -> IElems IntersectionMark IntersectionElements :
- case {'$1','$3'} of
- {{'SingleValue',V1},{'SingleValue',V2}} ->
- {'SingleValue',ordsets:intersection(to_set(V1),to_set(V2))};
- {V1,V2} when list(V1) ->
- V1 ++ [V2];
- {V1,V2} ->
- [V1,V2]
- end.
-%Intersections -> IElems '^' IntersectionElements :{'INTERSECTION','$1','$3'}.
-%Intersections -> IElems 'INTERSECTION' IntersectionElements :{'INTERSECTION','$1','$3'}.
-
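For the single-value case handled above, the Unions and Intersections rules collapse two list-valued 'SingleValue' constraints into one ordered set. A small sketch of that set arithmetic follows; the removed to_set/1 helper further down relies on ordsets:list_to_set/1, which no longer exists in current OTP, so the sketch uses ordsets:from_list/1 instead (module and function names are illustrative).

    -module(set_merge_sketch).
    -export([union_sv/2, intersection_sv/2]).

    %% UNION of two list-valued 'SingleValue' constraints, the case the
    %% Unions rule above folds into one ordered set.
    union_sv({'SingleValue', V1}, {'SingleValue', V2}) ->
        {'SingleValue',
         ordsets:union(ordsets:from_list(V1), ordsets:from_list(V2))}.

    %% INTERSECTION of two list-valued 'SingleValue' constraints, as in
    %% the corresponding case of the Intersections rule above.
    intersection_sv({'SingleValue', V1}, {'SingleValue', V2}) ->
        {'SingleValue',
         ordsets:intersection(ordsets:from_list(V1), ordsets:from_list(V2))}.

For instance, union_sv({'SingleValue',[1,3]}, {'SingleValue',[2,3]}) evaluates to {'SingleValue',[1,2,3]}.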
-IElems -> Intersections :'$1'.
-
-IntersectionElements -> Elements :'$1'.
-IntersectionElements -> Elems Exclusions :{'$1','$2'}.
-
-Elems -> Elements :'$1'.
-
-Exclusions -> 'EXCEPT' Elements :{'EXCEPT','$2'}.
-
-IntersectionMark -> 'INTERSECTION':'$1'.
-IntersectionMark -> '^':'$1'.
-UnionMark -> 'UNION':'$1'.
-UnionMark -> '|':'$1'.
-
-
-Elements -> SubTypeElements : '$1'.
-%Elements -> ObjectSetElements : '$1'.
-Elements -> '(' ElementSetSpec ')' : '$2'.
-Elements -> ReferencedType : '$1'.
-
-SubTypeElements -> ValueList : {'SingleValue','$1'}. % NOTE it must be a Value
-% The rule above was modified only because of conflicts
-SubTypeElements -> 'INCLUDES' Type : {'ContainedSubType','$2'}.
-%not lalr1 if this is activated SubTypeElements -> Type : {'TypeConstraint','$1'}.
-SubTypeElements -> LowerEndpoint '..' UpperEndpoint : {'ValueRange',{'$1','$3'}}.
-SubTypeElements -> 'FROM' Constraint : {'PermittedAlphabet','$2'#constraint.c}.
-SubTypeElements -> 'SIZE' Constraint: {'SizeConstraint','$2'#constraint.c}.
-% later will introduce conflicts related to NULL SubTypeElements -> Type : {'TypeConstraint','$1'}.
-SubTypeElements -> 'WITH' 'COMPONENT' Constraint:{'WITH COMPONENT','$3'}.
-SubTypeElements -> 'WITH' 'COMPONENTS' '{' TypeConstraints '}':{'WITH COMPONENTS',{'FullSpecification','$4'}}.
-SubTypeElements -> 'WITH' 'COMPONENTS' '{' '...' ',' TypeConstraints '}' :{'WITH COMPONENTS',{'PartialSpecification','$3'}}.
-
-% inlined above InnerTypeConstraints ::=
-% inlined above SingleTypeConstraint::= Constraint
-% inlined above MultipleTypeConstraints ::= FullSpecification | PartialSpecification
-% inlined above FullSpecification ::= "{" TypeConstraints "}"
-% inlined above PartialSpecification ::= "{" "..." "," TypeConstraints "}"
-% TypeConstraints -> identifier : [{'NamedConstraint',element(3,'$1'),undefined,undefined}]. % is this really meaningful or allowed
-TypeConstraints -> NamedConstraint : ['$1'].
-TypeConstraints -> NamedConstraint ',' TypeConstraints : ['$1'|'$3'].
-TypeConstraints -> identifier : ['$1'].
-TypeConstraints -> identifier ',' TypeConstraints : ['$1'|'$3'].
-
-NamedConstraint -> identifier Constraint PresenceConstraint :{'NamedConstraint',element(3,'$1'),'$2','$3'}.
-NamedConstraint -> identifier Constraint :{'NamedConstraint',element(3,'$1'),'$2',undefined}.
-NamedConstraint -> identifier PresenceConstraint :{'NamedConstraint',element(3,'$1'),undefined,'$2'}.
-
-PresenceConstraint -> 'PRESENT' : 'PRESENT'.
-PresenceConstraint -> 'ABSENT' : 'ABSENT'.
-PresenceConstraint -> 'OPTIONAL' : 'OPTIONAL'.
-
-
-
-LowerEndpoint -> LowerEndValue :'$1'.
-%LowerEndpoint -> LowerEndValue '<':{gt,'$1'}.
-LowerEndpoint -> LowerEndValue '<':('$1'+1).
-
-UpperEndpoint -> UpperEndValue :'$1'.
-%UpperEndpoint -> '<' UpperEndValue :{lt,'$2'}.
-UpperEndpoint -> '<' UpperEndValue :('$2'-1).
-
-LowerEndValue -> Value :'$1'.
-LowerEndValue -> 'MIN' :'MIN'.
-
-UpperEndValue -> Value :'$1'.
-UpperEndValue -> 'MAX' :'MAX'.
-
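The LowerEndpoint and UpperEndpoint rules above normalise exclusive endpoints to inclusive ones by adding or subtracting one, so a constraint such as (0<..<10) is recorded as the inclusive range 1..9. A hypothetical helper spelling out that arithmetic for integer bounds (the function name and the open/closed tags are illustration only, not part of the grammar):

    -module(endpoint_sketch).
    -export([normalize/2]).

    %% Exclusive bounds ('<' on either side) are shifted inwards by one;
    %% inclusive bounds pass through, mirroring the '$1'+1 and '$2'-1
    %% actions in the LowerEndpoint/UpperEndpoint rules above.
    normalize({open, Lower}, {open, Upper})     -> {Lower + 1, Upper - 1};
    normalize({open, Lower}, {closed, Upper})   -> {Lower + 1, Upper};
    normalize({closed, Lower}, {open, Upper})   -> {Lower, Upper - 1};
    normalize({closed, Lower}, {closed, Upper}) -> {Lower, Upper}.

Here normalize({open,0}, {open,10}) returns {1,9}.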
-
-% X.681
-
-
-% X.681 chap 15
-
-%TypeFromObject -> ReferencedObjects '.' FieldName : {'$1','$3'}.
-TypeFromObject -> typereference '.' FieldName : {'$1','$3'}.
-
-ReferencedObjects -> typereference : '$1'.
-%ReferencedObjects -> ParameterizedObject
-%ReferencedObjects -> DefinedObjectSet
-%ReferencedObjects -> ParameterizedObjectSet
-
-FieldName -> typefieldreference : ['$1'].
-FieldName -> valuefieldreference : ['$1'].
-FieldName -> FieldName '.' FieldName : ['$1' | '$3'].
-
-PrimitiveFieldName -> typefieldreference : '$1'.
-PrimitiveFieldName -> valuefieldreference : '$1'.
-
-%ObjectSetAssignment -> typereference DefinedObjectClass '::=' ObjectSet: null.
-ObjectSetAssignment -> typereference typereference '::=' ObjectSet :
- #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec={'ObjectSet',element(3,'$2'), '$4'}}.
-ObjectSetAssignment -> typereference typereference '.' typereference '::=' ObjectSet.
-
-ObjectSet -> '{' ElementSetSpecs '}' : '$2'.
-ObjectSet -> '{' '...' '}' : ['EXTENSIONMARK'].
-
-%ObjectSetElements -> Object.
-% ObjectSetElements -> identifier : '$1'.
-%ObjectSetElements -> DefinedObjectSet.
-%ObjectSetElements -> ObjectSetFromObjects.
-%ObjectSetElements -> ParameterizedObjectSet.
-
-%ObjectAssignment -> identifier DefinedObjectClass '::=' Object.
-ObjectAssignment -> ValueAssignment.
-%ObjectAssignment -> identifier typereference '::=' Object.
-%ObjectAssignment -> identifier typereference '.' typereference '::=' Object.
-
-%Object -> DefinedObject: '$1'.
-%Object -> ExternalObjectReference: '$1'.%Object -> DefinedObject: '$1'.
-Object -> typereference '.' identifier: '$1'.%Object -> DefinedObject: '$1'.
-Object -> identifier: '$1'.%Object -> DefinedObject: '$1'.
-
-%Object -> ObjectDefn -> DefaultSyntax: '$1'.
-Object -> '{' FieldSetting ',' FieldSettings '}' : ['$2'|'$4'].
-Object -> '{' FieldSetting '}' :['$2'].
-
-%% For User-friendly notation
-%% Object -> ObjectDefn -> DefinedSyntax
-Object -> '{' '}'.
-Object -> '{' DefinedSyntaxTokens '}'.
-
-% later Object -> ParameterizedObject: '$1'. look in x.683
-
-%DefinedObject -> ExternalObjectReference: '$1'.
-%DefinedObject -> identifier: '$1'.
-
-DefinedObjectClass -> typereference.
-%DefinedObjectClass -> objectclassreference.
-DefinedObjectClass -> ExternalObjectClassReference.
-%DefinedObjectClass -> typereference '.' objectclassreference.
-%%DefinedObjectClass -> UsefulObjectClassReference.
-
-ExternalObjectReference -> typereference '.' identifier.
-ExternalObjectClassReference -> typereference '.' typereference.
-%%ExternalObjectClassReference -> typereference '.' objectclassreference.
-
-ObjectDefn -> DefaultSyntax: '$1'.
-%ObjectDefn -> DefinedSyntax: '$1'.
-
-ObjectFromObject -> ReferencedObjects '.' FieldName : {'ObjectFromObject','$1','$3'}.
-
-% later look in x.683 ParameterizedObject ->
-
-%DefaultSyntax -> '{' '}'.
-%DefaultSyntax -> '{' FieldSettings '}': '$2'.
-DefaultSyntax -> '{' FieldSetting ',' FieldSettings '}': '$2'.
-DefaultSyntax -> '{' FieldSetting '}': '$2'.
-
-FieldSetting -> PrimitiveFieldName Setting: {'$1','$2'}.
-
-FieldSettings -> FieldSetting ',' FieldSettings: ['$1'|'$3'].
-FieldSettings -> FieldSetting ',' FieldSettings: ['$1'|'$3'].
-FieldSettings -> FieldSetting: '$1'.
-
-%DefinedSyntax -> '{' '}'.
-DefinedSyntax -> '{' DefinedSyntaxTokens '}': '$2'.
-
-DefinedSyntaxTokens -> DefinedSyntaxToken: '$1'.
-DefinedSyntaxTokens -> DefinedSyntaxToken DefinedSyntaxTokens: ['$1'|'$2'].
-
-% expanded DefinedSyntaxToken -> Literal: '$1'.
-%DefinedSyntaxToken -> typereference: '$1'.
-DefinedSyntaxToken -> word: '$1'.
-DefinedSyntaxToken -> ',': '$1'.
-DefinedSyntaxToken -> Setting: '$1'.
-%DefinedSyntaxToken -> '$empty': nil .
-
-% Setting ::= Type|Value|ValueSet|Object|ObjectSet
-Setting -> Type: '$1'.
-%Setting -> Value: '$1'.
-%Setting -> ValueNotNull: '$1'.
-Setting -> BuiltinValue: '$1'.
-Setting -> ValueSet: '$1'.
-%Setting -> Object: '$1'.
-%Setting -> ExternalObjectReference.
-Setting -> typereference '.' identifier.
-Setting -> identifier.
-Setting -> ObjectDefn.
-
-Setting -> ObjectSet: '$1'.
-
-
-Erlang code.
-%%-author('[email protected]').
--copyright('Copyright (c) 1991-99 Ericsson Telecom AB').
--vsn('$Revision: /main/release/1 $').
--include("asn1_records.hrl").
-
-to_set(V) when list(V) ->
- ordsets:list_to_set(V);
-to_set(V) ->
- ordsets:list_to_set([V]).
-
-merge_constraints({Rlist,ExtList}) -> % extensionmarker in constraint
- {merge_constraints(Rlist,[],[]),
- merge_constraints(ExtList,[],[])};
-
-merge_constraints(Clist) ->
- merge_constraints(Clist, [], []).
-
-merge_constraints([Ch|Ct],Cacc, Eacc) ->
- NewEacc = case Ch#constraint.e of
- undefined -> Eacc;
- E -> [E|Eacc]
- end,
- merge_constraints(Ct,[fixup_constraint(Ch#constraint.c)|Cacc],NewEacc);
-
-merge_constraints([],Cacc,[]) ->
- lists:flatten(Cacc);
-merge_constraints([],Cacc,Eacc) ->
- lists:flatten(Cacc) ++ [{'Errors',Eacc}].
-
-fixup_constraint(C) ->
- case C of
- {'SingleValue',V} when list(V) ->
- [C,
- {'ValueRange',{lists:min(V),lists:max(V)}}];
- {'PermittedAlphabet',{'SingleValue',V}} when list(V) ->
- V2 = {'SingleValue',
- ordsets:list_to_set(lists:flatten(V))},
- {'PermittedAlphabet',V2};
- {'PermittedAlphabet',{'SingleValue',V}} ->
- V2 = {'SingleValue',[V]},
- {'PermittedAlphabet',V2};
- {'SizeConstraint',Sc} ->
- {'SizeConstraint',fixup_size_constraint(Sc)};
-
- List when list(List) ->
- [fixup_constraint(Xc)||Xc <- List];
- Other ->
- Other
- end.
-
-fixup_size_constraint({'ValueRange',{Lb,Ub}}) ->
- {Lb,Ub};
-fixup_size_constraint({{'ValueRange',R},[]}) ->
- {R,[]};
-fixup_size_constraint({[],{'ValueRange',R}}) ->
- {[],R};
-fixup_size_constraint({{'ValueRange',R1},{'ValueRange',R2}}) ->
- {R1,R2};
-fixup_size_constraint({'SingleValue',[Sv]}) ->
- fixup_size_constraint({'SingleValue',Sv});
-fixup_size_constraint({'SingleValue',L}) when list(L) ->
- ordsets:list_to_set(L);
-fixup_size_constraint({'SingleValue',L}) ->
- {L,L};
-fixup_size_constraint({C1,C2}) ->
- {fixup_size_constraint(C1), fixup_size_constraint(C2)}.
-
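fixup_constraint/1 above keeps a list-valued 'SingleValue' constraint and appends a derived 'ValueRange' spanning the smallest and largest listed value. A minimal standalone sketch of just that clause (module and function names are illustrative):

    -module(fixup_sketch).
    -export([single_value_with_range/1]).

    %% Mirrors the list clause of fixup_constraint/1 above: keep the
    %% original 'SingleValue' constraint and append a 'ValueRange'
    %% spanning the smallest and largest listed value.
    single_value_with_range({'SingleValue', Vs} = C) when is_list(Vs) ->
        [C, {'ValueRange', {lists:min(Vs), lists:max(Vs)}}].

For example, single_value_with_range({'SingleValue',[4,1,9]}) gives [{'SingleValue',[4,1,9]},{'ValueRange',{1,9}}].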
-
-
-
-
-
-
-
-
-
-
-
-
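The rewritten asn1ct_parser2 below reports syntax errors as {structured_error,{File,Line},Module,Reason} tuples, with Reason rendered by the new format_error/1. A hypothetical helper showing how a caller could print one of these errors (the module and function names in this sketch are illustrative, not part of the compiler):

    -module(error_render_sketch).
    -export([render/1]).

    %% Turn one structured error, as returned inside {error,Errors} by the
    %% new asn1ct_parser2:parse/2 below, into printable chardata.
    render({structured_error, {File, Line}, Module, Reason}) ->
        io_lib:format("~ts:~w: ~ts~n", [File, Line, Module:format_error(Reason)]).

With the parser below, render({structured_error,{"Mod.asn1",42},asn1ct_parser2,{syntax_error,'END'}}) would produce something like "Mod.asn1:42: syntax error before: 'END'".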
diff --git a/lib/asn1/src/asn1ct_parser2.erl b/lib/asn1/src/asn1ct_parser2.erl index 3891fce8d3..488e4af4e0 100644 --- a/lib/asn1/src/asn1ct_parser2.erl +++ b/lib/asn1/src/asn1ct_parser2.erl @@ -20,7 +20,7 @@ %% -module(asn1ct_parser2). --export([parse/1]). +-export([parse/2,format_error/1]). -include("asn1_records.hrl"). %% Only used internally within this module. @@ -28,26 +28,34 @@ -record(constraint, {c,e}). -record(identifier, {pos,val}). -%% parse all types in module -parse(Tokens) -> - case catch parse_ModuleDefinition(Tokens) of - {'EXIT',Reason} -> - {error,{{undefined,get(asn1_module), - [internal,error,'when',parsing,module,definition,Reason]}, - hd(Tokens)}}; - {asn1_error,Reason} -> - {error,{Reason,hd(Tokens)}}; - {ModuleDefinition,Rest1} -> - {Types,Rest2} = parse_AssignmentList(Rest1), - clean_process_dictionary(), - case Rest2 of - [{'END',_}|_Rest3] -> - {ok,ModuleDefinition#module{typeorval = Types}}; - _ -> - {error,{{get_line(hd(Rest2)),get(asn1_module), - [got,get_token(hd(Rest2)),expected,'END']}, - hd(Rest2)}} - end +parse(File0, Tokens0) -> + try do_parse(Tokens0) of + {ok,#module{}}=Result -> + Result + catch + throw:{asn1_error,Fun} when is_function(Fun, 0) -> + handle_parse_error(File0, Fun()); + throw:{asn1_error,{parse_error,Tokens}} -> + handle_parse_error(File0, Tokens) + after + clean_process_dictionary() + end. + +handle_parse_error(File0, [Token|_]) -> + File = filename:basename(File0), + Line = get_line(Token), + Error = {structured_error,{File,Line},?MODULE, + {syntax_error,get_token(Token)}}, + {error,[Error]}. + +do_parse(Tokens0) -> + {ModuleDefinition,Tokens1} = parse_ModuleDefinition(Tokens0), + {Types,Tokens2} = parse_AssignmentList(Tokens1), + case Tokens2 of + [{'END',_}|_Rest3] -> + {ok,ModuleDefinition#module{typeorval=Types}}; + _ -> + parse_error(Tokens2) end. clean_process_dictionary() -> @@ -57,6 +65,11 @@ clean_process_dictionary() -> _ = erase(extensiondefault), ok. +format_error({syntax_error,Token}) when is_atom(Token) -> + io_lib:format("syntax error before: '~s'", [Token]); +format_error({syntax_error,Token}) -> + io_lib:format("syntax error before: '~p'", [Token]). + parse_ModuleDefinition([{typereference,L1,ModuleIdentifier}|Rest0]) -> put(asn1_module,ModuleIdentifier), {_DefinitiveIdentifier,Rest02} = @@ -70,9 +83,7 @@ parse_ModuleDefinition([{typereference,L1,ModuleIdentifier}|Rest0]) -> [{'DEFINITIONS',_}|Rest03] -> Rest03; _ -> - throw({asn1_error,{get_line(hd(Rest02)),get(asn1_module), - [got,get_token(hd(Rest02)), - expected,'DEFINITIONS']}}) + parse_error(Rest02) end, {TagDefault,Rest2} = case Rest of @@ -104,12 +115,11 @@ parse_ModuleDefinition([{typereference,L1,ModuleIdentifier}|Rest0]) -> extensiondefault = ExtensionDefault, exports = Exports, imports = {imports, Imports}}, Rest6}; - _ -> throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module), - [got,get_token(hd(Rest3)),expected,"::= BEGIN"]}}) + _ -> + parse_error(Rest3) end; parse_ModuleDefinition(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,typereference]}}). + parse_error(Tokens). parse_Exports([{'EXPORTS',_L1},{';',_L2}|Rest]) -> {{exports,[]},Rest}; @@ -122,8 +132,7 @@ parse_Exports([{'EXPORTS',_L1}|Rest]) -> [{';',_}|Rest3] -> {{exports,SymbolList},Rest3}; _ -> - throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module), - [got,get_token(hd(Rest2)),expected,';']}}) + parse_error(Rest2) end; parse_Exports(Rest) -> {{exports,all},Rest}. 
@@ -137,29 +146,25 @@ parse_SymbolList(Tokens,Acc) -> [{',',_L1}|Rest2] -> parse_SymbolList(Rest2,[Symbol|Acc]); Rest2 -> - {lists:reverse([Symbol|Acc]),Rest2} + {lists:reverse(Acc, [Symbol]),Rest2} end. parse_Symbol(Tokens) -> parse_Reference(Tokens). parse_Reference([{typereference,L1,TrefName},{'{',_L2},{'}',_L3}|Rest]) -> -% {Tref,Rest}; {tref2Exttref(L1,TrefName),Rest}; parse_Reference([Tref1 = {typereference,_,_},{'.',_},Tref2 = {typereference,_,_}, {'{',_L2},{'}',_L3}|Rest]) -> -% {{Tref1,Tref2},Rest}; {{tref2Exttref(Tref1),tref2Exttref(Tref2)},Rest}; parse_Reference([Tref = {typereference,_L1,_TrefName}|Rest]) -> {tref2Exttref(Tref),Rest}; -parse_Reference([Vref = {identifier,_L1,_VName},{'{',_L2},{'}',_L3}|Rest]) -> +parse_Reference([#identifier{}=Vref,{'{',_L2},{'}',_L3}|Rest]) -> {identifier2Extvalueref(Vref),Rest}; -parse_Reference([Vref = {identifier,_L1,_VName}|Rest]) -> +parse_Reference([#identifier{}=Vref|Rest]) -> {identifier2Extvalueref(Vref),Rest}; parse_Reference(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected, - [typereference,identifier]]}}). + parse_error(Tokens). parse_Imports([{'IMPORTS',_L1},{';',_L2}|Rest]) -> {{imports,[]},Rest}; @@ -168,9 +173,8 @@ parse_Imports([{'IMPORTS',_L1}|Rest]) -> case Rest2 of [{';',_L2}|Rest3] -> {{imports,SymbolsFromModuleList},Rest3}; - Rest3 -> - throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module), - [got,get_token(hd(Rest3)),expected,';']}}) + _ -> + parse_error(Rest2) end; parse_Imports(Tokens) -> {{imports,[]},Tokens}. @@ -180,11 +184,12 @@ parse_SymbolsFromModuleList(Tokens) -> parse_SymbolsFromModuleList(Tokens,Acc) -> {SymbolsFromModule,Rest} = parse_SymbolsFromModule(Tokens), - case (catch parse_SymbolsFromModule(Rest)) of + try parse_SymbolsFromModule(Rest) of {Sl,_Rest2} when is_record(Sl,'SymbolsFromModule') -> - parse_SymbolsFromModuleList(Rest,[SymbolsFromModule|Acc]); - _ -> - {lists:reverse([SymbolsFromModule|Acc]),Rest} + parse_SymbolsFromModuleList(Rest, [SymbolsFromModule|Acc]) + catch + throw:{asn1_error,_} -> + {lists:reverse(Acc, [SymbolsFromModule]),Rest} end. parse_SymbolsFromModule(Tokens) -> @@ -198,169 +203,154 @@ parse_SymbolsFromModule(Tokens) -> end, {SymbolList,Rest} = parse_SymbolList(Tokens), case Rest of - [{'FROM',_L1},Tref = {typereference,_,Name},Ref={identifier,_L2,_Id},C={',',_}|Rest2] -> - NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList), + [{'FROM',_L1},{typereference,_,Name}=Tref| + [#identifier{},{',',_}|_]=Rest2] -> + NewSymbolList = lists:map(SetRefModuleName(Name), SymbolList), {#'SymbolsFromModule'{symbols=NewSymbolList, - module=tref2Exttref(Tref)},[Ref,C|Rest2]}; + module=tref2Exttref(Tref)},Rest2}; %% This a special case when there is only one Symbol imported %% from the next module. No other way to distinguish Ref from %% a part of the GlobalModuleReference of Name. 
- [{'FROM',_L1},Tref = {typereference,_,Name},Ref = {identifier,_L2,_Id},From = {'FROM',_}|Rest2] -> - NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList), + [{'FROM',_L1},{typereference,_,Name}=Tref| + [#identifier{},{'FROM',_}|_]=Rest2] -> + NewSymbolList = lists:map(SetRefModuleName(Name), SymbolList), {#'SymbolsFromModule'{symbols=NewSymbolList, - module=tref2Exttref(Tref)},[Ref,From|Rest2]}; - [{'FROM',_L1},Tref = {typereference,_,Name},{identifier,_L2,_Id}|Rest2] -> - NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList), + module=tref2Exttref(Tref)},Rest2}; + [{'FROM',_L1},{typereference,_,Name}=Tref,#identifier{}|Rest2] -> + NewSymbolList = lists:map(SetRefModuleName(Name), SymbolList), {#'SymbolsFromModule'{symbols=NewSymbolList, module=tref2Exttref(Tref)},Rest2}; - [{'FROM',_L1},Tref = {typereference,_,Name},Brace = {'{',_}|Rest2] -> - {_ObjIdVal,Rest3} = parse_ObjectIdentifierValue([Brace|Rest2]), % value not used yet, fix me - NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList), + [{'FROM',_L1},{typereference,_,Name}=Tref|[{'{',_}|_]=Rest2] -> + {_ObjIdVal,Rest3} = parse_ObjectIdentifierValue(Rest2), % value not used yet, fix me + NewSymbolList = lists:map(SetRefModuleName(Name), SymbolList), {#'SymbolsFromModule'{symbols=NewSymbolList, module=tref2Exttref(Tref)},Rest3}; - [{'FROM',_L1},Tref = {typereference,_,Name}|Rest2] -> - NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList), + [{'FROM',_L1},{typereference,_,Name}=Tref|Rest2] -> + NewSymbolList = lists:map(SetRefModuleName(Name), SymbolList), {#'SymbolsFromModule'{symbols=NewSymbolList, module=tref2Exttref(Tref)},Rest2}; _ -> - throw({asn1_error,{get_line(hd(Rest)),get(asn1_module), - [got,get_token(hd(Rest)),expected, - ['FROM typerefernece identifier ,', - 'FROM typereference identifier', - 'FROM typereference {', - 'FROM typereference']]}}) + parse_error(Rest) end. parse_ObjectIdentifierValue([{'{',_}|Rest]) -> parse_ObjectIdentifierValue(Rest,[]). 
-parse_ObjectIdentifierValue([{number,_,Num}|Rest],Acc) -> +parse_ObjectIdentifierValue([{number,_,Num}|Rest], Acc) -> parse_ObjectIdentifierValue(Rest,[Num|Acc]); -parse_ObjectIdentifierValue([{identifier,_,Id},{'(',_}, {number,_,Num}, {')',_}|Rest],Acc) -> +parse_ObjectIdentifierValue([#identifier{val=Id},{'(',_},{number,_,Num},{')',_}|Rest], Acc) -> parse_ObjectIdentifierValue(Rest,[{'NamedNumber',Id,Num}|Acc]); -parse_ObjectIdentifierValue([{identifier,_,Id},{'(',_}, {identifier,_,Id2}, {')',_}|Rest],Acc) -> +parse_ObjectIdentifierValue([#identifier{val=Id},{'(',_},#identifier{val=Id2},{')',_}|Rest], Acc) -> parse_ObjectIdentifierValue(Rest,[{'NamedNumber',Id,Id2}|Acc]); -parse_ObjectIdentifierValue([{identifier,_,Id},{'(',_}, {typereference,_,Tref},{'.',_},{identifier,_,Id2}, {')',_}|Rest],Acc) -> - parse_ObjectIdentifierValue(Rest,[{'NamedNumber',Id,{'ExternalValue',Tref,Id2}}|Acc]); -parse_ObjectIdentifierValue([Id = {identifier,_,_}|Rest],Acc) -> - parse_ObjectIdentifierValue(Rest,[identifier2Extvalueref(Id)|Acc]); -parse_ObjectIdentifierValue([{'}',_}|Rest],Acc) -> +parse_ObjectIdentifierValue([#identifier{val=Id},{'(',_},{typereference,_,Tref},{'.',_},#identifier{val=Id2}, {')',_}|Rest], Acc) -> + parse_ObjectIdentifierValue(Rest, [{'NamedNumber',Id,{'ExternalValue',Tref,Id2}}|Acc]); +parse_ObjectIdentifierValue([#identifier{}=Id|Rest], Acc) -> + parse_ObjectIdentifierValue(Rest, [identifier2Extvalueref(Id)|Acc]); +parse_ObjectIdentifierValue([{'}',_}|Rest], Acc) -> {lists:reverse(Acc),Rest}; -parse_ObjectIdentifierValue([H|_T],_Acc) -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected, - ['{ some of the following }',number,'identifier ( number )', - 'identifier ( identifier )', - 'identifier ( typereference.identifier)',identifier]]}}). +parse_ObjectIdentifierValue(Tokens, _Acc) -> + parse_error(Tokens). -parse_AssignmentList(Tokens = [{'END',_}|_Rest]) -> - {[],Tokens}; -parse_AssignmentList(Tokens = [{'$end',_}|_Rest]) -> - {[],Tokens}; parse_AssignmentList(Tokens) -> - parse_AssignmentList(Tokens,[]). + parse_AssignmentList(Tokens, []). -parse_AssignmentList(Tokens= [{'END',_}|_Rest],Acc) -> +parse_AssignmentList([{'END',_}|_]=Tokens, Acc) -> {lists:reverse(Acc),Tokens}; -parse_AssignmentList(Tokens= [{'$end',_}|_Rest],Acc) -> +parse_AssignmentList([{'$end',_}|_]=Tokens, Acc) -> {lists:reverse(Acc),Tokens}; -parse_AssignmentList(Tokens,Acc) -> - case (catch parse_Assignment(Tokens)) of - {'EXIT',Reason} -> - exit(Reason); - {asn1_error,R} -> -% [H|T] = Tokens, - throw({error,{R,hd(Tokens)}}); - {Assignment,Rest} -> - parse_AssignmentList(Rest,[Assignment|Acc]) - end. - -parse_Assignment(Tokens) -> - Flist = [fun parse_TypeAssignment/1, - fun parse_ValueAssignment/1, - fun parse_ObjectClassAssignment/1, - fun parse_ObjectAssignment/1, - fun parse_ObjectSetAssignment/1, - fun parse_ParameterizedAssignment/1, +parse_AssignmentList(Tokens0, Acc) -> + {Assignment,Tokens} = parse_Assignment(Tokens0), + parse_AssignmentList(Tokens, [Assignment|Acc]). 
+ +parse_Assignment([{typereference,L1,Name},{'::=',_}|Tokens0]) -> + %% 1) Type ::= TypeDefinition + %% 2) CLASS-NAME ::= CLASS {...} + Flist = [{type,fun parse_Type/1}, + {class,fun parse_ObjectClass/1}], + case parse_or_tag(Tokens0, Flist) of + {{type,Type},Tokens} -> + %% TypeAssignment + {#typedef{pos=L1,name=Name,typespec=Type},Tokens}; + {{class,Type},Tokens} -> + %% ObjectClassAssignment + {#classdef{pos=L1,name=Name,module=resolve_module(Type), + typespec=Type},Tokens} + end; +parse_Assignment([{typereference,_,_},{'{',_}|_]=Tokens) -> + %% 1) Type{...} ::= ... + %% 2) ValueSet{...} Type ::= ... + %% ObjectSet{...} CLASS-NAME ::= CLASS {...} + %% 3) CLASS-NAME{...} ::= CLASS {...} + %% A parameterized value set and and a parameterized object set + %% cannot be distinguished from each other without type information. + Flist = [fun parse_ParameterizedTypeAssignment/1, + fun parse_ParameterizedValueSetTypeAssignment/1, + fun parse_ParameterizedObjectClassAssignment/1], + parse_or(Tokens, Flist); +parse_Assignment([{typereference,_,_}|_]=Tokens) -> + %% 1) ObjectSet CLASS-NAME ::= ... + %% 2) ValueSet Type ::= ... + Flist = [fun parse_ObjectSetAssignment/1, fun parse_ValueSetTypeAssignment/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); - {asn1_assignment_error,Reason} -> - throw({asn1_error,Reason}); - Result -> - Result - end. - + parse_or(Tokens, Flist); +parse_Assignment([#identifier{},{'{',_}|_]=Tokens) -> + %% 1) value{...} Type ::= ... + %% 2) object{...} CLASS-NAME ::= ... + Flist = [fun parse_ParameterizedValueAssignment/1, + fun parse_ParameterizedObjectAssignment/1], + parse_or(Tokens, Flist); +parse_Assignment([#identifier{}|_]=Tokens) -> + %% 1) value Type ::= ... + %% 2) object CLASS-NAME ::= ... + Flist = [fun parse_ValueAssignment/1, + fun parse_ObjectAssignment/1], + parse_or(Tokens, Flist); +parse_Assignment(Tokens) -> + parse_error(Tokens). parse_or(Tokens,Flist) -> parse_or(Tokens,Flist,[]). -parse_or(_Tokens,[],ErrList) -> - case ErrList of - [] -> - throw({asn1_error,{parse_or,ErrList}}); - L when is_list(L) -> - %% chose to throw 1) the error with the highest line no, - %% 2) the last error which is not a asn1_assignment_error or - %% 3) the last error. - throw(prioritize_error(ErrList)) +parse_or(Tokens, [Fun|Funs], ErrList) when is_function(Fun, 1) -> + try Fun(Tokens) of + {_,Rest}=Result when is_list(Rest) -> + Result + catch + throw:{asn1_error,Error} -> + parse_or(Tokens, Funs, [Error|ErrList]) end; -parse_or(Tokens,[Fun|Frest],ErrList) -> - case (catch Fun(Tokens)) of - Exit = {'EXIT',_Reason} -> - parse_or(Tokens,Frest,[Exit|ErrList]); - AsnErr = {asn1_error,_} -> - parse_or(Tokens,Frest,[AsnErr|ErrList]); - AsnAssErr = {asn1_assignment_error,_} -> - parse_or(Tokens,Frest,[AsnAssErr|ErrList]); - Result = {_,L} when is_list(L) -> - Result; - Error -> - parse_or(Tokens,Frest,[Error|ErrList]) - end. - -parse_or_tag(Tokens,Flist) -> - parse_or_tag(Tokens,Flist,[]). - -parse_or_tag(_Tokens,[],ErrList) -> - case ErrList of - [] -> - throw({asn1_error,{parse_or_tag,ErrList}}); - L when is_list(L) -> - %% chose to throw 1) the error with the highest line no, - %% 2) the last error which is not a asn1_assignment_error or - %% 3) the last error. - throw(prioritize_error(ErrList)) +parse_or(_Tokens, [], ErrList) -> + throw({asn1_error,fun() -> prioritize_error(ErrList) end}). + +parse_or_tag(Tokens, Flist) -> + parse_or_tag(Tokens, Flist, []). 
+ +parse_or_tag(Tokens, [{Tag,Fun}|Funs], ErrList) when is_function(Fun, 1) -> + try Fun(Tokens) of + {Parsed,Rest} when is_list(Rest) -> + {{Tag,Parsed},Rest} + catch + throw:{asn1_error,Error} -> + parse_or_tag(Tokens, Funs, [Error|ErrList]) end; -parse_or_tag(Tokens,[{Tag,Fun}|Frest],ErrList) when is_function(Fun) -> - case (catch Fun(Tokens)) of - Exit = {'EXIT',_Reason} -> - parse_or_tag(Tokens,Frest,[Exit|ErrList]); - AsnErr = {asn1_error,_} -> - parse_or_tag(Tokens,Frest,[AsnErr|ErrList]); - AsnAssErr = {asn1_assignment_error,_} -> - parse_or_tag(Tokens,Frest,[AsnAssErr|ErrList]); - {ParseRes,Rest} when is_list(Rest) -> - {{Tag,ParseRes},Rest}; - Error -> - parse_or_tag(Tokens,Frest,[Error|ErrList]) - end. +parse_or_tag(_Tokens, [], ErrList) -> + throw({asn1_error,fun() -> prioritize_error(ErrList) end}). + +prioritize_error(Errors0) -> + Errors1 = prioritize_error_1(Errors0), + Errors2 = [{length(L),L} || L <- Errors1], + Errors = lists:sort(Errors2), + [Res|_] = [L || {_,L} <- Errors], + Res. + +prioritize_error_1([F|T]) when is_function(F, 0) -> + [F()|prioritize_error_1(T)]; +prioritize_error_1([{parse_error,Tokens}|T]) -> + [Tokens|prioritize_error_1(T)]; +prioritize_error_1([]) -> + []. -parse_TypeAssignment([{typereference,L1,Tref},{'::=',_}|Rest]) -> - {Type,Rest2} = parse_Type(Rest), - {#typedef{pos=L1,name=Tref,typespec=Type},Rest2}; -parse_TypeAssignment([H1,H2|_Rest]) -> - throw({asn1_assignment_error,{get_line(H1),get(asn1_module), - [got,[get_token(H1),get_token(H2)], expected, - typereference,'::=']}}); -parse_TypeAssignment([H|_T]) -> - throw({asn1_assignment_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected, - typereference]}}). %% parse_Type(Tokens) -> Ret %% @@ -370,9 +360,8 @@ parse_TypeAssignment([H|_T]) -> %% parse_Type(Tokens) -> {Tag,Rest3} = case Tokens of - [Lbr= {'[',_}|Rest] -> - parse_Tag([Lbr|Rest]); - Rest-> {[],Rest} + [{'[',_}|_] -> parse_Tag(Tokens); + _ -> {[],Tokens} end, {Tag2,Rest4} = case Rest3 of [{'IMPLICIT',_}|Rest31] when is_record(Tag,tag)-> @@ -384,31 +373,17 @@ parse_Type(Tokens) -> Rest31 -> {Tag,Rest31} end, - Flist = [fun parse_BuiltinType/1,fun parse_ReferencedType/1,fun parse_TypeWithConstraint/1], - {Type,Rest5} = case (catch parse_or(Rest4,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_Reason} -> - throw(AsnErr); - Result -> - Result - end, - case hd(Rest5) of - {'(',_} -> + Flist = [fun parse_BuiltinType/1, + fun parse_ReferencedType/1, + fun parse_TypeWithConstraint/1], + {Type,Rest5} = parse_or(Rest4, Flist), + case Rest5 of + [{'(',_}|_] -> {Constraints,Rest6} = parse_Constraints(Rest5), - if is_record(Type,type) -> - {Type#type{constraint=merge_constraints(Constraints), - tag=Tag2},Rest6}; - true -> - {#type{def=Type,constraint=merge_constraints(Constraints), - tag=Tag2},Rest6} - end; - _ -> - if is_record(Type,type) -> - {Type#type{tag=Tag2},Rest5}; - true -> - {#type{def=Type,tag=Tag2},Rest5} - end + {Type#type{tag=Tag2, + constraint=merge_constraints(Constraints)},Rest6}; + [_|_] -> + {Type#type{tag=Tag2},Rest5} end. 
parse_BuiltinType([{'BIT',_},{'STRING',_}|Rest]) -> @@ -419,11 +394,10 @@ parse_BuiltinType([{'BIT',_},{'STRING',_}|Rest]) -> [{'}',_}|Rest4] -> {#type{def={'BIT STRING',NamedNumberList}},Rest4}; _ -> - throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module), - [got,get_token(hd(Rest3)),expected,'}']}}) + parse_error(Rest3) end; _ -> - {{'BIT STRING',[]},Rest} + {#type{def={'BIT STRING',[]}},Rest} end; parse_BuiltinType([{'BOOLEAN',_}|Rest]) -> {#type{def='BOOLEAN'},Rest}; @@ -435,41 +409,33 @@ parse_BuiltinType([{'CHARACTER',_},{'STRING',_}|Rest]) -> {#type{def='CHARACTER STRING'},Rest}; parse_BuiltinType([{'CHOICE',_},{'{',_}|Rest]) -> - {AlternativeTypeLists,Rest2} = parse_AlternativeTypeLists(Rest), - AlternativeTypeLists1 = - lists:filter(fun(#'ExtensionAdditionGroup'{}) -> false; - ('ExtensionAdditionGroupEnd') -> false; - (_) -> true - end,AlternativeTypeLists), + {L0,Rest2} = parse_AlternativeTypeLists(Rest), case Rest2 of [{'}',_}|Rest3] -> - AlternativeTypeLists2 = - case {[Ext||Ext = #'EXTENSIONMARK'{} <- AlternativeTypeLists1], - get(extensiondefault)} of - {[],'IMPLIED'} -> AlternativeTypeLists1 ++ [#'EXTENSIONMARK'{}]; - _ -> AlternativeTypeLists1 + NeedExt = not lists:keymember('EXTENSIONMARK', 1, L0) andalso + get(extensiondefault) =:= 'IMPLIED', + L = case NeedExt of + true -> + L0 ++ [#'EXTENSIONMARK'{}]; + false -> + L0 end, - - {#type{def={'CHOICE',AlternativeTypeLists2}},Rest3}; + {#type{def={'CHOICE',L}},Rest3}; _ -> - throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module), - [got,get_token(hd(Rest2)),expected,'}']}}) + parse_error(Rest2) end; parse_BuiltinType([{'EMBEDDED',_},{'PDV',_}|Rest]) -> {#type{def='EMBEDDED PDV'},Rest}; parse_BuiltinType([{'ENUMERATED',_},{'{',_}|Rest]) -> - {Enumerations,Rest2} = parse_Enumerations(Rest,get(extensiondefault)), + {Enumerations,Rest2} = parse_Enumerations(Rest), case Rest2 of [{'}',_}|Rest3] -> {#type{def={'ENUMERATED',Enumerations}},Rest3}; _ -> - throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module), - [got,get_token(hd(Rest2)),expected,'}']}}) + parse_error(Rest2) end; parse_BuiltinType([{'EXTERNAL',_}|Rest]) -> {#type{def='EXTERNAL'},Rest}; - -% InstanceOfType parse_BuiltinType([{'INSTANCE',_},{'OF',_}|Rest]) -> {DefinedObjectClass,Rest2} = parse_DefinedObjectClass(Rest), case Rest2 of @@ -480,9 +446,6 @@ parse_BuiltinType([{'INSTANCE',_},{'OF',_}|Rest]) -> _ -> {#type{def={'INSTANCE OF',DefinedObjectClass,[]}},Rest2} end; - -% parse_BuiltinType(Tokens) -> - parse_BuiltinType([{'INTEGER',_}|Rest]) -> case Rest of [{'{',_}|Rest2] -> @@ -491,17 +454,13 @@ parse_BuiltinType([{'INTEGER',_}|Rest]) -> [{'}',_}|Rest4] -> {#type{def={'INTEGER',NamedNumberList}},Rest4}; _ -> - throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module), - [got,get_token(hd(Rest3)),expected,'}']}}) + parse_error(Rest3) end; _ -> {#type{def='INTEGER'},Rest} end; parse_BuiltinType([{'NULL',_}|Rest]) -> {#type{def='NULL'},Rest}; - -% ObjectClassFieldType fix me later - parse_BuiltinType([{'OBJECT',_},{'IDENTIFIER',_}|Rest]) -> {#type{def='OBJECT IDENTIFIER'},Rest}; parse_BuiltinType([{'OCTET',_},{'STRING',_}|Rest]) -> @@ -529,18 +488,14 @@ parse_BuiltinType([{'SEQUENCE',_},{'{',_},{'...',Line},{'!',_}|Rest]) -> parse_ComponentTypeLists2(Rest2,[#'EXTENSIONMARK'{pos=Line}]), case Rest3 of [{'}',_}|Rest4] -> - {#type{def=#'SEQUENCE'{components=ComponentTypeLists}},Rest4}; + {#type{def=#'SEQUENCE'{components=ComponentTypeLists}},Rest4}; _ -> - throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module), - [got,get_token(hd(Rest3)),expected,'}']}}) + 
parse_error(Rest3) end -% _ -> % Seq case 4,17-19,23-26 will fail here -% throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module), -% [got,get_token(hd(Rest2)),expected,'}']}}) end; parse_BuiltinType([{'SEQUENCE',_},{'{',_}|Rest]) -> {ComponentTypeLists,Rest2} = parse_ComponentTypeLists(Rest), - case Rest2 of + case Rest2 of [{'}',_}|Rest3] -> ComponentTypeLists2 = case {[Ext||Ext = #'EXTENSIONMARK'{} <- ComponentTypeLists], @@ -551,25 +506,19 @@ parse_BuiltinType([{'SEQUENCE',_},{'{',_}|Rest]) -> {#type{def=#'SEQUENCE'{components = ComponentTypeLists2}}, Rest3}; _ -> - throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module), - [got,get_token(hd(Rest2)),expected,'}']}}) + parse_error(Rest2) end; - -parse_BuiltinType([{'SEQUENCE',_},{'OF',_},Id={identifier,_,_},Lt={'<',_}|Rest]) -> -%% TODO: take care of the identifier for something useful - {Type,Rest2} = parse_SelectionType([Id,Lt|Rest]), - {#type{def={'SEQUENCE OF',#type{def=Type,tag=[]}}},Rest2}; - -parse_BuiltinType([{'SEQUENCE',_},{'OF',_},{identifier,_,_} |Rest]) -> +parse_BuiltinType([{'SEQUENCE',_},{'OF',_}| + [#identifier{},{'<',_}|_]=Tokens0]) -> + {Type,Tokens} = parse_SelectionType(Tokens0), + {#type{def={'SEQUENCE OF',Type}},Tokens}; +parse_BuiltinType([{'SEQUENCE',_},{'OF',_},#identifier{} |Rest]) -> %% TODO: take care of the identifier for something useful {Type,Rest2} = parse_Type(Rest), {#type{def={'SEQUENCE OF',Type}},Rest2}; - parse_BuiltinType([{'SEQUENCE',_},{'OF',_}|Rest]) -> {Type,Rest2} = parse_Type(Rest), {#type{def={'SEQUENCE OF',Type}},Rest2}; - - parse_BuiltinType([{'SET',_},{'{',_},{'...',Line},{'}',_}|Rest]) -> {#type{def=#'SET'{components=[#'EXTENSIONMARK'{pos = Line}]}},Rest}; parse_BuiltinType([{'SET',_},{'{',_},{'...',Line},{'!',_}|Rest]) -> @@ -581,12 +530,18 @@ parse_BuiltinType([{'SET',_},{'{',_},{'...',Line},{'!',_}|Rest]) -> val = ExceptionIdentification}]}}, Rest3}; _ -> - throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module), - [got,get_token(hd(Rest2)),expected,'}']}}) + {ComponentTypeLists,Rest3}= + parse_ComponentTypeLists2(Rest2,[#'EXTENSIONMARK'{pos=Line}]), + case Rest3 of + [{'}',_}|Rest4] -> + {#type{def=#'SET'{components=ComponentTypeLists}},Rest4}; + _ -> + parse_error(Rest3) + end end; parse_BuiltinType([{'SET',_},{'{',_}|Rest]) -> {ComponentTypeLists,Rest2} = parse_ComponentTypeLists(Rest), - case Rest2 of + case Rest2 of [{'}',_}|Rest3] -> ComponentTypeLists2 = case {[Ext||Ext = #'EXTENSIONMARK'{} <- ComponentTypeLists], @@ -597,184 +552,128 @@ parse_BuiltinType([{'SET',_},{'{',_}|Rest]) -> {#type{def=#'SET'{components = ComponentTypeLists2}}, Rest3}; _ -> - throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module), - [got,get_token(hd(Rest2)),expected,'}']}}) + parse_error(Rest2) end; - -parse_BuiltinType([{'SET',_},{'OF',_},Id={identifier,_,_},Lt={'<',_}|Rest]) -> -%% TODO: take care of the identifier for something useful - {Type,Rest2} = parse_SelectionType([Id,Lt|Rest]), - {#type{def={'SET OF',#type{def=Type,tag=[]}}},Rest2}; - - -parse_BuiltinType([{'SET',_},{'OF',_},{identifier,_,_}|Rest]) -> +parse_BuiltinType([{'SET',_},{'OF',_}| + [#identifier{},{'<',_}|_]=Tokens0]) -> + {Type,Tokens} = parse_SelectionType(Tokens0), + {#type{def={'SET OF',Type}},Tokens}; +parse_BuiltinType([{'SET',_},{'OF',_},#identifier{}|Rest]) -> %%TODO: take care of the identifier for something useful {Type,Rest2} = parse_Type(Rest), {#type{def={'SET OF',Type}},Rest2}; - parse_BuiltinType([{'SET',_},{'OF',_}|Rest]) -> {Type,Rest2} = parse_Type(Rest), {#type{def={'SET OF',Type}},Rest2}; - -%% The so called 
Useful types parse_BuiltinType([{'GeneralizedTime',_}|Rest]) -> {#type{def='GeneralizedTime'},Rest}; parse_BuiltinType([{'UTCTime',_}|Rest]) -> {#type{def='UTCTime'},Rest}; parse_BuiltinType([{'ObjectDescriptor',_}|Rest]) -> {#type{def='ObjectDescriptor'},Rest}; - -%% For compatibility with old standard -parse_BuiltinType([{'ANY',_},{'DEFINED',_},{'BY',_},{identifier,_,Id}|Rest]) -> +parse_BuiltinType([{'ANY',_},{'DEFINED',_},{'BY',_},#identifier{val=Id}|Rest]) -> + %% For compatibility with the old standard. {#type{def={'ANY_DEFINED_BY',Id}},Rest}; parse_BuiltinType([{'ANY',_}|Rest]) -> + %% For compatibility with the old standard. {#type{def='ANY'},Rest}; - parse_BuiltinType(Tokens) -> parse_ObjectClassFieldType(Tokens). -% throw({asn1_error,unhandled_type}). -parse_TypeWithConstraint([{'SEQUENCE',_},Lpar = {'(',_}|Rest]) -> - {Constraint,Rest2} = parse_Constraint([Lpar|Rest]), +parse_TypeWithConstraint([{'SEQUENCE',_}|[{'(',_}|_]=Rest0]) -> + {Constraint,Rest2} = parse_Constraint(Rest0), Rest4 = case Rest2 of - [{'OF',_}, {identifier,_,_Id}|Rest3] -> + [{'OF',_},#identifier{}|Rest3] -> %%% TODO: make some use of the identifier, maybe useful in the XML mapping Rest3; [{'OF',_}|Rest3] -> Rest3; _ -> - throw({asn1_error, - {get_line(hd(Rest2)),get(asn1_module), - [got,get_token(hd(Rest2)),expected,'OF']}}) + parse_error(Rest2) end, {Type,Rest5} = parse_Type(Rest4), {#type{def = {'SEQUENCE OF',Type}, constraint = merge_constraints([Constraint])},Rest5}; -parse_TypeWithConstraint([{'SEQUENCE',_},{'SIZE',_},Lpar = {'(',_}|Rest]) -> - {Constraint,Rest2} = parse_Constraint([Lpar|Rest]), +parse_TypeWithConstraint([{'SEQUENCE',_},{'SIZE',_}|[{'(',_}|_]=Rest0]) -> + {Constraint,Rest2} = parse_Constraint(Rest0), #constraint{c=C} = Constraint, - Constraint2 = Constraint#constraint{c={'SizeConstraint',C}}, + Constraint2 = Constraint#constraint{c={element_set,{'SizeConstraint',C}, + none}}, Rest4 = case Rest2 of - [{'OF',_}, {identifier,_,_Id}|Rest3] -> + [{'OF',_},#identifier{}|Rest3] -> %%% TODO: make some use of the identifier, maybe useful in the XML mapping Rest3; [{'OF',_}|Rest3] -> Rest3; _ -> - throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module), - [got,get_token(hd(Rest2)),expected,'OF']}}) + parse_error(Rest2) end, {Type,Rest5} = parse_Type(Rest4), {#type{def = {'SEQUENCE OF',Type}, constraint = merge_constraints([Constraint2])},Rest5}; -parse_TypeWithConstraint([{'SET',_},Lpar = {'(',_}|Rest]) -> - {Constraint,Rest2} = parse_Constraint([Lpar|Rest]), +parse_TypeWithConstraint([{'SET',_}|[{'(',_}|_]=Rest0]) -> + {Constraint,Rest2} = parse_Constraint(Rest0), Rest4 = case Rest2 of - [{'OF',_}, {identifier,_,_Id}|Rest3] -> + [{'OF',_},#identifier{}|Rest3] -> %%% TODO: make some use of the identifier, maybe useful in the XML mapping Rest3; [{'OF',_}|Rest3] -> Rest3; _ -> - throw({asn1_error, - {get_line(hd(Rest2)),get(asn1_module), - [got,get_token(hd(Rest2)),expected,'OF']}}) + parse_error(Rest2) end, {Type,Rest5} = parse_Type(Rest4), {#type{def = {'SET OF',Type}, constraint = merge_constraints([Constraint])},Rest5}; -parse_TypeWithConstraint([{'SET',_},{'SIZE',_},Lpar = {'(',_}|Rest]) -> - {Constraint,Rest2} = parse_Constraint([Lpar|Rest]), +parse_TypeWithConstraint([{'SET',_},{'SIZE',_}|[{'(',_}|_]=Rest0]) -> + {Constraint,Rest2} = parse_Constraint(Rest0), #constraint{c=C} = Constraint, - Constraint2 = Constraint#constraint{c={'SizeConstraint',C}}, + Constraint2 = Constraint#constraint{c={element_set, + {'SizeConstraint',C},none}}, Rest4 = case Rest2 of - [{'OF',_}, 
{identifier,_,_Id}|Rest3] -> + [{'OF',_},#identifier{}|Rest3] -> %%% TODO: make some use of the identifier, maybe useful in the XML mapping Rest3; [{'OF',_}|Rest3] -> Rest3; _ -> - throw({asn1_error, - {get_line(hd(Rest2)),get(asn1_module), - [got,get_token(hd(Rest2)),expected,'OF']}}) + parse_error(Rest2) end, {Type,Rest5} = parse_Type(Rest4), {#type{def = {'SET OF',Type}, constraint = merge_constraints([Constraint2])},Rest5}; parse_TypeWithConstraint(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected, - ['SEQUENCE','SEQUENCE SIZE','SET','SET SIZE'], - followed,by,a,constraint]}}). + parse_error(Tokens). %% -------------------------- parse_ReferencedType(Tokens) -> - Flist = [fun parse_DefinedType/1, + Flist = [fun parse_ParameterizedType/1, + fun parse_DefinedType/1, fun parse_SelectionType/1, - fun parse_TypeFromObject/1, - fun parse_ValueSetFromObjects/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); - Result -> - Result - end. + fun parse_TypeFromObject/1], + parse_or(Tokens, Flist). -parse_DefinedType(Tokens=[{typereference,_,_},{'{',_}|_Rest]) -> - parse_ParameterizedType(Tokens); -parse_DefinedType(Tokens=[{typereference,L1,TypeName}, - T2={typereference,_,_},T3={'{',_}|Rest]) -> - case (catch parse_ParameterizedType(Tokens)) of - {'EXIT',_Reason} -> - Rest2 = [T2,T3|Rest], - {#type{def = #'Externaltypereference'{pos=L1, - module=resolve_module(TypeName), - type=TypeName}},Rest2}; - {asn1_error,_} -> - Rest2 = [T2,T3|Rest], - {#type{def = #'Externaltypereference'{pos=L1, - module=resolve_module(TypeName), - type=TypeName}},Rest2}; - Result -> - Result - end; -parse_DefinedType(Tokens=[{typereference,_L1,_Module},{'.',_}, - {typereference,_,_TypeName},{'{',_}|_Rest]) -> - parse_ParameterizedType(Tokens); -parse_DefinedType([{typereference,L1,Module},{'.',_},{typereference,_,TypeName}|Rest]) -> - {#type{def = #'Externaltypereference'{pos=L1,module=Module,type=TypeName}},Rest}; -parse_DefinedType([{typereference,L1,TypeName}|Rest]) -> - case is_pre_defined_class(TypeName) of - false -> - {#type{def = #'Externaltypereference'{pos=L1,module=resolve_module(TypeName), - type=TypeName}},Rest}; - _ -> - throw({asn1_error, - {L1,get(asn1_module), - [got,TypeName,expected, - [typereference,'typereference.typereference', - 'typereference typereference']]}}) - end; +parse_DefinedType([{typereference,L1,Module}, + {'.',_}, + {typereference,_,TypeName}|Tokens]) -> + {#type{def = #'Externaltypereference'{pos=L1,module=Module, + type=TypeName}},Tokens}; +parse_DefinedType([{typereference,_,_}=Tr|Tokens]) -> + {#type{def=tref2Exttref(Tr)},Tokens}; parse_DefinedType(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected, - [typereference,'typereference.typereference', - 'typereference typereference']]}}). + parse_error(Tokens). -parse_SelectionType([{identifier,_,Name},{'<',_}|Rest]) -> +parse_SelectionType([#identifier{val=Name},{'<',_}|Rest]) -> {Type,Rest2} = parse_Type(Rest), - {{'SelectionType',Name,Type},Rest2}; + {#type{def={'SelectionType',Name,Type}},Rest2}; parse_SelectionType(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,'identifier <']}}). + parse_error(Tokens). 
resolve_module(Type) -> @@ -787,30 +686,13 @@ resolve_module(_Type, Current, undefined) -> resolve_module(Type, Current, Imports) -> case [Mod || #'SymbolsFromModule'{symbols = S, module = Mod} <- Imports, #'Externaltypereference'{type = T} <- S, - Type == T] of + Type =:= T] of [#'Externaltypereference'{type = Mod}|_] -> Mod; %% This allows the same symbol to be imported several times %% which ought to be checked elsewhere and flagged as an error [] -> Current end. -%% -------------------------- - - -%% This should probably be removed very soon -% parse_ConstrainedType(Tokens) -> -% case (catch parse_TypeWithConstraint(Tokens)) of -% {'EXIT',Reason} -> -% {Type,Rest} = parse_Type(Tokens), -% {Constraint,Rest2} = parse_Constraint(Rest), -% {Type#type{constraint=Constraint},Rest2}; -% {asn1_error,Reason2} -> -% {Type,Rest} = parse_Type(Tokens), -% {Constraint,Rest2} = parse_Constraint(Rest), -% {Type#type{constraint=Constraint},Rest2}; -% Result -> -% Result -% end. parse_Constraints(Tokens) -> parse_Constraints(Tokens,[]). @@ -819,9 +701,9 @@ parse_Constraints(Tokens,Acc) -> {Constraint,Rest} = parse_Constraint(Tokens), case Rest of [{'(',_}|_Rest2] -> - parse_Constraints(Rest,[Constraint|Acc]); + parse_Constraints(Rest, [Constraint|Acc]); _ -> - {lists:reverse([Constraint|Acc]),Rest} + {lists:reverse(Acc, [Constraint]),Rest} end. parse_Constraint([{'(',_}|Rest]) -> @@ -830,46 +712,27 @@ parse_Constraint([{'(',_}|Rest]) -> case Rest3 of [{')',_}|Rest4] -> {#constraint{c=Constraint,e=Exception},Rest4}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,')']}}) - end; -parse_Constraint(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,'(']}}). + [_|_] -> + parse_error(Rest3) + end. parse_ConstraintSpec(Tokens) -> Flist = [fun parse_GeneralConstraint/1, fun parse_SubtypeConstraint/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - {asn1_error,Reason2} -> - throw({asn1_error,Reason2}); - Result -> - Result - end. + parse_or(Tokens, Flist). parse_ExceptionSpec([LPar={')',_}|Rest]) -> {undefined,[LPar|Rest]}; parse_ExceptionSpec([{'!',_}|Rest]) -> parse_ExceptionIdentification(Rest); parse_ExceptionSpec(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,[')','!']]}}). + parse_error(Tokens). parse_ExceptionIdentification(Tokens) -> Flist = [fun parse_SignedNumber/1, fun parse_DefinedValue/1, fun parse_TypeColonValue/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - {asn1_error,Reason2} -> - throw({asn1_error,Reason2}); - Result -> - Result - end. + parse_or(Tokens, Flist). parse_TypeColonValue(Tokens) -> {Type,Rest} = parse_Type(Tokens), @@ -877,32 +740,28 @@ parse_TypeColonValue(Tokens) -> [{':',_}|Rest2] -> {Value,Rest3} = parse_Value(Rest2), {{Type,Value},Rest3}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,':']}}) + [_|_] -> + parse_error(Rest) end. parse_SubtypeConstraint(Tokens) -> parse_ElementSetSpecs(Tokens). 
-parse_ElementSetSpecs([{'...',_}|Rest]) -> - {Elements,Rest2} = parse_ElementSetSpec(Rest), - {{[],Elements},Rest2}; parse_ElementSetSpecs(Tokens) -> {RootElems,Rest} = parse_ElementSetSpec(Tokens), case Rest of [{',',_},{'...',_},{',',_}|Rest2] -> {AdditionalElems,Rest3} = parse_ElementSetSpec(Rest2), - {{RootElems,AdditionalElems},Rest3}; + {{element_set,RootElems,AdditionalElems},Rest3}; [{',',_},{'...',_}|Rest2] -> - {{RootElems,[]},Rest2}; + {{element_set,RootElems,empty},Rest2}; _ -> - {RootElems,Rest} + {{element_set,RootElems,none},Rest} end. parse_ElementSetSpec([{'ALL',_},{'EXCEPT',_}|Rest]) -> {Exclusions,Rest2} = parse_Elements(Rest), - {{'ALL',{'EXCEPT',Exclusions}},Rest2}; + {{'ALL-EXCEPT',Exclusions},Rest2}; parse_ElementSetSpec(Tokens) -> parse_Unions(Tokens). @@ -918,14 +777,8 @@ parse_Unions(Tokens) -> case {InterSec,Unions} of {InterSec,[]} -> {InterSec,Rest2}; - {{'SingleValue',V1},{'SingleValue',V2}} -> - {{'SingleValue',ordsets:union(to_set(V1),to_set(V2))},Rest2}; - {V1,V2} when is_list(V2) -> - {[V1] ++ [union|V2],Rest2}; {V1,V2} -> - {[V1,union,V2],Rest2} -% Other -> -% throw(Other) + {{union,V1,V2},Rest2} end. parse_UnionsRec([{'|',_}|Rest]) -> @@ -934,12 +787,8 @@ parse_UnionsRec([{'|',_}|Rest]) -> case {InterSec,URec} of {V1,[]} -> {V1,Rest3}; - {{'SingleValue',V1},{'SingleValue',V2}} -> - {{'SingleValue',ordsets:union(to_set(V1),to_set(V2))},Rest3}; - {V1,V2} when is_list(V2) -> - {[V1] ++ [union|V2],Rest3}; {V1,V2} -> - {[V1,union,V2],Rest3} + {{union,V1,V2},Rest3} end; parse_UnionsRec([{'UNION',Info}|Rest]) -> parse_UnionsRec([{'|',Info}|Rest]); @@ -952,13 +801,8 @@ parse_Intersections(Tokens) -> case {InterSec,IRec} of {V1,[]} -> {V1,Rest2}; - {{'SingleValue',V1},{'SingleValue',V2}} -> - {{'SingleValue', - ordsets:intersection(to_set(V1),to_set(V2))},Rest2}; - {V1,V2} when is_list(V2) -> - {[V1] ++ [intersection|V2],Rest2}; {V1,V2} -> - {[V1,intersection,V2],Rest2} + {{intersection,V1,V2},Rest2} end. %% parse_IElemsRec(Tokens) -> Result @@ -967,15 +811,10 @@ parse_IElemsRec([{'^',_}|Rest]) -> {InterSec,Rest2} = parse_IntersectionElements(Rest), {IRec,Rest3} = parse_IElemsRec(Rest2), case {InterSec,IRec} of - {{'SingleValue',V1},{'SingleValue',V2}} -> - {{'SingleValue', - ordsets:intersection(to_set(V1),to_set(V2))},Rest3}; {V1,[]} -> - {V1,Rest3}; - {V1,V2} when is_list(V2) -> - {[V1] ++ [intersection|V2],Rest3}; + {V1,Rest2}; {V1,V2} -> - {[V1,intersection,V2],Rest3} + {{intersection,V1,V2},Rest3} end; parse_IElemsRec([{'INTERSECTION',Info}|Rest]) -> parse_IElemsRec([{'^',Info}|Rest]); @@ -992,7 +831,7 @@ parse_IntersectionElements(Tokens) -> case Rest of [{'EXCEPT',_}|Rest2] -> {Exclusion,Rest3} = parse_Elements(Rest2), - {{InterSec,{'EXCEPT',Exclusion}},Rest3}; + {{'EXCEPT',InterSec,Exclusion},Rest3}; Rest -> {InterSec,Rest} end. @@ -1006,102 +845,73 @@ parse_Elements([{'(',_}|Rest]) -> case Rest2 of [{')',_}|Rest3] -> {Elems,Rest3}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,')']}}) + [_|_] -> + parse_error(Rest2) end; parse_Elements(Tokens) -> Flist = [fun parse_ObjectSetElements/1, fun parse_SubtypeElements/1, -% fun parse_Value/1, -% fun parse_Type/1, fun parse_Object/1, fun parse_DefinedObjectSet/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - Err = {asn1_error,_} -> - throw(Err); - Result = {Val,_} when is_record(Val,type) -> - Result; - - Result -> - Result - end. - - + parse_or(Tokens, Flist). 
%% -------------------------- -parse_DefinedObjectClass([{typereference,_,_ModName},{'.',_},Tr={typereference,_,_ObjClName}|Rest]) -> -%% {{objectclassname,ModName,ObjClName},Rest}; -% {{objectclassname,tref2Exttref(Tr)},Rest}; - {tref2Exttref(Tr),Rest}; +parse_DefinedObjectClass([{typereference,_,ModName},{'.',_}, + {typereference,Pos,Name}|Tokens]) -> + Ext = #'Externaltypereference'{pos=Pos, + module=ModName, + type=Name}, + {Ext,Tokens}; parse_DefinedObjectClass([Tr={typereference,_,_ObjClName}|Rest]) -> -% {{objectclassname,tref2Exttref(Tr)},Rest}; {tref2Exttref(Tr),Rest}; parse_DefinedObjectClass(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected, - ['typereference . typereference', - typereference, - 'TYPE-IDENTIFIER', - 'ABSTRACT-SYNTAX']]}}). - -parse_ObjectClassAssignment([{typereference,L1,ObjClName},{'::=',_}|Rest]) -> - {Type,Rest2} = parse_ObjectClass(Rest), - {#classdef{pos=L1,name=ObjClName,module=resolve_module(Type), - typespec=Type},Rest2}; -parse_ObjectClassAssignment(Tokens) -> - throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected, - 'typereference ::=']}}). + parse_error(Tokens). parse_ObjectClass(Tokens) -> - Flist = [fun parse_DefinedObjectClass/1, - fun parse_ObjectClassDefn/1, - fun parse_ParameterizedObjectClass/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - {asn1_error,Reason2} -> - throw({asn1_error,Reason2}); - Result -> - Result - end. + Flist = [fun parse_ObjectClassDefn/1, + fun parse_DefinedObjectClass/1], + parse_or(Tokens, Flist). parse_ObjectClassDefn([{'CLASS',_},{'{',_}|Rest]) -> {Type,Rest2} = parse_FieldSpec(Rest), {WithSyntaxSpec,Rest3} = parse_WithSyntaxSpec(Rest2), {#objectclass{fields=Type,syntax=WithSyntaxSpec},Rest3}; parse_ObjectClassDefn(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,'CLASS {']}}). + parse_error(Tokens). parse_FieldSpec(Tokens) -> parse_FieldSpec(Tokens,[]). -parse_FieldSpec(Tokens,Acc) -> - Flist = [fun parse_FixedTypeValueFieldSpec/1, - fun parse_VariableTypeValueFieldSpec/1, - fun parse_ObjectFieldSpec/1, - fun parse_FixedTypeValueSetFieldSpec/1, - fun parse_VariableTypeValueSetFieldSpec/1, - fun parse_TypeFieldSpec/1, - fun parse_ObjectSetFieldSpec/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); +parse_FieldSpec(Tokens0, Acc) -> + Fl = case Tokens0 of + [{valuefieldreference,_,_}|_] -> + %% 1) &field Type + %% &object CLASS-NAME + %% 2) &field &FieldName + %% A fixed type field cannot be distinguished from + %% an object field without type information. + [fun parse_FixedTypeValueFieldSpec/1, + fun parse_VariableTypeValueFieldSpec/1]; + [{typefieldreference,_,_}|_] -> + %% 1) &Set Type + %% &ObjectSet CLASS-NAME + %% 2) &Set &FieldName + %% 3) &Type + %% A value set and an object cannot be distinguished + %% without type information. + [fun parse_FixedTypeValueSetFieldSpec/1, + fun parse_VariableTypeValueSetFieldSpec/1, + fun parse_TypeFieldSpec/1]; + [_|_] -> + parse_error(Tokens0) + end, + case parse_or(Tokens0, Fl) of {Type,[{'}',_}|Rest]} -> - {lists:reverse([Type|Acc]),Rest}; + {lists:reverse(Acc, [Type]),Rest}; {Type,[{',',_}|Rest2]} -> - parse_FieldSpec(Rest2,[Type|Acc]); - {_,[H|_T]} -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,'}']}}) + parse_FieldSpec(Rest2, [Type|Acc]) end. 
parse_PrimitiveFieldName([{typefieldreference,_,FieldName}|Rest]) -> @@ -1109,27 +919,19 @@ parse_PrimitiveFieldName([{typefieldreference,_,FieldName}|Rest]) -> parse_PrimitiveFieldName([{valuefieldreference,_,FieldName}|Rest]) -> {{valuefieldreference,FieldName},Rest}; parse_PrimitiveFieldName(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected, - [typefieldreference,valuefieldreference]]}}). + parse_error(Tokens). parse_FieldName(Tokens) -> {Field,Rest} = parse_PrimitiveFieldName(Tokens), parse_FieldName(Rest,[Field]). -parse_FieldName([{'.',_}|Rest],Acc) -> - case (catch parse_PrimitiveFieldName(Rest)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); - {FieldName,Rest2} -> - parse_FieldName(Rest2,[FieldName|Acc]) - end; -parse_FieldName(Tokens,Acc) -> +parse_FieldName([{'.',_}|Rest0],Acc) -> + {FieldName,Rest1} = parse_PrimitiveFieldName(Rest0), + parse_FieldName(Rest1, [FieldName|Acc]); +parse_FieldName(Tokens, Acc) -> {lists:reverse(Acc),Tokens}. -parse_FixedTypeValueFieldSpec([{valuefieldreference,L1,VFieldName}|Rest]) -> +parse_FixedTypeValueFieldSpec([{valuefieldreference,_,VFieldName}|Rest]) -> {Type,Rest2} = parse_Type(Rest), {Unique,Rest3} = case Rest2 of @@ -1139,109 +941,61 @@ parse_FixedTypeValueFieldSpec([{valuefieldreference,L1,VFieldName}|Rest]) -> {undefined,Rest2} end, {OptionalitySpec,Rest5} = parse_ValueOptionalitySpec(Rest3), - case {Unique,Rest5} of - {'UNIQUE',[{Del,_}|_]} when Del =:= ','; Del =:= '}' -> - case OptionalitySpec of - {'DEFAULT',_} -> - throw({asn1_error, - {L1,get(asn1_module), - ['UNIQUE and DEFAULT in same field',VFieldName]}}); - _ -> - {{fixedtypevaluefield,VFieldName,Type,Unique,OptionalitySpec},Rest5} - end; - {_,[{Del,_}|_]} when Del =:= ','; Del =:= '}' -> - {{object_or_fixedtypevalue_field,VFieldName,Type,Unique,OptionalitySpec},Rest5}; - _ -> - throw({asn1_error,{L1,get(asn1_module), - [got,get_token(hd(Rest5)),expected,[',','}']]}}) - end; -parse_FixedTypeValueFieldSpec(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,valuefieldreference]}}). + case is_end_delimiter(Rest5) of + false -> parse_error(Rest5); + true -> ok + end, + Tag = case Unique of + 'UNIQUE' -> fixedtypevaluefield; + _ -> object_or_fixedtypevalue_field + end, + {{Tag,VFieldName,Type,Unique,OptionalitySpec},Rest5}. + +parse_VariableTypeValueFieldSpec([{valuefieldreference,_,VFieldName}|Rest0]) -> + {FieldRef,Rest1} = parse_FieldName(Rest0), + {OptionalitySpec,Rest} = parse_ValueOptionalitySpec(Rest1), + case is_end_delimiter(Rest) of + true -> + {{variabletypevaluefield,VFieldName,FieldRef,OptionalitySpec}, + Rest}; + false -> + parse_error(Rest) + end. -parse_VariableTypeValueFieldSpec([{valuefieldreference,L,VFieldName}|Rest]) -> - {FieldRef,Rest2} = parse_FieldName(Rest), - {OptionalitySpec,Rest3} = parse_ValueOptionalitySpec(Rest2), - case Rest3 of - [{Del,_}|_] when Del =:= ','; Del =:= '}' -> - {{variabletypevaluefield,VFieldName,FieldRef,OptionalitySpec},Rest3}; - _ -> - throw({asn1_error,{L,get(asn1_module), - [got,get_token(hd(Rest3)),expected,[',','}']]}}) - end; -parse_VariableTypeValueFieldSpec(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,valuefieldreference]}}). 
+parse_TypeFieldSpec([{typefieldreference,_,Name}|Rest0]) -> + {OptionalitySpec,Rest} = parse_TypeOptionalitySpec(Rest0), + case is_end_delimiter(Rest) of + true -> + {{typefield,Name,OptionalitySpec},Rest}; + false -> + parse_error(Rest) + end. -parse_ObjectFieldSpec([{valuefieldreference,L,VFieldName}|Rest]) -> - {Class,Rest2} = parse_DefinedObjectClass(Rest), - {OptionalitySpec,Rest3} = parse_ObjectOptionalitySpec(Rest2), - case Rest3 of - [{Del,_}|_] when Del =:= ','; Del =:= '}' -> - {{objectfield,VFieldName,Class,undefined,OptionalitySpec},Rest3}; - _ -> - throw({asn1_error,{L,get(asn1_module), - [got,get_token(hd(Rest3)),expected,[',','}']]}}) - end; -parse_ObjectFieldSpec(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,valuefieldreference]}}). +parse_FixedTypeValueSetFieldSpec([{typefieldreference,_,Name}|Rest0]) -> + {Type,Rest1} = parse_Type(Rest0), + {OptionalitySpec,Rest} = parse_ValueSetOptionalitySpec(Rest1), + case is_end_delimiter(Rest) of + true -> + {{objectset_or_fixedtypevalueset_field,Name,Type, + OptionalitySpec},Rest}; + false -> + parse_error(Rest) + end. -parse_TypeFieldSpec([{typefieldreference,L,TFieldName}|Rest]) -> - {OptionalitySpec,Rest2} = parse_TypeOptionalitySpec(Rest), - case Rest2 of - [{Del,_}|_] when Del =:= ','; Del =:= '}' -> - {{typefield,TFieldName,OptionalitySpec},Rest2}; - _ -> - throw({asn1_error,{L,get(asn1_module), - [got,get_token(hd(Rest2)),expected,[',','}']]}}) - end; -parse_TypeFieldSpec(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,typefieldreference]}}). +parse_VariableTypeValueSetFieldSpec([{typefieldreference,_,Name}|Rest0]) -> + {FieldRef,Rest1} = parse_FieldName(Rest0), + {OptionalitySpec,Rest} = parse_ValueSetOptionalitySpec(Rest1), + case is_end_delimiter(Rest) of + true -> + {{variabletypevaluesetfield,Name,FieldRef,OptionalitySpec}, + Rest}; + false -> + parse_error(Rest) + end. -parse_FixedTypeValueSetFieldSpec([{typefieldreference,L,TFieldName}|Rest]) -> - {Type,Rest2} = parse_Type(Rest), - {OptionalitySpec,Rest3} = parse_ValueSetOptionalitySpec(Rest2), - case Rest3 of - [{Del,_}|_] when Del =:= ','; Del =:= '}' -> - {{objectset_or_fixedtypevalueset_field,TFieldName,Type, - OptionalitySpec},Rest3}; - _ -> - throw({asn1_error,{L,get(asn1_module), - [got,get_token(hd(Rest3)),expected,[',','}']]}}) - end; -parse_FixedTypeValueSetFieldSpec(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,typefieldreference]}}). - -parse_VariableTypeValueSetFieldSpec([{typefieldreference,L,TFieldName}|Rest]) -> - {FieldRef,Rest2} = parse_FieldName(Rest), - {OptionalitySpec,Rest3} = parse_ValueSetOptionalitySpec(Rest2), - case Rest3 of - [{Del,_}|_] when Del =:= ','; Del =:= '}' -> - {{variabletypevaluesetfield,TFieldName,FieldRef,OptionalitySpec},Rest3}; - _ -> - throw({asn1_error,{L,get(asn1_module), - [got,get_token(hd(Rest3)),expected,[',','}']]}}) - end; -parse_VariableTypeValueSetFieldSpec(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,typefieldreference]}}). 
- -parse_ObjectSetFieldSpec([{typefieldreference,L,TFieldName}|Rest]) -> - {Class,Rest2} = parse_DefinedObjectClass(Rest), - {OptionalitySpec,Rest3} = parse_ObjectSetOptionalitySpec(Rest2), - case Rest3 of - [{Del,_}|_] when Del =:= ','; Del =:= '}' -> - {{objectsetfield,TFieldName,Class,OptionalitySpec},Rest3}; - _ -> - throw({asn1_error,{L,get(asn1_module), - [got,get_token(hd(Rest3)),expected,[',','}']]}}) - end; -parse_ObjectSetFieldSpec(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,typefieldreference]}}). +is_end_delimiter([{',',_}|_]) -> true; +is_end_delimiter([{'}',_}|_]) -> true; +is_end_delimiter([_|_]) -> false. parse_ValueOptionalitySpec(Tokens)-> case Tokens of @@ -1252,15 +1006,6 @@ parse_ValueOptionalitySpec(Tokens)-> _ -> {'MANDATORY',Tokens} end. -parse_ObjectOptionalitySpec(Tokens) -> - case Tokens of - [{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest}; - [{'DEFAULT',_}|Rest] -> - {Object,Rest2} = parse_Object(Rest), - {{'DEFAULT',Object},Rest2}; - _ -> {'MANDATORY',Tokens} - end. - parse_TypeOptionalitySpec(Tokens) -> case Tokens of [{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest}; @@ -1279,65 +1024,44 @@ parse_ValueSetOptionalitySpec(Tokens) -> _ -> {'MANDATORY',Tokens} end. -parse_ObjectSetOptionalitySpec(Tokens) -> - case Tokens of - [{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest}; - [{'DEFAULT',_}|Rest] -> - {ObjectSet,Rest2} = parse_ObjectSet(Rest), - {{'DEFAULT',ObjectSet},Rest2}; - _ -> {'MANDATORY',Tokens} - end. - parse_WithSyntaxSpec([{'WITH',_},{'SYNTAX',_}|Rest]) -> {SyntaxList,Rest2} = parse_SyntaxList(Rest), {{'WITH SYNTAX',SyntaxList},Rest2}; parse_WithSyntaxSpec(Tokens) -> {[],Tokens}. -parse_SyntaxList([{'{',_},{'}',_}|Rest]) -> - {[],Rest}; parse_SyntaxList([{'{',_}|Rest]) -> parse_SyntaxList(Rest,[]); parse_SyntaxList(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,['{}','{']]}}). + parse_error(Tokens). -parse_SyntaxList(Tokens,Acc) -> +parse_SyntaxList(Tokens, Acc) -> {SyntaxList,Rest} = parse_TokenOrGroupSpec(Tokens), case Rest of [{'}',_}|Rest2] -> - {lists:reverse([SyntaxList|Acc]),Rest2}; + {lists:reverse(Acc, [SyntaxList]),Rest2}; _ -> - parse_SyntaxList(Rest,[SyntaxList|Acc]) + parse_SyntaxList(Rest, [SyntaxList|Acc]) end. parse_TokenOrGroupSpec(Tokens) -> Flist = [fun parse_RequiredToken/1, fun parse_OptionalGroup/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); - Result -> - Result - end. + parse_or(Tokens, Flist). -parse_RequiredToken([{typereference,L1,WordName}|Rest]) -> +parse_RequiredToken([{typereference,_,WordName}|Rest]=Tokens) -> case is_word(WordName) of false -> - throw({asn1_error,{L1,get(asn1_module), - [got,WordName,expected,a,'Word']}}); + parse_error(Tokens); true -> {WordName,Rest} end; parse_RequiredToken([{',',L1}|Rest]) -> {{',',L1},Rest}; -parse_RequiredToken([{WordName,L1}|Rest]) -> +parse_RequiredToken([{WordName,_}|Rest]=Tokens) -> case is_word(WordName) of false -> - throw({asn1_error,{L1,get(asn1_module), - [got,WordName,expected,a,'Word']}}); + parse_error(Tokens); true -> {WordName,Rest} end; @@ -1347,7 +1071,9 @@ parse_RequiredToken(Tokens) -> parse_OptionalGroup([{'[',_}|Rest]) -> {Spec,Rest2} = parse_TokenOrGroupSpec(Rest), {SpecList,Rest3} = parse_OptionalGroup(Rest2,[Spec]), - {SpecList,Rest3}. + {SpecList,Rest3}; +parse_OptionalGroup(Tokens) -> + parse_error(Tokens). 
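The is_end_delimiter/1 clauses introduced here replace the repeated `case Rest of [{Del,_}|_] when Del =:= ','; Del =:= '}' -> ...` checks in the field-spec parsers: a field spec is complete only when the next token is a comma or a closing brace, and anything else is reported through parse_error/1 (a helper assumed to be defined elsewhere in the module). A minimal sketch of the call-site shape, with hypothetical names:

    %% Illustrative call-site pattern only; Parsed and Rest stand for the
    %% result of any of the field-spec parsers above.
    finish_field_spec(Parsed, Rest) ->
        case is_end_delimiter(Rest) of
            true  -> {Parsed,Rest};
            false -> parse_error(Rest)   %assumed: throws {asn1_error,...}
        end.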
parse_OptionalGroup([{']',_}|Rest],Acc) -> {lists:reverse(Acc),Rest}; @@ -1355,82 +1081,55 @@ parse_OptionalGroup(Tokens,Acc) -> {Spec,Rest} = parse_TokenOrGroupSpec(Tokens), parse_OptionalGroup(Rest,[Spec|Acc]). -parse_DefinedObject([Id={identifier,_,_ObjName}|Rest]) -> +parse_DefinedObject([#identifier{}=Id|Rest]) -> {{object,identifier2Extvalueref(Id)},Rest}; -parse_DefinedObject([{typereference,L1,ModName},{'.',_},{identifier,_,ObjName}|Rest]) -> +parse_DefinedObject([{typereference,L1,ModName},{'.',_},#identifier{val=ObjName}|Rest]) -> {{object, #'Externaltypereference'{pos=L1,module=ModName,type=ObjName}},Rest}; parse_DefinedObject(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected, - [identifier,'typereference.identifier']]}}). + parse_error(Tokens). -parse_ObjectAssignment([{identifier,L1,ObjName}|Rest]) -> +parse_ObjectAssignment([#identifier{pos=L1,val=ObjName}|Rest]) -> {Class,Rest2} = parse_DefinedObjectClass(Rest), case Rest2 of [{'::=',_}|Rest3] -> {Object,Rest4} = parse_Object(Rest3), {#typedef{pos=L1,name=ObjName, typespec=#'Object'{classname=Class,def=Object}},Rest4}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,'::=']}}); - Other -> - throw({asn1_error,{L1,get(asn1_module), - [got,Other,expected,'::=']}}) - end; -parse_ObjectAssignment(Tokens) -> - throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,identifier]}}). - + _ -> + parse_error(Rest2) + end. %% parse_Object(Tokens) -> Ret %% Tokens = [Tok] %% Tok = tuple() %% Ret = {object,_} | {object, _, _} parse_Object(Tokens) -> - Flist=[fun parse_ObjectDefn/1, - fun parse_ObjectFromObject/1, - fun parse_ParameterizedObject/1, - fun parse_DefinedObject/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); - Result -> - Result - end. + %% The ObjectFromObject production is not included here, + %% since it will have been catched by the ValueFromObject + %% before we reach this point. + Flist = [fun parse_ObjectDefn/1, + fun parse_DefinedObject/1], + parse_or(Tokens, Flist). parse_ObjectDefn(Tokens) -> Flist=[fun parse_DefaultSyntax/1, fun parse_DefinedSyntax/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); - Result -> - Result - end. + parse_or(Tokens, Flist). -parse_DefaultSyntax([{'{',_},{'}',_}|Rest]) -> - {{object,defaultsyntax,[]},Rest}; parse_DefaultSyntax([{'{',_}|Rest]) -> parse_DefaultSyntax(Rest,[]); parse_DefaultSyntax(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,['{}','{']]}}). + parse_error(Tokens). -parse_DefaultSyntax(Tokens,Acc) -> +parse_DefaultSyntax(Tokens, Acc) -> {Setting,Rest} = parse_FieldSetting(Tokens), case Rest of [{',',_}|Rest2] -> parse_DefaultSyntax(Rest2,[Setting|Acc]); [{'}',_}|Rest3] -> - {{object,defaultsyntax,lists:reverse([Setting|Acc])},Rest3}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,[',','}']]}}) + {{object,defaultsyntax,lists:reverse(Acc, [Setting])},Rest3}; + _ -> + parse_error(Rest) end. parse_FieldSetting(Tokens) -> @@ -1439,7 +1138,9 @@ parse_FieldSetting(Tokens) -> {{PrimFieldName,Setting},Rest2}. parse_DefinedSyntax([{'{',_}|Rest]) -> - parse_DefinedSyntax(Rest,[]). + parse_DefinedSyntax(Rest, []); +parse_DefinedSyntax(Tokens) -> + parse_error(Tokens). 
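In parse_DefinedObject/1 and parse_ObjectAssignment/1 above, and in many hunks that follow, identifier tokens are matched as #identifier{} records instead of {identifier,Pos,Name} 3-tuples. The record definition is not part of this section; judging from the patterns it is presumably along these lines, with pos and val being the only fields used here:

    %% Assumed shape of the token record (defined elsewhere, e.g. in a
    %% shared header or in the parser itself); additional fields may exist.
    -record(identifier, {pos, val}).

    %% i.e. what used to be matched as {identifier, Line, Name} is now
    %% matched as #identifier{pos=Line, val=Name}.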
parse_DefinedSyntax(Tokens,Acc) -> case Tokens of @@ -1455,95 +1156,70 @@ parse_DefinedSyntax(Tokens,Acc) -> %% Literal ::= word | ',' %% Setting ::= Type | Value | ValueSet | Object | ObjectSet %% word equals typereference, but no lower cases -parse_DefinedSyntaxToken([{',',L1}|Rest]) -> - {{',',L1},Rest}; +parse_DefinedSyntaxToken([{',',_}=Comma|Rest]) -> + {Comma,Rest}; %% ObjectClassFieldType or a defined type with a constraint. %% Should also be able to parse a parameterized type. It may be %% impossible to distinguish between a parameterized type and a Literal %% followed by an object set. -parse_DefinedSyntaxToken(Tokens=[{typereference,L1,_Name},{T,_}|_Rest]) - when T == '.'; T == '(' -> - case catch parse_Setting(Tokens) of - {asn1_error,_} -> - throw({asn1_error,{L1,get(asn1_module), - [got,hd(Tokens), expected,['Word',setting]]}}); - {'EXIT',Reason} -> - exit(Reason); - Result -> - Result - end; -parse_DefinedSyntaxToken(Tokens=[TRef={typereference,L1,Name}|Rest]) -> +parse_DefinedSyntaxToken([{typereference,_,_Name},{T,_}|_]=Tokens) + when T =:= '.'; T =:= '(' -> + parse_Setting(Tokens); +parse_DefinedSyntaxToken([{typereference,L1,Name}=TRef|Rest]=Tokens) -> case is_word(Name) of false -> case lookahead_definedsyntax(Rest) of word_or_setting -> {{setting,L1,tref2Exttref(TRef)},Rest}; - _ -> + setting -> parse_Setting(Tokens) end; true -> - %% {{word_or_setting,L1,Name},Rest} {{word_or_setting,L1,tref2Exttref(TRef)},Rest} end; parse_DefinedSyntaxToken(Tokens) -> - case catch parse_Setting(Tokens) of - {asn1_error,_} -> - parse_Word(Tokens); - {'EXIT',Reason} -> - exit(Reason); - Result -> + try parse_Setting(Tokens) of + {_,_}=Result -> Result + catch + throw:{asn1_error,_} -> + parse_Word(Tokens) end. lookahead_definedsyntax([{typereference,_,Name}|_Rest]) -> - case is_word(Name) of + case is_word(Name) of true -> word_or_setting; - _ -> setting + false -> setting end; lookahead_definedsyntax([{'}',_}|_Rest]) -> word_or_setting; lookahead_definedsyntax(_) -> setting. -parse_Word([{Name,Pos}|Rest]) -> +parse_Word([{Name,Pos}|Rest]=Tokens) -> case is_word(Name) of false -> - throw({asn1_error,{Pos,get(asn1_module), - [got,Name, expected,a,'Word']}}); + parse_error(Tokens); true -> {{word_or_setting,Pos,tref2Exttref(Pos,Name)},Rest} - end. + end; +parse_Word(Tokens) -> + parse_error(Tokens). parse_Setting(Tokens) -> Flist = [{type_tag,fun parse_Type/1}, {value_tag,fun parse_Value/1}, {object_tag,fun parse_Object/1}, {objectset_tag,fun parse_ObjectSet/1}], - case (catch parse_or_tag(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); - Result = {{value_tag,_},_} -> + case parse_or_tag(Tokens, Flist) of + {{value_tag,_},_}=Result -> + %% Keep the value_tag. Result; {{Tag,Setting},Rest} when is_atom(Tag) -> + %% Remove all other tags. {Setting,Rest} end. -%% parse_Setting(Tokens) -> -%% Flist = [fun parse_Type/1, -%% fun parse_Value/1, -%% fun parse_Object/1, -%% fun parse_ObjectSet/1], -%% case (catch parse_or(Tokens,Flist)) of -%% {'EXIT',Reason} -> -%% exit(Reason); -%% AsnErr = {asn1_error,_} -> -%% throw(AsnErr); -%% Result -> -%% Result -%% end. 
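parse_DefinedSyntaxToken/1 above also switches from `case catch parse_Setting(Tokens) of ...` to `try ... catch throw:{asn1_error,_} -> ... end`. The difference matters: `catch Expr` turns every exit and error into an ordinary term that then has to be recognised and re-raised by hand (the removed {'EXIT',Reason} -> exit(Reason) clauses), whereas the try form backtracks only on a thrown parse error and lets genuine crashes propagate untouched. A toy illustration of the idiom, not taken from the patch:

    %% Try a primary parser and fall back to another one only when the
    %% primary throws {asn1_error,_}; any other exception is not caught.
    first_that_parses(Tokens, Primary, Fallback)
      when is_function(Primary, 1), is_function(Fallback, 1) ->
        try
            Primary(Tokens)
        catch
            throw:{asn1_error,_} ->
                Fallback(Tokens)
        end.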
- parse_DefinedObjectSet([{typereference,L1,ModuleName},{'.',_}, {typereference,L2,ObjSetName}|Rest]) -> {{objectset,L1,#'Externaltypereference'{pos=L2,module=ModuleName, @@ -1552,9 +1228,7 @@ parse_DefinedObjectSet([{typereference,L1,ObjSetName}|Rest]) -> {{objectset,L1,#'Externaltypereference'{pos=L1,module=resolve_module(ObjSetName), type=ObjSetName}},Rest}; parse_DefinedObjectSet(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected, - [typereference,'typereference.typereference']]}}). + parse_error(Tokens). parse_ObjectSetAssignment([{typereference,L1,ObjSetName}|Rest]) -> {Class,Rest2} = parse_DefinedObjectClass(Rest), @@ -1564,16 +1238,9 @@ parse_ObjectSetAssignment([{typereference,L1,ObjSetName}|Rest]) -> {#typedef{pos=L1,name=ObjSetName, typespec=#'ObjectSet'{class=Class, set=ObjectSet}},Rest4}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,'::=']}}) -%%% Other -> -%%% throw(Other) - end; -parse_ObjectSetAssignment(Tokens) -> - throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected, - typereference]}}). + _ -> + parse_error(Rest2) + end. %% parse_ObjectSet(Tokens) -> {Ret,Rest} %% Tokens = [Tok] @@ -1590,26 +1257,20 @@ parse_ObjectSet([{'{',_}|Rest]) -> case Rest2 of [{'}',_}|Rest3] -> {ObjSetSpec,Rest3}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,'}']}}) + _ -> + parse_error(Rest2) end; parse_ObjectSet(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,'{']}}). + parse_error(Tokens). -parse_ObjectSetSpec([{'...',_}|Rest]) -> - case Rest of - [{',',_}|Rest2] -> - {Elements,Rest3}=parse_ElementSetSpecs(Rest2), - {{[],Elements},Rest3}; - _ -> - {['EXTENSIONMARK'],Rest} - end; +parse_ObjectSetSpec([{'...',_},{',',_}|Tokens0]) -> + {Elements,Tokens} = parse_ElementSetSpec(Tokens0), + {{element_set,empty,Elements},Tokens}; +parse_ObjectSetSpec([{'...',_}|Tokens]) -> + {{element_set,empty,empty},Tokens}; parse_ObjectSetSpec(Tokens) -> parse_ElementSetSpecs(Tokens). -% moved fun parse_Object/1 and fun parse_DefinedObjectSet/1 to parse_Elements %% parse_ObjectSetElements(Tokens) -> {Result,Rest} %% Result ::= {'ObjectSetFromObjects',Objects,Name} | {pos,ObjectSet,Params} %% Objects ::= ReferencedObjects @@ -1619,18 +1280,9 @@ parse_ObjectSetSpec(Tokens) -> %% ObjectSet ::= {objectset,integer(),#'Externaltypereference'{}} %% Params ::= list() (see parse_ActualParameterList/1) parse_ObjectSetElements(Tokens) -> - Flist = [%fun parse_Object/1, - %fun parse_DefinedObjectSet/1, - fun parse_ObjectSetFromObjects/1, + Flist = [fun parse_ObjectSetFromObjects/1, fun parse_ParameterizedObjectSet/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); - Result -> - Result - end. + parse_or(Tokens, Flist). parse_ObjectClassFieldType(Tokens) -> {Class,Rest} = parse_DefinedObjectClass(Tokens), @@ -1641,25 +1293,10 @@ parse_ObjectClassFieldType(Tokens) -> classname=Class, class=Class,fieldname=FieldName}, {#type{def=OCFT},Rest3}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,'.']}}) -%%% Other -> -%%% throw(Other) + _ -> + parse_error(Rest) end. 
-%parse_ObjectClassFieldValue(Tokens) -> -% Flist = [fun parse_OpenTypeFieldVal/1, -% fun parse_FixedTypeFieldVal/1], -% case (catch parse_or(Tokens,Flist)) of -% {'EXIT',Reason} -> -% throw(Reason); -% AsnErr = {asn1_error,_} -> -% throw(AsnErr); -% Result -> -% Result -% end. - parse_ObjectClassFieldValue(Tokens) -> parse_OpenTypeFieldVal(Tokens). @@ -1669,28 +1306,10 @@ parse_OpenTypeFieldVal(Tokens) -> [{':',_}|Rest2] -> {Value,Rest3} = parse_Value(Rest2), {{opentypefieldvalue,Type,Value},Rest3}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,':']}}) + _ -> + parse_error(Rest) end. -% parse_FixedTypeFieldVal(Tokens) -> -% parse_Value(Tokens). - -% parse_InformationFromObjects(Tokens) -> -% Flist = [fun parse_ValueFromObject/1, -% fun parse_ValueSetFromObjects/1, -% fun parse_TypeFromObject/1, -% fun parse_ObjectFromObject/1], -% case (catch parse_or(Tokens,Flist)) of -% {'EXIT',Reason} -> -% throw(Reason); -% AsnErr = {asn1_error,_} -> -% throw(AsnErr); -% Result -> -% Result -% end. - %% parse_ReferencedObjects(Tokens) -> {Result,Rest} %% Result ::= DefObject | DefObjSet | %% {po,DefObject,Params} | {pos,DefObjSet,Params} | @@ -1702,18 +1321,11 @@ parse_OpenTypeFieldVal(Tokens) -> parse_ReferencedObjects(Tokens) -> Flist = [fun parse_DefinedObject/1, fun parse_DefinedObjectSet/1, - fun parse_ParameterizedObject/1, fun parse_ParameterizedObjectSet/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); - Result -> - Result - end. + parse_or(Tokens, Flist). parse_ValueFromObject(Tokens) -> + %% This production also matches ObjectFromObject. {Objects,Rest} = parse_ReferencedObjects(Tokens), case Rest of [{'.',_}|Rest2] -> @@ -1722,35 +1334,10 @@ parse_ValueFromObject(Tokens) -> {valuefieldreference,_} -> {{'ValueFromObject',Objects,Name},Rest3}; _ -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,typefieldreference,expected, - valuefieldreference]}}) + parse_error(Rest2) end; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,'.']}}) -%%% Other -> -%%% throw({asn1_error,{got,Other,expected,'.'}}) - end. - -parse_ValueSetFromObjects(Tokens) -> - {Objects,Rest} = parse_ReferencedObjects(Tokens), - case Rest of - [{'.',_}|Rest2] -> - {Name,Rest3} = parse_FieldName(Rest2), - case lists:last(Name) of - {typefieldreference,_FieldName} -> - {{'ValueSetFromObjects',Objects,Name},Rest3}; - _ -> - throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module), - [got,get_token(hd(Rest2)),expected, - typefieldreference]}}) - end; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,'.']}}) -%%% Other -> -%%% throw({asn1_error,{got,Other,expected,'.'}}) + _ -> + parse_error(Rest) end. parse_TypeFromObject(Tokens) -> @@ -1760,28 +1347,12 @@ parse_TypeFromObject(Tokens) -> {Name,Rest3} = parse_FieldName(Rest2), case lists:last(Name) of {typefieldreference,_FieldName} -> - {{'TypeFromObject',Objects,Name},Rest3}; + {#type{def={'TypeFromObject',Objects,Name}},Rest3}; _ -> - throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module), - [got,get_token(hd(Rest2)),expected, - typefieldreference]}}) + parse_error(Rest2) end; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,'.']}}) -%%% Other -> -%%% throw({asn1_error,{got,Other,expected,'.'}}) - end. 
- -parse_ObjectFromObject(Tokens) -> - {Objects,Rest} = parse_ReferencedObjects(Tokens), - case Rest of - [{'.',_}|Rest2] -> - {Name,Rest3} = parse_FieldName(Rest2), - {{'ObjectFromObject',Objects,Name},Rest3}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,'.']}}) + _ -> + parse_error(Rest) end. %% parse_ObjectSetFromObjects(Tokens) -> {Result,Rest} @@ -1799,23 +1370,12 @@ parse_ObjectSetFromObjects(Tokens) -> {typefieldreference,_FieldName} -> {{'ObjectSetFromObjects',Objects,Name},Rest3}; _ -> - throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module), - [got,get_token(hd(Rest2)),expected, - typefieldreference]}}) + parse_error(Rest2) end; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,'.']}}) + _ -> + parse_error(Rest) end. -% parse_InstanceOfType([{'INSTANCE',_},{'OF',_}|Rest]) -> -% {Class,Rest2} = parse_DefinedObjectClass(Rest), -% {{'InstanceOfType',Class},Rest2}. - -% parse_InstanceOfValue(Tokens) -> -% parse_Value(Tokens). - - %% X.682 constraint specification @@ -1823,14 +1383,7 @@ parse_GeneralConstraint(Tokens) -> Flist = [fun parse_UserDefinedConstraint/1, fun parse_TableConstraint/1, fun parse_ContentsConstraint/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); - Result -> - Result - end. + parse_or(Tokens, Flist). parse_UserDefinedConstraint([{'CONSTRAINED',_},{'BY',_},{'{',_},{'}',_}|Rest])-> {{constrained_by,[]},Rest}; @@ -1841,32 +1394,23 @@ parse_UserDefinedConstraint([{'CONSTRAINED',_}, case Rest2 of [{'}',_}|Rest3] -> {{constrained_by,Param},Rest3}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,'}']}}) + _ -> + parse_error(Rest2) end; parse_UserDefinedConstraint(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected, - ['CONSTRAINED BY {}','CONSTRAINED BY {']]}}). + parse_error(Tokens). parse_UserDefinedConstraintParameter(Tokens) -> - parse_UserDefinedConstraintParameter(Tokens,[]). -parse_UserDefinedConstraintParameter(Tokens,Acc) -> + parse_UserDefinedConstraintParameter(Tokens, []). + +parse_UserDefinedConstraintParameter(Tokens0, Acc) -> Flist = [fun parse_GovernorAndActualParameter/1, fun parse_ActualParameter/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); - {Result,Rest} -> - case Rest of - [{',',_}|_Rest2] -> - parse_UserDefinedConstraintParameter(Tokens,[Result|Acc]); - _ -> - {lists:reverse([Result|Acc]),Rest} - end + case parse_or(Tokens0, Flist) of + {Result,[{',',_}|Tokens]} -> + parse_UserDefinedConstraintParameter(Tokens, [Result|Acc]); + {Result,Tokens} -> + {lists:reverse(Acc, [Result]),Tokens} end. parse_GovernorAndActualParameter(Tokens) -> @@ -1875,26 +1419,18 @@ parse_GovernorAndActualParameter(Tokens) -> [{':',_}|Rest2] -> {Params,Rest3} = parse_ActualParameter(Rest2), {{'Governor_Params',Governor,Params},Rest3}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,':']}}) + _ -> + parse_error(Rest) end. parse_TableConstraint(Tokens) -> Flist = [fun parse_ComponentRelationConstraint/1, fun parse_SimpleTableConstraint/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); - Result -> - Result - end. + parse_or(Tokens, Flist). 
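Another recurring change in these hunks (parse_UserDefinedConstraintParameter/2 above, and the various list builders before and after it) is replacing lists:reverse([X|Acc]) with lists:reverse(Acc, [X]). The two are equivalent; lists:reverse/2 reverses its first argument onto the second, so the final element does not have to be consed onto the accumulator first:

    %% Erlang shell illustration:
    1> lists:reverse([c,b,a], [d]).
    [a,b,c,d]
    2> lists:reverse([d,c,b,a]).
    [a,b,c,d]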
parse_SimpleTableConstraint(Tokens) -> {ObjectSet,Rest} = parse_ObjectSet(Tokens), - {{simpletable,ObjectSet},Rest}. + {{element_set,{simpletable,ObjectSet},none},Rest}. parse_ComponentRelationConstraint([{'{',_}|Rest]) -> {ObjectSet,Rest2} = parse_DefinedObjectSet(Rest), @@ -1903,21 +1439,18 @@ parse_ComponentRelationConstraint([{'{',_}|Rest]) -> {AtNot,Rest4} = parse_AtNotationList(Rest3,[]), case Rest4 of [{'}',_}|Rest5] -> - {{componentrelation,ObjectSet,AtNot},Rest5}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,'}']}}) + Ret = {element_set, + {componentrelation,ObjectSet,AtNot}, + none}, + {Ret,Rest5}; + _ -> + parse_error(Rest4) end; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected, - 'ComponentRelationConstraint',ended,with,'}']}}) -%%% Other -> -%%% throw(Other) + _ -> + parse_error(Rest2) end; parse_ComponentRelationConstraint(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,'{']}}). + parse_error(Tokens). parse_AtNotationList(Tokens,Acc) -> {AtNot,Rest} = parse_AtNotation(Tokens), @@ -1925,7 +1458,7 @@ parse_AtNotationList(Tokens,Acc) -> [{',',_}|Rest2] -> parse_AtNotationList(Rest2,[AtNot|Acc]); _ -> - {lists:reverse([AtNot|Acc]),Rest} + {lists:reverse(Acc, [AtNot]),Rest} end. parse_AtNotation([{'@',_},{'.',_}|Rest]) -> @@ -1935,20 +1468,17 @@ parse_AtNotation([{'@',_}|Rest]) -> {CIdList,Rest2} = parse_ComponentIdList(Rest), {{outermost,CIdList},Rest2}; parse_AtNotation(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,['@','@.']]}}). + parse_error(Tokens). parse_ComponentIdList(Tokens) -> parse_ComponentIdList(Tokens,[]). -parse_ComponentIdList([Id = {identifier,_,_},{'.',_}|Rest],Acc) -> +parse_ComponentIdList([#identifier{}=Id,{'.',_}|Rest], Acc) -> parse_ComponentIdList(Rest,[identifier2Extvalueref(Id)|Acc]); -parse_ComponentIdList([Id = {identifier,_,_}|Rest],Acc) -> - {lists:reverse([identifier2Extvalueref(Id)|Acc]),Rest}; +parse_ComponentIdList([#identifier{}=Id|Rest], Acc) -> + {lists:reverse(Acc, [identifier2Extvalueref(Id)]),Rest}; parse_ComponentIdList(Tokens,_) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected, - [identifier,'identifier.']]}}). + parse_error(Tokens). parse_ContentsConstraint([{'CONTAINING',_}|Rest]) -> {Type,Rest2} = parse_Type(Rest), @@ -1963,24 +1493,14 @@ parse_ContentsConstraint([{'ENCODED',_},{'BY',_}|Rest]) -> {Value,Rest2} = parse_Value(Rest), {{contentsconstraint,[],Value},Rest2}; parse_ContentsConstraint(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected, - 'CONTAINING','or','ENCODED BY']}}). - + parse_error(Tokens). % X.683 Parameterization of ASN.1 specifications parse_Governor(Tokens) -> Flist = [fun parse_Type/1, fun parse_DefinedObjectClass/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); - Result -> - Result - end. + parse_or(Tokens, Flist). parse_ActualParameter(Tokens) -> Flist = [fun parse_Type/1, @@ -1989,32 +1509,7 @@ parse_ActualParameter(Tokens) -> fun parse_DefinedObjectClass/1, fun parse_Object/1, fun parse_ObjectSet/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); - Result -> - Result - end. 
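Both table-constraint forms now come back wrapped in the same {element_set,Root,Extension} shape used for element sets elsewhere in the parser, with none marking the absent extension part, so later passes can treat constraints uniformly. Read off the clauses above (an illustrative summary, not compiler output):

    %% simple table constraint   { ObjectSet }
    %%   -> {element_set,{simpletable,ObjectSet},none}
    %% component relation        { DefinedObjectSet }{ @component, ... }
    %%   -> {element_set,{componentrelation,DefinedObjectSet,AtNotation},none}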
- -parse_ParameterizedAssignment(Tokens) -> - Flist = [fun parse_ParameterizedTypeAssignment/1, - fun parse_ParameterizedValueAssignment/1, - fun parse_ParameterizedValueSetTypeAssignment/1, - fun parse_ParameterizedObjectClassAssignment/1, - fun parse_ParameterizedObjectAssignment/1, - fun parse_ParameterizedObjectSetAssignment/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); - AsnAssErr = {asn1_assignment_error,_} -> - throw(AsnAssErr); - Result -> - Result - end. + parse_or(Tokens, Flist). %% parse_ParameterizedTypeAssignment(Tokens) -> Result %% Result = {#ptypedef{},Rest} | throw() @@ -2025,18 +1520,13 @@ parse_ParameterizedTypeAssignment([{typereference,L1,Name}|Rest]) -> {Type,Rest4} = parse_Type(Rest3), {#ptypedef{pos=L1,name=Name,args=ParameterList,typespec=Type}, Rest4}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,'::=']}}) - end; -parse_ParameterizedTypeAssignment(Tokens) -> - throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected, - typereference]}}). + _ -> + parse_error(Rest2) + end. %% parse_ParameterizedValueAssignment(Tokens) -> Result %% Result = {#pvaluedef{},Rest} | throw() -parse_ParameterizedValueAssignment([{identifier,L1,Name}|Rest]) -> +parse_ParameterizedValueAssignment([#identifier{pos=L1,val=Name}|Rest]) -> {ParameterList,Rest2} = parse_ParameterList(Rest), {Type,Rest3} = parse_Type(Rest2), case Rest3 of @@ -2044,13 +1534,9 @@ parse_ParameterizedValueAssignment([{identifier,L1,Name}|Rest]) -> {Value,Rest5} = parse_Value(Rest4), {#pvaluedef{pos=L1,name=Name,args=ParameterList,type=Type, value=Value},Rest5}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,'::=']}}) - end; -parse_ParameterizedValueAssignment(Tokens) -> - throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,identifier]}}). + _ -> + parse_error(Rest3) + end. %% parse_ParameterizedValueSetTypeAssignment(Tokens) -> Result %% Result = {#pvaluesetdef{},Rest} | throw() @@ -2062,14 +1548,9 @@ parse_ParameterizedValueSetTypeAssignment([{typereference,L1,Name}|Rest]) -> {ValueSet,Rest5} = parse_ValueSet(Rest4), {#pvaluesetdef{pos=L1,name=Name,args=ParameterList, type=Type,valueset=ValueSet},Rest5}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,'::=']}}) - end; -parse_ParameterizedValueSetTypeAssignment(Tokens) -> - throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected, - typereference]}}). + _ -> + parse_error(Rest3) + end. %% parse_ParameterizedObjectClassAssignment(Tokens) -> Result %% Result = {#ptypedef{},Rest} | throw() @@ -2080,18 +1561,13 @@ parse_ParameterizedObjectClassAssignment([{typereference,L1,Name}|Rest]) -> {Class,Rest4} = parse_ObjectClass(Rest3), {#ptypedef{pos=L1,name=Name,args=ParameterList,typespec=Class}, Rest4}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,'::=']}}) - end; -parse_ParameterizedObjectClassAssignment(Tokens) -> - throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected, - typereference]}}). + _ -> + parse_error(Rest2) + end. 
%% parse_ParameterizedObjectAssignment(Tokens) -> Result %% Result = {#pobjectdef{},Rest} | throw() -parse_ParameterizedObjectAssignment([{identifier,L1,Name}|Rest]) -> +parse_ParameterizedObjectAssignment([#identifier{pos=L1,val=Name}|Rest]) -> {ParameterList,Rest2} = parse_ParameterList(Rest), {Class,Rest3} = parse_DefinedObjectClass(Rest2), case Rest3 of @@ -2099,36 +1575,9 @@ parse_ParameterizedObjectAssignment([{identifier,L1,Name}|Rest]) -> {Object,Rest5} = parse_Object(Rest4), {#pobjectdef{pos=L1,name=Name,args=ParameterList, class=Class,def=Object},Rest5}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,'::=']}}) -%%% Other -> -%%% throw(Other) - end; -parse_ParameterizedObjectAssignment(Tokens) -> - throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,identifier]}}). - -%% parse_ParameterizedObjectSetAssignment(Tokens) -> Result -%% Result = {#pobjectsetdef{},Rest} | throw{} -parse_ParameterizedObjectSetAssignment([{typereference,L1,Name}|Rest]) -> - {ParameterList,Rest2} = parse_ParameterList(Rest), - {Class,Rest3} = parse_DefinedObjectClass(Rest2), - case Rest3 of - [{'::=',_}|Rest4] -> - {ObjectSet,Rest5} = parse_ObjectSet(Rest4), - {#pobjectsetdef{pos=L1,name=Name,args=ParameterList, - class=Class,def=ObjectSet},Rest5}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,'::=']}}) -%%% Other -> -%%% throw(Other) - end; -parse_ParameterizedObjectSetAssignment(Tokens) -> - throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected, - typereference]}}). + _ -> + parse_error(Rest3) + end. %% parse_ParameterList(Tokens) -> Result %% Result = [Parameter] @@ -2137,35 +1586,24 @@ parse_ParameterizedObjectSetAssignment(Tokens) -> %% Type = #type{} %% DefinedObjectClass = #'Externaltypereference'{} %% Reference = #'Externaltypereference'{} | #'Externalvaluereference'{} -parse_ParameterList([{'{',_}|Rest]) -> - parse_ParameterList(Rest,[]); -parse_ParameterList(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,'{']}}). +parse_ParameterList([{'{',_}|Tokens]) -> + parse_ParameterList(Tokens, []). parse_ParameterList(Tokens,Acc) -> {Parameter,Rest} = parse_Parameter(Tokens), case Rest of [{',',_}|Rest2] -> - parse_ParameterList(Rest2,[Parameter|Acc]); + parse_ParameterList(Rest2, [Parameter|Acc]); [{'}',_}|Rest3] -> - {lists:reverse([Parameter|Acc]),Rest3}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,[',','}']]}}) + {lists:reverse(Acc, [Parameter]),Rest3}; + _ -> + parse_error(Rest) end. parse_Parameter(Tokens) -> Flist = [fun parse_ParamGovAndRef/1, fun parse_Reference/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); - Result -> - Result - end. + parse_or(Tokens, Flist). parse_ParamGovAndRef(Tokens) -> {ParamGov,Rest} = parse_ParamGovernor(Tokens), @@ -2173,86 +1611,54 @@ parse_ParamGovAndRef(Tokens) -> [{':',_}|Rest2] -> {Ref,Rest3} = parse_Reference(Rest2), {{ParamGov,Ref},Rest3}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,':']}}) + _ -> + parse_error(Rest) end. 
parse_ParamGovernor(Tokens) -> Flist = [fun parse_Governor/1, fun parse_Reference/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); - Result -> - Result - end. - -% parse_ParameterizedReference(Tokens) -> -% {Ref,Rest} = parse_Reference(Tokens), -% case Rest of -% [{'{',_},{'}',_}|Rest2] -> -% {{ptref,Ref},Rest2}; -% _ -> -% {{ptref,Ref},Rest} -% end. + parse_or(Tokens, Flist). parse_SimpleDefinedType([{typereference,L1,ModuleName},{'.',_}, {typereference,_,TypeName}|Rest]) -> {#'Externaltypereference'{pos=L1,module=ModuleName, type=TypeName},Rest}; parse_SimpleDefinedType([Tref={typereference,_,_}|Rest]) -> -% {#'Externaltypereference'{pos=L2,module=get(asn1_module), -% type=TypeName},Rest}; {tref2Exttref(Tref),Rest}; parse_SimpleDefinedType(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected, - [typereference,'typereference.typereference']]}}). + parse_error(Tokens). parse_SimpleDefinedValue([{typereference,L1,ModuleName},{'.',_}, - {identifier,_,Value}|Rest]) -> + #identifier{val=Value}|Rest]) -> {{simpledefinedvalue,#'Externalvaluereference'{pos=L1,module=ModuleName, value=Value}},Rest}; -parse_SimpleDefinedValue([Id={identifier,_,_Value}|Rest]) -> +parse_SimpleDefinedValue([#identifier{}=Id|Rest]) -> {{simpledefinedvalue,identifier2Extvalueref(Id)},Rest}; parse_SimpleDefinedValue(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected, - ['typereference.identifier',identifier]]}}). + parse_error(Tokens). parse_ParameterizedType(Tokens) -> + %% May also be a parameterized class. {Type,Rest} = parse_SimpleDefinedType(Tokens), {Params,Rest2} = parse_ActualParameterList(Rest), - {{pt,Type,Params},Rest2}. + {#type{def={pt,Type,Params}},Rest2}. parse_ParameterizedValue(Tokens) -> + %% May also be a parameterized object. {Value,Rest} = parse_SimpleDefinedValue(Tokens), {Params,Rest2} = parse_ActualParameterList(Rest), {{pv,Value,Params},Rest2}. -parse_ParameterizedObjectClass(Tokens) -> - {Type,Rest} = parse_DefinedObjectClass(Tokens), - {Params,Rest2} = parse_ActualParameterList(Rest), - {{poc,Type,Params},Rest2}. - parse_ParameterizedObjectSet(Tokens) -> {ObjectSet,Rest} = parse_DefinedObjectSet(Tokens), {Params,Rest2} = parse_ActualParameterList(Rest), {{pos,ObjectSet,Params},Rest2}. -parse_ParameterizedObject(Tokens) -> - {Object,Rest} = parse_DefinedObject(Tokens), - {Params,Rest2} = parse_ActualParameterList(Rest), - {{po,Object,Params},Rest2}. - parse_ActualParameterList([{'{',_}|Rest]) -> parse_ActualParameterList(Rest,[]); parse_ActualParameterList(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,'{']}}). + parse_error(Tokens). parse_ActualParameterList(Tokens,Acc) -> {Parameter,Rest} = parse_ActualParameter(Tokens), @@ -2260,43 +1666,22 @@ parse_ActualParameterList(Tokens,Acc) -> [{',',_}|Rest2] -> parse_ActualParameterList(Rest2,[Parameter|Acc]); [{'}',_}|Rest3] -> - {lists:reverse([Parameter|Acc]),Rest3}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,[',','}']]}}) -%%% Other -> -%%% throw(Other) + {lists:reverse(Acc, [Parameter]),Rest3}; + _ -> + parse_error(Rest) end. - - - - - - -%------------------------- - +%% Test whether Token is allowed in a syntax list. 
is_word(Token) -> - case not_allowed_word(Token) of + List = atom_to_list(Token), + case not_allowed_word(List) of true -> false; - _ -> - if - is_atom(Token) -> - Item = atom_to_list(Token), - is_word(Item); - is_list(Token), length(Token) == 1 -> - check_one_char_word(Token); - is_list(Token) -> - [A|Rest] = Token, - case check_first(A) of - true -> - check_rest(Rest); - _ -> - false - end - end + false -> is_word_1(List) end. +is_word_1([H|T]) -> + check_first(H) andalso check_rest(T). + not_allowed_word(Name) -> lists:member(Name,["BIT", "BOOLEAN", @@ -2321,257 +1706,123 @@ not_allowed_word(Name) -> "TRUE", "UNION"]). -check_one_char_word([A]) when $A =< A, $Z >= A -> - true; -check_one_char_word([_]) -> - false. %% unknown item in SyntaxList +check_first(C) -> + $A =< C andalso C =< $Z. -check_first(A) when $A =< A, $Z >= A -> - true; -check_first(_) -> - false. %% unknown item in SyntaxList - -check_rest([R,R|_Rs]) when $- == R -> - false; %% two consecutive hyphens are not allowed in a word -check_rest([R]) when $- == R -> - false; %% word cannot end with hyphen -check_rest([R|Rs]) when $A=<R, $Z>=R; $-==R -> +check_rest([R|Rs]) when $A =< R, R =< $Z; R =:= $- -> check_rest(Rs); check_rest([]) -> true; check_rest(_) -> false. +%%% +%%% Parse alternative type lists for CHOICE. +%%% + +parse_AlternativeTypeLists(Tokens0) -> + {Root,Tokens1} = parse_AlternativeTypeList(Tokens0), + case Tokens1 of + [{',',_}|Tokens2] -> + {ExtMarker,Tokens3} = parse_ExtensionAndException(Tokens2), + {ExtAlts,Tokens4} = parse_ExtensionAdditionAlternatives(Tokens3), + {_,Tokens} = parse_OptionalExtensionMarker(Tokens4, []), + {Root++ExtMarker++ExtAlts,Tokens}; + Tokens -> + {Root,Tokens} + end. + +parse_ExtensionAndException([{'...',L}|Tokens0]) -> + {[#'EXTENSIONMARK'{pos=L}], + case Tokens0 of + [{'!',_}|Tokens1] -> + {_,Tokens} = parse_ExceptionIdentification(Tokens1), + Tokens; + _ -> + Tokens0 + end}. + +parse_AlternativeTypeList([#identifier{}|_]=Tokens0) -> + {AltType,Tokens} = parse_NamedType(Tokens0), + parse_AlternativeTypeList_1(Tokens, [AltType]); +parse_AlternativeTypeList(Tokens) -> + parse_error(Tokens). + +parse_AlternativeTypeList_1([{',',_}|[#identifier{}|_]=Tokens0], Acc) -> + {AltType,Tokens} = parse_NamedType(Tokens0), + parse_AlternativeTypeList_1(Tokens, [AltType|Acc]); +parse_AlternativeTypeList_1(Tokens, Acc) -> + {lists:reverse(Acc),Tokens}. -to_set(V) when is_list(V) -> - ordsets:from_list(V); -to_set(V) -> - ordsets:from_list([V]). - -parse_AlternativeTypeLists(Tokens) -> - parse_AlternativeTypeLists(Tokens,[]). 
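The rewritten is_word/1 converts the token to a string once (so it now assumes an atom argument, as at the call sites above), rejects the reserved words listed in not_allowed_word/1, and then requires an upper-case first letter followed by upper-case letters and hyphens; the old explicit rejection of consecutive or trailing hyphens in check_rest/1 is not carried over. Expected behaviour, read off the code above rather than taken from test output:

    %% Illustrative calls:
    %% is_word('WORD-NAME') -> true    %upper-case letters and hyphens
    %% is_word('BOOLEAN')   -> false   %reserved word
    %% is_word('Mixed')     -> false   %lower-case letters not allowed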
- -parse_AlternativeTypeLists(Tokens = [{identifier,_,_}|_Rest0],Clist) -> - {CompList,Rest1} = parse_AlternativeTypeList(Tokens,[]), - parse_AlternativeTypeLists(Rest1,Clist++CompList); -parse_AlternativeTypeLists([{'...',L1},{'!',_}|Rest02],Clist0) -> - {_,Rest03} = parse_ExceptionIdentification(Rest02), - %% Exception info is currently thrown away - parse_AlternativeTypeLists2(Rest03,Clist0++[#'EXTENSIONMARK'{pos=L1}]); -parse_AlternativeTypeLists([{',',L1},{'...',_},{'!',_}|Rest02],Clist0) when Clist0 =/= []-> - {_,Rest03} = parse_ExceptionIdentification(Rest02), - %% Exception info is currently thrown away - parse_AlternativeTypeLists2(Rest03,Clist0++[#'EXTENSIONMARK'{pos=L1}]); - -parse_AlternativeTypeLists([{',',_},{'...',L1}|Rest02],Clist0) when Clist0 =/= []-> - parse_AlternativeTypeLists2(Rest02,Clist0++[#'EXTENSIONMARK'{pos=L1}]); -parse_AlternativeTypeLists([{'...',L1}|Rest02],Clist0) -> - parse_AlternativeTypeLists2(Rest02,Clist0++[#'EXTENSIONMARK'{pos=L1}]); -parse_AlternativeTypeLists(Tokens = [{'}',_L1}|_Rest02],Clist0) -> - {Clist0,Tokens}. - -parse_AlternativeTypeLists2(Tokens,Clist) -> - {ExtAdd,Rest} = parse_ExtensionAdditionAlternatives(Tokens,Clist), - {Clist2,Rest2} = parse_OptionalExtensionMarker(Rest,lists:flatten(ExtAdd)), - case Rest2 of - [{',',_}|Rest3] -> - {CompList,Rest4} = parse_AlternativeTypeList(Rest3,[]), - {Clist2 ++ CompList,Rest4}; - _ -> - {Clist2,Rest2} - end. - - - -parse_AlternativeTypeList([{',',_},Id = {identifier,_,_}|Rest],Acc) when Acc =/= [] -> - {AlternativeType,Rest2} = parse_NamedType([Id|Rest]), - parse_AlternativeTypeList(Rest2,[AlternativeType|Acc]); -parse_AlternativeTypeList(Tokens = [{'}',_}|_],Acc) -> - {lists:reverse(Acc),Tokens}; -parse_AlternativeTypeList(Tokens = [{']',_},{']',_}|_],Acc) -> - {lists:reverse(Acc),Tokens}; -parse_AlternativeTypeList(Tokens = [{',',_},{'...',_}|_],Acc) -> - {lists:reverse(Acc),Tokens}; -parse_AlternativeTypeList(Tokens,[]) -> - {AlternativeType,Rest} = parse_NamedType(Tokens), - parse_AlternativeTypeList(Rest,[AlternativeType]); -parse_AlternativeTypeList(Tokens,_) -> - throw({asn1_error, - {get_line(hd(Tokens)),get(asn1_module), - [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))], - expected,['}',', identifier']]}}). - -parse_ExtensionAdditionAlternatives(Tokens =[{',',_}|_],Clist) -> - {ExtAddList,Rest2} = parse_ExtensionAdditionAlternativesList(Tokens,[]), - {Clist++lists:flatten(ExtAddList),Rest2}; -parse_ExtensionAdditionAlternatives(Tokens,Clist) -> - %% Empty - {Clist,Tokens}. +parse_ExtensionAdditionAlternatives([{',',_}|_]=Tokens0) -> + parse_ExtensionAdditionAlternativesList(Tokens0, []); +parse_ExtensionAdditionAlternatives(Tokens) -> + {[],Tokens}. 
-parse_ExtensionAdditionAlternativesList([{',',_},Id = {identifier,_,_}|Rest],Acc) -> - {AlternativeType,Rest2} = parse_NamedType([Id|Rest]), - parse_ExtensionAdditionAlternativesList(Rest2,[AlternativeType|Acc]); -parse_ExtensionAdditionAlternativesList([{',',_},C1 = {'[',_},C2 = {'[',_}|Rest],Acc) -> - {ExtAddGroup,Rest2} = parse_ExtensionAdditionAlternativesGroup([C1,C2|Rest],[]), - parse_ExtensionAdditionAlternativesList(Rest2,[ExtAddGroup|Acc]); -parse_ExtensionAdditionAlternativesList(Tokens = [{'}',_}|_],Acc) -> - {lists:reverse(Acc),Tokens}; -parse_ExtensionAdditionAlternativesList(Tokens = [{',',_},{'...',_}|_],Acc) -> - {lists:reverse(Acc),Tokens}; -parse_ExtensionAdditionAlternativesList(Tokens,_) -> - throw({asn1_error, - {get_line(hd(Tokens)),get(asn1_module), - [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))], - expected,['}',', identifier']]}}). - - -parse_ExtensionAdditionAlternativesGroup([ {'[',_},{'[',_},_VsnNr = {number,_,Num},{':',_}|Rest],[]) -> - parse_ExtensionAdditionAlternativesGroup2(Rest,Num); -parse_ExtensionAdditionAlternativesGroup([ {'[',_},{'[',_}|Rest],[]) -> - parse_ExtensionAdditionAlternativesGroup2(Rest,undefined); -parse_ExtensionAdditionAlternativesGroup(Tokens,_) -> - throw({asn1_error, - {get_line(hd(Tokens)),get(asn1_module), - [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))], - expected,['[[']]}}). - - -parse_ExtensionAdditionAlternativesGroup2(Tokens,Num) -> - {CompTypeList,Rest} = parse_AlternativeTypeList(Tokens,[]), - case Rest of - [{']',_},{']',_}|Rest2] -> - {[{'ExtensionAdditionGroup',Num}|CompTypeList] ++ - ['ExtensionAdditionGroupEnd'],Rest2}; +parse_ExtensionAdditionAlternativesList([{',',_}|Tokens1]=Tokens0, Acc) -> + try parse_ExtensionAdditionAlternative(Tokens1) of + {ExtAddAlt,Tokens2} -> + parse_ExtensionAdditionAlternativesList(Tokens2, [ExtAddAlt|Acc]) + catch + throw:{asn1_error,_} -> + {lists:append(lists:reverse(Acc)),Tokens0} + end; +parse_ExtensionAdditionAlternativesList(Tokens, Acc) -> + {lists:append(lists:reverse(Acc)),Tokens}. + +parse_ExtensionAdditionAlternative([#identifier{}|_]=Tokens0) -> + {NamedType,Tokens} = parse_NamedType(Tokens0), + {[NamedType],Tokens}; +parse_ExtensionAdditionAlternative([{'[',_},{'[',_}|Tokens0]) -> + Tokens2 = case Tokens0 of + [{number,_,_},{':',_}|Tokens1] -> Tokens1; + _ -> Tokens0 + end, + {GroupList,Tokens3} = parse_AlternativeTypeList(Tokens2), + case Tokens3 of + [{']',_},{']',_}|Tokens] -> + {GroupList,Tokens}; _ -> - throw({asn1_error,{get_line(hd(Rest)),get(asn1_module), - [got,get_token(hd(Rest)),expected,[']]']]}}) - end. 
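The new parse_ExtensionAdditionAlternative/1 accepts either a plain named alternative or an extension addition group, and the optional version number in front of a group is parsed and then dropped; the group's members are simply spliced into the alternative list by the lists:append/1 in the caller. For example (ASN.1 shown inside an Erlang comment):

    %% A CHOICE such as
    %%
    %%   C ::= CHOICE {
    %%       a INTEGER,
    %%       ...,
    %%       [[2: b BOOLEAN, c OCTET STRING]]
    %%   }
    %%
    %% is parsed with the "2:" version tag discarded, and b and c end up
    %% after the extension marker as ordinary #'ComponentType'{} records.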
- -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -%% parse_AlternativeTypeLists(Tokens,ExtensionDefault) -> -%% {AltTypeList,Rest1} = parse_AlternativeTypeList(Tokens), -%% {ExtensionAndException,Rest2} = -%% case Rest1 of -%% [{',',_},{'...',L1},{'!',_}|Rest12] -> -%% {_,Rest13} = parse_ExceptionIdentification(Rest12), -%% %% Exception info is currently thrown away -%% {[#'EXTENSIONMARK'{pos=L1}],Rest13}; -%% [{',',_},{'...',L1}|Rest12] -> -%% {[#'EXTENSIONMARK'{pos=L1}],Rest12}; -%% _ -> -%% {[],Rest1} -%% end, -%% {AltTypeList2,Rest5} = -%% case ExtensionAndException of -%% [] -> -%% {AltTypeList,Rest2}; -%% _ -> -%% {ExtensionAddition,Rest3} = -%% case Rest2 of -%% [{',',_}|Rest23] -> -%% parse_ExtensionAdditionAlternativeList(Rest23); -%% _ -> -%% {[],Rest2} -%% end, -%% {OptionalExtensionMarker,Rest4} = -%% case Rest3 of -%% [{',',_},{'...',L3}|Rest31] -> -%% {[#'EXTENSIONMARK'{pos=L3}],Rest31}; -%% _ -> -%% {[],Rest3} -%% end, -%% {AltTypeList ++ ExtensionAndException ++ -%% ExtensionAddition ++ OptionalExtensionMarker, Rest4} -%% end, -%% AltTypeList3 = -%% case [X || X=#'EXTENSIONMARK'{} <- AltTypeList2] of -%% [] when ExtensionDefault == 'IMPLIED' -> -%% AltTypeList2 ++ [#'EXTENSIONMARK'{}]; -%% _ -> -%% AltTypeList2 -%% end, -%% {AltTypeList3,Rest5}. - - -%% parse_AlternativeTypeList(Tokens) -> -%% parse_AlternativeTypeList(Tokens,[]). + parse_error(Tokens3) + end; +parse_ExtensionAdditionAlternative(Tokens) -> + parse_error(Tokens). -%% parse_AlternativeTypeList(Tokens,Acc) -> -%% {NamedType,Rest} = parse_NamedType(Tokens), -%% case Rest of -%% [{',',_},Id = {identifier,_,_}|Rest2] -> -%% parse_AlternativeTypeList([Id|Rest2],[NamedType|Acc]); -%% _ -> -%% {lists:reverse([NamedType|Acc]),Rest} -%% end. +%%% +%%% End of parsing of alternative type lists. +%%% - - -%% parse_ExtensionAdditionAlternativeList(Tokens) -> -%% parse_ExtensionAdditionAlternativeList(Tokens,[]). - -%% parse_ExtensionAdditionAlternativeList([{'[[',_}|Rest],Acc) -> -%% parse_ExtensionAdditionAlternativeList(Rest,Acc); -%% parse_ExtensionAdditionAlternativeList(Tokens = [{identifier,_,_}|_Rest],Acc) -> -%% {Element,Rest0} = parse_NamedType(Tokens); -%% case Rest0 of -%% [{',',_}|Rest01] -> -%% parse_ExtensionAdditionAlternativeList(Rest01,[Element|Acc]); -%% _ -> -%% {lists:reverse([Element|Acc]),Rest0} -%% end. - -%% parse_ExtensionAdditionAlternatives([{'[[',_}|Rest]) -> -%% parse_ExtensionAdditionAlternatives(Rest,[]); -%% parse_ExtensionAdditionAlternatives(Tokens) -> -%% throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), -%% [got,get_token(hd(Tokens)),expected,'[[']}}). - -%% parse_ExtensionAdditionAlternatives([Id = {identifier,_,_}|Rest],Acc) -> -%% {NamedType, Rest2} = parse_NamedType([Id|Rest]), -%% case Rest2 of -%% [{',',_}|Rest21] -> -%% parse_ExtensionAdditionAlternatives(Rest21,[NamedType|Acc]); -%% [{']]',_}|Rest21] -> -%% {lists:reverse(Acc),Rest21}; -%% _ -> -%% throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module), -%% [got,get_token(hd(Rest2)),expected,[',',']]']]}}) -%% end. - -parse_NamedType([{identifier,L1,Idname}|Rest]) -> +parse_NamedType([#identifier{pos=L1,val=Idname}|Rest]) -> {Type,Rest2} = parse_Type(Rest), {#'ComponentType'{pos=L1,name=Idname,typespec=Type,prop=mandatory},Rest2}; parse_NamedType(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,identifier]}}). + parse_error(Tokens). +%%% +%%% Parse component type lists for SEQUENCE and SET. 
+%%% parse_ComponentTypeLists(Tokens) -> - parse_ComponentTypeLists(Tokens,[]). + parse_ComponentTypeLists(Tokens, []). -parse_ComponentTypeLists(Tokens = [{identifier,_,_}|_Rest0],Clist) -> - {CompList,Rest1} = parse_ComponentTypeList(Tokens,[]), - parse_ComponentTypeLists(Rest1,Clist++CompList); -parse_ComponentTypeLists(Tokens = [{'COMPONENTS',_},{'OF',_}|_Rest],Clist) -> +parse_ComponentTypeLists([#identifier{}|_Rest0]=Tokens, Clist) -> {CompList,Rest1} = parse_ComponentTypeList(Tokens,[]), parse_ComponentTypeLists(Rest1,Clist++CompList); -parse_ComponentTypeLists([{'...',L1},{'!',_}|Rest02],Clist0) -> - {_,Rest03} = parse_ExceptionIdentification(Rest02), - %% Exception info is currently thrown away - parse_ComponentTypeLists2(Rest03,Clist0++[#'EXTENSIONMARK'{pos=L1}]); +parse_ComponentTypeLists([{'COMPONENTS',_},{'OF',_}|_]=Tokens,Clist) -> + {CompList,Rest1} = parse_ComponentTypeList(Tokens, []), + parse_ComponentTypeLists(Rest1, Clist++CompList); parse_ComponentTypeLists([{',',L1},{'...',_},{'!',_}|Rest02],Clist0) when Clist0 =/= []-> {_,Rest03} = parse_ExceptionIdentification(Rest02), %% Exception info is currently thrown away parse_ComponentTypeLists2(Rest03,Clist0++[#'EXTENSIONMARK'{pos=L1}]); - - parse_ComponentTypeLists([{',',_},{'...',L1}|Rest02],Clist0) when Clist0 =/= []-> +parse_ComponentTypeLists([{',',_},{'...',L1}|Rest02],Clist0) when Clist0 =/= []-> parse_ComponentTypeLists2(Rest02,Clist0++[#'EXTENSIONMARK'{pos=L1}]); parse_ComponentTypeLists([{'...',L1}|Rest02],Clist0) -> parse_ComponentTypeLists2(Rest02,Clist0++[#'EXTENSIONMARK'{pos=L1}]); parse_ComponentTypeLists(Tokens = [{'}',_L1}|_Rest02],Clist0) -> - {Clist0,Tokens}. + {Clist0,Tokens}; +parse_ComponentTypeLists(Tokens, _) -> + parse_error(Tokens). parse_ComponentTypeLists2(Tokens,Clist) -> {ExtAdd,Rest} = parse_ExtensionAdditions(Tokens,Clist), @@ -2590,12 +1841,12 @@ parse_OptionalExtensionMarker(Tokens,Clist) -> {Clist,Tokens}. -parse_ComponentTypeList([{',',_},Id = {identifier,_,_}|Rest],Acc) when Acc =/= [] -> - {ComponentType,Rest2} = parse_ComponentType([Id|Rest]), - parse_ComponentTypeList(Rest2,[ComponentType|Acc]); -parse_ComponentTypeList([{',',_},C1={'COMPONENTS',_},C2={'OF',_}|Rest],Acc) when Acc =/= [] -> - {ComponentType,Rest2} = parse_ComponentType([C1,C2|Rest]), - parse_ComponentTypeList(Rest2,[ComponentType|Acc]); +parse_ComponentTypeList([{',',_}|[#identifier{}|_]=Tokens0], Acc) when Acc =/= [] -> + {ComponentType,Tokens} = parse_ComponentType(Tokens0), + parse_ComponentTypeList(Tokens, [ComponentType|Acc]); +parse_ComponentTypeList([{',',_}|[{'COMPONENTS',_},{'OF',_}|_]=Tokens0], Acc) when Acc =/= [] -> + {ComponentType,Tokens} = parse_ComponentType(Tokens0), + parse_ComponentTypeList(Tokens, [ComponentType|Acc]); parse_ComponentTypeList(Tokens = [{'}',_}|_],Acc) -> {lists:reverse(Acc),Tokens}; parse_ComponentTypeList(Tokens = [{']',_},{']',_}|_],Acc) -> @@ -2606,10 +1857,7 @@ parse_ComponentTypeList(Tokens,[]) -> {ComponentType,Rest} = parse_ComponentType(Tokens), parse_ComponentTypeList(Rest,[ComponentType]); parse_ComponentTypeList(Tokens,_) -> - throw({asn1_error, - {get_line(hd(Tokens)),get(asn1_module), - [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))], - expected,['}',', identifier']]}}). + parse_error(Tokens). parse_ExtensionAdditions(Tokens=[{',',_}|_],Clist) -> {ExtAddList,Rest2} = parse_ExtensionAdditionList(Tokens,[]), @@ -2618,46 +1866,36 @@ parse_ExtensionAdditions(Tokens,Clist) -> %% Empty {Clist,Tokens}. 
-parse_ExtensionAdditionList([{',',_},Id = {identifier,_,_}|Rest],Acc) -> - {ComponentType,Rest2} = parse_ComponentType([Id|Rest]), - parse_ExtensionAdditionList(Rest2,[ComponentType|Acc]); -parse_ExtensionAdditionList([{',',_},C1={'COMPONENTS',_},C2={'OF',_}|Rest],Acc) -> - {ComponentType,Rest2} = parse_ComponentType([C1,C2|Rest]), - parse_ExtensionAdditionList(Rest2,[ComponentType|Acc]); -parse_ExtensionAdditionList([{',',_},C1 = {'[',_},C2 = {'[',_}|Rest],Acc) -> - {ExtAddGroup,Rest2} = parse_ExtensionAdditionGroup([C1,C2|Rest],[]), +parse_ExtensionAdditionList([{',',_}|[#identifier{}|_]=Tokens0], Acc) -> + {ComponentType,Tokens} = parse_ComponentType(Tokens0), + parse_ExtensionAdditionList(Tokens, [ComponentType|Acc]); +parse_ExtensionAdditionList([{',',_}|[{'COMPONENTS',_},{'OF',_}|_]=Tokens0], Acc) -> + {ComponentType,Tokens} = parse_ComponentType(Tokens0), + parse_ExtensionAdditionList(Tokens, [ComponentType|Acc]); +parse_ExtensionAdditionList([{',',_},{'[',_},{'[',_}|Tokens], Acc) -> + {ExtAddGroup,Rest2} = parse_ExtensionAdditionGroup(Tokens), parse_ExtensionAdditionList(Rest2,[ExtAddGroup|Acc]); -parse_ExtensionAdditionList(Tokens = [{'}',_}|_],Acc) -> +parse_ExtensionAdditionList([{'}',_}|_]=Tokens, Acc) -> {lists:reverse(Acc),Tokens}; -parse_ExtensionAdditionList(Tokens = [{',',_},{'...',_}|_],Acc) -> +parse_ExtensionAdditionList([{',',_},{'...',_}|_]=Tokens, Acc) -> {lists:reverse(Acc),Tokens}; -parse_ExtensionAdditionList(Tokens,_) -> - throw({asn1_error, - {get_line(hd(Tokens)),get(asn1_module), - [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))], - expected,['}',', identifier']]}}). - +parse_ExtensionAdditionList(Tokens, _) -> + parse_error(Tokens). -parse_ExtensionAdditionGroup([ {'[',_},{'[',_},_VsnNr = {number,_,Num},{':',_}|Rest],[]) -> - parse_ExtensionAdditionGroup2(Rest,Num); -parse_ExtensionAdditionGroup([ {'[',_},{'[',_}|Rest],[]) -> - parse_ExtensionAdditionGroup2(Rest,undefined); -parse_ExtensionAdditionGroup(Tokens,_) -> - throw({asn1_error, - {get_line(hd(Tokens)),get(asn1_module), - [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))], - expected,['[[']]}}). +parse_ExtensionAdditionGroup([{number,_,Num},{':',_}|Tokens]) -> + parse_ExtensionAdditionGroup2(Tokens, Num); +parse_ExtensionAdditionGroup(Tokens) -> + parse_ExtensionAdditionGroup2(Tokens, undefined). -parse_ExtensionAdditionGroup2(Tokens,Num) -> +parse_ExtensionAdditionGroup2(Tokens, Num) -> {CompTypeList,Rest} = parse_ComponentTypeList(Tokens,[]), case Rest of [{']',_},{']',_}|Rest2] -> {[{'ExtensionAdditionGroup',Num}|CompTypeList] ++ ['ExtensionAdditionGroupEnd'],Rest2}; _ -> - throw({asn1_error,{get_line(hd(Rest)),get(asn1_module), - [got,get_token(hd(Rest)),expected,[']]']]}}) + parse_error(Rest) end. @@ -2676,83 +1914,81 @@ parse_ComponentType(Tokens) -> Result end. - +%%% +%%% Parse ENUMERATED. +%%% -parse_SignedNumber([{number,_,Value}|Rest]) -> - {Value,Rest}; -parse_SignedNumber([{'-',_},{number,_,Value}|Rest]) -> - {-Value,Rest}; -parse_SignedNumber(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected, - [number,'-number']]}}). - -parse_Enumerations(Tokens=[{identifier,_,_}|_Rest],ExtensionDefault) -> - parse_Enumerations(Tokens,[],ExtensionDefault); -parse_Enumerations([H|_T],_) -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,identifier]}}). 
- -parse_Enumerations(Tokens = [{identifier,_,_},{'(',_}|_Rest], Acc, ExtensionDefault) -> - {NamedNumber,Rest2} = parse_NamedNumber(Tokens), - case Rest2 of - [{',',_}|Rest3] -> - parse_Enumerations(Rest3,[NamedNumber|Acc], ExtensionDefault); - _ when ExtensionDefault == 'IMPLIED'-> - {lists:reverse(['EXTENSIONMARK',NamedNumber|Acc]),Rest2}; +parse_Enumerations(Tokens0) -> + {Root,Tokens1} = parse_Enumeration(Tokens0), + case Tokens1 of + [{',',_},{'...',_},{',',_}|Tokens2] -> + {Ext,Tokens} = parse_Enumeration(Tokens2), + {Root++['EXTENSIONMARK'|Ext],Tokens}; + [{',',_},{'...',_}|Tokens] -> + {Root++['EXTENSIONMARK'],Tokens}; _ -> - {lists:reverse([NamedNumber|Acc]),Rest2} - end; -parse_Enumerations([{identifier,_,Id}|Rest], Acc, ExtensionDefault) -> - case Rest of - [{',',_}|Rest2] -> - parse_Enumerations(Rest2,[Id|Acc], ExtensionDefault); - _ when ExtensionDefault == 'IMPLIED' -> - {lists:reverse(['EXTENSIONMARK', Id |Acc]),Rest}; - _ -> - {lists:reverse([Id|Acc]),Rest} - end; -parse_Enumerations([{'...',_}|Rest], Acc, _ExtensionDefault) -> - case Rest of - [{',',_}|Rest2] -> - parse_Enumerations(Rest2,['EXTENSIONMARK'|Acc],undefined); - _ -> - {lists:reverse(['EXTENSIONMARK'|Acc]),Rest} + case get(extensiondefault) of + 'IMPLIED' -> + {Root++['EXTENSIONMARK'],Tokens1}; + _ -> + {Root,Tokens1} + end + end. + +parse_Enumeration(Tokens0) -> + {Item,Tokens} = parse_EnumerationItem(Tokens0), + parse_Enumeration_1(Tokens, [Item]). + +parse_Enumeration_1([{',',_}|Tokens1]=Tokens0, Acc) -> + try parse_EnumerationItem(Tokens1) of + {Item,Tokens} -> + parse_Enumeration_1(Tokens, [Item|Acc]) + catch + throw:{asn1_error,_} -> + {lists:reverse(Acc),Tokens0} end; -parse_Enumerations([H|_T],_,_) -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,identifier]}}). +parse_Enumeration_1(Tokens, Acc) -> + {lists:reverse(Acc),Tokens}. + +parse_EnumerationItem([#identifier{},{'(',_}|_]=Tokens) -> + parse_NamedNumber(Tokens); +parse_EnumerationItem([#identifier{val=Id}|Tokens]) -> + {Id,Tokens}; +parse_EnumerationItem(Tokens) -> + parse_error(Tokens). + +%%% +%%% End of parsing of ENUMERATED. +%%% parse_NamedNumberList(Tokens) -> - parse_NamedNumberList(Tokens,[]). + parse_NamedNumberList(Tokens, []). -parse_NamedNumberList(Tokens,Acc) -> +parse_NamedNumberList(Tokens, Acc) -> {NamedNum,Rest} = parse_NamedNumber(Tokens), case Rest of [{',',_}|Rest2] -> parse_NamedNumberList(Rest2,[NamedNum|Acc]); _ -> - {lists:reverse([NamedNum|Acc]),Rest} + {lists:reverse(Acc, [NamedNum]),Rest} end. -parse_NamedNumber([{identifier,_,Name},{'(',_}|Rest]) -> +parse_NamedNumber([#identifier{val=Name},{'(',_}|Rest]) -> Flist = [fun parse_SignedNumber/1, fun parse_DefinedValue/1], - case (catch parse_or(Rest,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); + case parse_or(Rest, Flist) of {NamedNum,[{')',_}|Rest2]} -> {{'NamedNumber',Name,NamedNum},Rest2}; _ -> - throw({asn1_error,{get_line(hd(Rest)),get(asn1_module), - [got,get_token(hd(Rest)),expected,'NamedNumberList']}}) + parse_error(Rest) end; parse_NamedNumber(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,identifier]}}). + parse_error(Tokens). +parse_SignedNumber([{number,_,Value}|Rest]) -> + {Value,Rest}; +parse_SignedNumber(Tokens) -> + parse_error(Tokens). 
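parse_Enumerations now takes only the token list: the extension default ('IMPLIED' or not) is fetched from the process dictionary instead of being threaded through as an argument, and the root and extension enumerations are handled by a shared parse_Enumeration/1. The result shape, read off the code above (not compiler output; identifiers shown as atoms):

    %% ENUMERATED { red, green(2), ..., blue }
    %%   -> [red, {'NamedNumber',green,2}, 'EXTENSIONMARK', blue]
    %%
    %% ENUMERATED { red, green }  in a module with EXTENSIBILITY IMPLIED
    %%   -> [red, green, 'EXTENSIONMARK']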
parse_Tag([{'[',_}|Rest]) -> {Class,Rest2} = parse_Class(Rest), @@ -2767,12 +2003,8 @@ parse_Tag([{'[',_}|Rest]) -> [{']',_}|Rest4] -> {#tag{class=Class,number=ClassNumber},Rest4}; _ -> - throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module), - [got,get_token(hd(Rest3)),expected,']']}}) - end; -parse_Tag(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,'[']}}). + parse_error(Rest3) + end. parse_Class([{'UNIVERSAL',_}|Rest]) -> {'UNIVERSAL',Rest}; @@ -2791,15 +2023,7 @@ parse_Value(Tokens) -> Flist = [fun parse_BuiltinValue/1, fun parse_ValueFromObject/1, fun parse_DefinedValue/1], - - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); - Result -> - Result - end. + parse_or(Tokens, Flist). parse_BuiltinValue([{bstring,_,Bstr}|Rest]) -> {{bstring,Bstr},Rest}; @@ -2812,18 +2036,11 @@ parse_BuiltinValue(Tokens = [{'{',_}|_Rest]) -> fun parse_SequenceOfValue/1, fun parse_SequenceValue/1, fun parse_ObjectIdentifierValue/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); - Result -> - Result - end; -parse_BuiltinValue([{identifier,_,IdName},{':',_}|Rest]) -> + parse_or(Tokens, Flist); +parse_BuiltinValue([#identifier{val=IdName},{':',_}|Rest]) -> {Value,Rest2} = parse_Value(Rest), {{'CHOICE',{IdName,Value}},Rest2}; -parse_BuiltinValue(Tokens=[{'NULL',_},{':',_}|_Rest]) -> +parse_BuiltinValue([{'NULL',_},{':',_}|_]=Tokens) -> parse_ObjectClassFieldValue(Tokens); parse_BuiltinValue([{'NULL',_}|Rest]) -> {'NULL',Rest}; @@ -2839,31 +2056,29 @@ parse_BuiltinValue([{cstring,_,Cstr}|Rest]) -> {Cstr,Rest}; parse_BuiltinValue([{number,_,Num}|Rest]) -> {Num,Rest}; -parse_BuiltinValue([{'-',_},{number,_,Num}|Rest]) -> - {- Num,Rest}; parse_BuiltinValue(Tokens) -> parse_ObjectClassFieldValue(Tokens). -parse_DefinedValue(Tokens=[{identifier,_,_},{'{',_}|_Rest]) -> - parse_ParameterizedValue(Tokens); -%% Externalvaluereference -parse_DefinedValue([{typereference,L1,Tname},{'.',_},{identifier,_,Idname}|Rest]) -> +parse_DefinedValue(Tokens) -> + Flist = [fun parse_ParameterizedValue/1, + fun parse_DefinedValue2/1], + parse_or(Tokens, Flist). + +parse_DefinedValue2([{typereference,L1,Tname}, + {'.',_}, + #identifier{val=Idname}|Rest]) -> {#'Externalvaluereference'{pos=L1,module=Tname,value=Idname},Rest}; %% valuereference -parse_DefinedValue([Id = {identifier,_,_}|Rest]) -> +parse_DefinedValue2([#identifier{}=Id|Rest]) -> {identifier2Extvalueref(Id),Rest}; -%% ParameterizedValue -parse_DefinedValue(Tokens) -> - parse_ParameterizedValue(Tokens). +parse_DefinedValue2(Tokens) -> + parse_error(Tokens). parse_SequenceValue([{'{',_}|Tokens]) -> - parse_SequenceValue(Tokens,[]); -parse_SequenceValue(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,'{']}}). + parse_SequenceValue(Tokens, []). 
-parse_SequenceValue([{identifier,Pos,IdName}|Rest],Acc) -> +parse_SequenceValue([#identifier{pos=Pos,val=IdName}|Rest],Acc) -> {Value,Rest2} = parse_Value(Rest), SeqTag = #seqtag{pos=Pos,module=get(asn1_module),val=IdName}, case Rest2 of @@ -2872,18 +2087,13 @@ parse_SequenceValue([{identifier,Pos,IdName}|Rest],Acc) -> [{'}',_}|Rest3] -> {lists:reverse(Acc, [{SeqTag,Value}]),Rest3}; _ -> - throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module), - [got,get_token(hd(Rest2)),expected,'}']}}) + parse_error(Rest2) end; parse_SequenceValue(Tokens,_Acc) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,identifier]}}). + parse_error(Tokens). parse_SequenceOfValue([{'{',_}|Tokens]) -> - parse_SequenceOfValue(Tokens,[]); -parse_SequenceOfValue(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,'{']}}). + parse_SequenceOfValue(Tokens, []). parse_SequenceOfValue(Tokens,Acc) -> {Value,Rest2} = parse_Value(Tokens), @@ -2891,10 +2101,9 @@ parse_SequenceOfValue(Tokens,Acc) -> [{',',_}|Rest3] -> parse_SequenceOfValue(Rest3,[Value|Acc]); [{'}',_}|Rest3] -> - {lists:reverse([Value|Acc]),Rest3}; + {lists:reverse(Acc, [Value]),Rest3}; _ -> - throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module), - [got,get_token(hd(Rest2)),expected,'}']}}) + parse_error(Rest2) end. parse_ValueSetTypeAssignment([{typereference,L1,Name}|Rest]) -> @@ -2904,49 +2113,31 @@ parse_ValueSetTypeAssignment([{typereference,L1,Name}|Rest]) -> {ValueSet,Rest4} = parse_ValueSet(Rest3), {#valuedef{pos=L1,name=Name,type=Type,value=ValueSet, module=get(asn1_module)},Rest4}; - [H|_T] -> - throw({asn1_error,{get_line(L1),get(asn1_module), - [got,get_token(H),expected,'::=']}}) - end; -parse_ValueSetTypeAssignment(Tokens) -> - throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected, - typereference]}}). + _ -> + parse_error(Rest2) + end. parse_ValueSet([{'{',_}|Rest]) -> {Elems,Rest2} = parse_ElementSetSpecs(Rest), case Rest2 of [{'}',_}|Rest3] -> {{valueset,Elems},Rest3}; - [H|_T] -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,'}']}}) + _ -> + parse_error(Rest2) end; parse_ValueSet(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,'{']}}). + parse_error(Tokens). -parse_ValueAssignment([{identifier,L1,IdName}|Rest]) -> +parse_ValueAssignment([#identifier{pos=L1,val=IdName}|Rest]) -> {Type,Rest2} = parse_Type(Rest), case Rest2 of [{'::=',_}|Rest3] -> {Value,Rest4} = parse_Value(Rest3), - case catch lookahead_assignment(Rest4) of - ok -> - {#valuedef{pos=L1,name=IdName,type=Type,value=Value, - module=get(asn1_module)},Rest4}; - Error -> - throw(Error) -%% throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module), -%% [got,get_token(hd(Rest2)),expected,'::=']}}) - end; + {#valuedef{pos=L1,name=IdName,type=Type,value=Value, + module=get(asn1_module)},Rest4}; _ -> - throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module), - [got,get_token(hd(Rest2)),expected,'::=']}}) - end; -parse_ValueAssignment(Tokens) -> - throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,identifier]}}). + parse_error(Rest2) + end. 
%% SizeConstraint parse_SubtypeElements([{'SIZE',_}|Tokens]) -> @@ -2966,8 +2157,7 @@ parse_SubtypeElements([{'WITH',_},{'COMPONENTS',_},{'{',_},{'...',_},{',',_}|Tok [{'}',_}|Rest2] -> {{'WITH COMPONENTS',{'PartialSpecification',Constraint}},Rest2}; _ -> - throw({asn1_error,{get_line(hd(Rest)),get(asn1_module), - [got,get_token(hd(Rest)),expected,'}']}}) + parse_error(Rest) end; parse_SubtypeElements([{'WITH',_},{'COMPONENTS',_},{'{',_}|Tokens]) -> {Constraint,Rest} = parse_TypeConstraints(Tokens), @@ -2975,28 +2165,18 @@ parse_SubtypeElements([{'WITH',_},{'COMPONENTS',_},{'{',_}|Tokens]) -> [{'}',_}|Rest2] -> {{'WITH COMPONENTS',{'FullSpecification',Constraint}},Rest2}; _ -> - throw({asn1_error,{get_line(hd(Rest)),get(asn1_module), - [got,get_token(hd(Rest)),expected,'}']}}) + parse_error(Rest) end; parse_SubtypeElements([{'PATTERN',_}|Tokens]) -> {Value,Rest} = parse_Value(Tokens), {{pattern,Value},Rest}; -%% SingleValue -%% ContainedSubtype -%% ValueRange -%% TypeConstraint -%% Moved fun parse_Value/1 and fun parse_Type/1 to parse_Elements parse_SubtypeElements(Tokens) -> Flist = [fun parse_ContainedSubtype/1, fun parse_Value/1, - fun([{'MIN',_}|T]) -> {'MIN',T} end, + fun parse_MIN/1, fun parse_Type/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - {asn1_error,Reason} -> - throw(Reason); - Result = {Val,_} when is_record(Val,type) -> + case parse_or(Tokens, Flist) of + {#type{},_}=Result -> Result; {Lower,[{'..',_}|Rest]} -> {Upper,Rest2} = parse_UpperEndpoint(Rest), @@ -3014,10 +2194,7 @@ parse_ContainedSubtype([{'INCLUDES',_}|Rest]) -> {Type,Rest2} = parse_Type(Rest), {{'ContainedSubtype',Type},Rest2}; parse_ContainedSubtype(Tokens) -> - throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module), - [got,get_token(hd(Tokens)),expected,'INCLUDES']}}). -%%parse_ContainedSubtype(Tokens) -> %this option is moved to parse_SubtypeElements -%% parse_Type(Tokens). + parse_error(Tokens). parse_UpperEndpoint([{'<',_}|Rest]) -> parse_UpperEndpoint(lt,Rest); @@ -3025,33 +2202,38 @@ parse_UpperEndpoint(Tokens) -> parse_UpperEndpoint(false,Tokens). parse_UpperEndpoint(Lt,Tokens) -> - Flist = [ fun([{'MAX',_}|T]) -> {'MAX',T} end, - fun parse_Value/1], - case (catch parse_or(Tokens,Flist)) of - {'EXIT',Reason} -> - exit(Reason); - AsnErr = {asn1_error,_} -> - throw(AsnErr); - {Value,Rest2} when Lt == lt -> + Flist = [fun parse_MAX/1, + fun parse_Value/1], + case parse_or(Tokens, Flist) of + {Value,Rest2} when Lt =:= lt -> {{lt,Value},Rest2}; {Value,Rest2} -> {Value,Rest2} end. +parse_MIN([{'MIN',_}|T]) -> + {'MIN',T}; +parse_MIN(Tokens) -> + parse_error(Tokens). + +parse_MAX([{'MAX',_}|T]) -> + {'MAX',T}; +parse_MAX(Tokens) -> + parse_error(Tokens). + parse_TypeConstraints(Tokens) -> - parse_TypeConstraints(Tokens,[]). + parse_TypeConstraints(Tokens, []). -parse_TypeConstraints([{identifier,_,_}|Rest],Acc) -> +parse_TypeConstraints([#identifier{}|Rest], Acc) -> {ComponentConstraint,Rest2} = parse_ComponentConstraint(Rest), case Rest2 of [{',',_}|Rest3] -> - parse_TypeConstraints(Rest3,[ComponentConstraint|Acc]); + parse_TypeConstraints(Rest3, [ComponentConstraint|Acc]); _ -> - {lists:reverse([ComponentConstraint|Acc]),Rest2} + {lists:reverse(Acc, [ComponentConstraint]),Rest2} end; -parse_TypeConstraints([H|_T],_) -> - throw({asn1_error,{get_line(H),get(asn1_module), - [got,get_token(H),expected,identifier]}}). +parse_TypeConstraints(Tokens, _) -> + parse_error(Tokens). 
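Editorial note on why parse_MIN/1 and parse_MAX/1 replace the anonymous funs that used to sit directly in the alternative lists: an inline fun such as fun([{'MIN',_}|T]) -> {'MIN',T} end crashes with function_clause on any other token, which is an exit rather than a thrown {asn1_error,_}, and that is exactly the 'EXIT' case the removed catch wrappers had to handle. Named clauses ending in parse_error/1 fail the same way as every other alternative, so parse_or can simply move on. A minimal illustration, not taken from the patch:

-module(parse_min_sketch).
-export([inline_fun/1, named/1]).

%% Crashes with function_clause (an exit) on e.g. [{number,1,5}] ...
inline_fun(Tokens) ->
    F = fun([{'MIN',_}|T]) -> {'MIN',T} end,
    F(Tokens).

%% ... while the named form fails like any other alternative.
named([{'MIN',_}|T]) ->
    {'MIN',T};
named(Tokens) ->
    throw({asn1_error,{parse_error,Tokens}}).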
parse_ComponentConstraint(Tokens = [{'(',_}|_Rest]) -> {ValueConstraint,Rest2} = parse_Constraint(Tokens), @@ -3071,145 +2253,36 @@ parse_PresenceConstraint(Tokens) -> {asn1_empty,Tokens}. -% merge_constraints({Rlist,ExtList}) -> % extensionmarker in constraint -% {merge_constraints(Rlist,[],[]), -% merge_constraints(ExtList,[],[])}; - -%% An arg with a constraint with extension marker will look like -%% [#constraint{c={Root,Ext}}|Rest] - merge_constraints(Clist) -> merge_constraints(Clist, [], []). -merge_constraints([Ch|Ct],Cacc, Eacc) -> - NewEacc = case Ch#constraint.e of - undefined -> Eacc; - E -> [E|Eacc] - end, - merge_constraints(Ct,[fixup_constraint(Ch#constraint.c)|Cacc],NewEacc); - -merge_constraints([],Cacc,[]) -> -%% lists:flatten(Cacc); +merge_constraints([#constraint{c=C,e=E}|T], Cacc0, Eacc0) -> + Eacc = case E of + undefined -> Eacc0; + E -> [E|Eacc0] + end, + Cacc = [C|Cacc0], + merge_constraints(T, Cacc, Eacc); +merge_constraints([], Cacc, []) -> lists:reverse(Cacc); -merge_constraints([],Cacc,Eacc) -> -%% lists:flatten(Cacc) ++ [{'Errors',Eacc}]. - lists:reverse(Cacc) ++ [{'Errors',Eacc}]. - - -fixup_constraint(C) -> - case C of - {'SingleValue',SubType} when element(1,SubType) == 'ContainedSubtype' -> - SubType; - {'SingleValue',V} when is_list(V) -> - C; - %% [C,{'ValueRange',{lists:min(V),lists:max(V)}}]; - %% bug, turns wrong when an element in V is a reference to a defined value - {'PermittedAlphabet',{'SingleValue',V}} when is_list(V) -> - %%sort and remove duplicates - V2 = {'SingleValue', - ordsets:from_list(lists:flatten(V))}, - {'PermittedAlphabet',V2}; - {'PermittedAlphabet',{'SingleValue',V}} -> - V2 = {'SingleValue',[V]}, - {'PermittedAlphabet',V2}; - {'SizeConstraint',Sc} -> - {'SizeConstraint',fixup_size_constraint(Sc)}; - - List when is_list(List) -> %% In This case maybe a union or intersection - [fixup_constraint(Xc)||Xc <- List]; - Other -> - Other - end. +merge_constraints([], Cacc, Eacc) -> + lists:reverse(Cacc) ++ [{element_set,{'Errors',Eacc},none}]. -fixup_size_constraint({'ValueRange',{Lb,Ub}}) -> - {Lb,Ub}; -fixup_size_constraint({{'ValueRange',R},[]}) -> - {R,[]}; -fixup_size_constraint({[],{'ValueRange',R}}) -> - {[],R}; -fixup_size_constraint({{'ValueRange',R1},{'ValueRange',R2}}) -> - {R1,R2}; -fixup_size_constraint({'SingleValue',[Sv]}) -> - fixup_size_constraint({'SingleValue',Sv}); -fixup_size_constraint({'SingleValue',L}) when is_list(L) -> - ordsets:from_list(L); -fixup_size_constraint({'SingleValue',L}) -> - {L,L}; -fixup_size_constraint({'SizeConstraint',C}) -> - %% this is a second SIZE - fixup_size_constraint(C); -fixup_size_constraint({C1,C2}) -> - %% this is with extension marks - {turn2vr(fixup_size_constraint(C1)), extension_size(fixup_size_constraint(C2))}; -fixup_size_constraint(CList) when is_list(CList) -> - [fixup_constraint(Xc)||Xc <- CList]. - -turn2vr(L) when is_list(L) -> - L2 =[X||X<-ordsets:from_list(L),is_integer(X)], - case L2 of - [H|_] -> - {H,hd(lists:reverse(L2))}; - _ -> - L - end; -turn2vr(VR) -> - VR. -extension_size({I,I}) -> - [I]; -extension_size({I1,I2}) -> - [I1,I2]; -extension_size(C) -> - C. - -get_line({_,Pos,Token}) when is_integer(Pos),is_atom(Token) -> +get_line({Token,Pos,_}) when is_integer(Pos), is_atom(Token) -> Pos; get_line({Token,Pos}) when is_integer(Pos),is_atom(Token) -> - Pos; -get_line(_) -> - undefined. - -get_token({_,Pos,Token}) when is_integer(Pos),is_atom(Token) -> - Token; + Pos. 
+ +get_token({valuefieldreference,_,FieldName}) -> + list_to_atom([$&|atom_to_list(FieldName)]); +get_token({typefieldreference,_,FieldName}) -> + list_to_atom([$&|atom_to_list(FieldName)]); +get_token({Token,Pos,Value}) when is_integer(Pos), is_atom(Token) -> + Value; get_token({'$end',Pos}) when is_integer(Pos) -> - undefined; + 'END-OF-FILE'; get_token({Token,Pos}) when is_integer(Pos),is_atom(Token) -> - Token; -get_token(_) -> - undefined. - -prioritize_error(ErrList) -> - case lists:keymember(asn1_error,1,ErrList) of - false -> % only asn1_assignment_error -> take the last - lists:last(ErrList); - true -> % contains errors from deeper in a Type - NewErrList = [_Err={_,_}|_RestErr] = - lists:filter(fun({asn1_error,_})->true;(_)->false end, - ErrList), - SplitErrs = - lists:splitwith(fun({_,X})-> - case element(1,X) of - Int when is_integer(Int) -> true; - _ -> false - end - end, - NewErrList), - case SplitErrs of - {[],UndefPosErrs} -> % if no error with Positon exists - lists:last(UndefPosErrs); - {IntPosErrs,_} -> - IntPosReasons = lists:map(fun(X)->element(2,X) end,IntPosErrs), - SortedReasons = lists:keysort(1,IntPosReasons), - {asn1_error,lists:last(SortedReasons)} - end - end. - -%% most_prio_error([H={_,Reason}|T],Atom,Err) when is_atom(Atom) -> -%% most_prio_error(T,element(1,Reason),H); -%% most_prio_error([H={_,Reason}|T],Greatest,Err) -> -%% case element(1,Reason) of -%% Pos when is_integer(Pos),Pos>Greatest -> -%% most_prio_error( - + Token. tref2Exttref(#typereference{pos=Pos,val=Name}) -> #'Externaltypereference'{pos=Pos, @@ -3226,19 +2299,5 @@ identifier2Extvalueref(#identifier{pos=Pos,val=Name}) -> module=resolve_module(Name), value=Name}. -%% lookahead_assignment/1 checks that the next sequence of tokens -%% in Token contain a valid assignment or the -%% 'END' token. Otherwise an exception is thrown. -lookahead_assignment([{'END',_}|_Rest]) -> - ok; -lookahead_assignment(Tokens) -> - parse_Assignment(Tokens), - ok. - -is_pre_defined_class('TYPE-IDENTIFIER') -> - true; -is_pre_defined_class('ABSTRACT-SYNTAX') -> - true; -is_pre_defined_class(_) -> - false. - +parse_error(Tokens) -> + throw({asn1_error,{parse_error,Tokens}}). diff --git a/lib/asn1/src/asn1ct_tok.erl b/lib/asn1/src/asn1ct_tok.erl index 8687ed955c..d51fea6402 100644 --- a/lib/asn1/src/asn1ct_tok.erl +++ b/lib/asn1/src/asn1ct_tok.erl @@ -21,191 +21,177 @@ %% Tokenize ASN.1 code (input to parser generated with yecc) --export([get_name/2,tokenise/4, file/1]). +-export([file/1,format_error/1]). - -file(File) -> - case file:open(File, [read]) of +file(File0) -> + case file:open(File0, [read]) of {error, Reason} -> - {error,{File,file:format_error(Reason)}}; + {error,{File0,file:format_error(Reason)}}; {ok,Stream} -> - process(Stream,0,[]) + try + process(Stream, 1, []) + catch + throw:{error,Line,Reason} -> + File = filename:basename(File0), + Error = {structured_error,{File,Line},?MODULE,Reason}, + {error,[Error]} + end end. -process(Stream,Lno,R) -> - process(io:get_line(Stream, ''), Stream,Lno+1,R). +process(Stream, Lno, R) -> + process(io:get_line(Stream, ''), Stream, Lno, R). 
-process(eof, Stream,Lno,R) -> +process(eof, Stream, Lno, Acc) -> ok = file:close(Stream), - lists:flatten(lists:reverse([{'$end',Lno}|R])); - - -process(L, Stream,Lno,R) when is_list(L) -> - %%io:format('read:~s',[L]), - case catch tokenise(Stream,L,Lno,[]) of - {'ERR',Reason} -> - io:format("Tokeniser error on line: ~w ~w~n",[Lno,Reason]), - exit(0); - {NewLno,T} -> - %%io:format('toks:~w~n',[T]), - process(Stream,NewLno,[T|R]) - end. - -tokenise(Stream,[H|T],Lno,R) when $a =< H , H =< $z -> - {X, T1} = get_name(T, [H]), - tokenise(Stream,T1,Lno,[{identifier,Lno, list_to_atom(X)}|R]); - -tokenise(Stream,[$&,H|T],Lno,R) when $A =< H , H =< $Z -> - {Y, T1} = get_name(T, [H]), - X = list_to_atom(Y), - tokenise(Stream,T1,Lno,[{typefieldreference, Lno, X} | R]); - -tokenise(Stream,[$&,H|T],Lno,R) when $a =< H , H =< $z -> - {Y, T1} = get_name(T, [H]), - X = list_to_atom(Y), - tokenise(Stream,T1,Lno,[{valuefieldreference, Lno, X} | R]); - -tokenise(Stream,[H|T],Lno,R) when $A =< H , H =< $Z -> - {Y, T1} = get_name(T, [H]), - X = list_to_atom(Y), - case reserved_word(X) of - true -> - tokenise(Stream,T1,Lno,[{X,Lno}|R]); - false -> - tokenise(Stream,T1,Lno,[{typereference,Lno,X}|R]); - rstrtype -> - tokenise(Stream,T1,Lno,[{restrictedcharacterstringtype,Lno,X}|R]) - end; - -tokenise(Stream,[$-,H|T],Lno,R) when $0 =< H , H =< $9 -> - {X, T1} = get_number(T, [H]), - tokenise(Stream,T1,Lno,[{number,Lno,-1 * list_to_integer(X)}|R]); + lists:reverse([{'$end',Lno}|Acc]); +process(L, Stream, Lno0, Acc) when is_list(L) -> + try tokenise(Stream, L, Lno0, []) of + {Lno,[]} -> + process(Stream, Lno, Acc); + {Lno,Ts} -> + process(Stream, Lno, Ts++Acc) + catch + throw:{error,Reason} -> + throw({error,Lno0,Reason}) + end. -tokenise(Stream,[H|T],Lno,R) when $0 =< H , H =< $9 -> +format_error(eof_in_comment) -> + "premature end of file in multi-line comment"; +format_error(eol_in_token) -> + "end of line in token"; +format_error({invalid_binary_number,Str}) -> + io_lib:format("invalid binary number: '~s'", [Str]); +format_error({invalid_hex_number,Str}) -> + io_lib:format("invalid hex number: '~s'", [Str]); +format_error(Other) -> + io_lib:format("~p", [Other]). 
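Editorial note: the tokenizer now reports failures as {error,[{structured_error,{File,Line},asn1ct_tok,Reason}]} and exposes format_error/1 for turning the Reason term back into text. How these tuples are rendered is outside this excerpt; a plausible consumer (an assumption, not code from the patch) would look roughly like this:

-module(tok_error_render_sketch).
-export([render/1]).

%% Render one structured error by calling back into the module that
%% produced it, in the style of the Erlang compiler's error reporting.
render({structured_error,{File,Line},Mod,Reason}) ->
    lists:flatten(io_lib:format("~ts:~p: ~ts~n",
                                [File, Line, Mod:format_error(Reason)])).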
+ +tokenise(Stream, [$&,H|T], Lno, R) when $A =< H , H =< $Z -> + {X,T1} = get_name(T, [H]), + tokenise(Stream, T1, Lno, [{typefieldreference,Lno,X}|R]); +tokenise(Stream, [$&,H|T], Lno, R) when $a =< H , H =< $z -> + {X,T1} = get_name(T, [H]), + tokenise(Stream, T1, Lno, [{valuefieldreference,Lno,X}|R]); + +tokenise(Stream, "--"++T, Lno, R) -> + tokenise(Stream, skip_comment(T), Lno, R); + +tokenise(Stream, [$-,H|T], Lno, R) when $0 =< H , H =< $9 -> {X, T1} = get_number(T, [H]), - tokenise(Stream,T1,Lno,[{number,Lno,list_to_integer(X)}|R]); - -tokenise(Stream,[$-,$-|T],Lno,R) -> - tokenise(Stream,skip_comment(T),Lno,R); + tokenise(Stream, T1, Lno, [{number,Lno,-list_to_integer(X)}|R]); -tokenise(Stream,[$/,$*|T],Lno,R) -> - {NewLno,T1} = skip_multiline_comment(Stream,T,Lno,0), - tokenise(Stream,T1,NewLno,R); +tokenise(Stream, "/*"++T, Lno0, R) -> + {Lno,T1} = skip_multiline_comment(Stream, T, Lno0, 0), + tokenise(Stream, T1, Lno, R); -tokenise(Stream,[$:,$:,$=|T],Lno,R) -> - tokenise(Stream,T,Lno,[{'::=',Lno}|R]); - -tokenise(Stream,[$'|T],Lno,R) -> - case catch collect_quoted(T,Lno,[]) of - {'ERR',_} -> - throw({'ERR','bad_quote'}); - {Thing, T1} -> - tokenise(Stream,T1,Lno,[Thing|R]) - end; +tokenise(Stream, "::="++T, Lno, R) -> + tokenise(Stream, T, Lno, [{'::=',Lno}|R]); +tokenise(Stream, ":"++T, Lno, R) -> + tokenise(Stream, T, Lno, [{':',Lno}|R]); +tokenise(Stream, "'"++T0, Lno, R) -> + {Thing, T1} = collect_quoted(T0, Lno, []), + tokenise(Stream, T1, Lno, [Thing|R]); tokenise(Stream,[$"|T],Lno,R) -> {Str,T1} = collect_string(T,Lno), tokenise(Stream,T1,Lno,[Str|R]); -tokenise(Stream,[${|T],Lno,R) -> - tokenise(Stream,T,Lno,[{'{',Lno}|R]); - -tokenise(Stream,[$}|T],Lno,R) -> - tokenise(Stream,T,Lno,[{'}',Lno}|R]); - -%% tokenise(Stream,[$],$]|T],Lno,R) -> -%% tokenise(Stream,T,Lno,[{']]',Lno}|R]); +tokenise(Stream, "{"++T, Lno, R) -> + tokenise(Stream, T, Lno, [{'{',Lno}|R]); +tokenise(Stream, "}"++T, Lno, R) -> + tokenise(Stream, T, Lno, [{'}',Lno}|R]); %% Even though x.680 specify '[[' and ']]' as lexical items -%% it does not work to have them as such since the single [ and ] can -%% be used beside each other in the SYNTAX OF in x.681 -%% the solution chosen here , i.e. to have them as separate lexical items +%% it does not work to have them as such since the single '[' and ']' can +%% be used beside each other in 'WITH SYNTAX' in x.681. +%% The solution chosen here, i.e. to have them as separate lexical items %% will not detect the cases where there is white space between them -%% which would be an error in the use in ExtensionAdditionGroups - -%% tokenise(Stream,[$[,$[|T],Lno,R) -> -%% tokenise(Stream,T,Lno,[{'[[',Lno}|R]); - -tokenise(Stream,[$]|T],Lno,R) -> - tokenise(Stream,T,Lno,[{']',Lno}|R]); - -tokenise(Stream,[$[|T],Lno,R) -> - tokenise(Stream,T,Lno,[{'[',Lno}|R]); +%% which would be an error in the use in ExtensionAdditionGroups. 
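Editorial note: the string-prefix clause heads above also make the longest-match requirement explicit. "::=" must be tried before ":", "..." before ".." before ".", and "--" before the single-character catch-all clause. A minimal sketch of that ordering (not part of the patch); swapping the first two clauses would turn "::=" into ':' followed by ':' and '=':

-module(tok_order_sketch).
-export([op/1]).

%% Longest match first, as in the real tokenise/4 clauses.
op("::=" ++ T) -> {'::=', T};
op(":"   ++ T) -> {':', T};
op("..." ++ T) -> {'...', T};
op(".."  ++ T) -> {'..', T};
op("."   ++ T) -> {'.', T}.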
-tokenise(Stream,[$,|T],Lno,R) -> - tokenise(Stream,T,Lno,[{',',Lno}|R]); +tokenise(Stream, "]"++T, Lno, R) -> + tokenise(Stream, T, Lno, [{']',Lno}|R]); +tokenise(Stream, "["++T,Lno,R) -> + tokenise(Stream, T, Lno, [{'[',Lno}|R]); -tokenise(Stream,[$(|T],Lno,R) -> - tokenise(Stream,T,Lno,[{'(',Lno}|R]); -tokenise(Stream,[$)|T],Lno,R) -> - tokenise(Stream,T,Lno,[{')',Lno}|R]); +tokenise(Stream, ","++T,Lno,R) -> + tokenise(Stream, T, Lno, [{',',Lno}|R]); -tokenise(Stream,[$.,$.,$.|T],Lno,R) -> - tokenise(Stream,T,Lno,[{'...',Lno}|R]); +tokenise(Stream, "("++T, Lno, R) -> + tokenise(Stream, T, Lno, [{'(',Lno}|R]); +tokenise(Stream, ")"++T, Lno, R) -> + tokenise(Stream, T, Lno, [{')',Lno}|R]); -tokenise(Stream,[$.,$.|T],Lno,R) -> - tokenise(Stream,T,Lno,[{'..',Lno}|R]); +tokenise(Stream, "..."++T,Lno,R) -> + tokenise(Stream, T, Lno, [{'...',Lno}|R]); +tokenise(Stream, ".."++T, Lno, R) -> + tokenise(Stream, T, Lno, [{'..',Lno}|R]); +tokenise(Stream, "."++T, Lno, R) -> + tokenise(Stream, T, Lno, [{'.',Lno}|R]); -tokenise(Stream,[$.|T],Lno,R) -> - tokenise(Stream,T,Lno,[{'.',Lno}|R]); -tokenise(Stream,[$^|T],Lno,R) -> - tokenise(Stream,T,Lno,[{'^',Lno}|R]); -tokenise(Stream,[$!|T],Lno,R) -> - tokenise(Stream,T,Lno,[{'!',Lno}|R]); -tokenise(Stream,[$||T],Lno,R) -> - tokenise(Stream,T,Lno,[{'|',Lno}|R]); +tokenise(Stream, "|"++T, Lno, R) -> + tokenise(Stream, T, Lno, [{'|',Lno}|R]); -tokenise(Stream,[H|T],Lno,R) -> - case white_space(H) of +tokenise(Stream, [H|T], Lno, R) when $A =< H , H =< $Z -> + {X,T1} = get_name(T, [H]), + case reserved_word(X) of true -> - tokenise(Stream,T,Lno,R); + tokenise(Stream, T1, Lno, [{X,Lno}|R]); false -> - tokenise(Stream,T,Lno,[{list_to_atom([H]),Lno}|R]) + tokenise(Stream, T1, Lno, [{typereference,Lno,X}|R]); + rstrtype -> + tokenise(Stream, T1, Lno, [{restrictedcharacterstringtype,Lno,X}|R]) end; -tokenise(_Stream,[],Lno,R) -> - {Lno,lists:reverse(R)}. +tokenise(Stream, [H|T], Lno, R) when $a =< H , H =< $z -> + {X, T1} = get_name(T, [H]), + tokenise(Stream, T1, Lno, [{identifier,Lno,X}|R]); -collect_string(L,Lno) -> - collect_string(L,Lno,[]). +tokenise(Stream, [H|T], Lno, R) when $0 =< H , H =< $9 -> + {X, T1} = get_number(T, [H]), + tokenise(Stream, T1, Lno, [{number,Lno,list_to_integer(X)}|R]); -collect_string([],_,_) -> - throw({'ERR','bad_quote found eof'}); +tokenise(Stream, [H|T], Lno, R) when H =< $\s -> + tokenise(Stream, T, Lno, R); -collect_string([H|T],Lno,Str) -> - case H of - $" -> - {{cstring,1,lists:reverse(Str)},T}; - Ch -> - collect_string(T,Lno,[Ch|Str]) - end. - +tokenise(Stream, [H|T], Lno, R) -> + tokenise(Stream, T, Lno, [{list_to_atom([H]),Lno}|R]); +tokenise(_Stream, [], Lno, R) -> + {Lno+1,R}. -% <name> is letters digits hyphens -% hypen is not the last character. Hypen hyphen is NOT allowed -% -% <identifier> ::= <lowercase> <name> +collect_string(L, Lno) -> + collect_string(L, Lno, []). -get_name([$-,Char|T], L) -> +collect_string([$"|T], _Lno, Str) -> + {{cstring,1,lists:reverse(Str)},T}; +collect_string([H|T], Lno, Str) -> + collect_string(T, Lno, [H|Str]); +collect_string([], _, _) -> + throw({error,missing_quote_at_eof}). + +%% <name> is letters digits hyphens. +%% Hypen is not the last character. Hypen hyphen is NOT allowed. 
+%% +%% <identifier> ::= <lowercase> <name> + +get_name([$-,Char|T]=T0, Acc) -> case isalnum(Char) of true -> - get_name(T,[Char,$-|L]); + get_name(T, [Char,$-|Acc]); false -> - {lists:reverse(L),[$-,Char|T]} + {list_to_atom(lists:reverse(Acc)),T0} end; -get_name([$-|T], L) -> - {lists:reverse(L),[$-|T]}; -get_name([Char|T], L) -> +get_name([$-|_]=T, Acc) -> + {list_to_atom(lists:reverse(Acc)),T}; +get_name([Char|T]=T0, Acc) -> case isalnum(Char) of true -> - get_name(T,[Char|L]); + get_name(T, [Char|Acc]); false -> - {lists:reverse(L),[Char|T]} + {list_to_atom(lists:reverse(Acc)),T0} end; -get_name([], L) -> - {lists:reverse(L), []}. - +get_name([], Acc) -> + {list_to_atom(lists:reverse(Acc)),[]}. isalnum(H) when $A =< H , H =< $Z -> true; @@ -221,67 +207,54 @@ isdigit(H) when $0 =< H , H =< $9 -> isdigit(_) -> false. -white_space(9) -> true; -white_space(10) -> true; -white_space(13) -> true; -white_space(32) -> true; -white_space(_) -> false. - - -get_number([H|T], L) -> +get_number([H|T]=T0, L) -> case isdigit(H) of true -> get_number(T, [H|L]); false -> - {lists:reverse(L), [H|T]} + {lists:reverse(L), T0} end; get_number([], L) -> {lists:reverse(L), []}. -skip_comment([]) -> - []; -skip_comment([$-,$-|T]) -> - T; -skip_comment([_|T]) -> - skip_comment(T). - +skip_comment([]) -> []; +skip_comment("--"++T) -> T; +skip_comment([_|T]) -> skip_comment(T). -skip_multiline_comment(Stream,[],Lno,Level) -> - case io:get_line(Stream,'') of +skip_multiline_comment(Stream, [], Lno, Level) -> + case io:get_line(Stream, '') of eof -> - io:format("Tokeniser error on line: ~w~n" - "premature end of multiline comment~n",[Lno]), - exit(0); + throw({error,eof_in_comment}); Line -> - skip_multiline_comment(Stream,Line,Lno+1,Level) + skip_multiline_comment(Stream, Line, Lno+1, Level) end; -skip_multiline_comment(_Stream,[$*,$/|T],Lno,0) -> +skip_multiline_comment(_Stream, "*/"++T, Lno, 0) -> {Lno,T}; -skip_multiline_comment(Stream,[$*,$/|T],Lno,Level) -> - skip_multiline_comment(Stream,T,Lno,Level - 1); -skip_multiline_comment(Stream,[$/,$*|T],Lno,Level) -> - skip_multiline_comment(Stream,T,Lno,Level + 1); -skip_multiline_comment(Stream,[_|T],Lno,Level) -> - skip_multiline_comment(Stream,T,Lno,Level). - -collect_quoted([$',$B|T],Lno, L) -> +skip_multiline_comment(Stream, "*/"++T, Lno, Level) -> + skip_multiline_comment(Stream, T, Lno, Level - 1); +skip_multiline_comment(Stream, "/*"++T, Lno, Level) -> + skip_multiline_comment(Stream, T, Lno, Level + 1); +skip_multiline_comment(Stream, [_|T], Lno, Level) -> + skip_multiline_comment(Stream, T, Lno, Level). + +collect_quoted("'B"++T, Lno, L) -> case check_bin(L) of true -> - {{bstring,Lno, lists:reverse(L)}, T}; + {{bstring,Lno,lists:reverse(L)}, T}; false -> - throw({'ERR',{invalid_binary_number, lists:reverse(L)}}) + throw({error,{invalid_binary_number,lists:reverse(L)}}) end; -collect_quoted([$',$H|T],Lno, L) -> +collect_quoted("'H"++T, Lno, L) -> case check_hex(L) of true -> - {{hstring,Lno, lists:reverse(L)}, T}; + {{hstring,Lno,lists:reverse(L)}, T}; false -> - throw({'ERR',{invalid_binary_number, lists:reverse(L)}}) + throw({error,{invalid_hex_number,lists:reverse(L)}}) end; collect_quoted([H|T], Lno, L) -> collect_quoted(T, Lno,[H|L]); collect_quoted([], _, _) -> % This should be allowed FIX later - throw({'ERR',{eol_in_token}}). + throw({error,eol_in_token}). 
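Editorial note: quoted literals now fail with structured reasons instead of the old print-and-exit(0) behaviour. collect_quoted/3 above turns 'CAFE'H into a single {hstring,Line,"CAFE"} token, and a malformed literal throws a reason that format_error/1 can print. A small illustration, with values invented for the example:

-module(tok_literal_sketch).
-export([demo/0]).

%% Assumes the asn1ct_tok module from this patch is on the code path.
demo() ->
    Msg = asn1ct_tok:format_error({invalid_hex_number, "CAFG"}),
    io:format("~ts~n", [Msg]),   %% prints: invalid hex number: 'CAFG'
    ok.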
check_bin([$0|T]) -> check_bin(T); @@ -351,7 +324,6 @@ reserved_word('INCLUDES') -> true; reserved_word('INSTANCE') -> true; reserved_word('INTEGER') -> true; reserved_word('INTERSECTION') -> true; -reserved_word('ISO646String') -> rstrtype; reserved_word('MAX') -> true; reserved_word('MIN') -> true; reserved_word('MINUS-INFINITY') -> true; diff --git a/lib/asn1/test/Makefile b/lib/asn1/test/Makefile index b1b08aa9f9..ea5a0f857e 100644 --- a/lib/asn1/test/Makefile +++ b/lib/asn1/test/Makefile @@ -78,6 +78,7 @@ MODULES= \ testEnumExt \ testInfObjectClass \ testInfObj \ + testInfObjExtract \ testParameterizedInfObj \ testFragmented \ testMergeCompile \ @@ -104,14 +105,19 @@ MODULES= \ test_compile_options \ testDoubleEllipses \ test_modified_x420 \ - testX420 \ test_x691 \ testWSParamClass \ + testValueTest \ + testUniqueObjectSets \ + testRfcs \ + testImporting \ + testExtensibilityImplied \ asn1_test_lib \ asn1_app_test \ asn1_appup_test \ asn1_SUITE \ - error_SUITE + error_SUITE \ + syntax_SUITE ERL_FILES= $(MODULES:%=%.erl) diff --git a/lib/asn1/test/asn1_SUITE.erl b/lib/asn1/test/asn1_SUITE.erl index 432197eec0..9dfcc3f571 100644 --- a/lib/asn1/test/asn1_SUITE.erl +++ b/lib/asn1/test/asn1_SUITE.erl @@ -52,9 +52,7 @@ all() -> groups() -> Parallel = asn1_test_lib:parallel(), [{compile, Parallel, - [c_syntax, - c_string, - c_implicit_before_choice, + [c_string, constraint_equivalence]}, {ber, Parallel, @@ -89,6 +87,7 @@ groups() -> ber_other, der, h323test]}, + testExtensibilityImplied, testChoPrim, testChoExtension, testChoOptional, @@ -135,19 +134,19 @@ groups() -> testChoiceIndefinite, per_open_type, testInfObjectClass, + testUniqueObjectSets, + testInfObjExtract, testParam, testFragmented, testMergeCompile, testobj, testDeepTConstr, - testExport, testImport, testDER, testDEFAULT, testMvrasn6, testContextSwitchingTypes, testOpenTypeImplicitTag, - duplicate_tags, testROSE, testINSTANCE_OF, testTCAP, @@ -158,16 +157,19 @@ groups() -> testNortel, % Uses 'PKCS7', 'InformationFramework' {group, [], [test_WS_ParamClass, - test_modified_x420, - testX420]}, - testTcapsystem, - testNBAPsystem, - testS1AP, + test_modified_x420]}, + %% Don't run all these at the same time. + {group, [], + [testTcapsystem, + testNBAPsystem, + testS1AP, + testRfcs]}, test_compile_options, testDoubleEllipses, test_x691, ticket_6143, - test_OTP_9688]}, + test_OTP_9688, + testValueTest]}, {performance, [], [testTimer_ber, @@ -196,7 +198,7 @@ init_per_testcase(Func, Config) -> true = code:add_patha(CaseDir), Dog = case Func of - testX420 -> ct:timetrap({minutes, 90}); + testRfcs -> ct:timetrap({minutes, 90}); _ -> ct:timetrap({minutes, 60}) end, [{case_dir, CaseDir}, {watchdog, Dog}|Config]. @@ -374,6 +376,12 @@ testExternal(Config, Rule, Opts) -> testSetOfTag:main(Rule), testSetTag:main(Rule). +testExtensibilityImplied(Config) -> + test(Config, fun testExtensibilityImplied/3). +testExtensibilityImplied(Config, Rule, Opts) -> + asn1_test_lib:compile("ExtensibilityImplied", Config, + [Rule,no_ok_wrapper|Opts]), + testExtensibilityImplied:main(). testChoPrim(Config) -> test(Config, fun testChoPrim/3). testChoPrim(Config, Rule, Opts) -> @@ -561,39 +569,21 @@ testSetOfCho(Config, Rule, Opts) -> asn1_test_lib:compile("SetOfCho", Config, [Rule|Opts]), testSetOfCho:main(Rule). 
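Editorial note: the new testExtensibilityImplied case in the suite compiles the ExtensibilityImplied spec (added further down in this patch) with the no_ok_wrapper option and then calls testExtensibilityImplied:main/0, which is not part of this excerpt. A guess at the shape of such a round-trip using the generated encode/decode API (with no_ok_wrapper the results come back unwrapped rather than as {ok,_} tuples); the type and field names follow the spec, everything else here is assumed:

-module(ext_implied_sketch).
-export([roundtrip/0]).

%% 'Seq2' has a single BOOLEAN component and, because the module is
%% declared EXTENSIBILITY IMPLIED, is treated as if it ended with ",...".
roundtrip() ->
    M = 'ExtensibilityImplied',
    Enc = M:encode('Seq2', {'Seq2', true}),
    {'Seq2', true} = M:decode('Seq2', Enc),
    ok.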
-c_syntax(Config) -> - DataDir = ?config(data_dir, Config), - [{error, _} = asn1ct:compile(filename:join(DataDir, F)) - || F <-["Syntax", - "BadTypeEnding", - "BadValueAssignment1", - "BadValueAssignment2", - "BadValueSet", - "ChoiceBadExtension", - "EnumerationBadExtension", - "Example", - "Export1", - "MissingEnd", - "SequenceBadComma", - "SequenceBadComponentName", - "SequenceBadComponentType", - "SeqBadComma"]]. - c_string(Config) -> test(Config, fun c_string/3). c_string(Config, Rule, Opts) -> asn1_test_lib:compile("String", Config, [Rule|Opts]), asn1ct:test('String'). -c_implicit_before_choice(Config) -> - test(Config, fun c_implicit_before_choice/3, [ber]). -c_implicit_before_choice(Config, Rule, Opts) -> - DataDir = ?config(data_dir, Config), - CaseDir = ?config(case_dir, Config), - {error, _R2} = asn1ct:compile(filename:join(DataDir, "CCSNARG3"), - [Rule, {outdir, CaseDir}|Opts]). - constraint_equivalence(Config) -> + constraint_equivalence_abs(Config), + test(Config, fun constraint_equivalence/3). + +constraint_equivalence(Config, Rule, Opts) -> + M = 'ConstraintEquivalence', + asn1_test_lib:compile(M, Config, [Rule|Opts]). + +constraint_equivalence_abs(Config) -> DataDir = ?config(data_dir, Config), CaseDir = ?config(case_dir, Config), Asn1Spec = "ConstraintEquivalence", @@ -765,6 +755,16 @@ testInfObjectClass(Config, Rule, Opts) -> testInfObjectClass:main(Rule), testInfObj:main(Rule). +testUniqueObjectSets(Config) -> test(Config, fun testUniqueObjectSets/3). +testUniqueObjectSets(Config, Rule, Opts) -> + CaseDir = ?config(case_dir, Config), + testUniqueObjectSets:main(CaseDir, Rule, Opts). + +testInfObjExtract(Config) -> test(Config, fun testInfObjExtract/3). +testInfObjExtract(Config, Rule, Opts) -> + asn1_test_lib:compile("InfObjExtract", Config, [Rule|Opts]), + testInfObjExtract:main(). + testParam(Config) -> test(Config, fun testParam/3, [ber,{ber,[der]},per,uper]). testParam(Config, Rule, Opts) -> @@ -804,18 +804,14 @@ testDeepTConstr(Config, Rule, Opts) -> [Rule|Opts]), testDeepTConstr:main(Rule). -testExport(Config) -> - {error, _} = - asn1ct:compile(filename:join(?config(data_dir, Config), - "IllegalExport"), - [{outdir, ?config(case_dir, Config)}]). - testImport(Config) -> test(Config, fun testImport/3). testImport(Config, Rule, Opts) -> - Files = ["ImportsFrom","ImportsFrom2","ImportsFrom3"], + Files = ["ImportsFrom","ImportsFrom2","ImportsFrom3", + "Importing","Exporting"], asn1_test_lib:compile_all(Files, Config, [Rule|Opts]), 42 = 'ImportsFrom':i(), + testImporting:main(), ok. testMegaco(Config) -> test(Config, fun testMegaco/3). @@ -839,24 +835,20 @@ testContextSwitchingTypes(Config, Rule, Opts) -> testTypeValueNotation(Config) -> test(Config, fun testTypeValueNotation/3). testTypeValueNotation(Config, Rule, Opts) -> - asn1_test_lib:compile_all(["SeqTypeRefPrim", "ValueTest"], Config, - [Rule|Opts]), + asn1_test_lib:compile("SeqTypeRefPrim", Config, [Rule|Opts]), testTypeValueNotation:main(Rule, Opts). +testValueTest(Config) -> test(Config, fun testValueTest/3). +testValueTest(Config, Rule, Opts) -> + asn1_test_lib:compile("ValueTest", Config, [Rule|Opts]), + testValueTest:main(). + testOpenTypeImplicitTag(Config) -> test(Config, fun testOpenTypeImplicitTag/3). testOpenTypeImplicitTag(Config, Rule, Opts) -> asn1_test_lib:compile("OpenTypeImplicitTag", Config, [Rule|Opts]), testOpenTypeImplicitTag:main(Rule). 
-duplicate_tags(Config) -> - DataDir = ?config(data_dir, Config), - CaseDir = ?config(case_dir, Config), - {error, [{error, {type, _, _, 'SeqOpt1Imp', - {asn1, {duplicates_of_the_tags, _}}}}]} = - asn1ct:compile(filename:join(DataDir, "SeqOptional2"), - [abs, {outdir, CaseDir}]). - rtUI(Config) -> test(Config, fun rtUI/3). rtUI(Config, Rule, Opts) -> asn1_test_lib:compile("Prim", Config, [Rule|Opts]), @@ -990,13 +982,22 @@ testS1AP(Config, Rule, Opts) -> ok end. +testRfcs(Config) -> test(Config, fun testRfcs/3, [{ber,[der]}]). +testRfcs(Config, Rule, Opts) -> + case erlang:system_info(system_architecture) of + "sparc-sun-solaris2.10" -> + {skip,"Too slow for an old Sparc"}; + _ -> + testRfcs:compile(Config, Rule, Opts), + testRfcs:test() + end. + test_compile_options(Config) -> ok = test_compile_options:wrong_path(Config), ok = test_compile_options:path(Config), ok = test_compile_options:noobj(Config), ok = test_compile_options:record_name_prefix(Config), - ok = test_compile_options:verbose(Config), - ok = test_compile_options:warnings_as_errors(Config). + ok = test_compile_options:verbose(Config). testDoubleEllipses(Config) -> test(Config, fun testDoubleEllipses/3). testDoubleEllipses(Config, Rule, Opts) -> @@ -1084,6 +1085,7 @@ test_modules() -> "CommonDataTypes", "Constraints", "ContextSwitchingTypes", + "CoverParser", "DS-EquipmentUser-CommonFunctionOrig-TransmissionPath", "Enum", "From", @@ -1118,7 +1120,9 @@ test_modules() -> "Def", "Opt", "ELDAPv3", - "LDAP"]. + "LDAP", + "SeqOptional2", + "CCSNARG3"]. test_OTP_9688(Config) -> PrivDir = ?config(case_dir, Config), diff --git a/lib/asn1/test/asn1_SUITE_data/BadTypeEnding.asn b/lib/asn1/test/asn1_SUITE_data/BadTypeEnding.asn deleted file mode 100644 index 3ccd838ac0..0000000000 --- a/lib/asn1/test/asn1_SUITE_data/BadTypeEnding.asn +++ /dev/null @@ -1,6 +0,0 @@ -BadTypeEnding DEFINITIONS ::= -BEGIN - -T ::= Typ; - -END diff --git a/lib/asn1/test/asn1_SUITE_data/BadValueAssignment1.asn1 b/lib/asn1/test/asn1_SUITE_data/BadValueAssignment1.asn1 deleted file mode 100644 index a5d4984e60..0000000000 --- a/lib/asn1/test/asn1_SUITE_data/BadValueAssignment1.asn1 +++ /dev/null @@ -1,8 +0,0 @@ -BadValueAssignment1 DEFINITIONS ::= -BEGIN - -int INTEGER ::= 3 - -int2 integer ::= 3 - -END diff --git a/lib/asn1/test/asn1_SUITE_data/BadValueAssignment2.asn1 b/lib/asn1/test/asn1_SUITE_data/BadValueAssignment2.asn1 deleted file mode 100644 index 7a96406001..0000000000 --- a/lib/asn1/test/asn1_SUITE_data/BadValueAssignment2.asn1 +++ /dev/null @@ -1,8 +0,0 @@ -BadValueAssignment2 DEFINITIONS ::= -BEGIN - -int INTEGER ::= 3 - -int2 ::= 3 - -END diff --git a/lib/asn1/test/asn1_SUITE_data/BadValueSet.asn1 b/lib/asn1/test/asn1_SUITE_data/BadValueSet.asn1 deleted file mode 100644 index 68bd4380b7..0000000000 --- a/lib/asn1/test/asn1_SUITE_data/BadValueSet.asn1 +++ /dev/null @@ -1,9 +0,0 @@ -BadValueSet DEFINITIONS ::= -BEGIN - -Int INTEGER ::= {1|2|3} - -Int2 INTEGER ::= { - 1,2,3} - -END diff --git a/lib/asn1/test/asn1_SUITE_data/CCSNARG3.asn b/lib/asn1/test/asn1_SUITE_data/CCSNARG3.asn index 23c1f32ceb..8932238adc 100644 --- a/lib/asn1/test/asn1_SUITE_data/CCSNARG3.asn +++ b/lib/asn1/test/asn1_SUITE_data/CCSNARG3.asn @@ -3,7 +3,7 @@ BEGIN CallCentreServiceNotificationArg ::= SEQUENCE { scriptInformation [0] ScriptToScriptInformation, - eventInformation [1] IMPLICIT EventInformation OPTIONAL + eventInformation [1] EventInformation OPTIONAL } diff --git a/lib/asn1/test/asn1_SUITE_data/ChoExtension.asn1 b/lib/asn1/test/asn1_SUITE_data/ChoExtension.asn1 index 
f6fe18be10..18473bae30 100644 --- a/lib/asn1/test/asn1_SUITE_data/ChoExtension.asn1 +++ b/lib/asn1/test/asn1_SUITE_data/ChoExtension.asn1 @@ -41,10 +41,4 @@ ChoExt4 ::= CHOICE str OCTET STRING } -ChoEmptyRoot ::= CHOICE { - ..., - bool BOOLEAN, - int INTEGER (0..7) -} - END diff --git a/lib/asn1/test/asn1_SUITE_data/ChoiceBadExtension.asn1 b/lib/asn1/test/asn1_SUITE_data/ChoiceBadExtension.asn1 deleted file mode 100644 index d0789d7414..0000000000 --- a/lib/asn1/test/asn1_SUITE_data/ChoiceBadExtension.asn1 +++ /dev/null @@ -1,27 +0,0 @@ -ChoiceBadExtension DEFINITIONS ::= -BEGIN - -Seq ::= SEQUENCE { - ..., - name PrintableString, - location INTEGER {home(0),field(1),roving(2)}, - age INTEGER - } - -Cho1 ::= CHOICE { - name PrintableString, - ..., - location INTEGER {home(0),field(1),roving(2)}, - age INTEGER - } - -Cho2 ::= CHOICE { - ..., - name PrintableString, - location INTEGER {home(0),field(1),roving(2)}, - age INTEGER - } - -END - - diff --git a/lib/asn1/test/asn1_SUITE_data/ConstraintEquivalence.asn1 b/lib/asn1/test/asn1_SUITE_data/ConstraintEquivalence.asn1 index 8b3d151502..648275dd66 100644 --- a/lib/asn1/test/asn1_SUITE_data/ConstraintEquivalence.asn1 +++ b/lib/asn1/test/asn1_SUITE_data/ConstraintEquivalence.asn1 @@ -11,6 +11,10 @@ BEGIN SingleValueX8 ::= INTEGER (integer42) SingleValueX9 ::= INTEGER (integer42..integer42) SingleValueX10 ::= INTEGER ((integer42) INTERSECTION (40..49)) + SingleValueX11 ::= INTEGER (40..49) (integer42) + SingleValueX12 ::= INTEGER ((MIN..0) ^ (1..10) | integer42) + SingleValueX13 ::= INTEGER ((11..20) ^ (1..10) | integer42) + SingleValueX14 ::= INTEGER ((MIN..42) ^ (1..100) ^ (42..50)) UnconstrainedX0 ::= INTEGER UnconstrainedX1 ::= INTEGER (MIN..MAX) @@ -19,6 +23,10 @@ BEGIN UnconstrainedX4 ::= INTEGER ((MIN..MAX)|9|10) UnconstrainedX5 ::= INTEGER ((MIN..MAX)|10..20) UnconstrainedX6 ::= INTEGER ((MIN..MAX) UNION (10..20)) + UnconstrainedX7 ::= INTEGER ((MIN..MAX) ^ ((MIN..MAX) UNION (10..20))) + UnconstrainedX8 ::= INTEGER ((-100..MAX) ^ (42..MAX) | (MIN..41)) + UnconstrainedX9 ::= INTEGER (UnconstrainedX0) + UnconstrainedX10 ::= INTEGER (UnconstrainedX0)(MIN..MAX) RangeX00 ::= INTEGER (5..10) RangeX01 ::= INTEGER (4<..<11) @@ -38,22 +46,66 @@ BEGIN RangeX16 ::= INTEGER ((5|6) UNION (7) UNION (7<..<11)) RangeX20 ::= INTEGER (0..20) (5..10) - RangeX21 ::= INTEGER (0..10) (5..20) - RangeX22 ::= INTEGER (0..10) (5..20) (MIN..MAX) - RangeX23 ::= INTEGER ((0..10) INTERSECTION (5..20) ^ (MIN..MAX)) - RangeX24 ::= INTEGER ((5|6|7|8|9|10) INTERSECTION (5..20) ^ (MIN..MAX)) + RangeX21 ::= INTEGER ((0..10) ^ (5..20)) + RangeX22 ::= INTEGER ((0..10) ^ (5..20) ^ (MIN..MAX)) + RangeX23 ::= INTEGER (MIN..MAX) (-100..20) (5..10) + RangeX24 ::= INTEGER (MIN..MAX) (0..100) (5..20) (5..10) + RangeX25 ::= INTEGER ((0..10) INTERSECTION (5..20) ^ (MIN..MAX)) + RangeX26 ::= INTEGER ((5|6|7|8|9|10) INTERSECTION (5..20) ^ (MIN..MAX)) + + RangeX30 ::= INTEGER (((5|6) | (5..20)) ^ (0..10)) + RangeX31 ::= INTEGER (((((5|6) | (5..20)) ^ (0..10))) ^ (MIN..MAX)) + RangeX32 ::= INTEGER ((5|7) | (5..10)) + + Semi00 ::= INTEGER (0..MAX) + Semi01 ::= INTEGER (0..MAX) (MIN..MAX) + Semi02 ::= INTEGER ((0..100) UNION (200..MAX) UNION (50..1024)) + + RangeExtX00 ::= INTEGER (5..10, ...) + RangeExtX01 ::= INTEGER (0..20) (5..10, ...) + RangeExtX02 ::= INTEGER (RangeX26) (5..10, ...) +-- RangeExtX03 ::= RangeX26 (5..10, ...) 
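Editorial note: the new SingleValueX11 through SingleValueX14 types above are all different spellings of "INTEGER restricted to exactly 42", which is the point of the ConstraintEquivalence module: the compiler should reduce them to the same internal constraint. A sketch of how one might check that from Erlang, assuming the module has been compiled for some back-end and using the generated encode/2 API (this is not the actual test case, which lives in asn1_SUITE.erl):

-module(constraint_equiv_sketch).
-export([check/0]).

%% All four types denote the single value 42, so encoding 42 should give
%% the same result for each of them under any one back-end.
check() ->
    M = 'ConstraintEquivalence',
    Types = ['SingleValueX11','SingleValueX12',
             'SingleValueX13','SingleValueX14'],
    [_] = lists:usort([M:encode(T, 42) || T <- Types]),
    ok.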
+ + MinRangeX00 ::= INTEGER (MIN..10) + MinRangeX01 ::= INTEGER ((MIN..0) | (0..10)) + MinRangeX02 ::= INTEGER (MIN..MAX) (MIN..100) (MIN..10) + MinRangeX03 ::= INTEGER (((MIN..-100)|(-60..-50)) | (MIN..10)) + + DisjointRangeX00 ::= INTEGER (0..5 UNION 95..99) + DisjointRangeX01 ::= INTEGER (0|1|2|3|4|5|95|96|97|98|99) + DisjointRangeX02 ::= INTEGER (0..100) (0..2 UNION 95..99 UNION 3|4|5) + DisjointRangeX03 ::= INTEGER (MIN..MAX) (0..2 UNION 95..99 UNION 3|4|5) + + MinDisjointRangeX00 ::= INTEGER (MIN..-100 UNION 100..1000) + MinDisjointRangeX01 ::= INTEGER (MIN..-100 UNION 100..1000 UNION (MIN..-100)) + MinDisjointRangeX02 ::= INTEGER (MIN..-50000 UNION 100..1000 UNION (MIN..-100)) + MinDisjointRangeX03 ::= INTEGER (MIN..-100 UNION 100..1000 UNION (MIN..-1000000)) + MinDisjointRangeX04 ::= INTEGER (MIN..-100 UNION 100..1000 UNION (MIN..-1000000)) + MinDisjointRangeX05 ::= INTEGER (MIN..-100 ^ (MIN..-100) UNION 100..1000) + MinDisjointRangeX06 ::= INTEGER (MIN..-100 ^ (MIN..0) UNION 100..1000) UnconstrainedStringX00 ::= IA5String UnconstrainedStringX01 ::= IA5String (SIZE (0..MAX)) + UnconstrainedStringX02 ::= IA5String (SIZE (0..42|43..MAX)) ConstrainedStringX00 ::= IA5String (SIZE (0..5)) ConstrainedStringX01 ::= IA5String (SIZE (0|1|2|3|4|5)) + StringExtFromX00 ::= IA5String (FROM ("AB", ..., "CD"))(SIZE (1..10, ..., 15..20)) + StringExtFromX01 ::= IA5String (FROM ("AB", ..., "CD"))(SIZE (1..10, ..., 15..20)) + StringExtFromX02 ::= IA5String ((FROM ("AB", ..., "CD")) ^ ((SIZE (1..10, ..., 15..20)))) + StringExtFromX03 ::= IA5String ((FROM ("AB", ..., "CD")) ^ (SIZE (1..10, ..., 15..20))) + StringExtFromX04 ::= IA5String (StringExtFromX00) + -- Note: None of the back-ends care about the exact values -- outside of the root range. ExtConstrainedStringX00 ::= IA5String (SIZE (1..2, ...)) ExtConstrainedStringX01 ::= IA5String (SIZE (1|2, ..., 3)) ExtConstrainedStringX02 ::= IA5String (SIZE (1|2, ..., 3|4|5)) + ExtConstrainedStringX03 ::= IA5String (SIZE (1|2, ..., 1|2|3|4|5)) + ExtConstrainedStringX04 ::= IA5String (SIZE (1|2), ..., SIZE (1|2|3|4|5)) + ExtConstrainedStringX05 ::= IA5String (SIZE (1|2, ...), ..., + SIZE (1|2|3|4|5, ...)) integer4 INTEGER ::= 4 integer11 INTEGER ::= 11 diff --git a/lib/asn1/test/asn1_SUITE_data/Constraints.py b/lib/asn1/test/asn1_SUITE_data/Constraints.py index 3495cd841b..a40c513141 100644 --- a/lib/asn1/test/asn1_SUITE_data/Constraints.py +++ b/lib/asn1/test/asn1_SUITE_data/Constraints.py @@ -81,7 +81,7 @@ maxNrOfCellPortionsPerCell-1 INTEGER ::= 35 CellPortionID ::= INTEGER (0..maxNrOfCellPortionsPerCell-1,...) -- OTP-6763 -T ::= IA5String (SIZE (1|2, ..., SIZE (1|2|3))) -- Dubuisson 268 +T ::= IA5String (SIZE (1|2), ..., SIZE (1|2|3)) -- Dubuisson 268 T2 ::= IA5String (SIZE (1|2, ..., 3)) -- equal with T -- OTP-8046 @@ -144,5 +144,47 @@ NonOverlapping ::= INTEGER (7280..7560 | 23000..24000 | 24960..26900) +-- +-- Test INTEGER constraints from fields in objects. +-- + +INT-HOLDER ::= CLASS { + &id INTEGER UNIQUE, + &obj INT-HOLDER OPTIONAL +} WITH SYNTAX { + ID &id + [OBJ &obj] +} + +int-holder-1 INT-HOLDER ::= { ID 2 } +int-holder-2 INT-HOLDER ::= { ID 4 OBJ int-holder-1 } + +IntObjectConstr ::= INTEGER (int-holder-2.&obj.&id..int-holder-2.&id) + +-- +-- INTEGER constraints defined using named INTEGERs. 
+-- + +ConstrainedNamedInt ::= INTEGER {v1(42)} (v1) +constrainedNamedInt-1 INTEGER {v1(42)} (v1) ::= 42 +constrainedNamedInt-2 ConstrainedNamedInt ::= 100 + +SeqWithNamedInt ::= SEQUENCE { + int INTEGER {v2(7)} (v2) +} + +-- +-- Cover simpletable constraint checking code. +-- + +ContentInfo ::= SEQUENCE { + contentType ContentType +} + +Contents TYPE-IDENTIFIER ::= { + {OCTET STRING IDENTIFIED BY {2 1 1 1 1 1 1}} +} + +ContentType ::= TYPE-IDENTIFIER.&id({Contents}) END diff --git a/lib/asn1/test/asn1_SUITE_data/CoverParser.asn1 b/lib/asn1/test/asn1_SUITE_data/CoverParser.asn1 new file mode 100644 index 0000000000..75d40188ca --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/CoverParser.asn1 @@ -0,0 +1,57 @@ +CoverParser DEFINITIONS AUTOMATIC TAGS ::= +BEGIN + + Cho1 ::= CHOICE { + i INTEGER, + ... ! 42, + [[ b BOOLEAN ]] + } + + Cho2 ::= CHOICE { + i INTEGER, + ..., + [[ b BOOLEAN, + s IA5String ]], + ... + } + + Int1 ::= INTEGER (CONSTRAINED BY {INTEGER:1,INTEGER:2}) + + Seq1 ::= SEQUENCE { + ... ! INTEGER:1 + } + + Seq2 ::= SEQUENCE { + ... ! INTEGER:1, + i INTEGER + } + + Seq3 ::= SEQUENCE { + b BOOLEAN, + ... ! INTEGER:1, + i INTEGER + } + + Seq4 ::= SEQUENCE { + a INTEGER OPTIONAL, + b OCTET STRING OPTIONAL + } (WITH COMPONENTS {a ABSENT, b OPTIONAL} | + WITH COMPONENTS {a PRESENT, b PRESENT}) + + SeqOf1 ::= SEQUENCE OF INTEGER + SeqOf2 ::= SeqOf1 (WITH COMPONENT (0..7)) + + SegOf3 ::= SEQUENCE (SIZE (1..10)) OF id INTEGER + + Set1 ::= SET { + ... ! INTEGER:1 + } + + Set2 ::= SET { + ... ! INTEGER:1, + a INTEGER + } + + SetOf3 ::= SET (SIZE (1..10)) OF id INTEGER + +END diff --git a/lib/asn1/test/asn1_SUITE_data/EnumExt.asn1 b/lib/asn1/test/asn1_SUITE_data/EnumExt.asn1 index 74fa97e7aa..55ad5a01a1 100644 --- a/lib/asn1/test/asn1_SUITE_data/EnumExt.asn1 +++ b/lib/asn1/test/asn1_SUITE_data/EnumExt.asn1 @@ -53,5 +53,7 @@ SeqBig ::= SEQUENCE { i INTEGER } +EnumSkip ::= ENUMERATED {a(2), ..., b, c, d, e, f} + END diff --git a/lib/asn1/test/asn1_SUITE_data/Example.asn1 b/lib/asn1/test/asn1_SUITE_data/Example.asn1 deleted file mode 100644 index 2639f63940..0000000000 --- a/lib/asn1/test/asn1_SUITE_data/Example.asn1 +++ /dev/null @@ -1,20 +0,0 @@ -Example DEFINITIONS ::= -BEGIN - -T ::= Typ - -Typ ::= SEQUENCE { - a b, - c Typ} ---ECLASS ::= CLASS { --- &num INTEGER UNIQUE, --- &Typo --- } WITH SYNTAX { --- &Typo DETERMINED BY &num --- } - ---v1 ECLASS ::= {INTEGER DETERMINED BY 12} - ---v2 INTEGER ::= 13 - -END diff --git a/lib/asn1/test/asn1_SUITE_data/Export1.asn b/lib/asn1/test/asn1_SUITE_data/Export1.asn deleted file mode 100644 index 78ead8f4d2..0000000000 --- a/lib/asn1/test/asn1_SUITE_data/Export1.asn +++ /dev/null @@ -1,7 +0,0 @@ -Export1 DEFINITIONS ::= -BEGIN -EXPORTS T - -T ::= Typ - -END diff --git a/lib/asn1/test/asn1_SUITE_data/Exporting.asn1 b/lib/asn1/test/asn1_SUITE_data/Exporting.asn1 new file mode 100644 index 0000000000..e4f32f6788 --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/Exporting.asn1 @@ -0,0 +1,18 @@ +Exporting DEFINITIONS AUTOMATIC TAGS ::= +BEGIN + + Seq ::= SEQUENCE { id INTEGER, f BOOLEAN } + PtSeq{T} ::= SEQUENCE { a T } + + CL ::= CLASS { + &id INTEGER UNIQUE, + &Type + } WITH SYNTAX { + ID &id TYPE &Type + } + + obj CL ::= { ID 1 TYPE OCTET STRING } + + pt-object{CL:ob} CL ::= {ID ob.&id TYPE OCTET STRING} + +END diff --git a/lib/asn1/test/asn1_SUITE_data/ExtensibilityImplied.asn1 b/lib/asn1/test/asn1_SUITE_data/ExtensibilityImplied.asn1 new file mode 100644 index 0000000000..d59b0edda5 --- /dev/null +++ 
b/lib/asn1/test/asn1_SUITE_data/ExtensibilityImplied.asn1 @@ -0,0 +1,30 @@ +ExtensibilityImplied DEFINITIONS +AUTOMATIC TAGS +EXTENSIBILITY IMPLIED +::= +BEGIN + +Enum1 ::= ENUMERATED { root, ..., ext } +Enum2 ::= ENUMERATED { root } + +Seq1 ::= SEQUENCE { + b BOOLEAN, + ..., + i INTEGER +} + +Seq2 ::= SEQUENCE { + b BOOLEAN +} + +Set1 ::= SET { + b BOOLEAN, + ..., + i INTEGER +} + +Set2 ::= SET { + b BOOLEAN +} + +END diff --git a/lib/asn1/test/asn1_SUITE_data/IllegalExport.asn1 b/lib/asn1/test/asn1_SUITE_data/IllegalExport.asn1 deleted file mode 100644 index 1b5e42ad3c..0000000000 --- a/lib/asn1/test/asn1_SUITE_data/IllegalExport.asn1 +++ /dev/null @@ -1,7 +0,0 @@ -IllegalExport DEFINITIONS ::= -BEGIN -EXPORTS T, KalleAnka; - -T ::= INTEGER - -END diff --git a/lib/asn1/test/asn1_SUITE_data/Importing.asn1 b/lib/asn1/test/asn1_SUITE_data/Importing.asn1 new file mode 100644 index 0000000000..2f2699c576 --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/Importing.asn1 @@ -0,0 +1,20 @@ +Importing DEFINITIONS AUTOMATIC TAGS ::= +BEGIN + + Seq ::= Exporting.PtSeq{ INTEGER(0..7) } + OtherSeq ::= Exporting.Seq + + seq Exporting.Seq ::= { id 42, f TRUE } + + o1 Exporting.CL ::= { ID 2 TYPE INTEGER (0..63) } + + ObjSet Exporting.CL ::= { o1 | Exporting.obj } + + ObjSeq ::= SEQUENCE { + id Exporting.CL.&id ({ObjSet}), + type Exporting.CL.&Type ({ObjSet}{@id}) + } + + o1-cloned Exporting.CL ::= Exporting.pt-object{o1} + +END diff --git a/lib/asn1/test/asn1_SUITE_data/InfObj.asn b/lib/asn1/test/asn1_SUITE_data/InfObj.asn index 719119f418..3b88770d78 100644 --- a/lib/asn1/test/asn1_SUITE_data/InfObj.asn +++ b/lib/asn1/test/asn1_SUITE_data/InfObj.asn @@ -206,7 +206,9 @@ ConstructedDefaultSet CONSTRUCTED-DEFAULT ::= { { &id 4, &Type SET { a INTEGER, b BIT STRING } } | { &id 5, &Type CHOICE { i INTEGER, b BIT STRING } } | { &id 6, &Type SEQUENCE OF INTEGER (1..16) } | - { &id 7, &Type SET OF INTEGER (1..64) } + { &id 7, &Type SET OF INTEGER (1..64) } | + { &id 8, &Type SEQUENCE OF SEQUENCE { x INTEGER, y INTEGER } } | + { &id 9, &Type SET OF SEQUENCE { x INTEGER, y INTEGER } } } ConstructedPdu ::= SEQUENCE { @@ -288,18 +290,196 @@ OstSeq1234 ::= ObjectSetTest{ {Ost1234} } OstSeq45 ::= ObjectSetTest{ {Ost45} } OstSeq12345 ::= ObjectSetTest{ {Ost12345} } +OstSeq12Except ::= ObjectSetTest{ {Ost123 EXCEPT ost3} } +OstSeq123Except ::= ObjectSetTest{ {Ost12345 EXCEPT Ost45} } + +ExOst1 OBJECT-SET-TEST ::= { ost1, ... } ExOst12 OBJECT-SET-TEST ::= { ost1, ..., ost2 } ExOst123 OBJECT-SET-TEST ::= { ost3, ..., ExOst12 } ---ExOst1234 OBJECT-SET-TEST ::= { ExOst123, ..., ost4 } +ExOst1234 OBJECT-SET-TEST ::= { ExOst123, ..., ost4 } ExOst45 OBJECT-SET-TEST ::= { ost4, ..., ost5 } ExOst12345 OBJECT-SET-TEST ::= { ExOst123, ..., ExOst45 } +ExOstSeq1 ::= ObjectSetTest{ {ExOst1} } ExOstSeq12 ::= ObjectSetTest{ {ExOst12} } ExOstSeq123 ::= ObjectSetTest{ {ExOst123} } ---ExOstSeq1234 ::= ObjectSetTest{ {ExOst1234} } +ExOstSeq1234 ::= ObjectSetTest{ {ExOst1234} } ExOstSeq45 ::= ObjectSetTest{ {ExOst45} } ExOstSeq12345 ::= ObjectSetTest{ {ExOst12345} } -END +ExOstSeq12Except ::= ObjectSetTest{ {ExOst123 EXCEPT ost3} } +ExOstSeq123Except ::= ObjectSetTest{ {ExOst12345 EXCEPT ExOst45} } + +ExInlOst1 OBJECT-SET-TEST ::= { + { 1 IS BIT STRING }, + ... +} +ExInlOst12 OBJECT-SET-TEST ::= { + { 1 IS BIT STRING }, + ..., + { 2 IS OCTET STRING } +} + +ExInlOstSeq1 ::= ObjectSetTest{ {ExInlOst1} } +ExInlOstSeq12 ::= ObjectSetTest{ {ExInlOst12} } + +-- +-- Test that extensions in a simple class works. 
+-- + +ExtClassSeq ::= SEQUENCE { + arg EXT-CLASS.&id({Extend}) +} + +EXT-CLASS ::= CLASS { + &id INTEGER UNIQUE +} WITH SYNTAX { + ID &id +} + +Extend EXT-CLASS ::= { { ID alt1 } | { ID alt2 }, ... } + +alt1 INTEGER ::= 4 +alt2 INTEGER ::= 5 + + +-- +-- Test a BIT STRING which is optional in the simplified syntax. +-- + +PUBLIC-KEY ::= CLASS { + &id INTEGER UNIQUE, + &keyUsage KeyUsage OPTIONAL +} WITH SYNTAX { + IDENTIFIER &id + [OPTIONAL-BIT-STRING &keyUsage] +} + +KeyUsage ::= BIT STRING { + digitalSignature (0), + nonRepudiation (1), + keyEncipherment (2) + } + +object-with-optional-bit-string PUBLIC-KEY ::= { + IDENTIFIER 42 + OPTIONAL-BIT-STRING {digitalSignature, nonRepudiation, keyEncipherment} +} + +-- Test object identifiers from objects. + +CONTAINER ::= CLASS { + &id OBJECT IDENTIFIER UNIQUE, + &rid RELATIVE-OID OPTIONAL, + &Type OPTIONAL +} WITH SYNTAX { + IDENTIFIED BY &id + [REL-OID &rid] + [TYPE &Type] +} + +id1 OBJECT IDENTIFIER ::= {1 2 42} +obj1 CONTAINER ::= { IDENTIFIED BY id1 REL-OID {100 101} } + +value-2 OBJECT IDENTIFIER ::= { value-1 25 } +value-1 OBJECT IDENTIFIER ::= obj1.&id +value-3 RELATIVE-OID ::= obj1.&rid +value-4 OBJECT IDENTIFIER ::= { 1 2 value-3 } + + +-- Test an obscure issue when ATTRIBUTE.&id was not +-- properly evaluated. + +Rdn ::= SingleAttribute { {SupportedAttributes} } + +ATTRIBUTE ::= CLASS { + &id OBJECT IDENTIFIER UNIQUE, + &Type OPTIONAL +} + +SingleAttribute{ATTRIBUTE:AttrSet} ::= SEQUENCE { + type ATTRIBUTE.&id({AttrSet}), + value ATTRIBUTE.&Type({AttrSet}{@type}) +} +AttributeType ::= ATTRIBUTE.&id +SupportedAttributes ATTRIBUTE ::= { at-name } + +id-at OBJECT IDENTIFIER ::= { 2 5 4 41 } +id-at-name AttributeType ::= id-at +at-name ATTRIBUTE ::= { &Type PrintableString, &id id-at-name } + +-- +-- Test using an alias for TYPE-IDENTIFIER. +-- + +TiAliasParameterized { TI-ALIAS:InfoObjectSet } ::= SEQUENCE { + algorithm TI-ALIAS.&id({InfoObjectSet}), + parameters TI-ALIAS.&Type({InfoObjectSet} {@algorithm}) OPTIONAL +} + +TI-ALIAS ::= TYPE-IDENTIFIER + +TiAliasSeq ::= SEQUENCE { + prf TiAliasParameterized {{TiAliasSet}} +} + +TiAliasSet TI-ALIAS ::= { + {NULL IDENTIFIED BY {2 1 2}}, + ... +} + +-- +-- Test using an alias for a class. +-- + +ALIAS-CONTAINER ::= CLASS { + &id INTEGER UNIQUE, + &obj INDIRECT-CLASS +} + +INDIRECTED-CLASS ::= CLASS { + &id INTEGER UNIQUE, + &Type +} + +INDIRECT-CLASS ::= INDIRECTED-CLASS + +-- +-- Indirect ObjectClassFieldType in a SEQUENCE. +-- + +ContentInfo ::= SEQUENCE { + contentType ContentType, -- Indirect ObjectClassFieldType + content TYPE-IDENTIFIER.&Type({Contents}{@contentType}) +OPTIONAL +} + +Contents TYPE-IDENTIFIER ::= { + {IA5String IDENTIFIED BY id-content-type} +} + +ContentType ::= TYPE-IDENTIFIER.&id({Contents}) +id-content-type ContentType ::= { 2 7 8 9 } + +-- +-- Tricky parsing of simplified syntax. 
+-- + +TrickyType-1 ::= BIT STRING +TrickyType-2 ::= OCTET STRING + +TRICKY ::= CLASS { + &Type1, + &Type2 +} WITH SYNTAX { + TYPE &Type1 &Type2 +} + +tricky-object TRICKY ::= {TYPE TrickyType-1 TrickyType-2} + +tricky-bit-string tricky-object.&Type1 ::= '1011'B +tricky-octet-string tricky-object.&Type1 ::= 'CAFE'H + +END diff --git a/lib/asn1/test/asn1_SUITE_data/InfObjExtract.asn1 b/lib/asn1/test/asn1_SUITE_data/InfObjExtract.asn1 new file mode 100644 index 0000000000..13981b546d --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/InfObjExtract.asn1 @@ -0,0 +1,136 @@ +InfObjExtract DEFINITIONS AUTOMATIC TAGS ::= +BEGIN + +DATA-CLASS ::= CLASS { + &id INTEGER UNIQUE, + &Type +} WITH SYNTAX { + ID &id + TYPE &Type +} + +data-object-1 DATA-CLASS ::= { ID 1 TYPE BOOLEAN } +data-object-2 DATA-CLASS ::= { ID 2 TYPE OCTET STRING } +data-object-3 DATA-CLASS ::= { ID 3 TYPE BIT STRING } + +ObjSet DATA-CLASS ::= { + holder-object-1.&obj | + data-object-2 | + data-object-3, + ... +} + +OBJ-SET DATA-CLASS ::= { + holder-object-1.&obj | + data-object-2 | + data-object-3, + ... +} + +SingleElementSet DATA-CLASS ::= { + holder-object-1.&obj +} + +holder-object-1 HOLDER-CLASS ::= { + OBJ data-object-1 +} + +holder-object-2 HOLDER-CLASS ::= { + OBJ-SET {data-object-1} +} + +holder-object-3 HOLDER-CLASS ::= { + OBJ-SET {holder-object-2.&ObjSet} +} + +-- Note: References to object sets with names in all uppercase/hyphens +-- may be represented differently compared to object sets with names +-- that contain lowercase letters. CAVEAT TESTOR. + +HOLDER-OBJECTS HOLDER-CLASS ::= { holder-object-2 } +HolderObjects HOLDER-CLASS ::= { holder-object-3 } + +holder-object-4 HOLDER-CLASS ::= { + OBJ-SET { HOLDER-OBJECTS.&ObjSet } +} + +holder-object-5 HOLDER-CLASS ::= { + OBJ-SET { HolderObjects.&ObjSet } +} + +holder-object-6 HOLDER-CLASS ::= { + OBJ-SET { OBJ-SET } +} + +holder-object-7 HOLDER-CLASS ::= { + OBJ-SET { ObjSet } +} + +HOLDER-CLASS ::= CLASS { + &obj DATA-CLASS OPTIONAL, + &ObjSet DATA-CLASS OPTIONAL +} WITH SYNTAX { + [OBJ &obj] + [OBJ-SET &ObjSet] +} + +TestSeq{DATA-CLASS:ObjectSet} ::= SEQUENCE { + id DATA-CLASS.&id ({ObjectSet}), + data DATA-CLASS.&Type ({ObjectSet}{@id}) +} + +DataSeq-1 ::= TestSeq{ {ObjSet} } +DataSeq-2 ::= TestSeq{ {holder-object-3.&ObjSet} } + +DataSeq-3 ::= TestSeq{ {holder-object-4.&ObjSet} } +DataSeq-4 ::= TestSeq{ {holder-object-5.&ObjSet} } +DataSeq-5 ::= TestSeq{ {holder-object-6.&ObjSet} } +DataSeq-6 ::= TestSeq{ {holder-object-7.&ObjSet} } + +DataSeqSingleSet-1 ::= TestSeq{ {SingleElementSet} } +DataSeqSingleSet-2 ::= TestSeq{ {holder-object-1.&obj} } + +-- +-- Test ObjectSetFromObjects. 
+-- + +OBJ-CLASS ::= CLASS { + &id INTEGER UNIQUE, + &Data OPTIONAL, + &Obj OBJ-CLASS OPTIONAL, + &obj OBJ-CLASS OPTIONAL +} + +obj-class-obj-1 OBJ-CLASS ::= { &id 1, &Data BOOLEAN } + +obj-class-obj-2 OBJ-CLASS ::= { &id 2, &Data BOOLEAN, + &Obj {obj-class-obj-1} } + +obj-class-obj-3 OBJ-CLASS ::= { &id 3, &Data BOOLEAN, + &obj {&id 99, &Obj {obj-class-obj-1}} } + +obj-class-obj-4 OBJ-CLASS ::= { &id 4, &Data BOOLEAN, &obj obj-class-obj-2 } + +obj-class-obj-5 OBJ-CLASS ::= { &id 5, &Data BOOLEAN, + &Obj {obj-class-obj-4.&obj} } + +ObjClassSet OBJ-CLASS ::= { obj-class-obj-3.&obj.&Obj | + obj-class-obj-4.&Obj | -- Non-existing field + obj-class-obj-5.&Obj + } + +TestObjClassSeq{OBJ-CLASS:ObjectSet} ::= SEQUENCE { + id OBJ-CLASS.&id ({ObjectSet}), + data OBJ-CLASS.&Data ({ObjectSet}{@id}) +} + +ObjClassSeq-1 ::= TestObjClassSeq{{ObjClassSet}} + +-- +-- Test several levels of inlined definitions. +-- + +obj-class-obj-6 OBJ-CLASS ::= { &id 6, &Obj {{&id 100, &Data INTEGER}}, + &Data INTEGER } + +END diff --git a/lib/asn1/test/asn1_SUITE_data/MissingEnd.asn1 b/lib/asn1/test/asn1_SUITE_data/MissingEnd.asn1 deleted file mode 100644 index 66912ef693..0000000000 --- a/lib/asn1/test/asn1_SUITE_data/MissingEnd.asn1 +++ /dev/null @@ -1,5 +0,0 @@ -MissingEnd DEFINITIONS ::= -BEGIN - -T ::= Typ - diff --git a/lib/asn1/test/asn1_SUITE_data/ObjIdValues.asn1 b/lib/asn1/test/asn1_SUITE_data/ObjIdValues.asn1 index 9368e8dceb..9193ed495c 100644 --- a/lib/asn1/test/asn1_SUITE_data/ObjIdValues.asn1 +++ b/lib/asn1/test/asn1_SUITE_data/ObjIdValues.asn1 @@ -50,6 +50,7 @@ itu-t-o OBJECT IDENTIFIER ::= {itu-t recommendation o} itu-t-p OBJECT IDENTIFIER ::= {itu-t recommendation p} itu-t-q OBJECT IDENTIFIER ::= {itu-t recommendation q} itu-t-r OBJECT IDENTIFIER ::= {itu-t recommendation r} +itu-t-s OBJECT IDENTIFIER ::= {itu-t recommendation s} itu-t-t OBJECT IDENTIFIER ::= {itu-t recommendation t} itu-t-u OBJECT IDENTIFIER ::= {itu-t recommendation u} itu-t-v OBJECT IDENTIFIER ::= {itu-t recommendation v} diff --git a/lib/asn1/test/asn1_SUITE_data/ParamBasic.asn1 b/lib/asn1/test/asn1_SUITE_data/ParamBasic.asn1 index 68fc782f33..d203b6c816 100644 --- a/lib/asn1/test/asn1_SUITE_data/ParamBasic.asn1 +++ b/lib/asn1/test/asn1_SUITE_data/ParamBasic.asn1 @@ -42,4 +42,37 @@ SIGNATURE-ALGORITHM ::= CLASS { KEY &id CONTAINING &Type } +alg-seq-1 AnAlgorithm ::= { algorithm 1, type 42 } +alg-seq-2 AnAlgorithm ::= { algorithm 2, type TRUE } + +-- +-- Test that indirect classes references are resolved. +-- + +AlgorithmIdentifier2 { ALGORITHM-IDENTIFIER:InfoObjectSet } ::= SEQUENCE { + algorithm ALGORITHM-IDENTIFIER.&id({InfoObjectSet}), + parameters ALGORITHM-IDENTIFIER.&Type({InfoObjectSet} {@algorithm}) OPTIONAL +} + +ALGORITHM-IDENTIFIER ::= TYPE-IDENTIFIER + +Seq ::= SEQUENCE { + c1 AlgorithmIdentifier2 {{ObjectSet-1}}, + c2 AlgorithmIdentifier2 {{ObjectSet-2}} +} + +ObjectSet-1 ALGORITHM-IDENTIFIER ::= { {INTEGER IDENTIFIED BY {2 1 1}}, ... } +ObjectSet-2 ALGORITHM-IDENTIFIER ::= { ... } + +-- Test a value that uses the instantiation of a parameterized type inline. +-- (Adapted from PKCS-5.) 
+-- + +algid-hmacWithSHA1 AlgorithmIdentifier2 {{ObjectSet-3}} ::= + {algorithm id-hmacWithSHA1, parameters NULL : NULL} + +ObjectSet-3 TYPE-IDENTIFIER ::= { {NULL IDENTIFIED BY id-hmacWithSHA1} } + +id-hmacWithSHA1 OBJECT IDENTIFIER ::= {2 9 9 9 7} + END diff --git a/lib/asn1/test/asn1_SUITE_data/Prim.asn1 b/lib/asn1/test/asn1_SUITE_data/Prim.asn1 index cc0e61422a..b4c011fd39 100644 --- a/lib/asn1/test/asn1_SUITE_data/Prim.asn1 +++ b/lib/asn1/test/asn1_SUITE_data/Prim.asn1 @@ -24,6 +24,8 @@ BEGIN friday(5),saturday(6),sunday(7)} SingleEnumVal ::= ENUMERATED {true} SingleEnumValExt ::= ENUMERATED {true, ...} + NegEnumVal ::= ENUMERATED {neg(-1), ..., zero(0)} + EnumVal128 ::= ENUMERATED {val(128)} ObjId ::= OBJECT IDENTIFIER diff --git a/lib/asn1/test/asn1_SUITE_data/SelectionType.asn b/lib/asn1/test/asn1_SUITE_data/SelectionType.asn index d7bfbf1788..6163f390dd 100644 --- a/lib/asn1/test/asn1_SUITE_data/SelectionType.asn +++ b/lib/asn1/test/asn1_SUITE_data/SelectionType.asn @@ -14,7 +14,7 @@ Element ::= CHOICE {bool BOOLEAN, utf UTF8String, ro RELATIVE-OID, nums NumericString, - symbol PrintableString, + symbol PrintableString, telet TeletexString, t61 T61String, video VideotexString, @@ -23,13 +23,14 @@ Element ::= CHOICE {bool BOOLEAN, generalizedTime GeneralizedTime, gs GraphicString, vs VisibleString, --- iso64 ISO646String, generalString GeneralString, univ UniversalString, cs CHARACTER STRING, bmp BMPString} -MendeleyevTable ::= SEQUENCE OF symbol < Element +MendeleyevTable ::= SEQUENCE OF symbol < Element +MendeleyevSet ::= SET OF atomic-no < Element + BoolType ::= bool < Element einsteinium symbol < Element ::= "Es" @@ -51,7 +52,6 @@ utctimev utctime < Element ::= "9805281429Z" gTime generalizedTime < Element ::= "19980528142905.1" gsv gs < Element ::= "graphic" vsv vs < Element ::= "visible" ---iso64v iso64 < Element ::= "iso" gStringv generalString < Element ::= "general" univv univ < Element ::= "Universal" bmov bmp < Element ::= "bmp" diff --git a/lib/asn1/test/asn1_SUITE_data/Seq.py b/lib/asn1/test/asn1_SUITE_data/Seq.py index f345373ab5..b68f9045a6 100644 --- a/lib/asn1/test/asn1_SUITE_data/Seq.py +++ b/lib/asn1/test/asn1_SUITE_data/Seq.py @@ -142,7 +142,10 @@ SeqImp3 ::= SET set Set1 } - +SeqCompOf ::= SEQUENCE { + ..., + COMPONENTS OF SeqS3 +} END diff --git a/lib/asn1/test/asn1_SUITE_data/SeqOptional2.asn b/lib/asn1/test/asn1_SUITE_data/SeqOptional2.asn index 7de9134096..bb85c9e418 100644 --- a/lib/asn1/test/asn1_SUITE_data/SeqOptional2.asn +++ b/lib/asn1/test/asn1_SUITE_data/SeqOptional2.asn @@ -15,10 +15,10 @@ SeqOpt1Imp ::= SEQUENCE bool1 [1] BOOLEAN OPTIONAL, int1 INTEGER, seq1 [2] SeqIn OPTIONAL, - seq2 [2] SeqIn OPTIONAL, + seq2 [3] SeqIn OPTIONAL, ..., - int2 [3] SeqIn, - int3 [3] SeqIn + int2 [4] SeqIn, + int3 [5] SeqIn } SeqOpt1Exp ::= SEQUENCE diff --git a/lib/asn1/test/asn1_SUITE_data/SequenceBadComma.asn b/lib/asn1/test/asn1_SUITE_data/SequenceBadComma.asn deleted file mode 100644 index 436815aa9b..0000000000 --- a/lib/asn1/test/asn1_SUITE_data/SequenceBadComma.asn +++ /dev/null @@ -1,10 +0,0 @@ -SequenceBadComma DEFINITIONS IMPLICIT TAGS ::= -BEGIN -EXPORTS Person; - -Person ::= [PRIVATE 19] SEQUENCE {, - name PrintableString, - location INTEGER {home(0),field(1),roving(2)}, - age INTEGER OPTIONAL - } -END diff --git a/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentName.asn1 b/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentName.asn1 deleted file mode 100644 index 8b2b8816db..0000000000 --- a/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentName.asn1 
+++ /dev/null @@ -1,10 +0,0 @@ -SequenceBadComponentName DEFINITIONS ::= -BEGIN - -T ::= Typ - -Typ ::= SEQUENCE { - a INTEGER, - C Typ} - -END diff --git a/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentType.asn1 b/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentType.asn1 deleted file mode 100644 index 0c33f48906..0000000000 --- a/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentType.asn1 +++ /dev/null @@ -1,10 +0,0 @@ -SequenceBadComponentType DEFINITIONS ::= -BEGIN - -T ::= Typ - -Typ ::= SEQUENCE { - a b, - c T} - -END diff --git a/lib/asn1/test/asn1_SUITE_data/Syntax.py b/lib/asn1/test/asn1_SUITE_data/Syntax.py deleted file mode 100644 index 867d1148e1..0000000000 --- a/lib/asn1/test/asn1_SUITE_data/Syntax.py +++ /dev/null @@ -1,10 +0,0 @@ -Syntax DEFINITIONS IMPLICIT TAGS ::= -BEGIN -EXPORTS Person; - -Person ::= [PRIVATE 19] SEQUENCE {, - name PrintableString, - location INTEGER {home(0),field(1),roving(2)}, - age INTEGER OPTIONAL - } -END diff --git a/lib/asn1/test/asn1_SUITE_data/ValueTest.asn b/lib/asn1/test/asn1_SUITE_data/ValueTest.asn index dae9ae498a..b2c59d686a 100644 --- a/lib/asn1/test/asn1_SUITE_data/ValueTest.asn +++ b/lib/asn1/test/asn1_SUITE_data/ValueTest.asn @@ -1,4 +1,4 @@ -ValueTest DEFINITIONS ::= +ValueTest DEFINITIONS AUTOMATIC TAGS ::= BEGIN @@ -23,8 +23,15 @@ vENUMERATED RadioButton ::= button1 vBS BSNNL ::= {zero,two} vNULL NULL ::= NULL vOS OCTET STRING ::= '313233'H -vOD OBJECT IDENTIFIER ::= {2 1 1} +-- OBJECT IDENTIFIER +vOD OBJECT IDENTIFIER ::= {2 1 1} +one INTEGER ::= 1 +integer-first OBJECT IDENTIFIER ::= {one 2} +rel-oid-1 RELATIVE-OID ::= {2 4 5} +include-roid OBJECT IDENTIFIER ::= {0 rel-oid-1} +include-oid OBJECT IDENTIFIER ::= {integer-first 1} +include-all OBJECT IDENTIFIER ::= {integer-first 1 rel-oid-1 42} --Character strings numericstring NumericString ::= "01234567" @@ -41,7 +48,6 @@ objectdescriptor ObjectDescriptor ::= "ObjectDescriptor" graphicstring GraphicString ::= "GraphicString" generalstring GeneralString ::= "GeneralString" bmpstring1 BMPString ::= "BMPString" ---bmpstring2 BMPString ::= [{0,0,0,66},{0,0,0,77},{0,0,0,80},{0,0,0,115},{0,0,0,116},{0,0,0,114},{0,0,0,105},{0,0,0,110},{0,0,0,103}] latinCapitalLetterA UniversalString ::= {0,0,0,65} greekCapitalLetterSigma UniversalString ::= {0,0,3,145} my-universalstring UniversalString ::= {"This is a capital A: ", @@ -50,4 +56,88 @@ my-universalstring UniversalString ::= {"This is a capital A: ", greekCapitalLetterSigma, "; try and spot the difference!"} +-- Useful parameterized SEQUENCE. +ParamSeq{Type} ::= SEQUENCE { + a Type +} + +-- Integer values. +IntegerSeq ::= ParamSeq{INTEGER} +someInteger INTEGER ::= 42 +integerSeq1 IntegerSeq ::= { a otherInteger } +otherInteger INTEGER ::= someInteger + +-- +-- Values from objects. +-- +int-from-object-1 INTEGER ::= int-holder-2.&obj.&id +int-from-object-2 INTEGER ::= int-holder-2.&id + +INT-HOLDER ::= CLASS { + &id INTEGER UNIQUE, + &obj INT-HOLDER OPTIONAL +} WITH SYNTAX { + ID &id + [OBJ &obj] +} + +int-holder-1 INT-HOLDER ::= { ID 2 } +int-holder-2 INT-HOLDER ::= { ID 4 OBJ int-holder-1 } + +II ::= INTEGER (int-from-object-1..int-from-object-2) + +-- Recursive OCTET STRING definitions. 
+ +OS-HOLDER ::= CLASS { + &id INTEGER UNIQUE, + &os OCTET STRING +} WITH SYNTAX { + ID &id OS &os +} + +os-holder-1 OS-HOLDER ::= { ID 1 OS '4041FF'H } + +OctetStringSeq ::= ParamSeq{OCTET STRING} + +someOctetString OCTET STRING ::= '404142'H + +octetStringSeq1 OctetStringSeq ::= { a someOctetString } +octetStringSeq2 OctetStringSeq ::= { a otherOctetString } +octetStringSeq3 OctetStringSeq ::= { a os-holder-1.&os } + +otherOctetString OCTET STRING ::= someOctetString + +os-1 OCTET STRING ::= os-2 +os-2 OCTET STRING ::= os-holder-1.&os + +-- Recursive BIT STRING definitions. + +BS-HOLDER ::= CLASS { + &id INTEGER UNIQUE, + &bs BIT STRING, + &named-bs NamedBsType +} WITH SYNTAX { + ID &id BS &bs NAMED-BS &named-bs +} +bs-holder-1 BS-HOLDER ::= { ID 1 BS '101'B NAMED-BS {a,c} } + +NamedBsType ::= BIT STRING {a(0),b(1),c(2)} +BsSeq ::= SEQUENCE { + a BIT STRING, + b NamedBsType +} + +someBitString BIT STRING ::= '101101'B + +bsSeq1 BsSeq ::= { a someBitString, b someNamedBs } +bsSeq2 BsSeq ::= { a otherBitString, b someOtherNamedBs } +bsSeq3 BsSeq ::= { a bs-holder-1.&bs, b bs-holder-1.&named-bs } + +otherBitString BIT STRING ::= someBitString +bsFromObjectInd BIT STRING ::= bsFromObject +bsFromObject BIT STRING ::= bs-holder-1.&bs + +someOtherNamedBs NamedBsType ::= someNamedBs +someNamedBs NamedBsType ::= {c} + +END diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ACSE-1.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ACSE-1.asn1 index 3f1385323a..3f1385323a 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/ACSE-1.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ACSE-1.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/AlgorithmInformation-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/AlgorithmInformation-2009.asn1 new file mode 100644 index 0000000000..f912966c72 --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/AlgorithmInformation-2009.asn1 @@ -0,0 +1,466 @@ +AlgorithmInformation-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) + id-mod-algorithmInformation-02(58)} + +DEFINITIONS EXPLICIT TAGS ::= +BEGIN +EXPORTS ALL; +IMPORTS + +KeyUsage +FROM PKIX1Implicit-2009 + {iso(1) identified-organization(3) dod(6) internet(1) + security(5) mechanisms(5) pkix(7) id-mod(0) + id-mod-pkix1-implicit-02(59)} ; + +-- Suggested prefixes for algorithm objects are: +-- +-- mda- Message Digest Algorithms +-- sa- Signature Algorithms +-- kta- Key Transport Algorithms (Asymmetric) +-- kaa- Key Agreement Algorithms (Asymmetric) +-- kwa- Key Wrap Algorithms (Symmetric) +-- kda- Key Derivation Algorithms +-- maca- Message Authentication Code Algorithms +-- pk- Public Key +-- cea- Content (symmetric) Encryption Algorithms +-- cap- S/MIME Capabilities + +ParamOptions ::= ENUMERATED { + required, -- Parameters MUST be encoded in structure + preferredPresent, -- Parameters SHOULD be encoded in structure + preferredAbsent, -- Parameters SHOULD NOT be encoded in structure + absent, -- Parameters MUST NOT be encoded in structure + inheritable, -- Parameters are inherited if not present + optional, -- Parameters MAY be encoded in the structure + ... +} + +-- DIGEST-ALGORITHM +-- +-- Describes the basic information for ASN.1 and a digest +-- algorithm. +-- +-- &id - contains the OID identifying the digest algorithm +-- &Params - if present, contains the type for the algorithm +-- parameters; if absent, implies no parameters +-- &paramPresence - parameter presence requirement +-- +-- Additional information such as the length of the hash could have +-- been encoded.
Without a clear understanding of what information +-- is needed by applications, such extraneous information was not +-- considered to be of sufficient importance. +-- +-- Example: +-- mda-sha1 DIGEST-ALGORITHM ::= { +-- IDENTIFIER id-sha1 +-- PARAMS TYPE NULL ARE preferredAbsent +-- } + +DIGEST-ALGORITHM ::= CLASS { + &id OBJECT IDENTIFIER UNIQUE, + &Params OPTIONAL, + &paramPresence ParamOptions DEFAULT absent +} WITH SYNTAX { + IDENTIFIER &id + [PARAMS [TYPE &Params] ARE &paramPresence ] +} + +-- SIGNATURE-ALGORITHM +-- +-- Describes the basic properties of a signature algorithm +-- +-- &id - contains the OID identifying the signature algorithm +-- &Value - contains a type definition for the value structure of +-- the signature; if absent, implies that no ASN.1 +-- encoding is performed on the value +-- &Params - if present, contains the type for the algorithm +-- parameters; if absent, implies no parameters +-- &paramPresence - parameter presence requirement +-- &HashSet - The set of hash algorithms used with this +-- signature algorithm +-- &PublicKeySet - the set of public key algorithms for this +-- signature algorithm +-- &smimeCaps - contains the object describing how the S/MIME +-- capabilities are presented. +-- +-- Example: +-- sig-RSA-PSS SIGNATURE-ALGORITHM ::= { +-- IDENTIFIER id-RSASSA-PSS +-- PARAMS TYPE RSASSA-PSS-params ARE required +-- HASHES { mda-sha1 | mda-md5, ... } +-- PUBLIC-KEYS { pk-rsa | pk-rsa-pss } +-- } + +SIGNATURE-ALGORITHM ::= CLASS { + &id OBJECT IDENTIFIER UNIQUE, + &Value OPTIONAL, + &Params OPTIONAL, + &paramPresence ParamOptions DEFAULT absent, + &HashSet DIGEST-ALGORITHM OPTIONAL, + &PublicKeySet PUBLIC-KEY OPTIONAL, + &smimeCaps SMIME-CAPS OPTIONAL +} WITH SYNTAX { + IDENTIFIER &id + [VALUE &Value] + [PARAMS [TYPE &Params] ARE &paramPresence ] + [HASHES &HashSet] + [PUBLIC-KEYS &PublicKeySet] + [SMIME-CAPS &smimeCaps] +} + +-- PUBLIC-KEY +-- +-- Describes the basic properties of a public key +-- +-- &id - contains the OID identifying the public key +-- &KeyValue - contains the type for the key value +-- &Params - if present, contains the type for the algorithm +-- parameters; if absent, implies no parameters +-- &paramPresence - parameter presence requirement +-- &keyUsage - contains the set of bits that are legal for this +-- key type. Note that it does not make any statement +-- about how bits may be paired. +-- &PrivateKey - contains a type structure for encoding the private +-- key information. +-- +-- Example: +-- pk-rsa-pss PUBLIC-KEY ::= { +-- IDENTIFIER id-RSASSA-PSS +-- KEY RSAPublicKey +-- PARAMS TYPE RSASSA-PSS-params ARE optional +-- CERT-KEY-USAGE { .... } +-- } + +PUBLIC-KEY ::= CLASS { + &id OBJECT IDENTIFIER UNIQUE, + &KeyValue OPTIONAL, + &Params OPTIONAL, + &paramPresence ParamOptions DEFAULT absent, + &keyUsage KeyUsage OPTIONAL, + &PrivateKey OPTIONAL +} WITH SYNTAX { + IDENTIFIER &id + [KEY &KeyValue] + [PARAMS [TYPE &Params] ARE &paramPresence] + [CERT-KEY-USAGE &keyUsage] + [PRIVATE-KEY &PrivateKey] +} + +-- KEY-TRANSPORT +-- +-- Describes the basic properties of a key transport algorithm +-- +-- &id - contains the OID identifying the key transport algorithm +-- &Params - if present, contains the type for the algorithm +-- parameters; if absent, implies no parameters +-- &paramPresence - parameter presence requirement +-- &PublicKeySet - specifies which public keys are used with +-- this algorithm +-- &smimeCaps - contains the object describing how the S/MIME +-- capabilities are presented.
+-- +-- Example: +-- kta-rsaTransport KEY-TRANSPORT ::= { +-- IDENTIFIER &id +-- PARAMS TYPE NULL ARE required +-- PUBLIC-KEYS { pk-rsa | pk-rsa-pss } +-- } + +KEY-TRANSPORT ::= CLASS { + &id OBJECT IDENTIFIER UNIQUE, + &Params OPTIONAL, + &paramPresence ParamOptions DEFAULT absent, + &PublicKeySet PUBLIC-KEY OPTIONAL, + &smimeCaps SMIME-CAPS OPTIONAL +} WITH SYNTAX { + IDENTIFIER &id + [PARAMS [TYPE &Params] ARE &paramPresence] + [PUBLIC-KEYS &PublicKeySet] + [SMIME-CAPS &smimeCaps] +} + +-- KEY-AGREE +-- +-- Describes the basic properties of a key agreement algorithm +-- +-- &id - contains the OID identifying the key agreement algorithm +-- &Params - if present, contains the type for the algorithm +-- parameters; if absent, implies no parameters +-- &paramPresence - parameter presence requirement +-- &PublicKeySet - specifies which public keys are used with +-- this algorithm +-- &Ukm - type of user keying material used +-- &ukmPresence - specifies the requirements to define the UKM field +-- &smimeCaps - contains the object describing how the S/MIME +-- capabilities are presented. +-- +-- Example: +-- kaa-dh-static-ephemeral KEY-AGREE ::= { +-- IDENTIFIER id-alg-ESDH +-- PARAMS TYPE KeyWrapAlgorithm ARE required +-- PUBLIC-KEYS { +-- {IDENTIFIER dh-public-number KEY DHPublicKey +-- PARAMS TYPE DHDomainParameters ARE inheritable } +-- } +-- - - UKM should be present but is not separately ASN.1-encoded +-- UKM ARE preferredPresent +-- } + +KEY-AGREE ::= CLASS { + &id OBJECT IDENTIFIER UNIQUE, + &Params OPTIONAL, + &paramPresence ParamOptions DEFAULT absent, + &PublicKeySet PUBLIC-KEY OPTIONAL, + &Ukm OPTIONAL, + &ukmPresence ParamOptions DEFAULT absent, + &smimeCaps SMIME-CAPS OPTIONAL +} WITH SYNTAX { + IDENTIFIER &id + [PARAMS [TYPE &Params] ARE &paramPresence] + [PUBLIC-KEYS &PublicKeySet] + [UKM [TYPE &Ukm] ARE &ukmPresence] + [SMIME-CAPS &smimeCaps] +} + +-- KEY-WRAP +-- +-- Describes the basic properties of a key wrap algorithm +-- +-- &id - contains the OID identifying the key wrap algorithm +-- &Params - if present, contains the type for the algorithm +-- parameters; if absent, implies no parameters +-- &paramPresence - parameter presence requirement +-- &smimeCaps - contains the object describing how the S/MIME +-- capabilities are presented. +-- +-- Example: +-- kwa-cms3DESwrap KEY-WRAP ::= { +-- IDENTIFIER id-alg-CMS3DESwrap +-- PARAMS TYPE NULL ARE required +-- } + +KEY-WRAP ::= CLASS { + &id OBJECT IDENTIFIER UNIQUE, + &Params OPTIONAL, + &paramPresence ParamOptions DEFAULT absent, + &smimeCaps SMIME-CAPS OPTIONAL +} WITH SYNTAX { + IDENTIFIER &id + [PARAMS [TYPE &Params] ARE &paramPresence] + [SMIME-CAPS &smimeCaps] +} +-- KEY-DERIVATION +-- +-- Describes the basic properties of a key derivation algorithm +-- +-- &id - contains the OID identifying the key derivation algorithm +-- &Params - if present, contains the type for the algorithm +-- parameters; if absent, implies no parameters +-- &paramPresence - parameter presence requirement +-- &smimeCaps - contains the object describing how the S/MIME +-- capabilities are presented.
+-- +-- Example: +-- kda-pbkdf2 KEY-DERIVATION ::= { +-- IDENTIFIER id-PBKDF2 +-- PARAMS TYPE PBKDF2-params ARE required +-- } + +KEY-DERIVATION ::= CLASS { + &id OBJECT IDENTIFIER UNIQUE, + &Params OPTIONAL, + &paramPresence ParamOptions DEFAULT absent, + &smimeCaps SMIME-CAPS OPTIONAL +} WITH SYNTAX { + IDENTIFIER &id + [PARAMS [TYPE &Params] ARE &paramPresence] + [SMIME-CAPS &smimeCaps] +} + +-- MAC-ALGORITHM +-- +-- Describes the basic properties of a message +-- authentication code (MAC) algorithm +-- +-- &id - contains the OID identifying the MAC algorithm +-- &Params - if present, contains the type for the algorithm +-- parameters; if absent, implies no parameters +-- &paramPresence - parameter presence requirement +-- &keyed - MAC algorithm is a keyed MAC algorithm +-- &smimeCaps - contains the object describing how the S/MIME +-- capabilities are presented. +-- +-- Some parameters that perhaps should have been added would be +-- fields with the minimum and maximum MAC lengths for +-- those MAC algorithms that allow truncations. +-- +-- Example: +-- maca-hmac-sha1 MAC-ALGORITHM ::= { +-- IDENTIFIER hMAC-SHA1 +-- PARAMS TYPE NULL ARE preferredAbsent +-- IS KEYED MAC TRUE +-- SMIME-CAPS {IDENTIFIED BY hMAC-SHA1} +-- } + +MAC-ALGORITHM ::= CLASS { + &id OBJECT IDENTIFIER UNIQUE, + &Params OPTIONAL, + &paramPresence ParamOptions DEFAULT absent, + &keyed BOOLEAN, + &smimeCaps SMIME-CAPS OPTIONAL +} WITH SYNTAX { + IDENTIFIER &id + [PARAMS [TYPE &Params] ARE &paramPresence] + IS-KEYED-MAC &keyed + [SMIME-CAPS &smimeCaps] +} + +-- CONTENT-ENCRYPTION +-- +-- Describes the basic properties of a content encryption +-- algorithm +-- +-- &id - contains the OID identifying the content +-- encryption algorithm +-- &Params - if present, contains the type for the algorithm +-- parameters; if absent, implies no parameters +-- &paramPresence - parameter presence requirement +-- &smimeCaps - contains the object describing how the S/MIME +-- capabilities are presented. +-- +-- Example: +-- cea-3DES-cbc CONTENT-ENCRYPTION ::= { +-- IDENTIFIER des-ede3-cbc +-- PARAMS TYPE IV ARE required +-- SMIME-CAPS { IDENTIFIED BY des-ede3-cbc } +-- } + +CONTENT-ENCRYPTION ::= CLASS { + &id OBJECT IDENTIFIER UNIQUE, + &Params OPTIONAL, + &paramPresence ParamOptions DEFAULT absent, + &smimeCaps SMIME-CAPS OPTIONAL +} WITH SYNTAX { + IDENTIFIER &id + [PARAMS [TYPE &Params] ARE &paramPresence] + [SMIME-CAPS &smimeCaps] +} + +-- ALGORITHM +-- +-- Describes a generic algorithm identifier +-- +-- &id - contains the OID identifying the algorithm +-- &Params - if present, contains the type for the algorithm +-- parameters; if absent, implies no parameters +-- &paramPresence - parameter presence requirement +-- &smimeCaps - contains the object describing how the S/MIME +-- capabilities are presented. +-- +-- This would be used for cases where an algorithm of an unknown +-- type is used. In general however, one should either define +-- a more complete algorithm structure (such as the one above) +-- or use the TYPE-IDENTIFIER class. + +ALGORITHM ::= CLASS { + &id OBJECT IDENTIFIER UNIQUE, + &Params OPTIONAL, + &paramPresence ParamOptions DEFAULT absent, + &smimeCaps SMIME-CAPS OPTIONAL +} WITH SYNTAX { + IDENTIFIER &id + [PARAMS [TYPE &Params] ARE &paramPresence] + [SMIME-CAPS &smimeCaps] +} + +-- AlgorithmIdentifier +-- +-- Provides the generic structure that is used to encode algorithm +-- identification and the parameters associated with the +-- algorithm. +-- +-- The first parameter represents the type of the algorithm being +-- used.
+-- The second parameter represents an object set containing the +-- algorithms that may occur in this situation. +-- The initial list of required algorithms should occur to the +-- left of an extension marker; all other algorithms should +-- occur to the right of an extension marker. +-- +-- The object class ALGORITHM can be used for generic unspecified +-- items. +-- If new ALGORITHM classes are defined, the fields &id and &Params +-- need to be present as fields in the object in order to use +-- this parameterized type. +-- +-- Example: +-- SignatureAlgorithmIdentifier ::= +-- AlgorithmIdentifier{SIGNATURE-ALGORITHM, {SignatureAlgSet}} + +AlgorithmIdentifier{ALGORITHM-TYPE, ALGORITHM-TYPE:AlgorithmSet} ::= + SEQUENCE { + algorithm ALGORITHM-TYPE.&id({AlgorithmSet}), + parameters ALGORITHM-TYPE. + &Params({AlgorithmSet}{@algorithm}) OPTIONAL + } + +-- S/MIME Capabilities +-- +-- We have moved the SMIME-CAPS from the module for RFC 3851 to here +-- because it is used in RFC 4262 (X.509 Certificate Extension for +-- S/MIME Capabilities) +-- +-- +-- This class is used to represent an S/MIME capability. S/MIME +-- capabilities are used to represent what algorithm capabilities +-- an individual has. The classic example was the content encryption +-- algorithm RC2 where the algorithm id and the RC2 key lengths +-- supported needed to be advertised, but the IV used is not fixed. +-- Thus, for RC2 we used +-- +-- cap-RC2CBC SMIME-CAPS ::= { +-- TYPE INTEGER ( 40 | 128 ) IDENTIFIED BY rc2-cbc } +-- +-- where 40 and 128 represent the RC2 key length in number of bits. +-- +-- Another example where information needs to be shown is for +-- RSA-OAEP where only specific hash functions or mask generation +-- functions are supported, but the saltLength is specified by the +-- sender and not the recipient. In this case, one can either +-- generate a number of capability items, +-- or a new S/MIME capability type could be generated where +-- multiple hash functions could be specified. +-- +-- +-- SMIME-CAP +-- +-- This class is used to associate the type that describes the +-- capabilities with the object identifier. +-- + +SMIME-CAPS ::= CLASS { + &id OBJECT IDENTIFIER UNIQUE, + &Type OPTIONAL +} +WITH SYNTAX { [TYPE &Type] IDENTIFIED BY &id } + +-- +-- Generic type - this is used for defining values. 
+-- + +-- Define a single S/MIME capability encoding + +SMIMECapability{SMIME-CAPS:CapabilitySet} ::= SEQUENCE { + capabilityID SMIME-CAPS.&id({CapabilitySet}), + parameters SMIME-CAPS.&Type({CapabilitySet} + {@capabilityID}) OPTIONAL +} + +-- Define a sequence of S/MIME capability values + +SMIMECapabilities { SMIME-CAPS:CapabilitySet } ::= + SEQUENCE SIZE (1..MAX) OF SMIMECapability{{CapabilitySet} } + +END diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/AttributeCertificateVersion1-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/AttributeCertificateVersion1-2009.asn1 new file mode 100644 index 0000000000..46b431af40 --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/AttributeCertificateVersion1-2009.asn1 @@ -0,0 +1,59 @@ + AttributeCertificateVersion1-2009 + {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9) + smime(16) modules(0) id-mod-v1AttrCert-02(49)} + DEFINITIONS EXPLICIT TAGS ::= + BEGIN + IMPORTS + + SIGNATURE-ALGORITHM, ALGORITHM, AlgorithmIdentifier{} + FROM AlgorithmInformation-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) + id-mod-algorithmInformation-02(58)} + + AttributeSet{}, Extensions{}, EXTENSION, ATTRIBUTE + FROM PKIX-CommonTypes-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57) } + + CertificateSerialNumber, UniqueIdentifier, SIGNED{} + FROM PKIX1Explicit-2009 + { iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51) } + + GeneralNames + FROM PKIX1Implicit-2009 + { iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59) } + + AttCertValidityPeriod, IssuerSerial + FROM PKIXAttributeCertificate-2009 + { iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-attribute-cert-02(47) } ; + + -- Definition extracted from X.509-1997 [X.509-97], but + -- different type names are used to avoid collisions. 
+ + AttributeCertificateV1 ::= SIGNED{AttributeCertificateInfoV1} + + AttributeCertificateInfoV1 ::= SEQUENCE { + version AttCertVersionV1 DEFAULT v1, + subject CHOICE { + baseCertificateID [0] IssuerSerial, + -- associated with a Public Key Certificate + subjectName [1] GeneralNames }, + -- associated with a name + issuer GeneralNames, + signature AlgorithmIdentifier{SIGNATURE-ALGORITHM, {...}}, + serialNumber CertificateSerialNumber, + attCertValidityPeriod AttCertValidityPeriod, + attributes SEQUENCE OF AttributeSet{{AttrList}}, + issuerUniqueID UniqueIdentifier OPTIONAL, + extensions Extensions{{AttributeCertExtensionsV1}} OPTIONAL } + + AttCertVersionV1 ::= INTEGER { v1(0) } + + AttrList ATTRIBUTE ::= {...} + AttributeCertExtensionsV1 EXTENSION ::= {...} + + END diff --git a/lib/asn1/test/asn1_SUITE_data/x420/AuthenticationFramework.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/AuthenticationFramework.asn1 index 5cfa9062f0..5cfa9062f0 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/AuthenticationFramework.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/AuthenticationFramework.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/BasicAccessControl.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/BasicAccessControl.asn1 index d8b2b687ae..d8b2b687ae 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/BasicAccessControl.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/BasicAccessControl.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/CertificateExtensions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/CertificateExtensions.asn1 index 0daf2208e9..0daf2208e9 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/CertificateExtensions.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/CertificateExtensions.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Character-Coding-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Coding-Attributes.asn1 index 04060cf060..04060cf060 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Character-Coding-Attributes.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Coding-Attributes.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Character-Presentation-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Presentation-Attributes.asn1 index aed48ac26b..aed48ac26b 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Character-Presentation-Attributes.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Presentation-Attributes.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Character-Profile-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Profile-Attributes.asn1 index 7ba5bf194a..7ba5bf194a 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Character-Profile-Attributes.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Profile-Attributes.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Colour-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Colour-Attributes.asn1 index 24c7fafc38..24c7fafc38 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Colour-Attributes.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Colour-Attributes.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntax-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntax-2009.asn1 new file mode 100644 index 0000000000..3e350294be --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntax-2009.asn1 @@ -0,0 +1,463 @@ + CryptographicMessageSyntax-2009 + { iso(1) member-body(2) us(840) rsadsi(113549) + pkcs(1) pkcs-9(9) smime(16) modules(0) id-mod-cms-2004-02(41) } + DEFINITIONS IMPLICIT TAGS ::= + BEGIN + IMPORTS + + 
ParamOptions, DIGEST-ALGORITHM, SIGNATURE-ALGORITHM, + PUBLIC-KEY, KEY-DERIVATION, KEY-WRAP, MAC-ALGORITHM, + KEY-AGREE, KEY-TRANSPORT, CONTENT-ENCRYPTION, ALGORITHM, + AlgorithmIdentifier + FROM AlgorithmInformation-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) + id-mod-algorithmInformation-02(58)} + SignatureAlgs, MessageDigestAlgs, KeyAgreementAlgs, + MessageAuthAlgs, KeyWrapAlgs, ContentEncryptionAlgs, + KeyTransportAlgs, KeyDerivationAlgs, KeyAgreePublicKeys + FROM CryptographicMessageSyntaxAlgorithms-2009 + { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9) + smime(16) modules(0) id-mod-cmsalg-2001-02(37) } + + Certificate, CertificateList, CertificateSerialNumber, + Name, ATTRIBUTE + FROM PKIX1Explicit-2009 + { iso(1) identified-organization(3) dod(6) internet(1) + security(5) mechanisms(5) pkix(7) id-mod(0) + id-mod-pkix1-explicit-02(51) } + + AttributeCertificate + FROM PKIXAttributeCertificate-2009 + { iso(1) identified-organization(3) dod(6) internet(1) + security(5) mechanisms(5) pkix(7) id-mod(0) + id-mod-attribute-cert-02(47) } + + AttributeCertificateV1 + FROM AttributeCertificateVersion1-2009 + { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9) + smime(16) modules(0) id-mod-v1AttrCert-02(49) } ; + + -- Cryptographic Message Syntax + + -- The following are used for version numbers using the ASN.1 + -- idiom "[[n:" + -- Version 1 = PKCS #7 + -- Version 2 = S/MIME V2 + -- Version 3 = RFC 2630 + -- Version 4 = RFC 3369 + -- Version 5 = RFC 3852 + + CONTENT-TYPE ::= TYPE-IDENTIFIER + ContentType ::= CONTENT-TYPE.&id + + ContentInfo ::= SEQUENCE { + contentType CONTENT-TYPE. + &id({ContentSet}), + content [0] EXPLICIT CONTENT-TYPE. + &Type({ContentSet}{@contentType})} + + ContentSet CONTENT-TYPE ::= { + -- Define the set of content types to be recognized. + ct-Data | ct-SignedData | ct-EncryptedData | ct-EnvelopedData | + ct-AuthenticatedData | ct-DigestedData, ... } + + SignedData ::= SEQUENCE { + version CMSVersion, + digestAlgorithms SET OF DigestAlgorithmIdentifier, + encapContentInfo EncapsulatedContentInfo, + certificates [0] IMPLICIT CertificateSet OPTIONAL, + crls [1] IMPLICIT RevocationInfoChoices OPTIONAL, + signerInfos SignerInfos } + + SignerInfos ::= SET OF SignerInfo + + EncapsulatedContentInfo ::= SEQUENCE { + eContentType CONTENT-TYPE.&id({ContentSet}), + eContent [0] EXPLICIT OCTET STRING + ( CONTAINING CONTENT-TYPE. + &Type({ContentSet}{@eContentType})) OPTIONAL } + + SignerInfo ::= SEQUENCE { + version CMSVersion, + sid SignerIdentifier, + digestAlgorithm DigestAlgorithmIdentifier, + signedAttrs [0] IMPLICIT SignedAttributes OPTIONAL, + signatureAlgorithm SignatureAlgorithmIdentifier, + signature SignatureValue, + unsignedAttrs [1] IMPLICIT Attributes + {{UnsignedAttributes}} OPTIONAL } + + SignedAttributes ::= Attributes {{ SignedAttributesSet }} + + SignerIdentifier ::= CHOICE { + issuerAndSerialNumber IssuerAndSerialNumber, + ..., + [[3: subjectKeyIdentifier [0] SubjectKeyIdentifier ]] } + + SignedAttributesSet ATTRIBUTE ::= + { aa-signingTime | aa-messageDigest | aa-contentType, ... } + + UnsignedAttributes ATTRIBUTE ::= { aa-countersignature, ... 
} + + SignatureValue ::= OCTET STRING + + EnvelopedData ::= SEQUENCE { + version CMSVersion, + originatorInfo [0] IMPLICIT OriginatorInfo OPTIONAL, + recipientInfos RecipientInfos, + encryptedContentInfo EncryptedContentInfo, + ..., + [[2: unprotectedAttrs [1] IMPLICIT Attributes + {{ UnprotectedAttributes }} OPTIONAL ]] } + + OriginatorInfo ::= SEQUENCE { + certs [0] IMPLICIT CertificateSet OPTIONAL, + crls [1] IMPLICIT RevocationInfoChoices OPTIONAL } + + RecipientInfos ::= SET SIZE (1..MAX) OF RecipientInfo + + EncryptedContentInfo ::= SEQUENCE { + contentType CONTENT-TYPE.&id({ContentSet}), + contentEncryptionAlgorithm ContentEncryptionAlgorithmIdentifier, + encryptedContent [0] IMPLICIT OCTET STRING OPTIONAL } + + -- If you want to do constraints, you might use: + -- EncryptedContentInfo ::= SEQUENCE { + -- contentType CONTENT-TYPE.&id({ContentSet}), + -- contentEncryptionAlgorithm ContentEncryptionAlgorithmIdentifier, + -- encryptedContent [0] IMPLICIT ENCRYPTED {CONTENT-TYPE. + -- &Type({ContentSet}{@contentType}) OPTIONAL } + -- ENCRYPTED {ToBeEncrypted} ::= OCTET STRING ( CONSTRAINED BY + -- { ToBeEncrypted } ) + + UnprotectedAttributes ATTRIBUTE ::= { ... } + + RecipientInfo ::= CHOICE { + ktri KeyTransRecipientInfo, + ..., + [[3: kari [1] KeyAgreeRecipientInfo ]], + [[4: kekri [2] KEKRecipientInfo]], + [[5: pwri [3] PasswordRecipientInfo, + ori [4] OtherRecipientInfo ]] } + + EncryptedKey ::= OCTET STRING + + KeyTransRecipientInfo ::= SEQUENCE { + version CMSVersion, -- always set to 0 or 2 + rid RecipientIdentifier, + keyEncryptionAlgorithm AlgorithmIdentifier + {KEY-TRANSPORT, {KeyTransportAlgorithmSet}}, + encryptedKey EncryptedKey } + + KeyTransportAlgorithmSet KEY-TRANSPORT ::= { KeyTransportAlgs, ... } + + RecipientIdentifier ::= CHOICE { + issuerAndSerialNumber IssuerAndSerialNumber, + ..., + [[2: subjectKeyIdentifier [0] SubjectKeyIdentifier ]] } + KeyAgreeRecipientInfo ::= SEQUENCE { + version CMSVersion, -- always set to 3 + originator [0] EXPLICIT OriginatorIdentifierOrKey, + ukm [1] EXPLICIT UserKeyingMaterial OPTIONAL, + keyEncryptionAlgorithm AlgorithmIdentifier + {KEY-AGREE, {KeyAgreementAlgorithmSet}}, + recipientEncryptedKeys RecipientEncryptedKeys } + + KeyAgreementAlgorithmSet KEY-AGREE ::= { KeyAgreementAlgs, ... } + + OriginatorIdentifierOrKey ::= CHOICE { + issuerAndSerialNumber IssuerAndSerialNumber, + subjectKeyIdentifier [0] SubjectKeyIdentifier, + originatorKey [1] OriginatorPublicKey } + + OriginatorPublicKey ::= SEQUENCE { + algorithm AlgorithmIdentifier {PUBLIC-KEY, {OriginatorKeySet}}, + publicKey BIT STRING } + + OriginatorKeySet PUBLIC-KEY ::= { KeyAgreePublicKeys, ... 
} + + RecipientEncryptedKeys ::= SEQUENCE OF RecipientEncryptedKey + + RecipientEncryptedKey ::= SEQUENCE { + rid KeyAgreeRecipientIdentifier, + encryptedKey EncryptedKey } + + KeyAgreeRecipientIdentifier ::= CHOICE { + issuerAndSerialNumber IssuerAndSerialNumber, + rKeyId [0] IMPLICIT RecipientKeyIdentifier } + + RecipientKeyIdentifier ::= SEQUENCE { + subjectKeyIdentifier SubjectKeyIdentifier, + date GeneralizedTime OPTIONAL, + other OtherKeyAttribute OPTIONAL } + + SubjectKeyIdentifier ::= OCTET STRING + + KEKRecipientInfo ::= SEQUENCE { + version CMSVersion, -- always set to 4 + kekid KEKIdentifier, + keyEncryptionAlgorithm KeyEncryptionAlgorithmIdentifier, + encryptedKey EncryptedKey } + + KEKIdentifier ::= SEQUENCE { + keyIdentifier OCTET STRING, + date GeneralizedTime OPTIONAL, + other OtherKeyAttribute OPTIONAL } + PasswordRecipientInfo ::= SEQUENCE { + version CMSVersion, -- always set to 0 + keyDerivationAlgorithm [0] KeyDerivationAlgorithmIdentifier + OPTIONAL, + keyEncryptionAlgorithm KeyEncryptionAlgorithmIdentifier, + encryptedKey EncryptedKey } + + OTHER-RECIPIENT ::= TYPE-IDENTIFIER + + OtherRecipientInfo ::= SEQUENCE { + oriType OTHER-RECIPIENT. + &id({SupportedOtherRecipInfo}), + oriValue OTHER-RECIPIENT. + &Type({SupportedOtherRecipInfo}{@oriType})} + + SupportedOtherRecipInfo OTHER-RECIPIENT ::= { ... } + + DigestedData ::= SEQUENCE { + version CMSVersion, + digestAlgorithm DigestAlgorithmIdentifier, + encapContentInfo EncapsulatedContentInfo, + digest Digest, ... } + + Digest ::= OCTET STRING + + EncryptedData ::= SEQUENCE { + version CMSVersion, + encryptedContentInfo EncryptedContentInfo, + ..., + [[2: unprotectedAttrs [1] IMPLICIT Attributes + {{UnprotectedAttributes}} OPTIONAL ]] } + + AuthenticatedData ::= SEQUENCE { + version CMSVersion, + originatorInfo [0] IMPLICIT OriginatorInfo OPTIONAL, + recipientInfos RecipientInfos, + macAlgorithm MessageAuthenticationCodeAlgorithm, + digestAlgorithm [1] DigestAlgorithmIdentifier OPTIONAL, + encapContentInfo EncapsulatedContentInfo, + authAttrs [2] IMPLICIT AuthAttributes OPTIONAL, + mac MessageAuthenticationCode, + unauthAttrs [3] IMPLICIT UnauthAttributes OPTIONAL } + + AuthAttributes ::= SET SIZE (1..MAX) OF Attribute + {{AuthAttributeSet}} + + AuthAttributeSet ATTRIBUTE ::= { aa-contentType | aa-messageDigest + | aa-signingTime, ...} + MessageAuthenticationCode ::= OCTET STRING + + UnauthAttributes ::= SET SIZE (1..MAX) OF Attribute + {{UnauthAttributeSet}} + + UnauthAttributeSet ATTRIBUTE ::= {...} + + -- + -- General algorithm definitions + -- + + DigestAlgorithmIdentifier ::= AlgorithmIdentifier + {DIGEST-ALGORITHM, {DigestAlgorithmSet}} + + DigestAlgorithmSet DIGEST-ALGORITHM ::= { + CryptographicMessageSyntaxAlgorithms-2009.MessageDigestAlgs, ... } + + SignatureAlgorithmIdentifier ::= AlgorithmIdentifier + {SIGNATURE-ALGORITHM, {SignatureAlgorithmSet}} + + SignatureAlgorithmSet SIGNATURE-ALGORITHM ::= + { SignatureAlgs, ... } + + KeyEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier + {KEY-WRAP, {KeyEncryptionAlgorithmSet}} + + KeyEncryptionAlgorithmSet KEY-WRAP ::= { KeyWrapAlgs, ... } + + ContentEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier + {CONTENT-ENCRYPTION, {ContentEncryptionAlgorithmSet}} + + ContentEncryptionAlgorithmSet CONTENT-ENCRYPTION ::= + { ContentEncryptionAlgs, ... } + + MessageAuthenticationCodeAlgorithm ::= AlgorithmIdentifier + {MAC-ALGORITHM, {MessageAuthenticationCodeAlgorithmSet}} + + MessageAuthenticationCodeAlgorithmSet MAC-ALGORITHM ::= + { MessageAuthAlgs, ... 
} + + KeyDerivationAlgorithmIdentifier ::= AlgorithmIdentifier + {KEY-DERIVATION, {KeyDerivationAlgs, ...}} + + RevocationInfoChoices ::= SET OF RevocationInfoChoice + + RevocationInfoChoice ::= CHOICE { + crl CertificateList, + ..., + [[5: other [1] IMPLICIT OtherRevocationInfoFormat ]] } + + OTHER-REVOK-INFO ::= TYPE-IDENTIFIER + + OtherRevocationInfoFormat ::= SEQUENCE { + otherRevInfoFormat OTHER-REVOK-INFO. + &id({SupportedOtherRevokInfo}), + otherRevInfo OTHER-REVOK-INFO. + &Type({SupportedOtherRevokInfo}{@otherRevInfoFormat})} + + SupportedOtherRevokInfo OTHER-REVOK-INFO ::= { ... } + + CertificateChoices ::= CHOICE { + certificate Certificate, + extendedCertificate [0] IMPLICIT ExtendedCertificate, + -- Obsolete + ..., + [[3: v1AttrCert [1] IMPLICIT AttributeCertificateV1]], + -- Obsolete + [[4: v2AttrCert [2] IMPLICIT AttributeCertificateV2]], + [[5: other [3] IMPLICIT OtherCertificateFormat]] } + + AttributeCertificateV2 ::= AttributeCertificate + + OTHER-CERT-FMT ::= TYPE-IDENTIFIER + + OtherCertificateFormat ::= SEQUENCE { + otherCertFormat OTHER-CERT-FMT. + &id({SupportedCertFormats}), + otherCert OTHER-CERT-FMT. + &Type({SupportedCertFormats}{@otherCertFormat})} + + SupportedCertFormats OTHER-CERT-FMT ::= { ... } + + CertificateSet ::= SET OF CertificateChoices + + IssuerAndSerialNumber ::= SEQUENCE { + issuer Name, + serialNumber CertificateSerialNumber } + + CMSVersion ::= INTEGER { v0(0), v1(1), v2(2), v3(3), v4(4), v5(5) } + + UserKeyingMaterial ::= OCTET STRING + + KEY-ATTRIBUTE ::= TYPE-IDENTIFIER + + OtherKeyAttribute ::= SEQUENCE { + keyAttrId KEY-ATTRIBUTE. + + &id({SupportedKeyAttributes}), + keyAttr KEY-ATTRIBUTE. + &Type({SupportedKeyAttributes}{@keyAttrId})} + + SupportedKeyAttributes KEY-ATTRIBUTE ::= { ... } + + -- Content Type Object Identifiers + + id-ct-contentInfo OBJECT IDENTIFIER ::= { iso(1) member-body(2) + us(840) rsadsi(113549) pkcs(1) pkcs9(9) smime(16) ct(1) 6 } + + ct-Data CONTENT-TYPE ::= {OCTET STRING IDENTIFIED BY id-data} + + id-data OBJECT IDENTIFIER ::= { iso(1) member-body(2) + us(840) rsadsi(113549) pkcs(1) pkcs7(7) 1 } + + ct-SignedData CONTENT-TYPE ::= + { SignedData IDENTIFIED BY id-signedData} + + id-signedData OBJECT IDENTIFIER ::= { iso(1) member-body(2) + us(840) rsadsi(113549) pkcs(1) pkcs7(7) 2 } + + ct-EnvelopedData CONTENT-TYPE ::= + { EnvelopedData IDENTIFIED BY id-envelopedData} + + id-envelopedData OBJECT IDENTIFIER ::= { iso(1) member-body(2) + us(840) rsadsi(113549) pkcs(1) pkcs7(7) 3 } + + ct-DigestedData CONTENT-TYPE ::= + { DigestedData IDENTIFIED BY id-digestedData} + + id-digestedData OBJECT IDENTIFIER ::= { iso(1) member-body(2) + us(840) rsadsi(113549) pkcs(1) pkcs7(7) 5 } + + ct-EncryptedData CONTENT-TYPE ::= + { EncryptedData IDENTIFIED BY id-encryptedData} + + id-encryptedData OBJECT IDENTIFIER ::= { iso(1) member-body(2) + us(840) rsadsi(113549) pkcs(1) pkcs7(7) 6 } + + ct-AuthenticatedData CONTENT-TYPE ::= + { AuthenticatedData IDENTIFIED BY id-ct-authData} + + id-ct-authData OBJECT IDENTIFIER ::= { iso(1) member-body(2) + us(840) rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) ct(1) 2 } + + -- + -- The CMS Attributes + -- + + MessageDigest ::= OCTET STRING + + SigningTime ::= Time + + Time ::= CHOICE { + utcTime UTCTime, + generalTime GeneralizedTime } + + Countersignature ::= SignerInfo + + -- Attribute Object Identifiers + + aa-contentType ATTRIBUTE ::= + { TYPE ContentType IDENTIFIED BY id-contentType } + id-contentType OBJECT IDENTIFIER ::= { iso(1) member-body(2) + us(840) rsadsi(113549) pkcs(1) pkcs9(9) 3 } + + 
aa-messageDigest ATTRIBUTE ::= + { TYPE MessageDigest IDENTIFIED BY id-messageDigest} + id-messageDigest OBJECT IDENTIFIER ::= { iso(1) member-body(2) + us(840) rsadsi(113549) pkcs(1) pkcs9(9) 4 } + + aa-signingTime ATTRIBUTE ::= + { TYPE SigningTime IDENTIFIED BY id-signingTime } + id-signingTime OBJECT IDENTIFIER ::= { iso(1) member-body(2) + us(840) rsadsi(113549) pkcs(1) pkcs9(9) 5 } + + aa-countersignature ATTRIBUTE ::= + { TYPE Countersignature IDENTIFIED BY id-countersignature } + id-countersignature OBJECT IDENTIFIER ::= { iso(1) member-body(2) + us(840) rsadsi(113549) pkcs(1) pkcs9(9) 6 } + + -- + -- Obsolete Extended Certificate syntax from PKCS#6 + -- + + ExtendedCertificateOrCertificate ::= CHOICE { + certificate Certificate, + extendedCertificate [0] IMPLICIT ExtendedCertificate } + + ExtendedCertificate ::= SEQUENCE { + extendedCertificateInfo ExtendedCertificateInfo, + signatureAlgorithm SignatureAlgorithmIdentifier, + signature Signature } + + ExtendedCertificateInfo ::= SEQUENCE { + version CMSVersion, + certificate Certificate, + attributes UnauthAttributes } + + Signature ::= BIT STRING + + Attribute{ ATTRIBUTE:AttrList } ::= SEQUENCE { + attrType ATTRIBUTE. + &id({AttrList}), + attrValues SET OF ATTRIBUTE. + &Type({AttrList}{@attrType}) } + + Attributes { ATTRIBUTE:AttrList } ::= + SET SIZE (1..MAX) OF Attribute {{ AttrList }} + + END diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntaxAlgorithms-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntaxAlgorithms-2009.asn1 new file mode 100644 index 0000000000..72e8b270db --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntaxAlgorithms-2009.asn1 @@ -0,0 +1,248 @@ + CryptographicMessageSyntaxAlgorithms-2009 + { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9) + smime(16) modules(0) id-mod-cmsalg-2001-02(37) } + DEFINITIONS IMPLICIT TAGS ::= + BEGIN + IMPORTS + + ParamOptions, DIGEST-ALGORITHM, SIGNATURE-ALGORITHM, + PUBLIC-KEY, KEY-DERIVATION, KEY-WRAP, MAC-ALGORITHM, + KEY-AGREE, KEY-TRANSPORT, CONTENT-ENCRYPTION, ALGORITHM, + AlgorithmIdentifier{}, SMIME-CAPS + FROM AlgorithmInformation-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) + id-mod-algorithmInformation-02(58)} + + pk-rsa, pk-dh, pk-dsa, rsaEncryption, DHPublicKey, dhpublicnumber + FROM PKIXAlgs-2009 + {iso(1) identified-organization(3) dod(6) + internet(1) security(5) mechanisms(5) pkix(7) id-mod(0) + id-mod-pkix1-algorithms2008-02(56)} + + cap-RC2CBC + FROM SecureMimeMessageV3dot1-2009 + {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9) + smime(16) modules(0) id-mod-msg-v3dot1-02(39)}; + + -- 2. Hash algorithms in this document + + MessageDigestAlgs DIGEST-ALGORITHM ::= { + -- mda-md5 | mda-sha1, + ... } + + -- 3. Signature algorithms in this document + + SignatureAlgs SIGNATURE-ALGORITHM ::= { + -- See RFC 3279 + -- sa-dsaWithSHA1 | sa-rsaWithMD5 | sa-rsaWithSHA1, + ... } + + -- 4. Key Management Algorithms + -- 4.1 Key Agreement Algorithms + + KeyAgreementAlgs KEY-AGREE ::= { kaa-esdh | kaa-ssdh, ...} + KeyAgreePublicKeys PUBLIC-KEY ::= { pk-dh, ...} + + -- 4.2 Key Transport Algorithms + + KeyTransportAlgs KEY-TRANSPORT ::= { kt-rsa, ... } + + -- 4.3 Symmetric Key-Encryption Key Algorithms + + KeyWrapAlgs KEY-WRAP ::= { kwa-3DESWrap | kwa-RC2Wrap, ... } + + -- 4.4 Key Derivation Algorithms + + KeyDerivationAlgs KEY-DERIVATION ::= { kda-PBKDF2, ... } + + -- 5. 
Content Encryption Algorithms + + ContentEncryptionAlgs CONTENT-ENCRYPTION ::= + { cea-3DES-cbc | cea-RC2-cbc, ... } + + -- 6. Message Authentication Code Algorithms + + MessageAuthAlgs MAC-ALGORITHM ::= { maca-hMAC-SHA1, ... } + + -- S/MIME Capabilities for these items + + SMimeCaps SMIME-CAPS ::= { + kaa-esdh.&smimeCaps | + kaa-ssdh.&smimeCaps | + kt-rsa.&smimeCaps | + kwa-3DESWrap.&smimeCaps | + kwa-RC2Wrap.&smimeCaps | + cea-3DES-cbc.&smimeCaps | + cea-RC2-cbc.&smimeCaps | + maca-hMAC-SHA1.&smimeCaps, + ...} + + -- + -- + -- + + -- Algorithm Identifiers + + -- rsaEncryption OBJECT IDENTIFIER ::= { iso(1) member-body(2) + -- us(840) rsadsi(113549) pkcs(1) pkcs-1(1) 1 } + + id-alg-ESDH OBJECT IDENTIFIER ::= { iso(1) member-body(2) us(840) + rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) alg(3) 5 } + + id-alg-SSDH OBJECT IDENTIFIER ::= { iso(1) member-body(2) us(840) + rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) alg(3) 10 } + + id-alg-CMS3DESwrap OBJECT IDENTIFIER ::= { iso(1) member-body(2) + us(840) rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) alg(3) 6 } + + id-alg-CMSRC2wrap OBJECT IDENTIFIER ::= { iso(1) member-body(2) + us(840) rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) alg(3) 7 } + + des-ede3-cbc OBJECT IDENTIFIER ::= { iso(1) member-body(2) + us(840) rsadsi(113549) encryptionAlgorithm(3) 7 } + + rc2-cbc OBJECT IDENTIFIER ::= { iso(1) member-body(2) us(840) + rsadsi(113549) encryptionAlgorithm(3) 2 } + + hMAC-SHA1 OBJECT IDENTIFIER ::= { iso(1) identified-organization(3) + dod(6) internet(1) security(5) mechanisms(5) 8 1 2 } + + id-PBKDF2 OBJECT IDENTIFIER ::= { iso(1) member-body(2) us(840) + rsadsi(113549) pkcs(1) pkcs-5(5) 12 } + + -- Algorithm Identifier Parameter Types + + KeyWrapAlgorithm ::= + AlgorithmIdentifier {KEY-WRAP, {KeyWrapAlgs }} + + RC2wrapParameter ::= RC2ParameterVersion + RC2ParameterVersion ::= INTEGER + + CBCParameter ::= IV + + IV ::= OCTET STRING -- exactly 8 octets + + RC2CBCParameter ::= SEQUENCE { + rc2ParameterVersion INTEGER (1..256), + iv OCTET STRING } -- exactly 8 octets + + maca-hMAC-SHA1 MAC-ALGORITHM ::= { + IDENTIFIER hMAC-SHA1 + PARAMS TYPE NULL ARE preferredAbsent + IS-KEYED-MAC TRUE + SMIME-CAPS {IDENTIFIED BY hMAC-SHA1} + } + + PBKDF2-PRFsAlgorithmIdentifier ::= AlgorithmIdentifier{ ALGORITHM, + {PBKDF2-PRFs} } + + alg-hMAC-SHA1 ALGORITHM ::= + { IDENTIFIER hMAC-SHA1 PARAMS TYPE NULL ARE required } + + PBKDF2-PRFs ALGORITHM ::= { alg-hMAC-SHA1, ... } + + PBKDF2-SaltSources ALGORITHM ::= { ... } + + PBKDF2-SaltSourcesAlgorithmIdentifier ::= + AlgorithmIdentifier {ALGORITHM, {PBKDF2-SaltSources}} + + defaultPBKDF2 PBKDF2-PRFsAlgorithmIdentifier ::= + { algorithm alg-hMAC-SHA1.&id, parameters NULL:NULL } + + PBKDF2-params ::= SEQUENCE { + salt CHOICE { + specified OCTET STRING, + otherSource PBKDF2-SaltSourcesAlgorithmIdentifier }, + iterationCount INTEGER (1..MAX), + keyLength INTEGER (1..MAX) OPTIONAL, + prf PBKDF2-PRFsAlgorithmIdentifier DEFAULT + defaultPBKDF2 + } + + -- + -- This object is included for completeness. It should not be used + -- for encoding of signatures, but was sometimes used in older + -- versions of CMS for encoding of RSA signatures. 
+ -- + -- + -- sa-rsa SIGNATURE-ALGORITHM ::= { + -- IDENTIFIER rsaEncryption + -- - - value is not ASN.1 encoded + -- PARAMS TYPE NULL ARE required + -- HASHES {mda-sha1 | mda-md5, ...} + -- PUBLIC-KEYS { pk-rsa} + -- } + -- + -- No ASN.1 encoding is applied to the signature value + -- for these items + + kaa-esdh KEY-AGREE ::= { + IDENTIFIER id-alg-ESDH + PARAMS TYPE KeyWrapAlgorithm ARE required + PUBLIC-KEYS { pk-dh } + -- UKM is not ASN.1 encoded + UKM ARE optional + SMIME-CAPS {TYPE KeyWrapAlgorithm IDENTIFIED BY id-alg-ESDH} + } + + kaa-ssdh KEY-AGREE ::= { + IDENTIFIER id-alg-SSDH + PARAMS TYPE KeyWrapAlgorithm ARE required + PUBLIC-KEYS {pk-dh} + -- UKM is not ASN.1 encoded + UKM ARE optional + SMIME-CAPS {TYPE KeyWrapAlgorithm IDENTIFIED BY id-alg-SSDH} + } + + dh-public-number OBJECT IDENTIFIER ::= dhpublicnumber + + pk-originator-dh PUBLIC-KEY ::= { + IDENTIFIER dh-public-number + KEY DHPublicKey + PARAMS ARE absent + CERT-KEY-USAGE {keyAgreement, encipherOnly, decipherOnly} + } + + kwa-3DESWrap KEY-WRAP ::= { + IDENTIFIER id-alg-CMS3DESwrap + PARAMS TYPE NULL ARE required + SMIME-CAPS {IDENTIFIED BY id-alg-CMS3DESwrap} + } + + kwa-RC2Wrap KEY-WRAP ::= { + IDENTIFIER id-alg-CMSRC2wrap + PARAMS TYPE RC2wrapParameter ARE required + SMIME-CAPS { IDENTIFIED BY id-alg-CMSRC2wrap } + } + + kda-PBKDF2 KEY-DERIVATION ::= { + IDENTIFIER id-PBKDF2 + PARAMS TYPE PBKDF2-params ARE required + -- No S/MIME caps defined + } + + cea-3DES-cbc CONTENT-ENCRYPTION ::= { + IDENTIFIER des-ede3-cbc + PARAMS TYPE IV ARE required + SMIME-CAPS { IDENTIFIED BY des-ede3-cbc } + } + + cea-RC2-cbc CONTENT-ENCRYPTION ::= { + IDENTIFIER rc2-cbc + PARAMS TYPE RC2CBCParameter ARE required + SMIME-CAPS cap-RC2CBC + } + + kt-rsa KEY-TRANSPORT ::= { + IDENTIFIER rsaEncryption + PARAMS TYPE NULL ARE required + PUBLIC-KEYS { pk-rsa } + SMIME-CAPS {IDENTIFIED BY rsaEncryption} + } + + -- S/MIME Capabilities - most have no label. 
+ + cap-3DESwrap SMIME-CAPS ::= { IDENTIFIED BY id-alg-CMS3DESwrap } + + END diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DOR-definition.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DOR-definition.asn1 index cd3330dc56..cd3330dc56 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/DOR-definition.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DOR-definition.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DSAOperationalAttributeTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DSAOperationalAttributeTypes.asn1 index df5e8489ea..df5e8489ea 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/DSAOperationalAttributeTypes.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DSAOperationalAttributeTypes.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Default-Value-Lists.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Default-Value-Lists.asn1 index ef1187ba8c..ef1187ba8c 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Default-Value-Lists.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Default-Value-Lists.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAbstractService.asn1 index 5a5d310729..5a5d310729 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryAbstractService.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAbstractService.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryAccessProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAccessProtocol.asn1 index 10d6979f6d..10d6979f6d 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryAccessProtocol.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAccessProtocol.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryInformationShadowProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryInformationShadowProtocol.asn1 index 91c0a865f7..91c0a865f7 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryInformationShadowProtocol.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryInformationShadowProtocol.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingManagementProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingManagementProtocol.asn1 index e3e1f95621..e3e1f95621 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingManagementProtocol.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingManagementProtocol.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingTypes.asn1 index 9df5d2783a..9df5d2783a 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingTypes.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingTypes.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryProtectionMappings.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryProtectionMappings.asn1 index 37c6cac261..37c6cac261 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryProtectionMappings.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryProtectionMappings.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryShadowAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryShadowAbstractService.asn1 index acbb692b6f..acbb692b6f 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryShadowAbstractService.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryShadowAbstractService.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectorySystemProtocol.asn 
b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectorySystemProtocol.asn1 index cace79d109..cace79d109 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/DirectorySystemProtocol.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectorySystemProtocol.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DistributedOperations.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DistributedOperations.asn1 index 72e791f10c..72e791f10c 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/DistributedOperations.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DistributedOperations.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Document-Profile-Descriptor.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Document-Profile-Descriptor.asn1 index d8c15b7afa..d8c15b7afa 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Document-Profile-Descriptor.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Document-Profile-Descriptor.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/EnhancedSecurity.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/EnhancedSecurity.asn1 index 9991a59454..9991a59454 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/EnhancedSecurity.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/EnhancedSecurity.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/EnrollmentMessageSyntax-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/EnrollmentMessageSyntax-2009.asn1 new file mode 100644 index 0000000000..17a45a0a6b --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/EnrollmentMessageSyntax-2009.asn1 @@ -0,0 +1,543 @@ + EnrollmentMessageSyntax-2009 + {iso(1) identified-organization(3) dod(6) internet(1) + security(5) mechanisms(5) pkix(7) id-mod(0) id-mod-cmc2002-02(53)} + DEFINITIONS IMPLICIT TAGS ::= + BEGIN + EXPORTS ALL; + IMPORTS + + AttributeSet{}, Extension{}, EXTENSION, ATTRIBUTE + FROM PKIX-CommonTypes-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)} + AlgorithmIdentifier{}, DIGEST-ALGORITHM, KEY-WRAP, KEY-DERIVATION, + MAC-ALGORITHM, SIGNATURE-ALGORITHM, PUBLIC-KEY + FROM AlgorithmInformation-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) + id-mod-algorithmInformation-02(58)} + + CertificateSerialNumber, GeneralName, CRLReason, ReasonFlags, + CertExtensions + FROM PKIX1Implicit-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)} + + Name, id-pkix, PublicKeyAlgorithms, SignatureAlgorithms + FROM PKIX1Explicit-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)} + + ContentInfo, IssuerAndSerialNumber, CONTENT-TYPE + FROM CryptographicMessageSyntax-2009 + { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9) + smime(16) modules(0) id-mod-cms-2004-02(41)} + + CertReqMsg, PKIPublicationInfo, CertTemplate + FROM PKIXCRMF-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-crmf2005-02(55)} + + mda-sha1 + FROM PKIXAlgs-2009 + { iso(1) identified-organization(3) dod(6) + internet(1) security(5) mechanisms(5) pkix(7) id-mod(0) + id-mod-pkix1-algorithms2008-02(56)} + + kda-PBKDF2, maca-hMAC-SHA1 + FROM CryptographicMessageSyntaxAlgorithms-2009 + { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9) + smime(16) modules(0) id-mod-cmsalg-2001-02(37) } + + mda-sha256 + FROM PKIX1-PSS-OAEP-Algorithms-2009 + { iso(1) identified-organization(3) dod(6) + internet(1) 
security(5) mechanisms(5) pkix(7) id-mod(0) + id-mod-pkix1-rsa-pkalgs-02(54) } ; + + -- CMS Content types defined in this document + CMC-ContentTypes CONTENT-TYPE ::= { ct-PKIData | ct-PKIResponse, ... } + + -- Signature Algorithms defined in this document + + SignatureAlgs SIGNATURE-ALGORITHM ::= { sa-noSignature } + + -- CMS Unsigned Attributes + + CMC-UnsignedAtts ATTRIBUTE ::= { aa-cmc-unsignedData } + + -- + -- + + id-cmc OBJECT IDENTIFIER ::= {id-pkix 7} -- CMC controls + id-cct OBJECT IDENTIFIER ::= {id-pkix 12} -- CMC content types + + -- This is the content type for a request message in the protocol + + ct-PKIData CONTENT-TYPE ::= + { PKIData IDENTIFIED BY id-cct-PKIData } + id-cct-PKIData OBJECT IDENTIFIER ::= { id-cct 2 } + + PKIData ::= SEQUENCE { + controlSequence SEQUENCE SIZE(0..MAX) OF TaggedAttribute, + reqSequence SEQUENCE SIZE(0..MAX) OF TaggedRequest, + cmsSequence SEQUENCE SIZE(0..MAX) OF TaggedContentInfo, + otherMsgSequence SEQUENCE SIZE(0..MAX) OF OtherMsg + } + + BodyPartID ::= INTEGER(0..4294967295) + + TaggedAttribute ::= SEQUENCE { + bodyPartID BodyPartID, + attrType CMC-CONTROL.&id({Cmc-Control-Set}), + attrValues SET OF CMC-CONTROL. + &Type({Cmc-Control-Set}{@attrType}) + } + + Cmc-Control-Set CMC-CONTROL ::= { + cmc-identityProof | cmc-dataReturn | cmc-regInfo | + cmc-responseInfo | cmc-queryPending | cmc-popLinkRandom | + cmc-popLinkWitness | cmc-identification | cmc-transactionId | + cmc-senderNonce | cmc-recipientNonce | cmc-statusInfo | + cmc-addExtensions | cmc-encryptedPOP | cmc-decryptedPOP | + cmc-lraPOPWitness | cmc-getCert | cmc-getCRL | + cmc-revokeRequest | cmc-confirmCertAcceptance | + cmc-statusInfoV2 | cmc-trustedAnchors | cmc-authData | + cmc-batchRequests | cmc-batchResponses | cmc-publishCert | + cmc-modCertTemplate | cmc-controlProcessed | + cmc-identityProofV2 | cmc-popLinkWitnessV2, ... 
} + + OTHER-REQUEST ::= TYPE-IDENTIFIER + + -- We do not define any other requests in this document; + -- examples might be attribute certification requests + + OtherRequests OTHER-REQUEST ::= {...} + + TaggedRequest ::= CHOICE { + tcr [0] TaggedCertificationRequest, + crm [1] CertReqMsg, + orm [2] SEQUENCE { + bodyPartID BodyPartID, + requestMessageType OTHER-REQUEST.&id({OtherRequests}), + requestMessageValue OTHER-REQUEST.&Type({OtherRequests} + {@.requestMessageType}) + } + } + + TaggedCertificationRequest ::= SEQUENCE { + bodyPartID BodyPartID, + certificationRequest CertificationRequest + } + + AttributeList ATTRIBUTE ::= {at-extension-req, ...} + + CertificationRequest ::= SEQUENCE { + certificationRequestInfo SEQUENCE { + version INTEGER, + subject Name, + subjectPublicKeyInfo SEQUENCE { + algorithm AlgorithmIdentifier{PUBLIC-KEY, + {PublicKeyAlgorithms}}, + subjectPublicKey BIT STRING + }, + attributes [0] IMPLICIT SET OF + AttributeSet{{AttributeList}} + }, + signatureAlgorithm AlgorithmIdentifier + {SIGNATURE-ALGORITHM, + {SignatureAlgorithms}}, + signature BIT STRING + } + + TaggedContentInfo ::= SEQUENCE { + bodyPartID BodyPartID, + contentInfo ContentInfo + } + + OTHER-MSG ::= TYPE-IDENTIFIER + + -- No other messages currently defined + + OtherMsgSet OTHER-MSG ::= {...} + + OtherMsg ::= SEQUENCE { + bodyPartID BodyPartID, + otherMsgType OTHER-MSG.&id({OtherMsgSet}), + otherMsgValue OTHER-MSG.&Type({OtherMsgSet}{@otherMsgType}) } + + -- This defines the response message in the protocol + + ct-PKIResponse CONTENT-TYPE ::= + { PKIResponse IDENTIFIED BY id-cct-PKIResponse } + id-cct-PKIResponse OBJECT IDENTIFIER ::= { id-cct 3 } + + ResponseBody ::= PKIResponse + + PKIResponse ::= SEQUENCE { + controlSequence SEQUENCE SIZE(0..MAX) OF TaggedAttribute, + cmsSequence SEQUENCE SIZE(0..MAX) OF TaggedContentInfo, + otherMsgSequence SEQUENCE SIZE(0..MAX) OF OtherMsg + } + + CMC-CONTROL ::= TYPE-IDENTIFIER + + -- The following controls have the type OCTET STRING + + cmc-identityProof CMC-CONTROL ::= + { OCTET STRING IDENTIFIED BY id-cmc-identityProof } + id-cmc-identityProof OBJECT IDENTIFIER ::= {id-cmc 3} + + cmc-dataReturn CMC-CONTROL ::= + { OCTET STRING IDENTIFIED BY id-cmc-dataReturn } + id-cmc-dataReturn OBJECT IDENTIFIER ::= {id-cmc 4} + + cmc-regInfo CMC-CONTROL ::= + { OCTET STRING IDENTIFIED BY id-cmc-regInfo } + id-cmc-regInfo OBJECT IDENTIFIER ::= {id-cmc 18} + + cmc-responseInfo CMC-CONTROL ::= + { OCTET STRING IDENTIFIED BY id-cmc-responseInfo } + id-cmc-responseInfo OBJECT IDENTIFIER ::= {id-cmc 19} + + cmc-queryPending CMC-CONTROL ::= + { OCTET STRING IDENTIFIED BY id-cmc-queryPending } + id-cmc-queryPending OBJECT IDENTIFIER ::= {id-cmc 21} + + cmc-popLinkRandom CMC-CONTROL ::= + { OCTET STRING IDENTIFIED BY id-cmc-popLinkRandom } + id-cmc-popLinkRandom OBJECT IDENTIFIER ::= {id-cmc 22} + + cmc-popLinkWitness CMC-CONTROL ::= + { OCTET STRING IDENTIFIED BY id-cmc-popLinkWitness } + id-cmc-popLinkWitness OBJECT IDENTIFIER ::= {id-cmc 23} + + -- The following controls have the type UTF8String + + cmc-identification CMC-CONTROL ::= + { UTF8String IDENTIFIED BY id-cmc-identification } + id-cmc-identification OBJECT IDENTIFIER ::= {id-cmc 2} + + -- The following controls have the type INTEGER + + cmc-transactionId CMC-CONTROL ::= + { INTEGER IDENTIFIED BY id-cmc-transactionId } + id-cmc-transactionId OBJECT IDENTIFIER ::= {id-cmc 5} + + -- The following controls have the type OCTET STRING + + cmc-senderNonce CMC-CONTROL ::= + { OCTET STRING IDENTIFIED BY id-cmc-senderNonce } 
+ + id-cmc-senderNonce OBJECT IDENTIFIER ::= {id-cmc 6} + + cmc-recipientNonce CMC-CONTROL ::= + { OCTET STRING IDENTIFIED BY id-cmc-recipientNonce } + id-cmc-recipientNonce OBJECT IDENTIFIER ::= {id-cmc 7} + + -- Used to return status in a response + + cmc-statusInfo CMC-CONTROL ::= + { CMCStatusInfo IDENTIFIED BY id-cmc-statusInfo } + id-cmc-statusInfo OBJECT IDENTIFIER ::= {id-cmc 1} + + CMCStatusInfo ::= SEQUENCE { + cMCStatus CMCStatus, + bodyList SEQUENCE SIZE (1..MAX) OF BodyPartID, + statusString UTF8String OPTIONAL, + otherInfo CHOICE { + failInfo CMCFailInfo, + pendInfo PendInfo + } OPTIONAL + } + + PendInfo ::= SEQUENCE { + pendToken OCTET STRING, + pendTime GeneralizedTime + } + + CMCStatus ::= INTEGER { + success (0), + failed (2), + pending (3), + noSupport (4), + confirmRequired (5), + popRequired (6), + partial (7) + } + + -- Note: + -- The spelling of unsupportedExt is corrected in this version. + -- In RFC 2797, it was unsuportedExt. + + CMCFailInfo ::= INTEGER { + badAlg (0), + badMessageCheck (1), + badRequest (2), + badTime (3), + badCertId (4), + unsuportedExt (5), + mustArchiveKeys (6), + badIdentity (7), + popRequired (8), + popFailed (9), + noKeyReuse (10), + internalCAError (11), + tryLater (12), + authDataFail (13) + } + + -- Used for RAs to add extensions to certification requests + + cmc-addExtensions CMC-CONTROL ::= + { AddExtensions IDENTIFIED BY id-cmc-addExtensions } + id-cmc-addExtensions OBJECT IDENTIFIER ::= {id-cmc 8} + + AddExtensions ::= SEQUENCE { + pkiDataReference BodyPartID, + certReferences SEQUENCE OF BodyPartID, + extensions SEQUENCE OF Extension{{CertExtensions}} + } + + cmc-encryptedPOP CMC-CONTROL ::= + { EncryptedPOP IDENTIFIED BY id-cmc-encryptedPOP } + cmc-decryptedPOP CMC-CONTROL ::= + { DecryptedPOP IDENTIFIED BY id-cmc-decryptedPOP } + id-cmc-encryptedPOP OBJECT IDENTIFIER ::= {id-cmc 9} + id-cmc-decryptedPOP OBJECT IDENTIFIER ::= {id-cmc 10} + + EncryptedPOP ::= SEQUENCE { + request TaggedRequest, + cms ContentInfo, + thePOPAlgID AlgorithmIdentifier{MAC-ALGORITHM, {POPAlgs}}, + witnessAlgID AlgorithmIdentifier{DIGEST-ALGORITHM, + {WitnessAlgs}}, + witness OCTET STRING + } + + POPAlgs MAC-ALGORITHM ::= {maca-hMAC-SHA1, ...} + WitnessAlgs DIGEST-ALGORITHM ::= {mda-sha1, ...} + + DecryptedPOP ::= SEQUENCE { + bodyPartID BodyPartID, + thePOPAlgID AlgorithmIdentifier{MAC-ALGORITHM, {POPAlgs}}, + thePOP OCTET STRING + } + + cmc-lraPOPWitness CMC-CONTROL ::= + { LraPopWitness IDENTIFIED BY id-cmc-lraPOPWitness } + + id-cmc-lraPOPWitness OBJECT IDENTIFIER ::= {id-cmc 11} + + LraPopWitness ::= SEQUENCE { + pkiDataBodyid BodyPartID, + bodyIds SEQUENCE OF BodyPartID + } + + -- + + cmc-getCert CMC-CONTROL ::= + { GetCert IDENTIFIED BY id-cmc-getCert } + id-cmc-getCert OBJECT IDENTIFIER ::= {id-cmc 15} + + GetCert ::= SEQUENCE { + issuerName GeneralName, + serialNumber INTEGER } + + cmc-getCRL CMC-CONTROL ::= + { GetCRL IDENTIFIED BY id-cmc-getCRL } + id-cmc-getCRL OBJECT IDENTIFIER ::= {id-cmc 16} + GetCRL ::= SEQUENCE { + issuerName Name, + cRLName GeneralName OPTIONAL, + time GeneralizedTime OPTIONAL, + reasons ReasonFlags OPTIONAL } + + cmc-revokeRequest CMC-CONTROL ::= + { RevokeRequest IDENTIFIED BY id-cmc-revokeRequest} + id-cmc-revokeRequest OBJECT IDENTIFIER ::= {id-cmc 17} + + RevokeRequest ::= SEQUENCE { + issuerName Name, + serialNumber INTEGER, + reason CRLReason, + invalidityDate GeneralizedTime OPTIONAL, + passphrase OCTET STRING OPTIONAL, + comment UTF8String OPTIONAL } + + cmc-confirmCertAcceptance CMC-CONTROL ::= + { CMCCertId 
IDENTIFIED BY id-cmc-confirmCertAcceptance } + id-cmc-confirmCertAcceptance OBJECT IDENTIFIER ::= {id-cmc 24} + + CMCCertId ::= IssuerAndSerialNumber + + -- The following is used to request v3 extensions be added + -- to a certificate + + at-extension-req ATTRIBUTE ::= + { TYPE ExtensionReq IDENTIFIED BY id-ExtensionReq } + id-ExtensionReq OBJECT IDENTIFIER ::= {iso(1) member-body(2) us(840) + rsadsi(113549) pkcs(1) pkcs-9(9) 14} + + ExtensionReq ::= SEQUENCE SIZE (1..MAX) OF + Extension{{CertExtensions}} + + -- The following allows Diffie-Hellman Certification Request + -- Messages to be well-formed + + sa-noSignature SIGNATURE-ALGORITHM ::= { + IDENTIFIER id-alg-noSignature + VALUE NoSignatureValue + PARAMS TYPE NULL ARE required + HASHES { mda-sha1 } + } + id-alg-noSignature OBJECT IDENTIFIER ::= {id-pkix id-alg(6) 2} + + NoSignatureValue ::= OCTET STRING + -- Unauthenticated attribute to carry removable data. + + id-aa OBJECT IDENTIFIER ::= { iso(1) member-body(2) us(840) + rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) id-aa(2)} + + aa-cmc-unsignedData ATTRIBUTE ::= + { TYPE CMCUnsignedData IDENTIFIED BY id-aa-cmc-unsignedData } + id-aa-cmc-unsignedData OBJECT IDENTIFIER ::= {id-aa 34} + + CMCUnsignedData ::= SEQUENCE { + bodyPartPath BodyPartPath, + identifier TYPE-IDENTIFIER.&id, + content TYPE-IDENTIFIER.&Type + } + + -- Replaces CMC Status Info + -- + + cmc-statusInfoV2 CMC-CONTROL ::= + { CMCStatusInfoV2 IDENTIFIED BY id-cmc-statusInfoV2 } + id-cmc-statusInfoV2 OBJECT IDENTIFIER ::= {id-cmc 25} + + EXTENDED-FAILURE-INFO ::= TYPE-IDENTIFIER + + ExtendedFailures EXTENDED-FAILURE-INFO ::= {...} + + CMCStatusInfoV2 ::= SEQUENCE { + cMCStatus CMCStatus, + bodyList SEQUENCE SIZE (1..MAX) OF + BodyPartReference, + statusString UTF8String OPTIONAL, + otherInfo CHOICE { + failInfo CMCFailInfo, + pendInfo PendInfo, + extendedFailInfo [1] SEQUENCE { + failInfoOID TYPE-IDENTIFIER.&id + ({ExtendedFailures}), + failInfoValue TYPE-IDENTIFIER.&Type + ({ExtendedFailures} + {@.failInfoOID}) + } + } OPTIONAL + } + + BodyPartReference ::= CHOICE { + bodyPartID BodyPartID, + bodyPartPath BodyPartPath + } + + BodyPartPath ::= SEQUENCE SIZE (1..MAX) OF BodyPartID + + -- Allow for distribution of trust anchors + -- + + cmc-trustedAnchors CMC-CONTROL ::= + { PublishTrustAnchors IDENTIFIED BY id-cmc-trustedAnchors } + id-cmc-trustedAnchors OBJECT IDENTIFIER ::= {id-cmc 26} + + PublishTrustAnchors ::= SEQUENCE { + seqNumber INTEGER, + hashAlgorithm AlgorithmIdentifier{DIGEST-ALGORITHM, + {HashAlgorithms}}, + anchorHashes SEQUENCE OF OCTET STRING + } + + HashAlgorithms DIGEST-ALGORITHM ::= { + mda-sha1 | mda-sha256, ... 
+ } + + cmc-authData CMC-CONTROL ::= + { AuthPublish IDENTIFIED BY id-cmc-authData } + id-cmc-authData OBJECT IDENTIFIER ::= {id-cmc 27} + + AuthPublish ::= BodyPartID + + -- These two items use BodyPartList + + cmc-batchRequests CMC-CONTROL ::= + { BodyPartList IDENTIFIED BY id-cmc-batchRequests } + id-cmc-batchRequests OBJECT IDENTIFIER ::= {id-cmc 28} + + cmc-batchResponses CMC-CONTROL ::= + { BodyPartList IDENTIFIED BY id-cmc-batchResponses } + id-cmc-batchResponses OBJECT IDENTIFIER ::= {id-cmc 29} + + BodyPartList ::= SEQUENCE SIZE (1..MAX) OF BodyPartID + + cmc-publishCert CMC-CONTROL ::= + { CMCPublicationInfo IDENTIFIED BY id-cmc-publishCert } + id-cmc-publishCert OBJECT IDENTIFIER ::= {id-cmc 30} + + CMCPublicationInfo ::= SEQUENCE { + hashAlg AlgorithmIdentifier{DIGEST-ALGORITHM, + {HashAlgorithms}}, + certHashes SEQUENCE OF OCTET STRING, + pubInfo PKIPublicationInfo + } + + cmc-modCertTemplate CMC-CONTROL ::= + { ModCertTemplate IDENTIFIED BY id-cmc-modCertTemplate } + id-cmc-modCertTemplate OBJECT IDENTIFIER ::= {id-cmc 31} + + ModCertTemplate ::= SEQUENCE { + pkiDataReference BodyPartPath, + certReferences BodyPartList, + replace BOOLEAN DEFAULT TRUE, + certTemplate CertTemplate + } + + -- Inform follow-on servers that one or more controls have + -- already been processed + + cmc-controlProcessed CMC-CONTROL ::= + { ControlsProcessed IDENTIFIED BY id-cmc-controlProcessed } + id-cmc-controlProcessed OBJECT IDENTIFIER ::= {id-cmc 32} + + ControlsProcessed ::= SEQUENCE { + bodyList SEQUENCE SIZE(1..MAX) OF BodyPartReference + } + + -- Identity Proof control w/ algorithm agility + + cmc-identityProofV2 CMC-CONTROL ::= + { IdentityProofV2 IDENTIFIED BY id-cmc-identityProofV2 } + id-cmc-identityProofV2 OBJECT IDENTIFIER ::= { id-cmc 33 } + + IdentityProofV2 ::= SEQUENCE { + proofAlgID AlgorithmIdentifier{DIGEST-ALGORITHM, + {WitnessAlgs}}, + macAlgId AlgorithmIdentifier{MAC-ALGORITHM, {POPAlgs}}, + witness OCTET STRING + } + + cmc-popLinkWitnessV2 CMC-CONTROL ::= + { PopLinkWitnessV2 IDENTIFIED BY id-cmc-popLinkWitnessV2 } + id-cmc-popLinkWitnessV2 OBJECT IDENTIFIER ::= { id-cmc 34 } + + PopLinkWitnessV2 ::= SEQUENCE { + keyGenAlgorithm AlgorithmIdentifier{KEY-DERIVATION, + {KeyDevAlgs}}, + macAlgorithm AlgorithmIdentifier{MAC-ALGORITHM, {POPAlgs}}, + witness OCTET STRING + } + + KeyDevAlgs KEY-DERIVATION ::= {kda-PBKDF2, ...} + + END diff --git a/lib/asn1/test/asn1_SUITE_data/x420/External-References.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/External-References.asn1 index 9a7d4936a6..9a7d4936a6 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/External-References.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/External-References.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/GULSProtectionMappings.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/GULSProtectionMappings.asn1 index 9b6a426ca2..9b6a426ca2 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/GULSProtectionMappings.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/GULSProtectionMappings.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/GenericProtectingTransferSyntax.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/GenericProtectingTransferSyntax.asn1 index c59451dcdb..c59451dcdb 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/GenericProtectingTransferSyntax.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/GenericProtectingTransferSyntax.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Coding-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Coding-Attributes.asn1 index 60acbb3b5c..60acbb3b5c 100644 --- 
a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Coding-Attributes.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Coding-Attributes.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Presentation-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Presentation-Attributes.asn1 index 84c1ee9851..84c1ee9851 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Presentation-Attributes.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Presentation-Attributes.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Profile-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Profile-Attributes.asn1 index 28daa467e1..28daa467e1 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Profile-Attributes.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Profile-Attributes.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityExchanges.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityExchanges.asn1 index 336b824174..336b824174 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityExchanges.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityExchanges.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityTransformations.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityTransformations.asn1 index db2725c37d..db2725c37d 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityTransformations.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityTransformations.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/HierarchicalOperationalBindings.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/HierarchicalOperationalBindings.asn1 index 4e0084b079..4e0084b079 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/HierarchicalOperationalBindings.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/HierarchicalOperationalBindings.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAbstractService.asn1 index 3fec8ae64a..3fec8ae64a 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSAbstractService.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAbstractService.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSAutoActionTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAutoActionTypes.asn1 index 8c0c8138e2..8c0c8138e2 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSAutoActionTypes.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAutoActionTypes.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes.asn1 index 9805a6189d..9805a6189d 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes2.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes2.asn1 index b39e03c3b6..b39e03c3b6 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes2.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes2.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedVoiceBodyPartType.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedVoiceBodyPartType.asn1 index 171f4b4223..171f4b4223 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedVoiceBodyPartType.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedVoiceBodyPartType.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSFileTransferBodyPartType.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFileTransferBodyPartType.asn1 index 
59de6d1b04..59de6d1b04 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSFileTransferBodyPartType.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFileTransferBodyPartType.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedContentBodyPartType.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedContentBodyPartType.asn1 index 57faac6587..57faac6587 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedContentBodyPartType.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedContentBodyPartType.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedReportBodyPartType.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedReportBodyPartType.asn1 index 4e46c7679b..4e46c7679b 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedReportBodyPartType.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedReportBodyPartType.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSFunctionalObjects.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFunctionalObjects.asn1 index 09ef4de282..09ef4de282 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSFunctionalObjects.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFunctionalObjects.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSHeadingExtensions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSHeadingExtensions.asn1 index 752e8d05e1..752e8d05e1 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSHeadingExtensions.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSHeadingExtensions.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSInformationObjects.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSInformationObjects.asn1 index 3fb0463ee7..3fb0463ee7 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSInformationObjects.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSInformationObjects.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSMessageStoreAttributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSMessageStoreAttributes.asn1 index 719bca4987..719bca4987 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSMessageStoreAttributes.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSMessageStoreAttributes.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers.asn1 index 6e5c01ab40..6e5c01ab40 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers2.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers2.asn1 index 2b46b27b3e..2b46b27b3e 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers2.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers2.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSSecurityExtensions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSSecurityExtensions.asn1 index 8c692ccb31..8c692ccb31 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSSecurityExtensions.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSSecurityExtensions.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSUpperBounds.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSUpperBounds.asn1 index 27324f614f..27324f614f 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSUpperBounds.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSUpperBounds.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn1 index 
b7efd7417e..b7efd7417e 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ISO8571-FTAM.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO8571-FTAM.asn1 index a57a276704..a57a276704 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/ISO8571-FTAM.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO8571-FTAM.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ISO9541-SN.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO9541-SN.asn1 index 0149602040..0149602040 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/ISO9541-SN.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO9541-SN.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Identifiers-and-Expressions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Identifiers-and-Expressions.asn1 index bd1d8d3c48..bd1d8d3c48 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Identifiers-and-Expressions.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Identifiers-and-Expressions.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/InformationFramework.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/InformationFramework.asn1 index 813ac9c6a0..813ac9c6a0 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/InformationFramework.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/InformationFramework.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Interchange-Data-Elements.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Interchange-Data-Elements.asn1 index 2c78360b7b..2c78360b7b 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Interchange-Data-Elements.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Interchange-Data-Elements.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Layout-Descriptors.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Layout-Descriptors.asn1 index 92c887bb06..92c887bb06 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Layout-Descriptors.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Layout-Descriptors.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Link-Descriptors.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Link-Descriptors.asn1 index 64fc4436e4..64fc4436e4 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Link-Descriptors.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Link-Descriptors.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Location-Expressions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Location-Expressions.asn1 index 5de6491621..5de6491621 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Location-Expressions.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Location-Expressions.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Logical-Descriptors.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Logical-Descriptors.asn1 index fab36bf12a..fab36bf12a 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Logical-Descriptors.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Logical-Descriptors.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MHSObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MHSObjectIdentifiers.asn1 index 187c3c8ad4..187c3c8ad4 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/MHSObjectIdentifiers.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MHSObjectIdentifiers.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MHSProtocolObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MHSProtocolObjectIdentifiers.asn1 index 40f53b9458..40f53b9458 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/MHSProtocolObjectIdentifiers.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MHSProtocolObjectIdentifiers.asn1 diff --git 
a/lib/asn1/test/asn1_SUITE_data/x420/MSAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSAbstractService.asn1 index 052b3b2041..052b3b2041 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/MSAbstractService.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSAbstractService.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSAccessProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSAccessProtocol.asn1 index b69d72b3ed..b69d72b3ed 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/MSAccessProtocol.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSAccessProtocol.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAttributeTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAttributeTypes.asn1 index 99d34b2883..99d34b2883 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAttributeTypes.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAttributeTypes.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAutoActionTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAutoActionTypes.asn1 index eceae4ab44..eceae4ab44 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAutoActionTypes.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAutoActionTypes.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSMatchingRules.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSMatchingRules.asn1 index 37c894da86..37c894da86 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/MSMatchingRules.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSMatchingRules.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSObjectIdentifiers.asn1 index df194f838c..df194f838c 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/MSObjectIdentifiers.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSObjectIdentifiers.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSUpperBounds.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSUpperBounds.asn1 index 6494fbd3ef..6494fbd3ef 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/MSUpperBounds.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSUpperBounds.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTAAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTAAbstractService.asn1 index 38035c77ae..38035c77ae 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/MTAAbstractService.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTAAbstractService.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService.asn1 index 68a5118bc8..68a5118bc8 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService88.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService88.asn1 index f66d117f35..f66d117f35 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService88.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService88.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTSAccessProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAccessProtocol.asn1 index 03181c5951..03181c5951 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/MTSAccessProtocol.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAccessProtocol.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTSObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSObjectIdentifiers.asn1 index 1615b241ee..1615b241ee 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/MTSObjectIdentifiers.asn +++ 
b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSObjectIdentifiers.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTSUpperBounds.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSUpperBounds.asn1 index 10eac962cb..10eac962cb 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/MTSUpperBounds.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSUpperBounds.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Notation.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Notation.asn1 index 96dfc39b6a..96dfc39b6a 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Notation.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Notation.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/OCSP-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/OCSP-2009.asn1 new file mode 100644 index 0000000000..db500fe9a1 --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/OCSP-2009.asn1 @@ -0,0 +1,183 @@ + OCSP-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-ocsp-02(48)} + DEFINITIONS EXPLICIT TAGS ::= + BEGIN + IMPORTS + + Extensions{}, EXTENSION, ATTRIBUTE + FROM PKIX-CommonTypes-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)} + + AlgorithmIdentifier{}, DIGEST-ALGORITHM, SIGNATURE-ALGORITHM + FROM AlgorithmInformation-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) + id-mod-algorithmInformation-02(58)} + + AuthorityInfoAccessSyntax, GeneralName, CrlEntryExtensions + FROM PKIX1Implicit-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)} + + Name, CertificateSerialNumber, id-kp, id-ad-ocsp, Certificate + FROM PKIX1Explicit-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)} + + sa-dsaWithSHA1, sa-rsaWithMD2, sa-rsaWithMD5, sa-rsaWithSHA1 + FROM PKIXAlgs-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) + id-mod-pkix1-algorithms2008-02(56)}; + + OCSPRequest ::= SEQUENCE { + tbsRequest TBSRequest, + optionalSignature [0] EXPLICIT Signature OPTIONAL } + + TBSRequest ::= SEQUENCE { + version [0] EXPLICIT Version DEFAULT v1, + requestorName [1] EXPLICIT GeneralName OPTIONAL, + requestList SEQUENCE OF Request, + requestExtensions [2] EXPLICIT Extensions {{re-ocsp-nonce | + re-ocsp-response, ...}} OPTIONAL } + + Signature ::= SEQUENCE { + signatureAlgorithm AlgorithmIdentifier + { SIGNATURE-ALGORITHM, {...}}, + signature BIT STRING, + certs [0] EXPLICIT SEQUENCE OF Certificate OPTIONAL } + + Version ::= INTEGER { v1(0) } + + Request ::= SEQUENCE { + reqCert CertID, + singleRequestExtensions [0] EXPLICIT Extensions + { {re-ocsp-service-locator, + ...}} OPTIONAL } + + CertID ::= SEQUENCE { + hashAlgorithm AlgorithmIdentifier + {DIGEST-ALGORITHM, {...}}, + issuerNameHash OCTET STRING, -- Hash of Issuer's DN + issuerKeyHash OCTET STRING, -- Hash of Issuer's public key + serialNumber CertificateSerialNumber } + + OCSPResponse ::= SEQUENCE { + responseStatus OCSPResponseStatus, + responseBytes [0] EXPLICIT ResponseBytes OPTIONAL } + + OCSPResponseStatus ::= ENUMERATED { + successful (0), --Response has valid confirmations + malformedRequest (1), --Illegal confirmation request + internalError (2), --Internal error in issuer + tryLater (3), --Try again later + -- (4) is not used + sigRequired (5), --Must sign the request + unauthorized 
(6) --Request unauthorized + } + + RESPONSE ::= TYPE-IDENTIFIER + + ResponseSet RESPONSE ::= {basicResponse, ...} + + ResponseBytes ::= SEQUENCE { + responseType RESPONSE. + &id ({ResponseSet}), + response OCTET STRING (CONTAINING RESPONSE. + &Type({ResponseSet}{@responseType}))} + + basicResponse RESPONSE ::= + { BasicOCSPResponse IDENTIFIED BY id-pkix-ocsp-basic } + + BasicOCSPResponse ::= SEQUENCE { + tbsResponseData ResponseData, + signatureAlgorithm AlgorithmIdentifier{SIGNATURE-ALGORITHM, + {sa-dsaWithSHA1 | sa-rsaWithSHA1 | + sa-rsaWithMD5 | sa-rsaWithMD2, ...}}, + signature BIT STRING, + certs [0] EXPLICIT SEQUENCE OF Certificate OPTIONAL } + + ResponseData ::= SEQUENCE { + version [0] EXPLICIT Version DEFAULT v1, + responderID ResponderID, + producedAt GeneralizedTime, + responses SEQUENCE OF SingleResponse, + responseExtensions [1] EXPLICIT Extensions + {{re-ocsp-nonce, ...}} OPTIONAL } + + ResponderID ::= CHOICE { + byName [1] Name, + byKey [2] KeyHash } + + KeyHash ::= OCTET STRING --SHA-1 hash of responder's public key + -- (excluding the tag and length fields) + + SingleResponse ::= SEQUENCE { + certID CertID, + certStatus CertStatus, + thisUpdate GeneralizedTime, + nextUpdate [0] EXPLICIT GeneralizedTime OPTIONAL, + singleExtensions [1] EXPLICIT Extensions{{re-ocsp-crl | + re-ocsp-archive-cutoff | + CrlEntryExtensions, ...} + } OPTIONAL } + + CertStatus ::= CHOICE { + good [0] IMPLICIT NULL, + revoked [1] IMPLICIT RevokedInfo, + unknown [2] IMPLICIT UnknownInfo } + + RevokedInfo ::= SEQUENCE { + revocationTime GeneralizedTime, + revocationReason [0] EXPLICIT CRLReason OPTIONAL } + + UnknownInfo ::= NULL + + CRLReason ::= INTEGER + + ArchiveCutoff ::= GeneralizedTime + + AcceptableResponses ::= SEQUENCE OF RESPONSE.&id({ResponseSet}) + + ServiceLocator ::= SEQUENCE { + issuer Name, + locator AuthorityInfoAccessSyntax } + + CrlID ::= SEQUENCE { + crlUrl [0] EXPLICIT IA5String OPTIONAL, + crlNum [1] EXPLICIT INTEGER OPTIONAL, + crlTime [2] EXPLICIT GeneralizedTime OPTIONAL } + + -- Request Extensions + + re-ocsp-nonce EXTENSION ::= { SYNTAX OCTET STRING IDENTIFIED + BY id-pkix-ocsp-nonce } + re-ocsp-response EXTENSION ::= { SYNTAX AcceptableResponses IDENTIFIED + BY id-pkix-ocsp-response } + re-ocsp-service-locator EXTENSION ::= { SYNTAX ServiceLocator + IDENTIFIED BY + id-pkix-ocsp-service-locator } + + -- Response Extensions + + re-ocsp-crl EXTENSION ::= { SYNTAX CrlID IDENTIFIED BY + id-pkix-ocsp-crl } + re-ocsp-archive-cutoff EXTENSION ::= { SYNTAX ArchiveCutoff + IDENTIFIED BY + id-pkix-ocsp-archive-cutoff } + + -- Object Identifiers + + id-kp-OCSPSigning OBJECT IDENTIFIER ::= { id-kp 9 } + id-pkix-ocsp OBJECT IDENTIFIER ::= id-ad-ocsp + id-pkix-ocsp-basic OBJECT IDENTIFIER ::= { id-pkix-ocsp 1 } + id-pkix-ocsp-nonce OBJECT IDENTIFIER ::= { id-pkix-ocsp 2 } + id-pkix-ocsp-crl OBJECT IDENTIFIER ::= { id-pkix-ocsp 3 } + id-pkix-ocsp-response OBJECT IDENTIFIER ::= { id-pkix-ocsp 4 } + id-pkix-ocsp-nocheck OBJECT IDENTIFIER ::= { id-pkix-ocsp 5 } + id-pkix-ocsp-archive-cutoff OBJECT IDENTIFIER ::= { id-pkix-ocsp 6 } + id-pkix-ocsp-service-locator OBJECT IDENTIFIER ::= { id-pkix-ocsp 7 } + + END diff --git a/lib/asn1/test/asn1_SUITE_data/x420/PKCS7.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/OLD-PKCS7.asn1 index ac449b59c7..ab555200bb 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/PKCS7.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/OLD-PKCS7.asn1 @@ -3,7 +3,7 @@ -- This Annex contains a module of PKCS#7 ASN.1 definitions conforming to current ASN.1 standards rather than the 
obsolescent (and now deprecated) 1988/90 version of ASN.1 used in version 1.5 of PKCS#7. -- Extensions to PKCS#7 defined in RFC 2630 are included. -- If differences are found between the ASN.1 in the following module and that in PKCS#7, the latter is definitive. -PKCS7 {iso member-body usa(840) rsadsi(113549) pkcs(1) 7 +OLD-PKCS7 {iso member-body usa(840) rsadsi(113549) pkcs(1) 7 module(0) -- module not currently defined in PKCS#7 --} DEFINITIONS IMPLICIT TAGS ::= BEGIN diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ObjectIdentifiers.asn1 index b4f91f50c5..b4f91f50c5 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/ObjectIdentifiers.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ObjectIdentifiers.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/OperationalBindingManagement.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/OperationalBindingManagement.asn1 index 2044feb155..2044feb155 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/OperationalBindingManagement.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/OperationalBindingManagement.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-10.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-10.asn1 new file mode 100644 index 0000000000..a5fd0fefb9 --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-10.asn1 @@ -0,0 +1,56 @@ + PKCS-10 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkcs10-2009(69)} + DEFINITIONS IMPLICIT TAGS ::= + BEGIN + IMPORTS + + AlgorithmIdentifier{}, DIGEST-ALGORITHM, SIGNATURE-ALGORITHM, + PUBLIC-KEY + FROM AlgorithmInformation-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) + id-mod-algorithmInformation-02(58)} + + ATTRIBUTE, Name + FROM PKIX1Explicit-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)}; + + -- Certificate requests + CertificationRequestInfo ::= SEQUENCE { + version INTEGER { v1(0) } (v1, ... ), + subject Name, + subjectPKInfo SubjectPublicKeyInfo{{ PKInfoAlgorithms }}, + attributes [0] Attributes{{ CRIAttributes }} + } + + SubjectPublicKeyInfo {PUBLIC-KEY: IOSet} ::= SEQUENCE { + algorithm AlgorithmIdentifier {PUBLIC-KEY, {IOSet}}, + subjectPublicKey BIT STRING + } + + PKInfoAlgorithms PUBLIC-KEY ::= { + ... -- add any locally defined algorithms here -- } + + Attributes { ATTRIBUTE:IOSet } ::= SET OF Attribute{{ IOSet }} + + CRIAttributes ATTRIBUTE ::= { + ... -- add any locally defined attributes here -- } + + Attribute { ATTRIBUTE:IOSet } ::= SEQUENCE { + type ATTRIBUTE.&id({IOSet}), + values SET SIZE(1..MAX) OF ATTRIBUTE.&Type({IOSet}{@type}) + } + + CertificationRequest ::= SEQUENCE { + certificationRequestInfo CertificationRequestInfo, + signatureAlgorithm AlgorithmIdentifier{SIGNATURE-ALGORITHM, + { SignatureAlgorithms }}, + signature BIT STRING + } + + SignatureAlgorithms SIGNATURE-ALGORITHM ::= { + ... 
-- add any locally defined algorithms here -- } + + END diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-12.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-12.asn1 new file mode 100644 index 0000000000..5b37a552f9 --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-12.asn1 @@ -0,0 +1,174 @@ +PKCS-12 {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) + pkcs-12(12) modules(0) pkcs-12(1)} + +-- $Revision$ + +DEFINITIONS IMPLICIT TAGS ::= + +BEGIN + +-- EXPORTS ALL +-- All types and values defined in this module is exported for use in +-- other ASN.1 modules. + +IMPORTS + +informationFramework + FROM UsefulDefinitions {joint-iso-itu-t(2) ds(5) module(1) + usefulDefinitions(0) 3} + +ATTRIBUTE + FROM InformationFramework informationFramework + +ContentInfo, DigestInfo + FROM PKCS-7 {iso(1) member-body(2) us(840) rsadsi(113549) + pkcs(1) pkcs-7(7) modules(0) pkcs-7(1)} + +PrivateKeyInfo, EncryptedPrivateKeyInfo + FROM PKCS-8 {iso(1) member-body(2) us(840) rsadsi(113549) + pkcs(1) pkcs-8(8) modules(1) pkcs-8(1)} + +pkcs-9, friendlyName, localKeyId, certTypes, crlTypes + FROM PKCS-9 {iso(1) member-body(2) us(840) rsadsi(113549) + pkcs(1) pkcs-9(9) modules(0) pkcs-9(1)}; + +-- Object identifiers + +rsadsi OBJECT IDENTIFIER ::= {iso(1) member-body(2) us(840) rsadsi(113549)} +pkcs OBJECT IDENTIFIER ::= {rsadsi pkcs(1)} +pkcs-12 OBJECT IDENTIFIER ::= {pkcs 12} +pkcs-12PbeIds OBJECT IDENTIFIER ::= {pkcs-12 1} +pbeWithSHAAnd128BitRC4 OBJECT IDENTIFIER ::= {pkcs-12PbeIds 1} +pbeWithSHAAnd40BitRC4 OBJECT IDENTIFIER ::= {pkcs-12PbeIds 2} +pbeWithSHAAnd3-KeyTripleDES-CBC OBJECT IDENTIFIER ::= {pkcs-12PbeIds 3} +pbeWithSHAAnd2-KeyTripleDES-CBC OBJECT IDENTIFIER ::= {pkcs-12PbeIds 4} +pbeWithSHAAnd128BitRC2-CBC OBJECT IDENTIFIER ::= {pkcs-12PbeIds 5} +pbewithSHAAnd40BitRC2-CBC OBJECT IDENTIFIER ::= {pkcs-12PbeIds 6} + +bagtypes OBJECT IDENTIFIER ::= {pkcs-12 10 1} + +-- The PFX PDU + +PFX ::= SEQUENCE { + version INTEGER {v3(3)}(v3,...), + authSafe ContentInfo, + macData MacData OPTIONAL +} + +MacData ::= SEQUENCE { + mac DigestInfo, + macSalt OCTET STRING, + iterations INTEGER DEFAULT 1 +-- Note: The default is for historical reasons and its use is +-- deprecated. A higher value, like 1024 is recommended. +} + +AuthenticatedSafe ::= SEQUENCE OF ContentInfo + -- Data if unencrypted + -- EncryptedData if password-encrypted + -- EnvelopedData if public key-encrypted + +SafeContents ::= SEQUENCE OF SafeBag + +SafeBag ::= SEQUENCE { + bagId BAG-TYPE.&id ({PKCS12BagSet}), + bagValue [0] EXPLICIT BAG-TYPE.&Type({PKCS12BagSet}{@bagId}), + bagAttributes SET OF PKCS12Attribute OPTIONAL +} + +-- Bag types + +keyBag BAG-TYPE ::= + {KeyBag IDENTIFIED BY {bagtypes 1}} +pkcs8ShroudedKeyBag BAG-TYPE ::= + {PKCS8ShroudedKeyBag IDENTIFIED BY {bagtypes 2}} +certBag BAG-TYPE ::= + {CertBag IDENTIFIED BY {bagtypes 3}} +crlBag BAG-TYPE ::= + {CRLBag IDENTIFIED BY {bagtypes 4}} +secretBag BAG-TYPE ::= + {SecretBag IDENTIFIED BY {bagtypes 5}} +safeContentsBag BAG-TYPE ::= + {SafeContents IDENTIFIED BY {bagtypes 6}} + +PKCS12BagSet BAG-TYPE ::= { + keyBag | + pkcs8ShroudedKeyBag | + certBag | + crlBag | + secretBag | + safeContentsBag, + ... 
-- For future extensions +} + +BAG-TYPE ::= TYPE-IDENTIFIER + +-- KeyBag + +KeyBag ::= PrivateKeyInfo + +-- Shrouded KeyBag + +PKCS8ShroudedKeyBag ::= EncryptedPrivateKeyInfo + +-- CertBag + +CertBag ::= SEQUENCE { + certId BAG-TYPE.&id ({CertTypes}), + certValue [0] EXPLICIT BAG-TYPE.&Type ({CertTypes}{@certId}) +} + +x509Certificate BAG-TYPE ::= + {OCTET STRING IDENTIFIED BY {certTypes 1}} + -- DER-encoded X.509 certificate stored in OCTET STRING +sdsiCertificate BAG-TYPE ::= + {IA5String IDENTIFIED BY {certTypes 2}} + -- Base64-encoded SDSI certificate stored in IA5String + +CertTypes BAG-TYPE ::= { + x509Certificate | + sdsiCertificate, + ... -- For future extensions +} + +-- CRLBag + +CRLBag ::= SEQUENCE { + crlId BAG-TYPE.&id ({CRLTypes}), + crlValue [0] EXPLICIT BAG-TYPE.&Type ({CRLTypes}{@crlId}) +} + +x509CRL BAG-TYPE ::= + {OCTET STRING IDENTIFIED BY {crlTypes 1}} + -- DER-encoded X.509 CRL stored in OCTET STRING + +CRLTypes BAG-TYPE ::= { + x509CRL, + ... -- For future extensions +} + +-- Secret Bag + +SecretBag ::= SEQUENCE { + secretTypeId BAG-TYPE.&id ({SecretTypes}), + secretValue [0] EXPLICIT BAG-TYPE.&Type ({SecretTypes}{@secretTypeId}) +} + +SecretTypes BAG-TYPE ::= { + ... -- For future extensions +} + +-- Attributes + +PKCS12Attribute ::= SEQUENCE { + attrId ATTRIBUTE.&id ({PKCS12AttrSet}), + attrValues SET OF ATTRIBUTE.&Type ({PKCS12AttrSet}{@attrId}) +} -- This type is compatible with the X.500 type 'Attribute' + +PKCS12AttrSet ATTRIBUTE ::= { + friendlyName | + localKeyId, + ... -- Other attributes are allowed +} + +END
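The PKCS-12 module above imports ContentInfo and DigestInfo from PKCS-7, the private-key types from PKCS-8, and the friendlyName/localKeyId attributes from PKCS-9, so these rfcs test modules only make sense when compiled as a group. A minimal sketch of how a test might drive the OTP asn1 compiler over the directory; the helper name, the option list, and the reliance on {i, Dir} to resolve IMPORTS between the modules are assumptions for illustration, not the actual suite code:

    %% Sketch: compile every .asn1 module in the rfcs test-data directory
    %% with BER. The include path ({i, Dir}) is assumed to let the compiler
    %% locate the sources of imported modules (PKCS-7/8/9,
    %% InformationFramework, ...) while each file is compiled.
    compile_rfcs(Dir, OutDir) ->
        Files = filelib:wildcard(filename:join(Dir, "*.asn1")),
        lists:foreach(
          fun(File) ->
                  ok = asn1ct:compile(File, [ber, {outdir, OutDir}, {i, Dir}])
          end, Files).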
\ No newline at end of file diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-5.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-5.asn1 new file mode 100644 index 0000000000..91b0dc36bf --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-5.asn1 @@ -0,0 +1,202 @@ +-- PKCS #5 v2.1 ASN.1 Module +-- Revised October 27, 2012 + +-- This module has been checked for conformance with the +-- ASN.1 standard by the OSS ASN.1 Tools + +PKCS-5 { + iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-5(5) modules(16) + pkcs5v2-1(2)} + +DEFINITIONS EXPLICIT TAGS ::= + +BEGIN + +-- ============================ +-- Basic object identifiers +-- ============================ + +nistAlgorithms OBJECT IDENTIFIER ::= + {joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101) csor(3) 4} +oiw OBJECT IDENTIFIER ::= {iso(1) identified-organization(3) 14} +rsadsi OBJECT IDENTIFIER ::= {iso(1) member-body(2) us(840) 113549} +pkcs OBJECT IDENTIFIER ::= {rsadsi 1} +pkcs-5 OBJECT IDENTIFIER ::= {pkcs 5} + + +-- ============================ +-- Basic types and classes +-- ============================ + +AlgorithmIdentifier { ALGORITHM-IDENTIFIER:InfoObjectSet } ::= SEQUENCE { + algorithm ALGORITHM-IDENTIFIER.&id({InfoObjectSet}), + parameters ALGORITHM-IDENTIFIER.&Type({InfoObjectSet} {@algorithm}) OPTIONAL +} + +ALGORITHM-IDENTIFIER ::= TYPE-IDENTIFIER + + +-- ============================ +-- PBKDF2 +-- ============================ + +PBKDF2Algorithms ALGORITHM-IDENTIFIER ::= + { {PBKDF2-params IDENTIFIED BY id-PBKDF2}, ...} + +id-PBKDF2 OBJECT IDENTIFIER ::= {pkcs-5 12} + +algid-hmacWithSHA1 AlgorithmIdentifier {{PBKDF2-PRFs}} ::= + {algorithm id-hmacWithSHA1, parameters NULL : NULL} + +PBKDF2-params ::= SEQUENCE { + salt CHOICE { + specified OCTET STRING, + otherSource AlgorithmIdentifier {{PBKDF2-SaltSources}} + }, + iterationCount INTEGER (1..MAX), + keyLength INTEGER (1..MAX) OPTIONAL, + prf AlgorithmIdentifier {{PBKDF2-PRFs}} DEFAULT algid-hmacWithSHA1 +} + +PBKDF2-SaltSources ALGORITHM-IDENTIFIER ::= { ... } + +PBKDF2-PRFs ALGORITHM-IDENTIFIER ::= { + {NULL IDENTIFIED BY id-hmacWithSHA1} | + {NULL IDENTIFIED BY id-hmacWithSHA224} | + {NULL IDENTIFIED BY id-hmacWithSHA256} | + {NULL IDENTIFIED BY id-hmacWithSHA384} | + {NULL IDENTIFIED BY id-hmacWithSHA512} | + {NULL IDENTIFIED BY id-hmacWithSHA512-224} | + {NULL IDENTIFIED BY id-hmacWithSHA512-256}, + ... +} + + +-- ============================ + -- PBES1 +-- ============================ + +PBES1Algorithms ALGORITHM-IDENTIFIER ::= { + {PBEParameter IDENTIFIED BY pbeWithMD2AndDES-CBC} | + {PBEParameter IDENTIFIED BY pbeWithMD2AndRC2-CBC} | + {PBEParameter IDENTIFIED BY pbeWithMD5AndDES-CBC} | + {PBEParameter IDENTIFIED BY pbeWithMD5AndRC2-CBC} | + {PBEParameter IDENTIFIED BY pbeWithSHA1AndDES-CBC} | + {PBEParameter IDENTIFIED BY pbeWithSHA1AndRC2-CBC}, + ... +} + +pbeWithMD2AndDES-CBC OBJECT IDENTIFIER ::= {pkcs-5 1} +pbeWithMD2AndRC2-CBC OBJECT IDENTIFIER ::= {pkcs-5 4} +pbeWithMD5AndDES-CBC OBJECT IDENTIFIER ::= {pkcs-5 3} +pbeWithMD5AndRC2-CBC OBJECT IDENTIFIER ::= {pkcs-5 6} +pbeWithSHA1AndDES-CBC OBJECT IDENTIFIER ::= {pkcs-5 10} +pbeWithSHA1AndRC2-CBC OBJECT IDENTIFIER ::= {pkcs-5 11} + +PBEParameter ::= SEQUENCE { + salt OCTET STRING (SIZE(8)), + iterationCount INTEGER +} + + +-- ============================ +-- PBES2 +-- ============================ + +PBES2Algorithms ALGORITHM-IDENTIFIER ::= { + {PBES2-params IDENTIFIED BY id-PBES2}, + ... 
+} + +id-PBES2 OBJECT IDENTIFIER ::= {pkcs-5 13} + +PBES2-params ::= SEQUENCE { + keyDerivationFunc AlgorithmIdentifier {{PBES2-KDFs}}, + encryptionScheme AlgorithmIdentifier {{PBES2-Encs}} +} + +PBES2-KDFs ALGORITHM-IDENTIFIER ::= { + {PBKDF2-params IDENTIFIED BY id-PBKDF2}, + ... +} + +PBES2-Encs ALGORITHM-IDENTIFIER ::= { ... } + + +-- ============================ +-- PBMAC1 +-- ============================ + +PBMAC1Algorithms ALGORITHM-IDENTIFIER ::= { + {PBMAC1-params IDENTIFIED BY id-PBMAC1}, + ... +} + +id-PBMAC1 OBJECT IDENTIFIER ::= {pkcs-5 14} + +PBMAC1-params ::= SEQUENCE { + keyDerivationFunc AlgorithmIdentifier {{PBMAC1-KDFs}}, + messageAuthScheme AlgorithmIdentifier {{PBMAC1-MACs}} +} + +PBMAC1-KDFs ALGORITHM-IDENTIFIER ::= { + {PBKDF2-params IDENTIFIED BY id-PBKDF2}, + ... +} + +PBMAC1-MACs ALGORITHM-IDENTIFIER ::= { ... } + +-- ============================ +-- Supporting techniques +-- ============================ + +digestAlgorithm OBJECT IDENTIFIER ::= {rsadsi 2} +encryptionAlgorithm OBJECT IDENTIFIER ::= {rsadsi 3} + +SupportingAlgorithms ALGORITHM-IDENTIFIER ::= { + {NULL IDENTIFIED BY id-hmacWithSHA1} | + {OCTET STRING (SIZE(8)) IDENTIFIED BY desCBC} | + {OCTET STRING (SIZE(8)) IDENTIFIED BY des-EDE3-CBC} | + {RC2-CBC-Parameter IDENTIFIED BY rc2CBC} | + {RC5-CBC-Parameters IDENTIFIED BY rc5-CBC-PAD} | + {OCTET STRING (SIZE(16)) IDENTIFIED BY aes128-CBC-PAD} | + {OCTET STRING (SIZE(16)) IDENTIFIED BY aes192-CBC-PAD} | + {OCTET STRING (SIZE(16)) IDENTIFIED BY aes256-CBC-PAD}, + ... +} + +id-hmacWithSHA1 OBJECT IDENTIFIER ::= {digestAlgorithm 7} +id-hmacWithSHA224 OBJECT IDENTIFIER ::= {digestAlgorithm 8} +id-hmacWithSHA256 OBJECT IDENTIFIER ::= {digestAlgorithm 9} +id-hmacWithSHA384 OBJECT IDENTIFIER ::= {digestAlgorithm 10} +id-hmacWithSHA512 OBJECT IDENTIFIER ::= {digestAlgorithm 11} +id-hmacWithSHA512-224 OBJECT IDENTIFIER ::= {digestAlgorithm 12} +id-hmacWithSHA512-256 OBJECT IDENTIFIER ::= {digestAlgorithm 13} + +-- from OIW +desCBC OBJECT IDENTIFIER ::= {oiw secsig(3) algorithms(2) 7} + +des-EDE3-CBC OBJECT IDENTIFIER ::= {encryptionAlgorithm 7} + +rc2CBC OBJECT IDENTIFIER ::= {encryptionAlgorithm 2} + +RC2-CBC-Parameter ::= SEQUENCE { + rc2ParameterVersion INTEGER OPTIONAL, + iv OCTET STRING (SIZE(8)) +} + +rc5-CBC-PAD OBJECT IDENTIFIER ::= {encryptionAlgorithm 9} + +RC5-CBC-Parameters ::= SEQUENCE { + version INTEGER {v1-0(16)} (v1-0), + rounds INTEGER (8..127), + blockSizeInBits INTEGER (64 | 128), + iv OCTET STRING OPTIONAL +} + +aes OBJECT IDENTIFIER ::= { nistAlgorithms 1 } +aes128-CBC-PAD OBJECT IDENTIFIER ::= { aes 2 } +aes192-CBC-PAD OBJECT IDENTIFIER ::= { aes 22 } +aes256-CBC-PAD OBJECT IDENTIFIER ::= { aes 42 } + +END
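Unlike most modules in this directory, PKCS-5 has no IMPORTS, so it can be compiled and exercised on its own. A minimal BER round-trip sketch for PBKDF2-params follows, assuming an OTP release whose asn1 compiler supports the maps option and whose generated encode/decode return {ok, Binary} and {ok, Value}; with the default representation the value would instead be a #'PBKDF2-params'{} record from the generated .hrl:

    ok = asn1ct:compile("PKCS-5.asn1", [ber, maps]),
    %% CHOICE values are {Alternative, Value} tuples; OPTIONAL/DEFAULT
    %% components (keyLength, prf) are simply omitted from the map.
    Params = #{salt => {specified, <<1,2,3,4,5,6,7,8>>},
               iterationCount => 2048},
    {ok, Ber} = 'PKCS-5':encode('PBKDF2-params', Params),
    {ok, _Decoded} = 'PKCS-5':decode('PBKDF2-params', Ber).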
\ No newline at end of file diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-7.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-7.asn1 new file mode 100644 index 0000000000..4cea8db240 --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-7.asn1 @@ -0,0 +1,326 @@ +PKCS-7 {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-7(7) + modules(0) pkcs-7(1)} + +DEFINITIONS EXPLICIT TAGS ::= +BEGIN + +-- +-- 3. Definitions +-- + +-- EXPORTS All; + +IMPORTS + +informationFramework, authenticationFramework + FROM UsefulDefinitions {joint-iso-itu-t ds(5) module(1) + usefulDefinitions(0) 3} + + Name, ATTRIBUTE + FROM InformationFramework informationFramework + + ALGORITHM, Certificate, CertificateSerialNumber, + CertificateList + FROM AuthenticationFramework authenticationFramework + + contentType, messageDigest, signingTime, counterSignature + FROM PKCS-9 {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) + pkcs-9(9) modules(0) pkcs-9(1)}; +-- +-- 6. Useful types +-- + +-- Also defined in X.509 +-- Redeclared here as a parameterized type +AlgorithmIdentifier {ALGORITHM:IOSet} ::= SEQUENCE { + algorithm ALGORITHM.&id({IOSet}), + parameters ALGORITHM.&Type({IOSet}{@algorithm}) OPTIONAL +} + +-- Also defined in X.501 +-- Redeclared here as a parameterized type +Attribute { ATTRIBUTE:IOSet } ::= SEQUENCE { + type ATTRIBUTE.&id({IOSet}), + values SET SIZE (1..MAX) OF ATTRIBUTE.&Type({IOSet}{@type}) +} + +CertificateRevocationLists ::= + SET OF CertificateList + +Certificates ::= + SEQUENCE OF Certificate + +CRLSequence ::= + SEQUENCE OF CertificateList + +ContentEncryptionAlgorithmIdentifier ::= + AlgorithmIdentifier {{ContentEncryptionAlgorithms}} + +ContentEncryptionAlgorithms ALGORITHM ::= { + ... -- add any application-specific algorithms here +} + +DigestAlgorithmIdentifier ::= + AlgorithmIdentifier {{DigestAlgorithms}} + +DigestAlgorithms ALGORITHM ::= { + ... -- add any application-specific algorithms here +} + +DigestEncryptionAlgorithmIdentifier ::= + AlgorithmIdentifier {{DigestEncryptionAlgorithms}} + +DigestEncryptionAlgorithms ALGORITHM ::= { + ... -- add any application-specific algorithms here +} + +ExtendedCertificateOrCertificate ::= CHOICE { + certificate Certificate, -- X.509 + extendedCertificate [0] IMPLICIT ExtendedCertificate -- PKCS#6 +} + +ExtendedCertificate ::= Certificate -- cheating + +ExtendedCertificatesAndCertificates ::= + SET OF ExtendedCertificateOrCertificate + +IssuerAndSerialNumber ::= SEQUENCE { + issuer Name, + serialNumber CertificateSerialNumber +} + +KeyEncryptionAlgorithmIdentifier ::= + AlgorithmIdentifier {{KeyEncryptionAlgorithms}} + +KeyEncryptionAlgorithms ALGORITHM ::= { + ... -- add any application-specific algorithms here +} + +-- +-- 7. General syntax +-- + +ContentInfo ::= SEQUENCE { + contentType ContentType, + content [0] EXPLICIT CONTENTS.&Type({Contents}{@contentType}) +OPTIONAL +} + +CONTENTS ::= TYPE-IDENTIFIER + +Contents CONTENTS ::= { + {Data IDENTIFIED BY data} | + {SignedData IDENTIFIED BY signedData} | + {EnvelopedData IDENTIFIED BY envelopedData} | + {SignedAndEnvelopedData IDENTIFIED BY signedAndEnvelopedData} | + {DigestedData IDENTIFIED BY digestedData} | + {EncryptedData IDENTIFIED BY encryptedData}, + ... -- add any application-specific types/contents here +} + +ContentType ::= CONTENTS.&id({Contents}) + +-- +-- 8. Data content type +-- + +Data ::= OCTET STRING + +-- +-- 9. 
Signed-data content type +-- + +SignedData ::= SEQUENCE { + version INTEGER {sdVer1(1), sdVer2(2)} (sdVer1 | sdVer2), + digestAlgorithms + DigestAlgorithmIdentifiers, + contentInfo ContentInfo, + certificates CHOICE { + certSet [0] IMPLICIT ExtendedCertificatesAndCertificates, + certSequence [2] IMPLICIT Certificates + } OPTIONAL, + crls CHOICE { + crlSet [1] IMPLICIT CertificateRevocationLists, + crlSequence [3] IMPLICIT CRLSequence + } OPTIONAL, + signerInfos SignerInfos +} (WITH COMPONENTS { ..., version (sdVer1), + digestAlgorithms (WITH COMPONENTS { ..., daSet PRESENT }), + certificates (WITH COMPONENTS { ..., certSequence ABSENT }), + crls (WITH COMPONENTS { ..., crlSequence ABSENT }), + signerInfos (WITH COMPONENTS { ..., siSet PRESENT }) + } | + WITH COMPONENTS { ..., version (sdVer2), + digestAlgorithms (WITH COMPONENTS { ..., daSequence PRESENT }), + certificates (WITH COMPONENTS { ..., certSet ABSENT }), + crls (WITH COMPONENTS { ..., crlSet ABSENT }), + signerInfos (WITH COMPONENTS { ..., siSequence PRESENT }) +}) + +SignerInfos ::= CHOICE { + siSet SET OF SignerInfo, + siSequence SEQUENCE OF SignerInfo +} + +DigestAlgorithmIdentifiers ::= CHOICE { + daSet SET OF DigestAlgorithmIdentifier, + daSequence SEQUENCE OF DigestAlgorithmIdentifier +} + +SignerInfo ::= SEQUENCE { + version INTEGER {siVer1(1), siVer2(2)} (siVer1 | siVer2), + issuerAndSerialNumber + IssuerAndSerialNumber, + digestAlgorithm DigestAlgorithmIdentifier, + authenticatedAttributes CHOICE { + aaSet [0] IMPLICIT SET OF Attribute {{Authenticated}}, + aaSequence [2] EXPLICIT SEQUENCE OF Attribute {{Authenticated}} + -- Explicit because easier to compute digest on sequence of attributes and then reuse + -- encoded sequence in aaSequence. + } OPTIONAL, + digestEncryptionAlgorithm + DigestEncryptionAlgorithmIdentifier, + encryptedDigest EncryptedDigest, + unauthenticatedAttributes CHOICE { + uaSet [1] IMPLICIT SET OF Attribute {{Unauthenticated}}, + uaSequence [3] IMPLICIT SEQUENCE OF Attribute {{Unauthenticated}} + } OPTIONAL +} (WITH COMPONENTS { ..., version (siVer1), + authenticatedAttributes (WITH COMPONENTS { ..., aaSequence ABSENT }), + unauthenticatedAttributes (WITH COMPONENTS { ..., uaSequence ABSENT }) +} | WITH COMPONENTS { ..., version (siVer2), + authenticatedAttributes (WITH COMPONENTS { ..., aaSet ABSENT }), + unauthenticatedAttributes (WITH COMPONENTS { ..., uaSet ABSENT }) +}) + +Authenticated ATTRIBUTE ::= { + contentType | + messageDigest, + ..., -- add application-specific attributes here + signingTime +} + +Unauthenticated ATTRIBUTE ::= { + ..., -- add application-specific attributes here + counterSignature +} + +EncryptedDigest ::= OCTET STRING + +DigestInfo ::= SEQUENCE { + digestAlgorithm DigestAlgorithmIdentifier, + digest Digest +} + +Digest ::= OCTET STRING + +-- +-- 10. 
Enveloped-data content type +-- + +EnvelopedData ::= SEQUENCE { + version INTEGER {edVer0(0), edVer1(1)} (edVer0 | edVer1), + recipientInfos RecipientInfos, + encryptedContentInfo + EncryptedContentInfo +} (WITH COMPONENTS { ..., version (edVer0), + recipientInfos (WITH COMPONENTS { ..., riSet PRESENT }) +} | WITH COMPONENTS { ..., version (edVer1), + recipientInfos (WITH COMPONENTS { ..., riSequence PRESENT }) +}) + +RecipientInfos ::= CHOICE { + riSet SET OF RecipientInfo, + riSequence SEQUENCE OF RecipientInfo +} + +EncryptedContentInfo ::= SEQUENCE { + contentType ContentType, + contentEncryptionAlgorithm + ContentEncryptionAlgorithmIdentifier, + encryptedContent + [0] IMPLICIT EncryptedContent OPTIONAL +} + +EncryptedContent ::= OCTET STRING + +RecipientInfo ::= SEQUENCE { + version INTEGER {riVer0(0)} (riVer0), + issuerAndSerialNumber + IssuerAndSerialNumber, + keyEncryptionAlgorithm + KeyEncryptionAlgorithmIdentifier, + encryptedKey EncryptedKey +} + +EncryptedKey ::= OCTET STRING + +-- +-- 11. Signed-and-enveloped-data content type +-- + +SignedAndEnvelopedData ::= SEQUENCE { + version INTEGER {seVer1(1), seVer2(2)} (seVer1 | seVer2), + recipientInfos RecipientInfos, + digestAlgorithms + DigestAlgorithmIdentifiers, + encryptedContentInfo + EncryptedContentInfo, + certificates CHOICE { + certSet [0] IMPLICIT ExtendedCertificatesAndCertificates, + certSequence [2] IMPLICIT Certificates + } OPTIONAL, + crls CHOICE { + crlSet [1] IMPLICIT CertificateRevocationLists, + crlSequence [3] IMPLICIT CRLSequence + } OPTIONAL, + signerInfos SignerInfos +} (WITH COMPONENTS { ..., version (seVer1), + recipientInfos (WITH COMPONENTS { ..., riSet PRESENT }), + digestAlgorithms (WITH COMPONENTS { ..., daSet PRESENT }), + certificates (WITH COMPONENTS { ..., certSequence ABSENT }), + crls (WITH COMPONENTS { ..., crlSequence ABSENT }), + signerInfos (WITH COMPONENTS { ..., siSet PRESENT }) +} | + WITH COMPONENTS { ..., version (seVer2), + recipientInfos (WITH COMPONENTS { ..., riSequence PRESENT }), + digestAlgorithms (WITH COMPONENTS { ..., daSequence PRESENT }), + certificates (WITH COMPONENTS { ..., certSet ABSENT }), + crls (WITH COMPONENTS { ..., crlSet ABSENT }), + signerInfos (WITH COMPONENTS { ..., siSequence PRESENT }) +}) + +-- +-- 12. Digested-data content type +-- + +DigestedData ::= SEQUENCE { + version INTEGER {ddVer0(0)} (ddVer0), + digestAlgorithm DigestAlgorithmIdentifier, + contentInfo ContentInfo, + digest Digest +} + +-- +-- 13. Encrypted-data content type +-- + +EncryptedData ::= SEQUENCE { + version INTEGER {edVer0(0)} (edVer0), + encryptedContentInfo EncryptedContentInfo +} + +-- +-- 14. Object Identifiers +-- + +pkcs-7 OBJECT IDENTIFIER ::= + { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) 7 } +data OBJECT IDENTIFIER ::= { pkcs-7 1 } +signedData OBJECT IDENTIFIER ::= { pkcs-7 2 } +envelopedData OBJECT IDENTIFIER ::= { pkcs-7 3 } +signedAndEnvelopedData OBJECT IDENTIFIER ::= { pkcs-7 4 } +digestedData OBJECT IDENTIFIER ::= { pkcs-7 5 } +encryptedData OBJECT IDENTIFIER ::= { pkcs-7 6 } + +END
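The PKCS-7 rewrite above leans heavily on TYPE-IDENTIFIER object sets and the parameterized AlgorithmIdentifier, which makes hand-written test values tedious. One hedged way a smoke test could exercise the generated code is via asn1ct's sample-value helpers, assuming the module set has already been compiled and that asn1ct:value/2 and asn1ct:test/1,2 behave as documented:

    %% Let the compiler produce a sample value for one type and round-trip it,
    %% then round-trip every type defined in the module.
    {ok, _Sample} = asn1ct:value('PKCS-7', 'DigestInfo'),
    ok = asn1ct:test('PKCS-7', 'DigestInfo'),
    ok = asn1ct:test('PKCS-7').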
\ No newline at end of file diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-8.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-8.asn1 new file mode 100644 index 0000000000..266f90170a --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-8.asn1 @@ -0,0 +1,61 @@ +PKCS-8 {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-8(8) + modules(1) pkcs-8(1)} + +-- $Revision: 1.5 $ + +-- This module has been checked for conformance with the ASN.1 +-- standard by the OSS ASN.1 Tools + +DEFINITIONS IMPLICIT TAGS ::= + +BEGIN + +-- EXPORTS All -- +-- All types and values defined in this module is exported for use in other +-- ASN.1 modules. + +IMPORTS + +informationFramework + FROM UsefulDefinitions {joint-iso-itu-t(2) ds(5) module(1) + usefulDefinitions(0) 3} + +Attribute + FROM InformationFramework informationFramework + +AlgorithmIdentifier, ALGORITHM-IDENTIFIER + FROM PKCS-5 {iso(1) member-body(2) us(840) rsadsi(113549) + pkcs(1) pkcs-5(5) modules(16) pkcs-5(1)}; + +-- Private-key information syntax + +PrivateKeyInfo ::= SEQUENCE { + version Version, + privateKeyAlgorithm AlgorithmIdentifier {{PrivateKeyAlgorithms}}, + privateKey PrivateKey, + attributes [0] Attributes OPTIONAL } + +Version ::= INTEGER {v1(0)} (v1,...) + +PrivateKey ::= OCTET STRING + +Attributes ::= SET OF Attribute + +-- Encrypted private-key information syntax + +EncryptedPrivateKeyInfo ::= SEQUENCE { + encryptionAlgorithm AlgorithmIdentifier {{KeyEncryptionAlgorithms}}, + encryptedData EncryptedData +} + +EncryptedData ::= OCTET STRING + +PrivateKeyAlgorithms ALGORITHM-IDENTIFIER ::= { + ... -- For local profiles +} + +KeyEncryptionAlgorithms ALGORITHM-IDENTIFIER ::= { + ... -- For local profiles +} + +END diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-9.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-9.asn1 new file mode 100644 index 0000000000..cd561f4d7e --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-9.asn1 @@ -0,0 +1,391 @@ +PKCS-9 {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) +pkcs-9(9) modules(0) pkcs-9(1)} + +-- $Revision$ + +DEFINITIONS IMPLICIT TAGS ::= + +BEGIN + +-- EXPORTS All -- +-- All types and values defined in this module is exported for use in +-- other ASN.1 modules. 
+ +IMPORTS + +informationFramework, authenticationFramework, selectedAttributeTypes, + upperBounds , id-at + FROM UsefulDefinitions {joint-iso-itu-t ds(5) module(1) + usefulDefinitions(0) 3} + +ub-name + FROM UpperBounds upperBounds + +OBJECT-CLASS, ATTRIBUTE, MATCHING-RULE, Attribute, top, objectIdentifierMatch + FROM InformationFramework informationFramework + +ALGORITHM, Extensions, Time + FROM AuthenticationFramework authenticationFramework + +DirectoryString, octetStringMatch, caseIgnoreMatch, caseExactMatch, + generalizedTimeMatch, integerMatch, serialNumber + FROM SelectedAttributeTypes selectedAttributeTypes + +ContentInfo, SignerInfo + FROM CryptographicMessageSyntax-2009 {iso(1) member-body(2) us(840) + rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) modules(0) cms(1)} + +EncryptedPrivateKeyInfo + FROM PKCS-8 {iso(1) member-body(2) us(840) rsadsi(113549) + pkcs(1) pkcs-8(8) modules(1) pkcs-8(1)} + +PFX + FROM PKCS-12 {iso(1) member-body(2) us(840) rsadsi(113549) + pkcs(1) pkcs-12(12) modules(0) pkcs-12(1)} + +-- PKCS15Token +-- FROM PKCS-15 {iso(1) member-body(2) us(840) rsadsi(113549) +-- pkcs(1) pkcs-15(15) modules(1) pkcs-15(1)} +; + +-- Upper bounds +pkcs-9-ub-pkcs9String INTEGER ::= 255 +pkcs-9-ub-emailAddress INTEGER ::= pkcs-9-ub-pkcs9String +pkcs-9-ub-unstructuredName INTEGER ::= pkcs-9-ub-pkcs9String +pkcs-9-ub-unstructuredAddress INTEGER ::= pkcs-9-ub-pkcs9String +pkcs-9-ub-challengePassword INTEGER ::= pkcs-9-ub-pkcs9String +pkcs-9-ub-friendlyName INTEGER ::= pkcs-9-ub-pkcs9String +pkcs-9-ub-signingDescription INTEGER ::= pkcs-9-ub-pkcs9String +pkcs-9-ub-match INTEGER ::= pkcs-9-ub-pkcs9String +pkcs-9-ub-pseudonym INTEGER ::= ub-name +pkcs-9-ub-placeOfBirth INTEGER ::= ub-name + +-- Object Identifiers + +pkcs-9 OBJECT IDENTIFIER ::= {iso(1) member-body(2) us(840) + rsadsi(113549) pkcs(1) 9} + + -- Main arcs +pkcs-9-mo OBJECT IDENTIFIER ::= {pkcs-9 0} -- Modules branch +pkcs-9-oc OBJECT IDENTIFIER ::= {pkcs-9 24} -- Object class branch +pkcs-9-at OBJECT IDENTIFIER ::= {pkcs-9 25} -- Attribute branch, for new attributes +pkcs-9-sx OBJECT IDENTIFIER ::= {pkcs-9 26} -- For syntaxes (RFC 2252) +pkcs-9-mr OBJECT IDENTIFIER ::= {pkcs-9 27} -- Matching rules + + -- Object classes +pkcs-9-oc-pkcsEntity OBJECT IDENTIFIER ::= {pkcs-9-oc 1} +pkcs-9-oc-naturalPerson OBJECT IDENTIFIER ::= {pkcs-9-oc 2} + + -- Attributes +pkcs-9-at-emailAddress OBJECT IDENTIFIER ::= {pkcs-9 1} +pkcs-9-at-unstructuredName OBJECT IDENTIFIER ::= {pkcs-9 2} +pkcs-9-at-contentType OBJECT IDENTIFIER ::= {pkcs-9 3} +pkcs-9-at-messageDigest OBJECT IDENTIFIER ::= {pkcs-9 4} +pkcs-9-at-signingTime OBJECT IDENTIFIER ::= {pkcs-9 5} +pkcs-9-at-counterSignature OBJECT IDENTIFIER ::= {pkcs-9 6} +pkcs-9-at-challengePassword OBJECT IDENTIFIER ::= {pkcs-9 7} +pkcs-9-at-unstructuredAddress OBJECT IDENTIFIER ::= {pkcs-9 8} +pkcs-9-at-extendedCertificateAttributes OBJECT IDENTIFIER ::= {pkcs-9 9} + +-- Obsolete (?) attribute identifiers, purportedly from "tentative +-- PKCS #9 draft" +-- pkcs-9-at-issuerAndSerialNumber OBJECT IDENTIFIER ::= {pkcs-9 10} +-- pkcs-9-at-passwordCheck OBJECT IDENTIFIER ::= {pkcs-9 11} +-- pkcs-9-at-publicKey OBJECT IDENTIFIER ::= {pkcs-9 12} + +pkcs-9-at-signingDescription OBJECT IDENTIFIER ::= {pkcs-9 13} +pkcs-9-at-extensionRequest OBJECT IDENTIFIER ::= {pkcs-9 14} +pkcs-9-at-smimeCapabilities OBJECT IDENTIFIER ::= {pkcs-9 15} + +-- Unused (?) +-- pkcs-9-at-? OBJECT IDENTIFIER ::= {pkcs-9 17} +-- pkcs-9-at-? OBJECT IDENTIFIER ::= {pkcs-9 18} +-- pkcs-9-at-? 
OBJECT IDENTIFIER ::= {pkcs-9 19} + +pkcs-9-at-friendlyName OBJECT IDENTIFIER ::= {pkcs-9 20} +pkcs-9-at-localKeyId OBJECT IDENTIFIER ::= {pkcs-9 21} +pkcs-9-at-userPKCS12 OBJECT IDENTIFIER ::= {2 16 840 1 113730 3 1 216} +pkcs-9-at-pkcs15Token OBJECT IDENTIFIER ::= {pkcs-9-at 1} +pkcs-9-at-encryptedPrivateKeyInfo OBJECT IDENTIFIER ::= {pkcs-9-at 2} +pkcs-9-at-randomNonce OBJECT IDENTIFIER ::= {pkcs-9-at 3} +pkcs-9-at-sequenceNumber OBJECT IDENTIFIER ::= {pkcs-9-at 4} +pkcs-9-at-pkcs7PDU OBJECT IDENTIFIER ::= {pkcs-9-at 5} + + -- IETF PKIX Attribute branch +ietf-at OBJECT IDENTIFIER ::= {1 3 6 1 5 5 7 9} + +pkcs-9-at-dateOfBirth OBJECT IDENTIFIER ::= {ietf-at 1} +pkcs-9-at-placeOfBirth OBJECT IDENTIFIER ::= {ietf-at 2} +pkcs-9-at-gender OBJECT IDENTIFIER ::= {ietf-at 3} +pkcs-9-at-countryOfCitizenship OBJECT IDENTIFIER ::= {ietf-at 4} +pkcs-9-at-countryOfResidence OBJECT IDENTIFIER ::= {ietf-at 5} + + -- Syntaxes (for use with LDAP accessible directories) +pkcs-9-sx-pkcs9String OBJECT IDENTIFIER ::= {pkcs-9-sx 1} +pkcs-9-sx-signingTime OBJECT IDENTIFIER ::= {pkcs-9-sx 2} + + -- Matching rules +pkcs-9-mr-caseIgnoreMatch OBJECT IDENTIFIER ::= {pkcs-9-mr 1} +pkcs-9-mr-signingTimeMatch OBJECT IDENTIFIER ::= {pkcs-9-mr 2} + + -- Arcs with attributes defined elsewhere +smime OBJECT IDENTIFIER ::= {pkcs-9 16} + -- Main arc for S/MIME (RFC 2633) +certTypes OBJECT IDENTIFIER ::= {pkcs-9 22} + -- Main arc for certificate types defined in PKCS #12 +crlTypes OBJECT IDENTIFIER ::= {pkcs-9 23} + -- Main arc for crl types defined in PKCS #12 + + -- Other object identifiers +id-at-pseudonym OBJECT IDENTIFIER ::= {id-at 65} + +-- Useful types + +PKCS9String {INTEGER : maxSize} ::= CHOICE { + ia5String IA5String (SIZE(1..maxSize)), + directoryString DirectoryString {maxSize} +} + +-- Object classes + +pkcsEntity OBJECT-CLASS ::= { + SUBCLASS OF { top } + KIND auxiliary + MAY CONTAIN { PKCSEntityAttributeSet } + ID pkcs-9-oc-pkcsEntity +} + +naturalPerson OBJECT-CLASS ::= { + SUBCLASS OF { top } + KIND auxiliary + MAY CONTAIN { NaturalPersonAttributeSet } + ID pkcs-9-oc-naturalPerson +} + +-- Attribute sets + +PKCSEntityAttributeSet ATTRIBUTE ::= { + pKCS7PDU | + userPKCS12 | +-- pKCS15Token | + encryptedPrivateKeyInfo, + ... -- For future extensions +} + +NaturalPersonAttributeSet ATTRIBUTE ::= { + emailAddress | + unstructuredName | + unstructuredAddress | + dateOfBirth | + placeOfBirth | + gender | + countryOfCitizenship | + countryOfResidence | + pseudonym | + serialNumber, + ... 
-- For future extensions +} + +-- Attributes + +pKCS7PDU ATTRIBUTE ::= { + WITH SYNTAX ContentInfo + ID pkcs-9-at-pkcs7PDU +} + +userPKCS12 ATTRIBUTE ::= { + WITH SYNTAX PFX + ID pkcs-9-at-userPKCS12 +} + +-- pKCS15Token ATTRIBUTE ::= { +-- WITH SYNTAX PKCS15Token +-- ID pkcs-9-at-pkcs15Token +-- } + +encryptedPrivateKeyInfo ATTRIBUTE ::= { + WITH SYNTAX EncryptedPrivateKeyInfo + ID pkcs-9-at-encryptedPrivateKeyInfo +} + +emailAddress ATTRIBUTE ::= { + WITH SYNTAX IA5String (SIZE(1..pkcs-9-ub-emailAddress)) + EQUALITY MATCHING RULE pkcs9CaseIgnoreMatch + ID pkcs-9-at-emailAddress +} + +unstructuredName ATTRIBUTE ::= { + WITH SYNTAX PKCS9String {pkcs-9-ub-unstructuredName} + EQUALITY MATCHING RULE pkcs9CaseIgnoreMatch + ID pkcs-9-at-unstructuredName +} + +unstructuredAddress ATTRIBUTE ::= { + WITH SYNTAX DirectoryString {pkcs-9-ub-unstructuredAddress} + EQUALITY MATCHING RULE caseIgnoreMatch + ID pkcs-9-at-unstructuredAddress +} + +dateOfBirth ATTRIBUTE ::= { + WITH SYNTAX GeneralizedTime + EQUALITY MATCHING RULE generalizedTimeMatch + SINGLE VALUE TRUE + ID pkcs-9-at-dateOfBirth +} + +placeOfBirth ATTRIBUTE ::= { + WITH SYNTAX DirectoryString {pkcs-9-ub-placeOfBirth} + EQUALITY MATCHING RULE caseExactMatch + SINGLE VALUE TRUE + ID pkcs-9-at-placeOfBirth +} + +gender ATTRIBUTE ::= { + WITH SYNTAX PrintableString (SIZE(1) ^ FROM ("M" | "F" | "m" | "f")) + EQUALITY MATCHING RULE caseIgnoreMatch + SINGLE VALUE TRUE + ID pkcs-9-at-gender +} + +countryOfCitizenship ATTRIBUTE ::= { + WITH SYNTAX PrintableString (SIZE(2))(CONSTRAINED BY { + -- Must be a two-letter country acronym in accordance with + -- ISO/IEC 3166 --}) + EQUALITY MATCHING RULE caseIgnoreMatch + ID pkcs-9-at-countryOfCitizenship +} + +countryOfResidence ATTRIBUTE ::= { + WITH SYNTAX PrintableString (SIZE(2))(CONSTRAINED BY { + -- Must be a two-letter country acronym in accordance with + -- ISO/IEC 3166 --}) + EQUALITY MATCHING RULE caseIgnoreMatch + ID pkcs-9-at-countryOfResidence +} + +pseudonym ATTRIBUTE ::= { + WITH SYNTAX DirectoryString {pkcs-9-ub-pseudonym} + EQUALITY MATCHING RULE caseExactMatch + ID id-at-pseudonym +} + +contentType ATTRIBUTE ::= { + WITH SYNTAX ContentType + EQUALITY MATCHING RULE objectIdentifierMatch + SINGLE VALUE TRUE + ID pkcs-9-at-contentType +} + +ContentType ::= OBJECT IDENTIFIER + +messageDigest ATTRIBUTE ::= { + WITH SYNTAX MessageDigest + EQUALITY MATCHING RULE octetStringMatch + SINGLE VALUE TRUE + ID pkcs-9-at-messageDigest +} + +MessageDigest ::= OCTET STRING + +signingTime ATTRIBUTE ::= { + WITH SYNTAX SigningTime + EQUALITY MATCHING RULE signingTimeMatch + SINGLE VALUE TRUE + ID pkcs-9-at-signingTime +} + +SigningTime ::= Time -- imported from ISO/IEC 9594-8 + +randomNonce ATTRIBUTE ::= { + WITH SYNTAX RandomNonce + EQUALITY MATCHING RULE octetStringMatch + SINGLE VALUE TRUE + ID pkcs-9-at-randomNonce +} + +RandomNonce ::= OCTET STRING (SIZE(4..MAX)) -- At least four bytes long + +sequenceNumber ATTRIBUTE ::= { + WITH SYNTAX SequenceNumber + EQUALITY MATCHING RULE integerMatch + SINGLE VALUE TRUE + ID pkcs-9-at-sequenceNumber +} + +SequenceNumber ::= INTEGER (1..MAX) + +counterSignature ATTRIBUTE ::= { + WITH SYNTAX SignerInfo + ID pkcs-9-at-counterSignature +} + +challengePassword ATTRIBUTE ::= { + WITH SYNTAX DirectoryString {pkcs-9-ub-challengePassword} + EQUALITY MATCHING RULE caseExactMatch + SINGLE VALUE TRUE + ID pkcs-9-at-challengePassword +} + +extensionRequest ATTRIBUTE ::= { + WITH SYNTAX ExtensionRequest + SINGLE VALUE TRUE + ID pkcs-9-at-extensionRequest +} + 
+ExtensionRequest ::= Extensions + +extendedCertificateAttributes ATTRIBUTE ::= { + WITH SYNTAX SET OF Attribute + SINGLE VALUE TRUE + ID pkcs-9-at-extendedCertificateAttributes +} + +friendlyName ATTRIBUTE ::= { + WITH SYNTAX BMPString (SIZE(1..pkcs-9-ub-friendlyName)) + EQUALITY MATCHING RULE caseIgnoreMatch + SINGLE VALUE TRUE + ID pkcs-9-at-friendlyName +} + +localKeyId ATTRIBUTE ::= { + WITH SYNTAX OCTET STRING + EQUALITY MATCHING RULE octetStringMatch + SINGLE VALUE TRUE + ID pkcs-9-at-localKeyId +} + +signingDescription ATTRIBUTE ::= { + WITH SYNTAX DirectoryString {pkcs-9-ub-signingDescription} + EQUALITY MATCHING RULE caseIgnoreMatch + SINGLE VALUE TRUE + ID pkcs-9-at-signingDescription +} + +smimeCapabilities ATTRIBUTE ::= { + WITH SYNTAX SMIMECapabilities + SINGLE VALUE TRUE + ID pkcs-9-at-smimeCapabilities +} + +SMIMECapabilities ::= SEQUENCE OF SMIMECapability + +SMIMECapability ::= SEQUENCE { + algorithm ALGORITHM.&id ({SMIMEv3Algorithms}), + parameters ALGORITHM.&Type ({SMIMEv3Algorithms}{@algorithm}) +} + +SMIMEv3Algorithms ALGORITHM ::= {...-- See RFC 2633 --} + + -- Matching rules + +pkcs9CaseIgnoreMatch MATCHING-RULE ::= { + SYNTAX PKCS9String {pkcs-9-ub-match} + ID pkcs-9-mr-caseIgnoreMatch +} + +signingTimeMatch MATCHING-RULE ::= { + SYNTAX SigningTime + ID pkcs-9-mr-signingTimeMatch +} + +END
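PKCS-9 above pulls in types from PKCS-8, PKCS-12, the CMS module and the X.500 framework modules, so the compiler must be able to resolve those IMPORTS: either the imported modules are compiled first (the .asn1db files written for them are then read back automatically), or the whole group is compiled together via a module-set (.set.asn) file. A minimal sketch of the first approach, with the dependency list abbreviated and an arbitrary simple type used for the round trip; none of this is defined by the change itself.

    %% Illustrative order only; the X.500 and CMS modules that PKCS-9
    %% imports from must also have been compiled, or be compiled in the
    %% same run, for the IMPORTS to resolve.
    1> asn1ct:compile('PKCS-8', [ber]).
    ok
    2> asn1ct:compile('PKCS-9', [ber]).
    ok
    3> {ok, Bin} = 'PKCS-9':encode('SequenceNumber', 1).
    4> {ok, 1} = 'PKCS-9':decode('SequenceNumber', Bin).
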
\ No newline at end of file diff --git a/lib/asn1/test/asn1_SUITE_data/x420/PKCS7BodyPartType.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS7BodyPartType.asn1 index 525ee3c5ec..1bcc2281a1 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/PKCS7BodyPartType.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS7BodyPartType.asn1 @@ -6,7 +6,7 @@ BEGIN IMPORTS -- PKCS#7 ContentInfo - FROM PKCS7 {iso(1) member-body(2) usa(840) rsadsi(113549) pkcs(1) + FROM PKCS-7 {iso(1) member-body(2) usa(840) rsadsi(113549) pkcs(1) 7 module(0)} -- module not formally defined in the PKCS#7document, therefore defined in Annex O -- IPMS Information Objects diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-CommonTypes-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-CommonTypes-2009.asn1 new file mode 100644 index 0000000000..fde5bddbf3 --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-CommonTypes-2009.asn1 @@ -0,0 +1,166 @@ + PKIX-CommonTypes-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)} + + DEFINITIONS EXPLICIT TAGS ::= + BEGIN + + -- ATTRIBUTE + -- + -- Describe the set of data associated with an attribute of some type + -- + -- &id is an OID identifying the attribute + -- &Type is the ASN.1 type structure for the attribute; not all + -- attributes have a data structure, so this field is optional + -- &minCount contains the minimum number of times the attribute can + -- occur in an AttributeSet + -- &maxCount contains the maximum number of times the attribute can + -- appear in an AttributeSet + -- Note: this cannot be automatically enforced as the field + -- cannot be defaulted to MAX. + -- &equality-match contains information about how matching should be + -- done + -- + -- Currently we are using two different prefixes for attributes. + -- + -- at- for certificate attributes + -- aa- for CMS attributes + -- + + ATTRIBUTE ::= CLASS { + &id OBJECT IDENTIFIER UNIQUE, + &Type OPTIONAL, + &equality-match MATCHING-RULE OPTIONAL, + &minCount INTEGER DEFAULT 1, + &maxCount INTEGER OPTIONAL + } WITH SYNTAX { + [TYPE &Type] + [EQUALITY MATCHING RULE &equality-match] + [COUNTS [MIN &minCount] [MAX &maxCount]] + IDENTIFIED BY &id + } + + -- Specification of MATCHING-RULE information object class + -- + + MATCHING-RULE ::= CLASS { + &ParentMatchingRules MATCHING-RULE OPTIONAL, + &AssertionType OPTIONAL, + &uniqueMatchIndicator ATTRIBUTE OPTIONAL, + &id OBJECT IDENTIFIER UNIQUE + } + WITH SYNTAX { + [PARENT &ParentMatchingRules] + [SYNTAX &AssertionType] + [UNIQUE-MATCH-INDICATOR &uniqueMatchIndicator] + ID &id + } + + -- AttributeSet + -- + -- Used when a set of attributes is to occur. + -- + -- type contains the identifier of the attribute + -- values contains a set of values where the structure of the ASN.1 + -- is defined by the attribute + -- + -- The parameter contains the set of objects describing + -- those attributes that can occur in this location. + -- + + AttributeSet{ATTRIBUTE:AttrSet} ::= SEQUENCE { + type ATTRIBUTE.&id({AttrSet}), + values SET SIZE (1..MAX) OF ATTRIBUTE. 
+ &Type({AttrSet}{@type}) + } + + -- SingleAttribute + -- + -- Used for a single valued attribute + -- + -- The parameter contains the set of objects describing the + -- attributes that can occur in this location + -- + + SingleAttribute{ATTRIBUTE:AttrSet} ::= SEQUENCE { + type ATTRIBUTE.&id({AttrSet}), + value ATTRIBUTE.&Type({AttrSet}{@type}) + } + + -- EXTENSION + -- + -- This class definition is used to describe the association of + -- object identifier and ASN.1 type structure for extensions + -- + -- All extensions are prefixed with ext- + -- + -- &id contains the object identifier for the extension + -- &ExtnType specifies the ASN.1 type structure for the extension + -- &Critical contains the set of legal values for the critical field. + -- This is normally {TRUE|FALSE} but in some instances may be + -- restricted to just one of these values. + -- + + EXTENSION ::= CLASS { + &id OBJECT IDENTIFIER UNIQUE, + &ExtnType, + &Critical BOOLEAN DEFAULT {TRUE | FALSE } + } WITH SYNTAX { + SYNTAX &ExtnType IDENTIFIED BY &id + [CRITICALITY &Critical] + } + + -- Extensions + -- + -- Used for a sequence of extensions. + -- + -- The parameter contains the set of legal extensions that can + -- occur in this sequence. + -- + + Extensions{EXTENSION:ExtensionSet} ::= + SEQUENCE SIZE (1..MAX) OF Extension{{ExtensionSet}} + + -- Extension + -- + -- Used for a single extension + -- + -- The parameter contains the set of legal extensions that can + -- occur in this extension. + -- + -- The restriction on the critical field has been commented out + -- the authors are not completely sure it is correct. + -- The restriction could be done using custom code rather than + -- compiler-generated code, however. + -- + + Extension{EXTENSION:ExtensionSet} ::= SEQUENCE { + extnID EXTENSION.&id({ExtensionSet}), + critical BOOLEAN + -- (EXTENSION.&Critical({ExtensionSet}{@extnID})) + DEFAULT FALSE, + extnValue OCTET STRING (CONTAINING + EXTENSION.&ExtnType({ExtensionSet}{@extnID})) + -- contains the DER encoding of the ASN.1 value + -- corresponding to the extension type identified + -- by extnID + } + + -- Security Category + -- + -- Security categories are used both for specifying clearances and + -- for labeling objects. We move this here from RFC 3281 so that + -- they will use a common single object class to express this + -- information. + -- + + SECURITY-CATEGORY ::= TYPE-IDENTIFIER + + SecurityCategory{SECURITY-CATEGORY:Supported} ::= SEQUENCE { + type [0] IMPLICIT SECURITY-CATEGORY. + &id({Supported}), + value [1] EXPLICIT SECURITY-CATEGORY. + &Type({Supported}{@type}) + } + + END diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-X400Address-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-X400Address-2009.asn1 new file mode 100644 index 0000000000..41cbaea67e --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-X400Address-2009.asn1 @@ -0,0 +1,300 @@ + -- + -- This module is used to isolate all the X.400 naming information. + -- There is no reason to expect this to occur in a PKIX certificate. 
+ -- + + PKIX-X400Address-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-x400address-02(60) } + DEFINITIONS EXPLICIT TAGS ::= + BEGIN + + -- X.400 address syntax starts here + + ORAddress ::= SEQUENCE { + built-in-standard-attributes BuiltInStandardAttributes, + built-in-domain-defined-attributes + BuiltInDomainDefinedAttributes OPTIONAL, + + -- see also teletex-domain-defined-attributes + extension-attributes ExtensionAttributes OPTIONAL } + + -- Built-in Standard Attributes + + BuiltInStandardAttributes ::= SEQUENCE { + country-name CountryName OPTIONAL, + administration-domain-name AdministrationDomainName OPTIONAL, + network-address [0] IMPLICIT NetworkAddress OPTIONAL, + -- see also extended-network-address + terminal-identifier [1] IMPLICIT TerminalIdentifier OPTIONAL, + private-domain-name [2] PrivateDomainName OPTIONAL, + organization-name [3] IMPLICIT OrganizationName OPTIONAL, + -- see also teletex-organization-name + numeric-user-identifier [4] IMPLICIT NumericUserIdentifier + OPTIONAL, + personal-name [5] IMPLICIT PersonalName OPTIONAL, + -- see also teletex-personal-name + organizational-unit-names [6] IMPLICIT OrganizationalUnitNames + OPTIONAL } + -- see also teletex-organizational-unit-names + + CountryName ::= [APPLICATION 1] CHOICE { + x121-dcc-code NumericString + (SIZE (ub-country-name-numeric-length)), + iso-3166-alpha2-code PrintableString + (SIZE (ub-country-name-alpha-length)) } + + AdministrationDomainName ::= [APPLICATION 2] CHOICE { + numeric NumericString (SIZE (0..ub-domain-name-length)), + printable PrintableString (SIZE (0..ub-domain-name-length)) } + + NetworkAddress ::= X121Address -- see also extended-network-address + + X121Address ::= NumericString (SIZE (1..ub-x121-address-length)) + + TerminalIdentifier ::= PrintableString (SIZE + (1..ub-terminal-id-length)) + + PrivateDomainName ::= CHOICE { + numeric NumericString (SIZE (1..ub-domain-name-length)), + printable PrintableString (SIZE (1..ub-domain-name-length)) } + + OrganizationName ::= PrintableString + (SIZE (1..ub-organization-name-length)) + -- see also teletex-organization-name + + NumericUserIdentifier ::= NumericString + (SIZE (1..ub-numeric-user-id-length)) + + PersonalName ::= SET { + surname [0] IMPLICIT PrintableString + (SIZE (1..ub-surname-length)), + given-name [1] IMPLICIT PrintableString + (SIZE (1..ub-given-name-length)) OPTIONAL, + initials [2] IMPLICIT PrintableString + (SIZE (1..ub-initials-length)) OPTIONAL, + generation-qualifier [3] IMPLICIT PrintableString + (SIZE (1..ub-generation-qualifier-length)) + OPTIONAL } + -- see also teletex-personal-name + + OrganizationalUnitNames ::= SEQUENCE SIZE (1..ub-organizational-units) + OF OrganizationalUnitName + -- see also teletex-organizational-unit-names + + OrganizationalUnitName ::= PrintableString (SIZE + (1..ub-organizational-unit-name-length)) + + -- Built-in Domain-defined Attributes + + BuiltInDomainDefinedAttributes ::= SEQUENCE SIZE + (1..ub-domain-defined-attributes) OF + BuiltInDomainDefinedAttribute + + BuiltInDomainDefinedAttribute ::= SEQUENCE { + type PrintableString (SIZE + (1..ub-domain-defined-attribute-type-length)), + value PrintableString (SIZE + (1..ub-domain-defined-attribute-value-length)) } + + -- Extension Attributes + + ExtensionAttributes ::= SET SIZE (1..ub-extension-attributes) OF + ExtensionAttribute + + EXTENSION-ATTRIBUTE ::= CLASS { + &id INTEGER (0..ub-extension-attributes) UNIQUE, + &Type + } WITH SYNTAX { &Type IDENTIFIED BY 
&id } + + ExtensionAttribute ::= SEQUENCE { + extension-attribute-type [0] IMPLICIT EXTENSION-ATTRIBUTE. + &id({SupportedExtensionAttributes}), + extension-attribute-value [1] EXTENSION-ATTRIBUTE. + &Type({SupportedExtensionAttributes} + {@extension-attribute-type})} + + SupportedExtensionAttributes EXTENSION-ATTRIBUTE ::= { + ea-commonName | ea-teletexCommonName | ea-teletexOrganizationName + | ea-teletexPersonalName | ea-teletexOrganizationalUnitNames | + ea-pDSName | ea-physicalDeliveryCountryName | ea-postalCode | + ea-physicalDeliveryOfficeName | ea-physicalDeliveryOfficeNumber | + ea-extensionORAddressComponents | ea-physicalDeliveryPersonalName + | ea-physicalDeliveryOrganizationName | + ea-extensionPhysicalDeliveryAddressComponents | + ea-unformattedPostalAddress | ea-streetAddress | + ea-postOfficeBoxAddress | ea-posteRestanteAddress | + ea-uniquePostalName | ea-localPostalAttributes | + ea-extendedNetworkAddress | ea-terminalType | + ea-teletexDomainDefinedAttributes, ... } + + -- Extension types and attribute values + + ea-commonName EXTENSION-ATTRIBUTE ::= { PrintableString + (SIZE (1..ub-common-name-length)) IDENTIFIED BY 1 } + + ea-teletexCommonName EXTENSION-ATTRIBUTE ::= {TeletexString + (SIZE (1..ub-common-name-length)) IDENTIFIED BY 2 } + + ea-teletexOrganizationName EXTENSION-ATTRIBUTE::= { TeletexString + (SIZE (1..ub-organization-name-length)) IDENTIFIED BY 3 } + + ea-teletexPersonalName EXTENSION-ATTRIBUTE ::= {SET { + surname [0] IMPLICIT TeletexString + (SIZE (1..ub-surname-length)), + given-name [1] IMPLICIT TeletexString + (SIZE (1..ub-given-name-length)) OPTIONAL, + initials [2] IMPLICIT TeletexString + (SIZE (1..ub-initials-length)) OPTIONAL, + generation-qualifier [3] IMPLICIT TeletexString + (SIZE (1..ub-generation-qualifier-length)) + OPTIONAL } IDENTIFIED BY 4 } + + ea-teletexOrganizationalUnitNames EXTENSION-ATTRIBUTE ::= + { SEQUENCE SIZE (1..ub-organizational-units) OF + TeletexOrganizationalUnitName IDENTIFIED BY 5 } + + TeletexOrganizationalUnitName ::= TeletexString + (SIZE (1..ub-organizational-unit-name-length)) + + ea-pDSName EXTENSION-ATTRIBUTE ::= {PrintableString + (SIZE (1..ub-pds-name-length)) IDENTIFIED BY 7 } + + ea-physicalDeliveryCountryName EXTENSION-ATTRIBUTE ::= { CHOICE { + x121-dcc-code NumericString (SIZE + (ub-country-name-numeric-length)), + iso-3166-alpha2-code PrintableString + (SIZE (ub-country-name-alpha-length)) } + IDENTIFIED BY 8 } + + ea-postalCode EXTENSION-ATTRIBUTE ::= { CHOICE { + numeric-code NumericString (SIZE (1..ub-postal-code-length)), + printable-code PrintableString (SIZE (1..ub-postal-code-length)) } + IDENTIFIED BY 9 } + + ea-physicalDeliveryOfficeName EXTENSION-ATTRIBUTE ::= + { PDSParameter IDENTIFIED BY 10 } + + ea-physicalDeliveryOfficeNumber EXTENSION-ATTRIBUTE ::= + {PDSParameter IDENTIFIED BY 11 } + + ea-extensionORAddressComponents EXTENSION-ATTRIBUTE ::= + {PDSParameter IDENTIFIED BY 12 } + + ea-physicalDeliveryPersonalName EXTENSION-ATTRIBUTE ::= + {PDSParameter IDENTIFIED BY 13} + + ea-physicalDeliveryOrganizationName EXTENSION-ATTRIBUTE ::= + {PDSParameter IDENTIFIED BY 14 } + + ea-extensionPhysicalDeliveryAddressComponents EXTENSION-ATTRIBUTE ::= + {PDSParameter IDENTIFIED BY 15 } + + ea-unformattedPostalAddress EXTENSION-ATTRIBUTE ::= { SET { + printable-address SEQUENCE SIZE (1..ub-pds-physical-address-lines) + OF PrintableString (SIZE (1..ub-pds-parameter-length)) + OPTIONAL, + teletex-string TeletexString + (SIZE (1..ub-unformatted-address-length)) OPTIONAL } + IDENTIFIED BY 16 } + + 
ea-streetAddress EXTENSION-ATTRIBUTE ::= + {PDSParameter IDENTIFIED BY 17 } + + ea-postOfficeBoxAddress EXTENSION-ATTRIBUTE ::= + {PDSParameter IDENTIFIED BY 18 } + + ea-posteRestanteAddress EXTENSION-ATTRIBUTE ::= + {PDSParameter IDENTIFIED BY 19 } + + ea-uniquePostalName EXTENSION-ATTRIBUTE ::= + { PDSParameter IDENTIFIED BY 20 } + + ea-localPostalAttributes EXTENSION-ATTRIBUTE ::= + {PDSParameter IDENTIFIED BY 21 } + PDSParameter ::= SET { + printable-string PrintableString + (SIZE(1..ub-pds-parameter-length)) OPTIONAL, + teletex-string TeletexString + (SIZE(1..ub-pds-parameter-length)) OPTIONAL } + + ea-extendedNetworkAddress EXTENSION-ATTRIBUTE ::= { + CHOICE { + e163-4-address SEQUENCE { + number [0] IMPLICIT NumericString + (SIZE (1..ub-e163-4-number-length)), + sub-address [1] IMPLICIT NumericString + (SIZE (1..ub-e163-4-sub-address-length)) OPTIONAL + }, + psap-address [0] IMPLICIT PresentationAddress + } IDENTIFIED BY 22 + } + + PresentationAddress ::= SEQUENCE { + pSelector [0] EXPLICIT OCTET STRING OPTIONAL, + sSelector [1] EXPLICIT OCTET STRING OPTIONAL, + tSelector [2] EXPLICIT OCTET STRING OPTIONAL, + nAddresses [3] EXPLICIT SET SIZE (1..MAX) OF OCTET STRING } + + ea-terminalType EXTENSION-ATTRIBUTE ::= {INTEGER { + telex (3), + teletex (4), + g3-facsimile (5), + g4-facsimile (6), + ia5-terminal (7), + videotex (8) } (0..ub-integer-options) + IDENTIFIED BY 23 } + + -- Extension Domain-defined Attributes + + ea-teletexDomainDefinedAttributes EXTENSION-ATTRIBUTE ::= + { SEQUENCE SIZE (1..ub-domain-defined-attributes) OF + TeletexDomainDefinedAttribute IDENTIFIED BY 6 } + + TeletexDomainDefinedAttribute ::= SEQUENCE { + type TeletexString + (SIZE (1..ub-domain-defined-attribute-type-length)), + value TeletexString + (SIZE (1..ub-domain-defined-attribute-value-length)) } + + -- specifications of Upper Bounds MUST be regarded as mandatory + -- from Annex B of ITU-T X.411 Reference Definition of MTS Parameter + -- Upper Bounds + -- Upper Bounds + ub-match INTEGER ::= 128 + ub-common-name-length INTEGER ::= 64 + ub-country-name-alpha-length INTEGER ::= 2 + ub-country-name-numeric-length INTEGER ::= 3 + ub-domain-defined-attributes INTEGER ::= 4 + ub-domain-defined-attribute-type-length INTEGER ::= 8 + ub-domain-defined-attribute-value-length INTEGER ::= 128 + ub-domain-name-length INTEGER ::= 16 + ub-extension-attributes INTEGER ::= 256 + ub-e163-4-number-length INTEGER ::= 15 + ub-e163-4-sub-address-length INTEGER ::= 40 + ub-generation-qualifier-length INTEGER ::= 3 + ub-given-name-length INTEGER ::= 16 + ub-initials-length INTEGER ::= 5 + ub-integer-options INTEGER ::= 256 + ub-numeric-user-id-length INTEGER ::= 32 + ub-organization-name-length INTEGER ::= 64 + ub-organizational-unit-name-length INTEGER ::= 32 + ub-organizational-units INTEGER ::= 4 + ub-pds-name-length INTEGER ::= 16 + ub-pds-parameter-length INTEGER ::= 30 + ub-pds-physical-address-lines INTEGER ::= 6 + ub-postal-code-length INTEGER ::= 16 + ub-surname-length INTEGER ::= 40 + ub-terminal-id-length INTEGER ::= 24 + ub-unformatted-address-length INTEGER ::= 180 + ub-x121-address-length INTEGER ::= 16 + + -- Note - upper bounds on string types, such as TeletexString, are + -- measured in characters. Excepting PrintableString or IA5String, a + -- significantly greater number of octets will be required to hold + -- such a value. As a minimum, 16 octets or twice the specified + -- upper bound, whichever is the larger, should be allowed for + -- TeletexString. 
For UTF8String or UniversalString, at least four + -- times the upper bound should be allowed. + + END diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1-PSS-OAEP-Algorithms-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1-PSS-OAEP-Algorithms-2009.asn1 new file mode 100644 index 0000000000..b1232fb8f2 --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1-PSS-OAEP-Algorithms-2009.asn1 @@ -0,0 +1,308 @@ + PKIX1-PSS-OAEP-Algorithms-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-rsa-pkalgs-02(54)} + DEFINITIONS EXPLICIT TAGS ::= + BEGIN + IMPORTS + + AlgorithmIdentifier{}, ALGORITHM, DIGEST-ALGORITHM, KEY-TRANSPORT, + SIGNATURE-ALGORITHM, PUBLIC-KEY, SMIME-CAPS + FROM AlgorithmInformation-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) + id-mod-algorithmInformation-02(58)} + + id-sha1, mda-sha1, pk-rsa, RSAPublicKey + FROM PKIXAlgs-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) + id-mod-pkix1-algorithms2008-02(56)}; + + -- ============================ + -- Object Set exports + -- ============================ + -- + -- Define top-level symbols with all of the objects defined for + -- export to other modules. These objects would be included as part + -- of an Object Set to restrict the set of legal values. + -- + + PublicKeys PUBLIC-KEY ::= { pk-rsaSSA-PSS | pk-rsaES-OAEP, ... } + SignatureAlgs SIGNATURE-ALGORITHM ::= { sa-rsaSSA-PSS, ...} + KeyTransportAlgs KEY-TRANSPORT ::= { kta-rsaES-OAEP, ... } + HashAlgs DIGEST-ALGORITHM ::= { mda-sha224 | mda-sha256 | mda-sha384 + | mda-sha512, ... } + SMimeCaps SMIME-CAPS ::= { + sa-rsaSSA-PSS.&smimeCaps | + kta-rsaES-OAEP.&smimeCaps, + ... 
+ } + + -- ============================= + -- Algorithm Objects + -- ============================= + + -- + -- Public key object for PSS signatures + -- + + pk-rsaSSA-PSS PUBLIC-KEY ::= { + IDENTIFIER id-RSASSA-PSS + KEY RSAPublicKey + PARAMS TYPE RSASSA-PSS-params ARE optional + -- Private key format not in this module -- + CERT-KEY-USAGE { nonRepudiation, digitalSignature, + keyCertSign, cRLSign } + } + + -- + -- Signature algorithm definition for PSS signatures + -- + + sa-rsaSSA-PSS SIGNATURE-ALGORITHM ::= { + IDENTIFIER id-RSASSA-PSS + PARAMS TYPE RSASSA-PSS-params ARE required + HASHES { mda-sha1 | mda-sha224 | mda-sha256 | mda-sha384 + | mda-sha512 } + PUBLIC-KEYS { pk-rsa | pk-rsaSSA-PSS } + SMIME-CAPS { IDENTIFIED BY id-RSASSA-PSS } + } + + -- + -- Signature algorithm definitions for PKCS v1.5 signatures + -- + + sa-sha224WithRSAEncryption SIGNATURE-ALGORITHM ::= { + IDENTIFIER sha224WithRSAEncryption + PARAMS TYPE NULL ARE required + HASHES { mda-sha224 } + PUBLIC-KEYS { pk-rsa } + SMIME-CAPS { IDENTIFIED BY sha224WithRSAEncryption } + } + sha224WithRSAEncryption OBJECT IDENTIFIER ::= { pkcs-1 14 } + + sa-sha256WithRSAEncryption SIGNATURE-ALGORITHM ::= { + IDENTIFIER sha256WithRSAEncryption + PARAMS TYPE NULL ARE required + HASHES { mda-sha256 } + PUBLIC-KEYS { pk-rsa } + SMIME-CAPS { IDENTIFIED BY sha256WithRSAEncryption } + } + sha256WithRSAEncryption OBJECT IDENTIFIER ::= { pkcs-1 11 } + + sa-sha384WithRSAEncryption SIGNATURE-ALGORITHM ::= { + IDENTIFIER sha384WithRSAEncryption + PARAMS TYPE NULL ARE required + HASHES { mda-sha384 } + PUBLIC-KEYS { pk-rsa } + SMIME-CAPS { IDENTIFIED BY sha384WithRSAEncryption } + } + sha384WithRSAEncryption OBJECT IDENTIFIER ::= { pkcs-1 12 } + + sa-sha512WithRSAEncryption SIGNATURE-ALGORITHM ::= { + IDENTIFIER sha512WithRSAEncryption + PARAMS TYPE NULL ARE required + HASHES { mda-sha512 } + PUBLIC-KEYS { pk-rsa } + SMIME-CAPS { IDENTIFIED BY sha512WithRSAEncryption } + } + sha512WithRSAEncryption OBJECT IDENTIFIER ::= { pkcs-1 13 } + + -- + -- Public key definition for OAEP encryption + -- + + pk-rsaES-OAEP PUBLIC-KEY ::= { + IDENTIFIER id-RSAES-OAEP + KEY RSAPublicKey + PARAMS TYPE RSAES-OAEP-params ARE optional + -- Private key format not in this module -- + CERT-KEY-USAGE {keyEncipherment, dataEncipherment} + } + + -- + -- Key transport key lock definition for OAEP encryption + -- + + kta-rsaES-OAEP KEY-TRANSPORT ::= { + IDENTIFIER id-RSAES-OAEP + PARAMS TYPE RSAES-OAEP-params ARE required + PUBLIC-KEYS { pk-rsa | pk-rsaES-OAEP } + SMIME-CAPS { TYPE RSAES-OAEP-params IDENTIFIED BY id-RSAES-OAEP} + } + -- ============================ + -- Basic object identifiers + -- ============================ + + pkcs-1 OBJECT IDENTIFIER ::= + { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) 1 } + + -- When rsaEncryption is used in an AlgorithmIdentifier, the + -- parameters MUST be present and MUST be NULL. + -- rsaEncryption OBJECT IDENTIFIER ::= { pkcs-1 1 } + + -- When id-RSAES-OAEP is used in an AlgorithmIdentifier, + -- and the parameters field is present, it MUST be + -- RSAES-OAEP-params. + + id-RSAES-OAEP OBJECT IDENTIFIER ::= { pkcs-1 7 } + + -- When id-mgf1 is used in an AlgorithmIdentifier, the parameters + -- MUST be present and MUST be a HashAlgorithm. + + id-mgf1 OBJECT IDENTIFIER ::= { pkcs-1 8 } + + -- When id-pSpecified is used in an AlgorithmIdentifier, the + -- parameters MUST be an OCTET STRING. 
+ + id-pSpecified OBJECT IDENTIFIER ::= { pkcs-1 9 } + + -- When id-RSASSA-PSS is used in an AlgorithmIdentifier, and the + -- parameters field is present, it MUST be RSASSA-PSS-params. + + id-RSASSA-PSS OBJECT IDENTIFIER ::= { pkcs-1 10 } + + -- When the following OIDs are used in an AlgorithmIdentifier, the + -- parameters SHOULD be absent, but if the parameters are present, + -- they MUST be NULL. + + -- + -- id-sha1 is imported from RFC 3279. Additionally, the v1.5 + -- signature algorithms (i.e., rsaWithSHA256) are now solely placed + -- in that module. + -- + + id-sha224 OBJECT IDENTIFIER ::= + { joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101) + csor(3) nistAlgorithms(4) hashalgs(2) 4 } + + mda-sha224 DIGEST-ALGORITHM ::= { + IDENTIFIER id-sha224 + PARAMS TYPE NULL ARE preferredAbsent + } + + id-sha256 OBJECT IDENTIFIER ::= + { joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101) + csor(3) nistAlgorithms(4) hashalgs(2) 1 } + + mda-sha256 DIGEST-ALGORITHM ::= { + IDENTIFIER id-sha256 + PARAMS TYPE NULL ARE preferredAbsent + } + id-sha384 OBJECT IDENTIFIER ::= + { joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101) + csor(3) nistAlgorithms(4) hashalgs(2) 2 } + + mda-sha384 DIGEST-ALGORITHM ::= { + IDENTIFIER id-sha384 + PARAMS TYPE NULL ARE preferredAbsent + } + id-sha512 OBJECT IDENTIFIER ::= + { joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101) + csor(3) nistAlgorithms(4) hashalgs(2) 3 } + + mda-sha512 DIGEST-ALGORITHM ::= { + IDENTIFIER id-sha512 + PARAMS TYPE NULL ARE preferredAbsent + } + + -- ============= + -- Constants + -- ============= + + EncodingParameters ::= OCTET STRING(SIZE(0..MAX)) + + nullOctetString EncodingParameters ::= ''H + + nullParameters NULL ::= NULL + + -- ========================= + -- Algorithm Identifiers + -- ========================= + + HashAlgorithm ::= AlgorithmIdentifier{DIGEST-ALGORITHM, + {HashAlgorithms}} + + HashAlgorithms DIGEST-ALGORITHM ::= { + { IDENTIFIER id-sha1 PARAMS TYPE NULL ARE preferredPresent } | + { IDENTIFIER id-sha224 PARAMS TYPE NULL ARE preferredPresent } | + { IDENTIFIER id-sha256 PARAMS TYPE NULL ARE preferredPresent } | + { IDENTIFIER id-sha384 PARAMS TYPE NULL ARE preferredPresent } | + { IDENTIFIER id-sha512 PARAMS TYPE NULL ARE preferredPresent } + } + + sha1Identifier HashAlgorithm ::= { + algorithm id-sha1, + parameters NULL : NULL + } + + -- + -- We have a default algorithm - create the value here + -- + + MaskGenAlgorithm ::= AlgorithmIdentifier{ALGORITHM, + {PKCS1MGFAlgorithms}} + + mgf1SHA1 MaskGenAlgorithm ::= { + algorithm id-mgf1, + parameters HashAlgorithm : sha1Identifier + } + + -- + -- Define the set of mask generation functions + -- + -- If the identifier is id-mgf1, any of the listed hash + -- algorithms may be used. + -- + + PKCS1MGFAlgorithms ALGORITHM ::= { + { IDENTIFIER id-mgf1 PARAMS TYPE HashAlgorithm ARE required }, + ... + } + + -- + -- Define the set of known source algorithms for PSS + -- + + PSourceAlgorithm ::= AlgorithmIdentifier{ALGORITHM, + {PSS-SourceAlgorithms}} + + PSS-SourceAlgorithms ALGORITHM ::= { + { IDENTIFIER id-pSpecified PARAMS TYPE EncodingParameters + ARE required }, + ... + } + pSpecifiedEmpty PSourceAlgorithm ::= { + algorithm id-pSpecified, + parameters EncodingParameters : nullOctetString + } + + -- =================== + -- Main structures + -- =================== + + -- AlgorithmIdentifier parameters for id-RSASSA-PSS. + -- Note that the tags in this Sequence are explicit. 
+ -- Note: The hash algorithm in hashAlgorithm and in + -- maskGenAlgorithm should be the same. + + RSASSA-PSS-params ::= SEQUENCE { + hashAlgorithm [0] HashAlgorithm DEFAULT sha1Identifier, + maskGenAlgorithm [1] MaskGenAlgorithm DEFAULT mgf1SHA1, + saltLength [2] INTEGER DEFAULT 20, + trailerField [3] INTEGER DEFAULT 1 + } + + -- AlgorithmIdentifier parameters for id-RSAES-OAEP. + -- Note that the tags in this Sequence are explicit. + -- Note: The hash algorithm in hashFunc and in + -- maskGenFunc should be the same. + + RSAES-OAEP-params ::= SEQUENCE { + hashFunc [0] HashAlgorithm DEFAULT sha1Identifier, + maskGenFunc [1] MaskGenAlgorithm DEFAULT mgf1SHA1, + pSourceFunc [2] PSourceAlgorithm DEFAULT + pSpecifiedEmpty + } + + END diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Explicit-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Explicit-2009.asn1 new file mode 100644 index 0000000000..613e0e9d2c --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Explicit-2009.asn1 @@ -0,0 +1,415 @@ + PKIX1Explicit-2009 + {iso(1) identified-organization(3) dod(6) internet(1) + security(5) mechanisms(5) pkix(7) id-mod(0) + id-mod-pkix1-explicit-02(51)} + DEFINITIONS EXPLICIT TAGS ::= + BEGIN + + IMPORTS + + Extensions{}, EXTENSION, ATTRIBUTE, SingleAttribute{} + FROM PKIX-CommonTypes-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)} + + AlgorithmIdentifier{}, PUBLIC-KEY, SIGNATURE-ALGORITHM + FROM AlgorithmInformation-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) + id-mod-algorithmInformation-02(58)} + + CertExtensions, CrlExtensions, CrlEntryExtensions + FROM PKIX1Implicit-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)} + SignatureAlgs, PublicKeys + FROM PKIXAlgs-2009 + {iso(1) identified-organization(3) dod(6) + internet(1) security(5) mechanisms(5) pkix(7) id-mod(0) 56} + + SignatureAlgs, PublicKeys + FROM PKIX1-PSS-OAEP-Algorithms-2009 + {iso(1) identified-organization(3) dod(6) + internet(1) security(5) mechanisms(5) pkix(7) id-mod(0) + id-mod-pkix1-rsa-pkalgs-02(54)} + + ORAddress + FROM PKIX-X400Address-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-x400address-02(60)}; + + id-pkix OBJECT IDENTIFIER ::= + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7)} + + -- PKIX arcs + + id-pe OBJECT IDENTIFIER ::= { id-pkix 1 } + -- arc for private certificate extensions + id-qt OBJECT IDENTIFIER ::= { id-pkix 2 } + -- arc for policy qualifier types + id-kp OBJECT IDENTIFIER ::= { id-pkix 3 } + -- arc for extended key purpose OIDs + id-ad OBJECT IDENTIFIER ::= { id-pkix 48 } + -- arc for access descriptors + + -- policyQualifierIds for Internet policy qualifiers + + id-qt-cps OBJECT IDENTIFIER ::= { id-qt 1 } + -- OID for CPS qualifier + id-qt-unotice OBJECT IDENTIFIER ::= { id-qt 2 } + -- OID for user notice qualifier + + -- access descriptor definitions + + id-ad-ocsp OBJECT IDENTIFIER ::= { id-ad 1 } + id-ad-caIssuers OBJECT IDENTIFIER ::= { id-ad 2 } + id-ad-timeStamping OBJECT IDENTIFIER ::= { id-ad 3 } + id-ad-caRepository OBJECT IDENTIFIER ::= { id-ad 5 } + + -- attribute data types + AttributeType ::= ATTRIBUTE.&id + + -- Replaced by SingleAttribute{} + -- + -- AttributeTypeAndValue ::= SEQUENCE { + -- type 
ATTRIBUTE.&id({SupportedAttributes}), + -- value ATTRIBUTE.&Type({SupportedAttributes}{@type}) } + -- + + -- Suggested naming attributes: Definition of the following + -- information object set may be augmented to meet local + -- requirements. Note that deleting members of the set may + -- prevent interoperability with conforming implementations. + -- All attributes are presented in pairs: the AttributeType + -- followed by the type definition for the corresponding + -- AttributeValue. + + -- Arc for standard naming attributes + + id-at OBJECT IDENTIFIER ::= { joint-iso-ccitt(2) ds(5) 4 } + + -- Naming attributes of type X520name + + id-at-name AttributeType ::= { id-at 41 } + at-name ATTRIBUTE ::= { TYPE X520name IDENTIFIED BY id-at-name } + + id-at-surname AttributeType ::= { id-at 4 } + at-surname ATTRIBUTE ::= { TYPE X520name IDENTIFIED BY id-at-surname } + + id-at-givenName AttributeType ::= { id-at 42 } + at-givenName ATTRIBUTE ::= + { TYPE X520name IDENTIFIED BY id-at-givenName } + + id-at-initials AttributeType ::= { id-at 43 } + at-initials ATTRIBUTE ::= + { TYPE X520name IDENTIFIED BY id-at-initials } + + id-at-generationQualifier AttributeType ::= { id-at 44 } + at-generationQualifier ATTRIBUTE ::= + { TYPE X520name IDENTIFIED BY id-at-generationQualifier } + + -- Directory string type -- + + DirectoryString{INTEGER:maxSize} ::= CHOICE { + teletexString TeletexString(SIZE (1..maxSize)), + printableString PrintableString(SIZE (1..maxSize)), + bmpString BMPString(SIZE (1..maxSize)), + universalString UniversalString(SIZE (1..maxSize)), + uTF8String UTF8String(SIZE (1..maxSize)) + } + + X520name ::= DirectoryString {ub-name} + + -- Naming attributes of type X520CommonName + + id-at-commonName AttributeType ::= { id-at 3 } + + at-x520CommonName ATTRIBUTE ::= + {TYPE X520CommonName IDENTIFIED BY id-at-commonName } + + X520CommonName ::= DirectoryString {ub-common-name} + + -- Naming attributes of type X520LocalityName + + id-at-localityName AttributeType ::= { id-at 7 } + + at-x520LocalityName ATTRIBUTE ::= + { TYPE X520LocalityName IDENTIFIED BY id-at-localityName } + X520LocalityName ::= DirectoryString {ub-locality-name} + + -- Naming attributes of type X520StateOrProvinceName + + id-at-stateOrProvinceName AttributeType ::= { id-at 8 } + + at-x520StateOrProvinceName ATTRIBUTE ::= + { TYPE DirectoryString {ub-state-name} + IDENTIFIED BY id-at-stateOrProvinceName } + X520StateOrProvinceName ::= DirectoryString {ub-state-name} + + -- Naming attributes of type X520OrganizationName + + id-at-organizationName AttributeType ::= { id-at 10 } + + at-x520OrganizationName ATTRIBUTE ::= + { TYPE DirectoryString {ub-organization-name} + IDENTIFIED BY id-at-organizationName } + X520OrganizationName ::= DirectoryString {ub-organization-name} + + -- Naming attributes of type X520OrganizationalUnitName + + id-at-organizationalUnitName AttributeType ::= { id-at 11 } + + at-x520OrganizationalUnitName ATTRIBUTE ::= + { TYPE DirectoryString {ub-organizational-unit-name} + IDENTIFIED BY id-at-organizationalUnitName } + X520OrganizationalUnitName ::= DirectoryString + {ub-organizational-unit-name} + + -- Naming attributes of type X520Title + + id-at-title AttributeType ::= { id-at 12 } + + at-x520Title ATTRIBUTE ::= { TYPE DirectoryString { ub-title } + IDENTIFIED BY id-at-title } + + -- Naming attributes of type X520dnQualifier + + id-at-dnQualifier AttributeType ::= { id-at 46 } + + at-x520dnQualifier ATTRIBUTE ::= { TYPE PrintableString + IDENTIFIED BY id-at-dnQualifier } + + -- Naming attributes of 
type X520countryName (digraph from IS 3166) + + id-at-countryName AttributeType ::= { id-at 6 } + + at-x520countryName ATTRIBUTE ::= { TYPE PrintableString (SIZE (2)) + IDENTIFIED BY id-at-countryName } + + -- Naming attributes of type X520SerialNumber + + id-at-serialNumber AttributeType ::= { id-at 5 } + + at-x520SerialNumber ATTRIBUTE ::= {TYPE PrintableString + (SIZE (1..ub-serial-number)) IDENTIFIED BY id-at-serialNumber } + + -- Naming attributes of type X520Pseudonym + + id-at-pseudonym AttributeType ::= { id-at 65 } + + at-x520Pseudonym ATTRIBUTE ::= { TYPE DirectoryString {ub-pseudonym} + IDENTIFIED BY id-at-pseudonym } + + -- Naming attributes of type DomainComponent (from RFC 2247) + + id-domainComponent AttributeType ::= + { itu-t(0) data(9) pss(2342) ucl(19200300) pilot(100) + pilotAttributeType(1) 25 } + + at-domainComponent ATTRIBUTE ::= {TYPE IA5String + IDENTIFIED BY id-domainComponent } + + -- Legacy attributes + + pkcs-9 OBJECT IDENTIFIER ::= + { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) 9 } + id-emailAddress AttributeType ::= { pkcs-9 1 } + + at-emailAddress ATTRIBUTE ::= {TYPE IA5String + (SIZE (1..ub-emailaddress-length)) IDENTIFIED BY + id-emailAddress } + + -- naming data types -- + + Name ::= CHOICE { -- only one possibility for now -- + rdnSequence RDNSequence } + + RDNSequence ::= SEQUENCE OF RelativeDistinguishedName + + DistinguishedName ::= RDNSequence + + RelativeDistinguishedName ::= + SET SIZE (1 .. MAX) OF SingleAttribute { {SupportedAttributes} } + + -- These are the known name elements for a DN + + SupportedAttributes ATTRIBUTE ::= { + at-name | at-surname | at-givenName | at-initials | + at-generationQualifier | at-x520CommonName | + at-x520LocalityName | at-x520StateOrProvinceName | + at-x520OrganizationName | at-x520OrganizationalUnitName | + at-x520Title | at-x520dnQualifier | at-x520countryName | + at-x520SerialNumber | at-x520Pseudonym | at-domainComponent | + at-emailAddress, ... } + + -- + -- Certificate- and CRL-specific structures begin here + -- + + Certificate ::= SIGNED{TBSCertificate} + + TBSCertificate ::= SEQUENCE { + version [0] Version DEFAULT v1, + serialNumber CertificateSerialNumber, + signature AlgorithmIdentifier{SIGNATURE-ALGORITHM, + {SignatureAlgorithms}}, + issuer Name, + validity Validity, + subject Name, + subjectPublicKeyInfo SubjectPublicKeyInfo, + ... , + [[2: -- If present, version MUST be v2 + issuerUniqueID [1] IMPLICIT UniqueIdentifier OPTIONAL, + subjectUniqueID [2] IMPLICIT UniqueIdentifier OPTIONAL + ]], + [[3: -- If present, version MUST be v3 -- + extensions [3] Extensions{{CertExtensions}} OPTIONAL + ]], ... } + + Version ::= INTEGER { v1(0), v2(1), v3(2) } + + CertificateSerialNumber ::= INTEGER + + Validity ::= SEQUENCE { + notBefore Time, + notAfter Time } + + Time ::= CHOICE { + utcTime UTCTime, + generalTime GeneralizedTime } + + UniqueIdentifier ::= BIT STRING + + SubjectPublicKeyInfo ::= SEQUENCE { + algorithm AlgorithmIdentifier{PUBLIC-KEY, + {PublicKeyAlgorithms}}, + subjectPublicKey BIT STRING } + + -- CRL structures + + CertificateList ::= SIGNED{TBSCertList} + + TBSCertList ::= SEQUENCE { + version Version OPTIONAL, + -- if present, MUST be v2 + signature AlgorithmIdentifier{SIGNATURE-ALGORITHM, + {SignatureAlgorithms}}, + issuer Name, + thisUpdate Time, + nextUpdate Time OPTIONAL, + revokedCertificates SEQUENCE SIZE (1..MAX) OF SEQUENCE { + userCertificate CertificateSerialNumber, + revocationDate Time, + ... 
, + [[2: -- if present, version MUST be v2 + crlEntryExtensions Extensions{{CrlEntryExtensions}} + OPTIONAL + ]], ... + } OPTIONAL, + ... , + [[2: -- if present, version MUST be v2 + crlExtensions [0] Extensions{{CrlExtensions}} + OPTIONAL + ]], ... } + + -- Version, Time, CertificateSerialNumber, and Extensions were + -- defined earlier for use in the certificate structure + + -- + -- The two object sets below should be expanded to include + -- those algorithms which are supported by the system. + -- + -- For example: + -- SignatureAlgorithms SIGNATURE-ALGORITHM ::= { + -- PKIXAlgs-2008.SignatureAlgs, ..., + -- - - RFC 3279 provides the base set + -- PKIX1-PSS-OAEP-ALGORITHMS.SignatureAlgs | + -- - - RFC 4055 provides extension algs + -- OtherModule.SignatureAlgs + -- - - RFC XXXX provides additional extension algs + -- } + + SignatureAlgorithms SIGNATURE-ALGORITHM ::= { + PKIXAlgs-2009.SignatureAlgs, ..., + PKIX1-PSS-OAEP-Algorithms-2009.SignatureAlgs } + + PublicKeyAlgorithms PUBLIC-KEY ::= { + PKIXAlgs-2009.PublicKeys, ..., + PKIX1-PSS-OAEP-Algorithms-2009.PublicKeys} + + -- Upper Bounds + + ub-state-name INTEGER ::= 128 + ub-organization-name INTEGER ::= 64 + ub-organizational-unit-name INTEGER ::= 64 + ub-title INTEGER ::= 64 + ub-serial-number INTEGER ::= 64 + ub-pseudonym INTEGER ::= 128 + ub-emailaddress-length INTEGER ::= 255 + ub-locality-name INTEGER ::= 128 + ub-common-name INTEGER ::= 64 + ub-name INTEGER ::= 32768 + + -- Note - upper bounds on string types, such as TeletexString, are + -- measured in characters. Excepting PrintableString or IA5String, a + -- significantly greater number of octets will be required to hold + -- such a value. As a minimum, 16 octets or twice the specified + -- upper bound, whichever is the larger, should be allowed for + -- TeletexString. For UTF8String or UniversalString, at least four + -- times the upper bound should be allowed. + + -- Information object classes used in the definition + -- of certificates and CRLs + + -- Parameterized Type SIGNED + -- + -- Three different versions of doing SIGNED: + -- 1. Simple and close to the previous version + -- + -- SIGNED{ToBeSigned} ::= SEQUENCE { + -- toBeSigned ToBeSigned, + -- algorithm AlgorithmIdentifier{SIGNATURE-ALGORITHM, + -- {SignatureAlgorithms}}, + -- signature BIT STRING + -- } + + -- 2. From Authenticated Framework + -- + -- SIGNED{ToBeSigned} ::= SEQUENCE { + -- toBeSigned ToBeSigned, + -- COMPONENTS OF SIGNATURE{ToBeSigned} + -- } + -- SIGNATURE{ToBeSigned} ::= SEQUENCE { + -- algorithmIdentifier AlgorithmIdentifier, + -- encrypted ENCRYPTED-HASH{ToBeSigned} + -- } + -- ENCRYPTED-HASH{ToBeSigned} ::= + -- BIT STRING + -- (CONSTRAINED BY { + -- shall be the result of applying a hashing procedure to + -- the DER-encoded (see 4.1) octets of a value of + -- ToBeSigned and then applying an encipherment procedure + -- to those octets + -- }) + -- + -- + -- 3. A more complex version, but one that automatically ties + -- together both the signature algorithm and the + -- signature value for automatic decoding. + -- + SIGNED{ToBeSigned} ::= SEQUENCE { + toBeSigned ToBeSigned, + algorithmIdentifier SEQUENCE { + algorithm SIGNATURE-ALGORITHM. + &id({SignatureAlgorithms}), + parameters SIGNATURE-ALGORITHM. 
+ &Params({SignatureAlgorithms} + {@algorithmIdentifier.algorithm}) OPTIONAL + }, + signature BIT STRING (CONTAINING SIGNATURE-ALGORITHM.&Value( + {SignatureAlgorithms} + {@algorithmIdentifier.algorithm})) + } + + END diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Implicit-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Implicit-2009.asn1 new file mode 100644 index 0000000000..3651a5249b --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Implicit-2009.asn1 @@ -0,0 +1,447 @@ + PKIX1Implicit-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)} + DEFINITIONS IMPLICIT TAGS ::= + BEGIN + IMPORTS + + AttributeSet{}, EXTENSION, ATTRIBUTE + FROM PKIX-CommonTypes-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57) } + + id-pe, id-kp, id-qt-unotice, id-qt-cps, ORAddress, Name, + RelativeDistinguishedName, CertificateSerialNumber, + DirectoryString{}, SupportedAttributes + FROM PKIX1Explicit-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51) }; + + CertExtensions EXTENSION ::= { + ext-AuthorityKeyIdentifier | ext-SubjectKeyIdentifier | + ext-KeyUsage | ext-PrivateKeyUsagePeriod | + ext-CertificatePolicies | ext-PolicyMappings | + ext-SubjectAltName | ext-IssuerAltName | + ext-SubjectDirectoryAttributes | + ext-BasicConstraints | ext-NameConstraints | + ext-PolicyConstraints | ext-ExtKeyUsage | + ext-CRLDistributionPoints | ext-InhibitAnyPolicy | + ext-FreshestCRL | ext-AuthorityInfoAccess | + ext-SubjectInfoAccessSyntax, ... } + + CrlExtensions EXTENSION ::= { + ext-AuthorityKeyIdentifier | ext-IssuerAltName | + ext-CRLNumber | ext-DeltaCRLIndicator | + ext-IssuingDistributionPoint | ext-FreshestCRL, ... } + + CrlEntryExtensions EXTENSION ::= { + ext-CRLReason | ext-CertificateIssuer | + ext-HoldInstructionCode | ext-InvalidityDate, ... 
} + -- Shared arc for standard certificate and CRL extensions + + id-ce OBJECT IDENTIFIER ::= { joint-iso-ccitt(2) ds(5) 29 } + + -- authority key identifier OID and syntax + + ext-AuthorityKeyIdentifier EXTENSION ::= { SYNTAX + AuthorityKeyIdentifier IDENTIFIED BY + id-ce-authorityKeyIdentifier } + id-ce-authorityKeyIdentifier OBJECT IDENTIFIER ::= { id-ce 35 } + + AuthorityKeyIdentifier ::= SEQUENCE { + keyIdentifier [0] KeyIdentifier OPTIONAL, + authorityCertIssuer [1] GeneralNames OPTIONAL, + authorityCertSerialNumber [2] CertificateSerialNumber OPTIONAL } + (WITH COMPONENTS { + ..., + authorityCertIssuer PRESENT, + authorityCertSerialNumber PRESENT + } | + WITH COMPONENTS { + ..., + authorityCertIssuer ABSENT, + authorityCertSerialNumber ABSENT + }) + + KeyIdentifier ::= OCTET STRING + + -- subject key identifier OID and syntax + + ext-SubjectKeyIdentifier EXTENSION ::= { SYNTAX + KeyIdentifier IDENTIFIED BY id-ce-subjectKeyIdentifier } + id-ce-subjectKeyIdentifier OBJECT IDENTIFIER ::= { id-ce 14 } + + -- key usage extension OID and syntax + + ext-KeyUsage EXTENSION ::= { SYNTAX + KeyUsage IDENTIFIED BY id-ce-keyUsage } + id-ce-keyUsage OBJECT IDENTIFIER ::= { id-ce 15 } + + KeyUsage ::= BIT STRING { + digitalSignature (0), + nonRepudiation (1), -- recent editions of X.509 have + -- renamed this bit to + -- contentCommitment + keyEncipherment (2), + dataEncipherment (3), + keyAgreement (4), + keyCertSign (5), + cRLSign (6), + encipherOnly (7), + decipherOnly (8) + } + + -- private key usage period extension OID and syntax + + ext-PrivateKeyUsagePeriod EXTENSION ::= { SYNTAX + PrivateKeyUsagePeriod IDENTIFIED BY id-ce-privateKeyUsagePeriod } + id-ce-privateKeyUsagePeriod OBJECT IDENTIFIER ::= { id-ce 16 } + + PrivateKeyUsagePeriod ::= SEQUENCE { + notBefore [0] GeneralizedTime OPTIONAL, + notAfter [1] GeneralizedTime OPTIONAL } + (WITH COMPONENTS {..., notBefore PRESENT } | + WITH COMPONENTS {..., notAfter PRESENT }) + + -- certificate policies extension OID and syntax + + ext-CertificatePolicies EXTENSION ::= { SYNTAX + CertificatePolicies IDENTIFIED BY id-ce-certificatePolicies} + id-ce-certificatePolicies OBJECT IDENTIFIER ::= { id-ce 32 } + + CertificatePolicies ::= SEQUENCE SIZE (1..MAX) OF PolicyInformation + + PolicyInformation ::= SEQUENCE { + policyIdentifier CertPolicyId, + policyQualifiers SEQUENCE SIZE (1..MAX) OF + PolicyQualifierInfo OPTIONAL } + + CertPolicyId ::= OBJECT IDENTIFIER + + CERT-POLICY-QUALIFIER ::= TYPE-IDENTIFIER + + PolicyQualifierInfo ::= SEQUENCE { + policyQualifierId CERT-POLICY-QUALIFIER. + &id({PolicyQualifierId}), + qualifier CERT-POLICY-QUALIFIER. + &Type({PolicyQualifierId}{@policyQualifierId})} + + -- Implementations that recognize additional policy qualifiers MUST + -- augment the following definition for PolicyQualifierId + + PolicyQualifierId CERT-POLICY-QUALIFIER ::= + { pqid-cps | pqid-unotice, ... 
} + + pqid-cps CERT-POLICY-QUALIFIER ::= { CPSuri IDENTIFIED BY id-qt-cps } + pqid-unotice CERT-POLICY-QUALIFIER ::= { UserNotice + IDENTIFIED BY id-qt-unotice } + + -- CPS pointer qualifier + + CPSuri ::= IA5String + + -- user notice qualifier + + UserNotice ::= SEQUENCE { + noticeRef NoticeReference OPTIONAL, + explicitText DisplayText OPTIONAL} + + -- + -- This is not made explicit in the text + -- + -- {WITH COMPONENTS {..., noticeRef PRESENT} | + -- WITH COMPONENTS {..., DisplayText PRESENT }} + + NoticeReference ::= SEQUENCE { + organization DisplayText, + noticeNumbers SEQUENCE OF INTEGER } + + DisplayText ::= CHOICE { + ia5String IA5String (SIZE (1..200)), + visibleString VisibleString (SIZE (1..200)), + bmpString BMPString (SIZE (1..200)), + utf8String UTF8String (SIZE (1..200)) } + + -- policy mapping extension OID and syntax + + ext-PolicyMappings EXTENSION ::= { SYNTAX + PolicyMappings IDENTIFIED BY id-ce-policyMappings } + id-ce-policyMappings OBJECT IDENTIFIER ::= { id-ce 33 } + + PolicyMappings ::= SEQUENCE SIZE (1..MAX) OF SEQUENCE { + issuerDomainPolicy CertPolicyId, + subjectDomainPolicy CertPolicyId + } + + -- subject alternative name extension OID and syntax + + ext-SubjectAltName EXTENSION ::= { SYNTAX + GeneralNames IDENTIFIED BY id-ce-subjectAltName } + id-ce-subjectAltName OBJECT IDENTIFIER ::= { id-ce 17 } + + GeneralNames ::= SEQUENCE SIZE (1..MAX) OF GeneralName + + GeneralName ::= CHOICE { + otherName [0] INSTANCE OF OTHER-NAME, + rfc822Name [1] IA5String, + dNSName [2] IA5String, + x400Address [3] ORAddress, + directoryName [4] Name, + ediPartyName [5] EDIPartyName, + uniformResourceIdentifier [6] IA5String, + iPAddress [7] OCTET STRING, + registeredID [8] OBJECT IDENTIFIER + } + + -- AnotherName replaces OTHER-NAME ::= TYPE-IDENTIFIER, as + -- TYPE-IDENTIFIER is not supported in the '88 ASN.1 syntax + + OTHER-NAME ::= TYPE-IDENTIFIER + + EDIPartyName ::= SEQUENCE { + nameAssigner [0] DirectoryString {ubMax} OPTIONAL, + partyName [1] DirectoryString {ubMax} + } + + -- issuer alternative name extension OID and syntax + + ext-IssuerAltName EXTENSION ::= { SYNTAX + GeneralNames IDENTIFIED BY id-ce-issuerAltName } + id-ce-issuerAltName OBJECT IDENTIFIER ::= { id-ce 18 } + + ext-SubjectDirectoryAttributes EXTENSION ::= { SYNTAX + SubjectDirectoryAttributes IDENTIFIED BY + id-ce-subjectDirectoryAttributes } + id-ce-subjectDirectoryAttributes OBJECT IDENTIFIER ::= { id-ce 9 } + + SubjectDirectoryAttributes ::= SEQUENCE SIZE (1..MAX) OF + AttributeSet{{SupportedAttributes}} + + -- basic constraints extension OID and syntax + + ext-BasicConstraints EXTENSION ::= { SYNTAX + BasicConstraints IDENTIFIED BY id-ce-basicConstraints } + id-ce-basicConstraints OBJECT IDENTIFIER ::= { id-ce 19 } + + BasicConstraints ::= SEQUENCE { + cA BOOLEAN DEFAULT FALSE, + pathLenConstraint INTEGER (0..MAX) OPTIONAL + } + + -- name constraints extension OID and syntax + ext-NameConstraints EXTENSION ::= { SYNTAX + NameConstraints IDENTIFIED BY id-ce-nameConstraints } + id-ce-nameConstraints OBJECT IDENTIFIER ::= { id-ce 30 } + + NameConstraints ::= SEQUENCE { + permittedSubtrees [0] GeneralSubtrees OPTIONAL, + excludedSubtrees [1] GeneralSubtrees OPTIONAL + } + -- + -- This is a constraint in the issued certificates by CAs, but is + -- not a requirement on EEs. 
+ -- + -- (WITH COMPONENTS { ..., permittedSubtrees PRESENT} | + -- WITH COMPONENTS { ..., excludedSubtrees PRESENT }} + + GeneralSubtrees ::= SEQUENCE SIZE (1..MAX) OF GeneralSubtree + + GeneralSubtree ::= SEQUENCE { + base GeneralName, + minimum [0] BaseDistance DEFAULT 0, + maximum [1] BaseDistance OPTIONAL + } + + BaseDistance ::= INTEGER (0..MAX) + + -- policy constraints extension OID and syntax + + ext-PolicyConstraints EXTENSION ::= { SYNTAX + PolicyConstraints IDENTIFIED BY id-ce-policyConstraints } + id-ce-policyConstraints OBJECT IDENTIFIER ::= { id-ce 36 } + + PolicyConstraints ::= SEQUENCE { + requireExplicitPolicy [0] SkipCerts OPTIONAL, + inhibitPolicyMapping [1] SkipCerts OPTIONAL } + -- + -- This is a constraint in the issued certificates by CAs, + -- but is not a requirement for EEs + -- + -- (WITH COMPONENTS { ..., requireExplicitPolicy PRESENT} | + -- WITH COMPONENTS { ..., inhibitPolicyMapping PRESENT}) + + SkipCerts ::= INTEGER (0..MAX) + + -- CRL distribution points extension OID and syntax + + ext-CRLDistributionPoints EXTENSION ::= { SYNTAX + CRLDistributionPoints IDENTIFIED BY id-ce-cRLDistributionPoints} + id-ce-cRLDistributionPoints OBJECT IDENTIFIER ::= {id-ce 31} + CRLDistributionPoints ::= SEQUENCE SIZE (1..MAX) OF DistributionPoint + + DistributionPoint ::= SEQUENCE { + distributionPoint [0] DistributionPointName OPTIONAL, + reasons [1] ReasonFlags OPTIONAL, + cRLIssuer [2] GeneralNames OPTIONAL + } + -- + -- This is not a requirement in the text, but it seems as if it + -- should be + -- + --(WITH COMPONENTS {..., distributionPoint PRESENT} | + -- WITH COMPONENTS {..., cRLIssuer PRESENT}) + + DistributionPointName ::= CHOICE { + fullName [0] GeneralNames, + nameRelativeToCRLIssuer [1] RelativeDistinguishedName + } + + ReasonFlags ::= BIT STRING { + unused (0), + keyCompromise (1), + cACompromise (2), + affiliationChanged (3), + superseded (4), + cessationOfOperation (5), + certificateHold (6), + privilegeWithdrawn (7), + aACompromise (8) + } + + -- extended key usage extension OID and syntax + + ext-ExtKeyUsage EXTENSION ::= { SYNTAX + ExtKeyUsageSyntax IDENTIFIED BY id-ce-extKeyUsage } + id-ce-extKeyUsage OBJECT IDENTIFIER ::= {id-ce 37} + + ExtKeyUsageSyntax ::= SEQUENCE SIZE (1..MAX) OF KeyPurposeId + + KeyPurposeId ::= OBJECT IDENTIFIER + + -- permit unspecified key uses + + anyExtendedKeyUsage OBJECT IDENTIFIER ::= { id-ce-extKeyUsage 0 } + + -- extended key purpose OIDs + + id-kp-serverAuth OBJECT IDENTIFIER ::= { id-kp 1 } + id-kp-clientAuth OBJECT IDENTIFIER ::= { id-kp 2 } + id-kp-codeSigning OBJECT IDENTIFIER ::= { id-kp 3 } + id-kp-emailProtection OBJECT IDENTIFIER ::= { id-kp 4 } + id-kp-timeStamping OBJECT IDENTIFIER ::= { id-kp 8 } + id-kp-OCSPSigning OBJECT IDENTIFIER ::= { id-kp 9 } + + -- inhibit any policy OID and syntax + + ext-InhibitAnyPolicy EXTENSION ::= {SYNTAX + SkipCerts IDENTIFIED BY id-ce-inhibitAnyPolicy } + id-ce-inhibitAnyPolicy OBJECT IDENTIFIER ::= { id-ce 54 } + + -- freshest (delta)CRL extension OID and syntax + + ext-FreshestCRL EXTENSION ::= {SYNTAX + CRLDistributionPoints IDENTIFIED BY id-ce-freshestCRL } + id-ce-freshestCRL OBJECT IDENTIFIER ::= { id-ce 46 } + + -- authority info access + + ext-AuthorityInfoAccess EXTENSION ::= { SYNTAX + AuthorityInfoAccessSyntax IDENTIFIED BY + id-pe-authorityInfoAccess } + id-pe-authorityInfoAccess OBJECT IDENTIFIER ::= { id-pe 1 } + + AuthorityInfoAccessSyntax ::= + SEQUENCE SIZE (1..MAX) OF AccessDescription + + AccessDescription ::= SEQUENCE { + accessMethod OBJECT IDENTIFIER, + 
accessLocation GeneralName } + + -- subject info access + + ext-SubjectInfoAccessSyntax EXTENSION ::= { SYNTAX + SubjectInfoAccessSyntax IDENTIFIED BY id-pe-subjectInfoAccess } + id-pe-subjectInfoAccess OBJECT IDENTIFIER ::= { id-pe 11 } + + SubjectInfoAccessSyntax ::= + SEQUENCE SIZE (1..MAX) OF AccessDescription + + -- CRL number extension OID and syntax + + ext-CRLNumber EXTENSION ::= {SYNTAX + INTEGER (0..MAX) IDENTIFIED BY id-ce-cRLNumber } + id-ce-cRLNumber OBJECT IDENTIFIER ::= { id-ce 20 } + + CRLNumber ::= INTEGER (0..MAX) + -- issuing distribution point extension OID and syntax + + ext-IssuingDistributionPoint EXTENSION ::= { SYNTAX + IssuingDistributionPoint IDENTIFIED BY + id-ce-issuingDistributionPoint } + id-ce-issuingDistributionPoint OBJECT IDENTIFIER ::= { id-ce 28 } + + IssuingDistributionPoint ::= SEQUENCE { + distributionPoint [0] DistributionPointName OPTIONAL, + onlyContainsUserCerts [1] BOOLEAN DEFAULT FALSE, + onlyContainsCACerts [2] BOOLEAN DEFAULT FALSE, + onlySomeReasons [3] ReasonFlags OPTIONAL, + indirectCRL [4] BOOLEAN DEFAULT FALSE, + onlyContainsAttributeCerts [5] BOOLEAN DEFAULT FALSE + } + -- at most one of onlyContainsUserCerts, onlyContainsCACerts, + -- or onlyContainsAttributeCerts may be set to TRUE. + + ext-DeltaCRLIndicator EXTENSION ::= { SYNTAX + CRLNumber IDENTIFIED BY id-ce-deltaCRLIndicator } + id-ce-deltaCRLIndicator OBJECT IDENTIFIER ::= { id-ce 27 } + + -- CRL reasons extension OID and syntax + + ext-CRLReason EXTENSION ::= { SYNTAX + CRLReason IDENTIFIED BY id-ce-cRLReasons } + id-ce-cRLReasons OBJECT IDENTIFIER ::= { id-ce 21 } + + CRLReason ::= ENUMERATED { + unspecified (0), + keyCompromise (1), + cACompromise (2), + affiliationChanged (3), + superseded (4), + cessationOfOperation (5), + certificateHold (6), + removeFromCRL (8), + privilegeWithdrawn (9), + aACompromise (10) + } + + -- certificate issuer CRL entry extension OID and syntax + + ext-CertificateIssuer EXTENSION ::= { SYNTAX + GeneralNames IDENTIFIED BY id-ce-certificateIssuer } + id-ce-certificateIssuer OBJECT IDENTIFIER ::= { id-ce 29 } + + -- hold instruction extension OID and syntax + ext-HoldInstructionCode EXTENSION ::= { SYNTAX + OBJECT IDENTIFIER IDENTIFIED BY id-ce-holdInstructionCode } + id-ce-holdInstructionCode OBJECT IDENTIFIER ::= { id-ce 23 } + + -- ANSI x9 holdinstructions + + holdInstruction OBJECT IDENTIFIER ::= + {joint-iso-itu-t(2) member-body(2) us(840) x9cm(10040) 2} + id-holdinstruction-none OBJECT IDENTIFIER ::= + {holdInstruction 1} -- deprecated + id-holdinstruction-callissuer OBJECT IDENTIFIER ::= + {holdInstruction 2} + id-holdinstruction-reject OBJECT IDENTIFIER ::= + {holdInstruction 3} + + -- invalidity date CRL entry extension OID and syntax + + ext-InvalidityDate EXTENSION ::= { SYNTAX + GeneralizedTime IDENTIFIED BY id-ce-invalidityDate } + id-ce-invalidityDate OBJECT IDENTIFIER ::= { id-ce 24 } + -- Upper bounds + ubMax INTEGER ::= 32768 + + END diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAlgs-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAlgs-2009.asn1 new file mode 100644 index 0000000000..d58bcb5b19 --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAlgs-2009.asn1 @@ -0,0 +1,528 @@ + PKIXAlgs-2009 { iso(1) identified-organization(3) dod(6) + internet(1) security(5) mechanisms(5) pkix(7) id-mod(0) + id-mod-pkix1-algorithms2008-02(56) } + + DEFINITIONS EXPLICIT TAGS ::= + BEGIN + IMPORTS + + PUBLIC-KEY, SIGNATURE-ALGORITHM, DIGEST-ALGORITHM, SMIME-CAPS + FROM AlgorithmInformation-2009 + {iso(1) identified-organization(3) 
dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) + id-mod-algorithmInformation-02(58)} + + mda-sha224, mda-sha256, mda-sha384, mda-sha512 + FROM PKIX1-PSS-OAEP-Algorithms-2009 + {iso(1) identified-organization(3) dod(6) internet(1) + security(5) mechanisms(5) pkix(7) id-mod(0) + id-mod-pkix1-rsa-pkalgs-02(54)} ; + + -- + -- Public Key (pk-) Algorithms + -- + + PublicKeys PUBLIC-KEY ::= { + pk-rsa | + pk-dsa | + pk-dh | + pk-kea, + ..., + pk-ec | + pk-ecDH | + pk-ecMQV + } + + -- + -- Signature Algorithms (sa-) + -- + + SignatureAlgs SIGNATURE-ALGORITHM ::= { + sa-rsaWithMD2 | + sa-rsaWithMD5 | + sa-rsaWithSHA1 | + sa-dsaWithSHA1 | + sa-ecdsaWithSHA1, + ..., -- Extensible + sa-dsaWithSHA224 | + sa-dsaWithSHA256 | + sa-ecdsaWithSHA224 | + sa-ecdsaWithSHA256 | + sa-ecdsaWithSHA384 | + sa-ecdsaWithSHA512 + } + + -- + -- S/MIME CAPS for algorithms in this document + -- + -- For all of the algorithms laid out in this document, the + -- parameters field for the S/MIME capabilities is defined as + -- ABSENT as there are no specific values that need to be known + -- by the receiver for negotiation. + + -- + + SMimeCaps SMIME-CAPS ::= { + sa-rsaWithMD2.&smimeCaps | + sa-rsaWithMD5.&smimeCaps | + sa-rsaWithSHA1.&smimeCaps | + sa-dsaWithSHA1.&smimeCaps | + sa-dsaWithSHA224.&smimeCaps | + sa-dsaWithSHA256.&smimeCaps | + sa-ecdsaWithSHA1.&smimeCaps | + sa-ecdsaWithSHA224.&smimeCaps | + sa-ecdsaWithSHA256.&smimeCaps | + sa-ecdsaWithSHA384.&smimeCaps | + sa-ecdsaWithSHA512.&smimeCaps, + ... } + + -- RSA PK Algorithm, Parameters, and Keys + + pk-rsa PUBLIC-KEY ::= { + IDENTIFIER rsaEncryption + KEY RSAPublicKey + PARAMS TYPE NULL ARE absent + -- Private key format not in this module -- + CERT-KEY-USAGE {digitalSignature, nonRepudiation, + keyEncipherment, dataEncipherment, keyCertSign, cRLSign} + } + + rsaEncryption OBJECT IDENTIFIER ::= { + iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) + pkcs-1(1) 1 } + + RSAPublicKey ::= SEQUENCE { + modulus INTEGER, -- n + publicExponent INTEGER -- e + } + + -- DSA PK Algorithm, Parameters, and Keys + + pk-dsa PUBLIC-KEY ::= { + IDENTIFIER id-dsa + KEY DSAPublicKey + PARAMS TYPE DSA-Params ARE inheritable + -- Private key format not in this module -- + CERT-KEY-USAGE { digitalSignature, nonRepudiation, keyCertSign, + cRLSign } + } + + id-dsa OBJECT IDENTIFIER ::= { + iso(1) member-body(2) us(840) x9-57(10040) x9algorithm(4) 1 } + + DSA-Params ::= SEQUENCE { + p INTEGER, + q INTEGER, + g INTEGER + } + + DSAPublicKey ::= INTEGER -- public key, y + + -- Diffie-Hellman PK Algorithm, Parameters, and Keys + + pk-dh PUBLIC-KEY ::= { + IDENTIFIER dhpublicnumber + KEY DHPublicKey + PARAMS TYPE DomainParameters ARE inheritable + -- Private key format not in this module -- + CERT-KEY-USAGE {keyAgreement, encipherOnly, decipherOnly } + } + + dhpublicnumber OBJECT IDENTIFIER ::= { + iso(1) member-body(2) us(840) ansi-x942(10046) + number-type(2) 1 } + + DomainParameters ::= SEQUENCE { + p INTEGER, -- odd prime, p=jq +1 + g INTEGER, -- generator, g + q INTEGER, -- factor of p-1 + j INTEGER OPTIONAL, -- subgroup factor, j>= 2 + validationParams ValidationParams OPTIONAL + } + + ValidationParams ::= SEQUENCE { + seed BIT STRING, + pgenCounter INTEGER + } + + DiffieHellmanPublicNumber ::= INTEGER -- according to http://wikisec.free.fr/crypto/crypto.html + + DHPublicKey ::= INTEGER -- public key, y = g^x mod p + + -- KEA PK Algorithm and Parameters + + pk-kea PUBLIC-KEY ::= { + IDENTIFIER id-keyExchangeAlgorithm + -- key is not encoded -- + PARAMS TYPE 
KEA-Params-Id ARE required + -- Private key format not in this module -- + CERT-KEY-USAGE {keyAgreement, encipherOnly, decipherOnly } + } + id-keyExchangeAlgorithm OBJECT IDENTIFIER ::= { + joint-iso-itu-t(2) country(16) us(840) organization(1) + gov(101) dod(2) infosec(1) algorithms(1) 22 } + + KEA-Params-Id ::= OCTET STRING + + -- Elliptic Curve (EC) Signatures: Unrestricted Algorithms + -- (Section 2.1.1 of RFC 5480) + -- + -- EC Unrestricted Algorithm ID -- -- this is used for ECDSA + + pk-ec PUBLIC-KEY ::= { + IDENTIFIER id-ecPublicKey + KEY ECPoint + PARAMS TYPE ECParameters ARE required + -- Private key format not in this module -- + CERT-KEY-USAGE { digitalSignature, nonRepudiation, keyAgreement, + keyCertSign, cRLSign } + } + + ECPoint ::= OCTET STRING -- see RFC 5480 for syntax and restrictions + + id-ecPublicKey OBJECT IDENTIFIER ::= { + iso(1) member-body(2) us(840) ansi-X9-62(10045) keyType(2) 1 } + + -- Elliptic Curve (EC) Signatures: Restricted Algorithms + -- (Section 2.1.2 of RFC 5480) + -- + -- EC Diffie-Hellman Algorithm ID + + pk-ecDH PUBLIC-KEY ::= { + IDENTIFIER id-ecDH + KEY ECPoint + PARAMS TYPE ECParameters ARE required + -- Private key format not in this module -- + CERT-KEY-USAGE { keyAgreement, encipherOnly, decipherOnly } + } + + id-ecDH OBJECT IDENTIFIER ::= { + iso(1) identified-organization(3) certicom(132) schemes(1) + ecdh(12) } + + -- EC Menezes-Qu-Vanstone Algorithm ID + + pk-ecMQV PUBLIC-KEY ::= { + IDENTIFIER id-ecMQV + KEY ECPoint + PARAMS TYPE ECParameters ARE required + -- Private key format not in this module -- + CERT-KEY-USAGE { keyAgreement, encipherOnly, decipherOnly } + } + + id-ecMQV OBJECT IDENTIFIER ::= { + iso(1) identified-organization(3) certicom(132) schemes(1) + ecmqv(13) } + + -- Parameters and Keys for both Restricted and Unrestricted EC + + ECParameters ::= CHOICE { + namedCurve CURVE.&id({NamedCurve}) + -- implicitCurve NULL + -- implicitCurve MUST NOT be used in PKIX + -- specifiedCurve SpecifiedCurve + -- specifiedCurve MUST NOT be used in PKIX + -- Details for specifiedCurve can be found in [X9.62] + -- Any future additions to this CHOICE should be coordinated + -- with ANSI X.9. + } + -- If you need to be able to decode ANSI X.9 parameter structures, + -- uncomment the implicitCurve and specifiedCurve above, and also + -- uncomment the following: + --(WITH COMPONENTS {namedCurve PRESENT}) + + -- Sec 2.1.1.1 Named Curve + + CURVE ::= CLASS { &id OBJECT IDENTIFIER UNIQUE } + WITH SYNTAX { ID &id } + + NamedCurve CURVE ::= { + { ID secp192r1 } | { ID sect163k1 } | { ID sect163r2 } | + { ID secp224r1 } | { ID sect233k1 } | { ID sect233r1 } | + { ID secp256r1 } | { ID sect283k1 } | { ID sect283r1 } | + { ID secp384r1 } | { ID sect409k1 } | { ID sect409r1 } | + { ID secp521r1 } | { ID sect571k1 } | { ID sect571r1 }, + ... -- Extensible + } + + -- Note in [X9.62] the curves are referred to as 'ansiX9' as + -- opposed to 'sec'. For example, secp192r1 is the same curve as + -- ansix9p192r1. + + -- Note that in [PKI-ALG] the secp192r1 curve was referred to as + -- prime192v1 and the secp256r1 curve was referred to as + -- prime256v1. + + -- Note that [FIPS186-3] refers to secp192r1 as P-192, + -- secp224r1 as P-224, secp256r1 as P-256, secp384r1 as P-384, + -- and secp521r1 as P-521. 
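The CURVE class and NamedCurve object set above tie the namedCurve alternative of ECParameters to the curve OIDs listed next. As a minimal sketch of how this test module could be exercised once compiled with the Erlang asn1 application -- the module name pkix_algs_demo, the choice of BER, and the assumption that the imported modules compile alongside it are illustrative, not part of this change:

    %% Minimal sketch (illustrative only): compile PKIXAlgs-2009 and
    %% round-trip an ECParameters value through BER. Assumes the modules
    %% it IMPORTS from have been compiled first so that their .asn1db
    %% files are found via the include path.
    -module(pkix_algs_demo).
    -export([roundtrip/0]).

    roundtrip() ->
        ok = asn1ct:compile("PKIXAlgs-2009.asn1", [ber, {i, "."}]),
        Secp256r1 = {1,2,840,10045,3,1,7},  %% OBJECT IDENTIFIER as an Erlang tuple
        Val = {namedCurve, Secp256r1},      %% CHOICE value: {Alternative, Value}
        {ok, Der} = 'PKIXAlgs-2009':encode('ECParameters', Val),
        {ok, Val} = 'PKIXAlgs-2009':decode('ECParameters', Der),
        ok.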
+ + secp192r1 OBJECT IDENTIFIER ::= { + iso(1) member-body(2) us(840) ansi-X9-62(10045) curves(3) + prime(1) 1 } + + sect163k1 OBJECT IDENTIFIER ::= { + iso(1) identified-organization(3) certicom(132) curve(0) 1 } + + sect163r2 OBJECT IDENTIFIER ::= { + iso(1) identified-organization(3) certicom(132) curve(0) 15 } + + secp224r1 OBJECT IDENTIFIER ::= { + iso(1) identified-organization(3) certicom(132) curve(0) 33 } + + sect233k1 OBJECT IDENTIFIER ::= { + iso(1) identified-organization(3) certicom(132) curve(0) 26 } + + sect233r1 OBJECT IDENTIFIER ::= { + iso(1) identified-organization(3) certicom(132) curve(0) 27 } + + secp256r1 OBJECT IDENTIFIER ::= { + iso(1) member-body(2) us(840) ansi-X9-62(10045) curves(3) + prime(1) 7 } + + sect283k1 OBJECT IDENTIFIER ::= { + iso(1) identified-organization(3) certicom(132) curve(0) 16 } + + sect283r1 OBJECT IDENTIFIER ::= { + iso(1) identified-organization(3) certicom(132) curve(0) 17 } + + secp384r1 OBJECT IDENTIFIER ::= { + iso(1) identified-organization(3) certicom(132) curve(0) 34 } + + sect409k1 OBJECT IDENTIFIER ::= { + iso(1) identified-organization(3) certicom(132) curve(0) 36 } + + sect409r1 OBJECT IDENTIFIER ::= { + iso(1) identified-organization(3) certicom(132) curve(0) 37 } + + secp521r1 OBJECT IDENTIFIER ::= { + iso(1) identified-organization(3) certicom(132) curve(0) 35 } + + sect571k1 OBJECT IDENTIFIER ::= { + iso(1) identified-organization(3) certicom(132) curve(0) 38 } + + sect571r1 OBJECT IDENTIFIER ::= { + iso(1) identified-organization(3) certicom(132) curve(0) 39 } + + -- RSA with MD-2 + + sa-rsaWithMD2 SIGNATURE-ALGORITHM ::= { + IDENTIFIER md2WithRSAEncryption + PARAMS TYPE NULL ARE required + HASHES { mda-md2 } + PUBLIC-KEYS { pk-rsa } + SMIME-CAPS { IDENTIFIED BY md2WithRSAEncryption } + } + + md2WithRSAEncryption OBJECT IDENTIFIER ::= { + iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) + pkcs-1(1) 2 } + + -- RSA with MD-5 + + sa-rsaWithMD5 SIGNATURE-ALGORITHM ::= { + IDENTIFIER md5WithRSAEncryption + PARAMS TYPE NULL ARE required + HASHES { mda-md5 } + PUBLIC-KEYS { pk-rsa } + SMIME-CAPS { IDENTIFIED BY md5WithRSAEncryption } + } + + md5WithRSAEncryption OBJECT IDENTIFIER ::= { + iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) + pkcs-1(1) 4 } + + -- RSA with SHA-1 + + sa-rsaWithSHA1 SIGNATURE-ALGORITHM ::= { + IDENTIFIER sha1WithRSAEncryption + PARAMS TYPE NULL ARE required + HASHES { mda-sha1 } + PUBLIC-KEYS { pk-rsa } + SMIME-CAPS {IDENTIFIED BY sha1WithRSAEncryption } + } + + sha1WithRSAEncryption OBJECT IDENTIFIER ::= { + iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) + pkcs-1(1) 5 } + + -- DSA with SHA-1 + + sa-dsaWithSHA1 SIGNATURE-ALGORITHM ::= { + IDENTIFIER dsa-with-sha1 + VALUE DSA-Sig-Value + PARAMS TYPE NULL ARE absent + HASHES { mda-sha1 } + PUBLIC-KEYS { pk-dsa } + SMIME-CAPS { IDENTIFIED BY dsa-with-sha1 } + } + + dsa-with-sha1 OBJECT IDENTIFIER ::= { + iso(1) member-body(2) us(840) x9-57(10040) x9algorithm(4) 3 } + + -- DSA with SHA-224 + + sa-dsaWithSHA224 SIGNATURE-ALGORITHM ::= { + IDENTIFIER dsa-with-sha224 + VALUE DSA-Sig-Value + PARAMS TYPE NULL ARE absent + HASHES { mda-sha224 } + PUBLIC-KEYS { pk-dsa } + SMIME-CAPS { IDENTIFIED BY dsa-with-sha224 } + } + + dsa-with-sha224 OBJECT IDENTIFIER ::= { + joint-iso-ccitt(2) country(16) us(840) organization(1) gov(101) + csor(3) algorithms(4) id-dsa-with-sha2(3) 1 } + + -- DSA with SHA-256 + + sa-dsaWithSHA256 SIGNATURE-ALGORITHM ::= { + IDENTIFIER dsa-with-sha256 + VALUE DSA-Sig-Value + PARAMS TYPE NULL ARE absent + HASHES { mda-sha256 } + 
PUBLIC-KEYS { pk-dsa } + SMIME-CAPS { IDENTIFIED BY dsa-with-sha256 } + } + + dsa-with-sha256 OBJECT IDENTIFIER ::= { + joint-iso-ccitt(2) country(16) us(840) organization(1) gov(101) + csor(3) algorithms(4) id-dsa-with-sha2(3) 2 } + + -- ECDSA with SHA-1 + + sa-ecdsaWithSHA1 SIGNATURE-ALGORITHM ::= { + IDENTIFIER ecdsa-with-SHA1 + VALUE ECDSA-Sig-Value + PARAMS TYPE NULL ARE absent + HASHES { mda-sha1 } + PUBLIC-KEYS { pk-ec } + SMIME-CAPS {IDENTIFIED BY ecdsa-with-SHA1 } + } + + ecdsa-with-SHA1 OBJECT IDENTIFIER ::= { + iso(1) member-body(2) us(840) ansi-X9-62(10045) + signatures(4) 1 } + + -- ECDSA with SHA-224 + + sa-ecdsaWithSHA224 SIGNATURE-ALGORITHM ::= { + IDENTIFIER ecdsa-with-SHA224 + VALUE ECDSA-Sig-Value + PARAMS TYPE NULL ARE absent + HASHES { mda-sha224 } + PUBLIC-KEYS { pk-ec } + SMIME-CAPS { IDENTIFIED BY ecdsa-with-SHA224 } + } + + ecdsa-with-SHA224 OBJECT IDENTIFIER ::= { + iso(1) member-body(2) us(840) ansi-X9-62(10045) signatures(4) + ecdsa-with-SHA2(3) 1 } + + -- ECDSA with SHA-256 + + sa-ecdsaWithSHA256 SIGNATURE-ALGORITHM ::= { + IDENTIFIER ecdsa-with-SHA256 + VALUE ECDSA-Sig-Value + PARAMS TYPE NULL ARE absent + HASHES { mda-sha256 } + PUBLIC-KEYS { pk-ec } + SMIME-CAPS { IDENTIFIED BY ecdsa-with-SHA256 } + } + + ecdsa-with-SHA256 OBJECT IDENTIFIER ::= { + iso(1) member-body(2) us(840) ansi-X9-62(10045) signatures(4) + ecdsa-with-SHA2(3) 2 } + + -- ECDSA with SHA-384 + + sa-ecdsaWithSHA384 SIGNATURE-ALGORITHM ::= { + IDENTIFIER ecdsa-with-SHA384 + VALUE ECDSA-Sig-Value + PARAMS TYPE NULL ARE absent + HASHES { mda-sha384 } + PUBLIC-KEYS { pk-ec } + SMIME-CAPS { IDENTIFIED BY ecdsa-with-SHA384 } + } + ecdsa-with-SHA384 OBJECT IDENTIFIER ::= { + iso(1) member-body(2) us(840) ansi-X9-62(10045) signatures(4) + ecdsa-with-SHA2(3) 3 } + + -- ECDSA with SHA-512 + + sa-ecdsaWithSHA512 SIGNATURE-ALGORITHM ::= { + IDENTIFIER ecdsa-with-SHA512 + VALUE ECDSA-Sig-Value + PARAMS TYPE NULL ARE absent + HASHES { mda-sha512 } + PUBLIC-KEYS { pk-ec } + SMIME-CAPS { IDENTIFIED BY ecdsa-with-SHA512 } + } + + ecdsa-with-SHA512 OBJECT IDENTIFIER ::= { + iso(1) member-body(2) us(840) ansi-X9-62(10045) signatures(4) + ecdsa-with-SHA2(3) 4 } + + -- + -- Signature Values + -- + + -- DSA + + DSA-Sig-Value ::= SEQUENCE { + r INTEGER, + s INTEGER + } + + -- ECDSA + + ECDSA-Sig-Value ::= SEQUENCE { + r INTEGER, + s INTEGER + } + + -- + -- Message Digest Algorithms (mda-) + -- + + HashAlgs DIGEST-ALGORITHM ::= { + mda-md2 | + mda-md5 | + mda-sha1, + ... 
-- Extensible + } + -- MD-2 + + mda-md2 DIGEST-ALGORITHM ::= { + IDENTIFIER id-md2 + PARAMS TYPE NULL ARE preferredAbsent + } + + id-md2 OBJECT IDENTIFIER ::= { + iso(1) member-body(2) us(840) rsadsi(113549) + digestAlgorithm(2) 2 } + + -- MD-5 + + mda-md5 DIGEST-ALGORITHM ::= { + IDENTIFIER id-md5 + PARAMS TYPE NULL ARE preferredAbsent + } + + id-md5 OBJECT IDENTIFIER ::= { + iso(1) member-body(2) us(840) rsadsi(113549) + digestAlgorithm(2) 5 } + + -- SHA-1 + + mda-sha1 DIGEST-ALGORITHM ::= { + IDENTIFIER id-sha1 + PARAMS TYPE NULL ARE preferredAbsent + } + + id-sha1 OBJECT IDENTIFIER ::= { + iso(1) identified-organization(3) oiw(14) secsig(3) + algorithm(2) 26 } + + END diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAttributeCertificate-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAttributeCertificate-2009.asn1 new file mode 100644 index 0000000000..3ab074643f --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAttributeCertificate-2009.asn1 @@ -0,0 +1,292 @@ + PKIXAttributeCertificate-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-attribute-cert-02(47)} + DEFINITIONS IMPLICIT TAGS ::= + BEGIN + IMPORTS + + AttributeSet{}, Extensions{}, SecurityCategory{}, + EXTENSION, ATTRIBUTE, SECURITY-CATEGORY + FROM PKIX-CommonTypes-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57) } + + AlgorithmIdentifier{}, SIGNATURE-ALGORITHM, DIGEST-ALGORITHM + FROM AlgorithmInformation-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) + id-mod-algorithmInformation-02(58)} + + -- IMPORTed module OIDs MAY change if [PKIXPROF] changes + -- PKIX Certificate Extensions + + CertificateSerialNumber, UniqueIdentifier, id-pkix, id-pe, id-kp, + id-ad, id-at, SIGNED{}, SignatureAlgorithms + FROM PKIX1Explicit-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)} + + GeneralName, GeneralNames, id-ce, ext-AuthorityKeyIdentifier, + ext-AuthorityInfoAccess, ext-CRLDistributionPoints + FROM PKIX1Implicit-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)} + + ContentInfo + FROM CryptographicMessageSyntax-2009 + { iso(1) member-body(2) us(840) rsadsi(113549) + pkcs(1) pkcs-9(9) smime(16) modules(0) id-mod-cms-2004-02(41) }; + -- Define the set of extensions that can appear. + -- Some of these are imported from PKIX Cert + + AttributeCertExtensions EXTENSION ::= { + ext-auditIdentity | ext-targetInformation | + ext-AuthorityKeyIdentifier | ext-AuthorityInfoAccess | + ext-CRLDistributionPoints | ext-noRevAvail | ext-ac-proxying | + ext-aaControls, ... 
} + + ext-auditIdentity EXTENSION ::= { SYNTAX + OCTET STRING IDENTIFIED BY id-pe-ac-auditIdentity} + + ext-targetInformation EXTENSION ::= { SYNTAX + Targets IDENTIFIED BY id-ce-targetInformation } + + ext-noRevAvail EXTENSION ::= { SYNTAX + NULL IDENTIFIED BY id-ce-noRevAvail} + + ext-ac-proxying EXTENSION ::= { SYNTAX + ProxyInfo IDENTIFIED BY id-pe-ac-proxying} + + ext-aaControls EXTENSION ::= { SYNTAX + AAControls IDENTIFIED BY id-pe-aaControls} + + -- Define the set of attributes used here + + AttributesDefined ATTRIBUTE ::= { at-authenticationInfo | + at-accesIdentity | at-chargingIdentity | at-group | + at-role | at-clearance | at-encAttrs, ...} + + at-authenticationInfo ATTRIBUTE ::= { TYPE SvceAuthInfo + IDENTIFIED BY id-aca-authenticationInfo} + + at-accesIdentity ATTRIBUTE ::= { TYPE SvceAuthInfo + IDENTIFIED BY id-aca-accessIdentity} + + at-chargingIdentity ATTRIBUTE ::= { TYPE IetfAttrSyntax + IDENTIFIED BY id-aca-chargingIdentity} + + at-group ATTRIBUTE ::= { TYPE IetfAttrSyntax + IDENTIFIED BY id-aca-group} + + at-role ATTRIBUTE ::= { TYPE RoleSyntax + IDENTIFIED BY id-at-role} + + at-clearance ATTRIBUTE ::= { TYPE Clearance + IDENTIFIED BY id-at-clearance} + at-clearance-RFC3281 ATTRIBUTE ::= {TYPE Clearance-rfc3281 + IDENTIFIED BY id-at-clearance-rfc3281 } + + at-encAttrs ATTRIBUTE ::= { TYPE ContentInfo + IDENTIFIED BY id-aca-encAttrs} + + -- + -- OIDs used by Attribute Certificate Extensions + -- + + id-pe-ac-auditIdentity OBJECT IDENTIFIER ::= { id-pe 4 } + id-pe-aaControls OBJECT IDENTIFIER ::= { id-pe 6 } + id-pe-ac-proxying OBJECT IDENTIFIER ::= { id-pe 10 } + id-ce-targetInformation OBJECT IDENTIFIER ::= { id-ce 55 } + id-ce-noRevAvail OBJECT IDENTIFIER ::= { id-ce 56 } + + -- + -- OIDs used by Attribute Certificate Attributes + -- + + id-aca OBJECT IDENTIFIER ::= { id-pkix 10 } + + id-aca-authenticationInfo OBJECT IDENTIFIER ::= { id-aca 1 } + id-aca-accessIdentity OBJECT IDENTIFIER ::= { id-aca 2 } + id-aca-chargingIdentity OBJECT IDENTIFIER ::= { id-aca 3 } + id-aca-group OBJECT IDENTIFIER ::= { id-aca 4 } + -- { id-aca 5 } is reserved + id-aca-encAttrs OBJECT IDENTIFIER ::= { id-aca 6 } + + id-at-role OBJECT IDENTIFIER ::= { id-at 72} + id-at-clearance OBJECT IDENTIFIER ::= { + joint-iso-ccitt(2) ds(5) attributeType(4) clearance (55) } + + -- Uncomment the following declaration and comment the above line if + -- using the id-at-clearance attribute as defined in [RFC3281] + -- id-at-clearance ::= id-at-clearance-3281 + + id-at-clearance-rfc3281 OBJECT IDENTIFIER ::= { + joint-iso-ccitt(2) ds(5) module(1) selected-attribute-types(5) + clearance (55) } + + -- + -- The syntax of an Attribute Certificate + -- + + AttributeCertificate ::= SIGNED{AttributeCertificateInfo} + + AttributeCertificateInfo ::= SEQUENCE { + version AttCertVersion, -- version is v2 + holder Holder, + issuer AttCertIssuer, + signature AlgorithmIdentifier{SIGNATURE-ALGORITHM, + {SignatureAlgorithms}}, + serialNumber CertificateSerialNumber, + attrCertValidityPeriod AttCertValidityPeriod, + attributes SEQUENCE OF + AttributeSet{{AttributesDefined}}, + issuerUniqueID UniqueIdentifier OPTIONAL, + extensions Extensions{{AttributeCertExtensions}} OPTIONAL + } + + AttCertVersion ::= INTEGER { v2(1) } + + Holder ::= SEQUENCE { + baseCertificateID [0] IssuerSerial OPTIONAL, + -- the issuer and serial number of + -- the holder's Public Key Certificate + entityName [1] GeneralNames OPTIONAL, + -- the name of the claimant or role + objectDigestInfo [2] ObjectDigestInfo OPTIONAL + -- used to directly 
authenticate the + -- holder, for example, an executable + } + + ObjectDigestInfo ::= SEQUENCE { + digestedObjectType ENUMERATED { + publicKey (0), + publicKeyCert (1), + otherObjectTypes (2) }, + -- otherObjectTypes MUST NOT + -- be used in this profile + otherObjectTypeID OBJECT IDENTIFIER OPTIONAL, + digestAlgorithm AlgorithmIdentifier{DIGEST-ALGORITHM, {...}}, + objectDigest BIT STRING + } + + AttCertIssuer ::= CHOICE { + v1Form GeneralNames, -- MUST NOT be used in this + -- profile + v2Form [0] V2Form -- v2 only + } + + V2Form ::= SEQUENCE { + issuerName GeneralNames OPTIONAL, + baseCertificateID [0] IssuerSerial OPTIONAL, + objectDigestInfo [1] ObjectDigestInfo OPTIONAL + -- issuerName MUST be present in this profile + -- baseCertificateID and objectDigestInfo MUST + -- NOT be present in this profile + } + + IssuerSerial ::= SEQUENCE { + issuer GeneralNames, + serial CertificateSerialNumber, + issuerUID UniqueIdentifier OPTIONAL + } + + AttCertValidityPeriod ::= SEQUENCE { + notBeforeTime GeneralizedTime, + notAfterTime GeneralizedTime + } + + -- + -- Syntax used by Attribute Certificate Extensions + -- + + Targets ::= SEQUENCE OF Target + + Target ::= CHOICE { + targetName [0] GeneralName, + targetGroup [1] GeneralName, + targetCert [2] TargetCert + } + + TargetCert ::= SEQUENCE { + targetCertificate IssuerSerial, + targetName GeneralName OPTIONAL, + certDigestInfo ObjectDigestInfo OPTIONAL + } + + AAControls ::= SEQUENCE { + pathLenConstraint INTEGER (0..MAX) OPTIONAL, + permittedAttrs [0] AttrSpec OPTIONAL, + excludedAttrs [1] AttrSpec OPTIONAL, + permitUnSpecified BOOLEAN DEFAULT TRUE + } + + AttrSpec::= SEQUENCE OF OBJECT IDENTIFIER + + ProxyInfo ::= SEQUENCE OF Targets + + -- + -- Syntax used by Attribute Certificate Attributes + -- + IetfAttrSyntax ::= SEQUENCE { + policyAuthority[0] GeneralNames OPTIONAL, + values SEQUENCE OF CHOICE { + octets OCTET STRING, + oid OBJECT IDENTIFIER, + string UTF8String + } + } + + SvceAuthInfo ::= SEQUENCE { + service GeneralName, + ident GeneralName, + authInfo OCTET STRING OPTIONAL + } + + RoleSyntax ::= SEQUENCE { + roleAuthority [0] GeneralNames OPTIONAL, + roleName [1] GeneralName + } + + Clearance ::= SEQUENCE { + policyId OBJECT IDENTIFIER, + classList ClassList DEFAULT {unclassified}, + securityCategories SET OF SecurityCategory + {{SupportedSecurityCategories}} OPTIONAL + } + + -- Uncomment the following lines to support deprecated clearance + -- syntax and comment out previous Clearance. + + -- Clearance ::= Clearance-rfc3281 + + Clearance-rfc3281 ::= SEQUENCE { + policyId [0] OBJECT IDENTIFIER, + classList [1] ClassList DEFAULT {unclassified}, + securityCategories [2] SET OF SecurityCategory-rfc3281 + {{SupportedSecurityCategories}} OPTIONAL + } + + ClassList ::= BIT STRING { + unmarked (0), + unclassified (1), + restricted (2), + confidential (3), + secret (4), + topSecret (5) + } + SupportedSecurityCategories SECURITY-CATEGORY ::= { ... } + + SecurityCategory-rfc3281{SECURITY-CATEGORY:Supported} ::= SEQUENCE { + type [0] IMPLICIT SECURITY-CATEGORY. + &id({Supported}), + value [1] EXPLICIT SECURITY-CATEGORY. 
+ &Type({Supported}{@type}) + } + + ACClearAttrs ::= SEQUENCE { + acIssuer GeneralName, + acSerial INTEGER, + attrs SEQUENCE OF AttributeSet{{AttributesDefined}} + } + + END diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCMP-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCMP-2009.asn1 new file mode 100644 index 0000000000..968a142f28 --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCMP-2009.asn1 @@ -0,0 +1,495 @@ + PKIXCMP-2009 + { iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-cmp2000-02(50) } + DEFINITIONS EXPLICIT TAGS ::= + BEGIN + IMPORTS + + AttributeSet{}, Extensions{}, EXTENSION, ATTRIBUTE + FROM PKIX-CommonTypes-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)} + + AlgorithmIdentifier{}, SIGNATURE-ALGORITHM, ALGORITHM, + DIGEST-ALGORITHM, MAC-ALGORITHM + FROM AlgorithmInformation-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) + id-mod-algorithmInformation-02(58)} + + Certificate, CertificateList + FROM PKIX1Explicit-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)} + + GeneralName, KeyIdentifier + FROM PKIX1Implicit-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)} + + CertTemplate, PKIPublicationInfo, EncryptedValue, CertId, + CertReqMessages + FROM PKIXCRMF-2009 + { iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-crmf2005-02(55) } + -- see also the behavioral clarifications to CRMF codified in + -- Appendix C of this specification + + CertificationRequest + FROM PKCS-10 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkcs10-2009(69)} + -- (specified in RFC 2986 with 1993 ASN.1 syntax and IMPLICIT + -- tags). Alternatively, implementers may directly include + -- the [PKCS10] syntax in this module + ; + + -- the rest of the module contains locally defined OIDs and + -- constructs + + CMPCertificate ::= CHOICE { x509v3PKCert Certificate, ... } + -- This syntax, while bits-on-the-wire compatible with the + -- standard X.509 definition of "Certificate", allows the + -- possibility of future certificate types (such as X.509 + -- attribute certificates, WAP WTLS certificates, or other kinds + -- of certificates) within this certificate management protocol, + -- should a need ever arise to support such generality. Those + -- implementations that do not foresee a need to ever support + -- other certificate types MAY, if they wish, comment out the + -- above structure and "uncomment" the following one prior to + -- compiling this ASN.1 module. (Note that interoperability + -- with implementations that don't do this will be unaffected by + -- this change.) 
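The modules in this rfcs directory import freely from one another (PKIXCMP-2009 alone pulls types from PKIX-CommonTypes-2009, AlgorithmInformation-2009, PKIX1Explicit-2009, PKIX1Implicit-2009, PKIXCRMF-2009, and PKCS-10), so one convenient way to compile them is as a single module set. A minimal sketch under that assumption; the file name rfcs.set.asn and module name pkix_set_demo are made up for illustration:

    %% Minimal sketch (illustrative only): compile the interdependent
    %% RFC modules in one pass via a set file. "rfcs.set.asn" would be a
    %% plain text file listing the .asn1 files, one per line.
    -module(pkix_set_demo).
    -export([compile_set/1]).

    compile_set(OutDir) ->
        ok = asn1ct:compile("rfcs.set.asn", [ber, {outdir, OutDir}]),
        %% The generated code for the whole set lands in a single module
        %% named after the set file, e.g. 'rfcs':encode('PKIMessage', Msg).
        ok.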
+ + -- CMPCertificate ::= Certificate + + PKIMessage ::= SEQUENCE { + header PKIHeader, + body PKIBody, + protection [0] PKIProtection OPTIONAL, + extraCerts [1] SEQUENCE SIZE (1..MAX) OF CMPCertificate + OPTIONAL } + + PKIMessages ::= SEQUENCE SIZE (1..MAX) OF PKIMessage + + PKIHeader ::= SEQUENCE { + pvno INTEGER { cmp1999(1), cmp2000(2) }, + sender GeneralName, + -- identifies the sender + recipient GeneralName, + -- identifies the intended recipient + messageTime [0] GeneralizedTime OPTIONAL, + -- time of production of this message (used when sender + -- believes that the transport will be "suitable"; i.e., + -- that the time will still be meaningful upon receipt) + protectionAlg [1] AlgorithmIdentifier{ALGORITHM, {...}} + OPTIONAL, + -- algorithm used for calculation of protection bits + senderKID [2] KeyIdentifier OPTIONAL, + recipKID [3] KeyIdentifier OPTIONAL, + -- to identify specific keys used for protection + transactionID [4] OCTET STRING OPTIONAL, + -- identifies the transaction; i.e., this will be the same in + -- corresponding request, response, certConf, and PKIConf + -- messages + senderNonce [5] OCTET STRING OPTIONAL, + recipNonce [6] OCTET STRING OPTIONAL, + -- nonces used to provide replay protection, senderNonce + -- is inserted by the creator of this message; recipNonce + -- is a nonce previously inserted in a related message by + -- the intended recipient of this message + freeText [7] PKIFreeText OPTIONAL, + -- this may be used to indicate context-specific instructions + -- (this field is intended for human consumption) + generalInfo [8] SEQUENCE SIZE (1..MAX) OF + InfoTypeAndValue OPTIONAL + -- this may be used to convey context-specific information + -- (this field not primarily intended for human consumption) + } + + PKIFreeText ::= SEQUENCE SIZE (1..MAX) OF UTF8String + -- text encoded as UTF-8 String [RFC3629] (note: each + -- UTF8String MAY include an [RFC3066] language tag + -- to indicate the language of the contained text; + -- see [RFC2482] for details) + + PKIBody ::= CHOICE { -- message-specific body elements + ir [0] CertReqMessages, --Initialization Request + ip [1] CertRepMessage, --Initialization Response + cr [2] CertReqMessages, --Certification Request + cp [3] CertRepMessage, --Certification Response + p10cr [4] CertificationRequest, --imported from [PKCS10] + popdecc [5] POPODecKeyChallContent, --pop Challenge + popdecr [6] POPODecKeyRespContent, --pop Response + kur [7] CertReqMessages, --Key Update Request + kup [8] CertRepMessage, --Key Update Response + krr [9] CertReqMessages, --Key Recovery Request + krp [10] KeyRecRepContent, --Key Recovery Response + rr [11] RevReqContent, --Revocation Request + rp [12] RevRepContent, --Revocation Response + ccr [13] CertReqMessages, --Cross-Cert. Request + ccp [14] CertRepMessage, --Cross-Cert. Response + ckuann [15] CAKeyUpdAnnContent, --CA Key Update Ann. + cann [16] CertAnnContent, --Certificate Ann. + rann [17] RevAnnContent, --Revocation Ann. 
+ crlann [18] CRLAnnContent, --CRL Announcement + pkiconf [19] PKIConfirmContent, --Confirmation + nested [20] NestedMessageContent, --Nested Message + genm [21] GenMsgContent, --General Message + genp [22] GenRepContent, --General Response + error [23] ErrorMsgContent, --Error Message + certConf [24] CertConfirmContent, --Certificate confirm + pollReq [25] PollReqContent, --Polling request + pollRep [26] PollRepContent --Polling response + } + + PKIProtection ::= BIT STRING + + ProtectedPart ::= SEQUENCE { + header PKIHeader, + body PKIBody } + + id-PasswordBasedMac OBJECT IDENTIFIER ::= { iso(1) member-body(2) + usa(840) nt(113533) nsn(7) algorithms(66) 13 } + PBMParameter ::= SEQUENCE { + salt OCTET STRING, + -- note: implementations MAY wish to limit acceptable sizes + -- of this string to values appropriate for their environment + -- in order to reduce the risk of denial-of-service attacks + owf AlgorithmIdentifier{DIGEST-ALGORITHM, {...}}, + -- AlgId for a One-Way Function (SHA-1 recommended) + iterationCount INTEGER, + -- number of times the OWF is applied + -- note: implementations MAY wish to limit acceptable sizes + -- of this integer to values appropriate for their environment + -- in order to reduce the risk of denial-of-service attacks + mac AlgorithmIdentifier{MAC-ALGORITHM, {...}} + -- the MAC AlgId (e.g., DES-MAC, Triple-DES-MAC [PKCS11], + -- or HMAC [RFC2104, RFC2202]) + } + + id-DHBasedMac OBJECT IDENTIFIER ::= { iso(1) member-body(2) + usa(840) nt(113533) nsn(7) algorithms(66) 30 } + DHBMParameter ::= SEQUENCE { + owf AlgorithmIdentifier{DIGEST-ALGORITHM, {...}}, + -- AlgId for a One-Way Function (SHA-1 recommended) + mac AlgorithmIdentifier{MAC-ALGORITHM, {...}} + -- the MAC AlgId (e.g., DES-MAC, Triple-DES-MAC [PKCS11], + -- or HMAC [RFC2104, RFC2202]) + } + + PKIStatus ::= INTEGER { + accepted (0), + -- you got exactly what you asked for + grantedWithMods (1), + -- you got something like what you asked for; the + -- requester is responsible for ascertaining the differences + rejection (2), + -- you don't get it, more information elsewhere in the message + waiting (3), + -- the request body part has not yet been processed; expect to + -- hear more later (note: proper handling of this status + -- response MAY use the polling req/rep PKIMessages specified + -- in Section 5.3.22; alternatively, polling in the underlying + -- transport layer MAY have some utility in this regard) + revocationWarning (4), + -- this message contains a warning that a revocation is + -- imminent + revocationNotification (5), + -- notification that a revocation has occurred + keyUpdateWarning (6) + -- update already done for the oldCertId specified in + -- CertReqMsg + } + + PKIFailureInfo ::= BIT STRING { + -- since we can fail in more than one way! + -- More codes may be added in the future if/when required. 
+ badAlg (0), + -- unrecognized or unsupported Algorithm Identifier + badMessageCheck (1), + -- integrity check failed (e.g., signature did not verify) + badRequest (2), + -- transaction not permitted or supported + badTime (3), + -- messageTime was not sufficiently close to the system time, + -- as defined by local policy + badCertId (4), + -- no certificate could be found matching the provided criteria + badDataFormat (5), + -- the data submitted has the wrong format + wrongAuthority (6), + -- the authority indicated in the request is different from the + -- one creating the response token + incorrectData (7), + -- the requester's data is incorrect (for notary services) + missingTimeStamp (8), + -- when the timestamp is missing but should be there + -- (by policy) + badPOP (9), + -- the proof-of-possession failed + certRevoked (10), + -- the certificate has already been revoked + certConfirmed (11), + -- the certificate has already been confirmed + wrongIntegrity (12), + -- invalid integrity, password based instead of signature or + -- vice versa + badRecipientNonce (13), + -- invalid recipient nonce, either missing or wrong value + timeNotAvailable (14), + -- the TSA's time source is not available + unacceptedPolicy (15), + -- the requested TSA policy is not supported by the TSA + unacceptedExtension (16), + -- the requested extension is not supported by the TSA + addInfoNotAvailable (17), + -- the additional information requested could not be + -- understood or is not available + badSenderNonce (18), + -- invalid sender nonce, either missing or wrong size + badCertTemplate (19), + -- invalid cert. template or missing mandatory information + signerNotTrusted (20), + -- signer of the message unknown or not trusted + transactionIdInUse (21), + -- the transaction identifier is already in use + unsupportedVersion (22), + -- the version of the message is not supported + notAuthorized (23), + -- the sender was not authorized to make the preceding + -- request or perform the preceding action + systemUnavail (24), + -- the request cannot be handled due to system unavailability + systemFailure (25), + -- the request cannot be handled due to system failure + duplicateCertReq (26) + -- certificate cannot be issued because a duplicate + -- certificate already exists + } + + PKIStatusInfo ::= SEQUENCE { + status PKIStatus, + statusString PKIFreeText OPTIONAL, + failInfo PKIFailureInfo OPTIONAL } + + OOBCert ::= CMPCertificate + + OOBCertHash ::= SEQUENCE { + hashAlg [0] AlgorithmIdentifier{DIGEST-ALGORITHM, {...}} + OPTIONAL, + certId [1] CertId OPTIONAL, + hashVal BIT STRING + -- hashVal is calculated over the DER encoding of the + -- self-signed certificate with the identifier certID. + } + + POPODecKeyChallContent ::= SEQUENCE OF Challenge + -- One Challenge per encryption key certification request (in the + -- same order as these requests appear in CertReqMessages). + + Challenge ::= SEQUENCE { + owf AlgorithmIdentifier{DIGEST-ALGORITHM, {...}} + OPTIONAL, + -- MUST be present in the first Challenge; MAY be omitted in + -- any subsequent Challenge in POPODecKeyChallContent (if + -- omitted, then the owf used in the immediately preceding + -- Challenge is to be used). + witness OCTET STRING, + -- the result of applying the one-way function (owf) to a + -- randomly-generated INTEGER, A. [Note that a different + -- INTEGER MUST be used for each Challenge.] + challenge OCTET STRING + -- the encryption (under the public key for which the cert. 
+ -- request is being made) of Rand, where Rand is specified as + -- Rand ::= SEQUENCE { + -- int INTEGER, + -- - the randomly-generated INTEGER A (above) + -- sender GeneralName + -- - the sender's name (as included in PKIHeader) + -- } + } + + POPODecKeyRespContent ::= SEQUENCE OF INTEGER + -- One INTEGER per encryption key certification request (in the + -- same order as these requests appear in CertReqMessages). The + -- retrieved INTEGER A (above) is returned to the sender of the + -- corresponding Challenge. + + CertRepMessage ::= SEQUENCE { + caPubs [1] SEQUENCE SIZE (1..MAX) OF CMPCertificate + OPTIONAL, + response SEQUENCE OF CertResponse } + + CertResponse ::= SEQUENCE { + certReqId INTEGER, + -- to match this response with the corresponding request (a value + -- of -1 is to be used if certReqId is not specified in the + -- corresponding request) + status PKIStatusInfo, + certifiedKeyPair CertifiedKeyPair OPTIONAL, + rspInfo OCTET STRING OPTIONAL + -- analogous to the id-regInfo-utf8Pairs string defined + -- for regInfo in CertReqMsg [RFC4211] + } + + CertifiedKeyPair ::= SEQUENCE { + certOrEncCert CertOrEncCert, + privateKey [0] EncryptedValue OPTIONAL, + -- see [RFC4211] for comment on encoding + publicationInfo [1] PKIPublicationInfo OPTIONAL } + + CertOrEncCert ::= CHOICE { + certificate [0] CMPCertificate, + encryptedCert [1] EncryptedValue } + KeyRecRepContent ::= SEQUENCE { + status PKIStatusInfo, + newSigCert [0] CMPCertificate OPTIONAL, + caCerts [1] SEQUENCE SIZE (1..MAX) OF + CMPCertificate OPTIONAL, + keyPairHist [2] SEQUENCE SIZE (1..MAX) OF + CertifiedKeyPair OPTIONAL } + + RevReqContent ::= SEQUENCE OF RevDetails + + RevDetails ::= SEQUENCE { + certDetails CertTemplate, + -- allows requester to specify as much as they can about + -- the cert. for which revocation is requested + -- (e.g., for cases in which serialNumber is not available) + crlEntryDetails Extensions{{...}} OPTIONAL + -- requested crlEntryExtensions + } + + RevRepContent ::= SEQUENCE { + status SEQUENCE SIZE (1..MAX) OF PKIStatusInfo, + -- in same order as was sent in RevReqContent + revCerts [0] SEQUENCE SIZE (1..MAX) OF CertId OPTIONAL, + -- IDs for which revocation was requested + -- (same order as status) + crls [1] SEQUENCE SIZE (1..MAX) OF CertificateList OPTIONAL + -- the resulting CRLs (there may be more than one) + } + + CAKeyUpdAnnContent ::= SEQUENCE { + oldWithNew CMPCertificate, -- old pub signed with new priv + newWithOld CMPCertificate, -- new pub signed with old priv + newWithNew CMPCertificate -- new pub signed with new priv + } + + CertAnnContent ::= CMPCertificate + + RevAnnContent ::= SEQUENCE { + status PKIStatus, + certId CertId, + willBeRevokedAt GeneralizedTime, + badSinceDate GeneralizedTime, + crlDetails Extensions{{...}} OPTIONAL + -- extra CRL details (e.g., crl number, reason, location, etc.) + } + + CRLAnnContent ::= SEQUENCE OF CertificateList + PKIConfirmContent ::= NULL + + NestedMessageContent ::= PKIMessages + + INFO-TYPE-AND-VALUE ::= TYPE-IDENTIFIER + + InfoTypeAndValue ::= SEQUENCE { + infoType INFO-TYPE-AND-VALUE. + &id({SupportedInfoSet}), + infoValue INFO-TYPE-AND-VALUE. + &Type({SupportedInfoSet}{@infoType}) } + + SupportedInfoSet INFO-TYPE-AND-VALUE ::= { ... 
} + + -- Example InfoTypeAndValue contents include, but are not limited + -- to, the following (uncomment in this ASN.1 module and use as + -- appropriate for a given environment): + -- + -- id-it-caProtEncCert OBJECT IDENTIFIER ::= {id-it 1} + -- CAProtEncCertValue ::= CMPCertificate + -- id-it-signKeyPairTypes OBJECT IDENTIFIER ::= {id-it 2} + -- SignKeyPairTypesValue ::= SEQUENCE OF + -- AlgorithmIdentifier{{...}} + -- id-it-encKeyPairTypes OBJECT IDENTIFIER ::= {id-it 3} + -- EncKeyPairTypesValue ::= SEQUENCE OF + -- AlgorithmIdentifier{{...}} + -- id-it-preferredSymmAlg OBJECT IDENTIFIER ::= {id-it 4} + -- PreferredSymmAlgValue ::= AlgorithmIdentifier{{...}} + -- id-it-caKeyUpdateInfo OBJECT IDENTIFIER ::= {id-it 5} + -- CAKeyUpdateInfoValue ::= CAKeyUpdAnnContent + -- id-it-currentCRL OBJECT IDENTIFIER ::= {id-it 6} + -- CurrentCRLValue ::= CertificateList + -- id-it-unsupportedOIDs OBJECT IDENTIFIER ::= {id-it 7} + -- UnsupportedOIDsValue ::= SEQUENCE OF OBJECT IDENTIFIER + -- id-it-keyPairParamReq OBJECT IDENTIFIER ::= {id-it 10} + -- KeyPairParamReqValue ::= OBJECT IDENTIFIER + -- id-it-keyPairParamRep OBJECT IDENTIFIER ::= {id-it 11} + -- KeyPairParamRepValue ::= AlgorithmIdentifer + -- id-it-revPassphrase OBJECT IDENTIFIER ::= {id-it 12} + -- RevPassphraseValue ::= EncryptedValue + -- id-it-implicitConfirm OBJECT IDENTIFIER ::= {id-it 13} + -- ImplicitConfirmValue ::= NULL + -- id-it-confirmWaitTime OBJECT IDENTIFIER ::= {id-it 14} + -- ConfirmWaitTimeValue ::= GeneralizedTime + -- id-it-origPKIMessage OBJECT IDENTIFIER ::= {id-it 15} + -- OrigPKIMessageValue ::= PKIMessages + -- id-it-suppLangTags OBJECT IDENTIFIER ::= {id-it 16} + -- SuppLangTagsValue ::= SEQUENCE OF UTF8String + -- + -- where + -- + -- id-pkix OBJECT IDENTIFIER ::= { + -- iso(1) identified-organization(3) + -- dod(6) internet(1) security(5) mechanisms(5) pkix(7)} + -- and + -- id-it OBJECT IDENTIFIER ::= {id-pkix 4} + -- + -- + -- This construct MAY also be used to define new PKIX Certificate + -- Management Protocol request and response messages, or general- + -- purpose (e.g., announcement) messages for future needs or for + -- specific environments. + + GenMsgContent ::= SEQUENCE OF InfoTypeAndValue + + -- May be sent by EE, RA, or CA (depending on message content). + -- The OPTIONAL infoValue parameter of InfoTypeAndValue will + -- typically be omitted for some of the examples given above. + -- The receiver is free to ignore any contained OBJECT IDs that it + -- does not recognize. If sent from EE to CA, the empty set + -- indicates that the CA may send + -- any/all information that it wishes. + + GenRepContent ::= SEQUENCE OF InfoTypeAndValue + -- Receiver MAY ignore any contained OIDs that it does not + -- recognize. 
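PKIStatusInfo combines the PKIStatus named INTEGER and the PKIFailureInfo named BIT STRING defined above. A minimal sketch of building such a value with the record and value notation used by the Erlang backend, assuming the illustrative set module from the previous sketch has been compiled; none of the names below are part of this change:

    %% Minimal sketch (illustrative only): a rejection PKIStatusInfo.
    %% A named BIT STRING such as PKIFailureInfo accepts a list of
    %% named-bit atoms; a named INTEGER such as PKIStatus accepts the
    %% corresponding atom. The record comes from the generated .hrl.
    -module(pkix_status_demo).
    -export([rejection_status/0]).
    -include("rfcs.hrl").

    rejection_status() ->
        #'PKIStatusInfo'{
           status       = rejection,                          %% PKIStatus (2)
           statusString = [<<"proof-of-possession failed">>], %% SEQUENCE OF UTF8String
           failInfo     = [badPOP]                            %% named bit 9
          }.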
+ + ErrorMsgContent ::= SEQUENCE { + pKIStatusInfo PKIStatusInfo, + errorCode INTEGER OPTIONAL, + -- implementation-specific error codes + errorDetails PKIFreeText OPTIONAL + -- implementation-specific error details + } + + CertConfirmContent ::= SEQUENCE OF CertStatus + + CertStatus ::= SEQUENCE { + certHash OCTET STRING, + -- the hash of the certificate, using the same hash algorithm + -- as is used to create and verify the certificate signature + certReqId INTEGER, + -- to match this confirmation with the corresponding req/rep + statusInfo PKIStatusInfo OPTIONAL } + + PollReqContent ::= SEQUENCE OF SEQUENCE { + certReqId INTEGER } + + PollRepContent ::= SEQUENCE OF SEQUENCE { + certReqId INTEGER, + checkAfter INTEGER, -- time in seconds + reason PKIFreeText OPTIONAL } + + END diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCRMF-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCRMF-2009.asn1 new file mode 100644 index 0000000000..1c0b780499 --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCRMF-2009.asn1 @@ -0,0 +1,409 @@ + PKIXCRMF-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-crmf2005-02(55)} + DEFINITIONS IMPLICIT TAGS ::= + BEGIN + IMPORTS + + AttributeSet{}, Extensions{}, EXTENSION, ATTRIBUTE, + SingleAttribute{} + FROM PKIX-CommonTypes-2009 + {iso(1) identified-organization(3) dod(6) internet(1) + security(5) mechanisms(5) pkix(7) id-mod(0) + id-mod-pkixCommon-02(57) } + + AlgorithmIdentifier{}, SIGNATURE-ALGORITHM, ALGORITHM, + DIGEST-ALGORITHM, MAC-ALGORITHM, PUBLIC-KEY + FROM AlgorithmInformation-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) + id-mod-algorithmInformation-02(58)} + + Version, Name, Time, SubjectPublicKeyInfo, UniqueIdentifier, id-pkix, + SignatureAlgorithms + FROM PKIX1Explicit-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)} + + GeneralName, CertExtensions + FROM PKIX1Implicit-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)} + + EnvelopedData, CONTENT-TYPE + FROM CryptographicMessageSyntax-2009 + { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9) + smime(16) modules(0) id-mod-cms-2004-02(41)} + maca-hMAC-SHA1 + FROM CryptographicMessageSyntaxAlgorithms-2009 + { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9) + smime(16) modules(0) id-mod-cmsalg-2001-02(37) } + + mda-sha1 + FROM PKIXAlgs-2009 + { iso(1) identified-organization(3) dod(6) + internet(1) security(5) mechanisms(5) pkix(7) id-mod(0) + id-mod-pkix1-algorithms2008-02(56) } ; + + -- arc for Internet X.509 PKI protocols and their components + + id-pkip OBJECT IDENTIFIER ::= { id-pkix 5 } + + id-smime OBJECT IDENTIFIER ::= { iso(1) member-body(2) + us(840) rsadsi(113549) pkcs(1) pkcs9(9) 16 } + + id-ct OBJECT IDENTIFIER ::= { id-smime 1 } -- content types + + -- Core definitions for this module + + CertReqMessages ::= SEQUENCE SIZE (1..MAX) OF CertReqMsg + + CertReqMsg ::= SEQUENCE { + certReq CertRequest, + popo ProofOfPossession OPTIONAL, + -- content depends upon key type + regInfo SEQUENCE SIZE(1..MAX) OF + SingleAttribute{{RegInfoSet}} OPTIONAL } + + CertRequest ::= SEQUENCE { + certReqId INTEGER, + -- ID for matching request and reply + certTemplate CertTemplate, + -- Selected fields of cert to be issued + controls Controls OPTIONAL } + -- Attributes 
affecting issuance + + CertTemplate ::= SEQUENCE { + version [0] Version OPTIONAL, + serialNumber [1] INTEGER OPTIONAL, + signingAlg [2] AlgorithmIdentifier{SIGNATURE-ALGORITHM, + {SignatureAlgorithms}} OPTIONAL, + issuer [3] Name OPTIONAL, + validity [4] OptionalValidity OPTIONAL, + subject [5] Name OPTIONAL, + publicKey [6] SubjectPublicKeyInfo OPTIONAL, + issuerUID [7] UniqueIdentifier OPTIONAL, + subjectUID [8] UniqueIdentifier OPTIONAL, + extensions [9] Extensions{{CertExtensions}} OPTIONAL } + + OptionalValidity ::= SEQUENCE { + notBefore [0] Time OPTIONAL, + notAfter [1] Time OPTIONAL } -- at least one MUST be present + + Controls ::= SEQUENCE SIZE(1..MAX) OF SingleAttribute + {{RegControlSet}} + + ProofOfPossession ::= CHOICE { + raVerified [0] NULL, + -- used if the RA has already verified that the requester is in + -- possession of the private key + signature [1] POPOSigningKey, + keyEncipherment [2] POPOPrivKey, + keyAgreement [3] POPOPrivKey } + + POPOSigningKey ::= SEQUENCE { + poposkInput [0] POPOSigningKeyInput OPTIONAL, + algorithmIdentifier AlgorithmIdentifier{SIGNATURE-ALGORITHM, + {SignatureAlgorithms}}, + signature BIT STRING } + -- The signature (using "algorithmIdentifier") is on the + -- DER-encoded value of poposkInput. NOTE: If the CertReqMsg + -- certReq CertTemplate contains the subject and publicKey values, + -- then poposkInput MUST be omitted and the signature MUST be + -- computed over the DER-encoded value of CertReqMsg certReq. If + -- the CertReqMsg certReq CertTemplate does not contain both the + -- public key and subject values (i.e., if it contains only one + -- of these, or neither), then poposkInput MUST be present and + -- MUST be signed. + + POPOSigningKeyInput ::= SEQUENCE { + authInfo CHOICE { + sender [0] GeneralName, + -- used only if an authenticated identity has been + -- established for the sender (e.g., a DN from a + -- previously-issued and currently-valid certificate) + publicKeyMAC PKMACValue }, + -- used if no authenticated GeneralName currently exists for + -- the sender; publicKeyMAC contains a password-based MAC + -- on the DER-encoded value of publicKey + publicKey SubjectPublicKeyInfo } -- from CertTemplate + + PKMACValue ::= SEQUENCE { + algId AlgorithmIdentifier{MAC-ALGORITHM, + {Password-MACAlgorithms}}, + value BIT STRING } + + -- + -- Define the currently only acceptable MAC algorithm to be used + -- for the PKMACValue structure + -- + + id-PasswordBasedMac OBJECT IDENTIFIER ::= { iso(1) member-body(2) + usa(840) nt(113533) nsn(7) algorithms(66) 13 } + + Password-MACAlgorithms MAC-ALGORITHM ::= { + {IDENTIFIER id-PasswordBasedMac + PARAMS TYPE PBMParameter ARE required + IS-KEYED-MAC TRUE + }, ... + } + + PBMParameter ::= SEQUENCE { + salt OCTET STRING, + owf AlgorithmIdentifier{DIGEST-ALGORITHM, + {DigestAlgorithms}}, + -- AlgId for a One-Way Function (SHA-1 recommended) + iterationCount INTEGER, + -- number of times the OWF is applied + mac AlgorithmIdentifier{MAC-ALGORITHM, + {MACAlgorithms}} + -- the MAC AlgId (e.g., DES-MAC, Triple-DES-MAC, or HMAC + } + + DigestAlgorithms DIGEST-ALGORITHM ::= { + mda-sha1, ... + } + + MACAlgorithms MAC-ALGORITHM ::= { + -- The modules containing the ASN.1 for the DES and 3DES MAC + -- algorithms have not been updated at the time that this is + -- being published. Users of this module should define the + -- appropriate MAC-ALGORITHM objects and uncomment the + -- following lines if they support these MAC algorithms. + -- maca-des-mac | maca-3des-mac -- + maca-hMAC-SHA1, + ... 
+ } + + POPOPrivKey ::= CHOICE { + thisMessage [0] BIT STRING, -- Deprecated + -- possession is proven in this message (which contains + -- the private key itself (encrypted for the CA)) + subsequentMessage [1] SubsequentMessage, + -- possession will be proven in a subsequent message + dhMAC [2] BIT STRING, -- Deprecated + agreeMAC [3] PKMACValue, + encryptedKey [4] EnvelopedData } + -- for keyAgreement (only), possession is proven in this message + -- (which contains a MAC (over the DER-encoded value of the + -- certReq parameter in CertReqMsg, which MUST include both + -- subject and publicKey) based on a key derived from the end + -- entity's private DH key and the CA's public DH key); + + SubsequentMessage ::= INTEGER { + encrCert (0), + -- requests that resulting certificate be encrypted for the + -- end entity (following which, POP will be proven in a + -- confirmation message) + challengeResp (1) } + -- requests that CA engage in challenge-response exchange with + -- end entity in order to prove private key possession + + -- + -- id-ct-encKeyWithID content type used as the content type for the + -- EnvelopedData in POPOPrivKey. + -- It contains both a private key and an identifier for key escrow + -- agents to check against recovery requestors. + -- + + ct-encKeyWithID CONTENT-TYPE ::= + { EncKeyWithID IDENTIFIED BY id-ct-encKeyWithID } + + id-ct-encKeyWithID OBJECT IDENTIFIER ::= {id-ct 21} + + EncKeyWithID ::= SEQUENCE { + privateKey PrivateKeyInfo, + identifier CHOICE { + string UTF8String, + generalName GeneralName + } OPTIONAL + } + + PrivateKeyInfo ::= SEQUENCE { + version INTEGER, + privateKeyAlgorithm AlgorithmIdentifier{PUBLIC-KEY, {...}}, + privateKey OCTET STRING, + -- Structure of public key is in PUBLIC-KEY.&PrivateKey + attributes [0] IMPLICIT Attributes OPTIONAL + } + + Attributes ::= SET OF AttributeSet{{PrivateKeyAttributes}} + PrivateKeyAttributes ATTRIBUTE ::= {...} + + -- + -- 6. Registration Controls in CRMF + -- + + id-regCtrl OBJECT IDENTIFIER ::= { id-pkip 1 } + + RegControlSet ATTRIBUTE ::= { + regCtrl-regToken | regCtrl-authenticator | + regCtrl-pkiPublicationInfo | regCtrl-pkiArchiveOptions | + regCtrl-oldCertID | regCtrl-protocolEncrKey, ... } + + -- + -- 6.1. Registration Token Control + -- + + regCtrl-regToken ATTRIBUTE ::= + { TYPE RegToken IDENTIFIED BY id-regCtrl-regToken } + + id-regCtrl-regToken OBJECT IDENTIFIER ::= { id-regCtrl 1 } + + RegToken ::= UTF8String + + -- + -- 6.2. Authenticator Control + -- + + regCtrl-authenticator ATTRIBUTE ::= + { TYPE Authenticator IDENTIFIED BY id-regCtrl-authenticator } + + id-regCtrl-authenticator OBJECT IDENTIFIER ::= { id-regCtrl 2 } + + Authenticator ::= UTF8String + + -- + -- 6.3. Publication Information Control + -- + + regCtrl-pkiPublicationInfo ATTRIBUTE ::= + { TYPE PKIPublicationInfo IDENTIFIED BY + id-regCtrl-pkiPublicationInfo } + + id-regCtrl-pkiPublicationInfo OBJECT IDENTIFIER ::= { id-regCtrl 3 } + + PKIPublicationInfo ::= SEQUENCE { + action INTEGER { + dontPublish (0), + pleasePublish (1) }, + pubInfos SEQUENCE SIZE (1..MAX) OF SinglePubInfo OPTIONAL } + -- pubInfos MUST NOT be present if action is "dontPublish" + -- (if action is "pleasePublish" and pubInfos is omitted, + -- "dontCare" is assumed) + + SinglePubInfo ::= SEQUENCE { + pubMethod INTEGER { + dontCare (0), + x500 (1), + web (2), + ldap (3) }, + pubLocation GeneralName OPTIONAL } + + -- + -- 6.4. 
Archive Options Control + -- + + regCtrl-pkiArchiveOptions ATTRIBUTE ::= + { TYPE PKIArchiveOptions IDENTIFIED BY + id-regCtrl-pkiArchiveOptions } + + id-regCtrl-pkiArchiveOptions OBJECT IDENTIFIER ::= { id-regCtrl 4 } + + PKIArchiveOptions ::= CHOICE { + encryptedPrivKey [0] EncryptedKey, + -- the actual value of the private key + keyGenParameters [1] KeyGenParameters, + -- parameters that allow the private key to be re-generated + archiveRemGenPrivKey [2] BOOLEAN } + -- set to TRUE if sender wishes receiver to archive the private + -- key of a key pair that the receiver generates in response to + -- this request; set to FALSE if no archive is desired. + + EncryptedKey ::= CHOICE { + encryptedValue EncryptedValue, -- Deprecated + envelopedData [0] EnvelopedData } + -- The encrypted private key MUST be placed in the envelopedData + -- encryptedContentInfo encryptedContent OCTET STRING. + + -- + -- We skipped doing the full constraints here since this structure + -- has been deprecated in favor of EnvelopedData + -- + + EncryptedValue ::= SEQUENCE { + intendedAlg [0] AlgorithmIdentifier{ALGORITHM, {...}} OPTIONAL, + -- the intended algorithm for which the value will be used + symmAlg [1] AlgorithmIdentifier{ALGORITHM, {...}} OPTIONAL, + -- the symmetric algorithm used to encrypt the value + encSymmKey [2] BIT STRING OPTIONAL, + -- the (encrypted) symmetric key used to encrypt the value + keyAlg [3] AlgorithmIdentifier{ALGORITHM, {...}} OPTIONAL, + -- algorithm used to encrypt the symmetric key + valueHint [4] OCTET STRING OPTIONAL, + -- a brief description or identifier of the encValue content + -- (may be meaningful only to the sending entity, and used only + -- if EncryptedValue might be re-examined by the sending entity + -- in the future) + encValue BIT STRING } + -- the encrypted value itself + -- When EncryptedValue is used to carry a private key (as opposed to + -- a certificate), implementations MUST support the encValue field + -- containing an encrypted PrivateKeyInfo as defined in [PKCS11], + -- section 12.11. If encValue contains some other format/encoding + -- for the private key, the first octet of valueHint MAY be used + -- to indicate the format/encoding (but note that the possible values + -- of this octet are not specified at this time). In all cases, the + -- intendedAlg field MUST be used to indicate at least the OID of + -- the intended algorithm of the private key, unless this information + -- is known a priori to both sender and receiver by some other means. + + KeyGenParameters ::= OCTET STRING + + -- + -- 6.5. OldCert ID Control + -- + + regCtrl-oldCertID ATTRIBUTE ::= + { TYPE OldCertId IDENTIFIED BY id-regCtrl-oldCertID } + + id-regCtrl-oldCertID OBJECT IDENTIFIER ::= { id-regCtrl 5 } + + OldCertId ::= CertId + + CertId ::= SEQUENCE { + issuer GeneralName, + serialNumber INTEGER } + + -- + -- 6.6. Protocol Encryption Key Control + -- + + regCtrl-protocolEncrKey ATTRIBUTE ::= + { TYPE ProtocolEncrKey IDENTIFIED BY id-regCtrl-protocolEncrKey } + id-regCtrl-protocolEncrKey OBJECT IDENTIFIER ::= { id-regCtrl 6 } + + ProtocolEncrKey ::= SubjectPublicKeyInfo + + -- + -- 7. Registration Info in CRMF + -- + + id-regInfo OBJECT IDENTIFIER ::= { id-pkip 2 } + + RegInfoSet ATTRIBUTE ::= + { regInfo-utf8Pairs | regInfo-certReq } + + -- + -- 7.1. 
utf8Pairs RegInfo Control + -- + + regInfo-utf8Pairs ATTRIBUTE ::= + { TYPE UTF8Pairs IDENTIFIED BY id-regInfo-utf8Pairs } + + id-regInfo-utf8Pairs OBJECT IDENTIFIER ::= { id-regInfo 1 } + --with syntax + UTF8Pairs ::= UTF8String + + -- + -- 7.2. certReq RegInfo Control + -- + + regInfo-certReq ATTRIBUTE ::= + { TYPE CertReq IDENTIFIED BY id-regInfo-certReq } + + id-regInfo-certReq OBJECT IDENTIFIER ::= { id-regInfo 2 } + --with syntax + CertReq ::= CertRequest + + END diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Protected-Part-Descriptors.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Protected-Part-Descriptors.asn1 index 5512f1590b..5512f1590b 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Protected-Part-Descriptors.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Protected-Part-Descriptors.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ProtocolObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ProtocolObjectIdentifiers.asn1 index d6e88a2e47..d6e88a2e47 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/ProtocolObjectIdentifiers.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ProtocolObjectIdentifiers.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Coding-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Coding-Attributes.asn1 index 258c5f0b23..258c5f0b23 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Coding-Attributes.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Coding-Attributes.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Presentation-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Presentation-Attributes.asn1 index c8f3a2ff33..c8f3a2ff33 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Presentation-Attributes.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Presentation-Attributes.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Profile-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Profile-Attributes.asn1 index 365144ff35..365144ff35 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Profile-Attributes.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Profile-Attributes.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Reliable-Transfer-APDU.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Reliable-Transfer-APDU.asn1 index d00570b7e7..d00570b7e7 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Reliable-Transfer-APDU.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Reliable-Transfer-APDU.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Abstract-Syntaxes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Abstract-Syntaxes.asn1 index 4a59cc403b..4a59cc403b 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Abstract-Syntaxes.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Abstract-Syntaxes.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Generic-ROS-PDUs.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Generic-ROS-PDUs.asn1 index e55ea3c05e..e55ea3c05e 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Generic-ROS-PDUs.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Generic-ROS-PDUs.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects-extensions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects-extensions.asn1 index 671cf0e780..671cf0e780 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects-extensions.asn +++ 
b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects-extensions.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects.asn1 index b497e4126b..b497e4126b 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Realizations.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Realizations.asn1 index 73b49c8d7a..73b49c8d7a 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Realizations.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Realizations.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Useful-Definitions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Useful-Definitions.asn1 index e526ff4600..e526ff4600 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Useful-Definitions.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Useful-Definitions.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/SCVP-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/SCVP-2009.asn1 new file mode 100644 index 0000000000..f74f76ff7c --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/SCVP-2009.asn1 @@ -0,0 +1,608 @@ + SCVP-2009 + { iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-scvp-02(52) } + DEFINITIONS IMPLICIT TAGS ::= + BEGIN + IMPORTS + + Extensions{}, EXTENSION, ATTRIBUTE + FROM PKIX-CommonTypes-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57) } + + AlgorithmIdentifier{}, SIGNATURE-ALGORITHM, PUBLIC-KEY, KEY-AGREE, + DIGEST-ALGORITHM, KEY-DERIVATION, MAC-ALGORITHM + FROM AlgorithmInformation-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) + id-mod-algorithmInformation-02(58)} + + Certificate, CertificateList, CertificateSerialNumber, + SignatureAlgorithms, SubjectPublicKeyInfo + FROM PKIX1Explicit-2009 + { iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51) } + + GeneralNames, GeneralName, KeyUsage, KeyPurposeId + FROM PKIX1Implicit-2009 + { iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59) } + + AttributeCertificate + FROM PKIXAttributeCertificate-2009 + { iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-attribute-cert-02(47) } + + OCSPResponse + FROM OCSP-2009 + { iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-ocsp-02(48) } + + ContentInfo, CONTENT-TYPE + FROM CryptographicMessageSyntax-2009 + { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9) + smime(16) modules(0) id-mod-cms-2004-02(41) } + + mda-sha1 + FROM PKIXAlgs-2009 + { iso(1) identified-organization(3) dod(6) + internet(1) security(5) mechanisms(5) pkix(7) id-mod(0) + id-mod-pkix1-algorithms2008-02(56) } ; + + ContentTypes CONTENT-TYPE ::= {ct-scvp-certValRequest | + ct-scvp-certValResponse | ct-scvp-valPolRequest | + ct-scvp-valPolResponse, ... 
} + + id-ct OBJECT IDENTIFIER ::= + { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs9(9) + id-smime(16) 1 } + + ct-scvp-certValRequest CONTENT-TYPE ::= + { CVRequest IDENTIFIED BY id-ct-scvp-certValRequest } + + id-ct-scvp-certValRequest OBJECT IDENTIFIER ::= { id-ct 10 } + + -- SCVP Certificate Validation Request + + CVRequest ::= SEQUENCE { + cvRequestVersion INTEGER DEFAULT 1, + query Query, + requestorRef [0] GeneralNames OPTIONAL, + requestNonce [1] OCTET STRING OPTIONAL, + requestorName [2] GeneralName OPTIONAL, + responderName [3] GeneralName OPTIONAL, + requestExtensions [4] Extensions{{RequestExtensions}} + OPTIONAL, + signatureAlg [5] AlgorithmIdentifier + {SIGNATURE-ALGORITHM, + {SignatureAlgorithms}} + OPTIONAL, + hashAlg [6] OBJECT IDENTIFIER OPTIONAL, + requestorText [7] UTF8String (SIZE (1..256)) OPTIONAL + } + + -- Set of signature algorithms is coming from RFC 5280 + -- SignatureAlgorithms SIGNATURE-ALGORITHM ::= {...} + + -- Add supported request extensions here; all new items should + -- be added after the extension marker + + RequestExtensions EXTENSION ::= {...} + + Query ::= SEQUENCE { + queriedCerts CertReferences, + checks CertChecks, + wantBack [1] WantBack OPTIONAL, + validationPolicy ValidationPolicy, + responseFlags ResponseFlags OPTIONAL, + serverContextInfo [2] OCTET STRING OPTIONAL, + validationTime [3] GeneralizedTime OPTIONAL, + intermediateCerts [4] CertBundle OPTIONAL, + revInfos [5] RevocationInfos OPTIONAL, + producedAt [6] GeneralizedTime OPTIONAL, + queryExtensions [7] Extensions{{QueryExtensions}} OPTIONAL + } + + -- Add supported query extensions here; all new items should be added + -- after the extension marker + + QueryExtensions EXTENSION ::= {...} + + CertReferences ::= CHOICE { + pkcRefs [0] SEQUENCE SIZE (1..MAX) OF PKCReference, + acRefs [1] SEQUENCE SIZE (1..MAX) OF ACReference + } + + CertReference::= CHOICE { + pkc PKCReference, + ac ACReference + } + + PKCReference ::= CHOICE { + cert [0] Certificate, + pkcRef [1] SCVPCertID + } + + ACReference ::= CHOICE { + attrCert [2] AttributeCertificate, + acRef [3] SCVPCertID + } + + HashAlgorithm ::= AlgorithmIdentifier{DIGEST-ALGORITHM, + {mda-sha1, ...}} + + SCVPCertID ::= SEQUENCE { + certHash OCTET STRING, + issuerSerial SCVPIssuerSerial, + hashAlgorithm HashAlgorithm + DEFAULT { algorithm mda-sha1.&id } + } + + SCVPIssuerSerial ::= SEQUENCE { + issuer GeneralNames, + serialNumber CertificateSerialNumber + } + + ValidationPolicy ::= SEQUENCE { + validationPolRef ValidationPolRef, + validationAlg [0] ValidationAlg OPTIONAL, + userPolicySet [1] SEQUENCE SIZE (1..MAX) OF OBJECT + IDENTIFIER OPTIONAL, + inhibitPolicyMapping [2] BOOLEAN OPTIONAL, + requireExplicitPolicy [3] BOOLEAN OPTIONAL, + inhibitAnyPolicy [4] BOOLEAN OPTIONAL, + trustAnchors [5] TrustAnchors OPTIONAL, + keyUsages [6] SEQUENCE OF KeyUsage OPTIONAL, + extendedKeyUsages [7] SEQUENCE OF KeyPurposeId OPTIONAL, + specifiedKeyUsages [8] SEQUENCE OF KeyPurposeId OPTIONAL + } + + CertChecks ::= SEQUENCE SIZE (1..MAX) OF + OBJECT IDENTIFIER (CertCheckSet | ACertCheckSet, ... ) + + WantBack ::= SEQUENCE SIZE (1..MAX) OF + WANT-BACK.&id ({AllWantBacks}) + + POLICY ::= ATTRIBUTE + + ValidationPolRefSet POLICY ::= { + svp-defaultValPolicy, ... + } + + ValidationPolRef ::= SEQUENCE { + valPolId POLICY.&id, + valPolParams POLICY.&Type OPTIONAL + } + + ValidationAlgSet POLICY ::= { + svp-basicValAlg, ... 
+ } + + ValidationAlg ::= SEQUENCE { + valAlgId POLICY.&id, + parameters POLICY.&Type OPTIONAL + } + + NameValidationAlgSet POLICY ::= { + svp-nameValAlg, ... + } + + NameValidationAlgParams ::= SEQUENCE { + nameCompAlgId OBJECT IDENTIFIER (NameCompAlgSet, ... ), + validationNames GeneralNames + } + + TrustAnchors ::= SEQUENCE SIZE (1..MAX) OF PKCReference + KeyAgreePublicKey ::= SEQUENCE { + algorithm AlgorithmIdentifier{KEY-AGREE, + {SupportedKeyAgreePublicKeys}}, + publicKey BIT STRING, + macAlgorithm AlgorithmIdentifier{MAC-ALGORITHM, + {SupportedMACAlgorithms}}, + kDF AlgorithmIdentifier{KEY-DERIVATION, + {SupportedKeyDerivationFunctions}} + OPTIONAL + } + + SupportedKeyAgreePublicKeys KEY-AGREE ::= {...} + SupportedMACAlgorithms MAC-ALGORITHM ::= {...} + SupportedKeyDerivationFunctions KEY-DERIVATION ::= {...} + + ResponseFlags ::= SEQUENCE { + fullRequestInResponse [0] BOOLEAN DEFAULT FALSE, + responseValidationPolByRef [1] BOOLEAN DEFAULT TRUE, + protectResponse [2] BOOLEAN DEFAULT TRUE, + cachedResponse [3] BOOLEAN DEFAULT TRUE + } + + CertBundle ::= SEQUENCE SIZE (1..MAX) OF Certificate + + RevocationInfos ::= SEQUENCE SIZE (1..MAX) OF RevocationInfo + + RevocationInfo ::= CHOICE { + crl [0] CertificateList, + delta-crl [1] CertificateList, + ocsp [2] OCSPResponse, + other [3] OtherRevInfo + } + + REV-INFO ::= TYPE-IDENTIFIER + + OtherRevInfo ::= SEQUENCE { + riType REV-INFO.&id, + riValue REV-INFO.&Type + } + + -- SCVP Certificate Validation Response + + ct-scvp-certValResponse CONTENT-TYPE ::= + { CVResponse IDENTIFIED BY id-ct-scvp-certValResponse } + + id-ct-scvp-certValResponse OBJECT IDENTIFIER ::= { id-ct 11 } + + CVResponse ::= SEQUENCE { + cvResponseVersion INTEGER, + serverConfigurationID INTEGER, + producedAt GeneralizedTime, + responseStatus ResponseStatus, + respValidationPolicy [0] RespValidationPolicy OPTIONAL, + requestRef [1] RequestReference OPTIONAL, + requestorRef [2] GeneralNames OPTIONAL, + requestorName [3] GeneralNames OPTIONAL, + replyObjects [4] ReplyObjects OPTIONAL, + respNonce [5] OCTET STRING OPTIONAL, + serverContextInfo [6] OCTET STRING OPTIONAL, + cvResponseExtensions [7] Extensions{{CVResponseExtensions}} + OPTIONAL, + requestorText [8] UTF8String (SIZE (1..256)) OPTIONAL + } + + -- This document defines no extensions + CVResponseExtensions EXTENSION ::= {...} + + ResponseStatus ::= SEQUENCE { + statusCode CVStatusCode DEFAULT okay, + errorMessage UTF8String OPTIONAL + } + + CVStatusCode ::= ENUMERATED { + okay (0), + skipUnrecognizedItems (1), + tooBusy (10), + invalidRequest (11), + internalError (12), + badStructure (20), + unsupportedVersion (21), + abortUnrecognizedItems (22), + unrecognizedSigKey (23), + badSignatureOrMAC (24), + unableToDecode (25), + notAuthorized (26), + unsupportedChecks (27), + unsupportedWantBacks (28), + unsupportedSignatureOrMAC (29), + invalidSignatureOrMAC (30), + protectedResponseUnsupported (31), + unrecognizedResponderName (32), + relayingLoop (40), + unrecognizedValPol (50), + unrecognizedValAlg (51), + fullRequestInResponseUnsupported (52), + fullPolResponseUnsupported (53), + inhibitPolicyMappingUnsupported (54), + requireExplicitPolicyUnsupported (55), + inhibitAnyPolicyUnsupported (56), + validationTimeUnsupported (57), + unrecognizedCritQueryExt (63), + unrecognizedCritRequestExt (64), + ... 
+ } + + RespValidationPolicy ::= ValidationPolicy + + RequestReference ::= CHOICE { + requestHash [0] HashValue, -- hash of CVRequest + fullRequest [1] CVRequest } + + HashValue ::= SEQUENCE { + algorithm HashAlgorithm + DEFAULT { algorithm mda-sha1.&id }, + value OCTET STRING } + + ReplyObjects ::= SEQUENCE SIZE (1..MAX) OF CertReply + + CertReply ::= SEQUENCE { + cert CertReference, + replyStatus ReplyStatus DEFAULT success, + replyValTime GeneralizedTime, + replyChecks ReplyChecks, + replyWantBacks ReplyWantBacks, + validationErrors [0] SEQUENCE SIZE (1..MAX) OF + OBJECT IDENTIFIER ( BasicValidationErrorSet | + NameValidationErrorSet, + ... ) OPTIONAL, + nextUpdate [1] GeneralizedTime OPTIONAL, + certReplyExtensions [2] Extensions{{...}} OPTIONAL + } + + ReplyStatus ::= ENUMERATED { + success (0), + malformedPKC (1), + malformedAC (2), + unavailableValidationTime (3), + referenceCertHashFail (4), + certPathConstructFail (5), + certPathNotValid (6), + certPathNotValidNow (7), + wantBackUnsatisfied (8) + } + ReplyChecks ::= SEQUENCE OF ReplyCheck + + ReplyCheck ::= SEQUENCE { + check OBJECT IDENTIFIER (CertCheckSet | ACertCheckSet, ... ), + status INTEGER DEFAULT 0 + } + + ReplyWantBacks ::= SEQUENCE OF ReplyWantBack + + ReplyWantBack::= SEQUENCE { + wb WANT-BACK.&id({AllWantBacks}), + value OCTET STRING + (CONTAINING WANT-BACK.&Type({AllWantBacks}{@wb})) + } + + WANT-BACK ::= TYPE-IDENTIFIER + + AllWantBacks WANT-BACK ::= { + WantBackSet | ACertWantBackSet | AnyWantBackSet, ... + } + + CertBundles ::= SEQUENCE SIZE (1..MAX) OF CertBundle + + RevInfoWantBack ::= SEQUENCE { + revocationInfo RevocationInfos, + extraCerts CertBundle OPTIONAL + } + + SCVPResponses ::= SEQUENCE OF ContentInfo + + -- SCVP Validation Policies Request + + ct-scvp-valPolRequest CONTENT-TYPE ::= + { ValPolRequest IDENTIFIED BY id-ct-scvp-valPolRequest } + + id-ct-scvp-valPolRequest OBJECT IDENTIFIER ::= { id-ct 12 } + + ValPolRequest ::= SEQUENCE { + vpRequestVersion INTEGER DEFAULT 1, + requestNonce OCTET STRING + } + + -- SCVP Validation Policies Response + + ct-scvp-valPolResponse CONTENT-TYPE ::= + { ValPolResponse IDENTIFIED BY id-ct-scvp-valPolResponse } + + id-ct-scvp-valPolResponse OBJECT IDENTIFIER ::= { id-ct 13 } + ValPolResponse ::= SEQUENCE { + vpResponseVersion INTEGER, + maxCVRequestVersion INTEGER, + maxVPRequestVersion INTEGER, + serverConfigurationID INTEGER, + thisUpdate GeneralizedTime, + nextUpdate GeneralizedTime OPTIONAL, + supportedChecks CertChecks, + supportedWantBacks WantBack, + validationPolicies SEQUENCE OF OBJECT IDENTIFIER, + validationAlgs SEQUENCE OF OBJECT IDENTIFIER, + authPolicies SEQUENCE OF AuthPolicy, + responseTypes ResponseTypes, + defaultPolicyValues RespValidationPolicy, + revocationInfoTypes RevocationInfoTypes, + signatureGeneration SEQUENCE OF AlgorithmIdentifier + {SIGNATURE-ALGORITHM, + {SignatureAlgorithms}}, + signatureVerification SEQUENCE OF AlgorithmIdentifier + {SIGNATURE-ALGORITHM, + {SignatureAlgorithms}}, + hashAlgorithms SEQUENCE SIZE (1..MAX) OF + OBJECT IDENTIFIER, + serverPublicKeys SEQUENCE OF KeyAgreePublicKey + OPTIONAL, + clockSkew INTEGER DEFAULT 10, + requestNonce OCTET STRING OPTIONAL + } + + ResponseTypes ::= ENUMERATED { + cached-only (0), + non-cached-only (1), + cached-and-non-cached (2) + } + + RevocationInfoTypes ::= BIT STRING { + fullCRLs (0), + deltaCRLs (1), + indirectCRLs (2), + oCSPResponses (3) + } + + AuthPolicy ::= OBJECT IDENTIFIER + + -- SCVP Check Identifiers + + id-stc OBJECT IDENTIFIER ::= + { iso(1) identified-organization(3) 
dod(6) internet(1) security(5) + mechanisms(5) pkix(7) 17 } + + CertCheckSet OBJECT IDENTIFIER ::= { + id-stc-build-pkc-path | id-stc-build-valid-pkc-path | + id-stc-build-status-checked-pkc-path, ... } + + id-stc-build-pkc-path OBJECT IDENTIFIER ::= { id-stc 1 } + id-stc-build-valid-pkc-path OBJECT IDENTIFIER ::= { id-stc 2 } + id-stc-build-status-checked-pkc-path + OBJECT IDENTIFIER ::= { id-stc 3 } + + ACertCheckSet OBJECT IDENTIFIER ::= { + id-stc-build-aa-path | id-stc-build-valid-aa-path | + id-stc-build-status-checked-aa-path | + id-stc-status-check-ac-and-build-status-checked-aa-path + } + + id-stc-build-aa-path OBJECT IDENTIFIER ::= { id-stc 4 } + id-stc-build-valid-aa-path OBJECT IDENTIFIER ::= { id-stc 5 } + id-stc-build-status-checked-aa-path + OBJECT IDENTIFIER ::= { id-stc 6 } + id-stc-status-check-ac-and-build-status-checked-aa-path + OBJECT IDENTIFIER ::= { id-stc 7 } + + -- SCVP WantBack Identifiers + + id-swb OBJECT IDENTIFIER ::= + { iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) 18 } + + WantBackSet WANT-BACK ::= { + swb-pkc-cert | swb-pkc-best-cert-path | + swb-pkc-revocation-info | swb-pkc-public-key-info | + swb-pkc-all-cert-paths | swb-pkc-ee-revocation-info | + swb-pkc-CAs-revocation-info + } + + ACertWantBackSet WANT-BACK ::= { + swb-ac-cert | swb-aa-cert-path | + swb-aa-revocation-info | swb-ac-revocation-info + } + + AnyWantBackSet WANT-BACK ::= { swb-relayed-responses } + + swb-pkc-best-cert-path WANT-BACK ::= + { CertBundle IDENTIFIED BY id-swb-pkc-best-cert-path } + id-swb-pkc-best-cert-path OBJECT IDENTIFIER ::= { id-swb 1 } + swb-pkc-revocation-info WANT-BACK ::= + { RevInfoWantBack IDENTIFIED BY id-swb-pkc-revocation-info } + id-swb-pkc-revocation-info OBJECT IDENTIFIER ::= { id-swb 2 } + + swb-pkc-public-key-info WANT-BACK ::= + { SubjectPublicKeyInfo IDENTIFIED BY id-swb-pkc-public-key-info } + id-swb-pkc-public-key-info OBJECT IDENTIFIER ::= { id-swb 4 } + + swb-aa-cert-path WANT-BACK ::= + {CertBundle IDENTIFIED BY id-swb-aa-cert-path } + id-swb-aa-cert-path OBJECT IDENTIFIER ::= { id-swb 5 } + + swb-aa-revocation-info WANT-BACK ::= + { RevInfoWantBack IDENTIFIED BY id-swb-aa-revocation-info } + id-swb-aa-revocation-info OBJECT IDENTIFIER ::= { id-swb 6 } + + swb-ac-revocation-info WANT-BACK ::= + { RevInfoWantBack IDENTIFIED BY id-swb-ac-revocation-info } + id-swb-ac-revocation-info OBJECT IDENTIFIER ::= { id-swb 7 } + + swb-relayed-responses WANT-BACK ::= + {SCVPResponses IDENTIFIED BY id-swb-relayed-responses } + + id-swb-relayed-responses OBJECT IDENTIFIER ::= { id-swb 9 } + + swb-pkc-all-cert-paths WANT-BACK ::= + {CertBundles IDENTIFIED BY id-swb-pkc-all-cert-paths } + id-swb-pkc-all-cert-paths OBJECT IDENTIFIER ::= { id-swb 12} + + swb-pkc-ee-revocation-info WANT-BACK ::= + { RevInfoWantBack IDENTIFIED BY id-swb-pkc-ee-revocation-info } + id-swb-pkc-ee-revocation-info OBJECT IDENTIFIER ::= { id-swb 13} + + swb-pkc-CAs-revocation-info WANT-BACK ::= + { RevInfoWantBack IDENTIFIED BY id-swb-pkc-CAs-revocation-info } + id-swb-pkc-CAs-revocation-info OBJECT IDENTIFIER ::= { id-swb 14} + + swb-pkc-cert WANT-BACK ::= + { Certificate IDENTIFIED BY id-swb-pkc-cert } + id-swb-pkc-cert OBJECT IDENTIFIER ::= { id-swb 10} + + swb-ac-cert WANT-BACK ::= + { AttributeCertificate IDENTIFIED BY id-swb-ac-cert } + id-swb-ac-cert OBJECT IDENTIFIER ::= { id-swb 11} + + -- SCVP Validation Policy and Algorithm Identifiers + + id-svp OBJECT IDENTIFIER ::= + { iso(1) identified-organization(3) dod(6) internet(1) security(5) + 
mechanisms(5) pkix(7) 19 } + + svp-defaultValPolicy POLICY ::= + { IDENTIFIED BY id-svp-defaultValPolicy } + + id-svp-defaultValPolicy OBJECT IDENTIFIER ::= { id-svp 1 } + + -- SCVP Basic Validation Algorithm Identifier + + svp-basicValAlg POLICY ::= {IDENTIFIED BY id-svp-basicValAlg } + + id-svp-basicValAlg OBJECT IDENTIFIER ::= { id-svp 3 } + + -- SCVP Basic Validation Algorithm Errors + + id-bvae OBJECT IDENTIFIER ::= id-svp-basicValAlg + + BasicValidationErrorSet OBJECT IDENTIFIER ::= { + id-bvae-expired | id-bvae-not-yet-valid | + id-bvae-wrongTrustAnchor | id-bvae-noValidCertPath | + id-bvae-revoked | id-bvae-invalidKeyPurpose | + id-bvae-invalidKeyUsage | id-bvae-invalidCertPolicy + } + + id-bvae-expired OBJECT IDENTIFIER ::= { id-bvae 1 } + id-bvae-not-yet-valid OBJECT IDENTIFIER ::= { id-bvae 2 } + id-bvae-wrongTrustAnchor OBJECT IDENTIFIER ::= { id-bvae 3 } + id-bvae-noValidCertPath OBJECT IDENTIFIER ::= { id-bvae 4 } + id-bvae-revoked OBJECT IDENTIFIER ::= { id-bvae 5 } + id-bvae-invalidKeyPurpose OBJECT IDENTIFIER ::= { id-bvae 9 } + id-bvae-invalidKeyUsage OBJECT IDENTIFIER ::= { id-bvae 10 } + id-bvae-invalidCertPolicy OBJECT IDENTIFIER ::= { id-bvae 11 } + + -- SCVP Name Validation Algorithm Identifier + + svp-nameValAlg POLICY ::= + {TYPE NameValidationAlgParams IDENTIFIED BY id-svp-nameValAlg } + + id-svp-nameValAlg OBJECT IDENTIFIER ::= { id-svp 2 } + + -- SCVP Name Validation Algorithm DN comparison algorithm + + NameCompAlgSet OBJECT IDENTIFIER ::= { + id-nva-dnCompAlg + } + + id-nva-dnCompAlg OBJECT IDENTIFIER ::= { id-svp 4 } + -- SCVP Name Validation Algorithm Errors + + id-nvae OBJECT IDENTIFIER ::= id-svp-nameValAlg + + NameValidationErrorSet OBJECT IDENTIFIER ::= { + id-nvae-name-mismatch | id-nvae-no-name | id-nvae-unknown-alg | + id-nvae-bad-name | id-nvae-bad-name-type | id-nvae-mixed-names + } + + id-nvae-name-mismatch OBJECT IDENTIFIER ::= { id-nvae 1 } + id-nvae-no-name OBJECT IDENTIFIER ::= { id-nvae 2 } + id-nvae-unknown-alg OBJECT IDENTIFIER ::= { id-nvae 3 } + id-nvae-bad-name OBJECT IDENTIFIER ::= { id-nvae 4 } + id-nvae-bad-name-type OBJECT IDENTIFIER ::= { id-nvae 5 } + id-nvae-mixed-names OBJECT IDENTIFIER ::= { id-nvae 6 } + + -- SCVP Extended Key Usage Key Purpose Identifiers + + id-kp OBJECT IDENTIFIER ::= + { iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) 3 } + + SvcpExtKeyUsageSet OBJECT IDENTIFIER ::= { + id-kp-scvpServer | id-kp-scvpClient + } + + id-kp-scvpServer OBJECT IDENTIFIER ::= { id-kp 15 } + + id-kp-scvpClient OBJECT IDENTIFIER ::= { id-kp 16 } + + END diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/SecureMimeMessageV3dot1-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/SecureMimeMessageV3dot1-2009.asn1 new file mode 100644 index 0000000000..2bd2aaa435 --- /dev/null +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/SecureMimeMessageV3dot1-2009.asn1 @@ -0,0 +1,122 @@ + SecureMimeMessageV3dot1-2009 + {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9) + smime(16) modules(0) id-mod-msg-v3dot1-02(39)} + DEFINITIONS IMPLICIT TAGS ::= + BEGIN + IMPORTS + + SMIME-CAPS, SMIMECapabilities{} + FROM AlgorithmInformation-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) + id-mod-algorithmInformation-02(58)} + + ATTRIBUTE + FROM PKIX-CommonTypes-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)} + + SubjectKeyIdentifier, IssuerAndSerialNumber, RecipientKeyIdentifier + 
FROM CryptographicMessageSyntax-2009 + {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9) + smime(16) modules(0) id-mod-cms-2004-02(41)} + + rc2-cbc, SMimeCaps + FROM CryptographicMessageSyntaxAlgorithms-2009 + {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9) + smime(16) modules(0) id-mod-cmsalg-2001-02(37)} + + SMimeCaps + FROM PKIXAlgs-2009 + {iso(1) identified-organization(3) dod(6) internet(1) security(5) + mechanisms(5) pkix(7) id-mod(0) + id-mod-pkix1-algorithms2008-02(56)} + + SMimeCaps + FROM PKIX1-PSS-OAEP-Algorithms-2009 + {iso(1) identified-organization(3) dod(6) internet(1) + security(5) mechanisms(5) pkix(7) id-mod(0) + id-mod-pkix1-rsa-pkalgs-02(54)}; + + SMimeAttributeSet ATTRIBUTE ::= + { aa-smimeCapabilities | aa-encrypKeyPref, ... } + + -- id-aa is the arc with all new authenticated and unauthenticated + -- attributes produced by the S/MIME Working Group + + id-aa OBJECT IDENTIFIER ::= + { iso(1) member-body(2) usa(840) rsadsi(113549) pkcs(1) pkcs-9(9) + smime(16) attributes(2)} + + -- The S/MIME Capabilities attribute provides a method of broadcasting + -- the symmetric capabilities understood. Algorithms SHOULD be ordered + -- by preference and grouped by type + + aa-smimeCapabilities ATTRIBUTE ::= + { TYPE SMIMECapabilities{{SMimeCapsSet}} IDENTIFIED BY + smimeCapabilities } + smimeCapabilities OBJECT IDENTIFIER ::= + { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9) + 15 } + + SMimeCapsSet SMIME-CAPS ::= + { cap-preferBinaryInside | cap-RC2CBC | + PKIXAlgs-2009.SMimeCaps | + CryptographicMessageSyntaxAlgorithms-2009.SMimeCaps | + PKIX1-PSS-OAEP-Algorithms-2009.SMimeCaps, ... } + + -- Encryption Key Preference provides a method of broadcasting the + -- preferred encryption certificate. + + aa-encrypKeyPref ATTRIBUTE ::= + { TYPE SMIMEEncryptionKeyPreference + IDENTIFIED BY id-aa-encrypKeyPref } + + id-aa-encrypKeyPref OBJECT IDENTIFIER ::= {id-aa 11} + + SMIMEEncryptionKeyPreference ::= CHOICE { + issuerAndSerialNumber [0] IssuerAndSerialNumber, + receipentKeyId [1] RecipientKeyIdentifier, + subjectAltKeyIdentifier [2] SubjectKeyIdentifier + } + + -- receipentKeyId is spelt incorrectly, but kept for historical + -- reasons. + + id-smime OBJECT IDENTIFIER ::= { iso(1) member-body(2) + us(840) rsadsi(113549) pkcs(1) pkcs9(9) 16 } + + id-cap OBJECT IDENTIFIER ::= { id-smime 11 } + + -- The preferBinaryInside indicates an ability to receive messages + -- with binary encoding inside the CMS wrapper + + cap-preferBinaryInside SMIME-CAPS ::= + { -- No value -- IDENTIFIED BY id-cap-preferBinaryInside } + + id-cap-preferBinaryInside OBJECT IDENTIFIER ::= { id-cap 1 } + + -- The following list OIDs to be used with S/MIME V3 + + -- Signature Algorithms Not Found in [RFC3370] + -- + -- md2WithRSAEncryption OBJECT IDENTIFIER ::= + -- {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-1(1) + -- 2} + -- + -- Other Signed Attributes + -- + -- signingTime OBJECT IDENTIFIER ::= + -- {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9) + -- 5} + -- See [RFC5652] for a description of how to encode the attribute + -- value. + + cap-RC2CBC SMIME-CAPS ::= + { TYPE SMIMECapabilitiesParametersForRC2CBC + IDENTIFIED BY rc2-cbc} + + SMIMECapabilitiesParametersForRC2CBC ::= INTEGER (40 | 128, ...) 
+ -- (RC2 Key Length (number of bits)) + + END diff --git a/lib/asn1/test/asn1_SUITE_data/x420/SelectedAttributeTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/SelectedAttributeTypes.asn1 index 07bba30690..07bba30690 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/SelectedAttributeTypes.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/SelectedAttributeTypes.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/SeseAPDUs.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/SeseAPDUs.asn1 index 2917122e94..2917122e94 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/SeseAPDUs.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/SeseAPDUs.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/SpkmGssTokens.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/SpkmGssTokens.asn1 index 02205bd64c..02205bd64c 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/SpkmGssTokens.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/SpkmGssTokens.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Style-Descriptors.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Style-Descriptors.asn1 index 8f033eab6f..8f033eab6f 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Style-Descriptors.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Style-Descriptors.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Subprofiles.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Subprofiles.asn1 index bfcd0b5dbc..bfcd0b5dbc 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Subprofiles.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Subprofiles.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Temporal-Relationships.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Temporal-Relationships.asn1 index 9633995e3b..9633995e3b 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Temporal-Relationships.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Temporal-Relationships.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Text-Units.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Text-Units.asn1 index ccc64a52f5..ccc64a52f5 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Text-Units.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Text-Units.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/UpperBounds.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/UpperBounds.asn1 index c97c83a569..c97c83a569 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/UpperBounds.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/UpperBounds.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/UsefulDefinitions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/UsefulDefinitions.asn1 index d9601bb7d0..d9601bb7d0 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/UsefulDefinitions.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/UsefulDefinitions.asn1 diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Videotex-Coding-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Videotex-Coding-Attributes.asn1 index 18e51cbc0d..18e51cbc0d 100644 --- a/lib/asn1/test/asn1_SUITE_data/x420/Videotex-Coding-Attributes.asn +++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Videotex-Coding-Attributes.asn1 diff --git a/lib/asn1/test/asn1_test_lib.erl b/lib/asn1/test/asn1_test_lib.erl index da07cd1118..a5f46046ff 100644 --- a/lib/asn1/test/asn1_test_lib.erl +++ b/lib/asn1/test/asn1_test_lib.erl @@ -21,6 +21,7 @@ -export([compile/3,compile_all/3,compile_erlang/3, hex_to_bin/1, + match_value/2, parallel/0, roundtrip/3,roundtrip/4,roundtrip_enc/3,roundtrip_enc/4]). @@ -106,6 +107,24 @@ compile_erlang(Mod, Config, Options) -> hex_to_bin(S) -> << <<(hex2num(C)):4>> || C <- S, C =/= $\s >>. +%% match_value(Pattern, Value) -> ok. +%% Match Pattern against Value. 
If the Pattern contains in any +%% position, the corresponding position in the Value can be +%% anything. Generate an exception if the Pattern and Value don't +%% match. + +match_value('_', _) -> + ok; +match_value([H1|T1], [H2|T2]) -> + match_value(H1, H2), + match_value(T1, T2); +match_value(T1, T2) when tuple_size(T1) =:= tuple_size(T2) -> + match_value_tuple(1, T1, T2); +match_value(Same, Same) -> + ok; +match_value(V1, V2) -> + error({nomatch,V1,V2}). + roundtrip(Mod, Type, Value) -> roundtrip(Mod, Type, Value, Value). @@ -132,6 +151,12 @@ hex2num(C) when $0 =< C, C =< $9 -> C - $0; hex2num(C) when $A =< C, C =< $F -> C - $A + 10; hex2num(C) when $a =< C, C =< $f -> C - $a + 10. +match_value_tuple(I, T1, T2) when I =< tuple_size(T1) -> + match_value(element(I, T1), element(I, T2)), + match_value_tuple(I+1, T1, T2); +match_value_tuple(_, _, _) -> + ok. + test_ber_indefinite(Mod, Type, Encoded, ExpectedValue) -> case Mod:encoding_rule() of ber -> diff --git a/lib/asn1/test/error_SUITE.erl b/lib/asn1/test/error_SUITE.erl index 1edd60f7c8..a9893b91cc 100644 --- a/lib/asn1/test/error_SUITE.erl +++ b/lib/asn1/test/error_SUITE.erl @@ -19,9 +19,12 @@ -module(error_SUITE). -export([suite/0,all/0,groups/0, - already_defined/1,bitstrings/1,enumerated/1, - imports/1,instance_of/1,integers/1,objects/1, - parameterization/1,values/1]). + already_defined/1,bitstrings/1, + classes/1,constraints/1,constructed/1,enumerated/1, + imports_exports/1,instance_of/1,integers/1,objects/1, + object_field_extraction/1,oids/1,rel_oids/1, + object_sets/1,parameterization/1, + syntax/1,table_constraints/1,tags/1,values/1]). -include_lib("test_server/include/test_server.hrl"). @@ -34,12 +37,22 @@ groups() -> [{p,parallel(), [already_defined, bitstrings, + classes, + constraints, + constructed, enumerated, - imports, + imports_exports, instance_of, integers, objects, + object_field_extraction, + object_sets, + oids, + rel_oids, parameterization, + syntax, + table_constraints, + tags, values]}]. parallel() -> @@ -94,6 +107,46 @@ bitstrings(Config) -> ]} = run(P, Config), ok. +classes(Config) -> + M = 'Classes', + P = {M, + <<"Classes DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n" + " LowerCase ::= CLASS { &id INTEGER UNIQUE }\n" + " CL ::= CLASS { &id INTEGER UNIQUE DEFAULT 42}\n" + "END\n">>}, + {error, + [{structured_error,{M,2},asn1ct_check, + {illegal_class_name,'LowerCase'}}, + {structured_error,{M,3},asn1ct_check, + {unique_and_default,id}} + ]} = run(P, Config), + ok. + +constraints(Config) -> + M = 'Constraints', + P = {M, + <<"Constraints DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n" + " II-1 ::= INTEGER (holder-1.&obj)\n" + " II-2 ::= INTEGER ('1234'H<..20)\n" + " II-3 ::= INTEGER (1..<\"abc\")\n" + " II-4 ::= INTEGER (10..1)\n" + + " HOLDER ::= CLASS {\n" + " &obj HOLDER OPTIONAL\n" + " }\n" + + " holder-1 HOLDER ::= { &obj holder-2 }\n" + " holder-2 HOLDER ::= { }\n" + "END\n">>}, + {error, + [ + {structured_error,{M,2},asn1ct_check,illegal_value}, + {structured_error,{M,3},asn1ct_check,illegal_integer_value}, + {structured_error,{M,4},asn1ct_check,illegal_integer_value}, + {structured_error,{M,5},asn1ct_check,reversed_range} + ]} = run(P, Config), + ok. 
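The match_value/2 helper added to asn1_test_lib above treats the atom '_' as a wildcard: wherever the pattern contains '_', the corresponding position in the value may be anything, while every other position (including nested lists and tuples) must match exactly. A minimal usage sketch, assuming a decoded CertStatus-style tuple in which asn1_NOVALUE marks an absent OPTIONAL component; the term is invented for illustration:

    %% The certHash position may be anything, certReqId must be 1, and
    %% statusInfo must be absent; a mismatch raises an error exception
    %% with reason {nomatch,V1,V2}.
    check_decoded(Decoded) ->
        ok = asn1_test_lib:match_value({'CertStatus', '_', 1, asn1_NOVALUE},
                                       Decoded).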
+ enumerated(Config) -> M = 'Enumerated', P = {M, @@ -111,38 +164,77 @@ enumerated(Config) -> " S2 ::= SEQUENCE {\n" " e2 EnumExt DEFAULT xyz\n" " }\n" + + " BadEnum1 ::= ENUMERATED {a, b, c, b }\n" + " BadEnum2 ::= ENUMERATED {a(1), b(2), b(3) }\n" + " BadEnum3 ::= ENUMERATED {a(1), b(1) }\n" + " BadEnum4 ::= ENUMERATED {a, b, ..., c(0) }\n" + " BadEnum5 ::= ENUMERATED {a, b, ..., c(10), d(5) }\n" "END\n">>}, {error, [ - {structured_error,{'Enumerated',3},asn1ct_check,{undefined,d}}, - {structured_error,{'Enumerated',5},asn1ct_check,{undefined,z}}, - {structured_error,{'Enumerated',10},asn1ct_check,{undefined,aa}}, - {structured_error,{'Enumerated',13},asn1ct_check,{undefined,xyz}} + {structured_error,{M,3},asn1ct_check,{undefined,d}}, + {structured_error,{M,5},asn1ct_check,{undefined,z}}, + {structured_error,{M,6},asn1ct_check,{undefined,aa}}, + {structured_error,{M,12},asn1ct_check,{undefined,xyz}}, + {structured_error,{M,15},asn1ct_check, + {enum_illegal_redefinition,b}}, + {structured_error,{M,16},asn1ct_check, + {enum_illegal_redefinition,b}}, + {structured_error,{M,17},asn1ct_check, + {enum_reused_value,b,1}}, + {structured_error,{M,18},asn1ct_check, + {enum_reused_value,c,0}}, + {structured_error,{M,19},asn1ct_check, + {enum_not_ascending,d,5,10}} ] } = run(P, Config), ok. -imports(Config) -> +imports_exports(Config) -> Ext = 'ExternalModule', ExtP = {Ext, <<"ExternalModule DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n" + "IMPORTS\n" + " Int, NonExistingImport FROM ImportsFrom;\n" + + " Existing ::= INTEGER\n" "END\n">>}, - ok = run(ExtP, Config), + {error, + [{structured_error, + {Ext,3}, + asn1ct_check, + {undefined_import,'NonExistingImport', + 'ImportsFrom'}}]} = run(ExtP, Config), M = 'Imports', P = {M, <<"Imports DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n" - "IMPORTS NotDefined FROM ExternalModule\n" - "X FROM UndefinedModule objid\n" - "Y, Z FROM UndefinedModule2;\n" + "EXPORTS\n" + " T, UndefinedType;\n" + + "IMPORTS\n" + " NotDefined, Existing, Int, NonExistingImport\n" + " FROM ExternalModule\n" + " X FROM UndefinedModule objid\n" + " Y, Z FROM UndefinedModule2;\n" + "objid OBJECT IDENTIFIER ::= {joint-iso-ccitt(2) remote-operations(4)\n" " notation(0)}\n" + "T ::= INTEGER\n" "END\n">>}, - {error,[{structured_error,{M,2},asn1ct_check, - {undefined_import,'NotDefined','ExternalModule'}}, - {structured_error,{M,3},asn1ct_check,{undefined_import,'X','UndefinedModule'}}, - {structured_error,{M,4},asn1ct_check,{undefined_import,'Y','UndefinedModule2'}}, - {structured_error,{M,4},asn1ct_check,{undefined_import,'Z','UndefinedModule2'}} + {error,[{structured_error,{M,3},asn1ct_check, + {undefined_export, 'UndefinedType'}}, + {structured_error,{M,5},asn1ct_check, + {undefined_import,'NonExistingImport',Ext}}, + {structured_error,{M,5},asn1ct_check, + {undefined_import,'NotDefined',Ext}}, + {structured_error,{M,7},asn1ct_check, + {undefined_import,'X','UndefinedModule'}}, + {structured_error,{M,8},asn1ct_check, + {undefined_import,'Y','UndefinedModule2'}}, + {structured_error,{M,8},asn1ct_check, + {undefined_import,'Z','UndefinedModule2'}} ]} = run(P, Config), ok. 
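The imports_exports case above hinges on whether the compiler can locate the module named in an IMPORTS clause; unresolved references are reported as {undefined_import,Name,Module}. Outside the suite the same situation is handled by compiling the imported-from module first and passing an {i,Dir} include path, which is why run/2 gains an include option at the end of this diff. A rough sketch with illustrative module names and paths:

    %% A.asn1 IMPORTS from B.asn1: compile B first, then let the compiler
    %% find B's definitions (its source or .asn1db) via the include path.
    compile_with_imports(Dir) ->
        ok = asn1ct:compile(filename:join(Dir, "B.asn1"),
                            [ber, {outdir, Dir}]),
        ok = asn1ct:compile(filename:join(Dir, "A.asn1"),
                            [ber, {i, Dir}, {outdir, Dir}]).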
@@ -170,11 +262,14 @@ integers(Config) -> " Int1 ::= INTEGER {a(1), a(1)}\n" " Int2 ::= INTEGER {a(1), b(2), a(3)}\n" " Int3 ::= INTEGER {x(1), y(1)}\n" + " i0 INTEGER ::= 1\n" + " Int4 ::= INTEGER {x(i0), y(undef) }\n" "END\n">>}, {error, [{structured_error,{M,2},asn1ct_check,{namelist_redefinition,a}}, {structured_error,{M,3},asn1ct_check,{namelist_redefinition,a}}, - {structured_error,{M,4},asn1ct_check,{value_reused,1}} + {structured_error,{M,4},asn1ct_check,{value_reused,1}}, + {structured_error,{M,6},asn1ct_check,{undefined,undef}} ]} = run(P, Config), ok. @@ -188,6 +283,11 @@ objects(Config) -> " obj3 CL ::= { &Data OCTET STRING }\n" " obj4 SMALL ::= { &code 42 }\n" " InvalidSet CL ::= { obj1 }\n" + " obj5 CL ::= {}\n" + " ErrSet ::= PT{ {PT{inst}}}\n" + " obj6 CL ::= 7\n" + " obj7 CL ::= int\n" + " obj8 NON-CLASS ::= { &id 1 }\n" " CL ::= CLASS {\n" " &code INTEGER UNIQUE,\n" @@ -203,6 +303,12 @@ objects(Config) -> " &code INTEGER UNIQUE,\n" " &i INTEGER\n" " }\n" + + " PT{SMALL:Small} ::= SEQUENCE { a SMALL.&code ({Small}) }\n" + " inst SMALL ::= {&code 42, &i 4711}\n" + + " int INTEGER ::= 42\n" + " NON-CLASS ::= SEQUENCE { a BOOLEAN }\n" "END\n">>}, {error, [ @@ -216,24 +322,490 @@ objects(Config) -> {structured_error,{M,5},asn1ct_check, {missing_mandatory_fields,[i],obj4}}, {structured_error,{M,6},asn1ct_check, - {invalid_fields,[wrong],'InvalidSet'}} + {invalid_fields,[wrong],'InvalidSet'}}, + {structured_error,{M,7},asn1ct_check, + {missing_mandatory_fields, + ['Data','Set','VarTypeValue',code,enum,object, + vartypevalue],obj5}}, + {structured_error,{M,8},asn1ct_check,invalid_objectset}, + {structured_error,{M,9},asn1ct_check,illegal_object}, + {structured_error,{M,10},asn1ct_check,illegal_object}, + {structured_error,{M,11},asn1ct_check,illegal_object} + ] + } = run(P, Config), + ok. + +object_field_extraction(Config) -> + M = 'ObjectFieldExtraction', + P = {M, + <<"ObjectFieldExtraction DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n" + + " DataObjSet DATA-CLASS ::= {\n" + " holder-object-1.&int,\n" + " ...\n" + " }\n" + + " DataObjSetNoExt DATA-CLASS ::= {\n" + " holder-object-1.&int\n" + " }\n" + + " holder-object-1 HOLDER-CLASS ::= {\n" + " &int 42\n" + " }\n" + + " HOLDER-CLASS ::= CLASS {\n" + " &int INTEGER\n" + " }\n" + + " DATA-CLASS ::= CLASS {\n" + " &id INTEGER\n" + " }\n" + + "END\n">>}, + {error, + [ + {structured_error,{M,2},asn1ct_check,illegal_object}, + {structured_error,{M,6},asn1ct_check,illegal_object} + ] + } = run(P, Config), + ok. + +object_sets(Config) -> + M = 'ObjectSets', + P = {M, <<"ObjectSets DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n" + "TEST-UNIQ ::= CLASS { &id INTEGER UNIQUE, &test INTEGER }\n" + "UniqSet TEST-UNIQ ::= { { &id 1, &test 1 } | {&id 1, &test 2} }\n" + + "DOUBLE-UNIQ ::= CLASS { &id1 INTEGER UNIQUE," + " &id INTEGER UNIQUE }\n" + "DoubleSet DOUBLE-UNIQ ::= { {&id1 1, &id2 2} }\n" + "END\n">>}, + {error, + [{structured_error,{M,3},asn1ct_check,{non_unique_object,1}}, + {structured_error,{M,5},asn1ct_check,multiple_uniqs} + ] + } = run(P, Config), + ok. 
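Every expectation in this suite pattern-matches the raw {structured_error,{Module,Line},ReportModule,Reason} tuples returned on failure. For human consumption the same terms can be rendered as text; the sketch below assumes each report module (asn1ct_check exports format_error/1, as seen earlier in this diff) returns printable chardata in the usual OTP style:

    %% Compile a spec and print each structured error as "Module:Line: text".
    report(File) ->
        case asn1ct:compile(File, [ber]) of
            ok ->
                ok;
            {error, Errors} ->
                [io:format("~s:~p: ~ts~n",
                           [Mod, Line, Fmt:format_error(Reason)])
                 || {structured_error, {Mod, Line}, Fmt, Reason} <- Errors],
                error
        end.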
+ +oids(Config) -> + M = 'OIDS', + P = {M,<<"OIDS DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n" + "CONTAINER ::= CLASS { &id OBJECT IDENTIFIER UNIQUE,\n" + " &int INTEGER OPTIONAL,\n" + " &seq SEQUENCE { a INTEGER } OPTIONAL\n" + "}\n" + + "-- This is line 6.\n" + "object-1 CONTAINER ::= { &id {1 2 3}, &int 42 }\n" + "object-2 CONTAINER ::= { &id {1 999}, &int 0 }\n" + "object-3 CONTAINER ::= { &id {1 2}, &seq { a 42 } }\n" + "oid-1 OBJECT IDENTIFIER ::= object-1.&int\n" + "oid-2 OBJECT IDENTIFIER ::= object-2.&id\n" + "oid-3 OBJECT IDENTIFIER ::= object-3.&seq\n" + "-- This is line 13.\n" + + "oid-5 OBJECT IDENTIFIER ::= { a 42, b 19 }\n" + + "oid-6 OBJECT IDENTIFIER ::= int\n" + "int INTEGER ::= 42\n" + + "oid-7 OBJECT IDENTIFIER ::= seq\n" + "seq SEQUENCE { x INTEGER } ::= { x 11 }\n" + + "oid-8 OBJECT IDENTIFIER ::= os\n" + "os OCTET STRING ::= '1234'H\n" + + "oid-9 OBJECT IDENTIFIER ::= { 1 os }\n" + + "oid-10 OBJECT IDENTIFIER ::= { 1 invalid }\n" + + "-- This is line 23.\n" + "oid-11 OBJECT IDENTIFIER ::= { 0 legal-oid }\n" + "legal-oid OBJECT IDENTIFIER ::= {1 2 3}\n" + + "bad-root-1 OBJECT IDENTIFIER ::= {99}\n" + "bad-root-2 OBJECT IDENTIFIER ::= {0 42}\n" + + "oid-object-ref-1 OBJECT IDENTIFIER ::= object-1\n" + "oid-object-ref-2 OBJECT IDENTIFIER ::= { object-1 19 } \n" + + "oid-int OBJECT IDENTIFIER ::= 42\n" + "oid-sequence OBJECT IDENTIFIER ::= {a 42, b 35}\n" + + "END\n">>}, + {error, + [ + {structured_error,{M,8},asn1ct_check,{illegal_oid,o_id}}, + {structured_error,{M,10},asn1ct_check,{illegal_oid,o_id}}, + {structured_error,{M,11},asn1ct_check,{illegal_oid,o_id}}, + {structured_error,{M,12},asn1ct_check,{illegal_oid,o_id}}, + {structured_error,{M,14},asn1ct_check,{illegal_oid,o_id}}, + {structured_error,{M,15},asn1ct_check,{illegal_oid,o_id}}, + {structured_error,{M,17},asn1ct_check,{illegal_oid,o_id}}, + {structured_error,{M,19},asn1ct_check,{illegal_oid,o_id}}, + {structured_error,{M,21},asn1ct_check,{illegal_oid,o_id}}, + {structured_error,{M,22},asn1ct_check,{illegal_oid,o_id}}, + {structured_error,{M,24},asn1ct_check,{illegal_oid,o_id}}, + {structured_error,{M,26},asn1ct_check,{illegal_oid,o_id}}, + {structured_error,{M,27},asn1ct_check,{illegal_oid,o_id}}, + {structured_error,{M,28},asn1ct_check,{illegal_oid,o_id}}, + {structured_error,{M,29},asn1ct_check,{illegal_oid,o_id}}, + {structured_error,{M,30},asn1ct_check,{illegal_oid,o_id}}, + {structured_error,{M,31},asn1ct_check,{illegal_oid,o_id}} ] } = run(P, Config), ok. 
+rel_oids(Config) -> + M = 'REL-OIDS', + P = {M,<<"REL-OIDS DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n" + "legal-oid OBJECT IDENTIFIER ::= {1 2}\n" + "legal-roid RELATIVE-OID ::= {1 2}\n" + "CONTAINER ::= CLASS { &oid OBJECT IDENTIFIER OPTIONAL,\n" + " &int INTEGER OPTIONAL,\n" + " &seq SEQUENCE { a INTEGER } OPTIONAL\n" + "}\n" + "object-1 CONTAINER ::= { &oid {1 2 3},\n" + " &int 42,\n", + " &seq {a 42}\n" + " }\n" + + "wrong-type-rel-oid-1 RELATIVE-OID ::= legal-oid\n" + "wrong-type-rel-oid-2 RELATIVE-OID ::= object-1.&oid\n" + "wrong-type-rel-oid-3 RELATIVE-OID ::= object-1.&int\n" + "wrong-type-rel-oid-4 RELATIVE-OID ::= object-1.&seq\n" + "wrong-type-rel-oid-5 RELATIVE-OID ::= object-1.&undef\n" + + "oid-bad-first OBJECT IDENTIFIER ::= {legal-roid 3}\n" + "END\n">>}, + {error, + [ + {structured_error,{M,12},asn1ct_check,{illegal_oid,rel_oid}}, + {structured_error,{M,13},asn1ct_check,{illegal_oid,rel_oid}}, + {structured_error,{M,14},asn1ct_check,{illegal_oid,rel_oid}}, + {structured_error,{M,15},asn1ct_check,{illegal_oid,rel_oid}}, + {structured_error,{M,16},asn1ct_check,{undefined_field,undef}}, + {structured_error,{M,17},asn1ct_check,{illegal_oid,o_id}} + ] + } = run(P, Config), + ok. + + parameterization(Config) -> M = 'Parameterization', P = {M, <<"Parameterization DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n" " NotUppercase{lowercase} ::= INTEGER (lowercase)\n" + + " P{T1,T2} ::= SEQUENCE { a T1, b T2 }\n" + " S ::= P{OCTET STRING}\n" + + " Seq ::= SEQUENCE { a INTEGER }\n" + " Sbad ::= Seq{INTEGER}\n" + + "END\n">>}, + {error, + [{structured_error,{M,2},asn1ct_check, + {illegal_typereference,lowercase}}, + {structured_error,{M,4},asn1ct_check, + param_wrong_number_of_arguments}, + {structured_error,{M,6},asn1ct_check, + {param_bad_type, 'Seq'}} + ] + } = run(P, Config), + ok. + + +constructed(Config) -> + M = 'Const', + P = {M, + <<"Const DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n" + " Seq1 ::= SEQUENCE {a INTEGER, b BIT STRING, a BOOLEAN}\n" + " Ch ::= CHOICE {a INTEGER, b BIT STRING, a BOOLEAN}\n" + " Seq2 ::= SEQUENCE {COMPONENTS OF Ch}\n" + " CL ::= CLASS { &id INTEGER UNIQUE, &Type }\n" + " Seq3 ::= SEQUENCE { id CL.&id, d CL.&foo }\n" + + " Seq4 ::= SEQUENCE { a INTEGER, z INTEGER OPTIONAL, b Set1 }\n" + " Set1 ::= SET { c BOOLEAN, d INTEGER }\n" + " s1 Seq4 ::= {a 42, b {c TRUE, zz 4711}}\n" + " s2 Seq4 ::= {a 42, b {c TRUE, d FALSE}}\n" + " s3 Seq4 ::= {a 42, b {c TRUE}}\n" + " s4 Seq4 ::= {a 42, b {c TRUE, d 4711}, zz 4712}\n" + " s5 Seq4 ::= {a 42}\n" + " s6 Seq4 ::= {a 42, zz 4712, b {c TRUE, d 4711}}\n" "END\n">>}, {error, - [{structured_error,{'Parameterization',2},asn1ct_check, - {illegal_typereference,lowercase}} - ] - } = run(P, Config), + [{structured_error,{M,2},asn1ct_check,{duplicate_identifier,a}}, + {structured_error,{M,3},asn1ct_check,{duplicate_identifier,a}}, + {structured_error,{M,4},asn1ct_check,{illegal_COMPONENTS_OF,'Ch'}}, + {structured_error,{M,6},asn1ct_check,{illegal_object_field,foo}}, + + {structured_error,{M,9},asn1ct_check,{illegal_id,zz}}, + {structured_error,{M,10},asn1ct_check,illegal_integer_value}, + {structured_error,{M,11},asn1ct_check,{missing_id,d}}, + {structured_error,{M,12},asn1ct_check,{illegal_id,zz}}, + {structured_error,{M,13},asn1ct_check,{missing_id,b}}, + {structured_error,{M,14},asn1ct_check,{illegal_id,zz}} + ] + } = run(P, Config), + ok. 
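The longer cases (constructed, oids, values and friends) spell out every position of every expected error tuple. When only the reasons matter, a small helper, hypothetical and not part of the suite, can strip the positional information first:

    %% Reduce a compiler result to the bare Reason terms, ignoring which
    %% module, line and report module produced them.
    error_reasons({error, Errors}) ->
        [Reason || {structured_error, {_Mod, _Line}, _Report, Reason} <- Errors].

A test could then assert, for example, [{duplicate_identifier,a}|_] = error_reasons(run(P, Config)) without repeating module names and line numbers.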
+ +syntax(Config) -> + M = 'Syntax', + P = {M, + <<"Syntax DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n" + " obj1 CL ::= { WRONG }\n" + " obj2 CL ::= { CODE 42 AGAIN WRONG }\n" + " obj3 CL ::= { INTEGER }\n" + " obj4 CL ::= { BIT STRING }\n" + " obj5 CL ::= { , }\n" + " obj6 CL ::= { CODE , }\n" + " obj7 CL ::= { CODE \"abc\" }\n" + " obj8 CL ::= { CODE }\n" + " obj9 CL ::= { CODE 42 ENUM}\n" + " obj10 CL ::= { CODE 42 ENUM BIT STRING}\n" + + " obj11 CL ::= { CODE 42 TYPE 13}\n" + " obj12 CL ::= { CODE 42 TYPE d}\n" + " obj13 CL ::= { CODE 42 TYPE bs-value}\n" + + " bad-syntax-1 BAD-SYNTAX-1 ::= { BAD 42 }\n" + + " obj14 CL ::= { CODE 42 OBJ-SET integer }\n" + " obj15 CL ::= { CODE 42 OBJ-SET { A B } }\n" + " obj16 CL ::= { CODE 42 OBJ-SET SEQUENCE { an INTEGER } }\n" + + " obj17 CL ::= { CODE 42 OID {seqtag 42} }\n" + " obj18 CL ::= { CODE 42 OID {seqtag 42, seqtag-again 43} }\n" + " obj19 CL ::= { CODE 42 OID {one 1 two 2} }\n" + + " BAD-SYNTAX-1 ::= CLASS {\n" + " &code INTEGER UNIQUE\n" + " } WITH SYNTAX {\n" + " BAD &bad\n" + " }\n" + + " BAD-SYNTAX-2 ::= CLASS {\n" + " &code INTEGER UNIQUE\n" + " } WITH SYNTAX {\n" + " BAD &Bad\n" + " }\n" + + " BAD-SYNTAX-3 ::= CLASS {\n" + " &code INTEGER UNIQUE\n" + " } WITH SYNTAX {\n" + " [ID &code]\n" + " }\n" + + " BAD-SYNTAX-4 ::= CLASS {\n" + " &code INTEGER UNIQUE\n" + " } WITH SYNTAX {\n" + " ID\n" + " }\n" + + " BAD-SYNTAX-5 ::= CLASS {\n" + " &code INTEGER UNIQUE,\n" + " &Type\n" + " } WITH SYNTAX {\n" + " ID\n" + " }\n" + + " BAD-SYNTAX-6 ::= CLASS {\n" + " &code INTEGER UNIQUE\n" + " } WITH SYNTAX {\n" + " ID &code, &code\n" + " }\n" + + " BAD-SYNTAX-7 ::= CLASS {\n" + " &code INTEGER UNIQUE,\n" + " &Type\n" + " } WITH SYNTAX {\n" + " ID &Type, &code, &code, &Type\n" + " }\n" + + " CL ::= CLASS {\n" + " &code INTEGER UNIQUE,\n" + " &enum ENUMERATED { a, b, c} OPTIONAL,\n" + " &Type OPTIONAL,\n" + " &ObjSet CL OPTIONAL,\n" + " &oid OBJECT IDENTIFIER OPTIONAL\n" + " } WITH SYNTAX {\n" + " CODE &code [ENUM &enum] [TYPE &Type] [OBJ-SET &ObjSet]\n" + " [OID &oid]\n" + " }\n" + + " bs-value BIT STRING ::= '1011'B\n" + + " integer INTEGER ::= 42\n" + "END\n">>}, + {error, + [ + {structured_error,{M,2},asn1ct_check, + {syntax_nomatch,"WRONG"}}, + {structured_error,{M,3},asn1ct_check, + {syntax_nomatch,"AGAIN"}}, + {structured_error,{M,4},asn1ct_check, + {syntax_nomatch,"INTEGER"}}, + {structured_error,{M,5},asn1ct_check, + {syntax_nomatch,"BIT STRING"}}, + {structured_error,{M,6},asn1ct_check, + {syntax_nomatch,"\",\""}}, + {structured_error,{M,7},asn1ct_check, + {syntax_nomatch,"\",\""}}, + {structured_error,{M,8},asn1ct_check, + {syntax_nomatch,"\"abc\""}}, + {structured_error,{M,9},asn1ct_check, + syntax_nomatch}, + {structured_error,{M,10},asn1ct_check, + syntax_nomatch}, + {structured_error,{M,11},asn1ct_check, + {syntax_nomatch,"BIT STRING"}}, + {structured_error,{M,12},asn1ct_check, + {syntax_nomatch,"13"}}, + {structured_error,{M,13},asn1ct_check, + {syntax_nomatch,"d"}}, + {structured_error,{M,14},asn1ct_check, + {syntax_nomatch,"bs-value"}}, + {structured_error,{M,15},asn1ct_check, + {syntax_undefined_field,bad}}, + {structured_error,{M,16},asn1ct_check, + {syntax_nomatch,"integer"}}, + {structured_error,{M,17},asn1ct_check, + {syntax_nomatch,"\"A B\""}}, + {structured_error,{M,18},asn1ct_check, + {syntax_nomatch,"SEQUENCE"}}, + {structured_error,{M,19},asn1ct_check, + {syntax_nomatch,"\"seqtag 42\""}}, + {structured_error,{M,20},asn1ct_check, + {syntax_nomatch,"\"seqtag 42 seqtag-again 43\""}}, + {structured_error,{M,21},asn1ct_check, + 
{syntax_nomatch,"\"one 1 two 2\""}}, + {structured_error,{M,22},asn1ct_check, + {syntax_undefined_field,bad}}, + {structured_error,{M,27},asn1ct_check, + {syntax_undefined_field,'Bad'}}, + {structured_error,{M,32},asn1ct_check, + {syntax_mandatory_in_optional_group,code}}, + {structured_error,{M,37},asn1ct_check, + {syntax_missing_mandatory_fields,[code]}}, + {structured_error,{M,42},asn1ct_check, + {syntax_missing_mandatory_fields,['Type',code]}}, + {structured_error,{M,48},asn1ct_check, + {syntax_duplicated_fields,[code]}}, + {structured_error,{M,53},asn1ct_check, + {syntax_duplicated_fields,['Type',code]}} + ] + } = run(P, Config), + ok. + +table_constraints(Config) -> + M = 'TableConstraints', + P = {M, + <<"TableConstraints DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n" + " Seq-1 ::= SEQUENCE {\n" + " contentType CONTENTS.&id,\n" + " content CONTENTS.&Type({Contents}{@contentType})\n" + " }\n" + + " Seq-2 ::= SEQUENCE {\n" + " contentType INTEGER,\n" + " content CONTENTS.&Type({Contents}{@contentType})\n" + " }\n" + + " Int ::= INTEGER ({1})\n" + + " Seq-3 ::= SEQUENCE {\n" + " contentType CONTENTS.&id({1})\n" + " }\n" + + "Contents CONTENTS ::= {\n" + " {OCTET STRING IDENTIFIED BY {2 1 1}}\n" + "}\n" + + "CONTENTS ::= TYPE-IDENTIFIER\n" + "END\n">>}, + {error, + [{structured_error, + {M,2},asn1ct_check, + {missing_table_constraint,contentType}}, + {structured_error, + {M,6},asn1ct_check, + {missing_ocft,contentType}}, + {structured_error, + {M,10},asn1ct_check, + illegal_table_constraint}, + {structured_error, + {M,11},asn1ct_check, + invalid_table_constraint} + ]} = run(P, Config), + ok. + +tags(Config) -> + M = 'Tags', + P = {M, + <<"Tags DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n" + "SeqOpt1 ::= SEQUENCE\n" + "{\n" + "bool1 BOOLEAN OPTIONAL,\n" + "int1 INTEGER,\n" + "seq1 SeqIn OPTIONAL\n" + "}\n" + + "SeqOpt1Imp ::= SEQUENCE \n" + "{\n" + "bool1 [1] BOOLEAN OPTIONAL,\n" + "int1 INTEGER,\n" + "seq1 [2] SeqIn OPTIONAL,\n" + "seq2 [2] SeqIn OPTIONAL,\n" + "...,\n" + "int2 [3] SeqIn,\n" + "int3 [3] SeqIn\n" + "}\n" + + "SeqIn ::= SEQUENCE \n" + "{\n" + "boolIn BOOLEAN,\n" + "intIn INTEGER\n" + "}\n" + "\n" + + + "Set1 ::= SET {\n" + " os [0] OCTET STRING,\n" + " bool [0] BOOLEAN\n" + "}\n" + + "Seq1 ::= SEQUENCE {\n" + "a [0] IMPLICIT Choice OPTIONAL\n" + "}\n" + "Seq2 ::= SEQUENCE {\n" + "a [0] IMPLICIT ANY OPTIONAL\n" + "}\n" + "Choice ::=\n" + "CHOICE {\n" + "a [0] BOOLEAN,\n" + "b [1] INTEGER\n" + "}\n" + + "END\n">>}, + {error, + [{structured_error, + {M,8},asn1ct_check, + {duplicate_tags,[seq1,seq2]}}, + {structured_error, + {M,24},asn1ct_check, + {duplicate_tags,[bool,os]}}, + {structured_error, + {M,28},asn1ct_check, + {implicit_tag_before,choice}}, + {structured_error, + {M,31},asn1ct_check, + {implicit_tag_before,open_type}} + ]} = run(P, Config), ok. 
+ values(Config) -> M = 'Values', P = {M, @@ -241,6 +813,53 @@ values(Config) -> " os1 OCTET STRING ::= \"abc\"\n" " os2 OCTET STRING ::= 42\n" " os3 OCTET STRING ::= { 1, 3 }\n" + " os4 OCTET STRING ::= '1234'H\n" + " Seq ::= SEQUENCE {\n" + " an OCTET STRING\n" + " }\n" + " seq Seq ::= { an int }\n" + " os5 OCTET STRING ::= holder-1.&str\n" + " os6 OCTET STRING ::= int\n" + + " int1 INTEGER ::= \"string\"\n" + " int2 INTEGER ::= os4\n" + " int3 INTEGER ::= not-defined\n" + " int4 INTEGER ::= holder-1.&str\n" + " int5 INTEGER ::= holder-2.&obj\n" + " int6 INTEGER ::= holder-2.&undefined-field\n" + " int7 INTEGER ::= holder-2.&UndefinedField.&id\n" + + " bs1 BIT STRING ::= 42\n" + " bs2 BIT STRING ::= {a,b}\n" + " bs3 BIT STRING {a(0),z(25)} ::= {a,b}\n" + " bs4 BIT STRING {a(0),z(25)} ::= int\n" + " bs5 BIT STRING ::= holder-2.&str\n" + " bs6 BIT STRING ::= holder-2.&obj\n" + + " b1 BOOLEAN ::= 42\n" + " b2 BOOLEAN ::= {a,b}\n" + + " HOLDER ::= CLASS {\n" + " &str IA5String,\n" + " &obj HOLDER OPTIONAL\n" + " }\n" + + " holder-1 HOLDER ::= { &str \"xyz\" }\n" + " holder-2 HOLDER ::= { &str \"xyz\", &obj holder-1 }\n" + + " ext-1 EXTERNAL ::= {identification bad:{1 2 3}, data-value '123'H}\n" + " ext-2 EXTERNAL ::= {identification syntax:{1 2 3}, data '123'H}\n" + + " CH ::= CHOICE { a INTEGER, b BOOLEAN }\n" + " ch1 CH ::= 2344\n" + " ch2 CH ::= zz:34\n" + + " st1 an < Seq ::= 42\n" + " st2 zz < CH ::= 42\n" + " st3 a < HOLDER ::= 42\n" + " st4 a < INTEGER ::= 42\n" + + " int INTEGER ::= 42\n" "END\n">>}, {error, [ @@ -249,7 +868,59 @@ values(Config) -> {structured_error,{M,3},asn1ct_check, illegal_octet_string_value}, {structured_error,{M,4},asn1ct_check, - illegal_octet_string_value} + illegal_octet_string_value}, + {structured_error,{M,9},asn1ct_check, + illegal_octet_string_value}, + {structured_error,{M,10},asn1ct_check, + illegal_octet_string_value}, + {structured_error,{M,11},asn1ct_check, + illegal_octet_string_value}, + {structured_error,{M,12},asn1ct_check, + illegal_integer_value}, + {structured_error,{M,13},asn1ct_check, + illegal_integer_value}, + {structured_error,{M,14},asn1ct_check, + illegal_integer_value}, + {structured_error,{M,15},asn1ct_check, + illegal_integer_value}, + {structured_error,{M,16},asn1ct_check, + illegal_integer_value}, + {structured_error,{M,17},asn1ct_check, + {undefined_field,'undefined-field'}}, + {structured_error,{M,18},asn1ct_check, + {undefined_field,'UndefinedField'}}, + {structured_error,{M,19},asn1ct_check, + {illegal_value, "BIT STRING"}}, + {structured_error,{M,20},asn1ct_check, + {illegal_value, "BIT STRING"}}, + {structured_error,{M,21},asn1ct_check, + {illegal_value, "BIT STRING"}}, + {structured_error,{M,22},asn1ct_check, + {illegal_value, "BIT STRING"}}, + {structured_error,{M,23},asn1ct_check, + {illegal_value, "BIT STRING"}}, + {structured_error,{M,24},asn1ct_check, + {illegal_value, "BIT STRING"}}, + {structured_error,{M,25},asn1ct_check, + {illegal_value, "BOOLEAN"}}, + {structured_error,{M,26},asn1ct_check, + {illegal_value, "BOOLEAN"}}, + {structured_error,{M,33},asn1ct_check, + illegal_external_value}, + {structured_error,{M,34},asn1ct_check, + illegal_external_value}, + {structured_error,{M,36},asn1ct_check, + {illegal_id, 2344}}, + {structured_error,{M,37},asn1ct_check, + {illegal_id, zz}}, + {structured_error,{M,38},asn1ct_check, + {illegal_choice_type, 'Seq'}}, + {structured_error,{M,39},asn1ct_check, + {illegal_id, zz}}, + {structured_error,{M,40},asn1ct_check, + {illegal_choice_type, 'HOLDER'}}, + 
{structured_error,{M,41},asn1ct_check, + {illegal_choice_type, 'INTEGER'}} ] } = run(P, Config), ok. @@ -258,5 +929,7 @@ values(Config) -> run({Mod,Spec}, Config) -> Base = atom_to_list(Mod) ++ ".asn1", File = filename:join(?config(priv_dir, Config), Base), + Include0 = filename:dirname(?config(data_dir, Config)), + Include = filename:join(filename:dirname(Include0), "asn1_SUITE_data"), ok = file:write_file(File, Spec), - asn1ct:compile(File). + asn1ct:compile(File, [{i, Include}]). diff --git a/lib/asn1/test/syntax_SUITE.erl b/lib/asn1/test/syntax_SUITE.erl new file mode 100644 index 0000000000..1a2c938fe5 --- /dev/null +++ b/lib/asn1/test/syntax_SUITE.erl @@ -0,0 +1,340 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2014. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +-module(syntax_SUITE). +-export([suite/0,all/0,groups/0, + assignment/1, + class/1, + constraints/1, + exports/1, + header/1, + imports/1, + objects/1, + sequence/1, + syntax/1, + tokenizer/1, + types/1, + values/1]). + +-include_lib("test_server/include/test_server.hrl"). + +suite() -> [{ct_hooks, [ts_install_cth]}]. + +all() -> + [{group,p}]. + +groups() -> + [{p,parallel(), + [assignment, + class, + constraints, + exports, + header, + imports, + objects, + sequence, + syntax, + tokenizer, + types, + values]}]. + +parallel() -> + case erlang:system_info(schedulers) > 1 of + true -> [parallel]; + false -> [] + end. + +assignment(Config) -> + Head = "Assignment DEFINITIONS AUTOMATIC TAGS ::=\nBEGIN\n", + End = "\nEND\n", + L0 = [{"42",3,{syntax_error,42}}, + {"i",4,{syntax_error,'END'}}, + {"i ::=",3,{syntax_error,'::='}}, + {"i type",4,{syntax_error,'END'}}, + {"i type ::=",3,{syntax_error,'::='}}, + {"i TYPE",4,{syntax_error,'END'}}, + {"i TYPE ::= ",4,{syntax_error,'END'}}, + {"i INTEGER ::= 42 garbage",4,{syntax_error,'END'}}, + {"i{T} Type",4,{syntax_error,'END'}}, + {"TYPE",4,{syntax_error,'END'}}, + {"TYPE ::=",4,{syntax_error,'END'}}, + {"TYPE{ ::=",3,{syntax_error,'::='}}, + {"TYPE{P, ::=",3,{syntax_error,'::='}}, + {"TYPE{P,} ::=",3,{syntax_error,'}'}}, + {"TYPE{Gov:} ::=",3,{syntax_error,':'}}, + {"TYPE{A} CL ",4,{syntax_error,'END'}}, + {"ObjSet CL",4,{syntax_error,'END'}} + ], + L = [{Head++S++End,Line,E} || {S,Line,E} <- L0], + run(L, "Assignment", Config), + ok. 
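%% The expected line numbers above follow from how each snippet is
%% embedded: Head contributes lines 1-2 of the generated source, the
%% snippet itself becomes line 3, and End puts the closing END keyword
%% on line 4. Errors detected while reading the snippet are therefore
%% reported on line 3, and errors only discovered at END (or at end of
%% file) on line 4. A hypothetical extra case keeps the same shape; the
%% expected token is an educated guess, not a case taken from the real
%% suite:
assignment_extra_example(Config) ->
    Head = "Assignment DEFINITIONS AUTOMATIC TAGS ::=\nBEGIN\n",
    End = "\nEND\n",
    L0 = [{"i INTEGER ::= ,",3,{syntax_error,','}}],
    L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
    run(L, "AssignmentExtra", Config),
    ok.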
+ +class(Config) -> + Head = "Class DEFINITIONS AUTOMATIC TAGS ::=\n" + "BEGIN\n" + " CL ::= CLASS {", + End = "\nEND\n", + L0 = [{"id",3,{syntax_error,'id'}}, + {"&id INTEGER",4,{syntax_error,'END'}}, + {"&id INTEGER,",4,{syntax_error,'END'}}, + {"&id,",3,{syntax_error,','}}, + {"&id OPTIONAL",3,{syntax_error,'OPTIONAL'}}, + {"&id INTEGER OPTIONAL",4,{syntax_error,'END'}}, + {"&var &Field",4,{syntax_error,'END'}}, + {"&Type,",4,{syntax_error,'END'}}, + {"&Type OPTIONAL",4,{syntax_error,'END'}}, + {"&ValueSet INTEGER OPTIONAL",4,{syntax_error,'END'}}, + {"&ValueSet INTEGER DEFAULT",4,{syntax_error,'END'}}, + {"&ValueSet INTEGER DEFAULT {",4,{syntax_error,'END'}}, + {"&ValueSet INTEGER DEFAULT {a",4,{syntax_error,'END'}}, + {"&Var &Field",4,{syntax_error,'END'}} + ], + L = [{Head++S++End,Line,E} || {S,Line,E} <- L0], + run(L, "Class", Config), + ok. + +constraints(Config) -> + Head = "Constraints DEFINITIONS AUTOMATIC TAGS ::=\n" + "BEGIN\n" + " Type ::= ", + End = "\nEND\n", + L0 = [{"INTEGER (",4,{syntax_error,'END'}}, + {"INTEGER (10x",3,{syntax_error,x}}, + {"INTEGER (10|(10y",3,{syntax_error,y}}, + {"INTEGER (CONSTRAINED BY {}",4,{syntax_error,'END'}}, + {"INTEGER (CONSTRAINED BY {INTEGER garbage",3, + {syntax_error,garbage}}, + {"INTEGER ({ObjSet",4,{syntax_error,'END'}}, + {"INTEGER ({ObjSet}{",3,{syntax_error,'{'}}, + {"INTEGER ({ObjSet}{@",3,{syntax_error,'{'}}, + {"INTEGER ({ObjSet}{@x",3,{syntax_error,'{'}}, + {"INTEGER ({ObjSet}{@x}",4,{syntax_error,'END'}}, + {"INTEGER (10 !BOOLEAN",4,{syntax_error,'END'}}, + {"INTEGER (10 !BOOLEAN:",4,{syntax_error,'END'}}, + {"INTEGER (10 !BOOLEAN:FALSE",4,{syntax_error,'END'}}, + {"SEQUENCE {} (WITH COMPONENTS { Type })", + 3,{syntax_error,'Type'}}, + {"SEQUENCE {} (WITH COMPONENTS { x (10)", + 4,{syntax_error,'END'}}, + {"SEQUENCE {} (WITH COMPONENTS { ..., x (10)", + 4,{syntax_error,'END'}} + ], + L = [{Head++S++End,Line,E} || {S,Line,E} <- L0], + run(L, "Constraints", Config), + ok. + +exports(Config) -> + Head = "Exports DEFINITIONS AUTOMATIC TAGS ::=\n" + "BEGIN\n" + " EXPORTS ", + End = "\nEND\n", + L0 = [{"Type",4,{syntax_error,'END'}} + ], + L = [{Head++S++End,Line,E} || {S,Line,E} <- L0], + run(L, "Exports", Config), + ok. + +header(Config) -> + L = [{"lowercase",1,{syntax_error,lowercase}}, + {"H ",2,{syntax_error,'END-OF-FILE'}}, + {"H-",1,{syntax_error,'-'}}, + {"42",1,{syntax_error,42}}, + {"H definitions",1,{syntax_error,definitions}}, + {"H DEFINITIONS STUPID TAGS",1,{syntax_error,'STUPID'}}, + {"H DEFINITIONS WHATEVER",1,{syntax_error,'WHATEVER'}}, + {"H DEFINITIONS ::= BEGIN",2,{syntax_error,'END-OF-FILE'}}, + {"BOOLEAN",1,{syntax_error,'BOOLEAN'}} + ], + run(L, "H", Config), + ok. + +imports(Config) -> + Head = "Imports DEFINITIONS AUTOMATIC TAGS ::=\n" + "BEGIN\n" + " IMPORTS ", + End = "\nEND\n", + L0 = [{"Type FROM X",4,{syntax_error,'END'}}, + {"Symbols TO Y",3,{syntax_error,'TO'}} + ], + L = [{Head++S++End,Line,E} || {S,Line,E} <- L0], + run(L, "Imports", Config), + ok. + +objects(Config) -> + Head = "Objects DEFINITIONS AUTOMATIC TAGS ::=\n" + "BEGIN\n" + " object CLASS-NAME ::= ", + End = "\nEND\n", + L0 = [{"{",4,{syntax_error,'END'}}, + {"{&min 1, max 10}",3,{syntax_error,max}}, + {"{&min 1, Max 10}",3,{syntax_error,'Max'}}, + {"{min 1, &max 10}",3,{syntax_error,'&max'}}, + {"{min 1, &Max 10}",3,{syntax_error,'&Max'}}, + {"{RESERVERD WORD BIT}",3,{syntax_error,'BIT'}}, + {"{&min 1",4,{syntax_error,'END'}} + ], + L = [{Head++S++End,Line,E} || {S,Line,E} <- L0], + run(L, "Objects", Config), + ok. 
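%% Unlike the groups that wrap each snippet in a Head/End skeleton, the
%% header/1 cases above compile the snippet as the complete source,
%% which is why their errors land on line 1, or on line 2 when the
%% failure is a premature end-of-file. A hypothetical extra case of the
%% same kind, mirroring the existing missing-END entry, might be:
header_extra_example(Config) ->
    L = [{"H DEFINITIONS AUTOMATIC TAGS ::= BEGIN",2,
          {syntax_error,'END-OF-FILE'}}],
    run(L, "HeaderExtra", Config),
    ok.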
+ +sequence(Config) -> + Head = "Sequence DEFINITIONS AUTOMATIC TAGS ::=\n" + "BEGIN\n" + " Type ::= SEQUENCE {", + End = "\nEND\n", + L0 = [{"",4,{syntax_error,'END'}}, + {" UpperCase",3,{syntax_error,'UpperCase'}}, + {" a b",4,{syntax_error,'END'}}, + {" i INTEGER",4,{syntax_error,'END'}}, + {" ...",4,{syntax_error,'END'}}, + {" ..., [[",4,{syntax_error,'END'}}, + {" ..., [[ a INTEGER ]",3,{syntax_error,']'}}, + {" ..., [[ a INTEGER,",3,{syntax_error,','}}, + {" ..., [[ a INTEGER, ... ]]",3,{syntax_error,','}}, + {" ... !42 xxx",3,{syntax_error,'xxx'}}, + {" ... !42, a INTEGER,",3,{syntax_error,','}} + ], + L = [{Head++S++End,Line,E} || {S,Line,E} <- L0], + run(L, "Sequence", Config), + ok. + +syntax(Config) -> + Head = "Syntax DEFINITIONS AUTOMATIC TAGS ::=\n" + "BEGIN\n" + " CL ::= CLASS { &id INTEGER UNIQUE } WITH SYNTAX ", + End = "\nEND\n", + L0 = [{"{}",3,{syntax_error,'}'}}, + {"WORD",3,{syntax_error,'WORD'}}, + {"{ Word }",3,{syntax_error,'Word'}}, + {"{ [ Word ] }",3,{syntax_error,'Word'}}, + {"{ [ WORD }",3,{syntax_error,'}'}}, + {"{ WORD;",3,{syntax_error,';'}} + ], + L = [{Head++S++End,Line,E} || {S,Line,E} <- L0], + run(L, "Syntax", Config), + ok. + +tokenizer(Config) -> + Head = "Tokenize DEFINITIONS AUTOMATIC TAGS ::=\n" + "BEGIN\n", + End = "\nEND\n", + L0 = [{"'",3,eol_in_token}, + {"'42'B",3,{invalid_binary_number,"42"}}, + {"'ZZZ'H",3,{invalid_hex_number,"ZZZ"}}, + {"\"abc",3,missing_quote_at_eof}, + {"/*",3,eof_in_comment} + ], + L = [{Head++S++End,Line,E} || {S,Line,E} <- L0], + run(L, "Tokenizer", Config, asn1ct_tok), + ok. + +types(Config) -> + Head = "Types DEFINITIONS AUTOMATIC TAGS ::=\n" + "BEGIN\n" + " Type ::= ", + End = "\nEND\n", + L0 = [{"BIT STRING garbage",4,{syntax_error,'END'}}, + {"BIT STRING {",4,{syntax_error,'END'}}, + {"BIT STRING { a(42",3,{syntax_error,42}}, + {"BIT STRING { a(0)",4,{syntax_error,'END'}}, + {"CHOICE {",4,{syntax_error,'END'}}, + {"CHOICE { ..., a}",3,{syntax_error,'...'}}, + {"CHOICE { UpperCase",3,{syntax_error,'UpperCase'}}, + {"CHOICE { i INTEGER",4,{syntax_error,'END'}}, + {"CHOICE { ..., i INTEGER }",3,{syntax_error,'...'}}, + {"CHOICE { b BOOLEAN, ..., i INTEGER", + 4,{syntax_error,'END'}}, + {"CHOICE { b BOOLEAN, ..., [[ e BOOLEAN, ...]]}", + 3,{syntax_error,','}}, + {"CHOICE { b BOOLEAN, ..., i INTEGER, ..., x BIT STRING}", + 3,{syntax_error,','}}, + {"ENUMERATED {",4,{syntax_error,'END'}}, + {"ENUMERATED { 42 }",3,{syntax_error,42}}, + {"ENUMERATED { a, b",4,{syntax_error,'END'}}, + {"ENUMERATED { a, }",3,{syntax_error,','}}, + {"ENUMERATED { a, ...,\nb, ..., c }",4,{syntax_error,','}}, + {"INTEGER {",4,{syntax_error,'END'}}, + {"INTEGER { a(42)",4,{syntax_error,'END'}}, + {"SEQUENCE",3,{syntax_error,'SEQUENCE'}}, + %% More tests for SEQUENCE in sequence/1. + {"SEQUENCE SIZE (1..10)",4,{syntax_error,'END'}}, + {"SEQUENCE (SIZE (1..10))",4,{syntax_error,'END'}}, + {"SET { i INTEGER",4,{syntax_error,'END'}}, + {"SET { ...",4,{syntax_error,'END'}}, + {"SET SIZE (1..10)",4,{syntax_error,'END'}}, + {"SET (SIZE (1..10))",4,{syntax_error,'END'}}, + {"SET { ... !42 xxx",3,{syntax_error,'xxx'}}, + {"SET { ... !42, a INTEGER,",3,{syntax_error,','}}, + {"[",4,{syntax_error,'END'}}, + {"[42",4,{syntax_error,'END'}} + ], + L = [{Head++S++End,Line,E} || {S,Line,E} <- L0], + run(L, "Types", Config), + ok. 
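%% Tokenizer failures are reported by asn1ct_tok rather than by the
%% parser, which is why tokenizer/1 above passes that module explicitly
%% as the fourth argument to run/4, while the other groups rely on the
%% asn1ct_parser2 default. A hypothetical additional case, mirroring
%% the '42'B entry, would follow the same shape:
tokenizer_extra_example(Config) ->
    Head = "Tokenize DEFINITIONS AUTOMATIC TAGS ::=\n"
           "BEGIN\n",
    End = "\nEND\n",
    L0 = [{"'777'B",3,{invalid_binary_number,"777"}}],
    L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
    run(L, "TokenizerExtra", Config, asn1ct_tok),
    ok.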
+ +values(Config) -> + Head = "Values DEFINITIONS AUTOMATIC TAGS ::=\n" + "BEGIN\n" + " value Type ::= ", + End = "\nEND\n", + L0 = [{"",4,{syntax_error,'END'}} + ], + L = [{Head++S++End,Line,E} || {S,Line,E} <- L0], + run(L, "Values", Config), + ok. + +run(List, File, Config) -> + run(List, File, Config, asn1ct_parser2). + +run(List, File0, Config, Module) -> + Base = File0 ++ ".asn1", + File = filename:join(?config(priv_dir, Config), Base), + case run_1(List, Base, File, Module, 0) of + 0 -> ok; + Errors -> ?t:fail(Errors) + end. + +run_1([{Source,Line,Error}=Exp|T], Base, File, Module, N) -> + ok = file:write_file(File, Source), + io:format("~s", [Source]), + case asn1ct:compile(File) of + {error,[{structured_error,{Base,L},Module,E}]} -> + case {L,E} of + {Line,Error} -> + run_1(T, Base, File, Module, N); + {Line,OtherError} -> + io:format("*** Wrong error: ~p, expected ~p ***\n", + [OtherError,Error]), + run_1(T, Base, File, Module, N+1); + {OtherLine,Error} -> + io:format("*** Wrong line: ~p, expected ~p ***\n", + [OtherLine,Line]), + run_1(T, Base, File, Module, N+1); + {_,_} -> + io:format("*** Wrong line: ~p, expected ~p ***", + [L,Line]), + io:format("*** Wrong error: ~p, expected ~p ***\n", + [E,Error]), + run_1(T, Base, File, Module, N+1) + end; + Other -> + io:format("~p\nGOT: ~p", [Exp,Other]) + end; +run_1([], _, _, _, N) -> + N. diff --git a/lib/asn1/test/testChoExtension.erl b/lib/asn1/test/testChoExtension.erl index 09e19ceebb..f36d6c1cbf 100644 --- a/lib/asn1/test/testChoExtension.erl +++ b/lib/asn1/test/testChoExtension.erl @@ -39,11 +39,6 @@ extension(_Rules) -> roundtrip('ChoExt3', {int,33}), roundtrip('ChoExt4', {str,<<"abc">>}), - roundtrip('ChoEmptyRoot', {bool,false}), - roundtrip('ChoEmptyRoot', {bool,true}), - roundtrip('ChoEmptyRoot', {int,0}), - roundtrip('ChoEmptyRoot', {int,7}), - ok. diff --git a/lib/asn1/test/testConstraints.erl b/lib/asn1/test/testConstraints.erl index 3ccf883bd6..5d65cb8d73 100644 --- a/lib/asn1/test/testConstraints.erl +++ b/lib/asn1/test/testConstraints.erl @@ -231,6 +231,28 @@ int_constraints(Rules) -> seq_roundtrip(Rules, 'SeqOverlapping', 'SeqNonOverlapping', 19000), seq_roundtrip(Rules, 'SeqOverlapping', 'SeqNonOverlapping', 26900), + %%========================================================== + %% Constraints from object fields. + %%========================================================== + range_error(Rules, 'IntObjectConstr', 1), + roundtrip('IntObjectConstr', 2), + roundtrip('IntObjectConstr', 3), + roundtrip('IntObjectConstr', 4), + range_error(Rules, 'IntObjectConstr', 5), + + + %%========================================================== + %% INTEGER constraints defined using named INTEGERs. + %%========================================================== + 42 = 'Constraints':'constrainedNamedInt-1'(), + 100 = 'Constraints':'constrainedNamedInt-2'(), + range_error(Rules, 'ConstrainedNamedInt', 41), + roundtrip('ConstrainedNamedInt', v1), + range_error(Rules, 'ConstrainedNamedInt', 43), + + range_error(Rules, 'SeqWithNamedInt', {'SeqWithNamedInt',-100}), + roundtrip('SeqWithNamedInt', {'SeqWithNamedInt',v2}), + ok. 
%% PER: Ensure that if the lower bound is Lb, Lb+16#80 is encoded diff --git a/lib/asn1/test/testEnumExt.erl b/lib/asn1/test/testEnumExt.erl index 878518be11..29995d6340 100644 --- a/lib/asn1/test/testEnumExt.erl +++ b/lib/asn1/test/testEnumExt.erl @@ -78,6 +78,9 @@ common(Erule) -> v_roundtrip(Erule, 'SeqBig', {'SeqBig',true,e40,9357}), v_roundtrip(Erule, 'SeqBig', {'SeqBig',true,e80,9357}), + + v_roundtrip(Erule, 'EnumSkip', d), + ok. roundtrip(Type, Value) -> @@ -85,11 +88,20 @@ roundtrip(Type, Value) -> v_roundtrip(Erule, Type, Value) -> Encoded = roundtrip(Type, Value), - Encoded = asn1_test_lib:hex_to_bin(v(Erule, Value)). - -v(ber, {'SeqBig',true,e40,9357}) -> "300A8001 FF810141 8202248D"; -v(ber, {'SeqBig',true,e80,9357}) -> "300B8001 FF810200 81820224 8D"; -v(per, {'SeqBig',true,e40,9357}) -> "E0014002 248D"; -v(per, {'SeqBig',true,e80,9357}) -> "E0018002 248D"; -v(uper, {'SeqBig',true,e40,9357}) -> "E0280044 91A0"; -v(uper, {'SeqBig',true,e80,9357}) -> "E0300044 91A0". + Encoded = asn1_test_lib:hex_to_bin(v(Erule, Type, Value)). + +v(Erule, 'SeqBig', Value) -> + v_seq_big(Erule, Value); +v(Erule, 'EnumSkip', Value) -> + v_enum_skip(Erule, Value). + +v_seq_big(ber, {'SeqBig',true,e40,9357}) -> "300A8001 FF810141 8202248D"; +v_seq_big(ber, {'SeqBig',true,e80,9357}) -> "300B8001 FF810200 81820224 8D"; +v_seq_big(per, {'SeqBig',true,e40,9357}) -> "E0014002 248D"; +v_seq_big(per, {'SeqBig',true,e80,9357}) -> "E0018002 248D"; +v_seq_big(uper, {'SeqBig',true,e40,9357}) -> "E0280044 91A0"; +v_seq_big(uper, {'SeqBig',true,e80,9357}) -> "E0300044 91A0". + +v_enum_skip(per, d) -> "82"; +v_enum_skip(uper, d) -> "82"; +v_enum_skip(ber, d) -> "0A0103". diff --git a/lib/asn1/test/testExtensibilityImplied.erl b/lib/asn1/test/testExtensibilityImplied.erl new file mode 100644 index 0000000000..8049bb6e53 --- /dev/null +++ b/lib/asn1/test/testExtensibilityImplied.erl @@ -0,0 +1,29 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2014. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% +%% + +-module(testExtensibilityImplied). +-export([main/0]). + +main() -> + M = 'ExtensibilityImplied', + {'Seq2',true} = M:decode('Seq2', M:encode('Seq1', {'Seq1',true,42})), + {'Set2',true} = M:decode('Set2', M:encode('Set1', {'Set1',true,42})), + {asn1_enum,_} = M:decode('Enum2', M:encode('Enum1', ext)), + ok. diff --git a/lib/asn1/test/testImporting.erl b/lib/asn1/test/testImporting.erl new file mode 100644 index 0000000000..de8beae38b --- /dev/null +++ b/lib/asn1/test/testImporting.erl @@ -0,0 +1,34 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2014. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. 
+%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% +%% + +-module(testImporting). +-export([main/0]). + +main() -> + M = 'Importing', + roundtrip('Seq', {'Seq',5}), + roundtrip('OtherSeq', {'Seq',42,true}), + {'Seq',42,true} = M:seq(), + roundtrip('ObjSeq', {'ObjSeq',1,<<"XYZ">>}), + roundtrip('ObjSeq', {'ObjSeq',2,19}), + ok. + +roundtrip(Type, Value) -> + asn1_test_lib:roundtrip('Importing', Type, Value). diff --git a/lib/asn1/test/testInfObj.erl b/lib/asn1/test/testInfObj.erl index 37c134b1b9..3044d5cd2a 100644 --- a/lib/asn1/test/testInfObj.erl +++ b/lib/asn1/test/testInfObj.erl @@ -74,6 +74,12 @@ main(_Erule) -> {'ConstructedPdu',7,[]}), roundtrip('InfObj', 'ConstructedPdu', {'ConstructedPdu',7,[64,1,19,17,35]}), + {'ConstructedPdu',8,[{_,-15,35},{_,533,-70}]} = + enc_dec('InfObj', 'ConstructedPdu', + {'ConstructedPdu',8,[{'_',-15,35},{'_',533,-70}]}), + {'ConstructedPdu',9,[{RecTag9,-15,35},{RecTag9,533,-70}]} = + enc_dec('InfObj', 'ConstructedPdu', + {'ConstructedPdu',9,[{'_',-15,35},{'_',533,-70}]}), roundtrip('InfObj', 'ConstructedSet', {'ConstructedSet',1,{'CONSTRUCTED-DEFAULT_Type',-2001,true}}), @@ -96,6 +102,12 @@ main(_Erule) -> {'ConstructedSet',7,[]}), roundtrip('InfObj', 'ConstructedSet', {'ConstructedSet',7,[64,1,19,17,35]}), + {'ConstructedSet',8,[{_,-15,35},{_,533,-70}]} = + enc_dec('InfObj', 'ConstructedSet', + {'ConstructedSet',8,[{'_',-15,35},{'_',533,-70}]}), + {'ConstructedSet',9,[{_,-15,35},{_,533,-70}]} = + enc_dec('InfObj', 'ConstructedSet', + {'ConstructedSet',9,[{'_',-15,35},{'_',533,-70}]}), roundtrip('InfObj', 'Seq2', {'Seq2',42,[true,false,false,true], @@ -126,12 +138,37 @@ main(_Erule) -> test_objset('OstSeq45', [4,5]), test_objset('OstSeq12345', [1,2,3,4,5]), + test_objset('OstSeq12Except', [1,2]), + test_objset('OstSeq123Except', [1,2]), + test_objset('ExOstSeq12', [1,2]), test_objset('ExOstSeq123', [1,2,3]), - %%test_objset('ExOstSeq1234', [1,2,3,4]), + test_objset('ExOstSeq1234', [1,2,3,4]), test_objset('ExOstSeq45', [4,5]), test_objset('ExOstSeq12345', [1,2,3,4,5]), + test_objset('ExOstSeq12Except', [1,2]), + test_objset('ExOstSeq123Except', [1,2]), + + roundtrip('InfObj', 'ExtClassSeq', {'ExtClassSeq', 4}), + + {1,2,42} = 'InfObj':'value-1'(), + {1,2,42,25} = 'InfObj':'value-2'(), + {100,101} = 'InfObj':'value-3'(), + {1,2,100,101} = 'InfObj':'value-4'(), + + roundtrip('InfObj', 'Rdn', {'Rdn',{2,5,4,41},"abc"}), + + roundtrip('InfObj', 'TiAliasSeq', + {'TiAliasSeq',{'TiAliasSeq_prf',{2,1,2},'NULL'}}), + + roundtrip('InfObj', 'ContentInfo', + {'ContentInfo',{2,7,8,9},"string"}), + {2,7,8,9} = 'InfObj':'id-content-type'(), + + <<2#1011:4>> = 'InfObj':'tricky-bit-string'(), + <<16#CAFE:16>> = 'InfObj':'tricky-octet-string'(), + ok. test_objset(Type, Keys) -> diff --git a/lib/asn1/test/testInfObjExtract.erl b/lib/asn1/test/testInfObjExtract.erl new file mode 100644 index 0000000000..0ef967c1f6 --- /dev/null +++ b/lib/asn1/test/testInfObjExtract.erl @@ -0,0 +1,72 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2014. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. 
If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% +%% + +-module(testInfObjExtract). + +-export([main/0]). + +main() -> + roundtrip_data_object_13('DataSeq-1'), + + roundtrip_data_object_1('DataSeq-2'), + roundtrip_data_object_1('DataSeq-3'), + roundtrip_data_object_1('DataSeq-4'), + + roundtrip_data_object_13('DataSeq-5'), + roundtrip_data_object_13('DataSeq-6'), + + roundtrip_data_object_1('DataSeqSingleSet-1'), + roundtrip_data_object_1('DataSeqSingleSet-2'), + + roundtrip('ObjClassSeq-1', {'ObjClassSeq-1',1,true}), + roundtrip('ObjClassSeq-1', {'ObjClassSeq-1',2,true}), + + roundtrip_error('ObjClassSeq-1', {'ObjClassSeq-1',0,false}), + roundtrip_error('ObjClassSeq-1', {'ObjClassSeq-1',3,true}), + roundtrip_error('ObjClassSeq-1', {'ObjClassSeq-1',4,false}), + roundtrip_error('ObjClassSeq-1', {'ObjClassSeq-1',5,true}), + + ok. + +roundtrip_data_object_13(SeqType) -> + roundtrip(SeqType, {SeqType,1,true}), + roundtrip(SeqType, {SeqType,2,<<"abc">>}), + roundtrip(SeqType, {SeqType,3,<<42:5>>}), + roundtrip_error(SeqType, {SeqType,4,42}). + +roundtrip_data_object_1(SeqType) -> + roundtrip(SeqType, {SeqType,1,false}), + roundtrip(SeqType, {SeqType,1,true}), + roundtrip_error(SeqType, {SeqType,1,42}), + roundtrip_error(SeqType, {SeqType,2,<<"abc">>}), + roundtrip_error(SeqType, {SeqType,3,<<42:5>>}), + roundtrip_error(SeqType, {SeqType,999,42}). + +roundtrip(T, V) -> + asn1_test_lib:roundtrip('InfObjExtract', T, V). + +roundtrip_error(T, V) -> + try asn1_test_lib:roundtrip('InfObjExtract', T, V) of + ok -> + test_server:fail() + catch + _:_ -> + ok + end. diff --git a/lib/asn1/test/testParamBasic.erl b/lib/asn1/test/testParamBasic.erl index 39f7947e8d..5f6116bba4 100644 --- a/lib/asn1/test/testParamBasic.erl +++ b/lib/asn1/test/testParamBasic.erl @@ -46,6 +46,14 @@ main(Rules) -> roundtrip('AnAlgorithm', {'AnAlgorithm',1,42}), roundtrip('AnAlgorithm', {'AnAlgorithm',2,true}), roundtrip('AnAlgorithm', {'AnAlgorithm',2,false}), + {'AnAlgorithm',1,42} = 'ParamBasic':'alg-seq-1'(), + {'AnAlgorithm',2,true} = 'ParamBasic':'alg-seq-2'(), + + roundtrip('Seq', {'Seq', + {'Seq_c1',{2,1,1},42}, + {'Seq_c2',{2,1,1,1},asn1_NOVALUE}}), + + {_,{2,9,9,9,7},'NULL'} = 'ParamBasic':'algid-hmacWithSHA1'(), ok. roundtrip(Type, Value) -> diff --git a/lib/asn1/test/testPrim.erl b/lib/asn1/test/testPrim.erl index e07379e634..d7893a2d58 100644 --- a/lib/asn1/test/testPrim.erl +++ b/lib/asn1/test/testPrim.erl @@ -98,6 +98,11 @@ enum(Rules) -> ber -> ok end, + + roundtrip('NegEnumVal', neg), + roundtrip('NegEnumVal', zero), + roundtrip('EnumVal128', val), + ok. diff --git a/lib/asn1/test/testRfcs.erl b/lib/asn1/test/testRfcs.erl new file mode 100644 index 0000000000..6281d09873 --- /dev/null +++ b/lib/asn1/test/testRfcs.erl @@ -0,0 +1,75 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2014. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. 
+%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% +%% + +-module(testRfcs). + +-export([compile/3,test/0]). + +-include_lib("test_server/include/test_server.hrl"). + +compile(Config, Erules, Options0) -> + Options = [no_ok_wrapper|Options0], + DataDir = ?config(data_dir, Config), + Specs0 = filelib:wildcard("*.asn1", filename:join(DataDir, rfcs)), + Specs = [filename:join(rfcs, Spec) || Spec <- Specs0], + 122 = length(Specs), + CaseDir = ?config(case_dir, Config), + asn1_test_lib:compile_all(Specs, Config, [Erules,{i,CaseDir}|Options]). + +test() -> + {1,3,6,1,5,5,7,48,1,2} = + IdPkixOcspNonce = + 'OCSP-2009':'id-pkix-ocsp-nonce'(), + roundtrip('OCSP-2009', 'OCSPRequest', + {'OCSPRequest', + {'TBSRequest', + 0, + {rfc822Name,"name string"}, + [{'Request', + {'CertID',{'_',{2,9,3,4,5},asn1_NOVALUE}, + <<"POTATOHASH">>,<<"HASHBROWN">>,42}, + [{'_',IdPkixOcspNonce,true,<<34,159,16,57,199>>}]}], + asn1_NOVALUE}, + asn1_NOVALUE}), + otp_7759(), + ok. + +roundtrip(Module, Type, Value0) -> + Enc = Module:encode(Type, Value0), + Value1 = Module:decode(Type, Enc), + asn1_test_lib:match_value(Value0, Value1), + ok. + +otp_7759() -> + %% The release note for asn-1.6.6 says: + %% Decode of an open_type when the value was empty tagged + %% type encoded with indefinite length failed. + Mod = 'OLD-PKCS7', + Encoded = encoded_msg(), + ContentInfo = Mod:decode('ContentInfo', Encoded), + io:format("~p\n", [ContentInfo]), + {'ContentInfo',_Id,PKCS7_content} = ContentInfo, + X = Mod:decode('SignedData', PKCS7_content), + io:format("~p\n", [X]), + io:nl(), + ok. + +encoded_msg() -> + <<48,128,6,9,42,134,72,134,247,13,1,7,2,160,128,48,128,2,1,1,49,11,48,9,6,5,43,14,3,2,26,5,0,48,128,6,9,42,134,72,134,247,13,1,7,1,160,128,36,128,0,0,0,0,0,0, 49,130,1,192,48,130,1,188,2,1,1,48,50,48,38,49,17,48,15,6,3,85,4,3,12,8,65,100,109,105,110,67,65,49,49,17,48,15,6,3,85,4,10,12,8,69,82,73,67,83,83,79,78,2,8,15,151,245,186,21,23,240,96,48,9,6,5,43,14,3,2,26,5,0,160,129,229,48,17,6,10,96,134,72,1,134,248,69,1,9,2,49,3,19,1,51,48,17,6,10,96,134,72,1,134,248,69,1,9,3,49,3,19,1,51,48,24,6,9,42,134,72,134,247,13,1,9,3,49,11,6,9,42,134,72,134,247,13,1,7,1,48,28,6,9,42,134,72,134,247,13,1,9,5,49,15,23,13,48,56,49,50,49,48,48,57,53,52,50,51,90,48,28,6,10,96,134,72,1,134,248,69,1,9,7,49,14,19,12,49,53,50,56,49,52,50,52,48,57,53,53,48,32,6,10,96,134,72,1,134,248,69,1,9,5,49,18,4,16,165,115,177,71,78,88,239,113,78,56,98,98,18,202,217,235,48,32,6,10,96,134,72,1,134,248,69,1,9,6,49,18,4,16,227,174,230,251,43,153,252,65,11,93,231,83,34,18,55,46,48,35,6,9,42,134,72,134,247,13,1,9,4,49,22,4,20,218,57,163,238,94,107,75,13,50,85,191,239,149,96,24,144,175,216,7,9,48,13,6,9,42,134,72,134,247,13,1,1,1,5,0,4,129,128,106,233,116,125,140,51,133,173,63,41,54,138,214,211,89,215,169,125,98,77,16,222,216,240,211,79,125,111,87,186,73,63,253,204,107,102,177,63,174,197,224,212,231,172,149,246,33,68,223,67,102,93,64,152,152,5,216,102,247,134,36,197,150,236,57,77,56,138,95,71,204,31,23,149,241,213,78,172,165,249,100,187,12,45,19,57,67,120,54,63,15,239,41,217,127,61,254,60,201,104,68,3,135,214,206,93,253,255,192,94,56,107,68,210,57,61,41,249,47,156,130,244,52,12,163,216,236,69,0,0,0,0,0,0>>. 
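%% The RFC specs above are compiled with the no_ok_wrapper option, so
%% the generated encode/2 and decode/2 return their results directly;
%% that is why roundtrip/3 and otp_7759/0 can match on the plain
%% values. With the default, wrapped API the same helper would look
%% roughly like this sketch:
roundtrip_wrapped(Module, Type, Value0) ->
    {ok,Enc} = Module:encode(Type, Value0),
    {ok,Value1} = Module:decode(Type, Enc),
    asn1_test_lib:match_value(Value0, Value1),
    ok.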
diff --git a/lib/asn1/test/testSelectionTypes.erl b/lib/asn1/test/testSelectionTypes.erl index 6d060321da..7d273fe656 100644 --- a/lib/asn1/test/testSelectionTypes.erl +++ b/lib/asn1/test/testSelectionTypes.erl @@ -23,10 +23,34 @@ -include_lib("test_server/include/test_server.hrl"). test() -> - Val = ["PrintableString","PrintableString","PrintableString"], ["Es"] = Val2 = ['SelectionType':einsteinium()], - roundtrip('MendeleyevTable', Val), + roundtrip('MendeleyevTable', ["fox","tree","cat","stone"]), roundtrip('MendeleyevTable', Val2), + roundtrip('MendeleyevSet', [42,57,93,101]), + + M = 'SelectionType', + true = M:boolv(), + 4 = M:intv(), + <<2#1001:4>> = M:bsv(), + <<16#3130:16>> = M:osv(), + 'NULL' = M:nullv(), + {2,1,1} = M:oiv(), + "ObjectDesc" = M:odv(), + "utf8" = M:utfv(), + {5,32767,256} = M:rov(), + "089" = M:numsv(), + "telet" = M:teletv(), + "t61" = M:t61v(), + "video" = M:videov(), + "ia5" = M:ia5v(), + "9805281429Z" = M:utctimev(), + "19980528142905.1" = M:gTime(), + "graphic" = M:gsv(), + "visible" = M:vsv(), + "general" = M:gStringv(), + "Universal" = M:univv(), + "bmp" = M:bmov(), + ok. roundtrip(T, V) -> diff --git a/lib/asn1/test/testUniqueObjectSets.erl b/lib/asn1/test/testUniqueObjectSets.erl new file mode 100644 index 0000000000..1ef61a885a --- /dev/null +++ b/lib/asn1/test/testUniqueObjectSets.erl @@ -0,0 +1,175 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2014. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% +%% + +-module(testUniqueObjectSets). +-export([main/3]). + +%% Run-time function called by the generated code. +seq_roundtrip(I, D0) -> + M = 'UniqueObjectSets', + try + {ok,Enc} = M:encode('Seq', {'Seq',I,D0}), + {ok,{'Seq',I,D}} = M:decode('Seq', Enc), + D + catch C:E -> + Stk = erlang:get_stacktrace(), + io:format("FAILED: ~p ~p\n", [I,D0]), + erlang:raise(C, E, Stk) + end. + +types() -> + [{"CHOICE { a INTEGER, b BIT STRING }", {b,<<42:3>>}}, + {"INTEGER",42}, + {"SEQUENCE {a OCTET STRING}",{'_',<<"abc">>}}, + {"SEQUENCE {b BOOLEAN, ...}",{'_',true}}, + {"SEQUENCE {b BOOLEAN, ..., s IA5String, ..., e ENUMERATED { x, y, z}}", + {'_',false,"string",y}}, + {"SET {a BIT STRING}",{'_',<<1:17>>}}, + {"SEQUENCE OF INTEGER",[-19,0,555,777]}, + {"SET OF BOOLEAN",[true,false,true]}, + {"SEQUENCE OF SEQUENCE {x INTEGER (0..7)}",[{'_',7},{'_',0}]}, + {"SET OF SEQUENCE {x INTEGER (0..7)}",[{'_',7},{'_',0}]} + ]. 
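%% A concrete illustration of what the generator functions further down
%% emit for the second types/0 entry, {"INTEGER",42}, which main/3
%% numbers as 2. The expected strings in the comments are derived from
%% gen_types/2, gen_obj/1 and gen_test/2 below; whitespace is shown only
%% approximately.
generated_forms_example() ->
    %% "AType2 ::= INTEGER\n"
    Type = lists:flatten(gen_types(2, "INTEGER")),
    %% "obj-2 TEST-UNIQUE ::= {IDENTIFIED BY 2 TYPE AType2}\n"
    Obj = lists:flatten(gen_obj(2)),
    %% "42 = SeqRoundtrip(2, 42),\n" (modulo leading indentation)
    TestLine = lists:flatten(gen_test(2, 42)),
    {Type,Obj,TestLine}.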
+ +main(CaseDir, Rule, Opts) -> + D0 = types(), + {D1,_} = lists:mapfoldl(fun({T,S}, I) -> + {{I,T,S},I+1} + end, 1, D0), + Types = [gen_types(I, Type) || {I,Type,_} <- D1], + Set = [gen_set_items(I, T) || {I,T,_} <- D1], + Objs = [gen_obj(I) || {I,_,_} <- D1], + DupObjs = [gen_dup_obj(I, T) || {I,T,_} <- D1], + DupObjRefs0 = [gen_dup_obj_refs(I) || {I,_,_} <- D1], + DupObjRefs = string:join(DupObjRefs0, " |\n"), + Asn1Spec = 'UniqueObjectSets', + A = ["UniqueObjectSets DEFINITIONS AUTOMATIC TAGS ::=\n", + "BEGIN\n\n", + "TEST-UNIQUE ::= CLASS {\n" + " &id INTEGER UNIQUE,\n" + " &Type OPTIONAL\n" + "}\n" + "WITH SYNTAX {IDENTIFIED BY &id [TYPE &Type]}\n", + $\n, + "DUP-CONTAINER ::= CLASS {\n" + " &id INTEGER UNIQUE,\n" + " &data TEST-UNIQUE\n" + "} WITH SYNTAX {\n" + " ID &id, &data\n" + "}\n", + $\n, + Types,$\n, + "UniqSet TEST-UNIQUE ::= {\n", + Set, + " DupSet-1 |\n", + " DupSet-2, ...\n", + "}\n\n", + Objs,$\n, + DupObjs,$\n, + "DupSet-1 TEST-UNIQUE ::= {\n", + DupObjRefs,$\n, + "}\n\n", + "DupSet-2 TEST-UNIQUE ::= {\n", + DupObjRefs,",...\n", + "}\n\n", + "Seq ::= SEQUENCE {\n" + " id TEST-UNIQUE.&id ({UniqSet}),\n" + " type TEST-UNIQUE.&Type ({UniqSet}{@id})\n" + "}\n" + "END\n"], + Asn1File = filename:join(CaseDir, atom_to_list(Asn1Spec)++".asn1"), + ok = file:write_file(Asn1File, A), + + TestModule = 'unique_object_sets', + Test0 = [gen_test(I, Data) || {I,_,Data} <- D1], + Test = ["-module(",atom_to_list(TestModule),").\n" + "-export([main/1]).\n" + "\n" + "main(SeqRoundtrip) ->\n", + " ",atom_to_list(Rule)," = '",atom_to_list(Asn1Spec), + "':encoding_rule(),\n", + Test0, + " ok.\n" + ], + ErlFile = filename:join(CaseDir, atom_to_list(TestModule)++".erl"), + ok = file:write_file(ErlFile, Test), + + io:format("~s\n~s\n", [Asn1File,ErlFile]), + case Rule of + per -> + io:put_chars([A,$\n,Test,$\n]); + _ -> + ok + end, + + ok = asn1ct:compile(Asn1File, [Rule,{outdir,CaseDir}|Opts]), + {ok,TestModule} = c:c(ErlFile, [{outdir,CaseDir}]), + TestModule:main(fun seq_roundtrip/2), + ok. + +gen_types(I, Type) -> + io_lib:format("AType~p ::= ~s\n", [I,Type]). + +gen_set_items(I, T) -> + io_lib:format(" {IDENTIFIED BY ~p TYPE AType~p} |\n" + " {IDENTIFIED BY ~p TYPE AType~p} |\n" + " {IDENTIFIED BY ~p TYPE ~s} |\n" + " obj-~p |\n\n", + [I,I,I,I,I,T,I]). + +gen_obj(I) -> + io_lib:format("obj-~p TEST-UNIQUE ::= {IDENTIFIED BY ~p TYPE AType~p}\n", + [I,I,I]). + +gen_dup_obj(I, T) -> + io_lib:format("dup-obj-~p DUP-CONTAINER ::= " + "{ID ~p, {IDENTIFIED BY ~p TYPE ~s}}\n", + [I,I,I+1000,T]). + +gen_dup_obj_refs(I) -> + io_lib:format("dup-obj-~p.&data", [I]). + +gen_test(I, Data) -> + io_lib:format(" ~s = SeqRoundtrip(~p, ~p),\n", + [match_term(Data),I,Data]). + +match_term('_') -> + "_"; +match_term([H|T]=L) -> + case is_intlist(L) of + true -> + io_lib:format("~p", [L]); + false -> + ["[",match_term(H),"|",match_term(T),"]"] + end; +match_term(Tuple) when is_tuple(Tuple) -> + ["{",match_term_tuple(Tuple, 1),"}"]; +match_term(Other) -> + io_lib:format("~p", [Other]). + +match_term_tuple(T, I) when I =< tuple_size(T) -> + [match_term(element(I, T)), + if I < tuple_size(T) -> ","; + true -> "" end|match_term_tuple(T, I+1)]; +match_term_tuple(_, _) -> + []. + +is_intlist(L) -> + lists:all(fun is_integer/1, L). diff --git a/lib/asn1/test/testValueTest.erl b/lib/asn1/test/testValueTest.erl new file mode 100644 index 0000000000..8a8e973621 --- /dev/null +++ b/lib/asn1/test/testValueTest.erl @@ -0,0 +1,114 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2014. All Rights Reserved. 
+%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% +%% +-module(testValueTest). + +-export([main/0]). + +main() -> + M = 'ValueTest', + + %% Basic types + 12 = M:'vANY'(), + true = M:'vBOOLEAN'(), + 12 = M:'vINTEGER'(), + 0 = M:'vINTEGERNNL'(), + button1 = M:'vENUMERATED'(), + [zero,two] = M:'vBS'(), + 'NULL' = M:'vNULL'(), + <<16#31,16#32,16#33>> = M:'vOS'(), + + %% OID + {2,1,1} = M:'vOD'(), + {1,2} = M:'integer-first'(), + {2,4,5} = M:'rel-oid-1'(), + {0,2,4,5} = M:'include-roid'(), + {1,2,1} = M:'include-oid'(), + {1,2,1,2,4,5,42} = M:'include-all'(), + + %% Character strings + "01234567" = M:'numericstring'(), + "PrintableString" = M:'printablestring'(), + "VisibleString" = M:'visiblestring'(), + [0,13] = M:'cr'(), + ["First line",[0,13],"Second line"] = M:'ia5string1'(), + [[5,5],[4,4],[6,6]] = M:'ia5string2'(), + "TeletexString" = M:'teletexstring'(), + "VideotexString" = M:'videotexstring'(), + "97100211-0500" = M:'utctime'(), + "19971002103130.5" = M:'generalizedtime'(), + "ObjectDescriptor" = M:'objectdescriptor'(), + "GraphicString" = M:'graphicstring'(), + "GeneralString" = M:'generalstring'(), + "BMPString" = M:'bmpstring1'(), + [0,0,0,65] = M:'latinCapitalLetterA'(), + [0,0,3,145] = M:'greekCapitalLetterSigma'(), + ["This is a capital A: ", + [0,0,0,65], + ", and a capital sigma: ", + [0,0,3,145], + "; try and spot the difference!"] = M:'my-universalstring'(), + + %% Integers + 42 = M:someInteger(), + 42 = M:otherInteger(), + {'IntegerSeq',42} = M:integerSeq1(), + + %% Value from object + 2 = M:'int-from-object-1'(), + 4 = M:'int-from-object-2'(), + roundtrip_error('II', 1), + roundtrip('II', 2), + roundtrip('II', 3), + roundtrip('II', 4), + roundtrip_error('II', 5), + + %% Recursive value definitions. + {'OctetStringSeq',<<16#40,16#41,16#42>>} = M:octetStringSeq1(), + <<16#40,16#41,16#42>> = M:otherOctetString(), + <<16#40,16#41,16#42>> = M:someOctetString(), + {'OctetStringSeq',<<16#40,16#41,16#42>>} = M:octetStringSeq2(), + {'OctetStringSeq',<<16#40,16#41,16#FF>>} = M:octetStringSeq3(), + <<16#40,16#41,16#FF>> = M:'os-1'(), + <<16#40,16#41,16#FF>> = M:'os-2'(), + + %% Recursive BIT STRING definitions. + {'BsSeq',<<2#101101:6>>,[c]} = M:bsSeq1(), + {'BsSeq',<<2#101101:6>>,[c]} = M:bsSeq2(), + {'BsSeq',<<2#101:3>>,[a,c]} = M:bsSeq3(), + <<2#101101:6>> = M:someBitString(), + <<2#101101:6>> = M:otherBitString(), + <<2#101:3>> = M:bsFromObject(), + <<2#101:3>> = M:bsFromObjectInd(), + [c] = M:someNamedBs(), + [c] = M:someOtherNamedBs(), + + ok. + + +roundtrip(T, V) -> + asn1_test_lib:roundtrip('ValueTest', T, V). + +roundtrip_error(T, V) -> + try asn1_test_lib:roundtrip('ValueTest', T, V) of + ok -> + test_server:fail() + catch _:_ -> + ok + end. diff --git a/lib/asn1/test/testX420.erl b/lib/asn1/test/testX420.erl deleted file mode 100644 index 4ddc55dc16..0000000000 --- a/lib/asn1/test/testX420.erl +++ /dev/null @@ -1,93 +0,0 @@ -%% -%% %CopyrightBegin% -%% -%% Copyright Ericsson AB 2008-2013. All Rights Reserved. 
-%% -%% The contents of this file are subject to the Erlang Public License, -%% Version 1.1, (the "License"); you may not use this file except in -%% compliance with the License. You should have received a copy of the -%% Erlang Public License along with this software. If not, it can be -%% retrieved online at http://www.erlang.org/. -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and limitations -%% under the License. -%% -%% %CopyrightEnd% -%% -%% -%%------------------------------------------------------------------- - --module(testX420). - --export([compile/3, ticket7759/2]). - --include_lib("test_server/include/test_server.hrl"). - - -compile(Erule, Options, Config) -> - Specs0 = specs(), - 99 = length(Specs0), - CaseDir = ?config(case_dir, Config), - Specs = [filename:join(x420, Spec) || Spec <- Specs0], - asn1_test_lib:compile_all(Specs, Config, [Erule,{i,CaseDir}|Options]). - -specs() -> - ["ACSE-1", "AuthenticationFramework", "BasicAccessControl", - "CertificateExtensions", "Character-Coding-Attributes", - "Character-Presentation-Attributes", "Character-Profile-Attributes", - "Colour-Attributes", "DOR-definition", "DSAOperationalAttributeTypes", - "Default-Value-Lists", "DirectoryAbstractService", - "DirectoryAccessProtocol", "DirectoryInformationShadowProtocol", - "DirectoryOperationalBindingManagementProtocol", - "DirectoryOperationalBindingTypes", "DirectoryProtectionMappings", - "DirectoryShadowAbstractService", "DirectorySystemProtocol", - "DistributedOperations", "Document-Profile-Descriptor", - "EnhancedSecurity", "External-References", "GULSProtectionMappings", - "GenericProtectingTransferSyntax", "Geo-Gr-Coding-Attributes", - "Geo-Gr-Presentation-Attributes", "Geo-Gr-Profile-Attributes", - "GulsSecurityExchanges", "GulsSecurityTransformations", - "HierarchicalOperationalBindings", "IPMSAbstractService", - "IPMSAutoActionTypes", "IPMSExtendedBodyPartTypes", - "IPMSExtendedBodyPartTypes2", "IPMSExtendedVoiceBodyPartType", - "IPMSFileTransferBodyPartType", "IPMSForwardedContentBodyPartType", - "IPMSForwardedReportBodyPartType", "IPMSFunctionalObjects", - "IPMSHeadingExtensions", "IPMSInformationObjects", - "IPMSMessageStoreAttributes", "IPMSObjectIdentifiers", - "IPMSObjectIdentifiers2", "IPMSSecurityExtensions", "IPMSUpperBounds", - "ISO-STANDARD-9541-FONT-ATTRIBUTE-SET", "ISO8571-FTAM", "ISO9541-SN", - "Identifiers-and-Expressions", "InformationFramework", - "Interchange-Data-Elements", "Layout-Descriptors", "Link-Descriptors", - "Location-Expressions", "Logical-Descriptors", "MHSObjectIdentifiers", - "MHSProtocolObjectIdentifiers", "MSAbstractService", - "MSAccessProtocol", "MSGeneralAttributeTypes", - "MSGeneralAutoActionTypes", "MSMatchingRules", "MSObjectIdentifiers", - "MSUpperBounds", "MTAAbstractService", "MTSAbstractService", - "MTSAbstractService88", "MTSAccessProtocol", "MTSObjectIdentifiers", - "MTSUpperBounds", "Notation", "ObjectIdentifiers", - "OperationalBindingManagement", "PKCS7", "PKCS7BodyPartType", - "Protected-Part-Descriptors", "ProtocolObjectIdentifiers", - "Raster-Gr-Coding-Attributes", "Raster-Gr-Presentation-Attributes", - "Raster-Gr-Profile-Attributes", "Reliable-Transfer-APDU", - "Remote-Operations-Abstract-Syntaxes", - "Remote-Operations-Generic-ROS-PDUs", - "Remote-Operations-Information-Objects-extensions", - "Remote-Operations-Information-Objects", - "Remote-Operations-Realizations", - 
"Remote-Operations-Useful-Definitions", "SelectedAttributeTypes", - "SeseAPDUs", "SpkmGssTokens", "Style-Descriptors", "Subprofiles", - "Temporal-Relationships", "Text-Units", "UpperBounds", - "UsefulDefinitions", "Videotex-Coding-Attributes"]. - -ticket7759(_Erule,_Config) -> - Encoded = encoded_msg(), - io:format("Testing ticket7759 ...~n",[]), - {ok, ContentInfo} = 'PKCS7':decode('ContentInfo',Encoded), - {'ContentInfo',_Id,PKCS7_content} = ContentInfo, - {ok,_} = 'PKCS7':decode('SignedData',PKCS7_content), - ok. - - -encoded_msg() -> - <<48,128,6,9,42,134,72,134,247,13,1,7,2,160,128,48,128,2,1,1,49,11,48,9,6,5,43,14,3,2,26,5,0,48,128,6,9,42,134,72,134,247,13,1,7,1,160,128,36,128,0,0,0,0,0,0, 49,130,1,192,48,130,1,188,2,1,1,48,50,48,38,49,17,48,15,6,3,85,4,3,12,8,65,100,109,105,110,67,65,49,49,17,48,15,6,3,85,4,10,12,8,69,82,73,67,83,83,79,78,2,8,15,151,245,186,21,23,240,96,48,9,6,5,43,14,3,2,26,5,0,160,129,229,48,17,6,10,96,134,72,1,134,248,69,1,9,2,49,3,19,1,51,48,17,6,10,96,134,72,1,134,248,69,1,9,3,49,3,19,1,51,48,24,6,9,42,134,72,134,247,13,1,9,3,49,11,6,9,42,134,72,134,247,13,1,7,1,48,28,6,9,42,134,72,134,247,13,1,9,5,49,15,23,13,48,56,49,50,49,48,48,57,53,52,50,51,90,48,28,6,10,96,134,72,1,134,248,69,1,9,7,49,14,19,12,49,53,50,56,49,52,50,52,48,57,53,53,48,32,6,10,96,134,72,1,134,248,69,1,9,5,49,18,4,16,165,115,177,71,78,88,239,113,78,56,98,98,18,202,217,235,48,32,6,10,96,134,72,1,134,248,69,1,9,6,49,18,4,16,227,174,230,251,43,153,252,65,11,93,231,83,34,18,55,46,48,35,6,9,42,134,72,134,247,13,1,9,4,49,22,4,20,218,57,163,238,94,107,75,13,50,85,191,239,149,96,24,144,175,216,7,9,48,13,6,9,42,134,72,134,247,13,1,1,1,5,0,4,129,128,106,233,116,125,140,51,133,173,63,41,54,138,214,211,89,215,169,125,98,77,16,222,216,240,211,79,125,111,87,186,73,63,253,204,107,102,177,63,174,197,224,212,231,172,149,246,33,68,223,67,102,93,64,152,152,5,216,102,247,134,36,197,150,236,57,77,56,138,95,71,204,31,23,149,241,213,78,172,165,249,100,187,12,45,19,57,67,120,54,63,15,239,41,217,127,61,254,60,201,104,68,3,135,214,206,93,253,255,192,94,56,107,68,210,57,61,41,249,47,156,130,244,52,12,163,216,236,69,0,0,0,0,0,0>>. diff --git a/lib/asn1/test/test_compile_options.erl b/lib/asn1/test/test_compile_options.erl index 7f358e863c..4b6357a395 100644 --- a/lib/asn1/test/test_compile_options.erl +++ b/lib/asn1/test/test_compile_options.erl @@ -24,7 +24,7 @@ -export([wrong_path/1,comp/2,path/1,ticket_6143/1,noobj/1, - record_name_prefix/1,verbose/1,warnings_as_errors/1]). + record_name_prefix/1,verbose/1]). %% OTP-5689 wrong_path(Config) -> @@ -132,43 +132,6 @@ verbose(Config) when is_list(Config) -> ?line [] = test_server:capture_get(), ok. 
-warnings_as_errors(Config) when is_list(Config) -> - PrivDir = ?config(priv_dir,Config), - Asn1File = filename:join([PrivDir,"WERROR.asn1"]), - OutFile = filename:join([PrivDir,"WERROR.erl"]), - Opts = [{outdir,PrivDir},noobj,verbose], - - %% Generate WERR.asn to emit warning - %% Warning: Wrong format of type/value - %% false/{'Externalvaluereference',_,'WERR',noInvokeId} - Warn = <<"WERROR DEFINITIONS IMPLICIT TAGS ::=\n" - "\n" - "BEGIN\n" - "\n" - "InvokeId ::= CHOICE\n" - "{\n" - " present INTEGER,\n" - " absent NULL\n" - "}\n" - "\n" - "noInvokeId InvokeId ::= absent:NULL\n" - "\n" - "NoInvokeId InvokeId ::= {noInvokeId}\n" - "\n" - "END -- end of useful definitions.\n">>, - ?line ok = file:write_file(Asn1File, Warn), - - %% Test warnings_as_errors compile - ?line false = filelib:is_regular(OutFile), - ?line {error, _} = asn1ct:compile(Asn1File, [warnings_as_errors|Opts]), - ?line false = filelib:is_regular(OutFile), - - %% Test normal compile - ?line ok = asn1ct:compile(Asn1File, Opts), - ?line true = filelib:is_regular(OutFile), - ?line ok = file:delete(OutFile), - ok. - outfiles_check(OutDir) -> outfiles_check(OutDir,outfiles1()). diff --git a/lib/common_test/doc/src/ct_master_chapter.xml b/lib/common_test/doc/src/ct_master_chapter.xml index 37a0805055..adfe79e41a 100644 --- a/lib/common_test/doc/src/ct_master_chapter.xml +++ b/lib/common_test/doc/src/ct_master_chapter.xml @@ -198,7 +198,7 @@ <section> <title>Automatic startup of test target nodes</title> <marker id="ct_slave"></marker> - <p>Is is possible to automatically start, and perform initial actions, on + <p>It is possible to automatically start, and perform initial actions, on test target nodes by using the test specification term <c>init</c>.</p> <p>Currently, two sub-terms are supported, <c>node_start</c> and <c>eval</c>.</p> <p>Example:</p> diff --git a/lib/common_test/src/Makefile b/lib/common_test/src/Makefile index 8d74546880..2723b066f0 100644 --- a/lib/common_test/src/Makefile +++ b/lib/common_test/src/Makefile @@ -1,7 +1,7 @@ # # %CopyrightBegin% # -# Copyright Ericsson AB 2003-2013. All Rights Reserved. +# Copyright Ericsson AB 2003-2014. All Rights Reserved. # # The contents of this file are subject to the Erlang Public License, # Version 1.1, (the "License"); you may not use this file except in @@ -75,7 +75,8 @@ MODULES= \ ct_conn_log_h \ cth_conn_log \ ct_groups \ - ct_property_test + ct_property_test \ + ct_release_test TARGET_MODULES= $(MODULES:%=$(EBIN)/%) BEAM_FILES= $(MODULES:%=$(EBIN)/%.$(EMULATOR)) diff --git a/lib/common_test/src/ct_netconfc.erl b/lib/common_test/src/ct_netconfc.erl index 85fb1ea8d2..af82f2dcbf 100644 --- a/lib/common_test/src/ct_netconfc.erl +++ b/lib/common_test/src/ct_netconfc.erl @@ -190,6 +190,7 @@ get_config/4, edit_config/3, edit_config/4, + edit_config/5, delete_config/2, delete_config/3, copy_config/3, @@ -678,15 +679,39 @@ get_config(Client, Source, Filter, Timeout) -> %%---------------------------------------------------------------------- %% @spec edit_config(Client, Target, Config) -> Result -%% @equiv edit_config(Client, Target, Config, infinity) +%% @equiv edit_config(Client, Target, Config, [], infinity) edit_config(Client, Target, Config) -> edit_config(Client, Target, Config, ?DEFAULT_TIMEOUT). 
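%% Usage sketch for the optional-parameters variant of edit_config that
%% this patch adds (documented in the hunks below). The OptParams value
%% is the one given in the documentation; Client, the target datastore
%% and the config subtree are placeholders.
edit_config_example(Client) ->
    Config = {server, [{name, ["myserver"]}]},
    OptParams = [{'default-operation', ["none"]},
                 {'error-option', ["rollback-on-error"]}],
    ok = ct_netconfc:edit_config(Client, running, Config, OptParams, 5000).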
%%---------------------------------------------------------------------- --spec edit_config(Client, Target, Config, Timeout) -> Result when +-spec edit_config(Client, Target, Config, OptParamsOrTimeout) -> Result when Client :: client(), Target :: netconf_db(), Config :: simple_xml(), + OptParamsOrTimeout :: [simple_xml()] | timeout(), + Result :: ok | {error,error_reason()}. +%% @doc +%% +%% If `OptParamsOrTimeout' is a timeout value, then this is +%% equivalent to {@link edit_config/5. edit_config(Client, Target, +%% Config, [], Timeout)}. +%% +%% If `OptParamsOrTimeout' is a list of simple XML, then this is +%% equivalent to {@link edit_config/5. edit_config(Client, Target, +%% Config, OptParams, infinity)}. +%% +%% @end +edit_config(Client, Target, Config, Timeout) when ?is_timeout(Timeout) -> + edit_config(Client, Target, Config, [], Timeout); +edit_config(Client, Target, Config, OptParams) when is_list(OptParams) -> + edit_config(Client, Target, Config, OptParams, ?DEFAULT_TIMEOUT). + +%%---------------------------------------------------------------------- +-spec edit_config(Client, Target, Config, OptParams, Timeout) -> Result when + Client :: client(), + Target :: netconf_db(), + Config :: simple_xml(), + OptParams :: [simple_xml()], Timeout :: timeout(), Result :: ok | {error,error_reason()}. %% @doc Edit configuration data. @@ -695,10 +720,20 @@ edit_config(Client, Target, Config) -> %% include `:candidate' or `:startup' in its list of %% capabilities. %% +%% `OptParams' can be used for specifying optional parameters +%% (`default-operation', `test-option' or `error-option') that will be +%% added to the `edit-config' request. The value must be a list +%% containing valid simple XML, for example +%% +%% ``` +%% [{'default-operation', ["none"]}, +%% {'error-option', ["rollback-on-error"]}] +%%''' +%% %% @end %%---------------------------------------------------------------------- -edit_config(Client, Target, Config, Timeout) -> - call(Client, {send_rpc_op, edit_config, [Target,Config], Timeout}). +edit_config(Client, Target, Config, OptParams, Timeout) -> + call(Client, {send_rpc_op, edit_config, [Target,Config,OptParams], Timeout}). %%---------------------------------------------------------------------- @@ -1087,6 +1122,7 @@ handle_msg({get_event_streams=Op,Streams,Timeout}, From, State) -> SimpleXml = encode_rpc_operation(get,[Filter]), do_send_rpc(Op, SimpleXml, Timeout, From, State). +%% @private handle_msg({ssh_cm, CM, {data, Ch, _Type, Data}}, State) -> ssh_connection:adjust_window(CM,Ch,size(Data)), handle_data(Data, State); @@ -1235,8 +1271,8 @@ encode_rpc_operation(get,[Filter]) -> {get,filter(Filter)}; encode_rpc_operation(get_config,[Source,Filter]) -> {'get-config',[{source,[Source]}] ++ filter(Filter)}; -encode_rpc_operation(edit_config,[Target,Config]) -> - {'edit-config',[{target,[Target]},{config,[Config]}]}; +encode_rpc_operation(edit_config,[Target,Config,OptParams]) -> + {'edit-config',[{target,[Target]}] ++ OptParams ++ [{config,[Config]}]}; encode_rpc_operation(delete_config,[Target]) -> {'delete-config',[{target,[Target]}]}; encode_rpc_operation(copy_config,[Target,Source]) -> @@ -1711,6 +1747,7 @@ log(#connection{host=Host,port=Port,name=Name},Action,Data) -> %% Log callback - called from the error handler process +%% @private format_data(How,Data) -> %% Assuming that the data is encoded as UTF-8. 
If it is not, then %% the printout might be wrong, but the format function will not diff --git a/lib/common_test/src/ct_release_test.erl b/lib/common_test/src/ct_release_test.erl new file mode 100644 index 0000000000..3f0b5bda67 --- /dev/null +++ b/lib/common_test/src/ct_release_test.erl @@ -0,0 +1,936 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2014-2015. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% +%%----------------------------------------------------------------- +%% @doc EXPERIMENTAL support for testing of upgrade. +%% +%% This is a library module containing support for test of release +%% related activities in one or more applications. Currenty it +%% supports upgrade only. +%% +%% == Configuration == +%% +%% In order to find version numbers of applications to upgrade from, +%% `{@module}' needs to access and start old OTP +%% releases. A `common_test' configuration file can be used for +%% specifying the location of such releases, for example: +%% +%% ``` +%% %% old-rels.cfg +%% {otp_releases,[{r16b,"/path/to/R16B03-1/bin/erl"}, +%% {'17',"/path/to/17.3/bin/erl"}]}.''' +%% +%% The configuration file should preferably point out the latest patch +%% level on each major release. +%% +%% If no such configuration file is given, {@link init/1} will return +%% `{skip,Reason}' and any attempt at running {@link upgrade/4} +%% will fail. +%% +%% == Callback functions == +%% +%% The following functions should be exported from a {@module} +%% callback module. +%% +%% All callback functions are called on the node where the upgrade is +%% executed. +%% +%% <dl> +%% <dt>Module:upgrade_init(CtData,State) -> NewState</dt> +%% <dd>Types: +%% +%% <b><code>CtData = {@link ct_data()}</code></b><br/> +%% <b><code>State = NewState = cb_state()</code></b> +%% +%% Initialyze system before upgrade test starts. +%% +%% This function is called before the upgrade is started. All +%% applications given in {@link upgrade/4} are already started by +%% the boot script, so this callback is intended for additional +%% initialization, if necessary. +%% +%% <code>CtData</code> is an opaque data structure which shall be used +%% in any call to <code>ct_release_test</code> inside the callback. +%% +%% Example: +%% +%% ``` +%% upgrade_init(CtData,State) -> +%% {ok,{FromVsn,ToVsn}} = ct_release_test:get_app_vsns(CtData,myapp), +%% open_connection(State).''' +%% </dd> +%% +%% <dt>Module:upgrade_upgraded(CtData,State) -> NewState</dt> +%% <dd>Types: +%% +%% <b><code>CtData = {@link ct_data()}</code></b><br/> +%% <b><code>State = NewState = cb_state()</code></b> +%% +%% Check that upgrade was successful. +%% +%% This function is called after the release_handler has +%% successfully unpacked and installed the new release, and it has +%% been made permanent. It allows application specific checks to +%% ensure that the upgrade was successful. 
+%% +%% <code>CtData</code> is an opaque data structure which shall be used +%% in any call to <code>ct_release_test</code> inside the callback. +%% +%% Example: +%% +%% ``` +%% upgrade_upgraded(CtData,State) -> +%% check_connection_still_open(State).''' +%% </dd> +%% +%% <dt>Module:upgrade_downgraded(CtData,State) -> NewState</dt> +%% <dd>Types: +%% +%% <b><code>CtData = {@link ct_data()}</code></b><br/> +%% <b><code>State = NewState = cb_state()</code></b> +%% +%% Check that downgrade was successful. +%% +%% This function is called after the release_handler has +%% successfully re-installed the original release, and it has been +%% made permanent. It allows application specific checks to ensure +%% that the downgrade was successful. +%% +%% <code>CtData</code> is an opaque data structure which shall be used +%% in any call to <code>ct_release_test</code> inside the callback. +%% +%% Example: +%% +%% ``` +%% upgrade_downgraded(CtData,State) -> +%% check_connection_closed(State).''' +%% </dd> +%% </dl> +%% @end +%%----------------------------------------------------------------- +-module(ct_release_test). + +-export([init/1, upgrade/4, cleanup/1, get_app_vsns/2, get_appup/2]). + +-include_lib("kernel/include/file.hrl"). + +%%----------------------------------------------------------------- +-define(testnode, otp_upgrade). +-define(exclude_apps, [hipe, typer, dialyzer]). % never include these apps + +%%----------------------------------------------------------------- +-record(ct_data, {from,to}). + +%%----------------------------------------------------------------- +-type config() :: [{atom(),term()}]. +-type cb_state() :: term(). +-opaque ct_data() :: #ct_data{}. +-export_type([ct_data/0]). + +-callback upgrade_init(ct_data(),cb_state()) -> cb_state(). +-callback upgrade_upgraded(ct_data(),cb_state()) -> cb_state(). +-callback upgrade_downgraded(ct_data(),cb_state()) -> cb_state(). + +%%----------------------------------------------------------------- +-spec init(Config) -> Result when + Config :: config(), + Result :: config() | SkipOrFail, + SkipOrFail :: {skip,Reason} | {fail,Reason}. +%% @doc Initialize `{@module}'. +%% +%% This function can be called from any of the +%% `init_per_*' functions in the test suite. It updates +%% the given `Config' with data that will be +%% used by future calls to other functions in this module. The +%% returned configuration must therefore also be returned from +%% the calling `init_per_*'. +%% +%% If the initialization fails, e.g. if a required release can +%% not be found, the function returns `{skip,Reason}'. In +%% this case the other test support functions in this mudule +%% can not be used. +%% +%% Example: +%% +%% ``` +%% init_per_suite(Config) -> +%% ct_release_test:init(Config).''' +%% +init(Config) -> + try init_upgrade_test() of + {Major,Minor} -> + [{release_test,[{major,Major},{minor,Minor}]} | Config] + catch throw:Thrown -> + Thrown + end. + +%%----------------------------------------------------------------- +-spec upgrade(App,Level,Callback,Config) -> any() when + App :: atom(), + Level :: minor | major, + Callback :: {module(),InitState}, + InitState :: cb_state(), + Config :: config(); + (Apps,Level,Callback,Config) -> any() when + Apps :: [App], + App :: atom(), + Level :: minor | major, + Callback :: {module(),InitState}, + InitState :: cb_state(), + Config :: config(). +%% @doc Test upgrade of the given application(s). +%% +%% This function can be called from a test case. 
+%% `Config' has been initialized by calling {@link
+%% init/1} prior to this, for example from `init_per_suite/1'.
+%%
+%% Upgrade tests are performed as follows:
+%%
+%% <ol>
+%% <li>Figure out which OTP release to test upgrade
+%% from. Start a node running that release and find the
+%% application versions on that node. Terminate the
+%% node.</li>
+%% <li>Figure out all dependencies for the applications under
+%% test.</li>
+%% <li>Create a release containing the core
+%% applications `kernel', `stdlib' and `sasl'
+%% in addition to the application(s) under test and all
+%% dependencies of these. The versions of the applications
+%% under test will be the ones found on the OTP release to
+%% upgrade from. The versions of all other applications will
+%% be those found on the current node, i.e. the common_test
+%% node. This is the "From"-release.</li>
+%% <li>Create another release containing the same
+%% applications as in the previous step, but with all
+%% application versions taken from the current node. This is
+%% the "To"-release.</li>
+%% <li>Install the "From"-release and start a new node
+%% running this release.</li>
+%% <li>Perform the upgrade test and allow customized
+%% control by using callbacks:
+%% <ol>
+%% <li>Callback: `upgrade_init/2'</li>
+%% <li>Unpack the new release</li>
+%% <li>Install the new release</li>
+%% <li>Callback: `upgrade_upgraded/2'</li>
+%% <li>Install the original release</li>
+%% <li>Callback: `upgrade_downgraded/2'</li>
+%% </ol>
+%% </li>
+%% </ol>
+%%
+%% `App' or `Apps'
+%% specifies the applications under test, i.e. the applications
+%% which shall be upgraded. All other applications that are
+%% included have the same versions in the "From"- and
+%% "To"-releases and will therefore not be upgraded.
+%%
+%% `Level' specifies which OTP release to
+%% pick the "From" versions from.
+%% <dl>
+%% <dt>major</dt>
+%% <dd>From versions are picked from the previous major
+%% release. For example, if the test is run on an OTP-17
+%% node, `{@module}' will pick the application
+%% "From" versions from an OTP installation running OTP
+%% R16B.</dd>
+%%
+%% <dt>minor</dt>
+%% <dd>From versions are picked from the current major
+%% release. For example, if the test is run on an OTP-17
+%% node, `{@module}' will pick the application
+%% "From" versions from an OTP installation running an
+%% earlier patch level of OTP-17.</dd>
+%% </dl>
+%%
+%% The application "To" versions are always picked from the
+%% current node, i.e. the common_test node.
+%%
+%% `Callback' specifies the module (normally the
+%% test suite) which implements the {@section Callback functions}, and
+%% the initial value of the `State' variable used in these
+%% functions.
+%%
+%% `Config' is the input argument received
+%% in the test case function.
+%%
+%% Example:
+%%
+%% ```
+%% minor_upgrade(Config) ->
+%%     ct_release_test:upgrade(ssl,minor,{?MODULE,[]},Config).
+%% '''
+%%
+upgrade(App,Level,Callback,Config) when is_atom(App) ->
+    upgrade([App],Level,Callback,Config);
+upgrade(Apps,Level,Callback,Config) ->
+    Dir = proplists:get_value(priv_dir,Config),
+    CreateDir = filename:join([Dir,Level,create]),
+    InstallDir = filename:join([Dir,Level,install]),
+    ok = filelib:ensure_dir(filename:join(CreateDir,"*")),
+    ok = filelib:ensure_dir(filename:join(InstallDir,"*")),
+    try upgrade(Apps,Level,Callback,CreateDir,InstallDir,Config) of
+        ok ->
+            %%rm_rf(CreateDir),
+            Tars = filelib:wildcard(filename:join(CreateDir,"*.tar.gz")),
+            _ = [file:delete(Tar) || Tar <- Tars],
+            rm_rf(InstallDir),
+            ok
+    catch throw:{fail,Reason} ->
+            ct:fail(Reason);
+          throw:{skip,Reason} ->
+            rm_rf(CreateDir),
+            rm_rf(InstallDir),
+            {skip,Reason}
+    after
+        %% Brutally kill all nodes that erroneously survived the test.
+        %% Note, we will not reach this if the test fails with a
+        %% timetrap timeout in the test suite! Thus we can have
+        %% hanging nodes...
+        Nodes = nodes(),
+        [rpc:call(Node,erlang,halt,[]) || Node <- Nodes]
+    end.
+
+%%-----------------------------------------------------------------
+-spec cleanup(Config) -> Result when
+      Config :: config(),
+      Result :: config().
+%% @doc Clean up after tests.
+%%
+%% This function shall be called from the `end_per_*' function
+%% complementing the `init_per_*' function where {@link init/1}
+%% is called.
+%%
+%% It cleans up after the test, for example kills hanging
+%% nodes.
+%%
+%% Example:
+%%
+%% ```
+%% end_per_suite(Config) ->
+%%     ct_release_test:cleanup(Config).'''
+%%
+cleanup(Config) ->
+    Nodes = [node_name(?testnode)|nodes()],
+    [rpc:call(Node,erlang,halt,[]) || Node <- Nodes],
+    Config.
+
+%%-----------------------------------------------------------------
+-spec get_app_vsns(CtData,App) -> {ok,{From,To}} | {error,Reason} when
+      CtData :: ct_data(),
+      App :: atom(),
+      From :: string(),
+      To :: string(),
+      Reason :: {app_not_found,App}.
+%% @doc Get versions involved in this upgrade for the given application.
+%%
+%% This function can be called from inside any of the callback
+%% functions. It returns the old (From) and new (To) versions involved
+%% in the upgrade/downgrade test for the given application.
+%%
+%% <code>CtData</code> must be the first argument received in the
+%% calling callback function - an opaque data structure set by
+%% <code>ct_release_test</code>.
+get_app_vsns(#ct_data{from=FromApps,to=ToApps},App) ->
+    case {lists:keyfind(App,1,FromApps),lists:keyfind(App,1,ToApps)} of
+        {{App,FromVsn,_},{App,ToVsn,_}} ->
+            {ok,{FromVsn,ToVsn}};
+        _ ->
+            {error,{app_not_found,App}}
+    end.
+
+%%-----------------------------------------------------------------
+-spec get_appup(CtData,App) -> {ok,Appup} | {error,Reason} when
+      CtData :: ct_data(),
+      App :: atom(),
+      Appup :: {From,To,Up,Down},
+      From :: string(),
+      To :: string(),
+      Up :: [Instr],
+      Down :: [Instr],
+      Instr :: term(),
+      Reason :: {app_not_found,App} | {vsn_not_found,{App,From}}.
+%% @doc Get appup instructions for the given application.
+%%
+%% This function can be called from inside any of the callback
+%% functions. It reads the appup file for the given application and
+%% returns the instructions for upgrade and downgrade for the versions
+%% in the test.
+%%
+%% <code>CtData</code> must be the first argument received in the
+%% calling callback function - an opaque data structure set by
+%% <code>ct_release_test</code>.
+%%
+%% See the reference manual for appup files for type definitions of the
+%% instructions.
+get_appup(#ct_data{from=FromApps,to=ToApps},App) -> + case lists:keyfind(App,1,ToApps) of + {App,ToVsn,ToDir} -> + Appup = filename:join([ToDir, "ebin", atom_to_list(App)++".appup"]), + {ok, [{ToVsn, Ups, Downs}]} = file:consult(Appup), + {App,FromVsn,_} = lists:keyfind(App,1,FromApps), + case {systools_relup:appup_search_for_version(FromVsn,Ups), + systools_relup:appup_search_for_version(FromVsn,Downs)} of + {{ok,Up},{ok,Down}} -> + {ok,{FromVsn,ToVsn,Up,Down}}; + _ -> + {error,{vsn_not_found,{App,FromVsn}}} + end; + false -> + {error,{app_not_found,App}} + end. + +%%----------------------------------------------------------------- +init_upgrade_test() -> + %% Check that a real release is running, not e.g. cerl + ok = application:ensure_started(sasl), + case release_handler:which_releases() of + [{_,_,[],_}] -> + %% Fake release, no applications + throw({skip, "Need a real release running to create other releases"}); + _ -> + Major = init_upgrade_test(major), + Minor = init_upgrade_test(minor), + {Major,Minor} + end. + +init_upgrade_test(Level) -> + {FromVsn,ToVsn} = get_rels(Level), + OldRel = + case test_server:is_release_available(FromVsn) of + true -> + {release,FromVsn}; + false -> + case ct:get_config({otp_releases,list_to_atom(FromVsn)}) of + undefined -> + false; + Prog0 -> + case os:find_executable(Prog0) of + false -> + false; + Prog -> + {prog,Prog} + end + end + end, + case OldRel of + false -> + ct:log("Release ~p is not available." + " Upgrade on '~p' level can not be tested.", + [FromVsn,Level]), + undefined; + _ -> + init_upgrade_test(FromVsn,ToVsn,OldRel) + end. + +get_rels(major) -> + %% Given that the current major release is X, then this is an + %% upgrade from major release X-1 to the current release. + Current = erlang:system_info(otp_release), + PreviousMajor = previous_major(Current), + {PreviousMajor,Current}; +get_rels(minor) -> + %% Given that this is a (possibly) patched version of major + %% release X, then this is an upgrade from major release X to the + %% current release. + CurrentMajor = erlang:system_info(otp_release), + Current = CurrentMajor++"_patched", + {CurrentMajor,Current}. + +init_upgrade_test(FromVsn,ToVsn,OldRel) -> + OtpRel = list_to_atom("otp-"++FromVsn), + ct:log("Starting node to fetch application versions to upgrade from"), + {ok,Node} = test_server:start_node(OtpRel,peer,[{erl,[OldRel]}]), + {Apps,Path} = fetch_all_apps(Node), + test_server:stop_node(Node), + {FromVsn,ToVsn,Apps,Path}. + +fetch_all_apps(Node) -> + Paths = rpc:call(Node,code,get_path,[]), + %% Find all possible applications in the path + AppFiles = + lists:flatmap( + fun(P) -> + filelib:wildcard(filename:join(P,"*.app")) + end, + Paths), + %% Figure out which version of each application is running on this + %% node. Using application:load and application:get_key instead of + %% reading the .app files since there might be multiple versions + %% of a .app file and we only want the one that is actually + %% running. + AppVsns = + lists:flatmap( + fun(F) -> + A = list_to_atom(filename:basename(filename:rootname(F))), + _ = rpc:call(Node,application,load,[A]), + case rpc:call(Node,application,get_key,[A,vsn]) of + {ok,V} -> [{A,V}]; + _ -> [] + end + end, + AppFiles), + ErtsVsn = rpc:call(Node, erlang, system_info, [version]), + {[{erts,ErtsVsn}|AppVsns], Paths}. 
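[Editorial sketch, not part of the patch] Pulling the API documented above together: a minimal common_test suite that acts as its own ct_release_test callback module. The suite name, the application name myapp and the empty callback state are placeholders.

-module(myapp_upgrade_SUITE).
-export([all/0, init_per_suite/1, end_per_suite/1,
         minor_upgrade/1, major_upgrade/1]).
-export([upgrade_init/2, upgrade_upgraded/2, upgrade_downgraded/2]).

all() -> [minor_upgrade, major_upgrade].

%% Let ct_release_test prepare (or skip) the suite, and clean up afterwards.
init_per_suite(Config) ->
    ct_release_test:init(Config).

end_per_suite(Config) ->
    ct_release_test:cleanup(Config).

%% Upgrade myapp from an earlier patch level and from the previous
%% major release, respectively.
minor_upgrade(Config) ->
    ct_release_test:upgrade(myapp, minor, {?MODULE,[]}, Config).

major_upgrade(Config) ->
    ct_release_test:upgrade(myapp, major, {?MODULE,[]}, Config).

%% Callbacks, executed on the node being upgraded.
upgrade_init(CtData, State) ->
    {ok,{FromVsn,ToVsn}} = ct_release_test:get_app_vsns(CtData, myapp),
    io:format("upgrading myapp ~ts -> ~ts~n", [FromVsn,ToVsn]),
    State.

upgrade_upgraded(_CtData, State) ->
    State.

upgrade_downgraded(_CtData, State) ->
    State.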
+ + +%%----------------------------------------------------------------- +upgrade(Apps,Level,Callback,CreateDir,InstallDir,Config) -> + ct:log("Test upgrade of the following applications: ~p",[Apps]), + ct:log(".rel files and start scripts are created in:~n~ts",[CreateDir]), + ct:log("The release is installed in:~n~ts",[InstallDir]), + case proplists:get_value(release_test,Config) of + undefined -> + throw({fail,"ct_release_test:init/1 not run"}); + RTConfig -> + case proplists:get_value(Level,RTConfig) of + undefined -> + throw({skip,"Old release not available"}); + Data -> + {FromVsn,FromRel,FromAppsVsns} = + target_system(Apps, CreateDir, InstallDir, Data), + {ToVsn,ToRel,ToAppsVsns} = + upgrade_system(Apps, FromRel, CreateDir, + InstallDir, Data), + ct:log("Upgrade from: OTP-~ts, ~p",[FromVsn, FromAppsVsns]), + ct:log("Upgrade to: OTP-~ts, ~p",[ToVsn, ToAppsVsns]), + do_upgrade(Callback, FromVsn, FromAppsVsns, ToRel, + ToAppsVsns, InstallDir) + end + end. + +%%% This is similar to sasl/examples/src/target_system.erl, but with +%%% the following adjustments: +%%% - add a log directory +%%% - use an own 'start' script +%%% - chmod 'start' and 'start_erl' +target_system(Apps,CreateDir,InstallDir,{FromVsn,_,AllAppsVsns,Path}) -> + RelName0 = "otp-"++FromVsn, + + AppsVsns = [{A,V} || {A,V} <- AllAppsVsns, lists:member(A,Apps)], + {RelName,ErtsVsn} = create_relfile(AppsVsns,CreateDir,RelName0,FromVsn), + + %% Create .script and .boot + ok = systools(make_script,[RelName,[{path,Path}]]), + + %% Create base tar file - i.e. erts and all apps + ok = systools(make_tar,[RelName,[{erts,code:root_dir()}, + {path,Path}]]), + + %% Unpack the tar to complete the installation + erl_tar:extract(RelName ++ ".tar.gz", [{cwd, InstallDir}, compressed]), + + %% Add bin and log dirs + BinDir = filename:join([InstallDir, "bin"]), + file:make_dir(BinDir), + file:make_dir(filename:join(InstallDir,"log")), + + %% Delete start scripts - they will be added later + ErtsBinDir = filename:join([InstallDir, "erts-" ++ ErtsVsn, "bin"]), + file:delete(filename:join([ErtsBinDir, "erl"])), + file:delete(filename:join([ErtsBinDir, "start"])), + file:delete(filename:join([ErtsBinDir, "start_erl"])), + + %% Copy .boot to bin/start.boot + copy_file(RelName++".boot",filename:join([BinDir, "start.boot"])), + + %% Copy scripts from erts-xxx/bin to bin + copy_file(filename:join([ErtsBinDir, "epmd"]), + filename:join([BinDir, "epmd"]), [preserve]), + copy_file(filename:join([ErtsBinDir, "run_erl"]), + filename:join([BinDir, "run_erl"]), [preserve]), + copy_file(filename:join([ErtsBinDir, "to_erl"]), + filename:join([BinDir, "to_erl"]), [preserve]), + + %% create start_erl.data, sys.config and start.src + StartErlData = filename:join([InstallDir, "releases", "start_erl.data"]), + write_file(StartErlData, io_lib:fwrite("~s ~s~n", [ErtsVsn, FromVsn])), + SysConfig = filename:join([InstallDir, "releases", FromVsn, "sys.config"]), + write_file(SysConfig, "[]."), + StartSrc = filename:join(ErtsBinDir,"start.src"), + write_file(StartSrc,start_script()), + ok = file:change_mode(StartSrc,8#0755), + + %% Make start_erl executable + %% (this has been fixed in OTP 17 - it is now installed with + %% $INSTALL_SCRIPT instead of $INSTALL_DATA and should therefore + %% be executable from the start) + ok = file:change_mode(filename:join(ErtsBinDir,"start_erl.src"),8#0755), + + %% Substitute variables in erl.src, start.src and start_erl.src + %% (.src found in erts-xxx/bin - result stored in bin) + subst_src_scripts(["erl", "start", "start_erl"], 
ErtsBinDir, BinDir, + [{"FINAL_ROOTDIR", InstallDir}, {"EMU", "beam"}], + [preserve]), + + %% Create RELEASES + RelFile = filename:join([InstallDir, "releases", + filename:basename(RelName) ++ ".rel"]), + release_handler:create_RELEASES(InstallDir, RelFile), + + {FromVsn, RelName,AppsVsns}. + +systools(Func,Args) -> + case apply(systools,Func,Args) of + ok -> + ok; + error -> + throw({fail,{systools,Func,Args}}) + end. + +%%% This is a copy of $ROOT/erts-xxx/bin/start.src, modified to add +%%% sname and heart +start_script() -> + ["#!/bin/sh\n" + "ROOTDIR=%FINAL_ROOTDIR%\n" + "\n" + "if [ -z \"$RELDIR\" ]\n" + "then\n" + " RELDIR=$ROOTDIR/releases\n" + "fi\n" + "\n" + "START_ERL_DATA=${1:-$RELDIR/start_erl.data}\n" + "\n" + "$ROOTDIR/bin/run_erl -daemon /tmp/ $ROOTDIR/log \"exec $ROOTDIR/bin/start_erl $ROOTDIR $RELDIR $START_ERL_DATA -sname ",atom_to_list(?testnode)," -heart\"\n"]. + +%%% Create a release containing the current (the test node) OTP +%%% release, including relup to allow upgrade from an earlier OTP +%%% release. +upgrade_system(Apps, FromRel, CreateDir, InstallDir, {_,ToVsn,_,_}) -> + ct:log("Generating release to upgrade to."), + + RelName0 = "otp-"++ToVsn, + + AppsVsns = get_vsns(Apps), + {RelName,_} = create_relfile(AppsVsns,CreateDir,RelName0,ToVsn), + FromPath = filename:join([InstallDir,lib,"*",ebin]), + + ok = systools(make_script,[RelName]), + ok = systools(make_relup,[RelName,[FromRel],[FromRel], + [{path,[FromPath]}, + {outdir,CreateDir}]]), + SysConfig = filename:join([CreateDir, "sys.config"]), + write_file(SysConfig, "[]."), + + ok = systools(make_tar,[RelName,[{erts,code:root_dir()}]]), + + {ToVsn, RelName,AppsVsns}. + +%%% Start a new node running the release from target_system/6 +%%% above. Then upgrade to the system from upgrade_system/6. +do_upgrade({Cb,InitState},FromVsn,FromAppsVsns,ToRel,ToAppsVsns,InstallDir) -> + ct:log("Upgrade test attempting to start node.~n" + "If test fails, logs can be found in:~n~ts", + [filename:join(InstallDir,log)]), + Start = filename:join([InstallDir,bin,start]), + {ok,Node} = start_node(Start,FromVsn,FromAppsVsns), + + %% Add path to this module, to allow calls to get_appup/2 + Dir = filename:dirname(code:which(?MODULE)), + _ = rpc:call(Node,code,add_pathz,[Dir]), + + ct:log("Node started: ~p",[Node]), + CtData = #ct_data{from = [{A,V,code:lib_dir(A)} || {A,V} <- FromAppsVsns], + to=[{A,V,code:lib_dir(A)} || {A,V} <- ToAppsVsns]}, + State1 = do_callback(Node,Cb,upgrade_init,[CtData,InitState]), + + [{"OTP upgrade test",FromVsn,_,permanent}] = + rpc:call(Node,release_handler,which_releases,[]), + ToRelName = filename:basename(ToRel), + copy_file(ToRel++".tar.gz", + filename:join([InstallDir,releases,ToRelName++".tar.gz"])), + ct:log("Unpacking new release"), + {ok,ToVsn} = rpc:call(Node,release_handler,unpack_release,[ToRelName]), + [{"OTP upgrade test",ToVsn,_,unpacked}, + {"OTP upgrade test",FromVsn,_,permanent}] = + rpc:call(Node,release_handler,which_releases,[]), + ct:log("Installing new release"), + case rpc:call(Node,release_handler,install_release,[ToVsn]) of + {ok,FromVsn,_} -> + ok; + {continue_after_restart,FromVsn,_} -> + ct:log("Waiting for node restart") + end, + %% even if install_release returned {ok,...} there might be an + %% emulator restart (instruction restart_emulator), so we must + %% always make sure the node is running. 
+ wait_node_up(current,ToVsn,ToAppsVsns), + + [{"OTP upgrade test",ToVsn,_,current}, + {"OTP upgrade test",FromVsn,_,permanent}] = + rpc:call(Node,release_handler,which_releases,[]), + ct:log("Permanenting new release"), + ok = rpc:call(Node,release_handler,make_permanent,[ToVsn]), + [{"OTP upgrade test",ToVsn,_,permanent}, + {"OTP upgrade test",FromVsn,_,old}] = + rpc:call(Node,release_handler,which_releases,[]), + + State2 = do_callback(Node,Cb,upgrade_upgraded,[CtData,State1]), + + ct:log("Re-installing old release"), + case rpc:call(Node,release_handler,install_release,[FromVsn]) of + {ok,FromVsn,_} -> + ok; + {continue_after_restart,FromVsn,_} -> + ct:log("Waiting for node restart") + end, + %% even if install_release returned {ok,...} there might be an + %% emulator restart (instruction restart_emulator), so we must + %% always make sure the node is running. + wait_node_up(current,FromVsn,FromAppsVsns), + + [{"OTP upgrade test",ToVsn,_,permanent}, + {"OTP upgrade test",FromVsn,_,current}] = + rpc:call(Node,release_handler,which_releases,[]), + ct:log("Permanenting old release"), + ok = rpc:call(Node,release_handler,make_permanent,[FromVsn]), + [{"OTP upgrade test",ToVsn,_,old}, + {"OTP upgrade test",FromVsn,_,permanent}] = + rpc:call(Node,release_handler,which_releases,[]), + + _State3 = do_callback(Node,Cb,upgrade_downgraded,[CtData,State2]), + + ct:log("Terminating node ~p",[Node]), + erlang:monitor_node(Node,true), + _ = rpc:call(Node,init,stop,[]), + receive {nodedown,Node} -> ok end, + ct:log("Node terminated"), + + ok. + +do_callback(Node,Mod,Func,Args) -> + Dir = filename:dirname(code:which(Mod)), + _ = rpc:call(Node,code,add_path,[Dir]), + ct:log("Calling ~p:~p/1",[Mod,Func]), + R = rpc:call(Node,Mod,Func,Args), + ct:log("~p:~p/~w returned: ~p",[Mod,Func,length(Args),R]), + case R of + {badrpc,Error} -> + test_server:fail({test_upgrade_callback,Mod,Func,Args,Error}); + NewState -> + NewState + end. + +%%% Library functions +previous_major("17") -> + "r16b"; +previous_major(Rel) -> + integer_to_list(list_to_integer(Rel)-1). + +create_relfile(AppsVsns,CreateDir,RelName0,RelVsn) -> + UpgradeAppsVsns = [{A,V,restart_type(A)} || {A,V} <- AppsVsns], + + CoreAppVsns0 = get_vsns([kernel,stdlib,sasl]), + CoreAppVsns = + [{A,V,restart_type(A)} || {A,V} <- CoreAppVsns0, + false == lists:keymember(A,1,AppsVsns)], + + Apps = [App || {App,_} <- AppsVsns], + StartDepsVsns = get_start_deps(Apps,CoreAppVsns), + StartApps = [StartApp || {StartApp,_,_} <- StartDepsVsns] ++ Apps, + + {RuntimeDepsVsns,_} = get_runtime_deps(StartApps,StartApps,[],[]), + + AllAppsVsns0 = StartDepsVsns ++ UpgradeAppsVsns ++ RuntimeDepsVsns, + + %% Should test tools really be included? Some library functions + %% here could be used by callback, but not everything since + %% processes of these applications will not be running. + TestToolAppsVsns0 = get_vsns([test_server,common_test]), + TestToolAppsVsns = + [{A,V,none} || {A,V} <- TestToolAppsVsns0, + false == lists:keymember(A,1,AllAppsVsns0)], + + AllAppsVsns1 = AllAppsVsns0 ++ TestToolAppsVsns, + AllAppsVsns = [AV || AV={A,_,_} <- AllAppsVsns1, + false == lists:member(A,?exclude_apps)], + + ErtsVsn = erlang:system_info(version), + + %% Create the .rel file + RelContent = {release,{"OTP upgrade test",RelVsn},{erts,ErtsVsn},AllAppsVsns}, + RelName = filename:join(CreateDir,RelName0), + RelFile = RelName++".rel", + {ok,Fd} = file:open(RelFile,[write,{encoding,utf8}]), + io:format(Fd,"~tp.~n",[RelContent]), + ok = file:close(Fd), + {RelName,ErtsVsn}. 
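[Editorial illustration, not part of the patch] For orientation, the .rel term written by create_relfile/4 above has the ordinary release-file shape. The version strings below are placeholders; the restart types follow restart_type/1 (permanent for kernel/stdlib/sasl, temporary for the application under test and its start dependencies, none for runtime dependencies and the test tool applications):

{release, {"OTP upgrade test","17"},
 {erts,"6.3"},
 [{kernel,"3.1",permanent},
  {stdlib,"2.3",permanent},
  {sasl,"2.4.1",permanent},
  {crypto,"3.4.2",temporary},   %% start dependency of the app under test
  {ssl,"5.3.8",temporary},      %% application under test
  {common_test,"1.9",none}]}.   %% test tool, callback code only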
+ +get_vsns(Apps) -> + [begin + _ = application:load(A), + {ok,V} = application:get_key(A,vsn), + {A,V} + end || A <- Apps]. + +get_start_deps([App|Apps],Acc) -> + _ = application:load(App), + {ok,StartDeps} = application:get_key(App,applications), + StartDepsVsns = + [begin + _ = application:load(StartApp), + {ok,StartVsn} = application:get_key(StartApp,vsn), + {StartApp,StartVsn,restart_type(StartApp)} + end || StartApp <- StartDeps, + false == lists:keymember(StartApp,1,Acc)], + DepsStartDeps = get_start_deps(StartDeps,Acc ++ StartDepsVsns), + get_start_deps(Apps,DepsStartDeps); +get_start_deps([],Acc) -> + Acc. + +get_runtime_deps([App|Apps],StartApps,Acc,Visited) -> + case lists:member(App,Visited) of + true -> + get_runtime_deps(Apps,StartApps,Acc,Visited); + false -> + %% runtime_dependencies should be possible to read with + %% application:get_key/2, but still isn't so we need to + %% read the .app file... + AppFile = code:where_is_file(atom_to_list(App) ++ ".app"), + {ok,[{application,App,Attrs}]} = file:consult(AppFile), + RuntimeDeps = + lists:flatmap( + fun(Str) -> + [RuntimeAppStr,_] = string:tokens(Str,"-"), + RuntimeApp = list_to_atom(RuntimeAppStr), + case {lists:keymember(RuntimeApp,1,Acc), + lists:member(RuntimeApp,StartApps)} of + {false,false} when RuntimeApp=/=erts -> + [RuntimeApp]; + _ -> + [] + end + end, + proplists:get_value(runtime_dependencies,Attrs,[])), + RuntimeDepsVsns = + [begin + _ = application:load(RuntimeApp), + {ok,RuntimeVsn} = application:get_key(RuntimeApp,vsn), + {RuntimeApp,RuntimeVsn,none} + end || RuntimeApp <- RuntimeDeps], + {DepsRuntimeDeps,NewVisited} = + get_runtime_deps(RuntimeDeps,StartApps,Acc++RuntimeDepsVsns,[App|Visited]), + get_runtime_deps(Apps,StartApps,DepsRuntimeDeps,NewVisited) + end; +get_runtime_deps([],_,Acc,Visited) -> + {Acc,Visited}. + +restart_type(App) when App==kernel; App==stdlib; App==sasl -> + permanent; +restart_type(_) -> + temporary. + +copy_file(Src, Dest) -> + copy_file(Src, Dest, []). + +copy_file(Src, Dest, Opts) -> + {ok,_} = file:copy(Src, Dest), + case lists:member(preserve, Opts) of + true -> + {ok, FileInfo} = file:read_file_info(Src), + file:write_file_info(Dest, FileInfo); + false -> + ok + end. + +write_file(FName, Conts) -> + Enc = file:native_name_encoding(), + {ok, Fd} = file:open(FName, [write]), + file:write(Fd, unicode:characters_to_binary(Conts,Enc,Enc)), + file:close(Fd). + +%% Substitute all occurrences of %Var% for Val in the given scripts +subst_src_scripts(Scripts, SrcDir, DestDir, Vars, Opts) -> + lists:foreach(fun(Script) -> + subst_src_script(Script, SrcDir, DestDir, + Vars, Opts) + end, Scripts). + +subst_src_script(Script, SrcDir, DestDir, Vars, Opts) -> + subst_file(filename:join([SrcDir, Script ++ ".src"]), + filename:join([DestDir, Script]), + Vars, Opts). + +subst_file(Src, Dest, Vars, Opts) -> + {ok, Bin} = file:read_file(Src), + Conts = binary_to_list(Bin), + NConts = subst(Conts, Vars), + write_file(Dest, NConts), + case lists:member(preserve, Opts) of + true -> + {ok, FileInfo} = file:read_file_info(Src), + file:write_file_info(Dest, FileInfo); + false -> + ok + end. + +subst(Str, [{Var,Val}|Vars]) -> + subst(re:replace(Str,"%"++Var++"%",Val,[{return,list}]),Vars); +subst(Str, []) -> + Str. + +%%% Start a node by executing the given start command. This node will +%%% be used for upgrade. +start_node(Start,ExpVsn,ExpAppsVsns) -> + Port = open_port({spawn_executable, Start}, []), + unlink(Port), + erlang:port_close(Port), + wait_node_up(permanent,ExpVsn,ExpAppsVsns). 
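[Editorial sketch, not part of the patch] The %Var% substitution that subst_src_scripts/5 and subst/2 above perform, written out as a standalone expression; the variable values are placeholders:

Vars = [{"FINAL_ROOTDIR","/tmp/otp-install"},{"EMU","beam"}],
"ROOTDIR=/tmp/otp-install\nEMU=beam\n" =
    lists:foldl(fun({Var,Val}, Str) ->
                        %% Same call as in subst/2: replace the first
                        %% occurrence of %Var% with Val, returning a list.
                        re:replace(Str, "%"++Var++"%", Val, [{return,list}])
                end,
                "ROOTDIR=%FINAL_ROOTDIR%\nEMU=%EMU%\n",
                Vars).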
+ +wait_node_up(ExpStatus,ExpVsn,ExpAppsVsns) -> + Node = node_name(?testnode), + wait_node_up(Node,ExpStatus,ExpVsn,lists:keysort(1,ExpAppsVsns),60). + +wait_node_up(Node,ExpStatus,ExpVsn,ExpAppsVsns,0) -> + test_server:fail({node_not_started,app_check_failed,ExpVsn,ExpAppsVsns, + rpc:call(Node,release_handler,which_releases,[ExpStatus]), + rpc:call(Node,application,which_applications,[])}); +wait_node_up(Node,ExpStatus,ExpVsn,ExpAppsVsns,N) -> + case {rpc:call(Node,release_handler,which_releases,[ExpStatus]), + rpc:call(Node, application, which_applications, [])} of + {[{_,ExpVsn,_,_}],Apps} when is_list(Apps) -> + case [{A,V} || {A,_,V} <- lists:keysort(1,Apps), + lists:keymember(A,1,ExpAppsVsns)] of + ExpAppsVsns -> + {ok,Node}; + _ -> + timer:sleep(2000), + wait_node_up(Node,ExpStatus,ExpVsn,ExpAppsVsns,N-1) + end; + _ -> + timer:sleep(2000), + wait_node_up(Node,ExpStatus,ExpVsn,ExpAppsVsns,N-1) + end. + +node_name(Sname) -> + {ok,Host} = inet:gethostname(), + list_to_atom(atom_to_list(Sname) ++ "@" ++ Host). + +rm_rf(Dir) -> + case file:read_file_info(Dir) of + {ok, #file_info{type = directory}} -> + {ok, Content} = file:list_dir_all(Dir), + [rm_rf(filename:join(Dir,C)) || C <- Content], + ok=file:del_dir(Dir), + ok; + {ok, #file_info{}} -> + ok=file:delete(Dir); + _ -> + ok + end. diff --git a/lib/common_test/test/ct_netconfc_SUITE.erl b/lib/common_test/test/ct_netconfc_SUITE.erl index c89a4cdabe..2959f77087 100644 --- a/lib/common_test/test/ct_netconfc_SUITE.erl +++ b/lib/common_test/test/ct_netconfc_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2009-2012. All Rights Reserved. +%% Copyright Ericsson AB 2009-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -63,7 +63,8 @@ suite() -> [{ct_hooks,[ts_install_cth]}]. all() -> [ - default + netconfc1_SUITE, + netconfc_remote_SUITE ]. %%-------------------------------------------------------------------- @@ -72,14 +73,21 @@ all() -> %%%----------------------------------------------------------------- %%% -default(Config) when is_list(Config) -> +netconfc1_SUITE(Config) when is_list(Config) -> DataDir = ?config(data_dir, Config), Suite = filename:join(DataDir, "netconfc1_SUITE"), CfgFile = filename:join(DataDir, "netconfc1.cfg"), {Opts,ERPid} = setup([{suite,Suite},{config,CfgFile}, - {label,default}], Config), + {label,netconfc1_SUITE}], Config), - ok = execute(default, Opts, ERPid, Config). + ok = execute(netconfc1_SUITE, Opts, ERPid, Config). + +netconfc_remote_SUITE(Config) when is_list(Config) -> + DataDir = ?config(data_dir, Config), + Suite = filename:join(DataDir, "netconfc_remote_SUITE"), + {Opts,ERPid} = setup([{suite,Suite},{label,netconfc_remote_SUITE}], Config), + + ok = execute(netconfc_remote_SUITE, Opts, ERPid, Config). 
%%%----------------------------------------------------------------- @@ -112,16 +120,15 @@ reformat(Events, EH) -> %%%----------------------------------------------------------------- %%% TEST EVENTS %%%----------------------------------------------------------------- -events_to_check(default,Config) -> - {module,_} = code:load_abs(filename:join(?config(data_dir,Config), - netconfc1_SUITE)), - TCs = netconfc1_SUITE:all(), - code:purge(netconfc1_SUITE), - code:delete(netconfc1_SUITE), +events_to_check(Suite,Config) -> + {module,_} = code:load_abs(filename:join(?config(data_dir,Config),Suite)), + TCs = Suite:all(), + code:purge(Suite), + code:delete(Suite), OneTest = [{?eh,start_logging,{'DEF','RUNDIR'}}] ++ - [{?eh,tc_done,{netconfc1_SUITE,TC,ok}} || TC <- TCs] ++ + [{?eh,tc_done,{Suite,TC,ok}} || TC <- TCs] ++ [{?eh,stop_logging,[]}], %% 2 tests (ct:run_test + script_start) is default diff --git a/lib/common_test/test/ct_netconfc_SUITE_data/netconfc1_SUITE.erl b/lib/common_test/test/ct_netconfc_SUITE_data/netconfc1_SUITE.erl index 6f5db21f57..4580528f1b 100644 --- a/lib/common_test/test/ct_netconfc_SUITE_data/netconfc1_SUITE.erl +++ b/lib/common_test/test/ct_netconfc_SUITE_data/netconfc1_SUITE.erl @@ -1,7 +1,7 @@ %%-------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2013. All Rights Reserved. +%% Copyright Ericsson AB 2013-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -30,25 +30,10 @@ -module(netconfc1_SUITE). -include_lib("common_test/include/ct.hrl"). -include_lib("common_test/src/ct_netconfc.hrl"). --include_lib("public_key/include/public_key.hrl"). +-include("netconfc_test_lib.hrl"). -compile(export_all). -%% Default timetrap timeout (set in init_per_testcase). --define(default_timeout, ?t:minutes(1)). - --define(NS,ns). --define(LOCALHOST, "127.0.0.1"). --define(SSH_PORT, 2060). - --define(DEFAULT_SSH_OPTS,[{ssh,?LOCALHOST}, - {port,?SSH_PORT}, - {user,"xxx"}, - {password,"xxx"}]). --define(DEFAULT_SSH_OPTS(Dir), ?DEFAULT_SSH_OPTS++[{user_dir,Dir}]). - --define(ok,ok). - suite() -> [{ct_hooks, [{cth_conn_log, [{ct_netconfc,[{log_type,html}, %will be overwritten by config @@ -91,6 +76,7 @@ all() -> get_config, get_config_xpath, edit_config, + edit_config_opt_params, copy_config, delete_config, lock, @@ -136,8 +122,8 @@ end_per_testcase(_Case, Config) -> init_per_suite(Config) -> case catch {crypto:start(), ssh:start()} of {ok, ok} -> - {ok, _} = get_id_keys(Config), - make_dsa_files(Config), + {ok, _} = netconfc_test_lib:get_id_keys(Config), + netconfc_test_lib:make_dsa_files(Config), Server = ?NS:start(?config(data_dir,Config)), [{server,Server}|Config]; _ -> @@ -148,7 +134,7 @@ end_per_suite(Config) -> ?NS:stop(?config(server,Config)), ssh:stop(), crypto:stop(), - remove_id_keys(Config), + netconfc_test_lib:remove_id_keys(Config), Config. hello(Config) -> @@ -415,6 +401,18 @@ edit_config(Config) -> ?ok = ct_netconfc:close_session(Client), ok. +edit_config_opt_params(Config) -> + DataDir = ?config(data_dir,Config), + {ok,Client} = open_success(DataDir), + ?NS:expect_reply({'edit-config',{'default-operation',"none"}},ok), + ?ok = ct_netconfc:edit_config(Client,running, + {server,[{xmlns,"myns"}], + [{name,["myserver"]}]}, + [{'default-operation',["none"]}]), + ?NS:expect_do_reply('close-session',close,ok), + ?ok = ct_netconfc:close_session(Client), + ok. 
+ copy_config(Config) -> DataDir = ?config(data_dir,Config), {ok,Client} = open_success(DataDir), @@ -1008,165 +1006,3 @@ pad(I) when I<10 -> "0"++integer_to_list(I); pad(I) -> integer_to_list(I). - - -%%%----------------------------------------------------------------- -%%% BEGIN SSH key management -%% copy private keys to given dir from ~/.ssh -get_id_keys(Config) -> - DstDir = ?config(priv_dir, Config), - SrcDir = filename:join(os:getenv("HOME"), ".ssh"), - RsaOk = copyfile(SrcDir, DstDir, "id_rsa"), - DsaOk = copyfile(SrcDir, DstDir, "id_dsa"), - case {RsaOk, DsaOk} of - {{ok, _}, {ok, _}} -> {ok, both}; - {{ok, _}, _} -> {ok, rsa}; - {_, {ok, _}} -> {ok, dsa}; - {Error, _} -> Error - end. - -%% Remove later on. Use make_dsa_files instead. -remove_id_keys(Config) -> - Dir = ?config(priv_dir, Config), - file:delete(filename:join(Dir, "id_rsa")), - file:delete(filename:join(Dir, "id_dsa")). - - -make_dsa_files(Config) -> - make_dsa_files(Config, rfc4716_public_key). -make_dsa_files(Config, Type) -> - {DSA, EncodedKey} = gen_dsa(128, 20), - PKey = DSA#'DSAPrivateKey'.y, - P = DSA#'DSAPrivateKey'.p, - Q = DSA#'DSAPrivateKey'.q, - G = DSA#'DSAPrivateKey'.g, - Dss = #'Dss-Parms'{p=P, q=Q, g=G}, - {ok, Hostname} = inet:gethostname(), - {ok, {A, B, C, D}} = inet:getaddr(Hostname, inet), - IP = lists:concat([A, ".", B, ".", C, ".", D]), - Attributes = [], % Could be [{comment,"user@" ++ Hostname}], - HostNames = [{hostnames,[IP, IP]}], - PublicKey = [{{PKey, Dss}, Attributes}], - KnownHosts = [{{PKey, Dss}, HostNames}], - - KnownHostsEnc = public_key:ssh_encode(KnownHosts, known_hosts), - KnownHosts = public_key:ssh_decode(KnownHostsEnc, known_hosts), - - PublicKeyEnc = public_key:ssh_encode(PublicKey, Type), - - SystemTmpDir = ?config(data_dir, Config), - filelib:ensure_dir(SystemTmpDir), - file:make_dir(SystemTmpDir), - - DSAFile = filename:join(SystemTmpDir, "ssh_host_dsa_key.pub"), - file:delete(DSAFile), - - DSAPrivateFile = filename:join(SystemTmpDir, "ssh_host_dsa_key"), - file:delete(DSAPrivateFile), - - KHFile = filename:join(SystemTmpDir, "known_hosts"), - file:delete(KHFile), - - PemBin = public_key:pem_encode([EncodedKey]), - - file:write_file(DSAFile, PublicKeyEnc), - file:write_file(KHFile, KnownHostsEnc), - file:write_file(DSAPrivateFile, PemBin), - ok. - - -%%-------------------------------------------------------------------- -%% @doc Creates a dsa key (OBS: for testing only) -%% the sizes are in bytes -%% @spec (::integer()) -> {::atom(), ::binary(), ::opaque()} -%% @end -%%-------------------------------------------------------------------- -gen_dsa(LSize,NSize) when is_integer(LSize), is_integer(NSize) -> - Key = gen_dsa2(LSize, NSize), - {Key, encode_key(Key)}. - -encode_key(Key = #'DSAPrivateKey'{}) -> - Der = public_key:der_encode('DSAPrivateKey', Key), - {'DSAPrivateKey', Der, not_encrypted}. - -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -%% DSA key generation (OBS: for testing only) -%% See http://en.wikipedia.org/wiki/Digital_Signature_Algorithm -%% and the fips_186-3.pdf -%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -gen_dsa2(LSize, NSize) -> - Q = prime(NSize), %% Choose N-bit prime Q - X0 = prime(LSize), - P0 = prime((LSize div 2) +1), - - %% Choose L-bit prime modulus P such that p-1 is a multiple of q. - case dsa_search(X0 div (2*Q*P0), P0, Q, 1000) of - error -> - gen_dsa2(LSize, NSize); - P -> - G = crypto:mod_pow(2, (P-1) div Q, P), % Choose G a number whose multiplicative order modulo p is q. 
- %% such that This may be done by setting g = h^(p-1)/q mod p, commonly h=2 is used. - - X = prime(20), %% Choose x by some random method, where 0 < x < q. - Y = crypto:mod_pow(G, X, P), %% Calculate y = g^x mod p. - - #'DSAPrivateKey'{version=0, p = P, q = Q, - g = crypto:bytes_to_integer(G), y = crypto:bytes_to_integer(Y), x = X} - end. - -%% See fips_186-3.pdf -dsa_search(T, P0, Q, Iter) when Iter > 0 -> - P = 2*T*Q*P0 + 1, - case is_prime(P, 50) of - true -> P; - false -> dsa_search(T+1, P0, Q, Iter-1) - end; -dsa_search(_,_,_,_) -> - error. - - -%%%%%%% Crypto Math %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -prime(ByteSize) -> - Rand = odd_rand(ByteSize), - prime_odd(Rand, 0). - -prime_odd(Rand, N) -> - case is_prime(Rand, 50) of - true -> - Rand; - false -> - prime_odd(Rand+2, N+1) - end. - -%% see http://en.wikipedia.org/wiki/Fermat_primality_test -is_prime(_, 0) -> true; -is_prime(Candidate, Test) -> - CoPrime = odd_rand(10000, Candidate), - Result = crypto:mod_pow(CoPrime, Candidate, Candidate) , - is_prime(CoPrime, crypto:bytes_to_integer(Result), Candidate, Test). - -is_prime(CoPrime, CoPrime, Candidate, Test) -> - is_prime(Candidate, Test-1); -is_prime(_,_,_,_) -> - false. - -odd_rand(Size) -> - Min = 1 bsl (Size*8-1), - Max = (1 bsl (Size*8))-1, - odd_rand(Min, Max). - -odd_rand(Min,Max) -> - Rand = crypto:rand_uniform(Min,Max), - case Rand rem 2 of - 0 -> - Rand + 1; - _ -> - Rand - end. - -copyfile(SrcDir, DstDir, Fn) -> - file:copy(filename:join(SrcDir, Fn), - filename:join(DstDir, Fn)). - -%%% END SSH key management -%%%----------------------------------------------------------------- diff --git a/lib/common_test/test/ct_netconfc_SUITE_data/netconfc_remote_SUITE.erl b/lib/common_test/test/ct_netconfc_SUITE_data/netconfc_remote_SUITE.erl new file mode 100644 index 0000000000..7a44d148dd --- /dev/null +++ b/lib/common_test/test/ct_netconfc_SUITE_data/netconfc_remote_SUITE.erl @@ -0,0 +1,147 @@ +%%-------------------------------------------------------------------- +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2014. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% +%%---------------------------------------------------------------------- +-module(netconfc_remote_SUITE). +-include_lib("common_test/include/ct.hrl"). +-include_lib("common_test/src/ct_netconfc.hrl"). +-include("netconfc_test_lib.hrl"). + +-compile(export_all). + +suite() -> + [{ct_hooks, [{cth_conn_log,[{ct_netconfc,[{log_type,html}]}]}]}]. + +all() -> + case os:find_executable("ssh") of + false -> + {skip, "SSH not installed on host"}; + _ -> + [remote_crash + ] + end. + +groups() -> + []. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. + +init_per_testcase(Case, Config) -> + stop_node(Case), + Dog = test_server:timetrap(?default_timeout), + [{watchdog, Dog}|Config]. 
+ +end_per_testcase(Case, Config) -> + stop_node(Case), + Dog=?config(watchdog, Config), + test_server:timetrap_cancel(Dog), + ok. + +stop_node(Case) -> + {ok,Host} = inet:gethostname(), + Node = list_to_atom("nc_" ++ atom_to_list(Case)++ "@" ++ Host), + rpc:call(Node,erlang,halt,[]). + + +init_per_suite(Config) -> + case {crypto:start(),ssh:start()} of + {ok,ok} -> + {ok, _} = netconfc_test_lib:get_id_keys(Config), + netconfc_test_lib:make_dsa_files(Config), + Config; + _ -> + {skip, "Crypto and/or SSH could not be started locally!"} + end. + +end_per_suite(Config) -> + ssh:stop(), + crypto:stop(), + netconfc_test_lib:remove_id_keys(Config), + Config. + +%% This test case is related to seq12645 +%% Running the netconf server in a remote node, test that the client +%% process terminates if the remote node goes down. +remote_crash(Config) -> + {ok,Node} = ct_slave:start(nc_remote_crash), + Pa = filename:dirname(code:which(?NS)), + true = rpc:call(Node,code,add_patha,[Pa]), + + case {rpc:call(Node,crypto,start,[]),rpc:call(Node,ssh,start,[])} of + {ok,ok} -> + Server = rpc:call(Node,?NS,start,[?config(data_dir,Config)]), + remote_crash(Node,Config); + _ -> + {skip, "Crypto and/or SSH could not be started remote!"} + end. + +remote_crash(Node,Config) -> + DataDir = ?config(data_dir,Config), + {ok,Client} = open_success(Node,DataDir), + + ns(Node,expect_reply,[{'create-subscription',[stream]},ok]), + ?ok = ct_netconfc:create_subscription(Client), + + true = erlang:is_process_alive(Client), + Ref = erlang:monitor(process,Client), + rpc:call(Node,erlang,halt,[]), % take the node down as brutally as possible + receive {'DOWN',Ref,process,Client,_} -> + ok + after 10000 -> + ct:fail(client_still_alive) + end. + +%%%----------------------------------------------------------------- + +break(_Config) -> + test_server:break("break test case"). + +%%%----------------------------------------------------------------- +%% Open a netconf session which is not specified in a config file +open_success(Node,Dir) -> + open_success(Node,Dir,[]). + +%% Open a netconf session which is not specified in a config file, and +%% give som extra options in addition to the test defaults. +open_success(Node,Dir,ExtraOpts) when is_list(Dir), is_list(ExtraOpts) -> + ns(Node,hello,[1]), % tell server to send hello with session id 1 + ns(Node,expect,[hello]), % tell server to expect a hello message from client + open(Dir,ExtraOpts); + +%% Open a named netconf session which is not specified in a config file +open_success(Node,KeyOrName,Dir) when is_atom(KeyOrName), is_list(Dir) -> + ns(Node,hello,[1]), + ns(Node,expect,[hello]), + ct_netconfc:open(KeyOrName,?DEFAULT_SSH_OPTS(Dir)). + +open(Dir) -> + open(Dir,[]). +open(Dir,ExtraOpts) -> + Opts = lists:ukeymerge(1,lists:keysort(1,ExtraOpts), + lists:keysort(1,?DEFAULT_SSH_OPTS(Dir))), + ct_netconfc:open(Opts). + +%%%----------------------------------------------------------------- +%%% Call server on remote node +ns(Node,Func,Args) -> + rpc:call(Node,?NS,Func,Args). + diff --git a/lib/common_test/test/ct_netconfc_SUITE_data/netconfc_test_lib.erl b/lib/common_test/test/ct_netconfc_SUITE_data/netconfc_test_lib.erl new file mode 100644 index 0000000000..e058bc7600 --- /dev/null +++ b/lib/common_test/test/ct_netconfc_SUITE_data/netconfc_test_lib.erl @@ -0,0 +1,166 @@ +-module(netconfc_test_lib). + +-export([get_id_keys/1, remove_id_keys/1, make_dsa_files/1]). +-include_lib("common_test/include/ct.hrl"). +-include_lib("public_key/include/public_key.hrl"). 
+ +%%%----------------------------------------------------------------- +%%% BEGIN SSH key management +%% copy private keys to given dir from ~/.ssh +get_id_keys(Config) -> + DstDir = ?config(priv_dir, Config), + SrcDir = filename:join(os:getenv("HOME"), ".ssh"), + RsaOk = copyfile(SrcDir, DstDir, "id_rsa"), + DsaOk = copyfile(SrcDir, DstDir, "id_dsa"), + case {RsaOk, DsaOk} of + {{ok, _}, {ok, _}} -> {ok, both}; + {{ok, _}, _} -> {ok, rsa}; + {_, {ok, _}} -> {ok, dsa}; + {Error, _} -> Error + end. + +%% Remove later on. Use make_dsa_files instead. +remove_id_keys(Config) -> + Dir = ?config(priv_dir, Config), + file:delete(filename:join(Dir, "id_rsa")), + file:delete(filename:join(Dir, "id_dsa")). + + +make_dsa_files(Config) -> + make_dsa_files(Config, rfc4716_public_key). +make_dsa_files(Config, Type) -> + {DSA, EncodedKey} = gen_dsa(128, 20), + PKey = DSA#'DSAPrivateKey'.y, + P = DSA#'DSAPrivateKey'.p, + Q = DSA#'DSAPrivateKey'.q, + G = DSA#'DSAPrivateKey'.g, + Dss = #'Dss-Parms'{p=P, q=Q, g=G}, + {ok, Hostname} = inet:gethostname(), + {ok, {A, B, C, D}} = inet:getaddr(Hostname, inet), + IP = lists:concat([A, ".", B, ".", C, ".", D]), + Attributes = [], % Could be [{comment,"user@" ++ Hostname}], + HostNames = [{hostnames,[IP, IP]}], + PublicKey = [{{PKey, Dss}, Attributes}], + KnownHosts = [{{PKey, Dss}, HostNames}], + + KnownHostsEnc = public_key:ssh_encode(KnownHosts, known_hosts), + KnownHosts = public_key:ssh_decode(KnownHostsEnc, known_hosts), + + PublicKeyEnc = public_key:ssh_encode(PublicKey, Type), + + SystemTmpDir = ?config(data_dir, Config), + filelib:ensure_dir(SystemTmpDir), + file:make_dir(SystemTmpDir), + + DSAFile = filename:join(SystemTmpDir, "ssh_host_dsa_key.pub"), + file:delete(DSAFile), + + DSAPrivateFile = filename:join(SystemTmpDir, "ssh_host_dsa_key"), + file:delete(DSAPrivateFile), + + KHFile = filename:join(SystemTmpDir, "known_hosts"), + file:delete(KHFile), + + PemBin = public_key:pem_encode([EncodedKey]), + + file:write_file(DSAFile, PublicKeyEnc), + file:write_file(KHFile, KnownHostsEnc), + file:write_file(DSAPrivateFile, PemBin), + ok. + + +%%-------------------------------------------------------------------- +%% @doc Creates a dsa key (OBS: for testing only) +%% the sizes are in bytes +%% @spec (::integer()) -> {::atom(), ::binary(), ::opaque()} +%% @end +%%-------------------------------------------------------------------- +gen_dsa(LSize,NSize) when is_integer(LSize), is_integer(NSize) -> + Key = gen_dsa2(LSize, NSize), + {Key, encode_key(Key)}. + +encode_key(Key = #'DSAPrivateKey'{}) -> + Der = public_key:der_encode('DSAPrivateKey', Key), + {'DSAPrivateKey', Der, not_encrypted}. + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% DSA key generation (OBS: for testing only) +%% See http://en.wikipedia.org/wiki/Digital_Signature_Algorithm +%% and the fips_186-3.pdf +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +gen_dsa2(LSize, NSize) -> + Q = prime(NSize), %% Choose N-bit prime Q + X0 = prime(LSize), + P0 = prime((LSize div 2) +1), + + %% Choose L-bit prime modulus P such that p-1 is a multiple of q. + case dsa_search(X0 div (2*Q*P0), P0, Q, 1000) of + error -> + gen_dsa2(LSize, NSize); + P -> + G = crypto:mod_pow(2, (P-1) div Q, P), % Choose G a number whose multiplicative order modulo p is q. + %% such that This may be done by setting g = h^(p-1)/q mod p, commonly h=2 is used. + + X = prime(20), %% Choose x by some random method, where 0 < x < q. + Y = crypto:mod_pow(G, X, P), %% Calculate y = g^x mod p. 
+ + #'DSAPrivateKey'{version=0, p = P, q = Q, + g = crypto:bytes_to_integer(G), y = crypto:bytes_to_integer(Y), x = X} + end. + +%% See fips_186-3.pdf +dsa_search(T, P0, Q, Iter) when Iter > 0 -> + P = 2*T*Q*P0 + 1, + case is_prime(P, 50) of + true -> P; + false -> dsa_search(T+1, P0, Q, Iter-1) + end; +dsa_search(_,_,_,_) -> + error. + + +%%%%%%% Crypto Math %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +prime(ByteSize) -> + Rand = odd_rand(ByteSize), + prime_odd(Rand, 0). + +prime_odd(Rand, N) -> + case is_prime(Rand, 50) of + true -> + Rand; + false -> + prime_odd(Rand+2, N+1) + end. + +%% see http://en.wikipedia.org/wiki/Fermat_primality_test +is_prime(_, 0) -> true; +is_prime(Candidate, Test) -> + CoPrime = odd_rand(10000, Candidate), + Result = crypto:mod_pow(CoPrime, Candidate, Candidate) , + is_prime(CoPrime, crypto:bytes_to_integer(Result), Candidate, Test). + +is_prime(CoPrime, CoPrime, Candidate, Test) -> + is_prime(Candidate, Test-1); +is_prime(_,_,_,_) -> + false. + +odd_rand(Size) -> + Min = 1 bsl (Size*8-1), + Max = (1 bsl (Size*8))-1, + odd_rand(Min, Max). + +odd_rand(Min,Max) -> + Rand = crypto:rand_uniform(Min,Max), + case Rand rem 2 of + 0 -> + Rand + 1; + _ -> + Rand + end. + +copyfile(SrcDir, DstDir, Fn) -> + file:copy(filename:join(SrcDir, Fn), + filename:join(DstDir, Fn)). + +%%% END SSH key management +%%%----------------------------------------------------------------- diff --git a/lib/common_test/test/ct_netconfc_SUITE_data/netconfc_test_lib.hrl b/lib/common_test/test/ct_netconfc_SUITE_data/netconfc_test_lib.hrl new file mode 100644 index 0000000000..dcaad5ba93 --- /dev/null +++ b/lib/common_test/test/ct_netconfc_SUITE_data/netconfc_test_lib.hrl @@ -0,0 +1,14 @@ +%% Default timetrap timeout (set in init_per_testcase). +-define(default_timeout, ?t:minutes(1)). + +-define(NS,ns). % netconf server module +-define(LOCALHOST, "127.0.0.1"). +-define(SSH_PORT, 2060). + +-define(DEFAULT_SSH_OPTS,[{ssh,?LOCALHOST}, + {port,?SSH_PORT}, + {user,"xxx"}, + {password,"xxx"}]). +-define(DEFAULT_SSH_OPTS(Dir), ?DEFAULT_SSH_OPTS++[{user_dir,Dir}]). + +-define(ok,ok). diff --git a/lib/common_test/test/ct_netconfc_SUITE_data/ns.erl b/lib/common_test/test/ct_netconfc_SUITE_data/ns.erl index f503825c4e..3c419c8164 100644 --- a/lib/common_test/test/ct_netconfc_SUITE_data/ns.erl +++ b/lib/common_test/test/ct_netconfc_SUITE_data/ns.erl @@ -1,7 +1,7 @@ %%-------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2012-2013. All Rights Reserved. +%% Copyright Ericsson AB 2012-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -382,6 +382,7 @@ event({startElement,_,Name,_,Attrs},[ignore,{se,Name,As}|Match]) -> event({startPrefixMapping,_,Ns},[{ns,Ns}|Match]) -> Match; event({startPrefixMapping,_,Ns},[ignore,{ns,Ns}|Match]) -> Match; event({endPrefixMapping,_},Match) -> Match; +event({characters,Chs},[{characters,Chs}|Match]) -> Match; event({endElement,_,Name,_},[{ee,Name}|Match]) -> Match; event({endElement,_,Name,_},[ignore,{ee,Name}|Match]) -> Match; event(endDocument,Match) when Match==[]; Match==[ignore] -> ok; @@ -471,14 +472,17 @@ capabilities(no_caps) -> %%% expect_do_reply/3. %%% %%% match(term()) -> [Match]. 
-%%% Match = ignore | {se,Name} | {se,Name,Attrs} | {ee,Name} | {ns,Namespace} +%%% Match = ignore | {se,Name} | {se,Name,Attrs} | {ee,Name} | +%%% {ns,Namespace} | {characters,Chs} %%% Name = string() +%%% Chs = string() %%% Attrs = [{atom(),string()}] %%% Namespace = string() %%% %%% 'se' means start element, 'ee' means end element - i.e. to match %%% an XML element you need one 'se' entry and one 'ee' entry with the -%%% same name in the match list. +%%% same name in the match list. 'characters' can be used for matching +%%% character data (cdata) inside an element. match(hello) -> [ignore,{se,"hello"},ignore,{ee,"hello"},ignore]; match('close-session') -> @@ -487,6 +491,10 @@ match('close-session') -> match('edit-config') -> [ignore,{se,"rpc"},{se,"edit-config"},{se,"target"},ignore,{ee,"target"}, {se,"config"},ignore,{ee,"config"},{ee,"edit-config"},{ee,"rpc"},ignore]; +match({'edit-config',{'default-operation',DO}}) -> + [ignore,{se,"rpc"},{se,"edit-config"},{se,"target"},ignore,{ee,"target"}, + {se,"default-operation"},{characters,DO},{ee,"default-operation"}, + {se,"config"},ignore,{ee,"config"},{ee,"edit-config"},{ee,"rpc"},ignore]; match('get') -> match({get,subtree}); match({'get',FilterType}) -> diff --git a/lib/common_test/test/ct_test_support.erl b/lib/common_test/test/ct_test_support.erl index 746469584d..2c1f98d63b 100644 --- a/lib/common_test/test/ct_test_support.erl +++ b/lib/common_test/test/ct_test_support.erl @@ -1340,12 +1340,7 @@ delete_old_logs(_, Config) -> delete_dirs(LogDir) -> Now = calendar:datetime_to_gregorian_seconds(calendar:local_time()), - SaveTime = case os:getenv("CT_SAVE_OLD_LOGS") of - false -> - 28800; - SaveTime0 -> - list_to_integer(SaveTime0) - end, + SaveTime = list_to_integer(os:getenv("CT_SAVE_OLD_LOGS", "28800")), Deadline = Now - SaveTime, Dirs = filelib:wildcard(filename:join(LogDir,"ct_run*")), Dirs2Del = diff --git a/lib/compiler/src/Makefile b/lib/compiler/src/Makefile index c6d09d85eb..7c4cebdc28 100644 --- a/lib/compiler/src/Makefile +++ b/lib/compiler/src/Makefile @@ -81,6 +81,7 @@ MODULES = \ rec_env \ sys_core_dsetel \ sys_core_fold \ + sys_core_fold_lists \ sys_core_inline \ sys_pre_attributes \ sys_pre_expand \ @@ -158,6 +159,10 @@ $(EBIN)/beam_asm.beam: $(ESRC)/beam_asm.erl $(EGEN)/beam_opcodes.hrl $(EBIN)/cerl_inline.beam: $(ESRC)/cerl_inline.erl $(V_ERLC) $(ERL_COMPILE_FLAGS) +nowarn_shadow_vars -o$(EBIN) $< +# Inlining core_parse is slow and has no benefit. +$(EBIN)/core_parse.beam: $(EGEN)/core_parse.erl + $(V_ERLC) $(subst +inline,,$(ERL_COMPILE_FLAGS)) -o$(EBIN) $< + # ---------------------------------------------------- # Release Target # ---------------------------------------------------- @@ -187,6 +192,7 @@ $(EBIN)/core_parse.beam: core_parse.hrl $(EGEN)/core_parse.erl $(EBIN)/core_pp.beam: core_parse.hrl $(EBIN)/sys_core_dsetel.beam: core_parse.hrl $(EBIN)/sys_core_fold.beam: core_parse.hrl +$(EBIN)/sys_core_fold_lists.beam: core_parse.hrl $(EBIN)/sys_core_inline.beam: core_parse.hrl $(EBIN)/sys_pre_expand.beam: ../../stdlib/include/erl_bits.hrl $(EBIN)/v3_codegen.beam: v3_life.hrl diff --git a/lib/compiler/src/beam_a.erl b/lib/compiler/src/beam_a.erl index fe4f473846..dd7e03dd28 100644 --- a/lib/compiler/src/beam_a.erl +++ b/lib/compiler/src/beam_a.erl @@ -54,6 +54,9 @@ rename_instrs([{call_only,A,F}|Is]) -> [{call,A,F},return|rename_instrs(Is)]; rename_instrs([{call_ext_only,A,F}|Is]) -> [{call_ext,A,F},return|rename_instrs(Is)]; +rename_instrs([{'%live',_}|Is]) -> + %% When compiling from old .S files. 
+ rename_instrs(Is); rename_instrs([I|Is]) -> [rename_instr(I)|rename_instrs(Is)]; rename_instrs([]) -> []. diff --git a/lib/compiler/src/beam_block.erl b/lib/compiler/src/beam_block.erl index 7a30c68593..5216f39296 100644 --- a/lib/compiler/src/beam_block.erl +++ b/lib/compiler/src/beam_block.erl @@ -155,7 +155,8 @@ collect(remove_message) -> {set,[],[],remove_message}; collect({put_map,F,Op,S,D,R,{list,Puts}}) -> {set,[D],[S|Puts],{alloc,R,{put_map,Op,F}}}; collect({get_map_elements,F,S,{list,Gets}}) -> - {set,Gets,[S],{get_map_elements,F}}; + {Ss,Ds} = beam_utils:split_even(Gets), + {set,Ds,[S|Ss],{get_map_elements,F}}; collect({'catch',R,L}) -> {set,[R],[],{'catch',L}}; collect(fclearerror) -> {set,[],[],fclearerror}; collect({fcheckerror,{f,0}}) -> {set,[],[],fcheckerror}; @@ -183,7 +184,7 @@ embed_lines([], Acc) -> Acc. opt_blocks([{block,Bl0}|Is]) -> %% The live annotation at the beginning is not useful. - [{'%live',_}|Bl] = Bl0, + [{'%live',_,_}|Bl] = Bl0, [{block,opt_block(Bl)}|opt_blocks(Is)]; opt_blocks([I|Is]) -> [I|opt_blocks(Is)]; @@ -251,13 +252,6 @@ combine_alloc({_,Ns,Nh1,Init}, {_,nostack,Nh2,[]}) -> %% opt([Instruction]) -> [Instruction] %% Optimize the instruction stream inside a basic block. -opt([{set,[Dst],As,{bif,Bif,Fail}}=I1, - {set,[Dst],[Dst],{bif,'not',Fail}}=I2|Is]) -> - %% Get rid of the 'not' if the operation can be inverted. - case inverse_comp_op(Bif) of - none -> [I1,I2|opt(Is)]; - RevBif -> [{set,[Dst],As,{bif,RevBif,Fail}}|opt(Is)] - end; opt([{set,[X],[X],move}|Is]) -> opt(Is); opt([{set,_,_,{line,_}}=Line1, {set,[D1],[{integer,Idx1},Reg],{bif,element,{f,0}}}=I1, @@ -268,7 +262,7 @@ opt([{set,_,_,{line,_}}=Line1, opt([{set,Ds0,Ss,Op}|Is0]) -> {Ds,Is} = opt_moves(Ds0, Is0), [{set,Ds,Ss,Op}|opt(Is)]; -opt([{'%live',_}=I|Is]) -> +opt([{'%live',_,_}=I|Is]) -> [I|opt(Is)]; opt([]) -> []. @@ -427,18 +421,6 @@ x_live([{x,N}|Rs], Regs) -> x_live(Rs, Regs bor (1 bsl N)); x_live([_|Rs], Regs) -> x_live(Rs, Regs); x_live([], Regs) -> Regs. -%% inverse_comp_op(Op) -> none|RevOp - -inverse_comp_op('=:=') -> '=/='; -inverse_comp_op('=/=') -> '=:='; -inverse_comp_op('==') -> '/='; -inverse_comp_op('/=') -> '=='; -inverse_comp_op('>') -> '=<'; -inverse_comp_op('<') -> '>='; -inverse_comp_op('>=') -> '<'; -inverse_comp_op('=<') -> '>'; -inverse_comp_op(_) -> none. - %%% %%% Evaluation of constant bit fields. %%% diff --git a/lib/compiler/src/beam_bool.erl b/lib/compiler/src/beam_bool.erl index a452d30b61..5ed9c16d61 100644 --- a/lib/compiler/src/beam_bool.erl +++ b/lib/compiler/src/beam_bool.erl @@ -787,6 +787,9 @@ is_not_used(R, Is, Label, #st{ll=Ll}) -> initialized_regs(Is) -> initialized_regs(Is, ordsets:new()). 
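[Editorial sketch, not part of the patch] beam_utils:split_even/1, used in the new get_map_elements clause of collect/1 above, is not shown in this diff; the assumption here is that it splits the interleaved [Key1,Dst1,Key2,Dst2,...] list into {[Key1,Key2,...],[Dst1,Dst2,...]}, which is the {Ss,Ds} pair that clause needs. A stand-in with that behaviour:

%% Split an interleaved, even-length list into the elements at odd
%% positions and the elements at even positions.
split_even(List) ->
    split_even(List, [], []).

split_even([], Ss, Ds) ->
    {lists:reverse(Ss), lists:reverse(Ds)};
split_even([S,D|Rest], Ss, Ds) ->
    split_even(Rest, [S|Ss], [D|Ds]).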
+initialized_regs([{set,Dst,_Src,{alloc,Live,_}}|_], Regs0) -> + Regs = add_init_regs(free_vars_regs(Live), Regs0), + add_init_regs(Dst, Regs); initialized_regs([{set,Dst,Src,_}|Is], Regs) -> initialized_regs(Is, add_init_regs(Dst, add_init_regs(Src, Regs))); initialized_regs([{test,_,_,Src}|Is], Regs) -> diff --git a/lib/compiler/src/beam_clean.erl b/lib/compiler/src/beam_clean.erl index b653998252..b68b8702e0 100644 --- a/lib/compiler/src/beam_clean.erl +++ b/lib/compiler/src/beam_clean.erl @@ -234,31 +234,6 @@ replace([{bs_init,{f,Lbl},Info,Live,Ss,Dst}|Is], Acc, D) when Lbl =/= 0 -> replace(Is, [{bs_init,{f,label(Lbl, D)},Info,Live,Ss,Dst}|Acc], D); replace([{bs_put,{f,Lbl},Info,Ss}|Is], Acc, D) when Lbl =/= 0 -> replace(Is, [{bs_put,{f,label(Lbl, D)},Info,Ss}|Acc], D); -replace([{bs_init2,{f,Lbl},Sz,Words,R,F,Dst}|Is], Acc, D) when Lbl =/= 0 -> - replace(Is, [{bs_init2,{f,label(Lbl, D)},Sz,Words,R,F,Dst}|Acc], D); -replace([{bs_init_bits,{f,Lbl},Sz,Words,R,F,Dst}|Is], Acc, D) when Lbl =/= 0 -> - replace(Is, [{bs_init_bits,{f,label(Lbl, D)},Sz,Words,R,F,Dst}|Acc], D); -replace([{bs_put_integer,{f,Lbl},Bits,Unit,Fl,Val}|Is], Acc, D) when Lbl =/= 0 -> - replace(Is, [{bs_put_integer,{f,label(Lbl, D)},Bits,Unit,Fl,Val}|Acc], D); -replace([{bs_put_utf8=I,{f,Lbl},Fl,Val}|Is], Acc, D) when Lbl =/= 0 -> - replace(Is, [{I,{f,label(Lbl, D)},Fl,Val}|Acc], D); -replace([{bs_put_utf16=I,{f,Lbl},Fl,Val}|Is], Acc, D) when Lbl =/= 0 -> - replace(Is, [{I,{f,label(Lbl, D)},Fl,Val}|Acc], D); -replace([{bs_put_utf32=I,{f,Lbl},Fl,Val}|Is], Acc, D) when Lbl =/= 0 -> - replace(Is, [{I,{f,label(Lbl, D)},Fl,Val}|Acc], D); -replace([{bs_put_binary,{f,Lbl},Bits,Unit,Fl,Val}|Is], Acc, D) when Lbl =/= 0 -> - replace(Is, [{bs_put_binary,{f,label(Lbl, D)},Bits,Unit,Fl,Val}|Acc], D); -replace([{bs_put_float,{f,Lbl},Bits,Unit,Fl,Val}|Is], Acc, D) when Lbl =/= 0 -> - replace(Is, [{bs_put_float,{f,label(Lbl, D)},Bits,Unit,Fl,Val}|Acc], D); -replace([{bs_add,{f,Lbl},Src,Dst}|Is], Acc, D) when Lbl =/= 0 -> - replace(Is, [{bs_add,{f,label(Lbl, D)},Src,Dst}|Acc], D); -replace([{bs_append,{f,Lbl},_,_,_,_,_,_,_}=I0|Is], Acc, D) when Lbl =/= 0 -> - I = setelement(2, I0, {f,label(Lbl, D)}), - replace(Is, [I|Acc], D); -replace([{bs_utf8_size=I,{f,Lbl},Src,Dst}|Is], Acc, D) when Lbl =/= 0 -> - replace(Is, [{I,{f,label(Lbl, D)},Src,Dst}|Acc], D); -replace([{bs_utf16_size=I,{f,Lbl},Src,Dst}|Is], Acc, D) when Lbl =/= 0 -> - replace(Is, [{I,{f,label(Lbl, D)},Src,Dst}|Acc], D); replace([{put_map=I,{f,Lbl},Op,Src,Dst,Live,List}|Is], Acc, D) when Lbl =/= 0 -> replace(Is, [{I,{f,label(Lbl, D)},Op,Src,Dst,Live,List}|Acc], D); diff --git a/lib/compiler/src/beam_dead.erl b/lib/compiler/src/beam_dead.erl index b15adfa889..f4515ba2a7 100644 --- a/lib/compiler/src/beam_dead.erl +++ b/lib/compiler/src/beam_dead.erl @@ -21,112 +21,10 @@ -export([module/2]). -%%% The following optimisations are done: -%%% -%%% (1) In this code -%%% -%%% move DeadValue {x,0} -%%% jump L2 -%%% . -%%% . -%%% . -%%% L2: move Anything {x,0} -%%% . -%%% . -%%% . -%%% -%%% the first assignment to {x,0} has no effect (is dead), -%%% so it can be removed. Besides removing a move instruction, -%%% if the move was preceeded by a label, the resulting code -%%% will look this -%%% -%%% L1: jump L2 -%%% . -%%% . -%%% . -%%% L2: move Anything {x,0} -%%% . -%%% . -%%% . -%%% -%%% which can be further optimized by the jump optimizer (beam_jump). -%%% -%%% (2) In this code -%%% -%%% L1: move AtomLiteral {x,0} -%%% jump L2 -%%% . -%%% . -%%% . 
-%%% L2: test is_atom FailLabel {x,0} -%%% select_val {x,0}, FailLabel [... AtomLiteral => L3...] -%%% . -%%% . -%%% . -%%% L3: ... -%%% -%%% FailLabel: ... -%%% -%%% the first code fragment can be changed to -%%% -%%% L1: move AtomLiteral {x,0} -%%% jump L3 -%%% -%%% If the literal is not included in the table of literals in the -%%% select_val instruction, the first code fragment will instead be -%%% rewritten as: -%%% -%%% L1: move AtomLiteral {x,0} -%%% jump FailLabel -%%% -%%% The move instruction will be removed by optimization (1) above, -%%% if the code following the L3 label overwrites {x,0}. -%%% -%%% The code following the L2 label will be kept, but it will be removed later -%%% by the jump optimizer. -%%% -%%% (3) In this code -%%% -%%% test is_eq_exact ALabel Src Dst -%%% move Src Dst -%%% -%%% the move instruction can be removed. -%%% Same thing for -%%% -%%% test is_nil ALabel Dst -%%% move [] Dst -%%% -%%% -%%% (4) In this code -%%% -%%% select_val {x,Reg}, ALabel [... Literal => L1...] -%%% . -%%% . -%%% . -%%% L1: move Literal {x,Reg} -%%% -%%% we can remove the move instruction. -%%% -%%% (5) In the following code -%%% -%%% bif '=:=' Fail Src1 Src2 {x,0} -%%% jump L1 -%%% . -%%% . -%%% . -%%% L1: select_val {x,0}, ALabel [... true => L2..., ...false => L3...] -%%% . -%%% . -%%% . -%%% L2: .... L3: .... -%%% -%%% the first two instructions can be replaced with -%%% -%%% test is_eq_exact L3 Src1 Src2 -%%% jump L2 -%%% -%%% provided that {x,0} is killed at both L2 and L3. -%%% +%%% Dead code is code that is executed but has no effect. This +%%% optimization pass either removes dead code or jumps around it, +%%% potentially making it unreachable and a target for the +%%% the beam_jump pass. -import(lists, [mapfoldl/3,reverse/1]). @@ -173,12 +71,39 @@ move_move_into_block([I|Is], Acc) -> move_move_into_block([], Acc) -> reverse(Acc). %%% -%%% Scan instructions in execution order and remove dead code. +%%% Scan instructions in execution order and remove redundant 'move' +%%% instructions. 'move' instructions are redundant if we know that +%%% the register already contains the value being assigned, as in the +%%% following code: +%%% +%%% test is_eq_exact SomeLabel Src Dst +%%% move Src Dst +%%% +%%% or in: +%%% +%%% test is_nil SomeLabel Dst +%%% move nil Dst +%%% +%%% or in: +%%% +%%% select_val Register FailLabel [... Literal => L1...] +%%% . +%%% . +%%% . +%%% L1: move Literal Register +%%% +%%% Also add extra labels to help the second backward pass. %%% forward(Is, Lc) -> forward(Is, gb_trees:empty(), Lc, []). +forward([{move,_,_}=Move|[{label,L}|_]=Is], D, Lc, Acc) -> + %% move/2 followed by jump/1 is optimized by backward/3. + forward([Move,{jump,{f,L}}|Is], D, Lc, Acc); +forward([{bif,_,_,_,_}=Bif|[{label,L}|_]=Is], D, Lc, Acc) -> + %% bif/4 followed by jump/1 is optimized by backward/3. + forward([Bif,{jump,{f,L}}|Is], D, Lc, Acc); forward([{block,[]}|Is], D, Lc, Acc) -> %% Empty blocks can prevent optimizations. forward(Is, D, Lc, Acc); @@ -205,6 +130,8 @@ forward([{label,Lbl}=LblI|[{move,Lit,Dst}|Is1]=Is0], D, Lc, Acc) -> _ -> Is0 %Keep move instruction. 
end, forward(Is, D, Lc, [LblI|Acc]); +forward([{test,is_eq_exact,_,[Same,Same]}|Is], D, Lc, Acc) -> + forward(Is, D, Lc, Acc); forward([{test,is_eq_exact,_,[Dst,Src]}=I, {block,[{set,[Dst],[Src],move}|Bl]}|Is], D, Lc, Acc) -> forward([I,{block,Bl}|Is], D, Lc, Acc); @@ -215,15 +142,13 @@ forward([{test,is_eq_exact,_,[Dst,Src]}=I,{move,Src,Dst}|Is], D, Lc, Acc) -> forward([I|Is], D, Lc, Acc); forward([{test,is_nil,_,[Dst]}=I,{move,nil,Dst}|Is], D, Lc, Acc) -> forward([I|Is], D, Lc, Acc); -forward([{test,is_eq_exact,_,_}=I|Is], D, Lc, Acc) -> - case Is of - [{label,_}|_] -> forward(Is, D, Lc, [I|Acc]); - _ -> forward(Is, D, Lc+1, [{label,Lc},I|Acc]) - end; -forward([{test,is_ne_exact,_,_}=I|Is], D, Lc, Acc) -> - case Is of - [{label,_}|_] -> forward(Is, D, Lc, [I|Acc]); - _ -> forward(Is, D, Lc+1, [{label,Lc},I|Acc]) +forward([{test,_,_,_}=I|Is]=Is0, D, Lc, Acc) -> + %% Help the second, backward pass to by inserting labels after + %% relational operators so that they can be skipped if they are + %% known to be true. + case useful_to_insert_label(Is0) of + false -> forward(Is, D, Lc, [I|Acc]); + true -> forward(Is, D, Lc+1, [{label,Lc},I|Acc]) end; forward([I|Is], D, Lc, Acc) -> forward(Is, D, Lc, [I|Acc]); @@ -239,9 +164,49 @@ update_value_dict([Lit,{f,Lbl}|T], Reg, D0) -> update_value_dict(T, Reg, D); update_value_dict([], _, D) -> D. +useful_to_insert_label([_,{label,_}|_]) -> + false; +useful_to_insert_label([{test,Op,_,_}|_]) -> + case Op of + is_lt -> true; + is_ge -> true; + is_eq_exact -> true; + is_ne_exact -> true; + _ -> false + end. + +%%% +%%% Scan instructions in reverse execution order and try to +%%% shortcut branch instructions. +%%% +%%% For example, in this code: +%%% +%%% move Literal Register +%%% jump L1 +%%% . +%%% . +%%% . +%%% L1: test is_{integer,atom} FailLabel Register +%%% select_val {x,0} FailLabel [... Literal => L2...] +%%% . +%%% . +%%% . +%%% L2: ... %%% -%%% Scan instructions in reverse execution order and remove dead code. +%%% the 'selectval' instruction will always transfer control to L2, +%%% so we can just as well jump to L2 directly by rewriting the +%%% first part of the sequence like this: %%% +%%% move Literal Register +%%% jump L2 +%%% +%%% If register Register is killed at label L2, we can remove the +%%% 'move' instruction, leaving just the 'jump' instruction: +%%% +%%% jump L2 +%%% +%%% These transformations may leave parts of the code unreachable. +%%% The beam_jump pass will remove the unreachable code. backward(Is, D) -> backward(Is, D, []). 
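
The shortcut just described can be pictured with a small self-contained sketch. This is not the pass itself (the real code threads the beam_utils label index through backward/3 and shortcut_select_label/4); it only shows how a known atom in a register resolves the real target of a jump that lands on a select_val. The CodeAt fun and the instruction shapes here are simplifying assumptions.

%% Sketch only: where does a jump to ToLbl really end up when Reg is
%% already known to hold the atom Lit?
shortcut_known_value(ToLbl, Reg, {atom,_}=Lit, CodeAt) ->
    case CodeAt(ToLbl) of
        [{test,is_atom,_,[Reg]},
         {select,select_val,Reg,{f,Fail},List}|_] ->
            select_target(List, Lit, Fail);
        _ ->
            ToLbl
    end.

select_target([Lit,{f,L}|_], Lit, _Fail) -> L;
select_target([_,{f,_}|T], Lit, Fail) -> select_target(T, Lit, Fail);
select_target([], _Lit, Fail) -> Fail.
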
@@ -277,15 +242,8 @@ backward([{select,select_val,Reg,{f,Fail0},List0}|Is], D, Acc) -> Fail = shortcut_bs_test(Fail1, Is, D), Sel = {select,select_val,Reg,{f,Fail},List}, backward(Is, D, [Sel|Acc]); -backward([{jump,{f,To0}},{move,Src,Reg}=Move0|Is], D, Acc) -> - {To,Move} = case Src of - {atom,Val0} -> - To1 = shortcut_select_label(To0, Reg, Val0, D), - {To2,Val} = shortcut_boolean_label(To1, Reg, Val0, D), - {To2,{move,{atom,Val},Reg}}; - _ -> - {shortcut_label(To0, D),Move0} - end, +backward([{jump,{f,To0}},{move,Src,Reg}=Move|Is], D, Acc) -> + To = shortcut_select_label(To0, Reg, Src, D), Jump = {jump,{f,To}}, case beam_utils:is_killed_at(Reg, To, D) of false -> backward([Move|Is], D, [Jump|Acc]); @@ -301,28 +259,25 @@ backward([{test,bs_start_match2,{f,To0},Live,[Src|_]=Info,Dst}|Is], D, Acc) -> To = shortcut_bs_start_match(To0, Src, D), I = {test,bs_start_match2,{f,To},Live,Info,Dst}, backward(Is, D, [I|Acc]); -backward([{test,is_eq_exact,{f,To0},[Reg,{atom,Val}]=Ops}|Is], D, Acc) -> - To1 = shortcut_bs_test(To0, Is, D), - To = shortcut_fail_label(To1, Reg, Val, D), - I = combine_eqs(To, Ops, D, Acc), - backward(Is, D, [I|Acc]); backward([{test,Op,{f,To0},Ops0}|Is], D, Acc) -> To1 = shortcut_bs_test(To0, Is, D), To2 = shortcut_label(To1, D), + To3 = shortcut_rel_op(To2, Op, Ops0, D), + %% Try to shortcut a repeated test: %% %% test Op {f,Fail1} Operands test Op {f,Fail2} Operands %% . . . ==> ... %% Fail1: test Op {f,Fail2} Operands Fail1: test Op {f,Fail2} Operands %% - To = case beam_utils:code_at(To2, D) of - [{test,Op,{f,To3},Ops}|_] -> + To = case beam_utils:code_at(To3, D) of + [{test,Op,{f,To4},Ops}|_] -> case equal_ops(Ops0, Ops) of - true -> To3; - false -> To2 + true -> To4; + false -> To3 end; _Code -> - To2 + To3 end, I = case Op of is_eq_exact -> combine_eqs(To, Ops0, D, Acc); @@ -367,8 +322,8 @@ equal_ops([Op|T0], [Op|T1]) -> equal_ops([], []) -> true; equal_ops(_, _) -> false. -shortcut_select_list([{_,Val}=Lit,{f,To0}|T], Reg, D, Acc) -> - To = shortcut_select_label(To0, Reg, Val, D), +shortcut_select_list([Lit,{f,To0}|T], Reg, D, Acc) -> + To = shortcut_select_label(To0, Reg, Lit, D), shortcut_select_list(T, Reg, D, [{f,To},Lit|Acc]); shortcut_select_list([], _, _, Acc) -> reverse(Acc). @@ -378,58 +333,29 @@ shortcut_label(To0, D) -> _ -> To0 end. -shortcut_select_label(To0, Reg, Val, D) -> - case beam_utils:code_at(To0, D) of - [{jump,{f,To}}|_] -> - shortcut_select_label(To, Reg, Val, D); - [{test,is_atom,_,[Reg]},{select,select_val,Reg,{f,Fail},Map}|_] -> - To = find_select_val(Map, Val, Fail), - shortcut_select_label(To, Reg, Val, D); - [{test,is_eq_exact,{f,_},[Reg,{atom,Val}]},{label,To}|_] when is_atom(Val) -> - shortcut_select_label(To, Reg, Val, D); - [{test,is_eq_exact,{f,_},[Reg,{atom,Val}]},{jump,{f,To}}|_] when is_atom(Val) -> - shortcut_select_label(To, Reg, Val, D); - [{test,is_eq_exact,{f,To},[Reg,{atom,AnotherVal}]}|_] - when is_atom(Val), Val =/= AnotherVal -> - shortcut_select_label(To, Reg, Val, D); - [{test,is_ne_exact,{f,To},[Reg,{atom,Val}]}|_] when is_atom(Val) -> - shortcut_select_label(To, Reg, Val, D); - [{test,is_ne_exact,{f,_},[Reg,{atom,_}]},{label,To}|_] when is_atom(Val) -> - shortcut_select_label(To, Reg, Val, D); - [{test,is_tuple,{f,To},[Reg]}|_] when is_atom(Val) -> - shortcut_select_label(To, Reg, Val, D); - _ -> - To0 - end. +shortcut_select_label(To, Reg, Lit, D) -> + shortcut_rel_op(To, is_ne_exact, [Reg,Lit], D). 
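
For the purpose of shortcutting, shortcut_select_label/4 can treat a select_val over Reg as nothing more than a chain of is_ne_exact tests, which is why it simply delegates to shortcut_rel_op/4 with is_ne_exact. A throwaway sketch of that equivalence, with assumed instruction shapes:

%% Sketch: a select_val is equivalent to testing each value in turn;
%% is_ne_exact branches to L exactly when Reg =:= Val, just as the
%% corresponding entry of the select_val would.
expand_select_val(Reg, Fail, [Val,{f,L}|T]) ->
    [{test,is_ne_exact,{f,L},[Reg,Val]}|expand_select_val(Reg, Fail, T)];
expand_select_val(_Reg, Fail, []) ->
    [{jump,{f,Fail}}].
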
-shortcut_fail_label(To0, Reg, Val, D) -> - case beam_utils:code_at(To0, D) of - [{jump,{f,To}}|_] -> - shortcut_fail_label(To, Reg, Val, D); - [{test,is_eq_exact,{f,To},[Reg,{atom,Val}]}|_] when is_atom(Val) -> - shortcut_fail_label(To, Reg, Val, D); - _ -> - To0 - end. - -shortcut_boolean_label(To0, Reg, Bool0, D) when is_boolean(Bool0) -> - case beam_utils:code_at(To0, D) of - [{line,_},{bif,'not',_,[Reg],Reg},{jump,{f,To}}|_] -> - Bool = not Bool0, - {shortcut_select_label(To, Reg, Bool, D),Bool}; - _ -> - {To0,Bool0} - end; -shortcut_boolean_label(To, _, Bool, _) -> {To,Bool}. - -find_select_val([{_,Val},{f,To}|_], Val, _) -> To; -find_select_val([{_,_}, {f,_}|T], Val, Fail) -> - find_select_val(T, Val, Fail); -find_select_val([], _, Fail) -> Fail. +%% Replace a comparison operator with a test instruction and a jump. +%% For example, if we have this code: +%% +%% bif '=:=' Fail Src1 Src2 {x,0} +%% jump L1 +%% . +%% . +%% . +%% L1: select_val {x,0} FailLabel [... true => L2..., ...false => L3...] +%% +%% the first two instructions can be replaced with +%% +%% test is_eq_exact L3 Src1 Src2 +%% jump L2 +%% +%% provided that {x,0} is killed at both L2 and L3. replace_comp_op(To, Reg, Op, Ops, D) -> - False = comp_op_find_shortcut(To, Reg, false, D), - True = comp_op_find_shortcut(To, Reg, true, D), + False = comp_op_find_shortcut(To, Reg, {atom,false}, D), + True = comp_op_find_shortcut(To, Reg, {atom,true}, D), [bif_to_test(Op, Ops, False),{jump,{f,True}}]. comp_op_find_shortcut(To0, Reg, Val, D) -> @@ -461,9 +387,9 @@ not_possible() -> throw(not_possible). %% %% is_eq_exact F1 Reg Lit1 select_val Reg F2 [ Lit1 L1 %% L1: . Lit2 L2 ] -%% . -%% . ==> -%% . +%% . +%% . ==> +%% . %% F1: is_eq_exact F2 Reg Lit2 F1: is_eq_exact F2 Reg Lit2 %% L2: .... L2: %% @@ -488,31 +414,26 @@ remove_from_list(Lit, [Val,{f,_}=Fail|T]) -> [Val,Fail|remove_from_list(Lit, T)]; remove_from_list(_, []) -> []. -%% shortcut_bs_test(TargetLabel, [Instruction], D) -> TargetLabel' -%% Try to shortcut the failure label for a bit syntax matching. -%% We know that the binary contains at least Bits bits after -%% the latest save point. +%% shortcut_bs_test(TargetLabel, ReversedInstructions, D) -> TargetLabel' +%% Try to shortcut the failure label for bit syntax matching. shortcut_bs_test(To, Is, D) -> shortcut_bs_test_1(beam_utils:code_at(To, D), Is, To, D). -shortcut_bs_test_1([{bs_restore2,Reg,SavePoint}|Is], PrevIs, To, D) -> - shortcut_bs_test_2(Is, {Reg,SavePoint}, PrevIs, To, D); -shortcut_bs_test_1([_|_], _, To, _) -> To. - -shortcut_bs_test_2([{label,_}|Is], Save, PrevIs, To, D) -> - shortcut_bs_test_2(Is, Save, PrevIs, To, D); -shortcut_bs_test_2([{test,bs_test_tail2,{f,To},[_,TailBits]}|_], - {Reg,_Point} = RP, PrevIs, To0, D) -> - case count_bits_matched(PrevIs, RP, 0) of +shortcut_bs_test_1([{bs_restore2,Reg,SavePoint}, + {label,_}, + {test,bs_test_tail2,{f,To},[_,TailBits]}|_], + PrevIs, To0, D) -> + case count_bits_matched(PrevIs, {Reg,SavePoint}, 0) of Bits when Bits > TailBits -> %% This instruction will fail. We know because a restore has been - %% done from the previous point SavePoint in the binary, and we also know - %% that the binary contains at least Bits bits from SavePoint. + %% done from the previous point SavePoint in the binary, and we + %% also know that the binary contains at least Bits bits from + %% SavePoint. %% %% Since we will skip a bs_restore2 if we shortcut to label To, - %% we must now make sure that code at To does not depend on the position - %% in the context in any way. 
+ %% we must now make sure that code at To does not depend on + %% the position in the context in any way. case shortcut_bs_pos_used(To, Reg, D) of false -> To; true -> To0 @@ -520,8 +441,19 @@ shortcut_bs_test_2([{test,bs_test_tail2,{f,To},[_,TailBits]}|_], _Bits -> To0 end; -shortcut_bs_test_2([_|_], _, _, To, _) -> To. +shortcut_bs_test_1([_|_], _, To, _) -> To. +%% counts_bits_matched(ReversedInstructions, SavePoint, Bits) -> Bits' +%% Given a reversed instruction stream, determine the minimum number +%% of bits that will be matched by bit syntax instructions up to the +%% given save point. + +count_bits_matched([{test,bs_get_utf8,{f,_},_,_,_}|Is], SavePoint, Bits) -> + count_bits_matched(Is, SavePoint, Bits+8); +count_bits_matched([{test,bs_get_utf16,{f,_},_,_,_}|Is], SavePoint, Bits) -> + count_bits_matched(Is, SavePoint, Bits+16); +count_bits_matched([{test,bs_get_utf32,{f,_},_,_,_}|Is], SavePoint, Bits) -> + count_bits_matched(Is, SavePoint, Bits+32); count_bits_matched([{test,_,_,_,[_,Sz,U,{field_flags,_}],_}|Is], SavePoint, Bits) -> case Sz of {integer,N} -> count_bits_matched(Is, SavePoint, Bits+N*U); @@ -545,20 +477,332 @@ shortcut_bs_pos_used_1(Is, Reg, D) -> not beam_utils:is_killed(Reg, Is, D). %% shortcut_bs_start_match(TargetLabel, Reg) -> TargetLabel -%% A failing bs_start_match2 instruction means that the source -%% cannot be a binary, so there is no need to jump bs_context_to_binary/1 -%% or another bs_start_match2 instruction. +%% A failing bs_start_match2 instruction means that the source (Reg) +%% cannot be a binary. That means that it is safe to skip +%% bs_context_to_binary instructions operating on Reg, and +%% bs_start_match2 instructions operating on Reg. shortcut_bs_start_match(To, Reg, D) -> - shortcut_bs_start_match_1(beam_utils:code_at(To, D), Reg, To). + shortcut_bs_start_match_1(beam_utils:code_at(To, D), Reg, To, D). + +shortcut_bs_start_match_1([{bs_context_to_binary,Reg}|Is], Reg, To, D) -> + shortcut_bs_start_match_1(Is, Reg, To, D); +shortcut_bs_start_match_1([{jump,{f,To}}|_], Reg, _, D) -> + Code = beam_utils:code_at(To, D), + shortcut_bs_start_match_1(Code, Reg, To, D); +shortcut_bs_start_match_1([{test,bs_start_match2,{f,To},_,[Reg|_],_}|_], + Reg, _, D) -> + Code = beam_utils:code_at(To, D), + shortcut_bs_start_match_1(Code, Reg, To, D); +shortcut_bs_start_match_1(_, _, To, _) -> + To. -shortcut_bs_start_match_1([{bs_context_to_binary,Reg}|Is], Reg, To) -> - shortcut_bs_start_match_2(Is, Reg, To); -shortcut_bs_start_match_1(_, _, To) -> To. +%% shortcut_rel_op(FailLabel, Operator, [Operand], D) -> FailLabel' +%% Try to shortcut the given test instruction. Example: +%% +%% is_ge L1 {x,0} 48 +%% . +%% . +%% . +%% L1: is_ge L2 {x,0} 65 +%% +%% The first test instruction can be rewritten to "is_ge L2 {x,0} 48" +%% since the instruction at L1 will also fail. +%% +%% If there are instructions between L1 and the other test instruction +%% it may still be possible to do the shortcut. For example: +%% +%% L1: is_eq_exact L3 {x,0} 92 +%% is_ge L2 {x,0} 65 +%% +%% Since the first test instruction failed, we know that {x,0} must +%% be less than 48; therefore, we know that {x,0} cannot be equal to +%% 92 and the jump to L3 cannot happen. + +shortcut_rel_op(To, Op, Ops, D) -> + case normalize_op({test,Op,{f,To},Ops}) of + {{NormOp,A,B},_} -> + Normalized = {negate_op(NormOp),A,B}, + shortcut_rel_op_fp(To, Normalized, D); + {_,_} -> + To; + error -> + To + end. 
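
The comment above can be made concrete with a tiny sketch that is separate from the will_succeed/2 logic further down: negating the operator of the failed test yields a fact about the register, and that fact can force a later test to branch. Tuple shapes and integer-only literals are simplifying assumptions here.

%% Sketch: a failed test becomes a known fact about Reg ...
fact_after_failure({test,is_ge,_,[Reg,{integer,N}]}) -> {'<',Reg,N};
fact_after_failure({test,is_lt,_,[Reg,{integer,N}]}) -> {'>=',Reg,N}.

%% ... and, given Reg < A, any is_ge or is_eq_exact test against B >= A
%% must fail and take its branch. For the example above: a failed
%% "is_ge L1 {x,0} 48" gives the fact {'<',{x,0},48}, so the
%% "is_ge L2 {x,0} 65" found at L1 must branch to L2, and {x,0} can
%% never be equal to 92 either.
must_branch({'<',Reg,A}, {test,is_ge,{f,L},[Reg,{integer,B}]}) when B >= A ->
    {yes,L};
must_branch({'<',Reg,A}, {test,is_eq_exact,{f,L},[Reg,{integer,B}]}) when B >= A ->
    {yes,L};
must_branch(_, _) ->
    unknown.
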
-shortcut_bs_start_match_2([{jump,{f,To}}|_], _, _) -> - To; -shortcut_bs_start_match_2([{test,bs_start_match2,{f,To},_,[Reg|_],_}|_], Reg, _) -> - To; -shortcut_bs_start_match_2(_Is, _Reg, To) -> - To. +shortcut_rel_op_fp(To0, Normalized, D) -> + Code = beam_utils:code_at(To0, D), + case shortcut_any_label(Code, Normalized) of + error -> + To0; + To -> + shortcut_rel_op_fp(To, Normalized, D) + end. + +%% shortcut_any_label([Instruction], PrevCondition) -> FailLabel | error +%% Using PrevCondition (a previous condition known to be true), +%% try to shortcut to another failure label. + +shortcut_any_label([{jump,{f,Lbl}}|_], _Prev) -> + Lbl; +shortcut_any_label([{label,Lbl}|_], _Prev) -> + Lbl; +shortcut_any_label([{select,select_val,R,{f,Fail},L}|_], Prev) -> + shortcut_selectval(L, R, Fail, Prev); +shortcut_any_label([I|Is], Prev) -> + case normalize_op(I) of + error -> + error; + {Normalized,Fail} -> + %% We have a relational operator. + case will_succeed(Prev, Normalized) of + no -> + %% This test instruction will always branch + %% to Fail. + Fail; + yes -> + %% This test instruction will never branch, + %% so we will look at the next instruction. + shortcut_any_label(Is, Prev); + maybe -> + %% May or may not branch. From now on, we can only + %% shortcut to the this specific failure label + %% Fail. + shortcut_specific_label(Is, Fail, Prev) + end + end. + +%% shortcut_specific_label([Instruction], FailLabel, PrevCondition) -> +%% FailLabel | error +%% We have previously encountered a test instruction that may or +%% may not branch to FailLabel. Therefore we are only allowed +%% to do the shortcut to the same fail label (FailLabel). + +shortcut_specific_label([{label,_}|Is], Fail, Prev) -> + shortcut_specific_label(Is, Fail, Prev); +shortcut_specific_label([{select,select_val,R,{f,F},L}|_], Fail, Prev) -> + case shortcut_selectval(L, R, F, Prev) of + Fail -> Fail; + _ -> error + end; +shortcut_specific_label([I|Is], Fail, Prev) -> + case normalize_op(I) of + error -> + error; + {Normalized,Fail} -> + case will_succeed(Prev, Normalized) of + no -> + %% Will branch to FailLabel. + Fail; + yes -> + %% Will definitely never branch. + shortcut_specific_label(Is, Fail, Prev); + maybe -> + %% May branch, but still OK since it will branch + %% to FailLabel. + shortcut_specific_label(Is, Fail, Prev) + end; + {Normalized,_} -> + %% This test instruction will branch to a different + %% fail label, if it branches at all. + case will_succeed(Prev, Normalized) of + yes -> + %% Still OK, since the branch will never be + %% taken. + shortcut_specific_label(Is, Fail, Prev); + no -> + %% Give up. The branch will definitely be taken + %% to a different fail label. + error; + maybe -> + %% Give up. If the branch is taken, it will be + %% to a different fail label. + error + end + end. + + +%% shortcut_selectval(List, Reg, Fail, PrevCond) -> FailLabel | error +%% Try to shortcut a selectval instruction. A selectval instruction +%% is equivalent to the following instruction sequence: +%% +%% is_ne_exact L1 Reg Value1 +%% . +%% . +%% . +%% is_ne_exact LN Reg ValueN +%% jump DefaultFailLabel +%% +shortcut_selectval([Val,{f,Lbl}|T], R, Fail, Prev) -> + case will_succeed(Prev, {'=/=',R,get_literal(Val)}) of + yes -> shortcut_selectval(T, R, Fail, Prev); + no -> Lbl; + maybe -> error + end; +shortcut_selectval([], _, Fail, _) -> Fail. + +%% will_succeed(PrevCondition, Condition) -> yes | no | maybe +%% PrevCondition is a condition known to be true. This function +%% will tell whether Condition will succeed. 
+ +will_succeed({Op1,Reg,A}, {Op2,Reg,B}) -> + will_succeed_1(Op1, A, Op2, B); +will_succeed({'=:=',Reg,{literal,A}}, {TypeTest,Reg}) -> + case erlang:TypeTest(A) of + false -> no; + true -> yes + end; +will_succeed({_,_,_}, maybe) -> + maybe; +will_succeed({_,_,_}, Test) when is_tuple(Test) -> + maybe. + +will_succeed_1('=:=', A, '<', B) -> + if + B =< A -> no; + true -> yes + end; +will_succeed_1('=:=', A, '=<', B) -> + if + B < A -> no; + true -> yes + end; +will_succeed_1('=:=', A, '=:=', B) -> + if + A =:= B -> yes; + true -> no + end; +will_succeed_1('=:=', A, '=/=', B) -> + if + A =:= B -> no; + true -> yes + end; +will_succeed_1('=:=', A, '>=', B) -> + if + B > A -> no; + true -> yes + end; +will_succeed_1('=:=', A, '>', B) -> + if + B >= A -> no; + true -> yes + end; + +will_succeed_1('=/=', A, '=/=', B) when A =:= B -> yes; +will_succeed_1('=/=', A, '=:=', B) when A =:= B -> no; + +will_succeed_1('<', A, '=:=', B) when B >= A -> no; +will_succeed_1('<', A, '=/=', B) when B >= A -> yes; +will_succeed_1('<', A, '<', B) when B >= A -> yes; +will_succeed_1('<', A, '=<', B) when B > A -> yes; +will_succeed_1('<', A, '>=', B) when B > A -> no; +will_succeed_1('<', A, '>', B) when B >= A -> no; + +will_succeed_1('=<', A, '=:=', B) when B > A -> no; +will_succeed_1('=<', A, '=/=', B) when B > A -> yes; +will_succeed_1('=<', A, '<', B) when B > A -> yes; +will_succeed_1('=<', A, '=<', B) when B >= A -> yes; +will_succeed_1('=<', A, '>=', B) when B > A -> no; +will_succeed_1('=<', A, '>', B) when B >= A -> no; + +will_succeed_1('>=', A, '=:=', B) when B < A -> no; +will_succeed_1('>=', A, '=/=', B) when B < A -> yes; +will_succeed_1('>=', A, '<', B) when B =< A -> no; +will_succeed_1('>=', A, '=<', B) when B < A -> no; +will_succeed_1('>=', A, '>=', B) when B =< A -> yes; +will_succeed_1('>=', A, '>', B) when B < A -> yes; + +will_succeed_1('>', A, '=:=', B) when B =< A -> no; +will_succeed_1('>', A, '=/=', B) when B =< A -> yes; +will_succeed_1('>', A, '<', B) when B =< A -> no; +will_succeed_1('>', A, '=<', B) when B < A -> no; +will_succeed_1('>', A, '>=', B) when B =< A -> yes; +will_succeed_1('>', A, '>', B) when B < A -> yes; + +will_succeed_1(_, _, _, _) -> maybe. + +%% normalize_op(Instruction) -> {Normalized,FailLabel} | error +%% Normalized = {Operator,Register,Literal} | +%% {TypeTest,Register} | +%% maybe +%% Operation = '<' | '=<' | '=:=' | '=/=' | '>=' | '>' +%% TypeTest = is_atom | is_integer ... +%% Literal = {literal,Term} +%% +%% Normalize a relational operator to facilitate further +%% comparisons between operators. Always make the register +%% operand the first operand. Thus the following instruction: +%% +%% {test,is_ge,{f,99},{integer,13},{x,0}} +%% +%% will be normalized to: +%% +%% {'=<',{x,0},{literal,13}} +%% +%% NOTE: Bit syntax test instructions are scary. They may change the +%% state of match contexts and update registers, so we don't dare +%% mess with them. 
+ +normalize_op({test,is_ge,{f,Fail},Ops}) -> + normalize_op_1('>=', Ops, Fail); +normalize_op({test,is_lt,{f,Fail},Ops}) -> + normalize_op_1('<', Ops, Fail); +normalize_op({test,is_eq_exact,{f,Fail},Ops}) -> + normalize_op_1('=:=', Ops, Fail); +normalize_op({test,is_ne_exact,{f,Fail},Ops}) -> + normalize_op_1('=/=', Ops, Fail); +normalize_op({test,is_nil,{f,Fail},[R]}) -> + normalize_op_1('=:=', [R,nil], Fail); +normalize_op({test,Op,{f,Fail},[R]}) -> + case erl_internal:new_type_test(Op, 1) of + true -> {{Op,R},Fail}; + false -> {maybe,Fail} + end; +normalize_op({test,_,{f,Fail},_}=I) -> + case beam_utils:is_pure_test(I) of + true -> {maybe,Fail}; + false -> error + end; +normalize_op(_) -> + error. + +normalize_op_1(Op, [Op1,Op2], Fail) -> + case {get_literal(Op1),get_literal(Op2)} of + {error,error} -> + %% Both operands are registers. + {maybe,Fail}; + {error,Lit} -> + {{Op,Op1,Lit},Fail}; + {Lit,error} -> + {{turn_op(Op),Op2,Lit},Fail}; + {_,_} -> + %% Both operands are literals. Can probably only + %% happen if the Core Erlang optimizations passes were + %% turned off, so don't bother trying to do something + %% smart here. + {maybe,Fail} + end. + +turn_op('<') -> '>'; +turn_op('>=') -> '=<'; +turn_op('=:='=Op) -> Op; +turn_op('=/='=Op) -> Op. + +negate_op('>=') -> '<'; +negate_op('<') -> '>='; +negate_op('=<') -> '>'; +negate_op('>') -> '=<'; +negate_op('=:=') -> '=/='; +negate_op('=/=') -> '=:='. + +get_literal({atom,Val}) -> + {literal,Val}; +get_literal({integer,Val}) -> + {literal,Val}; +get_literal({float,Val}) -> + {literal,Val}; +get_literal(nil) -> + {literal,[]}; +get_literal({literal,_}=Lit) -> + Lit; +get_literal({_,_}) -> error. diff --git a/lib/compiler/src/beam_flatten.erl b/lib/compiler/src/beam_flatten.erl index 46835bece1..54e06df995 100644 --- a/lib/compiler/src/beam_flatten.erl +++ b/lib/compiler/src/beam_flatten.erl @@ -63,8 +63,7 @@ norm({set,[],[S,D],{set_tuple_element,I}}) -> {set_tuple_element,S,D,I}; norm({set,[D1,D2],[S],get_list}) -> {get_list,S,D1,D2}; norm({set,[D],[S|Puts],{alloc,R,{put_map,Op,F}}}) -> {put_map,F,Op,S,D,R,{list,Puts}}; -norm({set,Gets,[S],{get_map_elements,F}}) -> - {get_map_elements,F,S,{list,Gets}}; +%% get_map_elements is always handled in beam_split (moved out of block) norm({set,[],[],remove_message}) -> remove_message; norm({set,[],[],fclearerror}) -> fclearerror; norm({set,[],[],fcheckerror}) -> {fcheckerror,{f,0}}. diff --git a/lib/compiler/src/beam_jump.erl b/lib/compiler/src/beam_jump.erl index b952139f2c..ba71d4efae 100644 --- a/lib/compiler/src/beam_jump.erl +++ b/lib/compiler/src/beam_jump.erl @@ -166,6 +166,12 @@ share_1([{label,L}=Lbl|Is], Dict0, Seq, Acc) -> end; share_1([{func_info,_,_,_}=I|Is], _, [], Acc) -> reverse(Is, [I|Acc]); +share_1([{'try',_,_}=I|Is], Dict0, Seq, Acc) -> + Dict = clean_non_sharable(Dict0), + share_1(Is, Dict, [I|Seq], Acc); +share_1([{try_case,_}=I|Is], Dict0, Seq, Acc) -> + Dict = clean_non_sharable(Dict0), + share_1(Is, Dict, [I|Seq], Acc); share_1([I|Is], Dict, Seq, Acc) -> case is_unreachable_after(I) of false -> @@ -174,6 +180,24 @@ share_1([I|Is], Dict, Seq, Acc) -> share_1(Is, Dict, [I], Acc) end. +clean_non_sharable(Dict) -> + %% We are passing in or out of a 'try' block. Remove + %% sequences that should not shared over the boundaries + %% of a 'try' block. Since the end of the sequence must match, + %% the only possible match between a sequence outside and + %% a sequence inside the 'try' block is a sequence that ends + %% with an instruction that causes an exception. 
Any sequence + %% that causes an exception must contain a line/1 instruction. + dict:filter(fun(K, _V) -> sharable_with_try(K) end, Dict). + +sharable_with_try([{line,_}|_]) -> + %% This sequence may cause an exception and may potentially + %% match a sequence on the other side of the 'try' block + %% boundary. + false; +sharable_with_try([_|Is]) -> + sharable_with_try(Is); +sharable_with_try([]) -> true. %% Eliminate all fallthroughs. Return the result reversed. @@ -295,12 +319,6 @@ opt([{test,_,{f,_}=Lbl,_,_,_}=I|Is], Acc, St) -> opt(Is, [I|Acc], label_used(Lbl, St)); opt([{select,_,_R,Fail,Vls}=I|Is], Acc, St) -> skip_unreachable(Is, [I|Acc], label_used([Fail|Vls], St)); -opt([{label,L}=I|Is], Acc, #st{entry=L}=St) -> - %% NEVER move the entry label. - opt(Is, [I|Acc], St); -opt([{label,L1},{jump,{f,L2}}=I|Is], [Prev|Acc], St0) -> - St = St0#st{mlbl=dict:append(L2, L1, St0#st.mlbl)}, - opt([Prev,I|Is], Acc, label_used({f,L2}, St)); opt([{label,Lbl}=I|Is], Acc, #st{mlbl=Mlbl}=St0) -> case dict:find(Lbl, Mlbl) of {ok,Lbls} -> @@ -310,9 +328,20 @@ opt([{label,Lbl}=I|Is], Acc, #st{mlbl=Mlbl}=St0) -> insert_labels([Lbl|Lbls], Is, Acc, St); error -> opt(Is, [I|Acc], St0) end; -opt([{jump,{f,Lbl}},{label,Lbl}=I|Is], Acc, St) -> - opt([I|Is], Acc, St); -opt([{jump,Lbl}=I|Is], Acc, St) -> +opt([{jump,{f,_}=X}|[{label,_},{jump,X}|_]=Is], Acc, St) -> + opt(Is, Acc, St); +opt([{jump,{f,Lbl}}|[{label,Lbl}|_]=Is], Acc, St) -> + opt(Is, Acc, St); +opt([{jump,{f,L}=Lbl}=I|Is], Acc0, #st{mlbl=Mlbl0}=St0) -> + %% All labels before this jump instruction should now be + %% moved to the location of the jump's target. + {Lbls,Acc} = collect_labels(Acc0, St0), + St = case Lbls of + [] -> St0; + [_|_] -> + Mlbl = dict:append_list(L, Lbls, Mlbl0), + St0#st{mlbl=Mlbl} + end, skip_unreachable(Is, [I|Acc], label_used(Lbl, St)); %% Optimization: quickly handle some common instructions that don't %% have any failure labels and where is_unreachable_after(I) =:= false. @@ -349,6 +378,17 @@ insert_fc_labels([L|Ls], Mlbl, Acc0) -> end; insert_fc_labels([], _, Acc) -> Acc. +collect_labels(Is, #st{entry=Entry}) -> + collect_labels_1(Is, Entry, []). + +collect_labels_1([{label,Entry}|_]=Is, Entry, Acc) -> + %% Never move the entry label. + {Acc,Is}; +collect_labels_1([{label,L}|Is], Entry, Acc) -> + collect_labels_1(Is, Entry, [L|Acc]); +collect_labels_1(Is, _Entry, Acc) -> + {Acc,Is}. + %% label_defined(Is, Label) -> true | false. %% Test whether the label Label is defined at the start of the instruction %% sequence, possibly preceeded by other label definitions. @@ -435,14 +475,14 @@ is_label_used_in(Lbl, Is) -> is_label_used_in_1(Is, Lbl, gb_sets:empty()). is_label_used_in_1([{block,Block}|Is], Lbl, Empty) -> - lists:any(fun(I) -> is_label_used_in_2(I, Lbl) end, Block) + lists:any(fun(I) -> is_label_used_in_block(I, Lbl) end, Block) orelse is_label_used_in_1(Is, Lbl, Empty); is_label_used_in_1([I|Is], Lbl, Empty) -> Used = ulbl(I, Empty), gb_sets:is_member(Lbl, Used) orelse is_label_used_in_1(Is, Lbl, Empty); is_label_used_in_1([], _, _) -> false. -is_label_used_in_2({set,_,_,Info}, Lbl) -> +is_label_used_in_block({set,_,_,Info}, Lbl) -> case Info of {bif,_,{f,F}} -> F =:= Lbl; {alloc,_,{gc_bif,_,{f,F}}} -> F =:= Lbl; @@ -452,7 +492,6 @@ is_label_used_in_2({set,_,_,Info}, Lbl) -> {put_tuple,_} -> false; {get_tuple_element,_} -> false; {set_tuple_element,_} -> false; - {get_map_elements,{f,F}} -> F =:= Lbl; {line,_} -> false; _ when is_atom(Info) -> false end. 
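
To make the filter above concrete, here is what sharable_with_try/1 answers for two invented instruction sequences (made up for illustration, not taken from real compiler output): a sequence that can raise an exception, and therefore carries a line/1 instruction, is dropped at the 'try' boundary, while one that cannot raise is kept.

%% Sketch: only sequences without a line/1 instruction survive
%% clean_non_sharable/1.
sharable_with_try_examples() ->
    false = sharable_with_try([{line,[]},
                               {call_ext,1,{extfunc,erlang,error,1}}]),
    true = sharable_with_try([{move,{atom,ok},{x,0}},return]),
    ok.
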
diff --git a/lib/compiler/src/beam_peep.erl b/lib/compiler/src/beam_peep.erl index 97a8c7ba70..5abacc8d5d 100644 --- a/lib/compiler/src/beam_peep.erl +++ b/lib/compiler/src/beam_peep.erl @@ -108,14 +108,14 @@ peep([{test,Op,_,Ops}=I|Is], SeenTests0, Acc) -> %% has succeeded. peep(Is, gb_sets:empty(), [I|Acc]); true -> - Test = {Op,Ops}, - case gb_sets:is_element(Test, SeenTests0) of + case is_test_redundant(Op, Ops, SeenTests0) of true -> - %% This test has already succeeded and + %% This test or a similar test has already succeeded and %% is therefore redundant. peep(Is, SeenTests0, Acc); false -> %% Remember that we have seen this test. + Test = {Op,Ops}, SeenTests = gb_sets:insert(Test, SeenTests0), peep(Is, SeenTests, [I|Acc]) end @@ -136,6 +136,15 @@ peep([I|Is], _, Acc) -> peep(Is, gb_sets:empty(), [I|Acc]); peep([], _, Acc) -> reverse(Acc). +is_test_redundant(Op, Ops, Seen) -> + gb_sets:is_element({Op,Ops}, Seen) orelse + is_test_redundant_1(Op, Ops, Seen). + +is_test_redundant_1(is_boolean, [R], Seen) -> + gb_sets:is_element({is_eq_exact,[R,{atom,false}]}, Seen) orelse + gb_sets:is_element({is_eq_exact,[R,{atom,true}]}, Seen); +is_test_redundant_1(_, _, _) -> false. + kill_seen(Dst, Seen0) -> gb_sets:from_ordset(kill_seen_1(gb_sets:to_list(Seen0), Dst)). diff --git a/lib/compiler/src/beam_split.erl b/lib/compiler/src/beam_split.erl index 688bba9a94..0c62b0bf3d 100644 --- a/lib/compiler/src/beam_split.erl +++ b/lib/compiler/src/beam_split.erl @@ -53,8 +53,8 @@ split_block([{set,[D],[S|Puts],{alloc,R,{put_map,Op,{f,Lbl}=Fail}}}|Is], Bl, Acc) when Lbl =/= 0 -> split_block(Is, [], [{put_map,Fail,Op,S,D,R,{list,Puts}}| make_block(Bl, Acc)]); -split_block([{set,Gets,[S],{get_map_elements,{f,Lbl}=Fail}}|Is], Bl, Acc) - when Lbl =/= 0 -> +split_block([{set,Ds,[S|Ss],{get_map_elements,Fail}}|Is], Bl, Acc) -> + Gets = beam_utils:join_even(Ss,Ds), split_block(Is, [], [{get_map_elements,Fail,S,{list,Gets}}|make_block(Bl, Acc)]); split_block([{set,[R],[],{'catch',L}}|Is], Bl, Acc) -> split_block(Is, [], [{'catch',R,L}|make_block(Bl, Acc)]); diff --git a/lib/compiler/src/beam_type.erl b/lib/compiler/src/beam_type.erl index 58c0f765ae..26c933481a 100644 --- a/lib/compiler/src/beam_type.erl +++ b/lib/compiler/src/beam_type.erl @@ -106,6 +106,20 @@ simplify_basic_1([{test,test_arity,_,[R,Arity]}=I|Is], Ts0, Acc) -> Ts = update(I, Ts0), simplify_basic_1(Is, Ts, [I|Acc]) end; +simplify_basic_1([{test,is_map,_,[R]}=I|Is], Ts0, Acc) -> + case tdb_find(R, Ts0) of + map -> simplify_basic_1(Is, Ts0, Acc); + _Other -> + Ts = update(I, Ts0), + simplify_basic_1(Is, Ts, [I|Acc]) + end; +simplify_basic_1([{test,is_nonempty_list,_,[R]}=I|Is], Ts0, Acc) -> + case tdb_find(R, Ts0) of + nonempty_list -> simplify_basic_1(Is, Ts0, Acc); + _Other -> + Ts = update(I, Ts0), + simplify_basic_1(Is, Ts, [I|Acc]) + end; simplify_basic_1([{test,is_eq_exact,Fail,[R,{atom,_}=Atom]}=I|Is0], Ts0, Acc0) -> Acc = case tdb_find(R, Ts0) of {atom,_}=Atom -> Acc0; @@ -230,7 +244,7 @@ clearerror([], OrigIs) -> [{set,[],[],fclearerror}|OrigIs]. %% Combine two blocks and eliminate any move instructions that assign %% to registers that are killed later in the block. %% -merge_blocks(B1, [{'%live',_}|B2]) -> +merge_blocks(B1, [{'%live',_,_}|B2]) -> merge_blocks_1(B1++[{set,[],[],stop_here}|B2]). merge_blocks_1([{set,[],_,stop_here}|Is]) -> Is; @@ -315,27 +329,27 @@ build_alloc(Words, Floats) -> {alloc,[{words,Words},{floats,Floats}]}. 
%% flt_liveness([Instruction]) -> [Instruction] %% (Re)calculate the number of live registers for each heap allocation -%% function. We base liveness of the number of live registers at -%% entry to the instruction sequence. +%% function. We base liveness of the number of register map at the +%% beginning of the instruction sequence. %% %% A 'not_possible' term will be thrown if the set of live registers %% is not continous at an allocation function (e.g. if {x,0} and {x,2} %% are live, but not {x,1}). -flt_liveness([{'%live',Live}=LiveInstr|Is]) -> - flt_liveness_1(Is, init_regs(Live), [LiveInstr]). +flt_liveness([{'%live',_Live,Regs}=LiveInstr|Is]) -> + flt_liveness_1(Is, Regs, [LiveInstr]). -flt_liveness_1([{set,Ds,Ss,{alloc,_,Alloc}}|Is], Regs0, Acc) -> - Live = live_regs(Regs0), +flt_liveness_1([{set,Ds,Ss,{alloc,Live0,Alloc}}|Is], Regs0, Acc) -> + Live = min(Live0, live_regs(Regs0)), I = {set,Ds,Ss,{alloc,Live,Alloc}}, - Regs = foldl(fun(R, A) -> set_live(R, A) end, Regs0, Ds), + Regs1 = init_regs(Live), + Regs = x_live(Ds, Regs1), flt_liveness_1(Is, Regs, [I|Acc]); flt_liveness_1([{set,Ds,_,_}=I|Is], Regs0, Acc) -> - Regs = foldl(fun(R, A) -> set_live(R, A) end, Regs0, Ds), - flt_liveness_1(Is, Regs, [I|Acc]); -flt_liveness_1([{'%live',_}=I|Is], Regs, Acc) -> + Regs = x_live(Ds, Regs0), flt_liveness_1(Is, Regs, [I|Acc]); -flt_liveness_1([], _Regs, Acc) -> reverse(Acc). +flt_liveness_1([{'%live',_,_}], _Regs, Acc) -> + reverse(Acc). init_regs(Live) -> (1 bsl Live) - 1. @@ -350,14 +364,15 @@ live_regs_1(R, N) -> 1 -> live_regs_1(R bsr 1, N+1) end. -set_live({x,X}, Regs) -> Regs bor (1 bsl X); -set_live(_, Regs) -> Regs. +x_live([{x,N}|Rs], Regs) -> x_live(Rs, Regs bor (1 bsl N)); +x_live([_|Rs], Regs) -> x_live(Rs, Regs); +x_live([], Regs) -> Regs. %% update(Instruction, TypeDb) -> NewTypeDb %% Update the type database to account for executing an instruction. %% %% First the cases for instructions inside basic blocks. -update({'%live',_}, Ts) -> Ts; +update({'%live',_,_}, Ts) -> Ts; update({set,[D],[S],move}, Ts) -> tdb_copy(S, D, Ts); update({set,[D],[{integer,I},Reg],{bif,element,_}}, Ts0) -> @@ -402,6 +417,10 @@ update({test,is_float,_Fail,[Src]}, Ts0) -> tdb_update([{Src,float}], Ts0); update({test,test_arity,_Fail,[Src,Arity]}, Ts0) -> tdb_update([{Src,{tuple,Arity,[]}}], Ts0); +update({test,is_map,_Fail,[Src]}, Ts0) -> + tdb_update([{Src,map}], Ts0); +update({test,is_nonempty_list,_Fail,[Src]}, Ts0) -> + tdb_update([{Src,nonempty_list}], Ts0); update({test,is_eq_exact,_,[Reg,{atom,_}=Atom]}, Ts) -> case tdb_find(Reg, Ts) of error -> @@ -451,6 +470,7 @@ is_math_bif(erf, 1) -> true; is_math_bif(erfc, 1) -> true; is_math_bif(exp, 1) -> true; is_math_bif(log, 1) -> true; +is_math_bif(log2, 1) -> true; is_math_bif(log10, 1) -> true; is_math_bif(sqrt, 1) -> true; is_math_bif(atan2, 2) -> true; @@ -710,6 +730,8 @@ merge_type_info(NewType, _) -> verify_type(NewType), NewType. +verify_type(map) -> ok; +verify_type(nonempty_list) -> ok; verify_type({tuple,Sz,[]}) when is_integer(Sz) -> ok; verify_type({tuple,Sz,[_]}) when is_integer(Sz) -> ok; verify_type({tuple_element,_,_}) -> ok; diff --git a/lib/compiler/src/beam_utils.erl b/lib/compiler/src/beam_utils.erl index 8ca368c167..7704690f86 100644 --- a/lib/compiler/src/beam_utils.erl +++ b/lib/compiler/src/beam_utils.erl @@ -26,6 +26,8 @@ code_at/2,bif_to_test/3,is_pure_test/1, live_opt/1,delete_live_annos/1,combine_heap_needs/2]). +-export([join_even/2,split_even/1]). + -import(lists, [member/2,sort/1,reverse/1,splitwith/2]). 
-record(live, @@ -185,7 +187,7 @@ is_pure_test({test,is_lt,_,[_,_]}) -> true; is_pure_test({test,is_nil,_,[_]}) -> true; is_pure_test({test,is_nonempty_list,_,[_]}) -> true; is_pure_test({test,test_arity,_,[_,_]}) -> true; -is_pure_test({test,has_map_fields,_,[_,{list,_}]}) -> true; +is_pure_test({test,has_map_fields,_,[_|_]}) -> true; is_pure_test({test,Op,_,Ops}) -> erl_internal:new_type_test(Op, length(Ops)). @@ -194,7 +196,7 @@ is_pure_test({test,Op,_,Ops}) -> %% Go through the instruction sequence in reverse execution %% order, keep track of liveness and remove 'move' instructions %% whose destination is a register that will not be used. -%% Also insert {'%live',Live} annotations at the beginning +%% Also insert {'%live',Live,Regs} annotations at the beginning %% and end of each block. %% live_opt(Is0) -> @@ -215,7 +217,7 @@ delete_live_annos([{block,Bl0}|Is]) -> [] -> delete_live_annos(Is); [_|_]=Bl -> [{block,Bl}|delete_live_annos(Is)] end; -delete_live_annos([{'%live',_}|Is]) -> +delete_live_annos([{'%live',_,_}|Is]) -> delete_live_annos(Is); delete_live_annos([I|Is]) -> [I|delete_live_annos(Is)]; @@ -364,11 +366,6 @@ check_liveness(R, [{apply,Args}|Is], St) -> {x,_} -> {killed,St}; {y,_} -> check_liveness(R, Is, St) end; -check_liveness({x,R}, [{'%live',Live}|Is], St) -> - if - R < Live -> check_liveness(R, Is, St); - true -> {killed,St} - end; check_liveness(R, [{bif,Op,{f,Fail},Ss,D}|Is], St0) -> case check_liveness_fail(R, Op, Ss, Fail, St0) of {killed,St} = Killed -> @@ -552,7 +549,7 @@ check_killed_block(R, [{set,Ds,Ss,_Op}|Is]) -> false -> check_killed_block(R, Is) end end; -check_killed_block(R, [{'%live',Live}|Is]) -> +check_killed_block(R, [{'%live',Live,_}|Is]) -> case R of {x,X} when X >= Live -> killed; _ -> check_killed_block(R, Is) @@ -575,7 +572,7 @@ check_used_block({x,X}=R, [{set,Ds,Ss,{alloc,Live,Op}}|Is], St) -> end; check_used_block(R, [{set,Ds,Ss,Op}|Is], St) -> check_used_block_1(R, Ss, Ds, Op, Is, St); -check_used_block(R, [{'%live',Live}|Is], St) -> +check_used_block(R, [{'%live',Live,_}|Is], St) -> case R of {x,X} when X >= Live -> {killed,St}; _ -> check_used_block(R, Is, St) @@ -676,9 +673,9 @@ live_opt([{test,bs_start_match2,Fail,Live,[Src,_],_}=I|Is], _, D, Acc) -> %% Other instructions. live_opt([{block,Bl0}|Is], Regs0, D, Acc) -> - Live0 = {'%live',live_regs(Regs0)}, + Live0 = {'%live',live_regs(Regs0),Regs0}, {Bl,Regs} = live_opt_block(reverse(Bl0), Regs0, D, [Live0]), - Live = {'%live',live_regs(Regs)}, + Live = {'%live',live_regs(Regs),Regs}, live_opt(Is, Regs, D, [{block,[Live|Bl]}|Acc]); live_opt([{label,L}=I|Is], Regs, D0, Acc) -> D = gb_trees:insert(L, Regs, D0), @@ -756,13 +753,9 @@ live_opt([{line,_}=I|Is], Regs, D, Acc) -> live_opt(Is, Regs, D, [I|Acc]); %% The following instructions can occur if the "compilation" has been -%% started from a .S file using the 'asm' option. +%% started from a .S file using the 'from_asm' option. live_opt([{trim,_,_}=I|Is], Regs, D, Acc) -> live_opt(Is, Regs, D, [I|Acc]); -live_opt([{allocate,_,Live}=I|Is], _, D, Acc) -> - live_opt(Is, live_call(Live), D, [I|Acc]); -live_opt([{allocate_heap,_,_,Live}=I|Is], _, D, Acc) -> - live_opt(Is, live_call(Live), D, [I|Acc]); live_opt([{'%',_}=I|Is], Regs, D, Acc) -> live_opt(Is, Regs, D, [I|Acc]); live_opt([{recv_set,_}=I|Is], Regs, D, Acc) -> @@ -832,3 +825,15 @@ x_live([_|Rs], Regs) -> x_live(Rs, Regs); x_live([], Regs) -> Regs. is_live(X, Regs) -> ((Regs bsr X) band 1) =:= 1. 
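
The third element now carried by the '%live' annotation is the same kind of register bitmap that x_live/2 and is_live/2 work on: bit N set means {x,N} is live. A small decoding sketch, not part of the patch and with an arbitrary upper bound of 255 x registers:

%% Sketch: recover the live x registers from the Regs bitmap in a
%% {'%live',Live,Regs} annotation.
live_x_regs(Regs) ->
    [{x,N} || N <- lists:seq(0, 255), (Regs bsr N) band 1 =:= 1].
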
+ +%% split_even/1 +%% [1,2,3,4,5,6] -> {[1,3,5],[2,4,6]} +split_even(Rs) -> split_even(Rs,[],[]). +split_even([],Ss,Ds) -> {reverse(Ss),reverse(Ds)}; +split_even([S,D|Rs],Ss,Ds) -> + split_even(Rs,[S|Ss],[D|Ds]). + +%% join_even/1 +%% {[1,3,5],[2,4,6]} -> [1,2,3,4,5,6] +join_even([],[]) -> []; +join_even([S|Ss],[D|Ds]) -> [S,D|join_even(Ss,Ds)]. diff --git a/lib/compiler/src/beam_validator.erl b/lib/compiler/src/beam_validator.erl index 9d5563d13b..4d4536b79c 100644 --- a/lib/compiler/src/beam_validator.erl +++ b/lib/compiler/src/beam_validator.erl @@ -22,7 +22,6 @@ %% Avoid warning for local function error/1 clashing with autoimported BIF. -compile({no_auto_import,[error/1]}). --export([file/1, files/1]). %% Interface for compiler. -export([module/2, format_error/1]). @@ -40,38 +39,12 @@ -define(DBG_FORMAT(F, D), ok). -endif. -%%% -%%% API functions. -%%% - --spec file(file:filename()) -> 'ok' | {'error', term()}. - -file(Name) when is_list(Name) -> - case case filename:extension(Name) of - ".S" -> s_file(Name); - ".beam" -> beam_file(Name) - end of - [] -> ok; - Es -> {error,Es} - end. - --spec files([file:filename()]) -> 'ok'. - -files([F|Fs]) -> - ?DBG_FORMAT("# Verifying: ~p~n", [F]), - case file(F) of - ok -> ok; - {error,Es} -> - io:format("~tp:~n~ts~n", [F,format_error(Es)]) - end, - files(Fs); -files([]) -> ok. - %% To be called by the compiler. module({Mod,Exp,Attr,Fs,Lc}=Code, _Opts) when is_atom(Mod), is_list(Exp), is_list(Attr), is_integer(Lc) -> case validate(Mod, Fs) of - [] -> {ok,Code}; + [] -> + {ok,Code}; Es0 -> Es = [{?MODULE,E} || E <- Es0], {error,[{atom_to_list(Mod),Es}]} @@ -79,12 +52,6 @@ module({Mod,Exp,Attr,Fs,Lc}=Code, _Opts) -spec format_error(term()) -> iolist(). -format_error([]) -> []; -format_error([{{M,F,A},{I,Off,Desc}}|Es]) -> - [io_lib:format(" ~p:~p/~p+~p:~n ~p - ~p~n", - [M,F,A,Off,I,Desc])|format_error(Es)]; -format_error([Error|Es]) -> - [format_error(Error)|format_error(Es)]; format_error({{_M,F,A},{I,Off,limit}}) -> io_lib:format( "function ~p/~p+~p:~n" @@ -103,8 +70,6 @@ format_error({{_M,F,A},{I,Off,Desc}}) -> " Internal consistency check failed - please report this bug.~n" " Instruction: ~p~n" " Error: ~p:~n", [F,A,Off,I,Desc]); -format_error({Module,Error}) -> - [Module:format_error(Error)]; format_error(Error) -> io_lib:format("~p~n", [Error]). @@ -112,36 +77,6 @@ format_error(Error) -> %%% Local functions follow. %%% -s_file(Name) -> - {ok,Is} = file:consult(Name), - {module,Module} = lists:keyfind(module, 1, Is), - Fs = find_functions(Is), - validate(Module, Fs). - -find_functions(Fs) -> - find_functions_1(Fs, none, [], []). - -find_functions_1([{function,Name,Arity,Entry}|Is], Func, FuncAcc, Acc0) -> - Acc = add_func(Func, FuncAcc, Acc0), - find_functions_1(Is, {Name,Arity,Entry}, [], Acc); -find_functions_1([I|Is], Func, FuncAcc, Acc) -> - find_functions_1(Is, Func, [I|FuncAcc], Acc); -find_functions_1([], Func, FuncAcc, Acc) -> - reverse(add_func(Func, FuncAcc, Acc)). - -add_func(none, _, Acc) -> Acc; -add_func({Name,Arity,Entry}, Is, Acc) -> - [{function,Name,Arity,Entry,reverse(Is)}|Acc]. - -beam_file(Name) -> - try beam_disasm:file(Name) of - {error,beam_lib,Reason} -> [{beam_lib,Reason}]; - #beam_file{module=Module, code=Code0} -> - Code = normalize_disassembled_code(Code0), - validate(Module, Code) - catch _:_ -> [disassembly_failed] - end. - %%% %%% The validator follows. 
%%% @@ -196,23 +131,16 @@ validate_0(Module, [{function,Name,Ar,Entry,Code}|Fs], Ft) -> try validate_1(Code, Name, Ar, Entry, Ft) of _ -> validate_0(Module, Fs, Ft) catch - Error -> + throw:Error -> + %% Controlled error. [Error|validate_0(Module, Fs, Ft)]; - error:Error -> - [validate_error(Error, Module, Name, Ar)|validate_0(Module, Fs, Ft)] + Class:Error -> + %% Crash. + Stack = erlang:get_stacktrace(), + io:fwrite("Function: ~w/~w\n", [Name,Ar]), + erlang:raise(Class, Error, Stack) end. --ifdef(DEBUG). -validate_error(Error, Module, Name, Ar) -> - exit(validate_error_1(Error, Module, Name, Ar)). --else. -validate_error(Error, Module, Name, Ar) -> - validate_error_1(Error, Module, Name, Ar). --endif. -validate_error_1(Error, Module, Name, Ar) -> - {{Module,Name,Ar}, - {internal_error,'_',{Error,erlang:get_stacktrace()}}}. - -type index() :: non_neg_integer(). -type reg_tab() :: gb_trees:tree(index(), 'none' | {'value', _}). @@ -225,7 +153,6 @@ validate_error_1(Error, Module, Name, Ar) -> hf=0, %Available heap size for floats. fls=undefined, %Floating point state. ct=[], %List of hot catch/try labels - bsm=undefined, %Bit syntax matching state. bits=undefined, %Number of bits in bit syntax binary. setelem=false %Previous instruction was setelement/3. }). @@ -308,7 +235,7 @@ labels_1([{label,L}|Is], R) -> labels_1([{line,_}|Is], R) -> labels_1(Is, R); labels_1(Is, R) -> - {lists:reverse(R),Is}. + {reverse(R),Is}. init_state(Arity) -> Xs = init_regs(Arity, term), @@ -403,10 +330,6 @@ valfun_1({init,{y,_}=Reg}, Vst) -> set_type_y(initialized, Reg, Vst); valfun_1({test_heap,Heap,Live}, Vst) -> test_heap(Heap, Live, Vst); -valfun_1({bif,_Op,nofail,Src,Dst}, Vst) -> - %% The 'nofail' atom only occurs in disassembled code. - validate_src(Src, Vst), - set_type_reg(term, Dst, Vst); valfun_1({bif,Op,{f,_},Src,Dst}=I, Vst) -> case is_bif_safe(Op, length(Src)) of false -> @@ -432,18 +355,12 @@ valfun_1({put_tuple,Sz,Dst}, Vst0) when is_integer(Sz) -> valfun_1({put,Src}, Vst) -> assert_term(Src, Vst), eat_heap(1, Vst); -valfun_1({put_string,Sz,_,Dst}, Vst0) when is_integer(Sz) -> - Vst = eat_heap(2*Sz, Vst0), - set_type_reg(cons, Dst, Vst); %% Instructions for optimization of selective receives. valfun_1({recv_mark,{f,Fail}}, Vst) when is_integer(Fail) -> Vst; valfun_1({recv_set,{f,Fail}}, Vst) when is_integer(Fail) -> Vst; %% Misc. -valfun_1({'%live',Live}, Vst) -> - verify_live(Live, Vst), - Vst; valfun_1(remove_message, Vst) -> Vst; valfun_1({'%',_}, Vst) -> @@ -602,8 +519,6 @@ valfun_4({call_ext_last,Live,Func,StkSize}, tail_call(Func, Live, Vst); valfun_4({call_ext_last,_,_,_}, #vst{current=#st{numy=NumY}}) -> error({allocated,NumY}); -valfun_4({make_fun,_,_,Live}, Vst) -> - call('fun', Live, Vst); valfun_4({make_fun2,_,_,_,Live}, Vst) -> call(make_fun, Live, Vst); %% Other BIFs @@ -620,8 +535,6 @@ valfun_4({bif,element,{f,Fail},[Pos,Tuple],Dst}, Vst0) -> TupleType = upgrade_tuple_type({tuple,[get_tuple_size(PosType)]}, TupleType0), Vst = set_type(TupleType, Tuple, Vst1), set_type_reg(term, Dst, Vst); -valfun_4({raise,{f,_}=Fail,Src,Dst}, Vst) -> - valfun_4({bif,raise,Fail,Src,Dst}, Vst); valfun_4({bif,Op,{f,Fail},Src,Dst}, Vst0) -> validate_src(Src, Vst0), Vst = branch_state(Fail, Vst0), @@ -738,32 +651,6 @@ valfun_4({bs_save2,Ctx,SavePoint}, Vst) -> valfun_4({bs_restore2,Ctx,SavePoint}, Vst) -> bsm_restore(Ctx, SavePoint, Vst); -%% Bit syntax instructions. 
-valfun_4({bs_start_match,{f,_Fail}=F,Src}, Vst) -> - valfun_4({test,bs_start_match,F,[Src]}, Vst); -valfun_4({test,bs_start_match,{f,Fail},[Src]}, Vst) -> - assert_term(Src, Vst), - bs_start_match(branch_state(Fail, Vst)); - -valfun_4({bs_save,SavePoint}, Vst) -> - bs_assert_state(Vst), - bs_save(SavePoint, Vst); -valfun_4({bs_restore,SavePoint}, Vst) -> - bs_assert_state(Vst), - bs_assert_savepoint(SavePoint, Vst), - Vst; -valfun_4({test,bs_skip_bits,{f,Fail},[Src,_,_]}, Vst) -> - bs_assert_state(Vst), - assert_term(Src, Vst), - branch_state(Fail, Vst); -valfun_4({test,bs_test_tail,{f,Fail},_}, Vst) -> - bs_assert_state(Vst), - branch_state(Fail, Vst); -valfun_4({test,_,{f,Fail},[_,_,_,Dst]}, Vst0) -> - bs_assert_state(Vst0), - Vst = branch_state(Fail, Vst0), - set_type_reg({integer,[]}, Dst, Vst); - %% Other test instructions. valfun_4({test,is_float,{f,Lbl},[Float]}, Vst) -> assert_term(Float, Vst), @@ -779,9 +666,17 @@ valfun_4({test,test_arity,{f,Lbl},[Tuple,Sz]}, Vst) when is_integer(Sz) -> assert_type(tuple, Tuple, Vst), set_type_reg({tuple,Sz}, Tuple, branch_state(Lbl, Vst)); valfun_4({test,has_map_fields,{f,Lbl},Src,{list,List}}, Vst) -> - validate_src([Src], Vst), + assert_type(map, Src, Vst), assert_strict_literal_termorder(List), branch_state(Lbl, Vst); +valfun_4({test,is_map,{f,Lbl},[Src]}, Vst0) -> + Vst = branch_state(Lbl, Vst0), + case Src of + {Tag,_} when Tag =:= x; Tag =:= y -> + set_type_reg(map, Src, Vst); + _ -> + Vst + end; valfun_4({test,_Op,{f,Lbl},Src}, Vst) -> validate_src(Src, Vst), branch_state(Lbl, Vst); @@ -795,9 +690,6 @@ valfun_4({bs_utf8_size,{f,Fail},A,Dst}, Vst) -> valfun_4({bs_utf16_size,{f,Fail},A,Dst}, Vst) -> assert_term(A, Vst), set_type_reg({integer,[]}, Dst, branch_state(Fail, Vst)); -valfun_4({bs_bits_to_bytes,{f,Fail},Src,Dst}, Vst) -> - assert_term(Src, Vst), - set_type_reg({integer,[]}, Dst, branch_state(Fail, Vst)); valfun_4({bs_init2,{f,Fail},Sz,Heap,Live,_,Dst}, Vst0) -> verify_live(Live, Vst0), if @@ -868,16 +760,6 @@ valfun_4({bs_put_utf32,{f,Fail},_,Src}=I, Vst0) -> assert_term(Src, Vst0), Vst = bs_align_check(I, Vst0), branch_state(Fail, Vst); -%% Old bit syntax construction (before R10B). -valfun_4({bs_init,_,_}, Vst) -> - bs_zero_bits(Vst); -valfun_4({bs_need_buf,_}, Vst) -> Vst; -valfun_4({bs_final,{f,Fail},Dst}, Vst0) -> - Vst = branch_state(Fail, Vst0), - set_type_reg(binary, Dst, Vst); -valfun_4({bs_final2,Src,Dst}, Vst0) -> - assert_term(Src, Vst0), - set_type_reg(binary, Dst, Vst0); %% Map instructions. valfun_4({put_map_assoc,{f,Fail},Src,Dst,Live,{list,List}}, Vst) -> verify_put_map(Fail, Src, Dst, Live, List, Vst); @@ -889,26 +771,30 @@ valfun_4(_, _) -> error(unknown_instruction). verify_get_map(Fail, Src, List, Vst0) -> - assert_term(Src, Vst0), + assert_type(map, Src, Vst0), Vst1 = branch_state(Fail, Vst0), - Lits = mmap(fun(L,_R) -> [L] end, List), - assert_strict_literal_termorder(Lits), + Keys = extract_map_keys(List), + assert_strict_literal_termorder(Keys), verify_get_map_pair(List,Vst0,Vst1). +extract_map_keys([Key,_Val|T]) -> + [Key|extract_map_keys(T)]; +extract_map_keys([]) -> []. + verify_get_map_pair([],_,Vst) -> Vst; verify_get_map_pair([Src,Dst|Vs],Vst0,Vsti) -> assert_term(Src, Vst0), verify_get_map_pair(Vs,Vst0,set_type_reg(term,Dst,Vsti)). 
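
For get_map_elements the {list,...} operand interleaves keys and destinations, so only every other element is a key, and only the keys are checked for strict term order. A throwaway illustration with invented operands, assumed to sit next to extract_map_keys/1:

%% Sketch: what extract_map_keys/1 keeps from an interleaved list.
extract_map_keys_example() ->
    List = [{atom,a},{x,1},{atom,b},{x,2}],    %key, dest, key, dest
    [{atom,a},{atom,b}] = extract_map_keys(List),
    ok.
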
verify_put_map(Fail, Src, Dst, Live, List, Vst0) -> + assert_type(map, Src, Vst0), verify_live(Live, Vst0), verify_y_init(Vst0), foreach(fun (Term) -> assert_term(Term, Vst0) end, List), - assert_term(Src, Vst0), Vst1 = heap_alloc(0, Vst0), Vst2 = branch_state(Fail, Vst1), Vst = prune_x_regs(Live, Vst2), - set_type_reg(term, Dst, Vst). + set_type_reg(map, Dst, Vst). %% %% Common code for validating bs_get* instructions. @@ -936,9 +822,6 @@ validate_bs_skip_utf(Fail, Ctx, Live, Vst0) -> %% val_dsetel({move,_,_}, Vst) -> Vst; -val_dsetel({put_string,0,{string,""},_}, Vst) -> - %% An empty string is OK since it doesn't build anything. - Vst; val_dsetel({call_ext,3,{extfunc,erlang,setelement,3}}, #vst{current=St}=Vst) -> Vst#vst{current=St#st{setelem=true}}; val_dsetel({set_tuple_element,_,_,_}, #vst{current=#st{setelem=false}}) -> @@ -972,7 +855,7 @@ call(Name, Live, #vst{current=St}=Vst) -> Type when Type =/= exception -> %% Type is never 'exception' because it has been handled earlier. Xs = gb_trees_from_list([{0,Type}]), - Vst#vst{current=St#st{x=Xs,f=init_fregs(),bsm=undefined}} + Vst#vst{current=St#st{x=Xs,f=init_fregs()}} end. %% Tail call. @@ -1030,7 +913,7 @@ allocate(_, _, _, _, #vst{current=#st{numy=Numy}}) -> error({existing_stack_frame,{size,Numy}}). deallocate(#vst{current=St}=Vst) -> - Vst#vst{current=St#st{y=init_regs(0, initialized),numy=none,bsm=undefined}}. + Vst#vst{current=St#st{y=init_regs(0, initialized),numy=none}}. test_heap(Heap, Live, Vst0) -> verify_live(Live, Vst0), @@ -1038,7 +921,7 @@ test_heap(Heap, Live, Vst0) -> heap_alloc(Heap, Vst). heap_alloc(Heap, #vst{current=St0}=Vst) -> - St1 = kill_heap_allocation(St0#st{bsm=undefined}), + St1 = kill_heap_allocation(St0), St = heap_alloc_1(Heap, St1), Vst#vst{current=St}. @@ -1122,72 +1005,30 @@ assert_freg_set(Fr, _) -> error({bad_source,Fr}). %%% Maps -%% ensure that a list of literals has a strict -%% ascending term order (also meaning unique literals) -assert_strict_literal_termorder(Ls) -> - Vs = lists:map(fun (L) -> get_literal(L) end, Ls), +%% A single item list may be either a list or a register. +%% +%% A list with more than item must contain literals in +%% ascending term order. +%% +%% An empty list is not allowed. + +assert_strict_literal_termorder([]) -> + %% There is no reason to use the get_map_elements and + %% has_map_fields instructions with empty lists. + error(empty_field_list); +assert_strict_literal_termorder([_]) -> + ok; +assert_strict_literal_termorder([_,_|_]=Ls) -> + Vs = [get_literal(L) || L <- Ls], case check_strict_value_termorder(Vs) of true -> ok; - false -> error({not_strict_order, Ls}) + false -> error(not_strict_order) end. -%% usage: -%% mmap(fun(A,B) -> [{A,B}] end, [1,2,3,4]), -%% [{1,2},{3,4}] - -mmap(F,List) -> - {arity,Ar} = erlang:fun_info(F,arity), - mmap(F,Ar,List). -mmap(_F,_,[]) -> []; -mmap(F,Ar,List) -> - {Hd,Tl} = lists:split(Ar,List), - apply(F,Hd) ++ mmap(F,Ar,Tl). - -check_strict_value_termorder([]) -> true; -check_strict_value_termorder([_]) -> true; -check_strict_value_termorder([V1,V2]) -> - erts_internal:cmp_term(V1,V2) < 0; -check_strict_value_termorder([V1,V2|Vs]) -> - case erts_internal:cmp_term(V1,V2) < 0 of - true -> check_strict_value_termorder([V2|Vs]); - false -> false - end. - -%%% -%%% Binary matching. -%%% -%%% Possible values for the bsm field (=bit syntax matching state). -%%% -%%% undefined - Undefined (initial state). No matching instructions allowed. -%%% -%%% (gb set) - The gb set contains the defined save points. 
-%%% -%%% The bsm field is reset to 'undefined' by instructions that may cause a -%%% a garbage collection (might move the binary) and/or context switch -%%% (may invalidate the save points). - -bs_start_match(#vst{current=#st{bsm=undefined}=St}=Vst) -> - Vst#vst{current=St#st{bsm=gb_sets:empty()}}; -bs_start_match(Vst) -> - %% Must retain save points here - it is possible to restore back - %% to a previous binary. - Vst. - -bs_save(Reg, #vst{current=#st{bsm=Saved}=St}=Vst) - when is_integer(Reg), Reg < ?MAXREG -> - Vst#vst{current=St#st{bsm=gb_sets:add(Reg, Saved)}}; -bs_save(_, _) -> error(limit). - -bs_assert_savepoint(Reg, #vst{current=#st{bsm=Saved}}) -> - case gb_sets:is_member(Reg, Saved) of - false -> error({no_save_point,Reg}); - true -> ok - end. - -bs_assert_state(#vst{current=#st{bsm=undefined}}) -> - error(no_bs_match_state); -bs_assert_state(_) -> ok. - +check_strict_value_termorder([V1|[V2|_]=Vs]) -> + erts_internal:cmp_term(V1, V2) < 0 andalso + check_strict_value_termorder(Vs); +check_strict_value_termorder([_]) -> true. %%% %%% New binary matching instructions. @@ -1387,7 +1228,8 @@ assert_term(Src, Vst) -> %% %% number Integer or Float of unknown value %% - +%% map Map. +%% assert_type(WantedType, Term, Vst) -> assert_type(WantedType, get_term_type(Term, Vst)). @@ -1469,6 +1311,7 @@ get_term_type_1(nil=T, _) -> T; get_term_type_1({atom,A}=T, _) when is_atom(A) -> T; get_term_type_1({float,F}=T, _) when is_float(F) -> T; get_term_type_1({integer,I}=T, _) when is_integer(I) -> T; +get_term_type_1({literal,Map}, _) when is_map(Map) -> map; get_term_type_1({literal,_}=T, _) -> T; get_term_type_1({x,X}=Reg, #vst{current=#st{x=Xs}}) when is_integer(X) -> case gb_trees:lookup(X, Xs) of @@ -1523,14 +1366,13 @@ merge_states(L, St, Branched) when L =/= 0 -> {value,OtherSt} -> merge_states_1(St, OtherSt) end. -merge_states_1(#st{x=Xs0,y=Ys0,numy=NumY0,h=H0,ct=Ct0,bsm=Bsm0}=St, - #st{x=Xs1,y=Ys1,numy=NumY1,h=H1,ct=Ct1,bsm=Bsm1}) -> +merge_states_1(#st{x=Xs0,y=Ys0,numy=NumY0,h=H0,ct=Ct0}=St, + #st{x=Xs1,y=Ys1,numy=NumY1,h=H1,ct=Ct1}) -> NumY = merge_stk(NumY0, NumY1), Xs = merge_regs(Xs0, Xs1), Ys = merge_y_regs(Ys0, Ys1), Ct = merge_ct(Ct0, Ct1), - Bsm = merge_bsm(Bsm0, Bsm1), - St#st{x=Xs,y=Ys,numy=NumY,h=min(H0, H1),ct=Ct,bsm=Bsm}. + St#st{x=Xs,y=Ys,numy=NumY,h=min(H0, H1),ct=Ct}. merge_stk(S, S) -> S; merge_stk(_, _) -> undecided. @@ -1613,10 +1455,6 @@ merge_types(T1, T2) when T1 =/= T2 -> %% Too different. All we know is that the type is a 'term'. term. -merge_bsm(undefined, _) -> undefined; -merge_bsm(_, undefined) -> undefined; -merge_bsm(Bsm0, Bsm1) -> gb_sets:intersection(Bsm0, Bsm1). - tuple_sz([Sz]) -> Sz; tuple_sz(Sz) -> Sz. 
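
Returning to check_strict_value_termorder/1 above: it accepts only a strictly ascending sequence of values, so equal neighbours are rejected along with out-of-order ones. A quick sketch of its behaviour, assumed to sit next to that local function, with arbitrary values:

%% Sketch: strictly ascending passes; a duplicate or a swap does not.
termorder_examples() ->
    true = check_strict_value_termorder([1, 2, 3]),
    false = check_strict_value_termorder([1, 1, 2]),
    false = check_strict_value_termorder([b, a]),
    ok.
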
@@ -1723,6 +1561,7 @@ bif_type(is_float, [_], _) -> bool; bif_type(is_function, [_], _) -> bool; bif_type(is_integer, [_], _) -> bool; bif_type(is_list, [_], _) -> bool; +bif_type(is_map, [_], _) -> bool; bif_type(is_number, [_], _) -> bool; bif_type(is_pid, [_], _) -> bool; bif_type(is_port, [_], _) -> bool; @@ -1752,6 +1591,7 @@ is_bif_safe(is_float, 1) -> true; is_bif_safe(is_function, 1) -> true; is_bif_safe(is_integer, 1) -> true; is_bif_safe(is_list, 1) -> true; +is_bif_safe(is_map, 1) -> true; is_bif_safe(is_number, 1) -> true; is_bif_safe(is_pid, 1) -> true; is_bif_safe(is_port, 1) -> true; @@ -1816,6 +1656,7 @@ return_type_math(erf, 1) -> {float,[]}; return_type_math(erfc, 1) -> {float,[]}; return_type_math(exp, 1) -> {float,[]}; return_type_math(log, 1) -> {float,[]}; +return_type_math(log2, 1) -> {float,[]}; return_type_math(log10, 1) -> {float,[]}; return_type_math(sqrt, 1) -> {float,[]}; return_type_math(atan2, 2) -> {float,[]}; @@ -1837,52 +1678,3 @@ error(Error) -> exit(Error). -else. error(Error) -> throw(Error). -endif. - - -%%% -%%% Rewrite disassembled code to the same format as we used internally -%%% to not have to worry later. -%%% - -normalize_disassembled_code(Fs) -> - Index = ndc_index(Fs, []), - ndc(Fs, Index, []). - -ndc_index([{function,Name,Arity,Entry,_Code}|Fs], Acc) -> - ndc_index(Fs, [{{Name,Arity},Entry}|Acc]); -ndc_index([], Acc) -> - gb_trees:from_orddict(lists:sort(Acc)). - -ndc([{function,Name,Arity,Entry,Code0}|Fs], D, Acc) -> - Code = ndc_1(Code0, D, []), - ndc(Fs, D, [{function,Name,Arity,Entry,Code}|Acc]); -ndc([], _, Acc) -> reverse(Acc). - -ndc_1([{call=Op,A,{_,F,A}}|Is], D, Acc) -> - ndc_1(Is, D, [{Op,A,{f,gb_trees:get({F,A}, D)}}|Acc]); -ndc_1([{call_only=Op,A,{_,F,A}}|Is], D, Acc) -> - ndc_1(Is, D, [{Op,A,{f,gb_trees:get({F,A}, D)}}|Acc]); -ndc_1([{call_last=Op,A,{_,F,A},Sz}|Is], D, Acc) -> - ndc_1(Is, D, [{Op,A,{f,gb_trees:get({F,A}, D)},Sz}|Acc]); -ndc_1([{arithbif,Op,F,Src,Dst}|Is], D, Acc) -> - ndc_1(Is, D, [{bif,Op,F,Src,Dst}|Acc]); -ndc_1([{arithfbif,Op,F,Src,Dst}|Is], D, Acc) -> - ndc_1(Is, D, [{bif,Op,F,Src,Dst}|Acc]); -ndc_1([{test,bs_start_match2=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]); -ndc_1([{test,bs_get_binary2=Op,F,[A1,Live,A3,A4,A5,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3,A4,A5],Dst}|Acc]); -ndc_1([{test,bs_get_float2=Op,F,[A1,Live,A3,A4,A5,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3,A4,A5],Dst}|Acc]); -ndc_1([{test,bs_get_integer2=Op,F,[A1,Live,A3,A4,A5,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3,A4,A5],Dst}|Acc]); -ndc_1([{test,bs_get_utf8=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]); -ndc_1([{test,bs_get_utf16=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]); -ndc_1([{test,bs_get_utf32=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) -> - ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]); -ndc_1([I|Is], D, Acc) -> - ndc_1(Is, D, [I|Acc]); -ndc_1([], _, Acc) -> - reverse(Acc). 
diff --git a/lib/compiler/src/beam_z.erl b/lib/compiler/src/beam_z.erl index c2a6ef604e..0c7bef9183 100644 --- a/lib/compiler/src/beam_z.erl +++ b/lib/compiler/src/beam_z.erl @@ -79,17 +79,9 @@ undo_rename({put_map,Fail,assoc,S,D,R,L}) -> undo_rename({put_map,Fail,exact,S,D,R,L}) -> {put_map_exact,Fail,S,D,R,L}; undo_rename({test,has_map_fields,Fail,[Src|List]}) -> - {test,has_map_fields,Fail,Src,{list,[to_typed_literal(V)||V<-List]}}; -undo_rename({get_map_elements,Fail,Src,{list, List}}) -> - {get_map_elements,Fail,Src,{list,[to_typed_literal(V)||V<-List]}}; + {test,has_map_fields,Fail,Src,{list,List}}; +undo_rename({get_map_elements,Fail,Src,{list,List}}) -> + {get_map_elements,Fail,Src,{list,List}}; undo_rename({select,I,Reg,Fail,List}) -> {I,Reg,Fail,{list,List}}; undo_rename(I) -> I. - -%% to_typed_literal(Arg) -%% transform Arg to specific literal i.e. float | integer | atom if applicable -to_typed_literal({literal, V}) when is_float(V) -> {float, V}; -to_typed_literal({literal, V}) when is_atom(V) -> {atom, V}; -to_typed_literal({literal, V}) when is_integer(V) -> {integer, V}; -to_typed_literal({literal, []}) -> nil; -to_typed_literal(V) -> V. diff --git a/lib/compiler/src/cerl.erl b/lib/compiler/src/cerl.erl index 9d6768b157..8367a1e19e 100644 --- a/lib/compiler/src/cerl.erl +++ b/lib/compiler/src/cerl.erl @@ -123,11 +123,14 @@ bitstr_flags/1, %% keep map exports here for now + c_map_pattern/1, + is_c_map/1, map_es/1, map_arg/1, update_c_map/3, c_map/1, is_c_map_empty/1, ann_c_map/2, ann_c_map/3, + ann_c_map_pattern/2, map_pair_op/1,map_pair_key/1,map_pair_val/1, update_c_map_pair/4, c_map_pair/2, @@ -135,7 +138,8 @@ ]). -export_type([c_binary/0, c_bitstr/0, c_call/0, c_clause/0, c_cons/0, c_fun/0, - c_literal/0, c_map/0, c_map_pair/0, c_module/0, c_tuple/0, + c_let/0, c_literal/0, c_map/0, c_map_pair/0, + c_module/0, c_tuple/0, c_values/0, c_var/0, cerl/0, var_name/0]). -include("core_parse.hrl"). @@ -431,6 +435,8 @@ is_literal_term([H | T]) -> is_literal_term(T) when is_tuple(T) -> is_literal_term_list(tuple_to_list(T)); is_literal_term(B) when is_bitstring(B) -> true; +is_literal_term(M) when is_map(M) -> + is_literal_term_list(maps:to_list(M)); is_literal_term(_) -> false. @@ -1577,6 +1583,20 @@ ann_make_list(_, [], Node) -> %% --------------------------------------------------------------------- %% maps +%% @spec is_c_map(Node::cerl()) -> boolean() +%% +%% @doc Returns <code>true</code> if <code>Node</code> is an abstract +%% map constructor, otherwise <code>false</code>. + +-spec is_c_map(cerl()) -> boolean(). + +is_c_map(#c_map{}) -> + true; +is_c_map(#c_literal{val = V}) when is_map(V) -> + true; +is_c_map(_) -> + false. + -spec map_es(c_map()) -> [c_map_pair()]. map_es(#c_map{es = Es}) -> @@ -1590,7 +1610,17 @@ map_arg(#c_map{arg=M}) -> -spec c_map([c_map_pair()]) -> c_map(). c_map(Pairs) -> - #c_map{es=Pairs}. + ann_c_map([], Pairs). + +-spec c_map_pattern([c_map_pair()]) -> c_map(). + +c_map_pattern(Pairs) -> + #c_map{es=Pairs, is_pat=true}. + +-spec ann_c_map_pattern([term()], [c_map_pair()]) -> c_map(). + +ann_c_map_pattern(As, Pairs) -> + #c_map{anno=As, es=Pairs, is_pat=true}. -spec is_c_map_empty(c_map() | c_literal()) -> boolean(). @@ -1598,25 +1628,13 @@ is_c_map_empty(#c_map{ es=[] }) -> true; is_c_map_empty(#c_literal{val=M}) when is_map(M),map_size(M) =:= 0 -> true; is_c_map_empty(_) -> false. --spec ann_c_map([term()], [cerl()]) -> c_map() | c_literal(). +-spec ann_c_map([term()], [c_map_pair()]) -> c_map() | c_literal(). 
-ann_c_map(As,Es) -> +ann_c_map(As, Es) -> ann_c_map(As, #c_literal{val=#{}}, Es). -spec ann_c_map([term()], c_map() | c_literal(), [c_map_pair()]) -> c_map() | c_literal(). -ann_c_map(As,#c_literal{val=Mval}=M,Es) when is_map(Mval), map_size(Mval) =:= 0 -> - Pairs = [[Ck,Cv]||#c_map_pair{key=Ck,val=Cv}<-Es], - IsLit = lists:foldl(fun(Pair,Res) -> - Res andalso is_lit_list(Pair) - end, true, Pairs), - Fun = fun(Pair) -> [K,V] = lit_list_vals(Pair), {K,V} end, - case IsLit of - false -> - #c_map{arg=M, es=Es, anno=As }; - true -> - #c_literal{anno=As, val=maps:from_list(lists:map(Fun, Pairs))} - end; ann_c_map(As,#c_literal{val=M},Es) when is_map(M) -> fold_map_pairs(As,Es,M); ann_c_map(As,M,Es) -> @@ -1644,14 +1662,14 @@ fold_map_pairs(As,[#c_map_pair{op=#c_literal{val=exact},key=Ck,val=Cv}=E|Es],M) end; false -> #c_map{arg=#c_literal{val=M,anno=As}, es=[E|Es], anno=As } - end; -fold_map_pairs(As,Es,M) -> - #c_map{arg=#c_literal{val=M,anno=As}, es=Es, anno=As }. + end. -%-spec update_c_map(c_map() | c_literal(), [c_map_pair()]) -> c_map() | c_literal(). +-spec update_c_map(c_map(), cerl(), [cerl()]) -> c_map() | c_literal(). -update_c_map(Old,M,Es) -> - #c_map{arg=M, es = Es, anno = get_ann(Old)}. +update_c_map(#c_map{is_pat=true}=Old, M, Es) -> + Old#c_map{arg=M, es=Es}; +update_c_map(#c_map{is_pat=false}=Old, M, Es) -> + ann_c_map(get_ann(Old), M, Es). map_pair_key(#c_map_pair{key=K}) -> K. map_pair_val(#c_map_pair{val=V}) -> V. @@ -3063,10 +3081,12 @@ pat_vars(Node, Vs) -> map -> pat_list_vars(map_es(Node), Vs); map_pair -> - pat_list_vars([map_pair_op(Node),map_pair_key(Node),map_pair_val(Node)],Vs); + %% map_pair_key is not a pattern var, excluded + pat_list_vars([map_pair_op(Node),map_pair_val(Node)],Vs); binary -> pat_list_vars(binary_segments(Node), Vs); bitstr -> + %% bitstr_size is not a pattern var, excluded pat_vars(bitstr_val(Node), Vs); alias -> pat_vars(alias_pat(Node), [alias_var(Node) | Vs]) diff --git a/lib/compiler/src/cerl_clauses.erl b/lib/compiler/src/cerl_clauses.erl index 87bd47c08b..ef74c5b76f 100644 --- a/lib/compiler/src/cerl_clauses.erl +++ b/lib/compiler/src/cerl_clauses.erl @@ -354,29 +354,29 @@ match(P, E, Bs) -> {false, Bs} end end; - map -> - %% The most we can do is to say "definitely no match" if a - %% map pattern is matched against non-map data. - case E of - any -> - {false, Bs}; - _ -> - case type(E) of - literal -> - case is_map(concrete(E)) of - false -> - none; - true -> - {false, Bs} - end; - cons -> - none; - tuple -> - none; - _ -> - {false, Bs} - end - end; + map -> + %% The most we can do is to say "definitely no match" if a + %% map pattern is matched against non-map data. + case E of + any -> + {false, Bs}; + _ -> + case type(E) of + literal -> + case is_map(concrete(E)) of + false -> + none; + true -> + {false, Bs} + end; + cons -> + none; + tuple -> + none; + _ -> + {false, Bs} + end + end; _ -> match_1(P, E, Bs) end. diff --git a/lib/compiler/src/cerl_inline.erl b/lib/compiler/src/cerl_inline.erl index 75740e8b9d..f8489a800b 100644 --- a/lib/compiler/src/cerl_inline.erl +++ b/lib/compiler/src/cerl_inline.erl @@ -1341,23 +1341,23 @@ i_bitstr(E, Ren, Env, S) -> S3 = count_size(weight(bitstr), S2), {update_c_bitstr(E, Val, Size, Unit, Type, Flags), S3}. -i_map(E, Ctx, Ren, Env, S) -> +i_map(E, Ctx, Ren, Env, S0) -> %% Visit the segments for value. 
- {M1, S1} = i(map_arg(E), value, Ren, Env, S), + {M1, S1} = i(map_arg(E), value, Ren, Env, S0), {Es, S2} = mapfoldl(fun (E, S) -> i_map_pair(E, Ctx, Ren, Env, S) end, S1, map_es(E)), S3 = count_size(weight(map), S2), {update_c_map(E, M1,Es), S3}. -i_map_pair(E, Ctx, Ren, Env, S) -> - %% It is not necessary to visit the Op and Key fields, - %% since these are always literals. - {Val, S1} = i(map_pair_val(E), Ctx, Ren, Env, S), +i_map_pair(E, Ctx, Ren, Env, S0) -> + %% It is not necessary to visit the Op field + %% since it is always a literal. + {Key, S1} = i(map_pair_key(E), value, Ren, Env, S0), + {Val, S2} = i(map_pair_val(E), Ctx, Ren, Env, S1), Op = map_pair_op(E), - Key = map_pair_key(E), - S2 = count_size(weight(map_pair), S1), - {update_c_map_pair(E, Op, Key, Val), S2}. + S3 = count_size(weight(map_pair), S2), + {update_c_map_pair(E, Op, Key, Val), S3}. %% This is a simplified version of `i_pattern', for lists of parameter @@ -1420,15 +1420,11 @@ i_pattern(E, Ren, Env, Ren0, Env0, S) -> S2 = count_size(weight(binary), S1), {update_c_binary(E, Es), S2}; map -> - %% map patterns should not have args - M = map_arg(E), - {Es, S1} = mapfoldl(fun (E, S) -> i_map_pair_pattern(E, Ren, Env, Ren0, Env0, S) - end, - S, map_es(E)), + end, S, map_es(E)), S2 = count_size(weight(map), S1), - {update_c_map(E, M, Es), S2}; + {update_c_map(E, map_arg(E), Es), S2}; _ -> case is_literal(E) of true -> @@ -1464,12 +1460,12 @@ i_bitstr_pattern(E, Ren, Env, Ren0, Env0, S) -> i_map_pair_pattern(E, Ren, Env, Ren0, Env0, S) -> %% It is not necessary to visit the Op it is always a literal. - %% Same goes for Key - {Val, S1} = i_pattern(map_pair_val(E), Ren, Env, Ren0, Env0, S), + %% Key is an expression + {Key, S1} = i(map_pair_key(E), value, Ren0, Env0, S), + {Val, S2} = i_pattern(map_pair_val(E), Ren, Env, Ren0, Env0, S1), Op = map_pair_op(E), %% should be 'exact' literal - Key = map_pair_key(E), - S2 = count_size(weight(map_pair), S1), - {update_c_map_pair(E, Op, Key, Val), S2}. + S3 = count_size(weight(map_pair), S2), + {update_c_map_pair(E, Op, Key, Val), S3}. %% --------------------------------------------------------------------- diff --git a/lib/compiler/src/cerl_trees.erl b/lib/compiler/src/cerl_trees.erl index e53bdd4efb..b93da8e97f 100644 --- a/lib/compiler/src/cerl_trees.erl +++ b/lib/compiler/src/cerl_trees.erl @@ -520,9 +520,9 @@ variables(T, S) -> tuple -> vars_in_list(tuple_es(T), S); map -> - vars_in_list(map_es(T), S); + vars_in_list([map_arg(T)|map_es(T)], S); map_pair -> - vars_in_list([map_pair_op(T),map_pair_key(T), map_pair_val(T)], S); + vars_in_list([map_pair_op(T),map_pair_key(T),map_pair_val(T)], S); 'let' -> Vs = variables(let_body(T), S), Vs1 = var_list_names(let_vars(T)), diff --git a/lib/compiler/src/compile.erl b/lib/compiler/src/compile.erl index c7d91070f6..f347438509 100644 --- a/lib/compiler/src/compile.erl +++ b/lib/compiler/src/compile.erl @@ -431,11 +431,6 @@ pass(from_core) -> {".core",[?pass(parse_core)|core_passes()]}; pass(from_asm) -> {".S",[?pass(beam_consult_asm)|asm_passes()]}; -pass(asm) -> - %% TODO: remove 'asm' in 18.0 - io:format("compile:file/2 option 'asm' has been deprecated and will be~n" - "removed in the 18.0 release. Use 'from_asm' instead.~n"), - pass(from_asm); pass(from_beam) -> {".beam",[?pass(read_beam_file)|binary_passes()]}; pass(_) -> none. 
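With the deprecated 'asm' option removed from compile.erl above, assembler input goes through 'from_asm' only. A hedged usage sketch (the file name is made up; the other options are the usual ones and not specific to this change):

    %% Compile a BEAM assembler (.S) source to a module binary.
    {ok, Mod, BeamBin} = compile:file("example.S", [from_asm, binary, report_errors]).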
diff --git a/lib/compiler/src/compiler.app.src b/lib/compiler/src/compiler.app.src index 8f68915f8e..fbaa7a96fe 100644 --- a/lib/compiler/src/compiler.app.src +++ b/lib/compiler/src/compiler.app.src @@ -56,6 +56,7 @@ rec_env, sys_core_dsetel, sys_core_fold, + sys_core_fold_lists, sys_core_inline, sys_pre_attributes, sys_pre_expand, diff --git a/lib/compiler/src/core_lib.erl b/lib/compiler/src/core_lib.erl index 0d95971f91..66319dbd36 100644 --- a/lib/compiler/src/core_lib.erl +++ b/lib/compiler/src/core_lib.erl @@ -20,6 +20,12 @@ -module(core_lib). +-deprecated({get_anno,1,next_major_release}). +-deprecated({set_anno,2,next_major_release}). +-deprecated({is_literal,1,next_major_release}). +-deprecated({is_literal_list,1,next_major_release}). +-deprecated({literal_value,1,next_major_release}). + -export([get_anno/1,set_anno/2]). -export([is_literal/1,is_literal_list/1]). -export([literal_value/1]). @@ -33,59 +39,27 @@ %% -spec get_anno(cerl:cerl()) -> term(). -get_anno(C) -> element(2, C). +get_anno(C) -> cerl:get_ann(C). -spec set_anno(cerl:cerl(), term()) -> cerl:cerl(). -set_anno(C, A) -> setelement(2, C, A). +set_anno(C, A) -> cerl:set_ann(C, A). -spec is_literal(cerl:cerl()) -> boolean(). -is_literal(#c_literal{}) -> true; -is_literal(#c_cons{hd=H,tl=T}) -> - is_literal(H) andalso is_literal(T); -is_literal(#c_tuple{es=Es}) -> is_literal_list(Es); -is_literal(#c_binary{segments=Es}) -> is_lit_bin(Es); -is_literal(_) -> false. +is_literal(Cerl) -> + cerl:is_literal(cerl:fold_literal(Cerl)). -spec is_literal_list([cerl:cerl()]) -> boolean(). is_literal_list(Es) -> lists:all(fun is_literal/1, Es). -is_lit_bin(Es) -> - lists:all(fun (#c_bitstr{val=E,size=S}) -> - is_literal(E) andalso is_literal(S) - end, Es). - %% Return the value of LitExpr. -spec literal_value(cerl:c_literal() | cerl:c_binary() | cerl:c_map() | cerl:c_cons() | cerl:c_tuple()) -> term(). -literal_value(#c_literal{val=V}) -> V; -literal_value(#c_binary{segments=Es}) -> - list_to_binary([literal_value_bin(Bit) || Bit <- Es]); -literal_value(#c_cons{hd=H,tl=T}) -> - [literal_value(H)|literal_value(T)]; -literal_value(#c_tuple{es=Es}) -> - list_to_tuple(literal_value_list(Es)); -literal_value(#c_map{arg=Cm,es=Cmps}) -> - M = literal_value(Cm), - lists:foldl(fun(#c_map_pair{ key=Ck, val=Cv },Mi) -> - K = literal_value(Ck), - V = literal_value(Cv), - maps:put(K,V,Mi) - end, M, Cmps). - -literal_value_list(Vals) -> [literal_value(V) || V <- Vals]. - -literal_value_bin(#c_bitstr{val=Val,size=Sz,unit=U,type=T,flags=Fs}) -> - %% We will only handle literals constructed by make_literal/1. - %% Could be made more general in the future if the need arises. - 8 = literal_value(Sz), - 1 = literal_value(U), - integer = literal_value(T), - [unsigned,big] = literal_value(Fs), - literal_value(Val). +literal_value(Cerl) -> + cerl:concrete(cerl:fold_literal(Cerl)). %% Make a suitable values structure, expr or values, depending on Expr. -spec make_values([cerl:cerl()] | cerl:cerl()) -> cerl:cerl(). @@ -236,10 +210,15 @@ vu_pat_seg_list(V, Ss, St) -> end end, St, Ss). -vu_map_pairs(V, [#c_map_pair{val=Pat}|T], St0) -> - case vu_pattern(V, Pat, St0) of - {true,_}=St -> St; - St -> vu_map_pairs(V, T, St) +vu_map_pairs(V, [#c_map_pair{key=Key,val=Pat}|T], St0) -> + case vu_expr(V, Key) of + true -> + {true,false}; + false -> + case vu_pattern(V, Pat, St0) of + {true,_}=St -> St; + St -> vu_map_pairs(V, T, St) + end end; vu_map_pairs(_, [], St) -> St. 
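The core_lib functions deprecated above now delegate to cerl, so callers can switch to the cerl API directly. A small self-contained sketch of the replacements (the module name and demo/0 are illustrative, not part of OTP):

    -module(core_lib_migration).
    -export([demo/0]).

    demo() ->
        Lit  = cerl:abstract({a, [1, 2, 3]}),
        Anno = cerl:get_ann(Lit),                         %% was core_lib:get_anno/1
        Lit1 = cerl:set_ann(Lit, Anno),                   %% was core_lib:set_anno/2
        true = cerl:is_literal(cerl:fold_literal(Lit1)),  %% was core_lib:is_literal/1
        cerl:concrete(cerl:fold_literal(Lit1)).           %% was core_lib:literal_value/1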
diff --git a/lib/compiler/src/core_lint.erl b/lib/compiler/src/core_lint.erl index 25df33a287..f62b2bb0ba 100644 --- a/lib/compiler/src/core_lint.erl +++ b/lib/compiler/src/core_lint.erl @@ -33,9 +33,6 @@ %% Values only as multiple values/variables/patterns. %% Return same number of values as requested %% Correct number of arguments -%% -%% Checks to add: -%% %% Consistency of values/variables %% Consistency of function return values/calls. %% @@ -176,7 +173,7 @@ check_exports(Es, St) -> end. check_attrs(As, St) -> - case all(fun ({#c_literal{},V}) -> core_lib:is_literal(V); + case all(fun ({#c_literal{},#c_literal{}}) -> true; (_) -> false end, As) of true -> St; @@ -211,7 +208,7 @@ functions(Fs, Def, St0) -> function({#c_var{name={_,_}},B}, Def, St) -> %% Body must be a fun! case B of - #c_fun{} -> expr(B, Def, any, St); + #c_fun{} -> expr(B, Def, 1, St); _ -> add_error({illegal_expr,St#lint.func}, St) end. @@ -247,40 +244,42 @@ gbody(E, Def, Rt, St0) -> false -> St1 end. -gexpr(#c_var{name=N}, Def, _Rt, St) when is_atom(N); is_integer(N) -> - expr_var(N, Def, St); -gexpr(#c_literal{}, _Def, _Rt, St) -> St; -gexpr(#c_cons{hd=H,tl=T}, Def, _Rt, St) -> - gexpr_list([H,T], Def, St); -gexpr(#c_tuple{es=Es}, Def, _Rt, St) -> - gexpr_list(Es, Def, St); -gexpr(#c_map{es=Es}, Def, _Rt, St) -> - gexpr_list(Es, Def, St); -gexpr(#c_map_pair{key=K,val=V}, Def, _Rt, St) -> - gexpr_list([K,V], Def, St); -gexpr(#c_binary{segments=Ss}, Def, _Rt, St) -> - gbitstr_list(Ss, Def, St); +gexpr(#c_var{name=N}, Def, Rt, St) when is_atom(N); is_integer(N) -> + return_match(Rt, 1, expr_var(N, Def, St)); +gexpr(#c_literal{}, _Def, Rt, St) -> + return_match(Rt, 1, St); +gexpr(#c_cons{hd=H,tl=T}, Def, Rt, St) -> + return_match(Rt, 1, gexpr_list([H,T], Def, St)); +gexpr(#c_tuple{es=Es}, Def, Rt, St) -> + return_match(Rt, 1, gexpr_list(Es, Def, St)); +gexpr(#c_map{es=Es}, Def, Rt, St) -> + return_match(Rt, 1, gexpr_list(Es, Def, St)); +gexpr(#c_map_pair{key=K,val=V}, Def, Rt, St) -> + return_match(Rt, 1, gexpr_list([K,V], Def, St)); +gexpr(#c_binary{segments=Ss}, Def, Rt, St) -> + return_match(Rt, 1, gbitstr_list(Ss, Def, St)); gexpr(#c_seq{arg=Arg,body=B}, Def, Rt, St0) -> - St1 = gexpr(Arg, Def, any, St0), %Ignore values - gbody(B, Def, Rt, St1); + St1 = gexpr(Arg, Def, 1, St0), + return_match(Rt, 1, gbody(B, Def, Rt, St1)); gexpr(#c_let{vars=Vs,arg=Arg,body=B}, Def, Rt, St0) -> St1 = gbody(Arg, Def, let_varcount(Vs), St0), %This is a guard body {Lvs,St2} = variable_list(Vs, St1), gbody(B, union(Lvs, Def), Rt, St2); gexpr(#c_call{module=#c_literal{val=erlang},name=#c_literal{val=is_record}, args=[Arg,#c_literal{val=Tag},#c_literal{val=Size}]}, - Def, 1, St) when is_atom(Tag), is_integer(Size) -> - gexpr(Arg, Def, 1, St); + Def, Rt, St) when is_atom(Tag), is_integer(Size) -> + return_match(Rt, 1, gexpr(Arg, Def, 1, St)); gexpr(#c_call{module=#c_literal{val=erlang},name=#c_literal{val=is_record}}, - _Def, 1, St) -> - add_error({illegal_guard,St#lint.func}, St); + _Def, Rt, St) -> + return_match(Rt, 1, add_error({illegal_guard,St#lint.func}, St)); gexpr(#c_call{module=#c_literal{val=erlang},name=#c_literal{val=Name},args=As}, - Def, 1, St) when is_atom(Name) -> + Def, Rt, St0) when is_atom(Name) -> + St1 = return_match(Rt, 1, St0), case is_guard_bif(Name, length(As)) of true -> - gexpr_list(As, Def, St); + gexpr_list(As, Def, St1); false -> - add_error({illegal_guard,St#lint.func}, St) + add_error({illegal_guard,St1#lint.func}, St1) end; gexpr(#c_primop{name=#c_literal{val=A},args=As}, Def, _Rt, St0) when is_atom(A) -> 
gexpr_list(As, Def, St0); @@ -319,23 +318,25 @@ is_guard_bif(Name, Arity) -> %% expr(Expr, Defined, RetCount, State) -> State. -expr(#c_var{name={_,_}=FA}, Def, _Rt, St) -> - expr_fname(FA, Def, St); -expr(#c_var{name=N}, Def, _Rt, St) -> expr_var(N, Def, St); -expr(#c_literal{}, _Def, _Rt, St) -> St; -expr(#c_cons{hd=H,tl=T}, Def, _Rt, St) -> - expr_list([H,T], Def, St); -expr(#c_tuple{es=Es}, Def, _Rt, St) -> - expr_list(Es, Def, St); -expr(#c_map{es=Es}, Def, _Rt, St) -> - expr_list(Es, Def, St); -expr(#c_map_pair{key=K,val=V},Def,_Rt,St) -> - expr_list([K,V],Def,St); -expr(#c_binary{segments=Ss}, Def, _Rt, St) -> - bitstr_list(Ss, Def, St); +expr(#c_var{name={_,_}=FA}, Def, Rt, St) -> + return_match(Rt, 1, expr_fname(FA, Def, St)); +expr(#c_var{name=N}, Def, Rt, St) -> + return_match(Rt, 1, expr_var(N, Def, St)); +expr(#c_literal{}, _Def, Rt, St) -> + return_match(Rt, 1, St); +expr(#c_cons{hd=H,tl=T}, Def, Rt, St) -> + return_match(Rt, 1, expr_list([H,T], Def, St)); +expr(#c_tuple{es=Es}, Def, Rt, St) -> + return_match(Rt, 1, expr_list(Es, Def, St)); +expr(#c_map{es=Es}, Def, Rt, St) -> + return_match(Rt, 1, expr_list(Es, Def, St)); +expr(#c_map_pair{key=K,val=V}, Def, Rt, St) -> + return_match(Rt, 1, expr_list([K,V], Def, St)); +expr(#c_binary{segments=Ss}, Def, Rt, St) -> + return_match(Rt, 1, bitstr_list(Ss, Def, St)); expr(#c_fun{vars=Vs,body=B}, Def, Rt, St0) -> {Vvs,St1} = variable_list(Vs, St0), - return_match(Rt, 1, body(B, union(Vvs, Def), any, St1)); + return_match(Rt, 1, body(B, union(Vvs, Def), 1, St1)); expr(#c_seq{arg=Arg,body=B}, Def, Rt, St0) -> St1 = expr(Arg, Def, 1, St0), body(B, Def, Rt, St1); @@ -361,15 +362,26 @@ expr(#c_receive{clauses=Cs,timeout=T,action=A}, Def, Rt, St0) -> St1 = expr(T, Def, 1, St0), St2 = body(A, Def, Rt, St1), clauses(Cs, Def, 1, Rt, St2); -expr(#c_apply{op=Op,args=As}, Def, _Rt, St0) -> +expr(#c_apply{op=Op,args=As}, Def, Rt, St0) -> St1 = apply_op(Op, Def, length(As), St0), - expr_list(As, Def, St1); + return_match(Rt, 1, expr_list(As, Def, St1)); +expr(#c_call{module=#c_literal{val=erlang},name=#c_literal{val=Name},args=As}, + Def, Rt, St0) when is_atom(Name) -> + St1 = expr_list(As, Def, St0), + case erl_bifs:is_exit_bif(erlang, Name, length(As)) of + true -> St1; + false -> return_match(Rt, 1, St1) + end; expr(#c_call{module=M,name=N,args=As}, Def, _Rt, St0) -> St1 = expr(M, Def, 1, St0), St2 = expr(N, Def, 1, St1), expr_list(As, Def, St2); -expr(#c_primop{name=#c_literal{val=A},args=As}, Def, _Rt, St0) when is_atom(A) -> - expr_list(As, Def, St0); +expr(#c_primop{name=#c_literal{val=A},args=As}, Def, Rt, St0) when is_atom(A) -> + St1 = expr_list(As, Def, St0), + case A of + match_fail -> St1; + _ -> return_match(Rt, 1, St1) + end; expr(#c_catch{body=B}, Def, Rt, St) -> return_match(Rt, 1, body(B, Def, 1, St)); expr(#c_try{arg=A,vars=Vs,body=B,evars=Evs,handler=H}, Def, Rt, St0) -> diff --git a/lib/compiler/src/core_parse.hrl b/lib/compiler/src/core_parse.hrl index 4a00535360..7fd4a82e4e 100644 --- a/lib/compiler/src/core_parse.hrl +++ b/lib/compiler/src/core_parse.hrl @@ -72,7 +72,8 @@ -record(c_map, {anno=[], arg=#c_literal{val=#{}} :: cerl:c_var() | cerl:c_literal(), - es :: [cerl:c_map_pair()]}). + es :: [cerl:c_map_pair()], + is_pat=false :: boolean()}). 
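The is_pat flag added to #c_map{} above is what the new cerl constructors set: c_map_pattern/1 and ann_c_map_pattern/2 build map patterns, while c_map/1 and ann_c_map/3 keep building constructions. A rough fragment showing the distinction (illustration only; a real pattern would normally carry exact (':=') pairs, as the core_parse.yrl grammar changes further down show, but this sketch sticks to the exported c_map_pair/2 constructor):

    Key = cerl:abstract(id),
    Val = cerl:c_var('X'),
    Con = cerl:c_map([cerl:c_map_pair(Key, Val)]),          %% construction: ~{ 'id' => X }~
    Pat = cerl:c_map_pattern([cerl:c_map_pair(Key, Val)]),  %% pattern node, is_pat=true
    true = cerl:is_c_map(Con),
    true = cerl:is_c_map(Pat),
    true = cerl:is_c_map(cerl:abstract(#{id => 1})).        %% literal maps now count as maps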
-record(c_map_pair, {anno=[], op :: #c_literal{val::'assoc'} | #c_literal{val::'exact'}, diff --git a/lib/compiler/src/core_parse.yrl b/lib/compiler/src/core_parse.yrl index a66ad4235f..eeb9f5dba7 100644 --- a/lib/compiler/src/core_parse.yrl +++ b/lib/compiler/src/core_parse.yrl @@ -58,7 +58,8 @@ Terminals %% Separators -'(' ')' '{' '}' '[' ']' '|' ',' '->' '=' '/' '<' '>' ':' '-|' '#' '~' '::' +'(' ')' '{' '}' '[' ']' '|' ',' '->' '=' '/' '<' '>' ':' '-|' '#' +'~' '=>' ':=' %% Keywords (atoms are assumed to always be single-quoted). @@ -123,7 +124,7 @@ function_definition -> {'$1','$3'}. anno_fun -> '(' fun_expr '-|' annotation ')' : - core_lib:set_anno('$2', '$4'). + cerl:set_ann('$2', '$4'). anno_fun -> fun_expr : '$1'. %% Constant terms for annotations and attributes. @@ -162,7 +163,7 @@ tail_constant -> ',' constant tail_constant : ['$2'|'$3']. %% ( ( V -| <anno> ) = ( {a} -| <anno> ) -| <anno> ) anno_pattern -> '(' other_pattern '-|' annotation ')' : - core_lib:set_anno('$2', '$4'). + cerl:set_ann('$2', '$4'). anno_pattern -> other_pattern : '$1'. anno_pattern -> anno_variable : '$1'. @@ -182,23 +183,24 @@ atomic_pattern -> atomic_literal : '$1'. tuple_pattern -> '{' '}' : c_tuple([]). tuple_pattern -> '{' anno_patterns '}' : c_tuple('$2'). -map_pattern -> '~' '{' '}' '~' : #c_map{es=[]}. +map_pattern -> '~' '{' '}' '~' : c_map_pattern([]). map_pattern -> '~' '{' map_pair_patterns '}' '~' : - #c_map{es=lists:sort('$3')}. + c_map_pattern(lists:sort('$3')). map_pair_patterns -> map_pair_pattern : ['$1']. map_pair_patterns -> map_pair_pattern ',' map_pair_patterns : ['$1' | '$3']. -map_pair_pattern -> '~' '<' anno_pattern ',' anno_pattern '>' : - #c_map_pair{op=#c_literal{val=exact},key='$3',val='$5'}. +map_pair_pattern -> anno_expression ':=' anno_pattern : + #c_map_pair{op=#c_literal{val=exact}, + key='$1',val='$3'}. cons_pattern -> '[' anno_pattern tail_pattern : - #c_cons{hd='$2',tl='$3'}. + c_cons('$2', '$3'). tail_pattern -> ']' : #c_literal{val=[]}. tail_pattern -> '|' anno_pattern ']' : '$2'. tail_pattern -> ',' anno_pattern tail_pattern : - #c_cons{hd='$2',tl='$3'}. + c_cons('$2', '$3'). binary_pattern -> '#' '{' '}' '#' : #c_binary{segments=[]}. binary_pattern -> '#' '{' segment_patterns '}' '#' : #c_binary{segments='$3'}. @@ -206,7 +208,7 @@ binary_pattern -> '#' '{' segment_patterns '}' '#' : #c_binary{segments='$3'}. segment_patterns -> segment_pattern ',' segment_patterns : ['$1' | '$3']. segment_patterns -> segment_pattern : ['$1']. -segment_pattern -> '#' '<' anno_pattern '>' '(' anno_patterns ')': +segment_pattern -> '#' '<' anno_pattern '>' '(' anno_expressions ')': case '$6' of [S,U,T,Fs] -> #c_bitstr{val='$3',size=S,unit=U,type=T,flags=Fs}; @@ -222,7 +224,7 @@ anno_variables -> anno_variable : ['$1']. anno_variable -> variable : '$1'. anno_variable -> '(' variable '-|' annotation ')' : - core_lib:set_anno('$2', '$4'). + cerl:set_ann('$2', '$4'). %% Expressions %% Must split expressions into two levels as nested value expressions @@ -230,7 +232,7 @@ anno_variable -> '(' variable '-|' annotation ')' : anno_expression -> expression : '$1'. anno_expression -> '(' expression '-|' annotation ')' : - core_lib:set_anno('$2', '$4'). + cerl:set_ann('$2', '$4'). anno_expressions -> anno_expression ',' anno_expressions : ['$1' | '$3']. anno_expressions -> anno_expression : ['$1']. @@ -279,15 +281,15 @@ cons_literal -> '[' literal tail_literal : c_cons('$2', '$3'). tail_literal -> ']' : #c_literal{val=[]}. tail_literal -> '|' literal ']' : '$2'. 
-tail_literal -> ',' literal tail_literal : #c_cons{hd='$2',tl='$3'}. +tail_literal -> ',' literal tail_literal : c_cons('$2', '$3'). tuple -> '{' '}' : c_tuple([]). tuple -> '{' anno_expressions '}' : c_tuple('$2'). -map_expr -> '~' '{' '}' '~' : #c_map{es=[]}. -map_expr -> '~' '{' map_pairs '}' '~' : #c_map{es='$3'}. -map_expr -> '~' '{' map_pairs '|' variable '}' '~' : #c_map{arg='$5',es='$3'}. -map_expr -> '~' '{' map_pairs '|' map_expr '}' '~' : #c_map{arg='$5',es='$3'}. +map_expr -> '~' '{' '}' '~' : c_map([]). +map_expr -> '~' '{' map_pairs '}' '~' : c_map('$3'). +map_expr -> '~' '{' map_pairs '|' variable '}' '~' : ann_c_map([], '$5', '$3'). +map_expr -> '~' '{' map_pairs '|' map_expr '}' '~' : ann_c_map([], '$5', '$3'). map_pairs -> map_pair : ['$1']. map_pairs -> map_pair ',' map_pairs : ['$1' | '$3']. @@ -295,10 +297,10 @@ map_pairs -> map_pair ',' map_pairs : ['$1' | '$3']. map_pair -> map_pair_assoc : '$1'. map_pair -> map_pair_exact : '$1'. -map_pair_assoc -> '::' '<' anno_expression ',' anno_expression'>' : - #c_map_pair{op=#c_literal{val=assoc},key='$3',val='$5'}. -map_pair_exact -> '~' '<' anno_expression ',' anno_expression'>' : - #c_map_pair{op=#c_literal{val=exact},key='$3',val='$5'}. +map_pair_assoc -> anno_expression '=>' anno_expression : + #c_map_pair{op=#c_literal{val=assoc},key='$1',val='$3'}. +map_pair_exact -> anno_expression ':=' anno_expression : + #c_map_pair{op=#c_literal{val=exact},key='$1',val='$3'}. cons -> '[' anno_expression tail : c_cons('$2', '$3'). @@ -307,7 +309,7 @@ tail -> '|' anno_expression ']' : '$2'. tail -> ',' anno_expression tail : c_cons('$2', '$3'). binary -> '#' '{' '}' '#' : #c_literal{val = <<>>}. -binary -> '#' '{' segments '}' '#' : #c_binary{segments='$3'}. +binary -> '#' '{' segments '}' '#' : make_binary('$3'). segments -> segment ',' segments : ['$1' | '$3']. segments -> segment : ['$1']. @@ -326,7 +328,7 @@ function_name -> atom '/' integer : anno_function_name -> function_name : '$1'. anno_function_name -> '(' function_name '-|' annotation ')' : - core_lib:set_anno('$2', '$4'). + cerl:set_ann('$2', '$4'). let_vars -> anno_variable : ['$1']. let_vars -> '<' '>' : []. @@ -354,7 +356,7 @@ anno_clauses -> anno_clause : ['$1']. anno_clause -> clause : '$1'. anno_clause -> '(' clause '-|' annotation ')' : - core_lib:set_anno('$2', '$4'). + cerl:set_ann('$2', '$4'). clause -> clause_pattern 'when' anno_expression '->' anno_expression : #c_clause{pats='$1',guard='$3',body='$5'}. @@ -410,9 +412,55 @@ Erlang code. -include("core_parse.hrl"). --import(cerl, [c_cons/2,c_tuple/1]). +-import(cerl, [ann_c_map/3,c_cons/2,c_map/1,c_map_pattern/1,c_tuple/1]). tok_val(T) -> element(3, T). tok_line(T) -> element(2, T). +%% make_binary([#c_bitstr{}]) -> #c_binary{} | #c_literal{} +%% Create either #c_binary{} or #c_literal{} from the binary segments. +%% In certain contexts, such as keys for maps, only literals and +%% variables are allowed, so we must not create a #c_binary{} +%% record in those situation. +%% +%% To keep this function simple, we use a crude heuristic. We will +%% assume that Core Erlang has been produced by core_pp. If the +%% segments *could* have been output from a literal binary by +%% core_pp, we will create a #c_literal{}. Otherwise we will create a +%% #c_binary{} record. + +make_binary(Segs) -> + try make_lit_bin(<<>>, Segs) of + Bs when is_bitstring(Bs) -> + #c_literal{val=Bs} + catch + throw:impossible -> + #c_binary{segments=Segs} + end. 
+ +make_lit_bin(Acc, [#c_bitstr{val=I0,size=Sz0,unit=U0,type=Type0,flags=F0}|T]) -> + I = get_lit_val(I0), + Sz = get_lit_val(Sz0), + U = get_lit_val(U0), + Type = get_lit_val(Type0), + F = get_lit_val(F0), + if + is_integer(I), U =:= 1, Type =:= integer, F =:= [unsigned,big] -> + ok; + true -> + throw(impossible) + end, + if + Sz =< 8, T =:= [] -> + <<Acc/binary,I:Sz>>; + Sz =:= 8 -> + make_lit_bin(<<Acc/binary,I:8>>, T); + true -> + throw(impossible) + end; +make_lit_bin(Acc, []) -> Acc. + +get_lit_val(#c_literal{val=Val}) -> Val; +get_lit_val(_) -> throw(impossible). + %% vim: syntax=erlang diff --git a/lib/compiler/src/core_pp.erl b/lib/compiler/src/core_pp.erl index 83412ecdd7..9cfca88e8c 100644 --- a/lib/compiler/src/core_pp.erl +++ b/lib/compiler/src/core_pp.erl @@ -45,7 +45,7 @@ format(Node) -> format(Node, #ctxt{}). maybe_anno(Node, Fun, Ctxt) -> - As = core_lib:get_anno(Node), + As = cerl:get_ann(Node), case get_line(As) of none -> maybe_anno(Node, Fun, Ctxt, As); @@ -125,8 +125,8 @@ format_1(#c_literal{anno=A,val=M},Ctxt) when is_map(M) -> _ -> assoc end, Cpairs = [#c_map_pair{op=#c_literal{val=Op}, - key=#c_literal{val=V}, - val=#c_literal{val=K}} || {K,V} <- Pairs], + key=#c_literal{val=K}, + val=#c_literal{val=V}} || {K,V} <- Pairs], format_1(#c_map{anno=A,arg=#c_literal{val=#{}},es=Cpairs},Ctxt); format_1(#c_var{name={I,A}}, _) -> [core_atom(I),$/,integer_to_list(A)]; @@ -183,15 +183,9 @@ format_1(#c_map{arg=Var,es=Es}, Ctxt) -> "}~" ]; format_1(#c_map_pair{op=#c_literal{val=assoc},key=K,val=V}, Ctxt) -> - ["::<", - format_hseq([K,V], ",", add_indent(Ctxt, 1), fun format/2), - ">" - ]; + format_map_pair("=>", K, V, Ctxt); format_1(#c_map_pair{op=#c_literal{val=exact},key=K,val=V}, Ctxt) -> - ["~<", - format_hseq([K,V], ",", add_indent(Ctxt, 1), fun format/2), - ">" - ]; + format_map_pair(":=", K, V, Ctxt); format_1(#c_cons{hd=H,tl=T}, Ctxt) -> Txt = ["["|format(H, add_indent(Ctxt, 1))], [Txt|format_list_tail(T, add_indent(Ctxt, width(Txt, Ctxt)))]; @@ -201,7 +195,7 @@ format_1(#c_alias{var=V,pat=P}, Ctxt) -> Txt = [format(V, Ctxt)|" = "], [Txt|format(P, add_indent(Ctxt, width(Txt, Ctxt)))]; format_1(#c_let{vars=Vs0,arg=A,body=B}, Ctxt) -> - Vs = [core_lib:set_anno(V, []) || V <- Vs0], + Vs = [cerl:set_ann(V, []) || V <- Vs0], case is_simple_term(A) of false -> Ctxt1 = add_indent(Ctxt, Ctxt#ctxt.body_indent), @@ -219,7 +213,7 @@ format_1(#c_let{vars=Vs0,arg=A,body=B}, Ctxt) -> ["let ", format_values(Vs, add_indent(Ctxt, 4)), " = ", - format(core_lib:set_anno(A, []), Ctxt1), + format(cerl:set_ann(A, []), Ctxt1), nl_indent(Ctxt), "in " | format(B, add_indent(Ctxt, 4)) @@ -448,6 +442,12 @@ format_list_tail(#c_cons{anno=[],hd=H,tl=T}, Ctxt) -> format_list_tail(Tail, Ctxt) -> ["|",format(Tail, add_indent(Ctxt, 1)),"]"]. +format_map_pair(Op, K, V, Ctxt0) -> + Ctxt1 = add_indent(Ctxt0, 1), + Txt = format(K, set_class(Ctxt1, expr)), + Ctxt2 = add_indent(Ctxt0, width(Txt, Ctxt1)), + [Txt,Op,format(V, Ctxt2)]. + indent(Ctxt) -> indent(Ctxt#ctxt.indent, Ctxt). 
indent(N, _) when N =< 0 -> ""; diff --git a/lib/compiler/src/core_scan.erl b/lib/compiler/src/core_scan.erl index b7799b373a..8ab20b1982 100644 --- a/lib/compiler/src/core_scan.erl +++ b/lib/compiler/src/core_scan.erl @@ -271,8 +271,10 @@ scan1("->" ++ Cs, Toks, Pos) -> scan1(Cs, [{'->',Pos}|Toks], Pos); scan1("-|" ++ Cs, Toks, Pos) -> scan1(Cs, [{'-|',Pos}|Toks], Pos); -scan1("::" ++ Cs, Toks, Pos) -> - scan1(Cs, [{'::',Pos}|Toks], Pos); +scan1(":=" ++ Cs, Toks, Pos) -> + scan1(Cs, [{':=',Pos}|Toks], Pos); +scan1("=>" ++ Cs, Toks, Pos) -> + scan1(Cs, [{'=>',Pos}|Toks], Pos); scan1([C|Cs], Toks, Pos) -> %Punctuation character P = list_to_atom([C]), scan1(Cs, [{P,Pos}|Toks], Pos); diff --git a/lib/compiler/src/erl_bifs.erl b/lib/compiler/src/erl_bifs.erl index 6c75538194..bcc2297250 100644 --- a/lib/compiler/src/erl_bifs.erl +++ b/lib/compiler/src/erl_bifs.erl @@ -134,6 +134,7 @@ is_pure(math, erf, 1) -> true; is_pure(math, erfc, 1) -> true; is_pure(math, exp, 1) -> true; is_pure(math, log, 1) -> true; +is_pure(math, log2, 1) -> true; is_pure(math, log10, 1) -> true; is_pure(math, pow, 2) -> true; is_pure(math, sin, 1) -> true; diff --git a/lib/compiler/src/sys_core_fold.erl b/lib/compiler/src/sys_core_fold.erl index ed8f609082..0d020578f5 100644 --- a/lib/compiler/src/sys_core_fold.erl +++ b/lib/compiler/src/sys_core_fold.erl @@ -96,6 +96,10 @@ t=[], %Types in_guard=false}). %In guard or not. +-type type_info() :: cerl:cerl() | 'bool' | 'integer'. +-type yes_no_maybe() :: 'yes' | 'no' | 'maybe'. +-type sub() :: #sub{}. + -spec module(cerl:c_module(), [compile:option()]) -> {'ok', cerl:c_module(), [_]}. @@ -293,7 +297,8 @@ expr(#c_seq{arg=Arg0,body=B0}=Seq0, Ctxt, Sub) -> false -> Seq0#c_seq{arg=Arg,body=B1} end end; -expr(#c_let{}=Let, Ctxt, Sub) -> +expr(#c_let{}=Let0, Ctxt, Sub) -> + Let = opt_case_in_let(Let0), case simplify_let(Let, Sub) of impossible -> %% The argument for the let is "simple", i.e. has no @@ -313,7 +318,7 @@ expr(#c_letrec{defs=Fs0,body=B0}=Letrec, Ctxt, Sub) -> Fs1 = map(fun ({Name,Fb}) -> {Name,expr(Fb, {letrec,Ctxt}, Sub)} end, Fs0), - B1 = body(B0, value, Sub), + B1 = body(B0, Ctxt, Sub), Letrec#c_letrec{defs=Fs1,body=B1}; expr(#c_case{}=Case0, Ctxt, Sub) -> %% Ideally, the compiler should only emit warnings when there is @@ -462,10 +467,7 @@ is_safe_simple(#c_call{module=#c_literal{val=erlang}, case erl_internal:bool_op(Name, NumArgs) of true -> %% Boolean operators are safe if the arguments are boolean. - all(fun(#c_var{name=V}) -> is_boolean_type(V, Sub); - (#c_literal{val=Lit}) -> is_boolean(Lit); - (_) -> false - end, Args); + all(fun(C) -> is_boolean_type(C, Sub) =:= yes end, Args); false -> %% We need a rather complicated test to ensure that %% we only allow safe calls that are allowed in a guard. @@ -607,14 +609,6 @@ eval_binary_1([#c_bitstr{val=#c_literal{val=Val},size=#c_literal{val=Sz}, error:_ -> throw(impossible) end; -eval_binary_1([#c_bitstr{val=#c_literal{},size=#c_literal{}, - unit=#c_literal{},type=#c_literal{}, - flags=#c_cons{}=Flags}=Bitstr|Ss], Acc0) -> - case cerl:fold_literal(Flags) of - #c_literal{} = Flags1 -> - eval_binary_1([Bitstr#c_bitstr{flags=Flags1}|Ss], Acc0); - _ -> throw(impossible) - end; eval_binary_1([], Acc) -> Acc; eval_binary_1(_, _) -> throw(impossible). @@ -688,23 +682,15 @@ count_bits_1(Int, Bits) -> count_bits_1(Int bsr 64, Bits+64). %% a rewritten expression consisting of a sequence of %% the arguments only is returned. 
-useless_call(effect, #c_call{anno=Anno, - module=#c_literal{val=Mod}, +useless_call(effect, #c_call{module=#c_literal{val=Mod}, name=#c_literal{val=Name}, args=Args}=Call) -> A = length(Args), case erl_bifs:is_safe(Mod, Name, A) of false -> case erl_bifs:is_pure(Mod, Name, A) of - true -> - case member(result_not_wanted, Anno) of - false -> - add_warning(Call, result_ignored); - true -> - ok - end; - false -> - ok + true -> add_warning(Call, result_ignored); + false -> ok end, no; true -> @@ -730,385 +716,23 @@ make_effect_seq([], _) -> void(). call(#c_call{args=As}=Call, #c_literal{val=M}=M0, #c_literal{val=N}=N0, Sub) -> case get(no_inline_list_funcs) of true -> - call_0(Call, M0, N0, As, Sub); + call_1(Call, M0, N0, As, Sub); false -> - call_1(Call, M, N, As, Sub) + case sys_core_fold_lists:call(Call, M, N, As) of + none -> + call_1(Call, M, N, As, Sub); + Core -> + expr(Core, Sub) + end + end; call(#c_call{args=As}=Call, M, N, Sub) -> - call_0(Call, M, N, As, Sub). + call_1(Call, M, N, As, Sub). -call_0(Call, M, N, As0, Sub) -> +call_1(Call, M, N, As0, Sub) -> As1 = expr_list(As0, value, Sub), fold_call(Call#c_call{args=As1}, M, N, As1, Sub). -%% We inline some very common higher order list operations. -%% We use the same evaluation order as the library function. - -call_1(#c_call{anno=Anno}, lists, all, [Arg1,Arg2], Sub) -> - Loop = #c_var{name={'lists^all',1}}, - F = #c_var{name='F'}, - Xs = #c_var{name='Xs'}, - X = #c_var{name='X'}, - Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]}, - CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true}, - body=#c_apply{anno=Anno, op=Loop, args=[Xs]}}, - CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true}, - body=#c_literal{val=false}}, - CC3 = #c_clause{pats=[X], guard=#c_literal{val=true}, - body=match_fail(Anno, Err1)}, - C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true}, - body=#c_case{arg=#c_apply{anno=Anno, op=F, args=[X]}, - clauses = [CC1, CC2, CC3]}}, - C2 = #c_clause{pats=[#c_literal{val=[]}], - guard=#c_call{module=#c_literal{val=erlang}, - name=#c_literal{val=is_function}, - args=[F, #c_literal{val=1}]}, - body=#c_literal{val=true}}, - Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]}, - C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true}, - body=match_fail([{function_name,{'lists^all',1}}|Anno], Err2)}, - Fun = #c_fun{vars=[Xs], - body=#c_case{arg=Xs, clauses=[C1, C2, C3]}}, - L = #c_var{name='L'}, - expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]}, - body=#c_letrec{defs=[{Loop,Fun}], - body=#c_apply{anno=Anno, op=Loop, args=[L]}}}, - Sub); -call_1(#c_call{anno=Anno}, lists, any, [Arg1,Arg2], Sub) -> - Loop = #c_var{name={'lists^any',1}}, - F = #c_var{name='F'}, - Xs = #c_var{name='Xs'}, - X = #c_var{name='X'}, - Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]}, - CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true}, - body=#c_literal{val=true}}, - CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true}, - body=#c_apply{anno=Anno, op=Loop, args=[Xs]}}, - CC3 = #c_clause{pats=[X], guard=#c_literal{val=true}, - body=match_fail(Anno, Err1)}, - C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true}, - body=#c_case{arg=#c_apply{anno=Anno, op=F, args=[X]}, - clauses = [CC1, CC2, CC3]}}, - C2 = #c_clause{pats=[#c_literal{val=[]}], - guard=#c_call{module=#c_literal{val=erlang}, - name=#c_literal{val=is_function}, - args=[F, #c_literal{val=1}]}, - body=#c_literal{val=false}}, - Err2 = 
#c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]}, - C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true}, - body=match_fail([{function_name,{'lists^any',1}}|Anno], Err2)}, - Fun = #c_fun{vars=[Xs], - body=#c_case{arg=Xs, clauses=[C1, C2, C3]}}, - L = #c_var{name='L'}, - expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]}, - body=#c_letrec{defs=[{Loop,Fun}], - body=#c_apply{anno=Anno, op=Loop, args=[L]}}}, - Sub); -call_1(#c_call{anno=Anno}, lists, foreach, [Arg1,Arg2], Sub) -> - Loop = #c_var{name={'lists^foreach',1}}, - F = #c_var{name='F'}, - Xs = #c_var{name='Xs'}, - X = #c_var{name='X'}, - C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true}, - body=#c_seq{arg=#c_apply{anno=Anno, op=F, args=[X]}, - body=#c_apply{anno=Anno, op=Loop, args=[Xs]}}}, - C2 = #c_clause{pats=[#c_literal{val=[]}], - guard=#c_call{module=#c_literal{val=erlang}, - name=#c_literal{val=is_function}, - args=[F, #c_literal{val=1}]}, - body=#c_literal{val=ok}}, - Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]}, - C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true}, - body=match_fail([{function_name,{'lists^foreach',1}}|Anno], Err)}, - Fun = #c_fun{vars=[Xs], - body=#c_case{arg=Xs, clauses=[C1, C2, C3]}}, - L = #c_var{name='L'}, - expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]}, - body=#c_letrec{defs=[{Loop,Fun}], - body=#c_apply{anno=Anno, op=Loop, args=[L]}}}, - Sub); -call_1(#c_call{anno=Anno}, lists, map, [Arg1,Arg2], Sub) -> - Loop = #c_var{name={'lists^map',1}}, - F = #c_var{name='F'}, - Xs = #c_var{name='Xs'}, - X = #c_var{name='X'}, - H = #c_var{name='H'}, - C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true}, - body=#c_let{vars=[H], arg=#c_apply{anno=Anno, - op=F, - args=[X]}, - body=#c_cons{hd=H, - anno=[compiler_generated], - tl=#c_apply{anno=Anno, - op=Loop, - args=[Xs]}}}}, - C2 = #c_clause{pats=[#c_literal{val=[]}], - guard=#c_call{module=#c_literal{val=erlang}, - name=#c_literal{val=is_function}, - args=[F, #c_literal{val=1}]}, - body=#c_literal{val=[]}}, - Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]}, - C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true}, - body=match_fail([{function_name,{'lists^map',1}}|Anno], Err)}, - Fun = #c_fun{vars=[Xs], - body=#c_case{arg=Xs, clauses=[C1, C2, C3]}}, - L = #c_var{name='L'}, - expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]}, - body=#c_letrec{defs=[{Loop,Fun}], - body=#c_apply{anno=Anno, op=Loop, args=[L]}}}, - Sub); -call_1(#c_call{anno=Anno}, lists, flatmap, [Arg1,Arg2], Sub) -> - Loop = #c_var{name={'lists^flatmap',1}}, - F = #c_var{name='F'}, - Xs = #c_var{name='Xs'}, - X = #c_var{name='X'}, - H = #c_var{name='H'}, - C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true}, - body=#c_let{vars=[H], - arg=#c_apply{anno=Anno, op=F, args=[X]}, - body=#c_call{anno=[compiler_generated|Anno], - module=#c_literal{val=erlang}, - name=#c_literal{val='++'}, - args=[H, - #c_apply{anno=Anno, - op=Loop, - args=[Xs]}]}}}, - C2 = #c_clause{pats=[#c_literal{val=[]}], - guard=#c_call{module=#c_literal{val=erlang}, - name=#c_literal{val=is_function}, - args=[F, #c_literal{val=1}]}, - body=#c_literal{val=[]}}, - Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]}, - C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true}, - body=match_fail([{function_name,{'lists^flatmap',1}}|Anno], Err)}, - Fun = #c_fun{vars=[Xs], - body=#c_case{arg=Xs, clauses=[C1, C2, C3]}}, - L = #c_var{name='L'}, - expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]}, - 
body=#c_letrec{defs=[{Loop,Fun}], - body=#c_apply{anno=Anno, op=Loop, args=[L]}}}, - Sub); -call_1(#c_call{anno=Anno}, lists, filter, [Arg1,Arg2], Sub) -> - Loop = #c_var{name={'lists^filter',1}}, - F = #c_var{name='F'}, - Xs = #c_var{name='Xs'}, - X = #c_var{name='X'}, - B = #c_var{name='B'}, - Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]}, - CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true}, - body=#c_cons{anno=[compiler_generated], hd=X, tl=Xs}}, - CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true}, - body=Xs}, - CC3 = #c_clause{pats=[X], guard=#c_literal{val=true}, - body=match_fail(Anno, Err1)}, - Case = #c_case{arg=B, clauses = [CC1, CC2, CC3]}, - C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true}, - body=#c_let{vars=[B], - arg=#c_apply{anno=Anno, op=F, args=[X]}, - body=#c_let{vars=[Xs], - arg=#c_apply{anno=Anno, - op=Loop, - args=[Xs]}, - body=Case}}}, - C2 = #c_clause{pats=[#c_literal{val=[]}], - guard=#c_call{module=#c_literal{val=erlang}, - name=#c_literal{val=is_function}, - args=[F, #c_literal{val=1}]}, - body=#c_literal{val=[]}}, - Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]}, - C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true}, - body=match_fail([{function_name,{'lists^filter',1}}|Anno], Err2)}, - Fun = #c_fun{vars=[Xs], - body=#c_case{arg=Xs, clauses=[C1, C2, C3]}}, - L = #c_var{name='L'}, - expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]}, - body=#c_letrec{defs=[{Loop,Fun}], - body=#c_apply{anno=Anno, op=Loop, args=[L]}}}, - Sub); -call_1(#c_call{anno=Anno}, lists, foldl, [Arg1,Arg2,Arg3], Sub) -> - Loop = #c_var{name={'lists^foldl',2}}, - F = #c_var{name='F'}, - Xs = #c_var{name='Xs'}, - X = #c_var{name='X'}, - A = #c_var{name='A'}, - C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true}, - body=#c_apply{anno=Anno, - op=Loop, - args=[Xs, #c_apply{anno=Anno, - op=F, - args=[X, A]}]}}, - C2 = #c_clause{pats=[#c_literal{val=[]}], - guard=#c_call{module=#c_literal{val=erlang}, - name=#c_literal{val=is_function}, - args=[F, #c_literal{val=2}]}, - body=A}, - Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, A, Xs]}, - C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true}, - body=match_fail([{function_name,{'lists^foldl',2}}|Anno], Err)}, - Fun = #c_fun{vars=[Xs, A], - body=#c_case{arg=Xs, clauses=[C1, C2, C3]}}, - L = #c_var{name='L'}, - expr(#c_let{vars=[F, A, L], arg=#c_values{es=[Arg1, Arg2, Arg3]}, - body=#c_letrec{defs=[{Loop,Fun}], - body=#c_apply{anno=Anno, op=Loop, args=[L, A]}}}, - Sub); -call_1(#c_call{anno=Anno}, lists, foldr, [Arg1,Arg2,Arg3], Sub) -> - Loop = #c_var{name={'lists^foldr',2}}, - F = #c_var{name='F'}, - Xs = #c_var{name='Xs'}, - X = #c_var{name='X'}, - A = #c_var{name='A'}, - C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true}, - body=#c_apply{anno=Anno, - op=F, - args=[X, #c_apply{anno=Anno, - op=Loop, - args=[Xs, A]}]}}, - C2 = #c_clause{pats=[#c_literal{val=[]}], - guard=#c_call{module=#c_literal{val=erlang}, - name=#c_literal{val=is_function}, - args=[F, #c_literal{val=2}]}, - body=A}, - Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, A, Xs]}, - C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true}, - body=match_fail([{function_name,{'lists^foldr',2}}|Anno], Err)}, - Fun = #c_fun{vars=[Xs, A], - body=#c_case{arg=Xs, clauses=[C1, C2, C3]}}, - L = #c_var{name='L'}, - expr(#c_let{vars=[F, A, L], arg=#c_values{es=[Arg1, Arg2, Arg3]}, - body=#c_letrec{defs=[{Loop,Fun}], - body=#c_apply{anno=Anno, op=Loop, 
args=[L, A]}}}, - Sub); -call_1(#c_call{anno=Anno}, lists, mapfoldl, [Arg1,Arg2,Arg3], Sub) -> - Loop = #c_var{name={'lists^mapfoldl',2}}, - F = #c_var{name='F'}, - Xs = #c_var{name='Xs'}, - X = #c_var{name='X'}, - Avar = #c_var{name='A'}, - Match = - fun (A, P, E) -> - C1 = #c_clause{pats=[P], guard=#c_literal{val=true}, body=E}, - Err = #c_tuple{es=[#c_literal{val='badmatch'}, X]}, - C2 = #c_clause{pats=[X], guard=#c_literal{val=true}, - body=match_fail(Anno, Err)}, - #c_case{arg=A, clauses=[C1, C2]} - end, - C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true}, - body=Match(#c_apply{anno=Anno, op=F, args=[X, Avar]}, - #c_tuple{es=[X, Avar]}, -%%% Tuple passing version - Match(#c_apply{anno=Anno, - op=Loop, - args=[Xs, Avar]}, - #c_tuple{es=[Xs, Avar]}, - #c_tuple{anno=[compiler_generated], - es=[#c_cons{anno=[compiler_generated], - hd=X, tl=Xs}, - Avar]}) -%%% Multiple-value version -%%% #c_let{vars=[Xs,A], -%%% %% The tuple here will be optimised -%%% %% away later; no worries. -%%% arg=#c_apply{op=Loop, args=[Xs, A]}, -%%% body=#c_values{es=[#c_cons{hd=X, tl=Xs}, -%%% A]}} - )}, - C2 = #c_clause{pats=[#c_literal{val=[]}], - guard=#c_call{module=#c_literal{val=erlang}, - name=#c_literal{val=is_function}, - args=[F, #c_literal{val=2}]}, -%%% Tuple passing version - body=#c_tuple{anno=[compiler_generated], - es=[#c_literal{val=[]}, Avar]}}, -%%% Multiple-value version -%%% body=#c_values{es=[#c_literal{val=[]}, A]}}, - Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Avar, Xs]}, - C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true}, - body=match_fail([{function_name,{'lists^mapfoldl',2}}|Anno], Err)}, - Fun = #c_fun{vars=[Xs, Avar], - body=#c_case{arg=Xs, clauses=[C1, C2, C3]}}, - L = #c_var{name='L'}, - expr(#c_let{vars=[F, Avar, L], arg=#c_values{es=[Arg1, Arg2, Arg3]}, - body=#c_letrec{defs=[{Loop,Fun}], -%%% Tuple passing version - body=#c_apply{anno=Anno, - op=Loop, - args=[L, Avar]}}}, -%%% Multiple-value version -%%% body=#c_let{vars=[Xs, A], -%%% arg=#c_apply{op=Loop, -%%% args=[L, A]}, -%%% body=#c_tuple{es=[Xs, A]}}}}, - Sub); -call_1(#c_call{anno=Anno}, lists, mapfoldr, [Arg1,Arg2,Arg3], Sub) -> - Loop = #c_var{name={'lists^mapfoldr',2}}, - F = #c_var{name='F'}, - Xs = #c_var{name='Xs'}, - X = #c_var{name='X'}, - Avar = #c_var{name='A'}, - Match = - fun (A, P, E) -> - C1 = #c_clause{pats=[P], guard=#c_literal{val=true}, body=E}, - Err = #c_tuple{es=[#c_literal{val='badmatch'}, X]}, - C2 = #c_clause{pats=[X], guard=#c_literal{val=true}, - body=match_fail(Anno, Err)}, - #c_case{arg=A, clauses=[C1, C2]} - end, - C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true}, -%%% Tuple passing version - body=Match(#c_apply{anno=Anno, - op=Loop, - args=[Xs, Avar]}, - #c_tuple{es=[Xs, Avar]}, - Match(#c_apply{anno=Anno, op=F, args=[X, Avar]}, - #c_tuple{es=[X, Avar]}, - #c_tuple{anno=[compiler_generated], - es=[#c_cons{anno=[compiler_generated], - hd=X, tl=Xs}, Avar]})) -%%% Multiple-value version -%%% body=#c_let{vars=[Xs,A], -%%% %% The tuple will be optimised away -%%% arg=#c_apply{op=Loop, args=[Xs, A]}, -%%% body=Match(#c_apply{op=F, args=[X, A]}, -%%% #c_tuple{es=[X, A]}, -%%% #c_values{es=[#c_cons{hd=X, tl=Xs}, -%%% A]})} - }, - C2 = #c_clause{pats=[#c_literal{val=[]}], - guard=#c_call{module=#c_literal{val=erlang}, - name=#c_literal{val=is_function}, - args=[F, #c_literal{val=2}]}, -%%% Tuple passing version - body=#c_tuple{anno=[compiler_generated], - es=[#c_literal{val=[]}, Avar]}}, -%%% Multiple-value version -%%% 
body=#c_values{es=[#c_literal{val=[]}, A]}}, - Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Avar, Xs]}, - C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true}, - body=match_fail([{function_name,{'lists^mapfoldr',2}}|Anno], Err)}, - Fun = #c_fun{vars=[Xs, Avar], - body=#c_case{arg=Xs, clauses=[C1, C2, C3]}}, - L = #c_var{name='L'}, - expr(#c_let{vars=[F, Avar, L], arg=#c_values{es=[Arg1, Arg2, Arg3]}, - body=#c_letrec{defs=[{Loop,Fun}], -%%% Tuple passing version - body=#c_apply{anno=Anno, - op=Loop, - args=[L, Avar]}}}, -%%% Multiple-value version -%%% body=#c_let{vars=[Xs, A], -%%% arg=#c_apply{op=Loop, -%%% args=[L, A]}, -%%% body=#c_tuple{es=[Xs, A]}}}}, - Sub); -call_1(#c_call{module=M, name=N}=Call, _, _, As, Sub) -> - call_0(Call, M, N, As, Sub). - -match_fail(Anno, Arg) -> - #c_primop{anno=Anno, - name=#c_literal{val='match_fail'}, - args=[Arg]}. - %% fold_call(Call, Mod, Name, Args, Sub) -> Expr. %% Try to safely evaluate the call. Just try to evaluate arguments, %% do the call and convert return values to literals. If this @@ -1133,29 +757,33 @@ fold_call_1(Call, Mod, Name, Args, Sub) -> true -> fold_call_2(Call, Mod, Name, Args, Sub) end. -fold_call_2(Call, Module, Name, Args0, Sub) -> - try - Args = [core_lib:literal_value(A) || A <- Args0], - try apply(Module, Name, Args) of - Val -> - case cerl:is_literal_term(Val) of - true -> - #c_literal{val=Val}; - false -> - %% Successful evaluation, but it was not - %% possible to express the computed value as a literal. - Call - end - catch - error:Reason -> - %% Evaluation of the function failed. Warn and replace - %% the call with a call to erlang:error/1. - eval_failure(Call, Reason) - end +fold_call_2(Call, Module, Name, Args, Sub) -> + case all(fun cerl:is_literal/1, Args) of + true -> + %% All arguments are literals. + fold_lit_args(Call, Module, Name, Args); + false -> + %% At least one non-literal argument. + fold_non_lit_args(Call, Module, Name, Args, Sub) + end. + +fold_lit_args(Call, Module, Name, Args0) -> + Args = [cerl:concrete(A) || A <- Args0], + try apply(Module, Name, Args) of + Val -> + case cerl:is_literal_term(Val) of + true -> + cerl:abstract(Val); + false -> + %% Successful evaluation, but it was not possible + %% to express the computed value as a literal. + Call + end catch - error:_ -> - %% There was at least one non-literal argument. - fold_non_lit_args(Call, Module, Name, Args0, Sub) + error:Reason -> + %% Evaluation of the function failed. Warn and replace + %% the call with a call to erlang:error/1. + eval_failure(Call, Reason) end. %% fold_non_lit_args(Call, Module, Name, Args, Sub) -> Expr. @@ -1194,36 +822,43 @@ fold_non_lit_args(Call, _, _, _, _) -> Call. %% Evaluate a relational operation using type information. 
eval_rel_op(Call, Op, [#c_var{name=V},#c_var{name=V}], _) -> Bool = erlang:Op(same, same), - #c_literal{anno=core_lib:get_anno(Call),val=Bool}; -eval_rel_op(Call, '=:=', [#c_var{name=V}=Var,#c_literal{val=true}], Sub) -> + #c_literal{anno=cerl:get_ann(Call),val=Bool}; +eval_rel_op(Call, '=:=', [Term,#c_literal{val=true}], Sub) -> %% BoolVar =:= true ==> BoolVar - case is_boolean_type(V, Sub) of - true -> Var; - false -> Call + case is_boolean_type(Term, Sub) of + yes -> Term; + maybe -> Call; + no -> #c_literal{val=false} end; -eval_rel_op(Call, '==', Ops, _Sub) -> - case is_exact_eq_ok(Ops) of +eval_rel_op(Call, '==', Ops, Sub) -> + case is_exact_eq_ok(Ops, Sub) of true -> - Name = #c_literal{anno=core_lib:get_anno(Call),val='=:='}, + Name = #c_literal{anno=cerl:get_ann(Call),val='=:='}, Call#c_call{name=Name}; false -> Call end; -eval_rel_op(Call, '/=', Ops, _Sub) -> - case is_exact_eq_ok(Ops) of +eval_rel_op(Call, '/=', Ops, Sub) -> + case is_exact_eq_ok(Ops, Sub) of true -> - Name = #c_literal{anno=core_lib:get_anno(Call),val='=/='}, + Name = #c_literal{anno=cerl:get_ann(Call),val='=/='}, Call#c_call{name=Name}; false -> Call end; eval_rel_op(Call, _, _, _) -> Call. -is_exact_eq_ok([#c_literal{val=Lit}|_]) -> +is_exact_eq_ok([A,B]=L, Sub) -> + case is_int_type(A, Sub) =:= yes andalso is_int_type(B, Sub) =:= yes of + true -> true; + false -> is_exact_eq_ok_1(L) + end. + +is_exact_eq_ok_1([#c_literal{val=Lit}|_]) -> is_non_numeric(Lit); -is_exact_eq_ok([_|T]) -> - is_exact_eq_ok(T); -is_exact_eq_ok([]) -> false. +is_exact_eq_ok_1([_|T]) -> + is_exact_eq_ok_1(T); +is_exact_eq_ok_1([]) -> false. is_non_numeric([H|T]) -> is_non_numeric(H) andalso is_non_numeric(T); @@ -1247,40 +882,31 @@ is_non_numeric_tuple(_Tuple, 0) -> true. %% there must be at least one non-literal argument (i.e. %% there is no need to handle the case that all argments %% are literal). -eval_bool_op(Call, 'and', [#c_literal{val=true},#c_var{name=V}=Res], Sub) -> - case is_boolean_type(V, Sub) of - true -> Res; - false-> Call - end; -eval_bool_op(Call, 'and', [#c_var{name=V}=Res,#c_literal{val=true}], Sub) -> - case is_boolean_type(V, Sub) of - true -> Res; - false-> Call - end; -eval_bool_op(Call, 'and', [#c_literal{val=false}=Res,#c_var{name=V}], Sub) -> - case is_boolean_type(V, Sub) of - true -> Res; - false-> Call - end; -eval_bool_op(Call, 'and', [#c_var{name=V},#c_literal{val=false}=Res], Sub) -> - case is_boolean_type(V, Sub) of - true -> Res; - false-> Call - end; + +eval_bool_op(Call, 'and', [#c_literal{val=true},Term], Sub) -> + eval_bool_op_1(Call, Term, Term, Sub); +eval_bool_op(Call, 'and', [Term,#c_literal{val=true}], Sub) -> + eval_bool_op_1(Call, Term, Term, Sub); +eval_bool_op(Call, 'and', [#c_literal{val=false}=Res,Term], Sub) -> + eval_bool_op_1(Call, Res, Term, Sub); +eval_bool_op(Call, 'and', [Term,#c_literal{val=false}=Res], Sub) -> + eval_bool_op_1(Call, Res, Term, Sub); eval_bool_op(Call, _, _, _) -> Call. +eval_bool_op_1(Call, Res, Term, Sub) -> + case is_boolean_type(Term, Sub) of + yes -> Res; + no -> eval_failure(Call, badarg); + maybe -> Call + end. + %% Evaluate is_boolean/1 using type information. -eval_is_boolean(Call, #c_var{name=V}, Sub) -> - case is_boolean_type(V, Sub) of - true -> #c_literal{val=true}; - false -> Call - end; -eval_is_boolean(_, #c_cons{}, _) -> - #c_literal{val=false}; -eval_is_boolean(_, #c_tuple{}, _) -> - #c_literal{val=false}; -eval_is_boolean(Call, _, _) -> - Call. 
+eval_is_boolean(Call, Term, Sub) -> + case is_boolean_type(Term, Sub) of + no -> #c_literal{val=false}; + yes -> #c_literal{val=true}; + maybe -> Call + end. %% eval_length(Call, List) -> Val. %% Evaluates the length for the prefix of List which has a known @@ -1330,36 +956,33 @@ eval_append(Call, X, Y) -> %% Evaluates element/2 if the position Pos is a literal and %% the shape of the tuple Tuple is known. %% -eval_element(Call, #c_literal{val=Pos}, #c_tuple{es=Es}, _Types) when is_integer(Pos) -> - if - 1 =< Pos, Pos =< length(Es) -> - lists:nth(Pos, Es); - true -> - eval_failure(Call, badarg) - end; -eval_element(Call, #c_literal{val=Pos}, #c_var{name=V}, Types) +eval_element(Call, #c_literal{val=Pos}, Tuple, Types) when is_integer(Pos) -> - case orddict:find(V, Types#sub.t) of - {ok,#c_tuple{es=Elements}} -> + case get_type(Tuple, Types) of + none -> + Call; + Type -> + Es = case cerl:is_c_tuple(Type) of + false -> []; + true -> cerl:tuple_es(Type) + end, if - 1 =< Pos, Pos =< length(Elements) -> - El = lists:nth(Pos, Elements), + 1 =< Pos, Pos =< length(Es) -> + El = lists:nth(Pos, Es), try - pat_to_expr(El) + cerl:set_ann(pat_to_expr(El), [compiler_generated]) catch throw:impossible -> Call end; true -> + %% Index outside tuple or not a tuple. eval_failure(Call, badarg) - end; - {ok,_} -> - eval_failure(Call, badarg); - error -> - Call + end end; -eval_element(Call, Pos, Tuple, _Types) -> - case is_not_integer(Pos) orelse is_not_tuple(Tuple) of +eval_element(Call, Pos, Tuple, Sub) -> + case is_int_type(Pos, Sub) =:= no orelse + is_tuple_type(Tuple, Sub) =:= no of true -> eval_failure(Call, badarg); false -> @@ -1369,58 +992,55 @@ eval_element(Call, Pos, Tuple, _Types) -> %% eval_is_record(Call, Var, Tag, Size, Types) -> Val. %% Evaluates is_record/3 using type information. %% -eval_is_record(Call, #c_var{name=V}, #c_literal{val=NeededTag}=Lit, +eval_is_record(Call, Term, #c_literal{val=NeededTag}, #c_literal{val=Size}, Types) -> - case orddict:find(V, Types#sub.t) of - {ok,#c_tuple{es=[#c_literal{val=Tag}|_]=Es}} -> - Lit#c_literal{val=Tag =:= NeededTag andalso - length(Es) =:= Size}; - _ -> - Call + case get_type(Term, Types) of + none -> + Call; + Type -> + Es = case cerl:is_c_tuple(Type) of + false -> []; + true -> cerl:tuple_es(Type) + end, + case Es of + [#c_literal{val=Tag}|_] -> + Bool = Tag =:= NeededTag andalso + length(Es) =:= Size, + #c_literal{val=Bool}; + _ -> + #c_literal{val=false} + end end; eval_is_record(Call, _, _, _, _) -> Call. -%% is_not_integer(Core) -> true | false. -%% Returns true if Core is definitely not an integer. - -is_not_integer(#c_literal{val=Val}) when not is_integer(Val) -> true; -is_not_integer(#c_tuple{}) -> true; -is_not_integer(#c_cons{}) -> true; -is_not_integer(#c_map{}) -> true; -is_not_integer(_) -> false. - -%% is_not_tuple(Core) -> true | false. -%% Returns true if Core is definitely not a tuple. - -is_not_tuple(#c_literal{val=Val}) when not is_tuple(Val) -> true; -is_not_tuple(#c_cons{}) -> true; -is_not_tuple(#c_map{}) -> true; -is_not_tuple(_) -> false. - %% eval_setelement(Call, Pos, Tuple, NewVal) -> Core. %% Evaluates setelement/3 if position Pos is an integer -%% the shape of the tuple Tuple is known. +%% and the shape of the tuple Tuple is known. %% -eval_setelement(Call, Pos, Tuple, NewVal) -> - try - eval_setelement_1(Pos, Tuple, NewVal) - catch - error:_ -> - Call - end. 
- -eval_setelement_1(#c_literal{val=Pos}, #c_tuple{anno=A,es=Es}, NewVal) +eval_setelement(Call, #c_literal{val=Pos}, Tuple, NewVal) when is_integer(Pos) -> - ann_c_tuple(A, eval_setelement_2(Pos, Es, NewVal)); -eval_setelement_1(#c_literal{val=Pos}, #c_literal{anno=A,val=Es0}, NewVal) - when is_integer(Pos) -> - Es = [#c_literal{anno=A,val=E} || E <- tuple_to_list(Es0)], - ann_c_tuple(A, eval_setelement_2(Pos, Es, NewVal)). + case cerl:is_data(Tuple) of + false -> + Call; + true -> + Es0 = case cerl:is_c_tuple(Tuple) of + false -> []; + true -> cerl:tuple_es(Tuple) + end, + if + 1 =< Pos, Pos =< length(Es0) -> + Es = eval_setelement_1(Pos, Es0, NewVal), + cerl:update_c_tuple(Tuple, Es); + true -> + eval_failure(Call, badarg) + end + end; +eval_setelement(Call, _, _, _) -> Call. -eval_setelement_2(1, [_|T], NewVal) -> +eval_setelement_1(1, [_|T], NewVal) -> [NewVal|T]; -eval_setelement_2(Pos, [H|T], NewVal) when Pos > 1 -> - [H|eval_setelement_2(Pos-1, T, NewVal)]. +eval_setelement_1(Pos, [H|T], NewVal) when Pos > 1 -> + [H|eval_setelement_1(Pos-1, T, NewVal)]. %% eval_failure(Call, Reason) -> Core. %% Warn for a call that will fail and replace the call with @@ -1500,7 +1120,7 @@ clause(#c_clause{pats=Ps0,guard=G0,body=B0}=Cl, Cexpr, Ctxt, Sub0) -> let_substs(Vs0, As0, Sub0) -> {Vs1,Sub1} = pattern_list(Vs0, Sub0), {Vs2,As1,Ss} = let_substs_1(Vs1, As0, Sub1), - Sub2 = scope_add([V || #c_var{name=V} <- Vs2], Sub1), + Sub2 = sub_add_scope([V || #c_var{name=V} <- Vs2], Sub1), {Vs2,As1, foldl(fun ({V,S}, Sub) -> sub_set_name(V, S, Sub) end, Sub2, Ss)}. @@ -1535,7 +1155,7 @@ pattern(#c_var{}=Pat, Isub, Osub) -> true -> V1 = make_var_name(), Pat1 = #c_var{name=V1}, - {Pat1,sub_set_var(Pat, Pat1, scope_add([V1], Osub))}; + {Pat1,sub_set_var(Pat, Pat1, sub_add_scope([V1], Osub))}; false -> {Pat,sub_del_var(Pat, Osub)} end; @@ -1605,6 +1225,7 @@ is_subst(_) -> false. %% sub_del_var(Var, #sub{}) -> #sub{}. %% sub_subst_var(Var, Value, #sub{}) -> [{Name,Value}]. %% sub_is_val(Var, #sub{}) -> boolean(). +%% sub_add_scope(#sub{}) -> #sub{} %% sub_subst_scope(#sub{}) -> #sub{} %% %% We use the variable name as key so as not have problems with @@ -1615,9 +1236,10 @@ is_subst(_) -> false. %% In addition to the list of substitutions, we also keep track of %% all variable currently live (the scope). %% -%% sub_subst_scope/1 adds dummy substitutions for all variables -%% in the scope in order to force renaming if variables in the -%% scope occurs as pattern variables. +%% sub_add_scope/2 adds variables to the scope. sub_subst_scope/1 +%% adds dummy substitutions for all variables in the scope in order +%% to force renaming if variables in the scope occurs as pattern +%% variables. sub_new() -> #sub{v=orddict:new(),s=gb_trees:empty(),t=[]}. @@ -1657,6 +1279,12 @@ sub_subst_var(#c_var{name=V}, Val, #sub{v=S0}) -> %% Fold chained substitutions. [{V,Val}] ++ [ {K,Val} || {K,#c_var{name=V1}} <- S0, V1 =:= V]. +sub_add_scope(Vs, #sub{s=Scope0}=Sub) -> + Scope = foldl(fun(V, S) when is_integer(V); is_atom(V) -> + gb_sets:add(V, S) + end, Scope0, Vs), + Sub#sub{s=Scope}. + sub_subst_scope(#sub{v=S0,s=Scope}=Sub) -> S = [{-1,#c_var{name=Sv}} || Sv <- gb_sets:to_list(Scope)]++S0, Sub#sub{v=S}. 
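
For illustration only (not part of the commit): a minimal standalone sketch of the scope bookkeeping that the new sub_add_scope/2 clause performs inside the #sub{} record. The function name below is hypothetical; only the lists and gb_sets calls are real library functions.

    %% Fold variable names (atoms or integers, as in sys_core_fold)
    %% into a gb_sets-based scope. gb_sets:add/2 leaves the set
    %% unchanged if the name is already present, so repeated
    %% additions are harmless.
    add_to_scope(Vs, Scope0) ->
        lists:foldl(fun(V, S) when is_integer(V); is_atom(V) ->
                            gb_sets:add(V, S)
                    end, Scope0, Vs).
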
@@ -1704,7 +1332,7 @@ clauses(E, [C0|Cs], Ctxt, Sub, LitExpr) ->
     {yes,yes} ->
         case LitExpr of
             false ->
-                Line = get_line(core_lib:get_anno(C1)),
+                Line = get_line(cerl:get_ann(C1)),
                 shadow_warning(Cs, Line);
             true ->
                 %% If the case expression is a literal,
@@ -1938,7 +1566,7 @@ opt_bool_case_guard(#c_case{arg=Arg,clauses=Cs0}=Case) ->
         Case;
     true ->
         Cs = opt_bool_case_guard(Arg, Cs0),
-        Case#c_case{arg=#c_values{anno=core_lib:get_anno(Arg),es=[]},
+        Case#c_case{arg=#c_values{anno=cerl:get_ann(Arg),es=[]},
                     clauses=Cs}
     end.
 
@@ -1986,6 +1614,7 @@ eval_case(#c_case{arg=E,clauses=[#c_clause{pats=Ps0,
         %% is correct, the clause will always match at run-time.
         Case;
     {true,Bs} ->
+        eval_case_warn(B),
         {Ps,As} = unzip(Bs),
         InnerLet = cerl:c_let(Ps, core_lib:make_values(As), B),
         Let = cerl:c_let(Vs, E, InnerLet),
@@ -1993,6 +1622,19 @@ eval_case(#c_case{arg=E,clauses=[#c_clause{pats=Ps0,
     end;
 eval_case(Case, _) -> Case.
 
+eval_case_warn(#c_primop{anno=Anno,
+                         name=#c_literal{val=match_fail},
+                         args=[#c_literal{val=Reason}]}=Core)
+  when is_atom(Reason) ->
+    case member(eval_failure, Anno) of
+        false ->
+            ok;
+        true ->
+            %% Example: M = not_map, M#{k:=v}
+            add_warning(Core, {eval_failure,Reason})
+    end;
+eval_case_warn(_) -> ok.
+
 %% case_opt(CaseArg, [Clause]) -> {CaseArg,[Clause]}.
 %% Try and optimise a case by avoid building tuples or lists
 %% in the case expression. Instead combine the variable parts
@@ -2048,12 +1690,31 @@ case_opt_args([], Cs, _Sub, _LitExpr, Acc) ->
 %% Try to expand one argument to several arguments (if tuple/list)
 %% or to remove a literal argument.
 %%
-case_opt_arg(E0, Sub, Cs0, LitExpr) ->
-    E = maybe_replace_var(E0, Sub),
-    case cerl:is_data(E) of
+case_opt_arg(E0, Sub, Cs, LitExpr) ->
+    case cerl:is_c_var(E0) of
+        false ->
+            case_opt_arg_1(E0, Cs, LitExpr);
+        true ->
+            case case_will_var_match(Cs) of
+                true ->
+                    %% All clauses will match a variable in the
+                    %% current position. Don't expand this variable
+                    %% (that can only make the code worse).
+                    {error,Cs};
+                false ->
+                    %% If possible, expand this variable to a previously
+                    %% matched term.
+                    E = case_expand_var(E0, Sub),
+                    case_opt_arg_1(E, Cs, LitExpr)
+            end
+    end.
+
+case_opt_arg_1(E0, Cs0, LitExpr) ->
+    case cerl:is_data(E0) of
         false ->
             {error,Cs0};
         true ->
+            E = case_opt_compiler_generated(E0),
             Cs = case_opt_nomatch(E, Cs0, LitExpr),
             case cerl:data_type(E) of
                 {atomic,_} ->
@@ -2063,18 +1724,42 @@
             end
     end.
 
-%% maybe_replace_var(Expr0, Sub) -> Expr
+%% case_will_var_match([Clause]) -> true | false.
+%% Return whether all clauses will match a variable in the
+%% current position.
+%%
+case_will_var_match(Cs) ->
+    all(fun({[P|_],_,_,_}) ->
+                case cerl_clauses:match(P, any) of
+                    {true,_} -> true;
+                    _ -> false
+                end
+        end, Cs).
+
+
+%% case_opt_compiler_generated(Core) -> Core'
+%% Mark Core expressions as compiler generated to ensure that
+%% no warnings are generated if they turn out to be unused.
+%% To make pretty-printed Core Erlang easier to read, don't mark
+%% constructs that can't cause warnings to be emitted.
+%%
+case_opt_compiler_generated(Core) ->
+    F = fun(C) ->
+                case cerl:type(C) of
+                    alias -> C;
+                    var -> C;
+                    _ -> cerl:set_ann(C, [compiler_generated])
+                end
+        end,
+    cerl_trees:map(F, Core).
+
+
+%% case_expand_var(Expr0, Sub) -> Expr
 %% If Expr0 is a variable that has been previously matched and
 %% is known to be a tuple, return the tuple instead. Otherwise
 %% return Expr0 unchanged.
%% -maybe_replace_var(E, Sub) -> - case cerl:is_c_var(E) of - false -> E; - true -> maybe_replace_var_1(E, Sub) - end. - -maybe_replace_var_1(E, #sub{t=Tdb}) -> +case_expand_var(E, #sub{t=Tdb}) -> case orddict:find(cerl:var_name(E), Tdb) of {ok,T0} -> case cerl:is_c_tuple(T0) of @@ -2091,9 +1776,8 @@ maybe_replace_var_1(E, #sub{t=Tdb}) -> %% operator will fail when used in map %% construction (only the '=>' operator is allowed %% when constructing a map from scratch). - ToData = fun coerce_to_data/1, try - cerl_trees:map(ToData, T0) + cerl_trees:map(fun coerce_to_data/1, T0) catch throw:impossible -> %% Something unsuitable was found (map or @@ -2147,8 +1831,9 @@ case_opt_nomatch(_, [], _) -> []. %% will match, and we can remove the corresponding pattern from %% each clause. %% -%% The only complication is if the literal is a binary. Binary -%% pattern matching is tricky, so we will give up in that case. +%% The only complication is if the literal is a binary or map. +%% In general, it is difficult to know whether a binary or +%% map pattern will match, so we give up in that case. case_opt_lit(Lit, Cs0) -> try case_opt_lit_1(Lit, Cs0) of @@ -2175,6 +1860,10 @@ case_opt_lit_1(E, [{[P|Ps],C,PsAcc,Bs0}|Cs]) -> case_opt_lit_1(_, []) -> []. %% case_opt_data(Expr, Clauses0, LitExpr) -> {ok,Exprs,Clauses} +%% The case expression is a non-atomic data constructor (cons +%% or tuple). We can know at compile time whether each clause +%% will match, and we can delay the building of the data to +%% the clauses where it is actually needed. case_opt_data(E, Cs0) -> Es = cerl:data_es(E), @@ -2184,45 +1873,48 @@ case_opt_data(E, Cs0) -> {ok,Es,Cs} catch throw:impossible -> + %% The pattern contained a binary or map. {error,Cs0} end. -case_opt_data_1([{[P|Ps0],C,PsAcc,Bs0}|Cs], Es, TypeSig) -> - {ok,Ps1,Bs1} = case_data_pat(P, TypeSig), - [{Ps1++Ps0,C,PsAcc,Bs1++Bs0}| - case_opt_data_1(Cs, Es, TypeSig)]; +case_opt_data_1([{[P0|Ps0],C,PsAcc,Bs0}|Cs], Es, TypeSig) -> + P = case_opt_compiler_generated(P0), + BindTo = #c_var{name=dummy}, + {Ps1,[{BindTo,_}|Bs1]} = case_data_pat_alias(P, BindTo, TypeSig, []), + [{Ps1++Ps0,C,PsAcc,Bs1++Bs0}|case_opt_data_1(Cs, Es, TypeSig)]; case_opt_data_1([], _, _) -> []. -%% case_data_pat(Pattern, Type, Arity) -> {ok,[Pattern],[{AliasVar,Pat}]} | error. - -case_data_pat(P, TypeSig) -> - case cerl:is_data(P) of - false -> - case_data_pat_var(P, TypeSig); - true -> - {ok,cerl:data_es(P),[]} - end. - -%% case_data_pat_var(Pattern, {DataType,ArityType}) -> -%% {ok,[Pattern],[{AliasVar,Pat}]} - -case_data_pat_var(P, {Type,Arity}=TypeSig) -> - %% If the entire case statement is evaluated in an effect - %% context (e.g. "case {A,B} of ... end, ok"), there will - %% be a warning that a term is constructed but never used. - %% To avoid that warning, we must annotate the data - %% constructor as compiler generated. - Ann = [compiler_generated|cerl:get_ann(P)], +case_data_pat_alias(P, BindTo0, TypeSig, Bs0) -> case cerl:type(P) of - var -> - Vars = make_vars(cerl:get_ann(P), Arity), - {ok,Vars,[{P,cerl:ann_make_data(Ann, Type, Vars)}]}; alias -> - V = cerl:alias_var(P), - Apat = cerl:alias_pat(P), - {ok,Ps,Bs} = case_data_pat(Apat, TypeSig), - {ok,Ps,[{V,cerl:ann_make_data(Ann, Type, - pat_to_expr_list(Ps))}|Bs]} + %% Recursively handle the pattern and bind to + %% the alias variable. 
+ BindTo = cerl:alias_var(P), + Apat0 = cerl:alias_pat(P), + Ann = [compiler_generated], + Apat = cerl:set_ann(Apat0, Ann), + {Ps,Bs} = case_data_pat_alias(Apat, BindTo, TypeSig, Bs0), + {Ps,[{BindTo0,BindTo}|Bs]}; + var -> + %% Here we will need to actually build the data and bind + %% it to the variable. + {Type,Arity} = TypeSig, + Vars = make_vars([], Arity), + Ann = [compiler_generated], + Data = cerl:ann_make_data(Ann, Type, Vars), + Bs = [{BindTo0,P},{P,Data}|Bs0], + {Vars,Bs}; + _ -> + %% Since case_opt_nomatch/3 has removed all clauses that + %% cannot match, we KNOW that this clause must match and + %% that the pattern must be a data constructor. + %% Here we must build the data and bind it to the variable. + {Type,_} = TypeSig, + DataEs = cerl:data_es(P), + Vars = pat_to_expr_list(DataEs), + Ann = [compiler_generated], + Data = cerl:ann_make_data(Ann, Type, Vars), + {DataEs,[{BindTo0,Data}]} end. %% pat_to_expr(Pattern) -> Expression. @@ -2269,58 +1961,130 @@ make_var_name() -> list_to_atom("fol"++integer_to_list(N)). letify(Bs, Body) -> + Ann = cerl:get_ann(Body), foldr(fun({V,Val}, B) -> - letify(V, Val, B) + cerl:ann_c_let(Ann, [V], Val, B) end, Body, Bs). -letify(#c_var{name=Vname}=Var, Val, Body) -> - case core_lib:is_var_used(Vname, Body) of - true -> - A = element(2, Body), - #c_let{anno=A,vars=[Var],arg=Val,body=Body}; - false -> Body - end. - -%% opt_case_in_let(LetExpr) -> LetExpr' +%% opt_not_in_let(Let) -> Cerl +%% Try to optimize away a 'not' operator in a 'let'. -opt_case_in_let(#c_let{vars=Vs,arg=Arg,body=B}=Let, Sub) -> - opt_case_in_let_0(Vs, Arg, B, Let, Sub). +-spec opt_not_in_let(cerl:c_let()) -> cerl:cerl(). -opt_case_in_let_0([#c_var{name=V}], Arg, - #c_case{arg=#c_var{name=V},clauses=Cs}=Case, Let, Sub) -> - case opt_case_in_let_1(V, Arg, Cs) of - impossible -> - case is_simple_case_arg(Arg) andalso - not core_lib:is_var_used(V, Case#c_case{arg=#c_literal{val=nil}}) of - true -> - expr(opt_bool_case(Case#c_case{arg=Arg,clauses=Cs}), sub_new(Sub)); - false -> - Let +opt_not_in_let(#c_let{vars=[_]=Vs0,arg=Arg0,body=Body0}=Let) -> + case opt_not_in_let(Vs0, Arg0, Body0) of + {[],#c_values{es=[]},Body} -> + Body; + {Vs,Arg,Body} -> + Let#c_let{vars=Vs,arg=Arg,body=Body} + end; +opt_not_in_let(Let) -> Let. + +%% opt_not_in_let(Vs, Arg, Body) -> {Vs',Arg',Body'} +%% Try to optimize away a 'not' operator in a 'let'. + +-spec opt_not_in_let([cerl:c_var()], cerl:cerl(), cerl:cerl()) -> + {[cerl:c_var()],cerl:cerl(),cerl:cerl()}. + +opt_not_in_let([#c_var{name=V}]=Vs0, Arg0, Body0) -> + case cerl:type(Body0) of + call -> + %% let <V> = Expr in not V ==> + %% let <> = <> in notExpr + case opt_not_in_let_1(V, Body0, Arg0) of + no -> + {Vs0,Arg0,Body0}; + {yes,Body} -> + {[],#c_values{es=[]},Body} end; - Expr -> Expr + 'let' -> + %% let <V> = Expr in let <Var> = not V in Body ==> + %% let <Var> = notExpr in Body + %% V must not be used in Body. + LetArg = cerl:let_arg(Body0), + case opt_not_in_let_1(V, LetArg, Arg0) of + no -> + {Vs0,Arg0,Body0}; + {yes,Arg} -> + LetBody = cerl:let_body(Body0), + case core_lib:is_var_used(V, LetBody) of + true -> + {Vs0,Arg0,Body0}; + false -> + LetVars = cerl:let_vars(Body0), + {LetVars,Arg,LetBody} + end + end; + _ -> + {Vs0,Arg0,Body0} end; -opt_case_in_let_0(_, _, _, Let, _) -> Let. +opt_not_in_let(Vs, Arg, Body) -> + {Vs,Arg,Body}. 
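
For illustration only (not part of the commit): a hypothetical source-level example of what the new opt_not_in_let clauses above are aimed at. The expected rewrite, via the inverse_rel_op/1 table defined just below, is a presumption about the generated Core Erlang, not a verified listing.

    %% The 'not' around the comparison ends up as
    %%   let <V> = (A =:= B) in not V
    %% in Core Erlang; the optimization should collapse this into a
    %% single '=/=' test.
    is_different(A, B) ->
        not (A =:= B).
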
-opt_case_in_let_1(V, Arg, Cs) -> - try - opt_case_in_let_2(V, Arg, Cs) - catch - _:_ -> impossible +opt_not_in_let_1(V, Call, Body) -> + case Call of + #c_call{module=#c_literal{val=erlang}, + name=#c_literal{val='not'}, + args=[#c_var{name=V}]} -> + opt_not_in_let_2(Body); + _ -> + no end. -opt_case_in_let_2(V, Arg0, - [#c_clause{pats=[#c_tuple{es=Es}], - guard=#c_literal{val=true},body=B}|_]) -> - - %% In {V1,V2,...} = case E of P -> ... {Val1,Val2,...}; ... end. - %% avoid building tuples, by converting tuples to multiple values. - %% (The optimisation is not done if the built tuple is used or returned.) - - true = all(fun (#c_var{}) -> true; - (_) -> false end, Es), %Only variables in tuple - false = core_lib:is_var_used(V, B), %Built tuple must not be used. - Arg1 = tuple_to_values(Arg0, length(Es)), %Might fail. - #c_let{vars=Es,arg=Arg1,body=B}. +opt_not_in_let_2(#c_case{clauses=Cs0}=Case) -> + Vars = make_vars([], 1), + Body = #c_call{module=#c_literal{val=erlang}, + name=#c_literal{val='not'}, + args=Vars}, + Cs = [begin + Let = #c_let{vars=Vars,arg=B,body=Body}, + C#c_clause{body=opt_not_in_let(Let)} + end || #c_clause{body=B}=C <- Cs0], + {yes,Case#c_case{clauses=Cs}}; +opt_not_in_let_2(#c_call{}=Call0) -> + invert_call(Call0); +opt_not_in_let_2(_) -> no. + +invert_call(#c_call{module=#c_literal{val=erlang}, + name=#c_literal{val=Name0}, + args=[_,_]}=Call) -> + case inverse_rel_op(Name0) of + no -> no; + Name -> {yes,Call#c_call{name=#c_literal{val=Name}}} + end; +invert_call(#c_call{}) -> no. + +%% inverse_rel_op(Op) -> no | RevOp + +inverse_rel_op('=:=') -> '=/='; +inverse_rel_op('=/=') -> '=:='; +inverse_rel_op('==') -> '/='; +inverse_rel_op('/=') -> '=='; +inverse_rel_op('>') -> '=<'; +inverse_rel_op('<') -> '>='; +inverse_rel_op('>=') -> '<'; +inverse_rel_op('=<') -> '>'; +inverse_rel_op(_) -> no. + + +%% opt_bool_case_in_let(LetExpr, Sub) -> Core + +opt_bool_case_in_let(#c_let{vars=Vs,arg=Arg,body=B}=Let, Sub) -> + opt_case_in_let_1(Vs, Arg, B, Let, Sub). + +opt_case_in_let_1([#c_var{name=V}], Arg, + #c_case{arg=#c_var{name=V}}=Case0, Let, Sub) -> + case is_simple_case_arg(Arg) of + true -> + Case = opt_bool_case(Case0#c_case{arg=Arg}), + case core_lib:is_var_used(V, Case) of + false -> expr(Case, sub_new(Sub)); + true -> Let + end; + false -> + Let + end; +opt_case_in_let_1(_, _, _, Let, _) -> Let. %% is_simple_case_arg(Expr) -> true|false %% Determine whether the Expr is simple enough to be worth @@ -2362,18 +2126,15 @@ is_bool_expr(#c_clause{body=B}, Sub) -> is_bool_expr(B, Sub); is_bool_expr(#c_let{vars=[V],arg=Arg,body=B}, Sub0) -> Sub = case is_bool_expr(Arg, Sub0) of - true -> update_types(V, [#c_literal{val=true}], Sub0); + true -> update_types(V, [bool], Sub0); false -> Sub0 end, is_bool_expr(B, Sub); is_bool_expr(#c_let{body=B}, Sub) -> %% Binding of multiple variables. is_bool_expr(B, Sub); -is_bool_expr(#c_literal{val=Bool}, _) when is_boolean(Bool) -> - true; -is_bool_expr(#c_var{name=V}, Sub) -> - is_boolean_type(V, Sub); -is_bool_expr(_, _) -> false. +is_bool_expr(C, Sub) -> + is_boolean_type(C, Sub) =:= yes. is_bool_expr_list([C|Cs], Sub) -> is_bool_expr(C, Sub) andalso is_bool_expr_list(Cs, Sub); @@ -2451,38 +2212,6 @@ is_safe_bool_expr_list([C|Cs], Sub, BoolVars) -> end; is_safe_bool_expr_list([], _, _) -> true. -%% tuple_to_values(Expr, TupleArity) -> Expr' -%% Convert tuples in return position of arity TupleArity to values. -%% Throws an exception for constructs that are not handled. 
- -tuple_to_values(#c_tuple{es=Es}, Arity) when length(Es) =:= Arity -> - core_lib:make_values(Es); -tuple_to_values(#c_literal{val=Tuple}=Lit, Arity) when tuple_size(Tuple) =:= Arity -> - Es = [Lit#c_literal{val=E} || E <- tuple_to_list(Tuple)], - core_lib:make_values(Es); -tuple_to_values(#c_case{clauses=Cs0}=Case, Arity) -> - Cs1 = [tuple_to_values(E, Arity) || E <- Cs0], - Case#c_case{clauses=Cs1}; -tuple_to_values(#c_seq{body=B0}=Seq, Arity) -> - Seq#c_seq{body=tuple_to_values(B0, Arity)}; -tuple_to_values(#c_let{body=B0}=Let, Arity) -> - Let#c_let{body=tuple_to_values(B0, Arity)}; -tuple_to_values(#c_receive{clauses=Cs0,timeout=Timeout,action=A0}=Rec, Arity) -> - Cs = [tuple_to_values(E, Arity) || E <- Cs0], - A = case Timeout of - #c_literal{val=infinity} -> A0; - _ -> tuple_to_values(A0, Arity) - end, - Rec#c_receive{clauses=Cs,action=A}; -tuple_to_values(#c_clause{body=B0}=Clause, Arity) -> - B = tuple_to_values(B0, Arity), - Clause#c_clause{body=B}; -tuple_to_values(Expr, _) -> - case will_fail(Expr) of - true -> Expr; - false -> erlang:error({not_handled,Expr}) - end. - %% simplify_let(Let, Sub) -> Expr | impossible %% If the argument part of an let contains a complex expression, such %% as a let or a sequence, move the original let body into the complex @@ -2509,7 +2238,7 @@ move_let_into_expr(#c_let{vars=InnerVs0,body=InnerBody0}=Inner, Arg = body(Arg0, Sub0), ScopeSub0 = sub_subst_scope(Sub0#sub{t=[]}), {OuterVs,ScopeSub} = pattern_list(OuterVs0, ScopeSub0), - + OuterBody = body(OuterBody0, ScopeSub), {InnerVs,Sub} = pattern_list(InnerVs0, Sub0), @@ -2587,86 +2316,232 @@ move_let_into_expr(_Let, _Expr, _Sub) -> impossible. is_failing_clause(#c_clause{body=B}) -> will_fail(B). -scope_add(Vs, #sub{s=Scope0}=Sub) -> - Scope = foldl(fun(V, S) when is_integer(V); is_atom(V) -> - gb_sets:add(V, S) - end, Scope0, Vs), - Sub#sub{s=Scope}. +%% opt_case_in_let(Let) -> Let' +%% Try to avoid building tuples that are immediately matched. +%% A common pattern is: +%% +%% {V1,V2,...} = case E of P -> ... {Val1,Val2,...}; ... end +%% +%% In Core Erlang the pattern would look like this: +%% +%% let <V> = case E of +%% ... -> ... {Val1,Val2} +%% ... +%% end, +%% in case V of +%% {A,B} -> ... <use A and B> ... +%% end +%% +%% Rewrite this to: +%% +%% let <V1,V2> = case E of +%% ... -> ... <Val1,Val2> +%% ... +%% end, +%% in +%% let <V> = {V1,V2} +%% in case V of +%% {A,B} -> ... <use A and B> ... +%% end +%% +%% Note that the second 'case' is unchanged. The other optimizations +%% in this module will eliminate the building of the tuple and +%% rewrite the second case to: +%% +%% case <V1,V2> of +%% <A,B> -> ... <use A and B> ... +%% end +%% + +opt_case_in_let(#c_let{vars=Vs,arg=Arg0,body=B}=Let0) -> + case matches_data(Vs, B) of + {yes,TypeSig} -> + case delay_build(Arg0, TypeSig) of + no -> + Let0; + {yes,Vars,Arg,Data} -> + InnerLet = Let0#c_let{arg=Data}, + Let0#c_let{vars=Vars,arg=Arg,body=InnerLet} + end; + no -> + Let0 + end. + +matches_data([#c_var{name=V}], #c_case{arg=#c_var{name=V}, + clauses=[#c_clause{pats=[P]}|_]}) -> + case cerl:is_data(P) of + false -> + no; + true -> + case cerl:data_type(P) of + {atomic,_} -> + no; + Type -> + {yes,{Type,cerl:data_arity(P)}} + end + end; +matches_data(_, _) -> no. + +delay_build(Core, TypeSig) -> + case cerl:is_data(Core) of + true -> no; + false -> delay_build_1(Core, TypeSig) + end. 
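
For illustration only (not part of the commit): a hypothetical source-level example of the shape described in the opt_case_in_let/1 comment above, where a tuple built in a case arm is matched apart immediately afterwards.

    %% The {X,Y}/{Y,X} tuples exist only to carry two values out of
    %% the case; after the rewrite sketched above, later passes can
    %% drop the tuple construction entirely.
    minmax(X, Y) ->
        {Min,Max} = case X < Y of
                        true  -> {X,Y};
                        false -> {Y,X}
                    end,
        Max - Min.
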
+ +delay_build_1(Core0, TypeSig) -> + try delay_build_expr(Core0, TypeSig) of + Core -> + {Type,Arity} = TypeSig, + Vars = make_vars([], Arity), + Data = cerl:ann_make_data([compiler_generated], Type, Vars), + {yes,Vars,Core,Data} + catch + throw:impossible -> + no + end. + +delay_build_cs([#c_clause{body=B0}=C0|Cs], TypeSig) -> + B = delay_build_expr(B0, TypeSig), + C = C0#c_clause{body=B}, + [C|delay_build_cs(Cs, TypeSig)]; +delay_build_cs([], _) -> []. + +delay_build_expr(Core, {Type,Arity}=TypeSig) -> + case cerl:is_data(Core) of + false -> + delay_build_expr_1(Core, TypeSig); + true -> + case {cerl:data_type(Core),cerl:data_arity(Core)} of + {Type,Arity} -> + core_lib:make_values(cerl:data_es(Core)); + {_,_} -> + throw(impossible) + end + end. + +delay_build_expr_1(#c_case{clauses=Cs0}=Case, TypeSig) -> + Cs = delay_build_cs(Cs0, TypeSig), + Case#c_case{clauses=Cs}; +delay_build_expr_1(#c_let{body=B0}=Let, TypeSig) -> + B = delay_build_expr(B0, TypeSig), + Let#c_let{body=B}; +delay_build_expr_1(#c_receive{clauses=Cs0, + timeout=Timeout, + action=A0}=Rec, TypeSig) -> + Cs = delay_build_cs(Cs0, TypeSig), + A = case Timeout of + #c_literal{val=infinity} -> A0; + _ -> delay_build_expr(A0, TypeSig) + end, + Rec#c_receive{clauses=Cs,action=A}; +delay_build_expr_1(#c_seq{body=B0}=Seq, TypeSig) -> + B = delay_build_expr(B0, TypeSig), + Seq#c_seq{body=B}; +delay_build_expr_1(Core, _TypeSig) -> + case will_fail(Core) of + true -> Core; + false -> throw(impossible) + end. %% opt_simple_let(#c_let{}, Context, Sub) -> CoreTerm %% Optimize a let construct that does not contain any lets in %% in its argument. -opt_simple_let(#c_let{arg=Arg0}=Let, Ctxt, Sub0) -> - Arg = body(Arg0, value, Sub0), %This is a body +opt_simple_let(Let0, Ctxt, Sub) -> + case opt_not_in_let(Let0) of + #c_let{}=Let -> + opt_simple_let_0(Let, Ctxt, Sub); + Expr -> + expr(Expr, Ctxt, Sub) + end. + +opt_simple_let_0(#c_let{arg=Arg0}=Let, Ctxt, Sub) -> + Arg = body(Arg0, value, Sub), %This is a body case will_fail(Arg) of true -> Arg; - false -> opt_simple_let_1(Let, Arg, Ctxt, Sub0) + false -> opt_simple_let_1(Let, Arg, Ctxt, Sub) end. opt_simple_let_1(#c_let{vars=Vs0,body=B0}=Let, Arg0, Ctxt, Sub0) -> %% Optimise let and add new substitutions. - {Vs,Args,Sub1} = let_substs(Vs0, Arg0, Sub0), - BodySub = case {Vs,Args} of - {[V],[A]} -> - case is_bool_expr(A, Sub0) of - true -> - update_types(V, [#c_literal{val=true}], Sub1); - false -> - Sub1 - end; - {_,_} -> Sub1 - end, - B = body(B0, Ctxt, BodySub), - Arg = core_lib:make_values(Args), - opt_simple_let_2(Let, Vs, Arg, B, Ctxt, Sub1). - -opt_simple_let_2(Let0, Vs0, Arg0, Body0, effect, Sub) -> - case {Vs0,Arg0,Body0} of - {[],#c_values{es=[]},Body} -> - %% No variables left (because of substitutions). - Body; - {[_|_],Arg,#c_literal{}} -> - %% The body is a literal. That means that we can ignore - %% it and that the return value is Arg revisited in - %% effect context. - body(Arg, effect, sub_new_preserve_types(Sub)); - {Vs,Arg,Body} -> - %% Since we are in effect context, there is a chance - %% that the body no longer references the variables. 
- %% In that case we can construct a sequence and visit - %% that in effect context: - %% let <Var> = Arg in BodyWithoutVar ==> seq Arg BodyWithoutVar - case is_any_var_used(Vs, Body) of - false -> - expr(#c_seq{arg=Arg,body=Body}, effect, sub_new_preserve_types(Sub)); - true -> - Let = Let0#c_let{vars=Vs,arg=Arg,body=Body}, - opt_case_in_let_arg(opt_case_in_let(Let, Sub), effect, Sub) - end - end; -opt_simple_let_2(Let, Vs0, Arg0, Body, value, Sub) -> + {Vs1,Args,Sub1} = let_substs(Vs0, Arg0, Sub0), + BodySub = update_let_types(Vs1, Args, Sub1), + B1 = body(B0, Ctxt, BodySub), + Arg1 = core_lib:make_values(Args), + {Vs,Arg,B} = opt_not_in_let(Vs1, Arg1, B1), + opt_simple_let_2(Let, Vs, Arg, B, B0, Ctxt, Sub1). + +opt_simple_let_2(Let0, Vs0, Arg0, Body, PrevBody, Ctxt, Sub) -> case {Vs0,Arg0,Body} of - {[#c_var{name=N1}],Arg,#c_var{name=N2}} -> + {[#c_var{name=N1}],Arg1,#c_var{name=N2}} -> case N1 =:= N2 of true -> %% let <Var> = Arg in <Var> ==> Arg - Arg; + Arg1; false -> %% let <Var> = Arg in <OtherVar> ==> seq Arg OtherVar - expr(#c_seq{arg=Arg,body=Body}, value, sub_new_preserve_types(Sub)) + Arg = maybe_suppress_warnings(Arg1, Vs0, PrevBody, Ctxt), + expr(#c_seq{arg=Arg,body=Body}, Ctxt, + sub_new_preserve_types(Sub)) end; {[],#c_values{es=[]},_} -> %% No variables left. Body; - {_,Arg,#c_literal{}} -> - %% The variable is not used in the body. The argument - %% can be evaluated in effect context to simplify it. - expr(#c_seq{arg=Arg,body=Body}, value, sub_new_preserve_types(Sub)); - {Vs,Arg,Body} -> - opt_case_in_let_arg( - opt_case_in_let(Let#c_let{vars=Vs,arg=Arg,body=Body}, Sub), - value, Sub) + {Vs,Arg1,#c_literal{}} -> + Arg = maybe_suppress_warnings(Arg1, Vs, PrevBody, Ctxt), + E = case Ctxt of + effect -> + %% Throw away the literal body. + Arg; + value -> + %% Since the variable is not used in the body, we + %% can rewrite the let to a sequence. + %% let <Var> = Arg in Literal ==> seq Arg Literal + #c_seq{arg=Arg,body=Body} + end, + expr(E, Ctxt, sub_new_preserve_types(Sub)); + {Vs,Arg1,Body} -> + %% If none of the variables are used in the body, we can + %% rewrite the let to a sequence: + %% let <Var> = Arg in BodyWithoutVar ==> + %% seq Arg BodyWithoutVar + case is_any_var_used(Vs, Body) of + false -> + Arg = maybe_suppress_warnings(Arg1, Vs, PrevBody, Ctxt), + expr(#c_seq{arg=Arg,body=Body}, Ctxt, + sub_new_preserve_types(Sub)); + true -> + Let1 = Let0#c_let{vars=Vs,arg=Arg1,body=Body}, + Let2 = opt_bool_case_in_let(Let1, Sub), + opt_case_in_let_arg(Let2, Ctxt, Sub) + end + end. + +%% maybe_suppress_warnings(Arg, [#c_var{}], PreviousBody, Context) -> Arg' +%% Try to suppress false warnings when a variable is not used. +%% For instance, we don't expect a warning for useless building in: +%% +%% R = #r{}, %No warning expected. +%% R#r.f %Optimization would remove the reference to R. +%% +%% To avoid false warnings, we will check whether the variables were +%% referenced in the original unoptimized code. If they were, we will +%% consider the warning false and suppress it. + +maybe_suppress_warnings(Arg, _, _, effect) -> + %% Don't suppress any warnings in effect context. + Arg; +maybe_suppress_warnings(Arg, Vs, PrevBody, value) -> + case suppress_warning(Arg) of + true -> + Arg; %Already suppressed. + false -> + case is_any_var_used(Vs, PrevBody) of + true -> + cerl:set_ann(Arg, [compiler_generated]); + false -> + Arg + end end. 
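
For illustration only (not part of the commit): a tiny, hypothetical example of the false warning that maybe_suppress_warnings/4 is meant to avoid, in the spirit of the #r{} example in the comment above.

    -record(r, {f=1}).

    %% Once the #r.f access is optimized into the constant 1, R is no
    %% longer referenced; without the suppression the user would get a
    %% misleading "term constructed but never used" style warning for
    %% the #r{} construction, even though the source code does use R.
    default_f() ->
        R = #r{},
        R#r.f.
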
move_case_into_arg(#c_case{arg=#c_let{vars=OuterVars0,arg=OuterArg, @@ -2756,7 +2631,7 @@ move_case_into_arg(_, _) -> %% <> when 'true' -> %% let <Var> = Literal2 in LetBody %% end -%% +%% %% In the worst case, the size of the code could increase. %% In practice, though, substituting the literals into %% LetBody and doing constant folding will decrease the code @@ -2789,14 +2664,114 @@ is_any_var_used([#c_var{name=V}|Vs], Expr) -> end; is_any_var_used([], _) -> false. -is_boolean_type(V, #sub{t=Tdb}) -> +%%% +%%% Retrieving information about types. +%%% + +-spec get_type(cerl:cerl(), #sub{}) -> type_info() | 'none'. + +get_type(#c_var{name=V}, #sub{t=Tdb}) -> case orddict:find(V, Tdb) of - {ok,bool} -> true; - _ -> false + {ok,Type} -> Type; + error -> none + end; +get_type(C, _) -> + case cerl:type(C) of + binary -> C; + map -> C; + _ -> + case cerl:is_data(C) of + true -> C; + false -> none + end + end. + +-spec is_boolean_type(cerl:cerl(), sub()) -> yes_no_maybe(). + +is_boolean_type(Var, Sub) -> + case get_type(Var, Sub) of + none -> + maybe; + bool -> + yes; + C -> + B = cerl:is_c_atom(C) andalso + is_boolean(cerl:atom_val(C)), + yes_no(B) end. +-spec is_int_type(cerl:cerl(), sub()) -> yes_no_maybe(). + +is_int_type(Var, Sub) -> + case get_type(Var, Sub) of + none -> maybe; + integer -> yes; + C -> yes_no(cerl:is_c_int(C)) + end. + +-spec is_tuple_type(cerl:cerl(), sub()) -> yes_no_maybe(). + +is_tuple_type(Var, Sub) -> + case get_type(Var, Sub) of + none -> maybe; + C -> yes_no(cerl:is_c_tuple(C)) + end. + +yes_no(true) -> yes; +yes_no(false) -> no. + +%%% +%%% Update type information. +%%% + +update_let_types(Vs, Args, Sub) when is_list(Args) -> + update_let_types_1(Vs, Args, Sub); +update_let_types(_Vs, _Arg, Sub) -> + %% The argument is a complex expression (such as a 'case') + %% that returns multiple values. + Sub. + +update_let_types_1([#c_var{}=V|Vs], [A|As], Sub0) -> + Sub = update_types_from_expr(V, A, Sub0), + update_let_types_1(Vs, As, Sub); +update_let_types_1([], [], Sub) -> Sub. + +update_types_from_expr(V, Expr, Sub) -> + Type = extract_type(Expr, Sub), + update_types(V, [Type], Sub). + +extract_type(#c_call{module=#c_literal{val=erlang}, + name=#c_literal{val=Name}, + args=Args}=Call, Sub) -> + case returns_integer(Name, Args) of + true -> integer; + false -> extract_type_1(Call, Sub) + end; +extract_type(Expr, Sub) -> + extract_type_1(Expr, Sub). + +extract_type_1(Expr, Sub) -> + case is_bool_expr(Expr, Sub) of + false -> Expr; + true -> bool + end. + +returns_integer(bit_size, [_]) -> true; +returns_integer('bsl', [_,_]) -> true; +returns_integer('bsr', [_,_]) -> true; +returns_integer(byte_size, [_]) -> true; +returns_integer(length, [_]) -> true; +returns_integer('rem', [_,_]) -> true; +returns_integer(size, [_]) -> true; +returns_integer(tuple_size, [_]) -> true; +returns_integer(trunc, [_]) -> true; +returns_integer(_, _) -> false. + %% update_types(Expr, Pattern, Sub) -> Sub' %% Update the type database. + +-spec update_types(cerl:cerl(), [type_info()], sub()) -> sub(). + update_types(Expr, Pat, #sub{t=Tdb0}=Sub) -> Tdb = update_types_1(Expr, Pat, Tdb0), Sub#sub{t=Tdb}. @@ -2816,6 +2791,8 @@ update_types_2(V, [#c_tuple{}=P], Types) -> orddict:store(V, P, Types); update_types_2(V, [#c_literal{val=Bool}], Types) when is_boolean(Bool) -> orddict:store(V, bool, Types); +update_types_2(V, [Type], Types) when is_atom(Type) -> + orddict:store(V, Type, Types); update_types_2(_, _, Types) -> Types. 
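
For illustration only (not part of the commit): a hypothetical example of why the integer-type tracking above (returns_integer/2 feeding is_int_type/2) matters for the '==' to '=:=' strengthening introduced earlier in this diff. The described rewrite is an expectation, not a verified listing.

    %% Both operands are known integers (tuple_size/1 always returns an
    %% integer, and 2 is an integer literal), so the '==' comparison can
    %% safely be treated as the cheaper exact '=:=' comparison.
    is_pair(T) when is_tuple(T) ->
        tuple_size(T) == 2.
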
 %% kill_types(V, Tdb) -> Tdb'
@@ -3082,7 +3059,7 @@ bsm_ensure_no_partition_after([#c_clause{pats=Ps}|Cs], Pos) ->
             bsm_problem(P, bin_partition)
     end;
 bsm_ensure_no_partition_after([], _) -> ok.
- 
+
 bsm_could_match_binary(#c_alias{pat=P}) -> bsm_could_match_binary(P);
 bsm_could_match_binary(#c_cons{}) -> false;
 bsm_could_match_binary(#c_tuple{}) -> false;
@@ -3116,11 +3093,11 @@ add_bin_opt_info(Core, Term) ->
     end.
 
 add_warning(Core, Term) ->
-    case is_compiler_generated(Core) of
+    case suppress_warning(Core) of
         true ->
             ok;
         false ->
-            Anno = core_lib:get_anno(Core),
+            Anno = cerl:get_ann(Core),
             Line = get_line(Anno),
             File = get_file(Anno),
             Key = {?MODULE,warnings},
@@ -3141,9 +3118,17 @@ get_file([{file,File}|_]) -> File;
 get_file([_|T]) -> get_file(T);
 get_file([]) -> "no_file". % should not happen
 
+suppress_warning(Core) ->
+    is_compiler_generated(Core) orelse
+        is_result_unwanted(Core).
+
 is_compiler_generated(Core) ->
-    Anno = core_lib:get_anno(Core),
-    member(compiler_generated, Anno).
+    Ann = cerl:get_ann(Core),
+    member(compiler_generated, Ann).
+
+is_result_unwanted(Core) ->
+    Ann = cerl:get_ann(Core),
+    member(result_not_wanted, Ann).
 
 get_warnings() ->
     ordsets:from_list((erase({?MODULE,warnings}))).
diff --git a/lib/compiler/src/sys_core_fold_lists.erl b/lib/compiler/src/sys_core_fold_lists.erl
new file mode 100644
index 0000000000..49dc59052a
--- /dev/null
+++ b/lib/compiler/src/sys_core_fold_lists.erl
@@ -0,0 +1,386 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%% Purpose : Inline higher-order list functions from the lists module.
+
+-module(sys_core_fold_lists).
+
+-export([call/4]).
+
+-include("core_parse.hrl").
+
+%% We inline some very common higher order list operations.
+%% We use the same evaluation order as the library function.
+
+-spec call(cerl:c_call(), atom(), atom(), [cerl:cerl()]) ->
+	'none' | cerl:cerl().
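
For illustration only (not part of the commit): a hypothetical call site of the kind this new module rewrites, assuming the usual list-function inlining is enabled. The expectation that the remote lists:all/2 call is replaced by a local 'lists^all'/1 letrec follows from the clauses below.

    %% After inlining, the predicate is applied in a locally generated
    %% loop instead of via a remote call into the lists module.
    all_small(Ints) ->
        lists:all(fun(I) -> I < 10 end, Ints).
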
+ +call(#c_call{anno=Anno}, lists, all, [Arg1,Arg2]) -> + Loop = #c_var{name={'lists^all',1}}, + F = #c_var{name='F'}, + Xs = #c_var{name='Xs'}, + X = #c_var{name='X'}, + Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]}, + CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true}, + body=#c_apply{anno=Anno, op=Loop, args=[Xs]}}, + CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true}, + body=#c_literal{val=false}}, + CC3 = #c_clause{pats=[X], guard=#c_literal{val=true}, + body=match_fail(Anno, Err1)}, + C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true}, + body=#c_case{arg=#c_apply{anno=Anno, op=F, args=[X]}, + clauses = [CC1, CC2, CC3]}}, + C2 = #c_clause{pats=[#c_literal{val=[]}], + guard=#c_call{module=#c_literal{val=erlang}, + name=#c_literal{val=is_function}, + args=[F, #c_literal{val=1}]}, + body=#c_literal{val=true}}, + Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]}, + C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true}, + body=match_fail([{function_name,{'lists^all',1}}|Anno], Err2)}, + Fun = #c_fun{vars=[Xs], + body=#c_case{arg=Xs, clauses=[C1, C2, C3]}}, + L = #c_var{name='L'}, + #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]}, + body=#c_letrec{defs=[{Loop,Fun}], + body=#c_apply{anno=Anno, op=Loop, args=[L]}}}; +call(#c_call{anno=Anno}, lists, any, [Arg1,Arg2]) -> + Loop = #c_var{name={'lists^any',1}}, + F = #c_var{name='F'}, + Xs = #c_var{name='Xs'}, + X = #c_var{name='X'}, + Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]}, + CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true}, + body=#c_literal{val=true}}, + CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true}, + body=#c_apply{anno=Anno, op=Loop, args=[Xs]}}, + CC3 = #c_clause{pats=[X], guard=#c_literal{val=true}, + body=match_fail(Anno, Err1)}, + C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true}, + body=#c_case{arg=#c_apply{anno=Anno, op=F, args=[X]}, + clauses = [CC1, CC2, CC3]}}, + C2 = #c_clause{pats=[#c_literal{val=[]}], + guard=#c_call{module=#c_literal{val=erlang}, + name=#c_literal{val=is_function}, + args=[F, #c_literal{val=1}]}, + body=#c_literal{val=false}}, + Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]}, + C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true}, + body=match_fail([{function_name,{'lists^any',1}}|Anno], Err2)}, + Fun = #c_fun{vars=[Xs], + body=#c_case{arg=Xs, clauses=[C1, C2, C3]}}, + L = #c_var{name='L'}, + #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]}, + body=#c_letrec{defs=[{Loop,Fun}], + body=#c_apply{anno=Anno, op=Loop, args=[L]}}}; +call(#c_call{anno=Anno}, lists, foreach, [Arg1,Arg2]) -> + Loop = #c_var{name={'lists^foreach',1}}, + F = #c_var{name='F'}, + Xs = #c_var{name='Xs'}, + X = #c_var{name='X'}, + C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true}, + body=#c_seq{arg=#c_apply{anno=Anno, op=F, args=[X]}, + body=#c_apply{anno=Anno, op=Loop, args=[Xs]}}}, + C2 = #c_clause{pats=[#c_literal{val=[]}], + guard=#c_call{module=#c_literal{val=erlang}, + name=#c_literal{val=is_function}, + args=[F, #c_literal{val=1}]}, + body=#c_literal{val=ok}}, + Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]}, + C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true}, + body=match_fail([{function_name,{'lists^foreach',1}}|Anno], Err)}, + Fun = #c_fun{vars=[Xs], + body=#c_case{arg=Xs, clauses=[C1, C2, C3]}}, + L = #c_var{name='L'}, + #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]}, + 
body=#c_letrec{defs=[{Loop,Fun}], + body=#c_apply{anno=Anno, op=Loop, args=[L]}}}; +call(#c_call{anno=Anno}, lists, map, [Arg1,Arg2]) -> + Loop = #c_var{name={'lists^map',1}}, + F = #c_var{name='F'}, + Xs = #c_var{name='Xs'}, + X = #c_var{name='X'}, + H = #c_var{name='H'}, + C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true}, + body=#c_let{vars=[H], arg=#c_apply{anno=Anno, + op=F, + args=[X]}, + body=#c_cons{hd=H, + anno=[compiler_generated], + tl=#c_apply{anno=Anno, + op=Loop, + args=[Xs]}}}}, + C2 = #c_clause{pats=[#c_literal{val=[]}], + guard=#c_call{module=#c_literal{val=erlang}, + name=#c_literal{val=is_function}, + args=[F, #c_literal{val=1}]}, + body=#c_literal{val=[]}}, + Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]}, + C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true}, + body=match_fail([{function_name,{'lists^map',1}}|Anno], Err)}, + Fun = #c_fun{vars=[Xs], + body=#c_case{arg=Xs, clauses=[C1, C2, C3]}}, + L = #c_var{name='L'}, + #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]}, + body=#c_letrec{defs=[{Loop,Fun}], + body=#c_apply{anno=Anno, op=Loop, args=[L]}}}; +call(#c_call{anno=Anno}, lists, flatmap, [Arg1,Arg2]) -> + Loop = #c_var{name={'lists^flatmap',1}}, + F = #c_var{name='F'}, + Xs = #c_var{name='Xs'}, + X = #c_var{name='X'}, + H = #c_var{name='H'}, + C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true}, + body=#c_let{vars=[H], + arg=#c_apply{anno=Anno, op=F, args=[X]}, + body=#c_call{anno=[compiler_generated|Anno], + module=#c_literal{val=erlang}, + name=#c_literal{val='++'}, + args=[H, + #c_apply{anno=Anno, + op=Loop, + args=[Xs]}]}}}, + C2 = #c_clause{pats=[#c_literal{val=[]}], + guard=#c_call{module=#c_literal{val=erlang}, + name=#c_literal{val=is_function}, + args=[F, #c_literal{val=1}]}, + body=#c_literal{val=[]}}, + Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]}, + C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true}, + body=match_fail([{function_name,{'lists^flatmap',1}}|Anno], Err)}, + Fun = #c_fun{vars=[Xs], + body=#c_case{arg=Xs, clauses=[C1, C2, C3]}}, + L = #c_var{name='L'}, + #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]}, + body=#c_letrec{defs=[{Loop,Fun}], + body=#c_apply{anno=Anno, op=Loop, args=[L]}}}; +call(#c_call{anno=Anno}, lists, filter, [Arg1,Arg2]) -> + Loop = #c_var{name={'lists^filter',1}}, + F = #c_var{name='F'}, + Xs = #c_var{name='Xs'}, + X = #c_var{name='X'}, + B = #c_var{name='B'}, + Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]}, + CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true}, + body=#c_cons{anno=[compiler_generated], hd=X, tl=Xs}}, + CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true}, + body=Xs}, + CC3 = #c_clause{pats=[X], guard=#c_literal{val=true}, + body=match_fail(Anno, Err1)}, + Case = #c_case{arg=B, clauses = [CC1, CC2, CC3]}, + C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true}, + body=#c_let{vars=[B], + arg=#c_apply{anno=Anno, op=F, args=[X]}, + body=#c_let{vars=[Xs], + arg=#c_apply{anno=Anno, + op=Loop, + args=[Xs]}, + body=Case}}}, + C2 = #c_clause{pats=[#c_literal{val=[]}], + guard=#c_call{module=#c_literal{val=erlang}, + name=#c_literal{val=is_function}, + args=[F, #c_literal{val=1}]}, + body=#c_literal{val=[]}}, + Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]}, + C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true}, + body=match_fail([{function_name,{'lists^filter',1}}|Anno], Err2)}, + Fun = #c_fun{vars=[Xs], + body=#c_case{arg=Xs, clauses=[C1, C2, C3]}}, + 
L = #c_var{name='L'}, + #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]}, + body=#c_letrec{defs=[{Loop,Fun}], + body=#c_apply{anno=Anno, op=Loop, args=[L]}}}; +call(#c_call{anno=Anno}, lists, foldl, [Arg1,Arg2,Arg3]) -> + Loop = #c_var{name={'lists^foldl',2}}, + F = #c_var{name='F'}, + Xs = #c_var{name='Xs'}, + X = #c_var{name='X'}, + A = #c_var{name='A'}, + C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true}, + body=#c_apply{anno=Anno, + op=Loop, + args=[Xs, #c_apply{anno=Anno, + op=F, + args=[X, A]}]}}, + C2 = #c_clause{pats=[#c_literal{val=[]}], + guard=#c_call{module=#c_literal{val=erlang}, + name=#c_literal{val=is_function}, + args=[F, #c_literal{val=2}]}, + body=A}, + Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, A, Xs]}, + C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true}, + body=match_fail([{function_name,{'lists^foldl',2}}|Anno], Err)}, + Fun = #c_fun{vars=[Xs, A], + body=#c_case{arg=Xs, clauses=[C1, C2, C3]}}, + L = #c_var{name='L'}, + #c_let{vars=[F, A, L], arg=#c_values{es=[Arg1, Arg2, Arg3]}, + body=#c_letrec{defs=[{Loop,Fun}], + body=#c_apply{anno=Anno, op=Loop, args=[L, A]}}}; +call(#c_call{anno=Anno}, lists, foldr, [Arg1,Arg2,Arg3]) -> + Loop = #c_var{name={'lists^foldr',2}}, + F = #c_var{name='F'}, + Xs = #c_var{name='Xs'}, + X = #c_var{name='X'}, + A = #c_var{name='A'}, + C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true}, + body=#c_apply{anno=Anno, + op=F, + args=[X, #c_apply{anno=Anno, + op=Loop, + args=[Xs, A]}]}}, + C2 = #c_clause{pats=[#c_literal{val=[]}], + guard=#c_call{module=#c_literal{val=erlang}, + name=#c_literal{val=is_function}, + args=[F, #c_literal{val=2}]}, + body=A}, + Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, A, Xs]}, + C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true}, + body=match_fail([{function_name,{'lists^foldr',2}}|Anno], Err)}, + Fun = #c_fun{vars=[Xs, A], + body=#c_case{arg=Xs, clauses=[C1, C2, C3]}}, + L = #c_var{name='L'}, + #c_let{vars=[F, A, L], arg=#c_values{es=[Arg1, Arg2, Arg3]}, + body=#c_letrec{defs=[{Loop,Fun}], + body=#c_apply{anno=Anno, op=Loop, args=[L, A]}}}; +call(#c_call{anno=Anno}, lists, mapfoldl, [Arg1,Arg2,Arg3]) -> + Loop = #c_var{name={'lists^mapfoldl',2}}, + F = #c_var{name='F'}, + Xs = #c_var{name='Xs'}, + X = #c_var{name='X'}, + Avar = #c_var{name='A'}, + Match = + fun (A, P, E) -> + C1 = #c_clause{pats=[P], guard=#c_literal{val=true}, body=E}, + Err = #c_tuple{es=[#c_literal{val='badmatch'}, X]}, + C2 = #c_clause{pats=[X], guard=#c_literal{val=true}, + body=match_fail(Anno, Err)}, + #c_case{arg=A, clauses=[C1, C2]} + end, + C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true}, + body=Match(#c_apply{anno=Anno, op=F, args=[X, Avar]}, + #c_tuple{es=[X, Avar]}, +%%% Tuple passing version + Match(#c_apply{anno=Anno, + op=Loop, + args=[Xs, Avar]}, + #c_tuple{es=[Xs, Avar]}, + #c_tuple{anno=[compiler_generated], + es=[#c_cons{anno=[compiler_generated], + hd=X, tl=Xs}, + Avar]}) +%%% Multiple-value version +%%% #c_let{vars=[Xs,A], +%%% %% The tuple here will be optimised +%%% %% away later; no worries. 
+%%% arg=#c_apply{op=Loop, args=[Xs, A]}, +%%% body=#c_values{es=[#c_cons{hd=X, tl=Xs}, +%%% A]}} + )}, + C2 = #c_clause{pats=[#c_literal{val=[]}], + guard=#c_call{module=#c_literal{val=erlang}, + name=#c_literal{val=is_function}, + args=[F, #c_literal{val=2}]}, +%%% Tuple passing version + body=#c_tuple{anno=[compiler_generated], + es=[#c_literal{val=[]}, Avar]}}, +%%% Multiple-value version +%%% body=#c_values{es=[#c_literal{val=[]}, A]}}, + Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Avar, Xs]}, + C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true}, + body=match_fail([{function_name,{'lists^mapfoldl',2}}|Anno], Err)}, + Fun = #c_fun{vars=[Xs, Avar], + body=#c_case{arg=Xs, clauses=[C1, C2, C3]}}, + L = #c_var{name='L'}, + #c_let{vars=[F, Avar, L], arg=#c_values{es=[Arg1, Arg2, Arg3]}, + body=#c_letrec{defs=[{Loop,Fun}], +%%% Tuple passing version + body=#c_apply{anno=Anno, + op=Loop, + args=[L, Avar]}}}; +%%% Multiple-value version +%%% body=#c_let{vars=[Xs, A], +%%% arg=#c_apply{op=Loop, +%%% args=[L, A]}, +%%% body=#c_tuple{es=[Xs, A]}}}}; +call(#c_call{anno=Anno}, lists, mapfoldr, [Arg1,Arg2,Arg3]) -> + Loop = #c_var{name={'lists^mapfoldr',2}}, + F = #c_var{name='F'}, + Xs = #c_var{name='Xs'}, + X = #c_var{name='X'}, + Avar = #c_var{name='A'}, + Match = + fun (A, P, E) -> + C1 = #c_clause{pats=[P], guard=#c_literal{val=true}, body=E}, + Err = #c_tuple{es=[#c_literal{val='badmatch'}, X]}, + C2 = #c_clause{pats=[X], guard=#c_literal{val=true}, + body=match_fail(Anno, Err)}, + #c_case{arg=A, clauses=[C1, C2]} + end, + C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true}, +%%% Tuple passing version + body=Match(#c_apply{anno=Anno, + op=Loop, + args=[Xs, Avar]}, + #c_tuple{es=[Xs, Avar]}, + Match(#c_apply{anno=Anno, op=F, args=[X, Avar]}, + #c_tuple{es=[X, Avar]}, + #c_tuple{anno=[compiler_generated], + es=[#c_cons{anno=[compiler_generated], + hd=X, tl=Xs}, Avar]})) +%%% Multiple-value version +%%% body=#c_let{vars=[Xs,A], +%%% %% The tuple will be optimised away +%%% arg=#c_apply{op=Loop, args=[Xs, A]}, +%%% body=Match(#c_apply{op=F, args=[X, A]}, +%%% #c_tuple{es=[X, A]}, +%%% #c_values{es=[#c_cons{hd=X, tl=Xs}, +%%% A]})} + }, + C2 = #c_clause{pats=[#c_literal{val=[]}], + guard=#c_call{module=#c_literal{val=erlang}, + name=#c_literal{val=is_function}, + args=[F, #c_literal{val=2}]}, +%%% Tuple passing version + body=#c_tuple{anno=[compiler_generated], + es=[#c_literal{val=[]}, Avar]}}, +%%% Multiple-value version +%%% body=#c_values{es=[#c_literal{val=[]}, A]}}, + Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Avar, Xs]}, + C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true}, + body=match_fail([{function_name,{'lists^mapfoldr',2}}|Anno], Err)}, + Fun = #c_fun{vars=[Xs, Avar], + body=#c_case{arg=Xs, clauses=[C1, C2, C3]}}, + L = #c_var{name='L'}, + #c_let{vars=[F, Avar, L], arg=#c_values{es=[Arg1, Arg2, Arg3]}, + body=#c_letrec{defs=[{Loop,Fun}], +%%% Tuple passing version + body=#c_apply{anno=Anno, + op=Loop, + args=[L, Avar]}}}; +%%% Multiple-value version +%%% body=#c_let{vars=[Xs, A], +%%% arg=#c_apply{op=Loop, +%%% args=[L, A]}, +%%% body=#c_tuple{es=[Xs, A]}}}}; +call(_, _, _, _) -> + none. + +match_fail(Ann, Arg) -> + Name = cerl:abstract(match_fail), + Args = [Arg], + cerl:ann_c_primop(Ann, Name, Args). 
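
For illustration only (not part of the commit): one more hypothetical call site, this time for the tuple-passing lists:mapfoldl/3 clause above.

    %% Numbers each element while threading a counter. After inlining,
    %% the {List,Acc} pairs are built by the generated 'lists^mapfoldl'/2
    %% letrec rather than inside the lists module.
    number(Items) ->
        lists:mapfoldl(fun(Item, N) -> {{N,Item}, N + 1} end, 1, Items).
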
diff --git a/lib/compiler/src/sys_core_inline.erl b/lib/compiler/src/sys_core_inline.erl index 9f93acb666..1e3a735e9b 100644 --- a/lib/compiler/src/sys_core_inline.erl +++ b/lib/compiler/src/sys_core_inline.erl @@ -195,10 +195,10 @@ kill_id_anns(Body) -> A = kill_id_anns_1(A0), CFun#c_fun{anno=A}; (Expr) -> - %% Mark everything as compiler generated to suppress - %% bogus warnings. - A = compiler_generated(core_lib:get_anno(Expr)), - core_lib:set_anno(Expr, A) + %% Mark everything as compiler generated to + %% suppress bogus warnings. + A = compiler_generated(cerl:get_ann(Expr)), + cerl:set_ann(Expr, A) end, Body). kill_id_anns_1([{'id',_}|As]) -> diff --git a/lib/compiler/src/sys_pre_expand.erl b/lib/compiler/src/sys_pre_expand.erl index 761ae8409c..f99307c865 100644 --- a/lib/compiler/src/sys_pre_expand.erl +++ b/lib/compiler/src/sys_pre_expand.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2012. All Rights Reserved. +%% Copyright Ericsson AB 1996-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -33,12 +33,15 @@ -include("../include/erl_bits.hrl"). +-type fa() :: {atom(), arity()}. + -record(expand, {module=[], %Module name exports=[], %Exports imports=[], %Imports compile=[], %Compile flags attributes=[], %Attributes callbacks=[], %Callbacks + optional_callbacks=[] :: [fa()], %Optional callbacks defined, %Defined functions (gb_set) vcount=0, %Variable counter func=[], %Current function @@ -99,7 +102,21 @@ define_functions(Forms, #expand{defined=Predef}=St) -> module_attrs(#expand{attributes=Attributes}=St) -> Attrs = [{attribute,Line,Name,Val} || {Name,Line,Val} <- Attributes], Callbacks = [Callback || {_,_,callback,_}=Callback <- Attrs], - {Attrs,St#expand{callbacks=Callbacks}}. + OptionalCallbacks = get_optional_callbacks(Attrs), + {Attrs,St#expand{callbacks=Callbacks, + optional_callbacks=OptionalCallbacks}}. + +get_optional_callbacks(Attrs) -> + L = [O || + {attribute, _, optional_callbacks, O} <- Attrs, + is_fa_list(O)], + lists:append(L). + +is_fa_list([{FuncName, Arity}|L]) + when is_atom(FuncName), is_integer(Arity), Arity >= 0 -> + is_fa_list(L); +is_fa_list([]) -> true; +is_fa_list(_) -> false. module_predef_funcs(St) -> {Mpf1,St1}=module_predef_func_beh_info(St), @@ -108,19 +125,24 @@ module_predef_funcs(St) -> module_predef_func_beh_info(#expand{callbacks=[]}=St) -> {[], St}; -module_predef_func_beh_info(#expand{callbacks=Callbacks,defined=Defined, +module_predef_func_beh_info(#expand{callbacks=Callbacks, + optional_callbacks=OptionalCallbacks, + defined=Defined, exports=Exports}=St) -> PreDef=[{behaviour_info,1}], PreExp=PreDef, - {[gen_beh_info(Callbacks)], + {[gen_beh_info(Callbacks, OptionalCallbacks)], St#expand{defined=gb_sets:union(gb_sets:from_list(PreDef), Defined), exports=union(from_list(PreExp), Exports)}}. -gen_beh_info(Callbacks) -> +gen_beh_info(Callbacks, OptionalCallbacks) -> List = make_list(Callbacks), + OptionalList = make_optional_list(OptionalCallbacks), {function,0,behaviour_info,1, [{clause,0,[{atom,0,callbacks}],[], - [List]}]}. + [List]}, + {clause,0,[{atom,0,optional_callbacks}],[], + [OptionalList]}]}. make_list([]) -> {nil,0}; make_list([{_,_,_,[{{Name,Arity},_}]}|Rest]) -> @@ -130,6 +152,14 @@ make_list([{_,_,_,[{{Name,Arity},_}]}|Rest]) -> {integer,0,Arity}]}, make_list(Rest)}. 
+make_optional_list([]) -> {nil,0}; +make_optional_list([{Name,Arity}|Rest]) -> + {cons,0, + {tuple,0, + [{atom,0,Name}, + {integer,0,Arity}]}, + make_optional_list(Rest)}. + module_predef_funcs_mod_info(St) -> PreDef = [{module_info,0},{module_info,1}], PreExp = PreDef, @@ -232,9 +262,18 @@ pattern({map,Line,Ps}, St0) -> {TPs,St1} = pattern_list(Ps, St0), {{map,Line,TPs},St1}; pattern({map_field_exact,Line,K0,V0}, St0) -> - {K,St1} = expr(K0, St0), + %% Key should be treated as an expression + %% but since expressions are not allowed yet, + %% process it through pattern .. and handle assoc + %% (normalise unary op integer -> integer) + {K,St1} = pattern(K0, St0), {V,St2} = pattern(V0, St1), {{map_field_exact,Line,K,V},St2}; +pattern({map_field_assoc,Line,K0,V0}, St0) -> + %% when keys are Maps + {K,St1} = pattern(K0, St0), + {V,St2} = pattern(V0, St1), + {{map_field_assoc,Line,K,V},St2}; %%pattern({struct,Line,Tag,Ps}, St0) -> %% {TPs,TPsvs,St1} = pattern_list(Ps, St0), %% {{tuple,Line,[{atom,Line,Tag}|TPs]},TPsvs,St1}; diff --git a/lib/compiler/src/v3_codegen.erl b/lib/compiler/src/v3_codegen.erl index 47a357c23d..7eec9dd62b 100644 --- a/lib/compiler/src/v3_codegen.erl +++ b/lib/compiler/src/v3_codegen.erl @@ -69,10 +69,8 @@ stk=[], %Stack table res=[]}). %Reserved regs: [{reserved,I,V}] -module({Mod,Exp,Attr,Forms}, Options) -> - put(?MODULE, Options), +module({Mod,Exp,Attr,Forms}, _Options) -> {Fs,St} = functions(Forms, {atom,Mod}), - erase(?MODULE), {ok,{Mod,Exp,Attr,Fs,St#cg.lcount}}. functions(Forms, AtomMod) -> @@ -210,7 +208,7 @@ need_heap_0([], H, Acc) -> need_heap_1(#l{ke={set,_,{binary,_}},i=I}, H) -> {need_heap_need(I, H),0}; -need_heap_1(#l{ke={set,_,{map,_,_}},i=I}, H) -> +need_heap_1(#l{ke={set,_,{map,_,_,_}},i=I}, H) -> {need_heap_need(I, H),0}; need_heap_1(#l{ke={set,_,Val}}, H) -> %% Just pass through adding to needed heap. @@ -643,10 +641,6 @@ select_val_cg(tuple, R, [Arity,{f,Lbl}], Tf, Vf, [{label,Lbl}|Sis]) -> [{test,is_tuple,{f,Tf},[R]},{test,test_arity,{f,Vf},[R,Arity]}|Sis]; select_val_cg(tuple, R, Vls, Tf, Vf, Sis) -> [{test,is_tuple,{f,Tf},[R]},{select_tuple_arity,R,{f,Vf},{list,Vls}}|Sis]; -select_val_cg(map, R, [_Val,{f,Lbl}], Fail, Fail, [{label,Lbl}|Sis]) -> - [{test,is_map,{f,Fail},[R]}|Sis]; -select_val_cg(map, R, [_Val,{f,Lbl}|_], Tf, _Vf, [{label,Lbl}|Sis]) -> - [{test,is_map,{f,Tf},[R]}|Sis]; select_val_cg(Type, R, [Val, {f,Lbl}], Fail, Fail, [{label,Lbl}|Sis]) -> [{test,is_eq_exact,{f,Fail},[R,{Type,Val}]}|Sis]; select_val_cg(Type, R, [Val, {f,Lbl}], Tf, Vf, [{label,Lbl}|Sis]) -> @@ -928,7 +922,7 @@ select_extract_tuple(Src, Vs, I, Vdb, Bef, St) -> select_map(Scs, V, Tf, Vf, Bef, St0) -> Reg = fetch_var(V, Bef), {Is,Aft,St1} = - match_fmf(fun(#l{ke={val_clause,{map,_,Es},B},i=I,vdb=Vdb}, Fail, St1) -> + match_fmf(fun(#l{ke={val_clause,{map,exact,_,Es},B},i=I,vdb=Vdb}, Fail, St1) -> select_map_val(V, Es, B, Fail, I, Vdb, Bef, St1) end, Vf, St0, Scs), {[{test,is_map,{f,Tf},[Reg]}|Is],Aft,St1}. 
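
For illustration only (not part of the commit): a hypothetical source-level pattern of the kind that select_map/6 above compiles into is_map, has_map_fields and get_map_elements instructions; the hunk that follows extends the extraction code to keys that are variables.

    %% A literal ':=' key in a clause head; the generated code tests
    %% is_map and then fetches the value stored under the key 'name'.
    get_name(#{name := Name}) -> Name;
    get_name(_) -> undefined.
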
@@ -947,27 +941,34 @@ select_extract_map(Src, Vs, Fail, I, Vdb, Bef, St) -> %% Assume keys are term-sorted Rsrc = fetch_var(Src, Bef), - {{HasKs,GetVs},Aft} = lists:foldr(fun - ({map_pair,Key,{var,V}},{{HasKsi,GetVsi},Int0}) -> + {{HasKs,GetVs,HasVarKs,GetVarVs},Aft} = lists:foldr(fun + ({map_pair,{var,K},{var,V}},{{HasKsi,GetVsi,HasVarVsi,GetVarVsi},Int0}) -> case vdb_find(V, Vdb) of {V,_,L} when L =< I -> - {{[Key|HasKsi],GetVsi},Int0}; + RK = fetch_var(K,Int0), + {{HasKsi,GetVsi,[RK|HasVarVsi],GetVarVsi},Int0}; _Other -> Reg1 = put_reg(V, Int0#sr.reg), Int1 = Int0#sr{reg=Reg1}, - {{HasKsi,[Key,fetch_reg(V, Reg1)|GetVsi]},Int1} + RK = fetch_var(K,Int0), + RV = fetch_reg(V,Reg1), + {{HasKsi,GetVsi,HasVarVsi,[[RK,RV]|GetVarVsi]},Int1} + end; + ({map_pair,Key,{var,V}},{{HasKsi,GetVsi,HasVarVsi,GetVarVsi},Int0}) -> + case vdb_find(V, Vdb) of + {V,_,L} when L =< I -> + {{[Key|HasKsi],GetVsi,HasVarVsi,GetVarVsi},Int0}; + _Other -> + Reg1 = put_reg(V, Int0#sr.reg), + Int1 = Int0#sr{reg=Reg1}, + {{HasKsi,[Key,fetch_reg(V, Reg1)|GetVsi],HasVarVsi,GetVarVsi},Int1} end - end, {{[],[]},Bef}, Vs), - - Code = case {HasKs,GetVs} of - {HasKs,[]} -> - [{test,has_map_fields,{f,Fail},Rsrc,{list,HasKs}}]; - {[],GetVs} -> - [{get_map_elements, {f,Fail},Rsrc,{list,GetVs}}]; - {HasKs,GetVs} -> - [{test,has_map_fields,{f,Fail},Rsrc,{list,HasKs}}, - {get_map_elements, {f,Fail},Rsrc,{list,GetVs}}] - end, + end, {{[],[],[],[]},Bef}, Vs), + + Code = [{test,has_map_fields,{f,Fail},Rsrc,{list,HasKs}} || HasKs =/= []] ++ + [{test,has_map_fields,{f,Fail},Rsrc,{list,[K]}} || K <- HasVarKs] ++ + [{get_map_elements, {f,Fail},Rsrc,{list,GetVs}} || GetVs =/= []] ++ + [{get_map_elements, {f,Fail},Rsrc,{list,[K,V]}} || [K,V] <- GetVarVs], {Code, Aft, St}. @@ -1504,9 +1505,41 @@ set_cg([{var,R}], {binary,Segs}, Le, Vdb, Bef, %% Now generate the complete code for constructing the binary. Code = cg_binary(PutCode, Target, Temp, Fail, MaxRegs, Le#l.a), {Sis++Code,Aft,St}; +% Map single variable key +set_cg([{var,R}], {map,Op,Map,[{map_pair,{var,_}=K,V}]}, Le, Vdb, Bef, + #cg{in_catch=InCatch,bfail=Bfail}=St) -> + + Fail = {f,Bfail}, + {Sis,Int0} = + case InCatch of + true -> adjust_stack(Bef, Le#l.i, Le#l.i+1, Vdb); + false -> {[],Bef} + end, + SrcReg = cg_reg_arg(Map,Int0), + Line = line(Le#l.a), + + List = [cg_reg_arg(K,Int0),cg_reg_arg(V,Int0)], + + Live = max_reg(Bef#sr.reg), + + %% The target register can reuse one of the source registers. + Aft0 = clear_dead(Int0, Le#l.i, Vdb), + Aft = Aft0#sr{reg=put_reg(R, Aft0#sr.reg)}, + Target = fetch_reg(R, Aft#sr.reg), + + I = case Op of + assoc -> put_map_assoc; + exact -> put_map_exact + end, + {Sis++[Line]++[{I,Fail,SrcReg,Target,Live,{list,List}}],Aft,St}; + +% Map (possibly) multiple literal keys set_cg([{var,R}], {map,Op,Map,Es}, Le, Vdb, Bef, #cg{in_catch=InCatch,bfail=Bfail}=St) -> + %% assert key literals + [] = [Var||{map_pair,{var,_}=Var,_} <- Es], + Fail = {f,Bfail}, {Sis,Int0} = case InCatch of @@ -1524,9 +1557,11 @@ set_cg([{var,R}], {map,Op,Map,Es}, Le, Vdb, Bef, List = flatmap(fun({K,V}) -> [K,cg_reg_arg(V,Int0)] end, Pairs), Live = max_reg(Bef#sr.reg), - Int1 = Int0#sr{reg=put_reg(R, Int0#sr.reg)}, - Aft = clear_dead(Int1, Le#l.i, Vdb), - Target = fetch_reg(R, Int1#sr.reg), + + %% The target register can reuse one of the source registers. 
+ Aft0 = clear_dead(Int0, Le#l.i, Vdb), + Aft = Aft0#sr{reg=put_reg(R, Aft0#sr.reg)}, + Target = fetch_reg(R, Aft#sr.reg), I = case Op of assoc -> put_map_assoc; diff --git a/lib/compiler/src/v3_core.erl b/lib/compiler/src/v3_core.erl index 59ec0d4199..c954d21e59 100644 --- a/lib/compiler/src/v3_core.erl +++ b/lib/compiler/src/v3_core.erl @@ -66,6 +66,7 @@ %% match arguments are novars %% case arguments are novars %% receive timeouts are novars +%% binaries and maps are novars %% let/set arguments are expressions %% fun is not a safe @@ -77,8 +78,8 @@ splitwith/2,keyfind/3,sort/1,foreach/2,droplast/1,last/1]). -import(ordsets, [add_element/2,del_element/2,is_element/2, union/1,union/2,intersection/2,subtract/2]). --import(cerl, [ann_c_cons/3,ann_c_cons_skel/3,ann_c_tuple/2,c_tuple/1, - ann_c_map/2, ann_c_map/3]). +-import(cerl, [ann_c_cons/3,ann_c_tuple/2,c_tuple/1, + ann_c_map/3]). -include("core_parse.hrl"). @@ -105,7 +106,9 @@ -record(iset, {anno=#a{},var,arg}). -record(itry, {anno=#a{},args,vars,body,evars,handler}). -record(ifilter, {anno=#a{},arg}). --record(igen, {anno=#a{},acc_pat,acc_guard,skip_pat,tail,tail_pat,arg}). +-record(igen, {anno=#a{},ceps=[],acc_pat,acc_guard, + skip_pat,tail,tail_pat,arg}). +-record(isimple, {anno=#a{},term :: cerl:cerl()}). -type iapply() :: #iapply{}. -type ibinary() :: #ibinary{}. @@ -124,11 +127,12 @@ -type itry() :: #itry{}. -type ifilter() :: #ifilter{}. -type igen() :: #igen{}. +-type isimple() :: #isimple{}. -type i() :: iapply() | ibinary() | icall() | icase() | icatch() | iclause() | ifun() | iletrec() | imatch() | iprimop() | iprotect() | ireceive1() | ireceive2() | iset() | itry() - | ifilter() | igen(). + | ifilter() | igen() | isimple(). -type warning() :: {file:filename(), [{integer(), module(), term()}]}. @@ -169,60 +173,81 @@ form({attribute,_,_,_}=F, {Fs,As,Ws,File}, _Opts) -> attribute({attribute,Line,Name,Val}) -> {#c_literal{val=Name, anno=[Line]}, #c_literal{val=Val, anno=[Line]}}. +%% function_dump(module_info,_,_,_) -> ok; +%% function_dump(Name,Arity,Format,Terms) -> +%% io:format("~w/~w " ++ Format,[Name,Arity]++Terms), +%% ok. + function({function,_,Name,Arity,Cs0}, Ws0, File, Opts) -> - %%ok = io:fwrite("~p - ", [{Name,Arity}]), St0 = #core{vcount=0,opts=Opts,ws=Ws0,file=[{file,File}]}, {B0,St1} = body(Cs0, Name, Arity, St0), - %%ok = io:fwrite("1", []), - %%ok = io:fwrite("~w:~p~n", [?LINE,B0]), + %% ok = function_dump(Name,Arity,"body:~n~p~n",[B0]), {B1,St2} = ubody(B0, St1), - %%ok = io:fwrite("2", []), - %%ok = io:fwrite("~w:~p~n", [?LINE,B1]), + %% ok = function_dump(Name,Arity,"ubody:~n~p~n",[B1]), {B2,#core{ws=Ws}} = cbody(B1, St2), - %%ok = io:fwrite("3~n", []), - %%ok = io:fwrite("~w:~p~n", [?LINE,B2]), + %% ok = function_dump(Name,Arity,"cbody:~n~p~n",[B2]), {{#c_var{name={Name,Arity}},B2},Ws}. body(Cs0, Name, Arity, St0) -> Anno = lineno_anno(element(2, hd(Cs0)), St0), {Args,St1} = new_vars(Anno, Arity, St0), - {Cs1,St2} = clauses(Cs0, St1), - {Ps,St3} = new_vars(Arity, St2), %Need new variables here - Fc = function_clause(Ps, Anno, {Name,Arity}), - {#ifun{anno=#a{anno=Anno},id=[],vars=Args,clauses=Cs1,fc=Fc},St3}. 
+ case clauses(Cs0, St1) of + {Cs1,[],St2} -> + {Ps,St3} = new_vars(Arity, St2), %Need new variables here + Fc = function_clause(Ps, Anno, {Name,Arity}), + {#ifun{anno=#a{anno=Anno},id=[],vars=Args,clauses=Cs1,fc=Fc},St3}; + {Cs1,Eps,St2} -> + %% We have pre-expressions from patterns and + %% these needs to be letified before matching + %% since only bound variables are allowed + AnnoGen = #a{anno=[compiler_generated]}, + {Ps1,St3} = new_vars(Arity, St2), %Need new variables here + Fc1 = function_clause(Ps1, Anno, {Name,Arity}), + {Ps2,St4} = new_vars(Arity, St3), %Need new variables here + Fc2 = function_clause(Ps2, Anno, {Name,Arity}), + Case = #icase{anno=AnnoGen,args=Args, + clauses=Cs1, + fc=Fc2}, + {#ifun{anno=#a{anno=Anno},id=[],vars=Args, + clauses=[#iclause{anno=AnnoGen,pats=Ps1, + guard=[#c_literal{val=true}], + body=Eps ++ [Case]}], + fc=Fc1},St4} + end. %% clause(Clause, State) -> {Cclause,State} | noclause. %% clauses([Clause], State) -> {[Cclause],State}. %% Convert clauses. Trap bad pattern aliases and remove clause from %% clause list. -clauses([C0|Cs0], St0) -> +clauses([C0|Cs0],St0) -> case clause(C0, St0) of - {noclause,St} -> clauses(Cs0, St); - {C,St1} -> - {Cs,St2} = clauses(Cs0, St1), - {[C|Cs],St2} + {noclause,_,St} -> clauses(Cs0,St); + {C,Eps1,St1} -> + {Cs,Eps2,St2} = clauses(Cs0, St1), + {[C|Cs],Eps1++Eps2,St2} end; -clauses([], St) -> {[],St}. +clauses([],St) -> {[],[],St}. clause({clause,Lc,H0,G0,B0}, St0) -> try head(H0, St0) of - H1 -> - {G1,St1} = guard(G0, St0), - {B1,St2} = exprs(B0, St1), - Anno = lineno_anno(Lc, St2), - {#iclause{anno=#a{anno=Anno},pats=H1,guard=G1,body=B1},St2} + {H1,Eps,St1} -> + {G1,St2} = guard(G0, St1), + {B1,St3} = exprs(B0, St2), + Anno = lineno_anno(Lc, St3), + {#iclause{anno=#a{anno=Anno},pats=H1,guard=G1,body=B1},Eps,St3} catch throw:nomatch -> St = add_warning(Lc, nomatch, St0), - {noclause,St} %Bad pattern + {noclause,[],St} %Bad pattern end. clause_arity({clause,_,H0,_,_}) -> length(H0). -%% head([P], State) -> [P]. +%% head([P], State) -> {[P],[Cexpr],State}. -head(Ps, St) -> pattern_list(Ps, St). +head(Ps, St) -> + pattern_list(Ps, St). %% guard([Expr], State) -> {[Cexpr],State}. 
%% Build an explict and/or tree of guard alternatives, then traverse @@ -266,13 +291,15 @@ gexpr({protect,Line,Arg}, Bools0, St0) -> {#iprotect{anno=#a{anno=Anno},body=Eps++[E]},[],Bools0,St} end; gexpr({op,L,'andalso',E1,E2}, Bools, St0) -> - {#c_var{name=V0},St} = new_var(L, St0), + Anno = lineno_anno(L, St0), + {#c_var{name=V0},St} = new_var(Anno, St0), V = {var,L,V0}, False = {atom,L,false}, E = make_bool_switch_guard(L, E1, V, E2, False), gexpr(E, Bools, St); gexpr({op,L,'orelse',E1,E2}, Bools, St0) -> - {#c_var{name=V0},St} = new_var(L, St0), + Anno = lineno_anno(L, St0), + {#c_var{name=V0},St} = new_var(Anno, St0), V = {var,L,V0}, True = {atom,L,true}, E = make_bool_switch_guard(L, E1, V, True, E2), @@ -361,33 +388,30 @@ gexpr_test(E0, Bools0, St0) -> Lanno = Anno#a.anno, {New,St2} = new_var(Lanno, St1), Bools = [New|Bools0], - {#icall{anno=Anno, %Must have an #a{} - module=#c_literal{anno=Lanno,val=erlang}, - name=#c_literal{anno=Lanno,val='=:='}, - args=[New,#c_literal{anno=Lanno,val=true}]}, + {icall_eq_true(New), Eps0 ++ [#iset{anno=Anno,var=New,arg=E1}],Bools,St2} end; _ -> - Anno = get_ianno(E1), Lanno = get_lineno_anno(E1), + ACompGen = #a{anno=[compiler_generated]}, case is_simple(E1) of true -> Bools = [E1|Bools0], - {#icall{anno=Anno, %Must have an #a{} - module=#c_literal{anno=Lanno,val=erlang}, - name=#c_literal{anno=Lanno,val='=:='}, - args=[E1,#c_literal{anno=Lanno,val=true}]},Eps0,Bools,St1}; + {icall_eq_true(E1),Eps0,Bools,St1}; false -> {New,St2} = new_var(Lanno, St1), Bools = [New|Bools0], - {#icall{anno=Anno, %Must have an #a{} - module=#c_literal{anno=Lanno,val=erlang}, - name=#c_literal{anno=Lanno,val='=:='}, - args=[New,#c_literal{anno=Lanno,val=true}]}, - Eps0 ++ [#iset{anno=Anno,var=New,arg=E1}],Bools,St2} + {icall_eq_true(New), + Eps0 ++ [#iset{anno=ACompGen,var=New,arg=E1}],Bools,St2} end end. +icall_eq_true(Arg) -> + #icall{anno=#a{anno=[compiler_generated]}, + module=#c_literal{val=erlang}, + name=#c_literal{val='=:='}, + args=[Arg,#c_literal{val=true}]}. + force_booleans(Vs0, E, Eps, St) -> Vs1 = [set_anno(V, []) || V <- Vs0], Vs = unforce(E, Eps, Vs1), @@ -397,16 +421,15 @@ force_booleans_1([], E, Eps, St) -> {E,Eps,St}; force_booleans_1([V|Vs], E0, Eps0, St0) -> {E1,Eps1,St1} = force_safe(E0, St0), - Lanno = element(2, V), - Anno = #a{anno=Lanno}, - Call = #icall{anno=Anno,module=#c_literal{anno=Lanno,val=erlang}, - name=#c_literal{anno=Lanno,val=is_boolean}, + ACompGen = #a{anno=[compiler_generated]}, + Call = #icall{anno=ACompGen,module=#c_literal{val=erlang}, + name=#c_literal{val=is_boolean}, args=[V]}, - {New,St} = new_var(Lanno, St1), - Iset = #iset{anno=Anno,var=New,arg=Call}, + {New,St} = new_var([], St1), + Iset = #iset{var=New,arg=Call}, Eps = Eps0 ++ Eps1 ++ [Iset], - E = #icall{anno=Anno, - module=#c_literal{anno=Lanno,val=erlang},name=#c_literal{anno=Lanno,val='and'}, + E = #icall{anno=ACompGen, + module=#c_literal{val=erlang},name=#c_literal{val='and'}, args=[E1,New]}, force_booleans_1(Vs, E, Eps, St). @@ -493,43 +516,28 @@ exprs([], St) -> {[],St}. %% Generate an internal core expression. 
expr({var,L,V}, St) -> {#c_var{anno=lineno_anno(L, St),name=V},[],St}; -expr({char,L,C}, St) -> {#c_literal{anno=lineno_anno(L, St),val=C},[],St}; -expr({integer,L,I}, St) -> {#c_literal{anno=lineno_anno(L, St),val=I},[],St}; -expr({float,L,F}, St) -> {#c_literal{anno=lineno_anno(L, St),val=F},[],St}; -expr({atom,L,A}, St) -> {#c_literal{anno=lineno_anno(L, St),val=A},[],St}; -expr({nil,L}, St) -> {#c_literal{anno=lineno_anno(L, St),val=[]},[],St}; -expr({string,L,S}, St) -> {#c_literal{anno=lineno_anno(L, St),val=S},[],St}; +expr({char,L,C}, St) -> {#c_literal{anno=full_anno(L, St),val=C},[],St}; +expr({integer,L,I}, St) -> {#c_literal{anno=full_anno(L, St),val=I},[],St}; +expr({float,L,F}, St) -> {#c_literal{anno=full_anno(L, St),val=F},[],St}; +expr({atom,L,A}, St) -> {#c_literal{anno=full_anno(L, St),val=A},[],St}; +expr({nil,L}, St) -> {#c_literal{anno=full_anno(L, St),val=[]},[],St}; +expr({string,L,S}, St) -> {#c_literal{anno=full_anno(L, St),val=S},[],St}; expr({cons,L,H0,T0}, St0) -> {H1,Hps,St1} = safe(H0, St0), {T1,Tps,St2} = safe(T0, St1), - A = lineno_anno(L, St2), + A = full_anno(L, St2), {annotate_cons(A, H1, T1, St2),Hps ++ Tps,St2}; expr({lc,L,E,Qs0}, St0) -> {Qs1,St1} = preprocess_quals(L, Qs0, St0), lc_tq(L, E, Qs1, #c_literal{anno=lineno_anno(L, St1),val=[]}, St1); expr({bc,L,E,Qs}, St) -> - bc_tq(L, E, Qs, {nil,L}, St); + bc_tq(L, E, Qs, St); expr({tuple,L,Es0}, St0) -> {Es1,Eps,St1} = safe_list(Es0, St0), A = record_anno(L, St1), {annotate_tuple(A, Es1, St1),Eps,St1}; expr({map,L,Es0}, St0) -> - % erl_lint should make sure only #{ K => V } are allowed - % in map construction. - try map_pair_list(Es0, St0) of - {Es1,Eps,St1} -> - A = lineno_anno(L, St1), - {ann_c_map(A,Es1),Eps,St1} - catch - throw:{bad_map,Warning} -> - St = add_warning(L, Warning, St0), - LineAnno = lineno_anno(L, St), - As = [#c_literal{anno=LineAnno,val=badarg}], - {#icall{anno=#a{anno=LineAnno}, %Must have an #a{} - module=#c_literal{anno=LineAnno,val=erlang}, - name=#c_literal{anno=LineAnno,val=error}, - args=As},[],St} - end; + map_build_pairs(#c_literal{val=#{}}, Es0, full_anno(L, St0), St0); expr({map,L,M0,Es0}, St0) -> try expr_map(M0,Es0,lineno_anno(L, St0),St0) of {_,_,_}=Res -> Res @@ -544,7 +552,7 @@ expr({map,L,M0,Es0}, St0) -> args=As},[],St} end; expr({bin,L,Es0}, St0) -> - try expr_bin(Es0, lineno_anno(L, St0), St0) of + try expr_bin(Es0, full_anno(L, St0), St0) of {_,_,_}=Res -> Res catch throw:bad_binary -> @@ -562,26 +570,26 @@ expr({block,_,Es0}, St0) -> {E1,Eps,St2} = expr(last(Es0), St1), {E1,Es1 ++ Eps,St2}; expr({'if',L,Cs0}, St0) -> - {Cs1,St1} = clauses(Cs0, St0), + {Cs1,Ceps,St1} = clauses(Cs0, St0), Lanno = lineno_anno(L, St1), Fc = fail_clause([], Lanno, #c_literal{val=if_clause}), - {#icase{anno=#a{anno=Lanno},args=[],clauses=Cs1,fc=Fc},[],St1}; + {#icase{anno=#a{anno=Lanno},args=[],clauses=Cs1,fc=Fc},Ceps,St1}; expr({'case',L,E0,Cs0}, St0) -> {E1,Eps,St1} = novars(E0, St0), - {Cs1,St2} = clauses(Cs0, St1), + {Cs1,Ceps,St2} = clauses(Cs0, St1), {Fpat,St3} = new_var(St2), Lanno = lineno_anno(L, St2), Fc = fail_clause([Fpat], Lanno, c_tuple([#c_literal{val=case_clause},Fpat])), - {#icase{anno=#a{anno=Lanno},args=[E1],clauses=Cs1,fc=Fc},Eps,St3}; + {#icase{anno=#a{anno=Lanno},args=[E1],clauses=Cs1,fc=Fc},Eps++Ceps,St3}; expr({'receive',L,Cs0}, St0) -> - {Cs1,St1} = clauses(Cs0, St0), - {#ireceive1{anno=#a{anno=lineno_anno(L, St1)},clauses=Cs1}, [], St1}; + {Cs1,Ceps,St1} = clauses(Cs0, St0), + {#ireceive1{anno=#a{anno=lineno_anno(L, St1)},clauses=Cs1},Ceps, St1}; 
expr({'receive',L,Cs0,Te0,Tes0}, St0) -> {Te1,Teps,St1} = novars(Te0, St0), {Tes1,St2} = exprs(Tes0, St1), - {Cs1,St3} = clauses(Cs0, St2), + {Cs1,Ceps,St3} = clauses(Cs0, St2), {#ireceive2{anno=#a{anno=lineno_anno(L, St3)}, - clauses=Cs1,timeout=Te1,action=Tes1},Teps,St3}; + clauses=Cs1,timeout=Te1,action=Tes1},Teps++Ceps,St3}; expr({'try',L,Es0,[],Ecs,[]}, St0) -> %% 'try ... catch ... end' {Es1,St1} = exprs(Es0, St0), @@ -595,7 +603,7 @@ expr({'try',L,Es0,Cs0,Ecs,[]}, St0) -> %% 'try ... of ... catch ... end' {Es1,St1} = exprs(Es0, St0), {V,St2} = new_var(St1), %This name should be arbitrary - {Cs1,St3} = clauses(Cs0, St2), + {Cs1,Ceps,St3} = clauses(Cs0, St2), {Fpat,St4} = new_var(St3), Lanno = lineno_anno(L, St4), Fc = fail_clause([Fpat], Lanno, @@ -604,7 +612,7 @@ expr({'try',L,Es0,Cs0,Ecs,[]}, St0) -> {#itry{anno=#a{anno=lineno_anno(L, St5)},args=Es1, vars=[V],body=[#icase{anno=#a{anno=Lanno},args=[V],clauses=Cs1,fc=Fc}], evars=Evs,handler=Hs}, - [],St5}; + Ceps,St5}; expr({'try',L,Es0,[],[],As0}, St0) -> %% 'try ... after ... end' {Es1,St1} = exprs(Es0, St0), @@ -634,11 +642,11 @@ expr({'catch',L,E0}, St0) -> Lanno = lineno_anno(L, St1), {#icatch{anno=#a{anno=Lanno},body=Eps ++ [E1]},[],St1}; expr({'fun',L,{function,F,A},{_,_,_}=Id}, St) -> - Lanno = lineno_anno(L, St), + Lanno = full_anno(L, St), {#c_var{anno=Lanno++[{id,Id}],name={F,A}},[],St}; expr({'fun',L,{function,M,F,A}}, St0) -> {As,Aps,St1} = safe_list([M,F,A], St0), - Lanno = lineno_anno(L, St1), + Lanno = full_anno(L, St1), {#icall{anno=#a{anno=Lanno}, module=#c_literal{val=erlang}, name=#c_literal{val=make_fun}, @@ -649,13 +657,9 @@ expr({named_fun,L,'_',Cs,Id}, St) -> fun_tq(Id, Cs, L, St, unnamed); expr({named_fun,L,Name,Cs,Id}, St) -> fun_tq(Id, Cs, L, St, {named,Name}); -expr({call,L,{remote,_,M,F},As0}, #core{wanted=Wanted}=St0) -> +expr({call,L,{remote,_,M,F},As0}, St0) -> {[M1,F1|As1],Aps,St1} = safe_list([M,F|As0], St0), - Lanno = lineno_anno(L, St1), - Anno = case Wanted of - false -> [result_not_wanted|Lanno]; - true -> Lanno - end, + Anno = full_anno(L, St1), {#icall{anno=#a{anno=Anno},module=M1,name=F1,args=As1},Aps,St1}; expr({call,Lc,{atom,Lf,F},As0}, St0) -> {As1,Aps,St1} = safe_list(As0, St0), @@ -673,24 +677,24 @@ expr({match,L,P0,E0}, St0) -> {var,_,'_'} -> St0#core{wanted=false}; _ -> St0 end, - {E2,Eps,St2} = novars(E1, St1), + {E2,Eps1,St2} = novars(E1, St1), St3 = St2#core{wanted=St0#core.wanted}, - P2 = try - pattern(P1, St3) + {P2,Eps2,St4} = try + pattern(P1, St3) catch throw:Thrown -> - Thrown + {Thrown,[],St3} end, - {Fpat,St4} = new_var(St3), - Lanno = lineno_anno(L, St4), + {Fpat,St5} = new_var(St4), + Lanno = lineno_anno(L, St5), Fc = fail_clause([Fpat], Lanno, c_tuple([#c_literal{val=badmatch},Fpat])), case P2 of nomatch -> - St = add_warning(L, nomatch, St4), + St = add_warning(L, nomatch, St5), {#icase{anno=#a{anno=Lanno}, - args=[E2],clauses=[],fc=Fc},Eps,St}; + args=[E2],clauses=[],fc=Fc},Eps1++Eps2,St}; Other when not is_atom(Other) -> - {#imatch{anno=#a{anno=Lanno},pat=P2,arg=E2,fc=Fc},Eps,St4} + {#imatch{anno=#a{anno=Lanno},pat=P2,arg=E2,fc=Fc},Eps1++Eps2,St5} end; expr({op,_,'++',{lc,Llc,E,Qs0},More}, St0) -> %% Optimise '++' here because of the list comprehension algorithm. 
@@ -704,26 +708,28 @@ expr({op,_,'++',{lc,Llc,E,Qs0},More}, St0) -> {Y,Yps,St} = lc_tq(Llc, E, Qs, Mc, St2), {Y,Mps++Yps,St}; expr({op,L,'andalso',E1,E2}, St0) -> - {#c_var{name=V0},St} = new_var(L, St0), + Anno = lineno_anno(L, St0), + {#c_var{name=V0},St} = new_var(Anno, St0), V = {var,L,V0}, False = {atom,L,false}, E = make_bool_switch(L, E1, V, E2, False, St0), expr(E, St); expr({op,L,'orelse',E1,E2}, St0) -> - {#c_var{name=V0},St} = new_var(L, St0), + Anno = lineno_anno(L, St0), + {#c_var{name=V0},St} = new_var(Anno, St0), V = {var,L,V0}, True = {atom,L,true}, E = make_bool_switch(L, E1, V, True, E2, St0), expr(E, St); expr({op,L,Op,A0}, St0) -> {A1,Aps,St1} = safe(A0, St0), - LineAnno = lineno_anno(L, St1), + LineAnno = full_anno(L, St1), {#icall{anno=#a{anno=LineAnno}, %Must have an #a{} module=#c_literal{anno=LineAnno,val=erlang}, name=#c_literal{anno=LineAnno,val=Op},args=[A1]},Aps,St1}; expr({op,L,Op,L0,R0}, St0) -> {As,Aps,St1} = safe_list([L0,R0], St0), - LineAnno = lineno_anno(L, St1), + LineAnno = full_anno(L, St1), {#icall{anno=#a{anno=LineAnno}, %Must have an #a{} module=#c_literal{anno=LineAnno,val=erlang}, name=#c_literal{anno=LineAnno,val=Op},args=As},Aps,St1}. @@ -752,83 +758,58 @@ make_bool_switch_guard(L, E, V, T, F) -> {clause,NegL,[V],[],[V]} ]}. -expr_map(M0,Es0,A,St0) -> - {M1,Mps,St1} = safe(M0, St0), +expr_map(M0, Es0, A, St0) -> + {M1,Eps0,St1} = safe(M0, St0), case is_valid_map_src(M1) of true -> - case {M1,Es0} of - {#c_var{}, []} -> - %% transform M#{} to is_map(M) - {Vpat,St2} = new_var(St1), - {Fpat,St3} = new_var(St2), - Cs = [#iclause{ - anno=A, - pats=[Vpat], - guard=[#icall{anno=#a{anno=A}, + {M2,Eps1,St2} = map_build_pairs(M1, Es0, A, St1), + M3 = case Es0 of + [] -> M1; + [_|_] -> M2 + end, + Cs = [#iclause{ + anno=#a{anno=[compiler_generated|A]}, + pats=[], + guard=[#icall{anno=#a{anno=A}, module=#c_literal{anno=A,val=erlang}, name=#c_literal{anno=A,val=is_map}, - args=[Vpat]}], - body=[Vpat]}], - Fc = fail_clause([Fpat], A, #c_literal{val=badarg}), - {#icase{anno=#a{anno=A},args=[M1],clauses=Cs,fc=Fc},Mps,St3}; - {_,_} -> - {Es1,Eps,St2} = map_pair_list(Es0, St1), - {ann_c_map(A,M1,Es1),Mps++Eps,St2} - end; - false -> throw({bad_map,bad_map}) + args=[M1]}], + body=[M3]}], + Fc = fail_clause([], [eval_failure|A], #c_literal{val=badarg}), + Eps = Eps0 ++ Eps1, + {#icase{anno=#a{anno=A},args=[],clauses=Cs,fc=Fc},Eps,St2}; + false -> + throw({bad_map,bad_map}) end. +map_build_pairs(Map, Es0, Ann, St0) -> + {Es,Pre,St1} = map_build_pairs_1(Es0, St0), + {ann_c_map(Ann, Map, Es),Pre,St1}. + +map_build_pairs_1([{Op0,L,K0,V0}|Es], St0) -> + {K,Pre0,St1} = safe(K0, St0), + {V,Pre1,St2} = safe(V0, St1), + {Pairs,Pre2,St3} = map_build_pairs_1(Es, St2), + As = lineno_anno(L, St3), + Op = map_op(Op0), + Pair = cerl:ann_c_map_pair(As, Op, K, V), + {[Pair|Pairs],Pre0++Pre1++Pre2,St3}; +map_build_pairs_1([], St) -> + {[],[],St}. + +map_op(map_field_assoc) -> #c_literal{val=assoc}; +map_op(map_field_exact) -> #c_literal{val=exact}. + is_valid_map_src(#c_literal{val = M}) when is_map(M) -> true; -is_valid_map_src(#c_map{}) -> true; is_valid_map_src(#c_var{}) -> true; is_valid_map_src(_) -> false. 
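%% ---------------------------------------------------------------------
%% Editorial sketch (not part of the patch; module and function names are
%% made up): a source-level view of what the new map_build_pairs/4 path
%% above accepts. Key and value expressions are lifted into pre-expressions
%% by safe/2, so computed and non-literal keys now compile; the removed code
%% used to emit a bad_map warning for such keys and generate a call that
%% raised badarg at run time.
%% ---------------------------------------------------------------------
-module(map_build_example).
-export([build/2]).

build(M, X) when is_map(M) ->
    %% A tuple key and a computed key; both are hoisted to safe values
    %% before the map node is built.
    M#{{tag, X} => X + 1,
       X * 2 => computed}.
%% ---------------------------------------------------------------------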
-map_pair_list(Es, St) -> - foldr(fun - ({map_field_assoc,L,K0,V0}, {Ces,Esp,St0}) -> - {K1,Ep0,St1} = safe(K0, St0), - K = ensure_valid_map_key(K1), - {V,Ep1,St2} = safe(V0, St1), - A = lineno_anno(L, St2), - Pair = #c_map_pair{op=#c_literal{val=assoc},anno=A,key=K,val=V}, - {[Pair|Ces],Ep0 ++ Ep1 ++ Esp,St2}; - ({map_field_exact,L,K0,V0}, {Ces,Esp,St0}) -> - {K1,Ep0,St1} = safe(K0, St0), - K = ensure_valid_map_key(K1), - {V,Ep1,St2} = safe(V0, St1), - A = lineno_anno(L, St2), - Pair = #c_map_pair{op=#c_literal{val=exact},anno=A,key=K,val=V}, - {[Pair|Ces],Ep0 ++ Ep1 ++ Esp,St2} - end, {[],[],St}, Es). - -ensure_valid_map_key(K0) -> - case coalesced_map_key(K0) of - {ok,K1} -> K1; - error -> throw({bad_map,bad_map_key}) - end. - -coalesced_map_key(#c_literal{}=K) -> {ok,K}; -%% Dialyzer hack redux -%% DO coalesce tuples and list in maps for dialyzer -%% Dialyzer tries to break this apart, don't let it -coalesced_map_key(#c_tuple{}=K) -> - case core_lib:is_literal(K) of - true -> {ok,cerl:fold_literal(K)}; - false -> error - end; -coalesced_map_key(#c_cons{}=K) -> - case core_lib:is_literal(K) of - true -> {ok,cerl:fold_literal(K)}; - false -> error - end; -coalesced_map_key(_) -> error. - %% try_exception([ExcpClause], St) -> {[ExcpVar],Handler,St}. try_exception(Ecs0, St0) -> %% Note that Tag is not needed for rethrow - it is already in Info. {Evs,St1} = new_vars(3, St0), % Tag, Value, Info - {Ecs1,St2} = clauses(Ecs0, St1), + {Ecs1,Ceps,St2} = clauses(Ecs0, St1), [_,Value,Info] = Evs, Ec = #iclause{anno=#a{anno=[compiler_generated]}, pats=[c_tuple(Evs)],guard=[#c_literal{val=true}], @@ -836,15 +817,15 @@ try_exception(Ecs0, St0) -> name=#c_literal{val=raise}, args=[Info,Value]}]}, Hs = [#icase{anno=#a{},args=[c_tuple(Evs)],clauses=Ecs1,fc=Ec}], - {Evs,Hs,St2}. + {Evs,Ceps++Hs,St2}. try_after(As, St0) -> %% See above. - {Evs,St1} = new_vars(3, St0), % Tag, Value, Info + {Evs,St1} = new_vars(3, St0), % Tag, Value, Info [_,Value,Info] = Evs, - B = As ++ [#iprimop{anno=#a{}, %Must have an #a{} - name=#c_literal{val=raise}, - args=[Info,Value]}], + B = As ++ [#iprimop{anno=#a{}, % Must have an #a{} + name=#c_literal{val=raise}, + args=[Info,Value]}], Ec = #iclause{anno=#a{anno=[compiler_generated]}, pats=[c_tuple(Evs)],guard=[#c_literal{val=true}], body=B}, @@ -978,20 +959,21 @@ bitstr({bin_element,_,E0,Size0,[Type,{unit,Unit}|Flags]}, St0) -> fun_tq({_,_,Name}=Id, Cs0, L, St0, NameInfo) -> Arity = clause_arity(hd(Cs0)), - {Cs1,St1} = clauses(Cs0, St0), + {Cs1,Ceps,St1} = clauses(Cs0, St0), {Args,St2} = new_vars(Arity, St1), {Ps,St3} = new_vars(Arity, St2), %Need new variables here - Anno = lineno_anno(L, St3), + Anno = full_anno(L, St3), Fc = function_clause(Ps, Anno, {Name,Arity}), Fun = #ifun{anno=#a{anno=Anno}, id=[{id,Id}], %We KNOW! vars=Args,clauses=Cs1,fc=Fc,name=NameInfo}, - {Fun,[],St3}. + {Fun,Ceps,St3}. %% lc_tq(Line, Exp, [Qualifier], Mc, State) -> {LetRec,[PreExp],State}. %% This TQ from Simon PJ pp 127-138. 
-lc_tq(Line, E, [#igen{anno=GAnno,acc_pat=AccPat,acc_guard=AccGuard, +lc_tq(Line, E, [#igen{anno=GAnno,ceps=Ceps, + acc_pat=AccPat,acc_guard=AccGuard, skip_pat=SkipPat,tail=Tail,tail_pat=TailPat, arg={Pre,Arg}}|Qs], Mc, St0) -> {Name,St1} = new_fun_name("lc", St0), @@ -1026,7 +1008,7 @@ lc_tq(Line, E, [#igen{anno=GAnno,acc_pat=AccPat,acc_guard=AccGuard, Fun = #ifun{anno=LAnno,id=[],vars=[Var],clauses=Cs,fc=Fc}, {#iletrec{anno=LAnno#a{anno=[list_comprehension|LA]},defs=[{{Name,1},Fun}], body=Pre ++ [#iapply{anno=LAnno,op=F,args=[Arg]}]}, - [],St4}; + Ceps,St4}; lc_tq(Line, E, [#ifilter{}=Filter|Qs], Mc, St) -> filter_tq(Line, E, Filter, Mc, St, Qs, fun lc_tq/5); lc_tq(Line, E0, [], Mc0, St0) -> @@ -1040,7 +1022,7 @@ lc_tq(Line, E0, [], Mc0, St0) -> %% This TQ from Gustafsson ERLANG'05. %% More could be transformed before calling bc_tq. -bc_tq(Line, Exp, Qs0, _, St0) -> +bc_tq(Line, Exp, Qs0, St0) -> {BinVar,St1} = new_var(St0), {Sz,SzPre,St2} = bc_initial_size(Exp, Qs0, St1), {Qs,St3} = preprocess_quals(Line, Qs0, St2), @@ -1051,7 +1033,8 @@ bc_tq(Line, Exp, Qs0, _, St0) -> args=[Sz]}}] ++ BcPre, {E,Pre,St}. -bc_tq1(Line, E, [#igen{anno=GAnno,acc_pat=AccPat,acc_guard=AccGuard, +bc_tq1(Line, E, [#igen{anno=GAnno,ceps=Ceps, + acc_pat=AccPat,acc_guard=AccGuard, skip_pat=SkipPat,tail=Tail,tail_pat=TailPat, arg={Pre,Arg}}|Qs], Mc, St0) -> {Name,St1} = new_fun_name("lbc", St0), @@ -1089,7 +1072,7 @@ bc_tq1(Line, E, [#igen{anno=GAnno,acc_pat=AccPat,acc_guard=AccGuard, Fun = #ifun{anno=LAnno,id=[],vars=Vars,clauses=Cs,fc=Fc}, {#iletrec{anno=LAnno#a{anno=[list_comprehension|LA]},defs=[{{Name,2},Fun}], body=Pre ++ [#iapply{anno=LAnno,op=F,args=[Arg,Mc]}]}, - [],St4}; + Ceps,St4}; bc_tq1(Line, E, [#ifilter{}=Filter|Qs], Mc, St) -> filter_tq(Line, E, Filter, Mc, St, Qs, fun bc_tq1/5); bc_tq1(_, {bin,Bl,Elements}, [], AccVar, St0) -> @@ -1153,7 +1136,7 @@ preprocess_quals(Line, [Q|Qs0], St0, Acc) -> {Gen,St} = generator(Line, Q, Gs, St0), preprocess_quals(Line, Qs, St, [Gen|Acc]); false -> - LAnno = #a{anno=lineno_anno(get_anno(Q), St0)}, + LAnno = #a{anno=lineno_anno(get_qual_anno(Q), St0)}, case is_guard_test(Q) of true -> %% When a filter is a guard test, its argument in the @@ -1178,6 +1161,11 @@ is_generator({generate,_,_,_}) -> true; is_generator({b_generate,_,_,_}) -> true; is_generator(_) -> false. +%% Retrieve the annotation from an Erlang AST form. +%% (Use get_anno/1 to retrieve the annotation from Core Erlang forms). + +get_qual_anno(Abstract) -> element(2, Abstract). + %% %% Generators are abstracted as sextuplets: %% - acc_pat is the accumulator pattern, e.g. [Pat|Tail] for Pat <- Expr. @@ -1200,7 +1188,7 @@ is_generator(_) -> false. 
generator(Line, {generate,Lg,P0,E}, Gs, St0) -> LA = lineno_anno(Line, St0), GA = lineno_anno(Lg, St0), - {Head,St1} = list_gen_pattern(P0, Line, St0), + {Head,Ceps,St1} = list_gen_pattern(P0, Line, St0), {[Tail,Skip],St2} = new_vars(2, St1), {Cg,St3} = lc_guard_tests(Gs, St2), {AccPat,SkipPat} = case Head of @@ -1220,25 +1208,27 @@ generator(Line, {generate,Lg,P0,E}, Gs, St0) -> ann_c_cons(LA, Skip, Tail)} end, {Ce,Pre,St4} = safe(E, St3), - Gen = #igen{anno=#a{anno=GA},acc_pat=AccPat,acc_guard=Cg,skip_pat=SkipPat, + Gen = #igen{anno=#a{anno=GA},ceps=Ceps, + acc_pat=AccPat,acc_guard=Cg,skip_pat=SkipPat, tail=Tail,tail_pat=#c_literal{anno=LA,val=[]},arg={Pre,Ce}}, {Gen,St4}; generator(Line, {b_generate,Lg,P,E}, Gs, St0) -> LA = lineno_anno(Line, St0), GA = lineno_anno(Lg, St0), - Cp = #c_binary{segments=Segs} = pattern(P, St0), + {Cp = #c_binary{segments=Segs},[],St1} = pattern(P, St0), + %% The function append_tail_segment/2 keeps variable patterns as-is, making %% it possible to have the same skip clause removal as with list generators. - {AccSegs,Tail,TailSeg,St1} = append_tail_segment(Segs, St0), + {AccSegs,Tail,TailSeg,St2} = append_tail_segment(Segs, St1), AccPat = Cp#c_binary{segments=AccSegs}, - {Cg,St2} = lc_guard_tests(Gs, St1), - {SkipSegs,St3} = emasculate_segments(AccSegs, St2), + {Cg,St3} = lc_guard_tests(Gs, St2), + {SkipSegs,St4} = emasculate_segments(AccSegs, St3), SkipPat = Cp#c_binary{segments=SkipSegs}, - {Ce,Pre,St4} = safe(E, St3), + {Ce,Pre,St5} = safe(E, St4), Gen = #igen{anno=#a{anno=GA},acc_pat=AccPat,acc_guard=Cg,skip_pat=SkipPat, tail=Tail,tail_pat=#c_binary{anno=LA,segments=[TailSeg]}, arg={Pre,Ce}}, - {Gen,St4}. + {Gen,St5}. append_tail_segment(Segs, St0) -> {Var,St} = new_var(St0), @@ -1267,9 +1257,9 @@ lc_guard_tests(Gs0, St0) -> list_gen_pattern(P0, Line, St) -> try - {pattern(P0, St),St} + pattern(P0,St) catch - nomatch -> {nomatch,add_warning(Line, nomatch, St)} + nomatch -> {nomatch,[],add_warning(Line, nomatch, St)} end. %%% @@ -1489,9 +1479,22 @@ force_novars(#iapply{}=App, St) -> {App,[],St}; force_novars(#icall{}=Call, St) -> {Call,[],St}; force_novars(#ifun{}=Fun, St) -> {Fun,[],St}; %These are novars too force_novars(#ibinary{}=Bin, St) -> {Bin,[],St}; +force_novars(#c_map{}=Bin, St) -> {Bin,[],St}; force_novars(Ce, St) -> force_safe(Ce, St). + +%% safe_pattern_expr(Expr, State) -> {Cexpr,[PreExpr],State}. +%% only literals and variables are safe expressions in patterns +safe_pattern_expr(E,St0) -> + case safe(E,St0) of + {#c_var{},_,_}=Safe -> Safe; + {#c_literal{},_,_}=Safe -> Safe; + {Ce,Eps,St1} -> + {V,St2} = new_var(St1), + {V,Eps++[#iset{var=V,arg=Ce}],St2} + end. + %% safe(Expr, State) -> {Safe,[PreExpr],State}. %% Generate an internal safe expression. These are simples without %% binaries which can fail. At this level we do not need to do a @@ -1566,90 +1569,90 @@ fold_match({match,L,P0,E0}, P) -> {{match,L,P0,P1},E1}; fold_match(E, P) -> {P,E}. -%% pattern(Pattern, State) -> CorePat. +%% pattern(Pattern, State) -> {CorePat,[PreExp],State}. %% Transform a pattern by removing line numbers. We also normalise %% aliases in patterns to standard form, {alias,Pat,[Var]}. 
- -pattern({var,L,V}, St) -> #c_var{anno=lineno_anno(L, St),name=V}; -pattern({char,L,C}, St) -> #c_literal{anno=lineno_anno(L, St),val=C}; -pattern({integer,L,I}, St) -> #c_literal{anno=lineno_anno(L, St),val=I}; -pattern({float,L,F}, St) -> #c_literal{anno=lineno_anno(L, St),val=F}; -pattern({atom,L,A}, St) -> #c_literal{anno=lineno_anno(L, St),val=A}; -pattern({string,L,S}, St) -> #c_literal{anno=lineno_anno(L, St),val=S}; -pattern({nil,L}, St) -> #c_literal{anno=lineno_anno(L, St),val=[]}; +%% +%% In patterns we may have expressions +%% 1) Binaries -> #c_bitstr{size=Expr} +%% 2) Maps -> #c_map_pair{key=Expr} +%% +%% Both of these may generate pre-expressions since only bound variables +%% or literals are allowed for these in core patterns. +%% +%% Therefor, we need to drag both the state and the collection of pre-expression +%% around in the whole pattern transformation tree. + +pattern({var,L,V}, St) -> {#c_var{anno=lineno_anno(L, St),name=V},[],St}; +pattern({char,L,C}, St) -> {#c_literal{anno=lineno_anno(L, St),val=C},[],St}; +pattern({integer,L,I}, St) -> {#c_literal{anno=lineno_anno(L, St),val=I},[],St}; +pattern({float,L,F}, St) -> {#c_literal{anno=lineno_anno(L, St),val=F},[],St}; +pattern({atom,L,A}, St) -> {#c_literal{anno=lineno_anno(L, St),val=A},[],St}; +pattern({string,L,S}, St) -> {#c_literal{anno=lineno_anno(L, St),val=S},[],St}; +pattern({nil,L}, St) -> {#c_literal{anno=lineno_anno(L, St),val=[]},[],St}; pattern({cons,L,H,T}, St) -> - annotate_cons(lineno_anno(L, St), pattern(H, St), pattern(T, St), St); + {Ph,Eps1,St1} = pattern(H, St), + {Pt,Eps2,St2} = pattern(T, St1), + {annotate_cons(lineno_anno(L, St), Ph, Pt, St2),Eps1++Eps2,St2}; pattern({tuple,L,Ps}, St) -> - annotate_tuple(record_anno(L, St), pattern_list(Ps, St), St); -pattern({map,L,Ps}, St) -> - #c_map{anno=lineno_anno(L, St), es=pattern_map_pairs(Ps, St)}; + {Ps1,Eps,St1} = pattern_list(Ps,St), + {annotate_tuple(record_anno(L, St), Ps1, St),Eps,St1}; +pattern({map,L,Pairs}, St0) -> + {Ps,Eps,St1} = pattern_map_pairs(Pairs, St0), + {#c_map{anno=lineno_anno(L, St1),es=Ps,is_pat=true},Eps,St1}; pattern({bin,L,Ps}, St) -> %% We don't create a #ibinary record here, since there is %% no need to hold any used/new annotations in a pattern. - #c_binary{anno=lineno_anno(L, St),segments=pat_bin(Ps, St)}; + {#c_binary{anno=lineno_anno(L, St),segments=pat_bin(Ps, St)},[],St}; pattern({match,_,P1,P2}, St) -> - pat_alias(pattern(P1, St), pattern(P2, St)). + {Cp1,Eps1,St1} = pattern(P1,St), + {Cp2,Eps2,St2} = pattern(P2,St1), + {pat_alias(Cp1,Cp2),Eps1++Eps2,St2}. %% pattern_map_pairs([MapFieldExact],State) -> [#c_map_pairs{}] pattern_map_pairs(Ps, St) -> - %% check literal key uniqueness (dict is needed) - %% pattern all pairs - {CMapPairs, Kdb} = lists:mapfoldl(fun - (P,Kdbi) -> - #c_map_pair{key=Ck,val=Cv} = CMapPair = pattern_map_pair(P,St), - K = core_lib:literal_value(Ck), - case dict:find(K,Kdbi) of - {ok, Vs} -> - {CMapPair, dict:store(K,[Cv|Vs],Kdbi)}; - _ -> - {CMapPair, dict:store(K,[Cv],Kdbi)} - end - end, dict:new(), Ps), - pattern_alias_map_pairs(CMapPairs,Kdb,dict:new(),St). 
- -pattern_alias_map_pairs([],_,_,_) -> []; -pattern_alias_map_pairs([#c_map_pair{key=Ck}=Pair|Pairs],Kdb,Kset,St) -> - %% alias same keys if needed - K = core_lib:literal_value(Ck), - case dict:find(K,Kset) of - {ok,processed} -> - pattern_alias_map_pairs(Pairs,Kdb,Kset,St); - _ -> - Cvs = dict:fetch(K,Kdb), - Cv = pattern_alias_map_pair_patterns(Cvs), - Kset1 = dict:store(K, processed, Kset), - [Pair#c_map_pair{val=Cv}|pattern_alias_map_pairs(Pairs,Kdb,Kset1,St)] - end. - -pattern_alias_map_pair_patterns([Cv]) -> Cv; -pattern_alias_map_pair_patterns([Cv1,Cv2|Cvs]) -> - pattern_alias_map_pair_patterns([pat_alias(Cv1,Cv2)|Cvs]). - -pattern_map_pair({map_field_exact,L,K,V},St) -> - #c_map_pair{anno=lineno_anno(L, St), - op=#c_literal{val=exact}, - key=pattern_map_key(K,St), - val=pattern(V, St)}. - -pattern_map_key(K,St) -> - %% Throws 'nomatch' if the key can't be a literal - %% this will be a cryptic error message but it is better than nothing - case expr(K,St) of - {Key0,[],_} -> - %% Dialyzer hack redux - case coalesced_map_key(Key0) of - {ok,Key1} -> Key1; - error -> throw(nomatch) - end; - _ -> throw(nomatch) - end. + %% check literal key uniqueness + %% - guaranteed via aliasing map pairs + %% pattern all pairs in two steps + %% 1) Construct Core Pattern + %% 2) Alias Keys in Core Pattern + {CMapPairs, {Eps,St1}} = lists:mapfoldl(fun + (P,{EpsM,Sti0}) -> + {CMapPair,EpsP,Sti1} = pattern_map_pair(P,Sti0), + {CMapPair, {EpsM++EpsP,Sti1}} + end, {[],St}, Ps), + {pat_alias_map_pairs(CMapPairs),Eps,St1}. + +pattern_map_pair({map_field_exact,L,K,V}, St0) -> + {Ck,EpsK,St1} = safe_pattern_expr(K, St0), + {Cv,EpsV,St2} = pattern(V, St1), + {#c_map_pair{anno=lineno_anno(L, St2), + op=#c_literal{val=exact}, + key=Ck, + val=Cv},EpsK++EpsV,St2}. + +pat_alias_map_pairs(Ps) -> + D = foldl(fun(#c_map_pair{key=K0}=Pair, D0) -> + K = cerl:set_ann(K0, []), + dict:append(K, Pair, D0) + end, dict:new(), Ps), + pat_alias_map_pairs_1(dict:to_list(D)). + +pat_alias_map_pairs_1([{_,[#c_map_pair{val=V0}=Pair|Vs]}|T]) -> + V = foldl(fun(#c_map_pair{val=V}, Pat) -> + pat_alias(V, Pat) + end, V0, Vs), + [Pair#c_map_pair{val=V}|pat_alias_map_pairs_1(T)]; +pat_alias_map_pairs_1([]) -> []. %% pat_bin([BinElement], State) -> [BinSeg]. pat_bin(Ps, St) -> [pat_segment(P, St) || P <- Ps]. -pat_segment({bin_element,_,Term,Size,[Type,{unit,Unit}|Flags]}, St) -> - #c_bitstr{val=pattern(Term, St),size=pattern(Size, St), +pat_segment({bin_element,_,Val,Size,[Type,{unit,Unit}|Flags]}, St) -> + {Pval,[],St1} = pattern(Val,St), + {Psize,[],_St2} = pattern(Size,St1), + #c_bitstr{val=Pval,size=Psize, unit=#c_literal{val=Unit}, type=#c_literal{val=Type}, flags=#c_literal{val=Flags}}. @@ -1657,38 +1660,55 @@ pat_segment({bin_element,_,Term,Size,[Type,{unit,Unit}|Flags]}, St) -> %% pat_alias(CorePat, CorePat) -> AliasPat. %% Normalise aliases. Trap bad aliases by throwing 'nomatch'. 
-pat_alias(#c_var{name=V1}, P2) -> #c_alias{var=#c_var{name=V1},pat=P2}; -pat_alias(P1, #c_var{name=V2}) -> #c_alias{var=#c_var{name=V2},pat=P1}; -pat_alias(#c_cons{}=Cons, #c_literal{anno=A,val=[H|T]}=S) -> - pat_alias(Cons, ann_c_cons_skel(A, #c_literal{anno=A,val=H}, - S#c_literal{val=T})); -pat_alias(#c_literal{anno=A,val=[H|T]}=S, #c_cons{}=Cons) -> - pat_alias(ann_c_cons_skel(A, #c_literal{anno=A,val=H}, - S#c_literal{val=T}), Cons); -pat_alias(#c_cons{anno=Anno,hd=H1,tl=T1}, #c_cons{hd=H2,tl=T2}) -> - ann_c_cons(Anno, pat_alias(H1, H2), pat_alias(T1, T2)); -pat_alias(#c_tuple{anno=Anno,es=Es1}, #c_literal{val=T}) when is_tuple(T) -> - Es2 = [#c_literal{val=E} || E <- tuple_to_list(T)], - ann_c_tuple(Anno, pat_alias_list(Es1, Es2)); -pat_alias(#c_literal{anno=Anno,val=T}, #c_tuple{es=Es2}) when is_tuple(T) -> - Es1 = [#c_literal{val=E} || E <- tuple_to_list(T)], - ann_c_tuple(Anno, pat_alias_list(Es1, Es2)); -pat_alias(#c_tuple{anno=Anno,es=Es1}, #c_tuple{es=Es2}) -> - ann_c_tuple(Anno, pat_alias_list(Es1, Es2)); -pat_alias(#c_alias{var=V1,pat=P1}, - #c_alias{var=V2,pat=P2}) -> - if V1 =:= V2 -> #c_alias{var=V1,pat=pat_alias(P1, P2)}; - true -> #c_alias{var=V1,pat=#c_alias{var=V2,pat=pat_alias(P1, P2)}} +pat_alias(#c_var{name=V1}=P, #c_var{name=V1}) -> P; +pat_alias(#c_var{name=V1}=Var, + #c_alias{var=#c_var{name=V2},pat=Pat}=Alias) -> + if + V1 =:= V2 -> + Alias; + true -> + Alias#c_alias{pat=pat_alias(Var, Pat)} end; -pat_alias(#c_alias{var=V1,pat=P1}, P2) -> - #c_alias{var=V1,pat=pat_alias(P1, P2)}; -pat_alias(P1, #c_alias{var=V2,pat=P2}) -> - #c_alias{var=V2,pat=pat_alias(P1, P2)}; +pat_alias(#c_var{}=P1, P2) -> #c_alias{var=P1,pat=P2}; + +pat_alias(#c_alias{var=#c_var{name=V1}}=Alias, #c_var{name=V1}) -> + Alias; +pat_alias(#c_alias{var=#c_var{name=V1}=Var1,pat=P1}, + #c_alias{var=#c_var{name=V2}=Var2,pat=P2}) -> + Pat = pat_alias(P1, P2), + if + V1 =:= V2 -> + #c_alias{var=Var1,pat=Pat}; + true -> + pat_alias(Var1, pat_alias(Var2, Pat)) + end; +pat_alias(#c_alias{var=#c_var{}=Var,pat=P1}, P2) -> + #c_alias{var=Var,pat=pat_alias(P1, P2)}; + +pat_alias(#c_map{es=Es1}=M, #c_map{es=Es2}) -> + M#c_map{es=pat_alias_map_pairs(Es1 ++ Es2)}; + +pat_alias(P1, #c_var{}=Var) -> + #c_alias{var=Var,pat=P1}; +pat_alias(P1, #c_alias{pat=P2}=Alias) -> + Alias#c_alias{pat=pat_alias(P1, P2)}; + pat_alias(P1, P2) -> - case {set_anno(P1, []),set_anno(P2, [])} of - {P,P} -> P; + %% Aliases between binaries are not allowed, so the only + %% legal patterns that remain are data patterns. + case cerl:is_data(P1) andalso cerl:is_data(P2) of + false -> throw(nomatch); + true -> ok + end, + Type = cerl:data_type(P1), + case cerl:data_type(P2) of + Type -> ok; _ -> throw(nomatch) - end. + end, + Es1 = cerl:data_es(P1), + Es2 = cerl:data_es(P2), + Es = pat_alias_list(Es1, Es2), + cerl:make_data(Type, Es). %% pat_alias_list([A1], [A2]) -> [A]. @@ -1697,9 +1717,15 @@ pat_alias_list([A1|A1s], [A2|A2s]) -> pat_alias_list([], []) -> []; pat_alias_list(_, _) -> throw(nomatch). -%% pattern_list([P], State) -> [P]. +%% pattern_list([P], State) -> {[P],Exprs,St} + +pattern_list([P0|Ps0], St0) -> + {P1,Eps,St1} = pattern(P0, St0), + {Ps1,Epsl,St2} = pattern_list(Ps0, St1), + {[P1|Ps1], Eps ++ Epsl, St2}; +pattern_list([], St) -> + {[],[],St}. -pattern_list(Ps, St) -> [pattern(P, St) || P <- Ps]. %% make_vars([Name]) -> [{Var,Name}]. @@ -1721,7 +1747,7 @@ new_var_name(#core{vcount=C}=St) -> new_var(St) -> new_var([], St). 
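%% ---------------------------------------------------------------------
%% Editorial sketch (hypothetical module, not part of the patch): patterns
%% that exercise the rewritten pat_alias/2 above. Aliased map patterns are
%% merged key by key through pat_alias_map_pairs/1, and the remaining data
%% patterns are decomposed generically with cerl:data_type/1 and
%% cerl:data_es/1.
%% ---------------------------------------------------------------------
-module(pat_alias_example).
-export([f/1, g/1]).

%% Both aliased map patterns constrain the key 'key', so their values are
%% aliased as well: A = {tag, B}.
f(#{key := A} = #{key := {tag, B}} = M) ->
    {A, B, map_size(M)}.

%% Tuple and cons patterns aliased element-wise.
g({X, [H | _]} = {1, T}) ->
    {X, H, T}.
%% ---------------------------------------------------------------------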
-new_var(Anno, St0) -> +new_var(Anno, St0) when is_list(Anno) -> {New,St} = new_var_name(St0), {#c_var{anno=Anno,name=New},St}. @@ -1779,7 +1805,7 @@ uclauses(Lcs, Ks, St0) -> uclause(Cl0, Ks, St0) -> {Cl1,_Pvs,Used,New,St1} = uclause(Cl0, Ks, Ks, St0), - A0 = get_ianno(Cl1), + A0 = get_anno(Cl1), A = A0#a{us=Used,ns=New}, {Cl1#iclause{anno=A},St1}. @@ -1948,11 +1974,11 @@ uexpr(#ibinary{anno=A,segments=Ss}, _, St) -> uexpr(#c_literal{}=Lit, _, St) -> Anno = get_anno(Lit), {set_anno(Lit, #a{us=[],anno=Anno}),St}; -uexpr(Lit, _, St) -> - true = is_simple(Lit), %Sanity check! - Vs = lit_vars(Lit), - Anno = get_anno(Lit), - {set_anno(Lit, #a{us=Vs,anno=Anno}),St}. +uexpr(Simple, _, St) -> + true = is_simple(Simple), %Sanity check! + Vs = lit_vars(Simple), + Anno = get_anno(Simple), + {#isimple{anno=#a{us=Vs,anno=Anno},term=Simple},St}. uexpr_list(Les0, Ks, St0) -> mapfoldl(fun (Le, St) -> uexpr(Le, Ks, St) end, St0, Les0). @@ -1966,7 +1992,7 @@ ufun_clauses(Lcs, Ks, St0) -> ufun_clause(Cl0, Ks, St0) -> {Cl1,Pvs,Used,_,St1} = uclause(Cl0, [], Ks, St0), - A0 = get_ianno(Cl1), + A0 = get_anno(Cl1), A = A0#a{us=subtract(intersection(Used, Ks), Pvs),ns=[]}, {Cl1#iclause{anno=A},St1}. @@ -1999,9 +2025,14 @@ upattern(#c_tuple{es=Es0}=Tuple, Ks, St0) -> upattern(#c_map{es=Es0}=Map, Ks, St0) -> {Es1,Esg,Esv,Eus,St1} = upattern_list(Es0, Ks, St0), {Map#c_map{es=Es1},Esg,Esv,Eus,St1}; -upattern(#c_map_pair{op=#c_literal{val=exact},val=V0}=MapPair, Ks, St0) -> - {V,Vg,Vv,Vu,St1} = upattern(V0, Ks, St0), - {MapPair#c_map_pair{val=V},Vg,Vv,Vu,St1}; +upattern(#c_map_pair{op=#c_literal{val=exact},key=K0,val=V0}=Pair,Ks,St0) -> + {V,Vg,Vn,Vu,St1} = upattern(V0, Ks, St0), + % A variable key must be considered used here + Ku = case K0 of + #c_var{name=Name} -> [Name]; + _ -> [] + end, + {Pair#c_map_pair{val=V},Vg,Vn,union(Ku,Vu),St1}; upattern(#c_binary{segments=Es0}=Bin, Ks, St0) -> {Es1,Esg,Esv,Eus,St1} = upat_bin(Es0, Ks, St0), {Bin#c_binary{segments=Es1},Esg,Esv,Eus,St1}; @@ -2124,7 +2155,8 @@ cguard(Gs, St0) -> cexprs([#iset{var=#c_var{name=Name}=Var}=Iset], As, St) -> %% Make return value explicit, and make Var true top level. - cexprs([Iset,Var#c_var{anno=#a{us=[Name]}}], As, St); + Isimple = #isimple{anno=#a{us=[Name]},term=Var}, + cexprs([Iset,Isimple], As, St); cexprs([Le], As, St0) -> {Ce,Es,Us,St1} = cexpr(Le, As, St0), Exp = make_vars(As), %The export variables @@ -2239,12 +2271,9 @@ cexpr(#c_literal{}=Lit, _As, St) -> Anno = get_anno(Lit), Vs = Anno#a.us, {set_anno(Lit, Anno#a.anno),[],Vs,St}; -cexpr(Lit, _As, St) -> - true = is_simple(Lit), %Sanity check! - Anno = get_anno(Lit), - Vs = Anno#a.us, - %%Vs = lit_vars(Lit), - {set_anno(Lit, Anno#a.anno),[],Vs,St}. +cexpr(#isimple{anno=#a{us=Vs},term=Simple}, _As, St) -> + true = is_simple(Simple), %Sanity check! + {Simple,[],Vs,St}. cfun(#ifun{anno=A,id=Id,vars=Args,clauses=Lcs,fc=Lfc}, _As, St0) -> {Ccs,St1} = cclauses(Lcs, [], St0), %NEVER export! @@ -2267,11 +2296,6 @@ lit_vars(#c_map_pair{key=K,val=V}, Vs) -> lit_vars(K, lit_vars(V, Vs)); lit_vars(#c_var{name=V}, Vs) -> add_element(V, Vs); lit_vars(_, Vs) -> Vs. %These are atomic -% lit_bin_vars(Segs, Vs) -> -% foldl(fun (#c_bitstr{val=V,size=S}, Vs0) -> -% lit_vars(V, lit_vars(S, Vs0)) -% end, Vs, Segs). - lit_list_vars(Ls) -> lit_list_vars(Ls, []). 
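%% ---------------------------------------------------------------------
%% Editorial sketch (hypothetical module, not part of the patch): why the
%% upattern/3 clause above has to count a variable key as used. Key is bound
%% before the match and is only read by the map pattern, so it must stay
%% live across the match instead of being treated as a fresh pattern
%% variable.
%% ---------------------------------------------------------------------
-module(var_key_example).
-export([fetch/2]).

fetch(Key0, Map) ->
    Key = {id, Key0},        %% Key becomes bound here ...
    #{Key := Value} = Map,   %% ... and is used (not rebound) as the key here
    {Key, Value}.
%% ---------------------------------------------------------------------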
lit_list_vars(Ls, Vs) -> @@ -2290,16 +2314,21 @@ record_anno(L, St) when L >= ?REC_OFFSET -> true -> [record | lineno_anno(L - ?REC_OFFSET, St)]; false -> - lineno_anno(L, St) + full_anno(L, St) end; record_anno(L, St) when L < -?REC_OFFSET -> case member(dialyzer, St#core.opts) of true -> [record | lineno_anno(L + ?REC_OFFSET, St)]; false -> - lineno_anno(L, St) + full_anno(L, St) end; record_anno(L, St) -> + full_anno(L, St). + +full_anno(L, #core{wanted=false}=St) -> + [result_not_wanted|lineno_anno(L, St)]; +full_anno(L, #core{wanted=true}=St) -> lineno_anno(L, St). lineno_anno(L, St) -> @@ -2311,12 +2340,6 @@ lineno_anno(L, St) -> [Line] ++ St#core.file end. -get_ianno(Ce) -> - case get_anno(Ce) of - #a{}=A -> A; - A when is_list(A) -> #a{anno=A} - end. - get_lineno_anno(Ce) -> case get_anno(Ce) of #a{anno=A} -> A; @@ -2372,8 +2395,6 @@ format_error(nomatch) -> "pattern cannot possibly match"; format_error(bad_binary) -> "binary construction will fail because of a type mismatch"; -format_error(bad_map_key) -> - "map construction will fail because of none literal key (large binaries are not literals)"; format_error(bad_map) -> "map construction will fail because of a type mismatch". diff --git a/lib/compiler/src/v3_kernel.erl b/lib/compiler/src/v3_kernel.erl index 40d2f72b4c..0ac1aaf158 100644 --- a/lib/compiler/src/v3_kernel.erl +++ b/lib/compiler/src/v3_kernel.erl @@ -131,12 +131,12 @@ module(#c_module{anno=A,name=M,exports=Es,attrs=As,defs=Fs}, _Options) -> {ok,#k_mdef{anno=A,name=M#c_literal.val,exports=Kes,attributes=Kas, body=Kfs ++ St#kern.funs},lists:sort(St#kern.ws)}. -attributes([{#c_literal{val=Name},Val}|As]) -> +attributes([{#c_literal{val=Name},#c_literal{val=Val}}|As]) -> case include_attribute(Name) of false -> attributes(As); true -> - [{Name,core_lib:literal_value(Val)}|attributes(As)] + [{Name,Val}|attributes(As)] end; attributes([]) -> []. @@ -273,17 +273,7 @@ expr(#c_tuple{anno=A,es=Ces}, Sub, St0) -> {Kes,Ep,St1} = atomic_list(Ces, Sub, St0), {#k_tuple{anno=A,es=Kes},Ep,St1}; expr(#c_map{anno=A,arg=Var,es=Ces}, Sub, St0) -> - try expr_map(A,Var,Ces,Sub,St0) of - {_,_,_}=Res -> Res - catch - throw:bad_map -> - St1 = add_warning(get_line(A), bad_map, A, St0), - Erl = #c_literal{val=erlang}, - Name = #c_literal{val=error}, - Args = [#c_literal{val=badarg}], - Error = #c_call{anno=A,module=Erl,name=Name,args=Args}, - expr(Error, Sub, St1) - end; + expr_map(A, Var, Ces, Sub, St0); expr(#c_binary{anno=A,segments=Cv}, Sub, St0) -> try atomic_bin(Cv, Sub, St0) of {Kv,Ep,St1} -> @@ -506,82 +496,87 @@ translate_fc(Args) -> [#c_literal{val=function_clause},make_list(Args)]. expr_map(A,Var0,Ces,Sub,St0) -> - %% An extra pass of validation of Map src because of inlining {Var,Mps,St1} = expr(Var0, Sub, St0), - case is_valid_map_src(Var) of - true -> - {Km,Eps,St2} = map_split_pairs(A, Var, Ces, Sub, St1), - {Km,Eps++Mps,St2}; - false -> throw(bad_map) - end. - -is_valid_map_src(#k_map{}) -> true; -is_valid_map_src(#k_literal{val=M}) when is_map(M) -> true; -is_valid_map_src(#k_var{}) -> true; -is_valid_map_src(_) -> false. + {Km,Eps,St2} = map_split_pairs(A, Var, Ces, Sub, St1), + {Km,Eps++Mps,St2}. map_split_pairs(A, Var, Ces, Sub, St0) -> - %% two steps - %% 1. force variables - %% 2. remove multiples - Pairs0 = [{Op,K,V} || #c_map_pair{op=#c_literal{val=Op},key=K,val=V} <- Ces], + %% 1. Force variables. + %% 2. Group adjacent pairs with literal keys. + %% 3. Within each such group, remove multiple assignments to the same key. + %% 4. 
Partition each group according to operator ('=>' and ':='). + Pairs0 = [{Op,K,V} || + #c_map_pair{op=#c_literal{val=Op},key=K,val=V} <- Ces], {Pairs,Esp,St1} = foldr(fun ({Op,K0,V0}, {Ops,Espi,Sti0}) when Op =:= assoc; Op =:= exact -> - {K,[],Sti1} = expr(K0, Sub, Sti0), - {V,Ep,Sti2} = atomic(V0, Sub, Sti1), - {[{Op,K,V}|Ops],Ep ++ Espi,Sti2} + {K,Eps1,Sti1} = atomic(K0, Sub, Sti0), + {V,Eps2,Sti2} = atomic(V0, Sub, Sti1), + {[{Op,K,V}|Ops],Eps1 ++ Eps2 ++ Espi,Sti2} end, {[],[],St0}, Pairs0), - - case map_group_pairs(Pairs) of - {Assoc,[]} -> - Kes = [#k_map_pair{key=K,val=V}||{_,{assoc,K,V}} <- Assoc], - {#k_map{anno=A,op=assoc,var=Var,es=Kes},Esp,St1}; - {[],Exact} -> - Kes = [#k_map_pair{key=K,val=V}||{_,{exact,K,V}} <- Exact], - {#k_map{anno=A,op=exact,var=Var,es=Kes},Esp,St1}; - {Assoc,Exact} -> - Kes1 = [#k_map_pair{key=K,val=V}||{_,{assoc,K,V}} <- Assoc], - {Mvar,Em,St2} = force_atomic(#k_map{anno=A,op=assoc,var=Var,es=Kes1},St1), - Kes2 = [#k_map_pair{key=K,val=V}||{_,{exact,K,V}} <- Exact], - {#k_map{anno=A,op=exact,var=Mvar,es=Kes2},Esp ++ Em,St2} - + map_split_pairs_1(A, Var, Pairs, Esp, St1). + +map_split_pairs_1(A, Map0, [{Op,Key,Val}|Pairs1]=Pairs0, Esp0, St0) -> + {Map1,Em,St1} = force_atomic(Map0, St0), + case Key of + #k_var{} -> + %% Don't combine variable keys with other keys. + Kes = [#k_map_pair{key=Key,val=Val}], + Map = #k_map{anno=A,op=Op,var=Map1,es=Kes}, + map_split_pairs_1(A, Map, Pairs1, Esp0 ++ Em, St1); + _ -> + %% Literal key. Split off all literal keys. + {L,Pairs} = splitwith(fun({_,#k_var{},_}) -> false; + ({_,_,_}) -> true + end, Pairs0), + {Map,Esp,St2} = map_group_pairs(A, Map1, L, Esp0 ++ Em, St1), + map_split_pairs_1(A, Map, Pairs, Esp, St2) + end; +map_split_pairs_1(_, Map, [], Esp, St0) -> + {Map,Esp,St0}. + +map_group_pairs(A, Var, Pairs0, Esp, St0) -> + Pairs = map_remove_dup_keys(Pairs0), + Assoc = [#k_map_pair{key=K,val=V} || {_,{assoc,K,V}} <- Pairs], + Exact = [#k_map_pair{key=K,val=V} || {_,{exact,K,V}} <- Pairs], + case {Assoc,Exact} of + {[_|_],[]} -> + {#k_map{anno=A,op=assoc,var=Var,es=Assoc},Esp,St0}; + {[],[_|_]} -> + {#k_map{anno=A,op=exact,var=Var,es=Exact},Esp,St0}; + {[_|_],[_|_]} -> + Map = #k_map{anno=A,op=assoc,var=Var,es=Assoc}, + {Mvar,Em,St1} = force_atomic(Map, St0), + {#k_map{anno=A,op=exact,var=Mvar,es=Exact},Esp ++ Em,St1} end. -%% Group map by Assoc operations and Exact operations +map_remove_dup_keys(Es) -> + dict:to_list(map_remove_dup_keys(Es, dict:new())). -map_group_pairs(Es) -> - Groups = dict:to_list(map_group_pairs(Es,dict:new())), - partition(fun({_,{Op,_,_}}) -> Op =:= assoc end, Groups). - -map_group_pairs([{assoc,K,V}|Es0],Used0) -> - Used1 = case map_key_is_used(K,Used0) of - {ok, {assoc,_,_}} -> map_key_set_used(K,{assoc,K,V},Used0); - {ok, {exact,_,_}} -> map_key_set_used(K,{exact,K,V},Used0); - _ -> map_key_set_used(K,{assoc,K,V},Used0) - end, - map_group_pairs(Es0,Used1); -map_group_pairs([{exact,K,V}|Es0],Used0) -> - Used1 = case map_key_is_used(K,Used0) of - {ok, {assoc,_,_}} -> map_key_set_used(K,{assoc,K,V},Used0); - {ok, {exact,_,_}} -> map_key_set_used(K,{exact,K,V},Used0); - _ -> map_key_set_used(K,{exact,K,V},Used0) - end, - map_group_pairs(Es0,Used1); -map_group_pairs([],Used) -> - Used. - -map_key_set_used(K,How,Used) -> - dict:store(map_key_clean(K),How,Used). - -map_key_is_used(K,Used) -> - dict:find(map_key_clean(K),Used). 
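%% ---------------------------------------------------------------------
%% Editorial sketch (hypothetical module, not part of the patch): the
%% source-level effect of the pair splitting and duplicate-key removal in
%% this hunk. Repeated literal keys collapse (the last value wins), the
%% remaining literal keys are grouped per operator, and a variable key is
%% split into a single-pair update of its own. Note that b := V still
%% requires the key b to be present in M at run time.
%% ---------------------------------------------------------------------
-module(map_split_example).
-export([update/3]).

update(M, K, V) ->
    M#{a => 1, a => 2,   %% collapses to a => 2
       b := V,           %% ':=' group, kept separate from the '=>' group
       K => V}.          %% variable key, placed in its own update step
%% ---------------------------------------------------------------------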
+map_remove_dup_keys([{assoc,K0,V}|Es0],Used0) -> + K = map_key_clean(K0), + Op = case dict:find(K, Used0) of + {ok,{exact,_,_}} -> exact; + _ -> assoc + end, + Used1 = dict:store(K, {Op,K0,V}, Used0), + map_remove_dup_keys(Es0, Used1); +map_remove_dup_keys([{exact,K0,V}|Es0],Used0) -> + K = map_key_clean(K0), + Op = case dict:find(K, Used0) of + {ok,{assoc,_,_}} -> assoc; + _ -> exact + end, + Used1 = dict:store(K, {Op,K0,V}, Used0), + map_remove_dup_keys(Es0, Used1); +map_remove_dup_keys([], Used) -> Used. -%% Be explicit instead of using set_kanno(K,[]) -map_key_clean(#k_literal{val=V}) -> {k_literal,V}; -map_key_clean(#k_int{val=V}) -> {k_int,V}; -map_key_clean(#k_float{val=V}) -> {k_float,V}; -map_key_clean(#k_atom{val=V}) -> {k_atom,V}; -map_key_clean(#k_nil{}) -> k_nil. +%% Be explicit instead of using set_kanno(K, []). +map_key_clean(#k_var{name=V}) -> {var,V}; +map_key_clean(#k_literal{val=V}) -> {lit,V}; +map_key_clean(#k_int{val=V}) -> {lit,V}; +map_key_clean(#k_float{val=V}) -> {lit,V}; +map_key_clean(#k_atom{val=V}) -> {lit,V}; +map_key_clean(#k_nil{}) -> {lit,[]}. %% call_type(Module, Function, Arity) -> call | bif | apply | error. @@ -660,12 +655,12 @@ atomic_bin([#c_bitstr{anno=A,val=E0,size=S0,unit=U0,type=T,flags=Fs0}|Es0], {E,Ap1,St1} = atomic(E0, Sub, St0), {S1,Ap2,St2} = atomic(S0, Sub, St1), validate_bin_element_size(S1), - U1 = core_lib:literal_value(U0), - Fs1 = core_lib:literal_value(Fs0), + U1 = cerl:concrete(U0), + Fs1 = cerl:concrete(Fs0), {Es,Ap3,St3} = atomic_bin(Es0, Sub, St2), {#k_bin_seg{anno=A,size=S1, unit=U1, - type=core_lib:literal_value(T), + type=cerl:concrete(T), flags=Fs1, seg=E,next=Es}, Ap1++Ap2++Ap3,St3}; @@ -757,23 +752,22 @@ flatten_alias(#c_alias{var=V,pat=P}) -> flatten_alias(Pat) -> {[],Pat}. pattern_map_pairs(Ces0, Isub, Osub0, St0) -> - %% It is assumed that all core keys are literals - %% It is later assumed that these keys are term sorted - %% so we need to sort them here - Ces1 = lists:sort(fun - (#c_map_pair{key=CkA},#c_map_pair{key=CkB}) -> - A = core_lib:literal_value(CkA), - B = core_lib:literal_value(CkB), - erts_internal:cmp_term(A,B) < 0 - end, Ces0), %% pattern the pair keys and values as normal {Kes,{Osub1,St1}} = lists:mapfoldl(fun (#c_map_pair{anno=A,key=Ck,val=Cv},{Osubi0,Sti0}) -> - {Kk,Osubi1,Sti1} = pattern(Ck, Isub, Osubi0, Sti0), - {Kv,Osubi2,Sti2} = pattern(Cv, Isub, Osubi1, Sti1), + {Kk,[],Sti1} = expr(Ck, Isub, Sti0), + {Kv,Osubi2,Sti2} = pattern(Cv, Isub, Osubi0, Sti1), {#k_map_pair{anno=A,key=Kk,val=Kv},{Osubi2,Sti2}} - end, {Osub0, St0}, Ces1), - {Kes,Osub1,St1}. + end, {Osub0, St0}, Ces0), + %% It is later assumed that these keys are term sorted + %% so we need to sort them here + Kes1 = lists:sort(fun + (#k_map_pair{key=KkA},#k_map_pair{key=KkB}) -> + A = map_key_clean(KkA), + B = map_key_clean(KkB), + erts_internal:cmp_term(A,B) < 0 + end, Kes), + {Kes1,Osub1,St1}. pattern_bin(Es, Isub, Osub0, St0) -> {Kbin,{_,Osub},St} = pattern_bin_1(Es, Isub, Osub0, St0), @@ -793,8 +787,8 @@ pattern_bin_1([#c_bitstr{anno=A,val=E0,size=S0,unit=U,type=T,flags=Fs}|Es0], %% problems. 
#k_atom{val=bad_size} end, - U0 = core_lib:literal_value(U), - Fs0 = core_lib:literal_value(Fs), + U0 = cerl:concrete(U), + Fs0 = cerl:concrete(Fs), %%ok= io:fwrite("~w: ~p~n", [?LINE,{B0,S,U0,Fs0}]), {E,Osub1,St2} = pattern(E0, Isub0, Osub0, St1), Isub1 = case E0 of @@ -805,7 +799,7 @@ pattern_bin_1([#c_bitstr{anno=A,val=E0,size=S0,unit=U,type=T,flags=Fs}|Es0], {Es,{Isub,Osub},St3} = pattern_bin_1(Es0, Isub1, Osub1, St2), {#k_bin_seg{anno=A,size=S, unit=U0, - type=core_lib:literal_value(T), + type=cerl:concrete(T), flags=Fs0, seg=E,next=Es}, {Isub,Osub},St3}; @@ -1550,13 +1544,11 @@ arg_val(Arg, C) -> {set_kanno(S, []),U,T,Fs} end; #k_map{op=exact,es=Es} -> - Keys = [begin - #k_map_pair{key=#k_literal{val=Key}} = Pair, - Key - end || Pair <- Es], - %% multiple keys may have the same name - %% do not use ordsets - lists:sort(fun(A,B) -> erts_internal:cmp_term(A,B) < 0 end, Keys) + lists:sort(fun(A,B) -> + %% on the form K :: {'lit' | 'var', term()} + %% lit < var as intended + erts_internal:cmp_term(A,B) < 0 + end, [map_key_clean(Key) || #k_map_pair{key=Key} <- Es]) end. %% ubody_used_vars(Expr, State) -> [UsedVar] @@ -1943,6 +1935,7 @@ lit_list_vars(Ps) -> %% pat_vars(Pattern) -> {[UsedVarName],[NewVarName]}. %% Return variables in a pattern. All variables are new variables %% except those in the size field of binary segments. +%% and map_pair keys pat_vars(#k_var{name=N}) -> {[],[N]}; %%pat_vars(#k_char{}) -> {[],[]}; @@ -1967,8 +1960,10 @@ pat_vars(#k_tuple{es=Es}) -> pat_list_vars(Es); pat_vars(#k_map{es=Es}) -> pat_list_vars(Es); -pat_vars(#k_map_pair{val=V}) -> - pat_vars(V). +pat_vars(#k_map_pair{key=K,val=V}) -> + {U1,New} = pat_vars(V), + {[], U2} = pat_vars(K), + {union(U1,U2),New}. pat_list_vars(Ps) -> foldl(fun (P, {Used0,New0}) -> @@ -2009,9 +2004,7 @@ format_error(nomatch_shadow) -> format_error(bad_call) -> "invalid module and/or function name; this call will always fail"; format_error(bad_segment_size) -> - "binary construction will fail because of a type mismatch"; -format_error(bad_map) -> - "map construction will fail because of a type mismatch". + "binary construction will fail because of a type mismatch". add_warning(none, Term, Anno, #kern{ws=Ws}=St) -> File = get_file(Anno), diff --git a/lib/compiler/src/v3_kernel.hrl b/lib/compiler/src/v3_kernel.hrl index ab66445f73..b008285d9f 100644 --- a/lib/compiler/src/v3_kernel.hrl +++ b/lib/compiler/src/v3_kernel.hrl @@ -38,7 +38,7 @@ -record(k_nil, {anno=[]}). -record(k_tuple, {anno=[],es}). --record(k_map, {anno=[],var,op,es}). +-record(k_map, {anno=[],var=#k_literal{val=#{}},op,es}). -record(k_map_pair, {anno=[],key,val}). -record(k_cons, {anno=[],hd,tl}). -record(k_binary, {anno=[],segs}). 
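%% ---------------------------------------------------------------------
%% Editorial sketch (hypothetical module, not part of the patch): with the
%% var field of #k_map{} now defaulting to the empty-map literal, a plain
%% construction is modelled as an update of #{}. At the source level the two
%% functions below are equivalent, and the first is compiled essentially as
%% if it had been written like the second.
%% ---------------------------------------------------------------------
-module(k_map_default_example).
-export([new/2, new2/2]).

new(K, V) -> #{K => V}.
new2(K, V) -> (#{})#{K => V}.
%% ---------------------------------------------------------------------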
diff --git a/lib/compiler/src/v3_life.erl b/lib/compiler/src/v3_life.erl index cd4b5fd674..75bd188479 100644 --- a/lib/compiler/src/v3_life.erl +++ b/lib/compiler/src/v3_life.erl @@ -270,7 +270,7 @@ match(#k_select{anno=A,var=V,types=Kts}, Ls0, I, Ctxt, Vdb0) -> end, Vdb1 = use_vars(union(A#k.us, Ls1), I, Vdb0), Ts = [type_clause(Tc, Ls1, I+1, Ctxt, Vdb1) || Tc <- Kts], - #l{ke={select,literal2(V, Ctxt),Ts},i=I,vdb=Vdb1,a=Anno}; + #l{ke={select,literal(V, Ctxt),Ts},i=I,vdb=Vdb1,a=Anno}; match(#k_guard{anno=A,clauses=Kcs}, Ls, I, Ctxt, Vdb0) -> Vdb1 = use_vars(union(A#k.us, Ls), I, Vdb0), Cs = [guard_clause(G, Ls, I+1, Ctxt, Vdb1) || G <- Kcs], @@ -297,7 +297,7 @@ val_clause(#k_val_clause{anno=A,val=V,body=Kb}, Ls0, I, Ctxt0, Vdb0) -> _ -> Ctxt0 end, B = match(Kb, Ls1, I+1, Ctxt, Vdb1), - #l{ke={val_clause,literal2(V, Ctxt),B},i=I,vdb=use_vars(Bus, I+1, Vdb1),a=A#k.a}. + #l{ke={val_clause,literal(V, Ctxt),B},i=I,vdb=use_vars(Bus, I+1, Vdb1),a=A#k.a}. guard_clause(#k_guard_clause{anno=A,guard=Kg,body=Kb}, Ls, I, Ctxt, Vdb0) -> Vdb1 = use_vars(union(A#k.us, Ls), I+2, Vdb0), @@ -350,6 +350,7 @@ atomic_list(Ks) -> [atomic(K) || K <- Ks]. %% literal_list([Klit]) -> [Lit]. literal(#k_var{name=N}, _) -> {var,N}; +literal(#k_literal{val=I}, _) -> {literal,I}; literal(#k_int{val=I}, _) -> {integer,I}; literal(#k_float{val=F}, _) -> {float,F}; literal(#k_atom{val=N}, _) -> {atom,N}; @@ -358,58 +359,29 @@ literal(#k_nil{}, _) -> nil; literal(#k_cons{hd=H,tl=T}, Ctxt) -> {cons,[literal(H, Ctxt),literal(T, Ctxt)]}; literal(#k_binary{segs=V}, Ctxt) -> - {binary,literal(V, Ctxt)}; + {binary,literal(V, Ctxt)}; +literal(#k_bin_seg{size=S,unit=U,type=T,flags=Fs,seg=Seg,next=[]}, Ctxt) -> + %% Only occurs in patterns. + {bin_seg,Ctxt,literal(S, Ctxt),U,T,Fs,[literal(Seg, Ctxt)]}; literal(#k_bin_seg{size=S,unit=U,type=T,flags=Fs,seg=Seg,next=N}, Ctxt) -> {bin_seg,Ctxt,literal(S, Ctxt),U,T,Fs, [literal(Seg, Ctxt),literal(N, Ctxt)]}; +literal(#k_bin_int{size=S,unit=U,flags=Fs,val=Int,next=N}, Ctxt) -> + %% Only occurs in patterns. + {bin_int,Ctxt,literal(S, Ctxt),U,Fs,Int, + [literal(N, Ctxt)]}; literal(#k_bin_end{}, Ctxt) -> {bin_end,Ctxt}; literal(#k_tuple{es=Es}, Ctxt) -> {tuple,literal_list(Es, Ctxt)}; -literal(#k_map{op=Op,var=Var,es=Es}, Ctxt) -> - {map,Op,literal(Var, Ctxt),literal_list(Es, Ctxt)}; +literal(#k_map{op=Op,var=Var,es=Es0}, Ctxt) -> + {map,Op,literal(Var, Ctxt),literal_list(Es0, Ctxt)}; literal(#k_map_pair{key=K,val=V}, Ctxt) -> - {map_pair,literal(K, Ctxt),literal(V, Ctxt)}; -literal(#k_literal{val=V}, _Ctxt) -> - {literal,V}. + {map_pair,literal(K, Ctxt),literal(V, Ctxt)}. literal_list(Ks, Ctxt) -> [literal(K, Ctxt) || K <- Ks]. 
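%% ---------------------------------------------------------------------
%% Editorial sketch (hypothetical module, not part of the patch): a match
%% that reaches the select/val_clause handling above. The literal key 'type'
%% and the bound variable key K are ordered via map_key_clean/1 in v3_kernel
%% (a {lit,_} key sorts before a {var,_} key) and end up as has_map_fields
%% and get_map_elements tests in the generated code.
%% ---------------------------------------------------------------------
-module(map_select_example).
-export([classify/2]).

classify(K, M) ->
    case M of
        #{type := T, K := V} -> {T, V};
        #{type := T} -> {T, undefined};
        _ -> unknown
    end.
%% ---------------------------------------------------------------------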
-literal2(#k_var{name=N}, _) -> {var,N}; -literal2(#k_literal{val=I}, _) -> {literal,I}; -literal2(#k_int{val=I}, _) -> {integer,I}; -literal2(#k_float{val=F}, _) -> {float,F}; -literal2(#k_atom{val=N}, _) -> {atom,N}; -%%literal2(#k_char{val=C}, _) -> {char,C}; -literal2(#k_nil{}, _) -> nil; -literal2(#k_cons{hd=H,tl=T}, Ctxt) -> - {cons,[literal2(H, Ctxt),literal2(T, Ctxt)]}; -literal2(#k_binary{segs=V}, Ctxt) -> - {binary,literal2(V, Ctxt)}; -literal2(#k_bin_seg{size=S,unit=U,type=T,flags=Fs,seg=Seg,next=[]}, Ctxt) -> - {bin_seg,Ctxt,literal2(S, Ctxt),U,T,Fs,[literal2(Seg, Ctxt)]}; -literal2(#k_bin_seg{size=S,unit=U,type=T,flags=Fs,seg=Seg,next=N}, Ctxt) -> - {bin_seg,Ctxt,literal2(S, Ctxt),U,T,Fs, - [literal2(Seg, Ctxt),literal2(N, Ctxt)]}; -literal2(#k_bin_int{size=S,unit=U,flags=Fs,val=Int,next=N}, Ctxt) -> - {bin_int,Ctxt,literal2(S, Ctxt),U,Fs,Int, - [literal2(N, Ctxt)]}; -literal2(#k_bin_end{}, Ctxt) -> - {bin_end,Ctxt}; -literal2(#k_tuple{es=Es}, Ctxt) -> - {tuple,literal_list2(Es, Ctxt)}; -literal2(#k_map{op=Op,es=Es}, Ctxt) -> - {map,Op,literal_list2(Es, Ctxt)}; -literal2(#k_map_pair{key=K,val=V}, Ctxt) -> - {map_pair,literal2(K, Ctxt),literal2(V, Ctxt)}. - -literal_list2(Ks, Ctxt) -> - [literal2(K, Ctxt) || K <- Ks]. - -%% literal_bin(#k_bin_seg{size=S,unit=U,type=T,flags=Fs,seg=Seg,next=N}) -> -%% {bin_seg,literal(S),U,T,Fs,[literal(Seg),literal(N)]} - %% is_gc_bif(Name, Arity) -> true|false %% Determines whether the BIF Name/Arity might do a GC. diff --git a/lib/compiler/test/Makefile b/lib/compiler/test/Makefile index 0b56a49cd6..73d52a48bc 100644 --- a/lib/compiler/test/Makefile +++ b/lib/compiler/test/Makefile @@ -11,6 +11,7 @@ MODULES= \ beam_validator_SUITE \ beam_disasm_SUITE \ beam_except_SUITE \ + beam_utils_SUITE \ bs_bincomp_SUITE \ bs_bit_binaries_SUITE \ bs_construct_SUITE \ @@ -40,6 +41,7 @@ NO_OPT= \ andor \ apply \ beam_except \ + beam_utils \ bs_construct \ bs_match \ bs_utf \ @@ -59,6 +61,7 @@ NO_OPT= \ INLINE= \ andor \ apply \ + beam_utils \ bs_bincomp \ bs_bit_binaries \ bs_construct \ @@ -108,7 +111,7 @@ RELSYSDIR = $(RELEASE_PATH)/compiler_test # ---------------------------------------------------- ERL_MAKE_FLAGS += -ERL_COMPILE_FLAGS += -I$(ERL_TOP)/lib/test_server/include +clint +ERL_COMPILE_FLAGS += -I$(ERL_TOP)/lib/test_server/include +clint +clint0 EBIN = . diff --git a/lib/compiler/test/andor_SUITE.erl b/lib/compiler/test/andor_SUITE.erl index b5408ecd8f..4d7f444c4f 100644 --- a/lib/compiler/test/andor_SUITE.erl +++ b/lib/compiler/test/andor_SUITE.erl @@ -33,7 +33,7 @@ all() -> [{group,p}]. groups() -> - [{p,test_lib:parallel(), + [{p,[parallel], [t_case,t_and_or,t_andalso,t_orelse,inside,overlap, combined,in_case,before_and_inside_if]}]. @@ -173,7 +173,13 @@ t_and_or(Config) when is_list(Config) -> true = (fun (X = true) when X or true or X -> true end)(True), - ok. + Tuple = id({a,b}), + case Tuple of + {_,_} -> + {'EXIT',{badarg,_}} = (catch true and Tuple) + end, + + ok. t_andalso(Config) when is_list(Config) -> Bs = [true,false], @@ -364,6 +370,11 @@ combined(Config) when is_list(Config) -> ?line true = ?COMB(false, blurf, true), ?line true = ?COMB(true, true, blurf), + false = simple_comb(false, false), + false = simple_comb(false, true), + false = simple_comb(true, false), + true = simple_comb(true, true), + ok. -undef(COMB). @@ -390,6 +401,13 @@ comb(A, B, C) -> end, id(Res). +simple_comb(A, B) -> + %% Use Res twice, to ensure that a careless optimization of 'not' + %% doesn't leave Res as a free variable. 
+ Res = A andalso B, + _ = id(not Res), + Res. + %% Test that a boolean expression in a case expression is properly %% optimized (in particular, that the error behaviour is correct). in_case(Config) when is_list(Config) -> diff --git a/lib/compiler/test/beam_utils_SUITE.erl b/lib/compiler/test/beam_utils_SUITE.erl new file mode 100644 index 0000000000..d2e24cb5ae --- /dev/null +++ b/lib/compiler/test/beam_utils_SUITE.erl @@ -0,0 +1,236 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2015. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% +-module(beam_utils_SUITE). + +-export([all/0,suite/0,groups/0,init_per_suite/1,end_per_suite/1, + init_per_group/2,end_per_group/2, + apply_fun/1,apply_mf/1,bs_init/1,bs_save/1, + is_not_killed/1,is_not_used_at/1, + select/1,y_catch/1]). +-export([id/1]). + +suite() -> [{ct_hooks,[ts_install_cth]}]. + +all() -> + test_lib:recompile(?MODULE), + [{group,p}]. + +groups() -> + [{p,[parallel], + [apply_fun, + apply_mf, + bs_init, + bs_save, + is_not_killed, + is_not_used_at, + select, + y_catch + ]}]. + +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + ok. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, Config) -> + Config. + +apply_fun(_Config) -> + 3 = do_apply_fun(false, false), + 3 = do_apply_fun(false, true), + 3 = do_apply_fun(true, false), + 2 = do_apply_fun(true, true), + ok. + +do_apply_fun(X, Y) -> + F = fun(I) -> I+1 end, + Arg = case X andalso id(Y) of + true -> 1; + false -> 2 + end, + F(Arg). + +apply_mf(_Config) -> + ok = do_apply_mf_used({a,b}, ?MODULE, id), + error = do_apply_mf_used([a], ?MODULE, id), + {'EXIT',{{case_clause,{[],b}},_}} = (catch do_apply_mf_used({[],b}, ?MODULE, id)), + + error = do_apply_mf_killed({error,[a]}, ?MODULE, id), + ok = do_apply_mf_killed([b], ?MODULE, id), + {'EXIT',{{case_clause,{a,[b]}},_}} = (catch do_apply_mf_killed({a,[b]}, ?MODULE, id)), + {'EXIT',{{case_clause,{error,[]}},_}} = (catch do_apply_mf_killed({error,[]}, ?MODULE, id)), + + ok. + +do_apply_mf_used(Arg, Mod, Func) -> + Res = case id(Arg) of + {Decoded,_} when Decoded =/= [] -> + ok; + List when is_list(List) -> + error + end, + Mod:Func(Res). + +do_apply_mf_killed(Arg, Mod, Func) -> + Res = case id(Arg) of + {Tag,Decoded} when Decoded =/= [], Tag =:= error -> + error; + List when is_list(List) -> + ok + end, + Mod:Func(Res). + +bs_init(_Config) -> + <<7>> = do_bs_init_1([?MODULE], 7), + error = do_bs_init_1([?MODULE], 0.0), + error = do_bs_init_1([?MODULE], -43), + error = do_bs_init_1([?MODULE], 42), + + <<>> = do_bs_init_2([]), + <<0:32,((1 bsl 32)-1):32>> = do_bs_init_2([0,(1 bsl 32)-1]), + {'EXIT',{badarg,_}} = (catch do_bs_init_2([0.5])), + {'EXIT',{badarg,_}} = (catch do_bs_init_2([-1])), + {'EXIT',{badarg,_}} = (catch do_bs_init_2([1 bsl 32])), + ok. + +do_bs_init_1([?MODULE], Sz) -> + if + is_integer(Sz), Sz >= -42, Sz < 42 -> + id(<<Sz:8>>); + true -> + error + end. 
+ +do_bs_init_2(SigNos) -> + << <<SigNo:32>> || + SigNo <- SigNos, + (is_integer(SigNo) andalso SigNo >= 0 andalso SigNo < (1 bsl 32)) orelse + erlang:error(badarg) + >>. + + +bs_save(_Config) -> + {a,30,<<>>} = do_bs_save(<<1:1,30:5>>), + {b,127,<<>>} = do_bs_save(<<1:1,31:5,0:1,127:7>>), + {c,127,<<>>} = do_bs_save(<<1:1,31:5,1:1,127:7>>), + {c,127,<<>>} = do_bs_save(<<0:1,31:5,1:1,127:7>>), + {d,1024,<<>>} = do_bs_save(<<0:1,31:5>>), + ok. + +do_bs_save(<<_:1, Tag:5, T/binary>>) when Tag < 31 -> + {a,Tag,T}; +do_bs_save(<<1:1, 31:5, 0:1, Tag:7, T/binary>>) -> + {b,Tag,T}; +do_bs_save(<<_:1, 31:5, 1:1, Tag:7, T/binary>>) -> + {c,Tag,T}; +do_bs_save(<<_:1, 31:5, T/binary>>) -> + {d,1024,T}. + +is_not_killed(_Config) -> + {Pid,Ref} = spawn_monitor(fun() -> exit(banan) end), + receive + {'DOWN', Ref, process, Pid, banan} -> + ok + end, + receive after 0 -> ok end. + +is_not_used_at(_Config) -> + {a,b} = do_is_not_used_at(a, [{a,b}]), + {a,b} = do_is_not_used_at(a, [x,{a,b}]), + {a,b} = do_is_not_used_at(a, [{x,y},{a,b}]), + none = do_is_not_used_at(z, [{a,b}]), + none = do_is_not_used_at(a, [x]), + none = do_is_not_used_at(a, [{x,y}]), + ok. + +do_is_not_used_at(Key, [P|Ps]) -> + if + tuple_size(P) >= 1, element(1, P) =:= Key -> + P; + true -> + do_is_not_used_at(Key, Ps) + end; +do_is_not_used_at(_Key, []) -> none. + +-record(select, {fixed=false}). + +select(_Config) -> + a = do_select(#select{}, 0, 0), + b = do_select(#select{}, 0, 1), + c = do_select(#select{fixed=true}, 0, 0), + c = do_select(#select{fixed=true}, 0, 1), + ok. + +do_select(Head, OldSize, BSize) -> + Overwrite0 = + if + OldSize =:= BSize -> same; + true -> true + end, + Overwrite = + if + Head#select.fixed =/= false -> + false; + true -> + Overwrite0 + end, + if + Overwrite =:= same -> + a; + Overwrite -> + b; + true -> + c + end. + +y_catch(_Config) -> + ok = try + do_y_catch(<<"<?xmlX">>, {state}), + failed + catch + throw:{<<"<?xmlX">>,{state}} -> + ok + end. + +do_y_catch(<<"<?xml",Rest0/binary>> = Bytes, State0) -> + {Rest1,State1} = + case do_y_catch_1(Rest0, State0) of + false -> + {Bytes,State0}; + true -> + {_XmlAttributes, R, S} = do_y_catch_2(Rest0), + {R,S} + end, + case catch id({Rest1,State1}) of + Other -> + throw(Other) + end. + +do_y_catch_1(<<_,_/binary>>, _) -> + false. + +do_y_catch_2(_) -> {a,b,c}. + + +%% The identity function. +id(I) -> I. diff --git a/lib/compiler/test/beam_validator_SUITE.erl b/lib/compiler/test/beam_validator_SUITE.erl index 626f89ba7a..1b1c7db0e8 100644 --- a/lib/compiler/test/beam_validator_SUITE.erl +++ b/lib/compiler/test/beam_validator_SUITE.erl @@ -21,16 +21,17 @@ -export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, init_per_group/2,end_per_group/2, init_per_testcase/2,end_per_testcase/2, - beam_files/1,compiler_bug/1,stupid_but_valid/1, + compiler_bug/1,stupid_but_valid/1, xrange/1,yrange/1,stack/1,call_last/1,merge_undefined/1, uninit/1,unsafe_catch/1, - dead_code/1,mult_labels/1, + dead_code/1, overwrite_catchtag/1,overwrite_trytag/1,accessing_tags/1,bad_catch_try/1, cons_guard/1, freg_range/1,freg_uninit/1,freg_state/1, - bin_match/1,bad_bin_match/1,bin_aligned/1,bad_dsetel/1, + bad_bin_match/1,bin_aligned/1,bad_dsetel/1, state_after_fault_in_catch/1,no_exception_in_catch/1, - undef_label/1,illegal_instruction/1,failing_gc_guard_bif/1]). + undef_label/1,illegal_instruction/1,failing_gc_guard_bif/1, + map_field_lists/1]). -include_lib("test_server/include/test_server.hrl"). @@ -47,18 +48,19 @@ suite() -> [{ct_hooks,[ts_install_cth]}]. 
all() -> test_lib:recompile(?MODULE), - [beam_files,{group,p}]. + [{group,p}]. groups() -> [{p,test_lib:parallel(), [compiler_bug,stupid_but_valid,xrange, yrange,stack,call_last,merge_undefined,uninit, - unsafe_catch,dead_code,mult_labels, + unsafe_catch,dead_code, overwrite_catchtag,overwrite_trytag,accessing_tags, bad_catch_try,cons_guard,freg_range,freg_uninit, - freg_state,bin_match,bad_bin_match,bin_aligned,bad_dsetel, + freg_state,bad_bin_match,bin_aligned,bad_dsetel, state_after_fault_in_catch,no_exception_in_catch, - undef_label,illegal_instruction,failing_gc_guard_bif]}]. + undef_label,illegal_instruction,failing_gc_guard_bif, + map_field_lists]}]. init_per_suite(Config) -> Config. @@ -72,33 +74,19 @@ init_per_group(_GroupName, Config) -> end_per_group(_GroupName, Config) -> Config. - -beam_files(Config) when is_list(Config) -> - ?line DataDir = proplists:get_value(data_dir, Config), - ?line Wc = filename:join([DataDir,"..","..","*","*.beam"]), - %% Must have at least two files here, or there will be - %% a grammatical error in the output of the io:format/2 call below. ;-) - ?line [_,_|_] = Fs = filelib:wildcard(Wc), - ?line io:format("~p files\n", [length(Fs)]), - test_lib:p_run(fun do_beam_file/1, Fs). - - -do_beam_file(F) -> - case beam_validator:file(F) of - ok -> - ok; - {error,Es} -> - io:format("File: ~s", [F]), - io:format("Error: ~p\n", [Es]), - error - end. - compiler_bug(Config) when is_list(Config) -> %% Check that the compiler returns an error if we try to %% assemble one of the bad '.S' files. - ?line Data = ?config(data_dir, Config), - ?line File = filename:join(Data, "stack"), - ?line error = compile:file(File, [asm,report_errors,binary,time]), + Data = ?config(data_dir, Config), + File = filename:join(Data, "compiler_bug"), + error = compile:file(File, [from_asm,report_errors,time]), + + %% Make sure that the error was reported by + %% the beam_validator module. + {error, + [{"compiler_bug", + [{beam_validator,_}]}], + []} = compile:file(File, [from_asm,return_errors,time]), ok. %% The following code is stupid but it should compile. @@ -134,7 +122,7 @@ yrange(Config) when is_list(Config) -> {{move,{x,1},{y,-1}},5, {invalid_store,{y,-1},term}}}, {{t,sum_2,2}, - {{bif,'+',{f,0},[{x,0},{y,1024}],{x,0}},8, + {{bif,'+',{f,0},[{x,0},{y,1024}],{x,0}},7, {uninitialized_reg,{y,1024}}}}, {{t,sum_3,2}, {{move,{x,1},{y,1024}},5,limit}}, @@ -145,31 +133,31 @@ yrange(Config) when is_list(Config) -> stack(Config) when is_list(Config) -> Errors = do_val(stack, Config), - ?line [{{t,a,2},{return,11,{stack_frame,2}}}, - {{t,b,2},{{deallocate,2},4,{allocated,none}}}, - {{t,c,2},{{deallocate,2},12,{allocated,none}}}, - {{t,d,2}, - {{allocate,2,2},5,{existing_stack_frame,{size,2}}}}, - {{t,e,2},{{deallocate,5},6,{allocated,2}}}, - {{t,bad_1,0},{{allocate_zero,2,10},4,{{x,9},not_live}}}, - {{t,bad_2,0},{{move,{y,0},{x,0}},5,{unassigned,{y,0}}}}] = Errors, + [{{t,a,2},{return,9,{stack_frame,2}}}, + {{t,b,2},{{deallocate,2},4,{allocated,none}}}, + {{t,bad_1,0},{{allocate_zero,2,10},4,{{x,9},not_live}}}, + {{t,bad_2,0},{{move,{y,0},{x,0}},5,{unassigned,{y,0}}}}, + {{t,c,2},{{deallocate,2},10,{allocated,none}}}, + {{t,d,2}, + {{allocate,2,2},5,{existing_stack_frame,{size,2}}}}, + {{t,e,2},{{deallocate,5},6,{allocated,2}}}] = Errors, ok. 
call_last(Config) when is_list(Config) -> Errors = do_val(call_last, Config), - ?line [{{t,a,1},{{call_last,1,{f,8},2},11,{allocated,1}}}, - {{t,b,1}, - {{call_ext_last,2,{extfunc,lists,seq,2},2}, - 11, - {allocated,1}}}] = Errors, + [{{t,a,1},{{call_last,1,{f,8},2},9,{allocated,1}}}, + {{t,b,1}, + {{call_ext_last,2,{extfunc,lists,seq,2},2}, + 10, + {allocated,1}}}] = Errors, ok. merge_undefined(Config) when is_list(Config) -> Errors = do_val(merge_undefined, Config), - ?line [{{t,handle_call,2}, - {{call_ext,2,{extfunc,debug,filter,2}}, - 22, - {uninitialized_reg,{y,0}}}}] = Errors, + [{{t,handle_call,2}, + {{call_ext,2,{extfunc,debug,filter,2}}, + 22, + {uninitialized_reg,{y,0}}}}] = Errors, ok. uninit(Config) when is_list(Config) -> @@ -178,10 +166,10 @@ uninit(Config) when is_list(Config) -> [{{t,sum_1,2}, {{move,{y,0},{x,0}},5,{uninitialized_reg,{y,0}}}}, {{t,sum_2,2}, - {{call,1,{f,10}},6,{uninitialized_reg,{y,0}}}}, + {{call,1,{f,8}},5,{uninitialized_reg,{y,0}}}}, {{t,sum_3,2}, {{bif,'+',{f,0},[{x,0},{y,0}],{x,0}}, - 7, + 6, {unassigned,{y,0}}}}] = Errors, ok. @@ -199,10 +187,6 @@ dead_code(Config) when is_list(Config) -> [] = do_val(dead_code, Config), ok. -mult_labels(Config) when is_list(Config) -> - [] = do_val(erl_prim_loader, Config, ".beam"), - ok. - overwrite_catchtag(Config) when is_list(Config) -> Errors = do_val(overwrite_catchtag, Config), ?line @@ -214,16 +198,15 @@ overwrite_trytag(Config) when is_list(Config) -> Errors = do_val(overwrite_trytag, Config), ?line [{{overwrite_trytag,foo,1}, - {{kill,{y,2}},9,{trytag,_}}}] = Errors, + {{kill,{y,2}},8,{trytag,_}}}] = Errors, ok. accessing_tags(Config) when is_list(Config) -> Errors = do_val(accessing_tags, Config), - ?line - [{{accessing_tags,foo,1}, - {{move,{y,0},{x,0}},6,{catchtag,_}}}, - {{accessing_tags,bar,1}, - {{move,{y,0},{x,0}},6,{trytag,_}}}] = Errors, + [{{accessing_tags,bar,1}, + {{move,{y,0},{x,0}},6,{trytag,_}}}, + {{accessing_tags,foo,1}, + {{move,{y,0},{x,0}},6,{catchtag,_}}}] = Errors, ok. bad_catch_try(Config) when is_list(Config) -> @@ -310,13 +293,6 @@ freg_state(Config) when is_list(Config) -> {fclearerror,5,{bad_floating_point_state,cleared}}}] = Errors, ok. -bin_match(Config) when is_list(Config) -> - Errors = do_val(bin_match, Config), - ?line - [{{t,t,1},{{bs_save,0},4,no_bs_match_state}}, - {{t,x,1},{{bs_restore,1},16,{no_save_point,1}}}] = Errors, - ok. - bad_bin_match(Config) when is_list(Config) -> [{{t,t,1},{return,5,{match_context,{x,0}}}}] = do_val(bad_bin_match, Config), @@ -340,36 +316,69 @@ bad_dsetel(Config) when is_list(Config) -> ?line [{{t,t,1}, {{set_tuple_element,{x,1},{x,0},1}, - 15, + 17, illegal_context_for_set_tuple_element}}] = Errors, ok. state_after_fault_in_catch(Config) when is_list(Config) -> Errors = do_val(state_after_fault_in_catch, Config), - [{{t,foo,1}, - {{move,{x,1},{x,0}},10,{uninitialized_reg,{x,1}}}}, - {{state_after_fault_in_catch,if_end,1}, + [{{state_after_fault_in_catch,badmatch,1}, {{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}}, {{state_after_fault_in_catch,case_end,1}, {{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}}, - {{state_after_fault_in_catch,badmatch,1}, - {{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}}] = Errors, + {{state_after_fault_in_catch,if_end,1}, + {{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}}, + {{t,foo,1}, + {{move,{x,1},{x,0}},10,{uninitialized_reg,{x,1}}}}] = Errors, ok. 
no_exception_in_catch(Config) when is_list(Config) -> Errors = do_val(no_exception_in_catch, Config), [{{no_exception_in_catch,nested_of_1,4}, - {{move,{x,3},{x,0}},91,{uninitialized_reg,{x,3}}}}] = Errors, + {{move,{x,3},{x,0}},88,{uninitialized_reg,{x,3}}}}] = Errors, ok. undef_label(Config) when is_list(Config) -> - Errors = do_val(undef_label, Config), + M = {undef_label, + [{t,1}], + [], + [{function,t,1,2, + [{label,1}, + {func_info,{atom,undef_label},{atom,t},1}, + {label,2}, + {test,is_eq_exact,{f,42},[{x,0},{atom,x}]}, + {move,{atom,ok},{x,0}}, + return]}, + {function,x,1,17, + [{label,3}, + {func_info,{atom,undef_label},{atom,x},1}, + {label,4}, + return]}], + 5}, + Errors = beam_val(M), [{{undef_label,t,1},{undef_labels,[42]}}, {{undef_label,x,1},{return,4,no_entry_label}}] = Errors, ok. illegal_instruction(Config) when is_list(Config) -> - Errors = do_val(illegal_instruction, Config), + M = {illegal_instruction, + [{t,1},{x,1},{y,0}], + [], + [{function,t,1,2, + [{label,1}, + {func_info,{atom,illegal_instruction},{atom,t},1}, + {label,2}, + {my_illegal_instruction,{x,0}}, + return]}, + {function,x,1,4, + [{label,3}, + bad_func_info, + {label,4}, + {my_illegal_instruction,{x,0}}, + return]}, + {function,y,0,17,[]}], + 5}, + Errors = beam_val(M), [{{illegal_instruction,t,1}, {{my_illegal_instruction,{x,0}},4,unknown_instruction}}, {{'_',x,1},{bad_func_info,1,illegal_instruction}}, @@ -407,19 +416,40 @@ process_request_foo(_) -> process_request_bar(Pid, [Response]) when is_pid(Pid) -> Response. +map_field_lists(Config) -> + Errors = do_val(map_field_lists, Config), + [{{map_field_lists,x,1}, + {{test,has_map_fields,{f,1},{x,0}, + {list,[{atom,z},{atom,a}]}}, + 5, + not_strict_order}}, + {{map_field_lists,y,1}, + {{test,has_map_fields,{f,3},{x,0},{list,[]}}, + 5, + empty_field_list}} + ] = Errors. %%%------------------------------------------------------------------------- -do_val(Name, Config) -> - do_val(Name, Config, ".S"). - -do_val(Name, Config, Type) -> - ?line Data = ?config(data_dir, Config), - ?line File = filename:join(Data, atom_to_list(Name)++Type), - ?line case beam_validator:file(File) of - {error,Errors} -> - ?line io:format("~p:~n~s", - [File,beam_validator:format_error(Errors)]), - Errors; - ok -> [] - end. +do_val(Mod, Config) -> + Data = ?config(data_dir, Config), + Base = atom_to_list(Mod), + File = filename:join(Data, Base), + case compile:file(File, [from_asm,no_postopt,return_errors]) of + {error,L,[]} -> + [{Base,Errors0}] = L, + Errors = [E || {beam_validator,E} <- Errors0], + _ = [io:put_chars(beam_validator:format_error(E)) || + E <- Errors], + Errors; + {ok,Mod} -> + [] + end. + +beam_val(M) -> + Name = atom_to_list(element(1, M)), + {error,[{Name,Errors0}]} = beam_validator:module(M, []), + Errors = [E || {beam_validator,E} <- Errors0], + _ = [io:put_chars(beam_validator:format_error(E)) || + E <- Errors], + Errors. diff --git a/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S b/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S index 279b2fa97f..9630d73a93 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S +++ b/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S @@ -1,4 +1,4 @@ -{module, t}. %% version = 0 +{module, bad_dsetel}. %% version = 0 {exports, [{module_info,0},{module_info,1},{t,1}]}. @@ -21,7 +21,9 @@ {move,{integer,3},{x,0}}. {call_ext,3,{extfunc,erlang,setelement,3}}. {test_heap,6,1}. - {put_string,3,{string,"abc"},{x,1}}. + {put_list,{integer,99},nil,{x,1}}. + {put_list,{integer,98},{x,1},{x,1}}. 
+ {put_list,{integer,97},{x,1},{x,1}}. {set_tuple_element,{x,1},{x,0},1}. {'%live',1}. {deallocate,0}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S b/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S index 2f353fbd25..a59f7ccc03 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S +++ b/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S @@ -1,4 +1,4 @@ -{module, t}. %% version = 0 +{module, bin_aligned}. %% version = 0 {exports, [{decode,1},{module_info,0},{module_info,1}]}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/bin_match.S b/lib/compiler/test/beam_validator_SUITE_data/bin_match.S deleted file mode 100644 index 96df0f7933..0000000000 --- a/lib/compiler/test/beam_validator_SUITE_data/bin_match.S +++ /dev/null @@ -1,64 +0,0 @@ -{module, bin_match}. %% version = 0 - -{exports, [{t,1}]}. - -{attributes, []}. - -{labels, 8}. - - -{function, t, 1, 2}. - {label,1}. - {func_info,{atom,t},{atom,t},1}. - {label,2}. -%% {test,bs_start_match,{f,1},[{x,0}]}. - {bs_save,0}. - {test,bs_get_integer, - {f,3}, - [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,1}]}. - {test,bs_get_integer, - {f,3}, - [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,2}]}. - {test,bs_test_tail,{f,3},[0]}. - {test_heap,3,3}. - {put_tuple,2,{x,0}}. - {put,{x,1}}. - {put,{x,2}}. - {'%live',1}. - return. - {label,3}. - {bs_restore,0}. - {test,bs_get_integer, - {f,1}, - [{integer,32},1,{field_flags,[aligned,unsigned,big]},{x,1}]}. - {test,bs_test_tail,{f,1},[0]}. - {move,{x,1},{x,0}}. - return. - -{function, x, 1, 5}. - {label,4}. - {func_info,{atom,t},{atom,x},1}. - {label,5}. - {test,bs_start_match,{f,4},[{x,0}]}. - {bs_save,0}. - {test,bs_get_integer, - {f,6}, - [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,1}]}. - {test,bs_get_integer, - {f,6}, - [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,2}]}. - {test,bs_test_tail,{f,6},[0]}. - {test_heap,3,3}. - {put_tuple,2,{x,0}}. - {put,{x,1}}. - {put,{x,2}}. - {'%live',1}. - return. - {label,6}. - {bs_restore,1}. - {test,bs_get_integer, - {f,4}, - [{integer,32},1,{field_flags,[aligned,unsigned,big]},{x,1}]}. - {test,bs_test_tail,{f,4},[0]}. - {move,{x,1},{x,0}}. - return. diff --git a/lib/compiler/test/beam_validator_SUITE_data/compiler_bug.S b/lib/compiler/test/beam_validator_SUITE_data/compiler_bug.S new file mode 100644 index 0000000000..ba27bf5c47 --- /dev/null +++ b/lib/compiler/test/beam_validator_SUITE_data/compiler_bug.S @@ -0,0 +1,38 @@ +{module, compiler_bug}. %% version = 0 + +{exports, [{module_info,0},{module_info,1},{sum,2}]}. + +{attributes, []}. + +{labels, 7}. + + +{function, sum, 2, 2}. + {label,1}. + {line,[{location,"compiler_bug.erl",4}]}. + {func_info,{atom,compiler_bug},{atom,sum},2}. + {label,2}. + {line,[{location,"compiler_bug.erl",5}]}. + {gc_bif,'+',{f,0},2,[{y,0},{y,1}],{x,0}}. + return. + + +{function, module_info, 0, 4}. + {label,3}. + {line,[]}. + {func_info,{atom,compiler_bug},{atom,module_info},0}. + {label,4}. + {move,{atom,compiler_bug},{x,0}}. + {line,[]}. + {call_ext_only,1,{extfunc,erlang,get_module_info,1}}. + + +{function, module_info, 1, 6}. + {label,5}. + {line,[]}. + {func_info,{atom,compiler_bug},{atom,module_info},1}. + {label,6}. + {move,{x,0},{x,1}}. + {move,{atom,compiler_bug},{x,0}}. + {line,[]}. + {call_ext_only,2,{extfunc,erlang,get_module_info,2}}. 
diff --git a/lib/compiler/test/beam_validator_SUITE_data/dead_code.S b/lib/compiler/test/beam_validator_SUITE_data/dead_code.S index f964f98fba..c114664ba0 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/dead_code.S +++ b/lib/compiler/test/beam_validator_SUITE_data/dead_code.S @@ -1,10 +1,10 @@ {module, dead_code}. %% version = 0 -{exports, [{execute,0},{module_info,0},{module_info,1}]}. +{exports, [{execute,0}]}. {attributes, []}. -{labels, 10}. +{labels, 6}. {function, execute, 0, 2}. @@ -12,7 +12,6 @@ {func_info,{atom,dead_code},{atom,execute},0}. {label,2}. {allocate,0,0}. - {'%live',0}. {call_ext,0,{extfunc,foo,fie,0}}. {test,is_ne,{f,4},[{x,0},{integer,0}]}. {test,is_ne,{f,4},[{x,0},{integer,1}]}. @@ -22,27 +21,7 @@ {case_end,{x,0}}. {label,4}. {move,{atom,ok},{x,0}}. - {'%live',1}. {deallocate,0}. return. - {'%','Moved code'}. {label,5}. {case_end,{x,0}}. - - -{function, module_info, 0, 7}. - {label,6}. - {func_info,{atom,dead_code},{atom,module_info},0}. - {label,7}. - {move,nil,{x,0}}. - {'%live',1}. - return. - - -{function, module_info, 1, 9}. - {label,8}. - {func_info,{atom,dead_code},{atom,module_info},1}. - {label,9}. - {move,nil,{x,0}}. - {'%live',1}. - return. diff --git a/lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beam b/lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beam Binary files differdeleted file mode 100644 index dd58a88e42..0000000000 --- a/lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beam +++ /dev/null diff --git a/lib/compiler/test/beam_validator_SUITE_data/freg_range.S b/lib/compiler/test/beam_validator_SUITE_data/freg_range.S index ee583a923e..b3ebff3ade 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/freg_range.S +++ b/lib/compiler/test/beam_validator_SUITE_data/freg_range.S @@ -1,10 +1,10 @@ {module, freg_range}. %% version = 0 -{exports, [{module_info,0},{module_info,1},{prod,2},{sum,2},{sum_prod,3}]}. +{exports, [{sum_1,2},{sum_2,2},{sum_3,2},{sum_4,2}]}. {attributes, []}. -{labels, 8}. +{labels, 9}. {function, sum_1, 2, 2}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/freg_state.S b/lib/compiler/test/beam_validator_SUITE_data/freg_state.S index ff4d7548ae..7466763482 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/freg_state.S +++ b/lib/compiler/test/beam_validator_SUITE_data/freg_state.S @@ -1,6 +1,6 @@ {module, freg_state}. %% version = 0 -{exports, []}. +{exports, [{sum_1,2},{sum_2,2},{sum_3,2},{sum_4,2},{sum_5,2}]}. {attributes, []}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S b/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S index f8d805d9ec..71e833446a 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S +++ b/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S @@ -1,10 +1,10 @@ {module, freg_uninit}. %% version = 0 -{exports, []}. +{exports, [{sum_1,2},{sum_2,2}]}. {attributes, []}. -{labels, 8}. +{labels, 7}. {function, sum_1, 2, 2}. @@ -14,7 +14,6 @@ {fconv,{x,0},{fr,0}}. fclearerror. {bif,fadd,{f,0},[{fr,0},{fr,1}],{fr,0}}. - {'%live',1}. return. @@ -26,7 +25,12 @@ {fconv,{x,1},{fr,1}}. fclearerror. {fcheckerror,{f,0}}. - {call,2,{f,8}}. + {call,2,{f,6}}. {bif,fadd,{f,0},[{fr,0},{fr,1}],{fr,0}}. - {'%live',1}. + return. + +{function, foo, 2, 6}. + {label,5}. + {func_info,{atom,t},{atom,foo},2}. + {label,6}. return. 
diff --git a/lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S b/lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S deleted file mode 100644 index d6e92abc71..0000000000 --- a/lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S +++ /dev/null @@ -1,26 +0,0 @@ -{module, illegal_instruction}. %% version = 0 - -{exports, []}. - -{attributes, []}. - -{labels, 7}. - - -{function, t, 1, 2}. - {label,1}. - {func_info,{atom,illegal_instruction},{atom,t},1}. - {label,2}. - {my_illegal_instruction,{x,0}}. - return. - - -{function, x, 1, 4}. - {label,3}. - bad_func_info. - {label,4}. - {my_illegal_instruction,{x,0}}. - return. - -{function, y, 0, 17}. -
\ No newline at end of file diff --git a/lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S b/lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S new file mode 100644 index 0000000000..9af68c82d4 --- /dev/null +++ b/lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S @@ -0,0 +1,29 @@ +{module, map_field_lists}. %% version = 0 + +{exports, [{x,1},{y,1}]}. + +{attributes, []}. + +{labels, 5}. + + +{function, x, 1, 2}. + {label,1}. + {line,[{location,"map_field_lists.erl",4}]}. + {func_info,{atom,map_field_lists},{atom,x},1}. + {label,2}. + {test,is_map,{f,1},[{x,0}]}. + {test,has_map_fields,{f,1},{x,0},{list,[{atom,z},{atom,a}]}}. + {move,{atom,ok},{x,0}}. + return. + + +{function, y, 1, 4}. + {label,3}. + {line,[{location,"map_field_lists.erl",7}]}. + {func_info,{atom,map_field_lists},{atom,y},1}. + {label,4}. + {test,is_map,{f,3},[{x,0}]}. + {test,has_map_fields,{f,3},{x,0},{list,[]}}. + {move,{atom,ok},{x,0}}. + return. diff --git a/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S b/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S index 3d76127824..481d55045d 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S +++ b/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S @@ -22,7 +22,8 @@ {label,4}. {allocate_heap,1,6,2}. {move,{x,1},{y,0}}. - {put_string,2,{string,"~p"},{x,0}}. + {put_list,{integer,112},nil,{x,0}}. + {put_list,{integer,126},{x,0},{x,0}}. {put_list,{y,0},nil,{x,1}}. {'%live',2}. {call_ext,2,{extfunc,io,format,2}}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S b/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S index e08a718a39..1a5b417a5f 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S +++ b/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S @@ -26,7 +26,7 @@ {call_ext,1,{extfunc,erlang,erase,1}}. {move,{atom,nested},{x,0}}. {call_ext,1,{extfunc,erlang,erase,1}}. - {bif,self,nofail,[],{x,0}}. + {bif,self,{f,0},[],{x,0}}. {'try',{y,8},{f,13}}. {'try',{y,7},{f,11}}. {'try',{y,6},{f,9}}. @@ -34,7 +34,7 @@ %% Because the following instructions can't possible throw an exception, %% label 7 used to get no state. Now the try_end itself will save the state. {move,{x,0},{y,4}}. - {bif,self,nofail,[],{x,0}}. + {bif,self,{f,0},[],{x,0}}. {'%live',1}. {try_end,{y,5}}. {test,is_eq_exact,{f,15},[{x,0},{y,4}]}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/stack.S b/lib/compiler/test/beam_validator_SUITE_data/stack.S index 244c22a2f9..e4356a9d00 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/stack.S +++ b/lib/compiler/test/beam_validator_SUITE_data/stack.S @@ -1,10 +1,10 @@ {module, stack}. %% version = 0 -{exports, [{a,2},{b,2},{c,2},{d,2},{e,2}]}. +{exports, [{a,2},{b,2},{c,2},{d,2},{e,2},{bad_1,0},{bad_2,0},{foo,0}]}. {attributes, []}. -{labels, 21}. +{labels, 17}. {function, a, 2, 2}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/undef_label.S b/lib/compiler/test/beam_validator_SUITE_data/undef_label.S deleted file mode 100644 index dd29066bf4..0000000000 --- a/lib/compiler/test/beam_validator_SUITE_data/undef_label.S +++ /dev/null @@ -1,22 +0,0 @@ -{module, undef_label}. %% version = 0 - -{exports, []}. - -{attributes, []}. - -{labels, 7}. - - -{function, t, 1, 2}. - {label,1}. - {func_info,{atom,undef_label},{atom,t},1}. - {label,2}. - {test,is_eq_exact,{f,42},[{x,0},{atom,x}]}. - {move,{atom,ok},{x,0}}. - return. - -{function, x, 1, 17}. - {label,3}. 
- {func_info,{atom,undef_label},{atom,x},1}. - {label,4}. - return. diff --git a/lib/compiler/test/beam_validator_SUITE_data/uninit.S b/lib/compiler/test/beam_validator_SUITE_data/uninit.S index 1a45c31411..9a66f4f7d6 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/uninit.S +++ b/lib/compiler/test/beam_validator_SUITE_data/uninit.S @@ -1,9 +1,11 @@ {module, uninit}. %% version = 0 -{exports, []}. +{exports, [{sum_1,2},{sum_2,2},{sum_3,2}]}. {attributes, []}. +{labels, 9}. + {function, sum_1, 2, 2}. {label,1}. {func_info,{atom,t},{atom,sum_1},2}. @@ -11,7 +13,7 @@ {allocate,1,2}. {move,{y,0},{x,0}}. {'%live',1}. - {call,1,{f,10}}. + {call,1,{f,8}}. {bif,'+',{f,0},[{x,0},{y,0}],{x,0}}. {'%live',1}. {deallocate,1}. @@ -23,7 +25,7 @@ {label,4}. {allocate,1,2}. {'%live',1}. - {call,1,{f,10}}. + {call,1,{f,8}}. {bif,'+',{f,0},[{x,0},{y,0}],{x,0}}. {'%live',1}. {deallocate,1}. @@ -35,14 +37,14 @@ {label,6}. {allocate_zero,1,2}. {'%live',1}. - {call,1,{f,10}}. + {call,1,{f,8}}. {bif,'+',{f,0},[{x,0},{y,0}],{x,0}}. {'%live',1}. {deallocate,1}. return. -{function, id, 1, 10}. - {label,9}. +{function, id, 1, 8}. + {label,7}. {func_info,{atom,t},{atom,id},1}. - {label,10}. + {label,8}. return. diff --git a/lib/compiler/test/beam_validator_SUITE_data/xrange.S b/lib/compiler/test/beam_validator_SUITE_data/xrange.S index 3abbdffbc2..c6f20288f7 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/xrange.S +++ b/lib/compiler/test/beam_validator_SUITE_data/xrange.S @@ -1,10 +1,10 @@ {module, xrange}. %% version = 0 -{exports, [{module_info,0},{module_info,1},{prod,2},{sum,2},{sum_prod,3}]}. +{exports, [{sum_1,2},{sum_2,2},{sum_3,2},{sum_4,2}]}. {attributes, []}. -{labels, 8}. +{labels, 9}. {function, sum_1, 2, 2}. diff --git a/lib/compiler/test/bs_bit_binaries_SUITE.erl b/lib/compiler/test/bs_bit_binaries_SUITE.erl index 8609a490f5..2433e7621e 100644 --- a/lib/compiler/test/bs_bit_binaries_SUITE.erl +++ b/lib/compiler/test/bs_bit_binaries_SUITE.erl @@ -37,7 +37,7 @@ all() -> [{group,p}]. groups() -> - [{p,test_lib:parallel(), + [{p,[parallel], [misc,horrid_match,test_bitstr,test_bit_size, asymmetric_tests,big_asymmetric_tests, binary_to_and_from_list,big_binary_to_and_from_list, diff --git a/lib/compiler/test/bs_construct_SUITE.erl b/lib/compiler/test/bs_construct_SUITE.erl index ce39de2a82..9df874c387 100644 --- a/lib/compiler/test/bs_construct_SUITE.erl +++ b/lib/compiler/test/bs_construct_SUITE.erl @@ -39,7 +39,7 @@ all() -> [{group,p}]. groups() -> - [{p,test_lib:parallel(), + [{p,[parallel], [two,test1,fail,float_bin,in_guard,in_catch, nasty_literals,side_effect,opt,otp_7556,float_arith, otp_8054]}]. diff --git a/lib/compiler/test/bs_match_SUITE.erl b/lib/compiler/test/bs_match_SUITE.erl index 10e3451e8f..f7af56afcc 100644 --- a/lib/compiler/test/bs_match_SUITE.erl +++ b/lib/compiler/test/bs_match_SUITE.erl @@ -48,7 +48,7 @@ all() -> [{group,p}]. groups() -> - [{p,test_lib:parallel(), + [{p,[parallel], [fun_shadow,int_float,otp_5269,null_fields,wiger, bin_tail,save_restore,shadowed_size_var, partitioned_bs_match,function_clause,unit, @@ -368,11 +368,20 @@ partitioned_bs_match_3(Var, <<_>>) -> Var; partitioned_bs_match_3(1, 2) -> ok. 
function_clause(Config) when is_list(Config) -> - ?line ok = function_clause_1(<<0,7,0,7,42>>), - ?line fc(function_clause_1, [<<0,1,2,3>>], - catch function_clause_1(<<0,1,2,3>>)), - ?line fc(function_clause_1, [<<0,1,2,3>>], - catch function_clause_1(<<0,7,0,1,2,3>>)), + ok = function_clause_1(<<0,7,0,7,42>>), + fc(function_clause_1, [<<0,1,2,3>>], + catch function_clause_1(<<0,1,2,3>>)), + fc(function_clause_1, [<<0,1,2,3>>], + catch function_clause_1(<<0,7,0,1,2,3>>)), + + ok = function_clause_2(<<0,7,0,7,42>>), + ok = function_clause_2(<<255>>), + ok = function_clause_2(<<13:4>>), + fc(function_clause_2, [<<0,1,2,3>>], + catch function_clause_2(<<0,1,2,3>>)), + fc(function_clause_2, [<<0,1,2,3>>], + catch function_clause_2(<<0,7,0,1,2,3>>)), + ok. function_clause_1(<<0:8,7:8,T/binary>>) -> @@ -380,6 +389,13 @@ function_clause_1(<<0:8,7:8,T/binary>>) -> function_clause_1(<<_:8>>) -> ok. +function_clause_2(<<0:8,7:8,T/binary>>) -> + function_clause_2(T); +function_clause_2(<<_:8>>) -> + ok; +function_clause_2(<<_:4>>) -> + ok. + unit(Config) when is_list(Config) -> ?line 42 = peek1(<<42>>), ?line 43 = peek1(<<43,1,2>>), diff --git a/lib/compiler/test/compilation_SUITE.erl b/lib/compiler/test/compilation_SUITE.erl index f7b1dbdddf..296774e083 100644 --- a/lib/compiler/test/compilation_SUITE.erl +++ b/lib/compiler/test/compilation_SUITE.erl @@ -436,7 +436,7 @@ self_compile_1(Config, Prefix, Opts) -> %% Compile the compiler. (In this node to get better coverage.) ?line CompA = make_compiler_dir(Priv, Prefix++"compiler_a"), ?line VsnA = Version ++ ".0", - ?line compile_compiler(compiler_src(), CompA, VsnA, [clint|Opts]), + compile_compiler(compiler_src(), CompA, VsnA, [clint0,clint|Opts]), %% Compile the compiler again using the newly compiled compiler. %% (In another node because reloading the compiler would disturb cover.) @@ -611,12 +611,10 @@ otp_7345(Config) when is_list(Config) -> otp_7345(ObjRef, _RdEnv, Args) -> Cid = ObjRef#contextId.cid, - _DpRef = - #dpRef{cid = Cid, + _ = #dpRef{cid = Cid, ms_device_context_id = cid_id, tlli = #ptmsi{value = 0}}, - _QosProfile = - #qosProfileBssgp{peak_bit_rate_msb = 0, + _ = #qosProfileBssgp{peak_bit_rate_msb = 0, peak_bit_rate_lsb = 80, t_a_precedence = 49}, [Cpdu|_] = Args, diff --git a/lib/compiler/test/compile_SUITE.erl b/lib/compiler/test/compile_SUITE.erl index 8cb7d1b55b..1c96abe017 100644 --- a/lib/compiler/test/compile_SUITE.erl +++ b/lib/compiler/test/compile_SUITE.erl @@ -30,7 +30,7 @@ other_output/1, encrypted_abstr/1, bad_record_use1/1, bad_record_use2/1, strict_record/1, missing_testheap/1, cover/1, env/1, core/1, asm/1, - sys_pre_attributes/1]). + sys_pre_attributes/1, dialyzer/1]). -export([init/3]). @@ -47,7 +47,7 @@ all() -> other_output, encrypted_abstr, {group, bad_record_use}, strict_record, missing_testheap, cover, env, core, asm, - sys_pre_attributes]. + sys_pre_attributes, dialyzer]. groups() -> [{bad_record_use, [], @@ -365,7 +365,7 @@ listings_big(Config) when is_list(Config) -> ?line do_listing(Big, TargetDir, dkern, ".kernel"), ?line Target = filename:join(TargetDir, big), - ?line {ok,big} = compile:file(Target, [asm,{outdir,TargetDir}]), + {ok,big} = compile:file(Target, [from_asm,{outdir,TargetDir}]), %% Cleanup. ?line ok = file:delete(Target ++ ".beam"), @@ -748,42 +748,65 @@ env_1(Simple, Target) -> %% compile the generated Core Erlang files. 
core(Config) when is_list(Config) -> - ?line Dog = test_server:timetrap(test_server:minutes(5)), - ?line PrivDir = ?config(priv_dir, Config), - ?line Outdir = filename:join(PrivDir, "core"), - ?line ok = file:make_dir(Outdir), + PrivDir = ?config(priv_dir, Config), + Outdir = filename:join(PrivDir, "core"), + ok = file:make_dir(Outdir), - ?line Wc = filename:join(filename:dirname(code:which(?MODULE)), "*.beam"), - ?line TestBeams = filelib:wildcard(Wc), - ?line Abstr = [begin {ok,{Mod,[{abstract_code, + Wc = filename:join(filename:dirname(code:which(?MODULE)), "*.beam"), + TestBeams = filelib:wildcard(Wc), + Abstr = [begin {ok,{Mod,[{abstract_code, {raw_abstract_v1,Abstr}}]}} = beam_lib:chunks(Beam, [abstract_code]), {Mod,Abstr} end || Beam <- TestBeams], - ?line Res = test_lib:p_run(fun(F) -> do_core(F, Outdir) end, Abstr), - ?line test_server:timetrap_cancel(Dog), - Res. - + test_lib:p_run(fun(F) -> do_core(F, Outdir) end, Abstr). do_core({M,A}, Outdir) -> try - {ok,M,Core} = compile:forms(A, [to_core,report]), - CoreFile = filename:join(Outdir, atom_to_list(M)++".core"), - CorePP = core_pp:format(Core), - ok = file:write_file(CoreFile, CorePP), - case compile:file(CoreFile, [clint,from_core,binary]) of - {ok,M,_} -> - ok = file:delete(CoreFile); - Other -> - io:format("*** core_lint failure '~p' for ~s\n", - [Other,CoreFile]), - error - end - catch Class:Error -> + do_core_1(M, A, Outdir) + catch + throw:{error,Error} -> + io:format("*** compilation failure '~p' for module ~s\n", + [Error,M]), + error; + Class:Error -> io:format("~p: ~p ~p\n~p\n", [M,Class,Error,erlang:get_stacktrace()]), error end. +do_core_1(M, A, Outdir) -> + {ok,M,Core0} = compile:forms(A, [to_core]), + CoreFile = filename:join(Outdir, atom_to_list(M)++".core"), + CorePP = core_pp:format(Core0), + ok = file:write_file(CoreFile, CorePP), + + %% Parse the .core file and return the result as Core Erlang Terms. + Core = case compile:file(CoreFile, [report_errors,from_core,no_copt,to_core,binary]) of + {ok,M,Core1} -> Core1; + Other -> throw({error,Other}) + end, + ok = file:delete(CoreFile), + + %% Compile as usual (including optimizations). + compile_forms(Core, [clint,from_core,binary]), + + %% Don't optimize to test that we are not dependent + %% on the Core Erlang optmimization passes. + %% (Example of a previous bug: The core_parse pass + %% would not turn map literals into #c_literal{} + %% records; if sys_core_fold was run it would fix + %% that; if sys_core_fold was not run v3_kernel would + %% crash.) + compile_forms(Core, [clint,from_core,no_copt,binary]), + + ok. + +compile_forms(Forms, Opts) -> + case compile:forms(Forms, [report_errors|Opts]) of + {ok,[],_} -> ok; + Other -> throw({error,Other}) + end. + %% Compile to Beam assembly language (.S) and then try to %% run .S through the compiler again. @@ -854,6 +877,20 @@ sys_pre_attributes(Config) -> [report,verbose]), ok. +%% Test the dialyzer option to cover more code. +dialyzer(Config) -> + Priv = ?config(priv_dir, Config), + file:set_cwd(?config(data_dir, Config)), + Opts = [{outdir,Priv},report_errors], + M = dialyzer_test, + {ok,M} = c:c(M, [dialyzer|Opts]), + [{a,b,c}] = M:M(), + + %% Cover huge line numbers without the 'dialyzer' option. + {ok,M} = c:c(M, Opts), + [{a,b,c}] = M:M(), + ok. + %%% %%% Utilities. 
%%% diff --git a/lib/compiler/test/compile_SUITE_data/dialyzer_test.erl b/lib/compiler/test/compile_SUITE_data/dialyzer_test.erl new file mode 100644 index 0000000000..ed65ff9c43 --- /dev/null +++ b/lib/compiler/test/compile_SUITE_data/dialyzer_test.erl @@ -0,0 +1,39 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2015. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +-module(dialyzer_test). +-export([?MODULE/0,turtle/0,test/1,huge/1]). + +-record(turtle, {a,b,c}). +-record(tortoise, {a,b,c}). + +?MODULE() -> + [{a,b,c}]. + +turtle() -> + #turtle{a=1,b=2,c=3}. + +test(T) -> + {T#tortoise.a,T#tortoise.b}. + +-file("dialyzer_test", 100000000). + +huge(X) -> + #turtle{a=42,b=100,c=511}, + X#tortoise.a. diff --git a/lib/compiler/test/core_SUITE_data/map_core_test.core b/lib/compiler/test/core_SUITE_data/map_core_test.core index 2aa853d450..a75f6cf24f 100644 --- a/lib/compiler/test/core_SUITE_data/map_core_test.core +++ b/lib/compiler/test/core_SUITE_data/map_core_test.core @@ -7,11 +7,11 @@ module 'map_core_test' ['map_core_test'/0, fun () -> let <_cor0> = %% Line 15 - ~{::<'check','ok'>,::<1337,#{#<104>(8,1,'integer',['unsigned'|['big']]), + ~{'check'=>'ok',1337=>#{#<104>(8,1,'integer',['unsigned'|['big']]), #<101>(8,1,'integer',['unsigned'|['big']]), #<108>(8,1,'integer',['unsigned'|['big']]), #<108>(8,1,'integer',['unsigned'|['big']]), - #<111>(8,1,'integer',['unsigned'|['big']])}#>,::<'val',0>}~ + #<111>(8,1,'integer',['unsigned'|['big']])}#,'val'=>0}~ in let <M> = %% Line 15 apply 'id'/1 @@ -23,7 +23,7 @@ module 'map_core_test' ['map_core_test'/0, in %% Line 16 case apply 'call'/2 (M, _cor2) of - <~{~<1337,#{#<104>(8,1,'integer',['unsigned'|['big']]), + <~{1337:=#{#<104>(8,1,'integer',['unsigned'|['big']]), #<101>(8,1,'integer',['unsigned'|['big']]), #<108>(8,1,'integer',['unsigned'|['big']]), #<108>(8,1,'integer',['unsigned'|['big']]), @@ -39,7 +39,7 @@ module 'map_core_test' ['map_core_test'/0, #<32>(8,1,'integer',['unsigned'|['big']]), #<53>(8,1,'integer',['unsigned'|['big']]), #<32>(8,1,'integer',['unsigned'|['big']]), - #<54>(8,1,'integer',['unsigned'|['big']])}#>,~<'check','ok'>,~<'val',21>}~> when 'true' -> + #<54>(8,1,'integer',['unsigned'|['big']])}#,'check':='ok','val':=21}~> when 'true' -> %% Line 17 'ok' ( <_cor3> when 'true' -> @@ -51,7 +51,7 @@ module 'map_core_test' ['map_core_test'/0, %% Line 20 fun (_cor1,_cor0) -> case <_cor1,_cor0> of - <M = ~{~<1337,Bin>,~<'check',_cor8>,~<'val',Val>}~,[V|Vs]> when 'true' -> + <M = ~{1337:=Bin,'check':=_cor8,'val':=Val}~,[V|Vs]> when 'true' -> let <_cor3> = %% Line 21 call 'erlang':'+' @@ -67,7 +67,7 @@ module 'map_core_test' ['map_core_test'/0, (Val, V) in let <_cor5> = %% Line 21 - ~{~<1337,_cor4>,~<'val',_cor2>|M}~ + ~{1337:=_cor4,'val':=_cor2|M}~ in %% Line 21 apply 'call'/2 (_cor5, Vs) diff --git a/lib/compiler/test/core_fold_SUITE.erl b/lib/compiler/test/core_fold_SUITE.erl index 2de17e7653..bc82eaf5aa 100644 --- 
a/lib/compiler/test/core_fold_SUITE.erl +++ b/lib/compiler/test/core_fold_SUITE.erl @@ -23,7 +23,8 @@ t_element/1,setelement/1,t_length/1,append/1,t_apply/1,bifs/1, eq/1,nested_call_in_case/1,guard_try_catch/1,coverage/1, unused_multiple_values_error/1,unused_multiple_values/1, - multiple_aliases/1,redundant_boolean_clauses/1,mixed_matching_clauses/1]). + multiple_aliases/1,redundant_boolean_clauses/1, + mixed_matching_clauses/1,unnecessary_building/1]). -export([foo/0,foo/1,foo/2,foo/3]). @@ -36,11 +37,12 @@ all() -> [{group,p}]. groups() -> - [{p,test_lib:parallel(), + [{p,[parallel], [t_element,setelement,t_length,append,t_apply,bifs, eq,nested_call_in_case,guard_try_catch,coverage, unused_multiple_values_error,unused_multiple_values, - multiple_aliases,redundant_boolean_clauses,mixed_matching_clauses]}]. + multiple_aliases,redundant_boolean_clauses, + mixed_matching_clauses,unnecessary_building]}]. init_per_suite(Config) -> @@ -86,6 +88,7 @@ t_element(Config) when is_list(Config) -> {_,_,_}=Tup -> ?line {'EXIT',{badarg,_}} = (catch element(4, Tup)) end, + {'EXIT',{badarg,_}} = (catch element(1, tuple_size(Tuple))), ok. @@ -104,6 +107,7 @@ setelement(Config) when is_list(Config) -> ?line error = setelement_crash_2({a,b,c,d,e,f}, <<42>>), {'EXIT',{badarg,_}} = (catch setelement(1, not_a_tuple, New)), + {'EXIT',{badarg,_}} = (catch setelement(3, {a,b}, New)), ok. @@ -195,7 +199,10 @@ foo(A, B, C) -> A + B + C. bifs(Config) when is_list(Config) -> - ?line <<1,2,3,4>> = id(list_to_binary([1,2,3,4])), + <<1,2,3,4>> = id(list_to_binary([1,2,3,4])), + K = {a,key}, + V = {a,value}, + {ok,#{K:=V}} = id(list_to_tuple([ok,#{K=>V}])), ok. -define(CMP_SAME(A0, B), (fun(A) -> true = A == B, false = A /= B end)(id(A0))). @@ -252,6 +259,8 @@ do_guard_try_catch(K, V) -> false end. +-record(cover_opt_guard_try, {list=[]}). + coverage(Config) when is_list(Config) -> ?line {'EXIT',{{case_clause,{a,b,c}},_}} = (catch cover_will_match_list_type({a,b,c})), @@ -261,6 +270,9 @@ coverage(Config) when is_list(Config) -> ?line error = cover_will_match_lit_list(), {ok,[a]} = cover_is_safe_bool_expr(a), + ok = cover_opt_guard_try(#cover_opt_guard_try{list=[a]}), + error = cover_opt_guard_try(#cover_opt_guard_try{list=[]}), + %% Make sure that we don't attempt to make literals %% out of pids. (Putting a pid into a #c_literal{} %% would crash later compiler passes.) @@ -273,6 +285,12 @@ coverage(Config) when is_list(Config) -> error = bsm_an_inlined(<<1,2,3>>, Config), error = bsm_an_inlined([], Config), + %% Cover eval_rel_op/4. + Tuple = id({a,b}), + false = case Tuple of + {_,_} -> + Tuple =:= true + end, ok. cover_will_match_list_type(A) -> @@ -314,6 +332,14 @@ cover_is_safe_bool_expr(X) -> false end. +cover_opt_guard_try(Msg) -> + if + length(Msg#cover_opt_guard_try.list) =/= 1 -> + error; + true -> + ok + end. + bsm_an_inlined(<<_:8>>, _) -> ok; bsm_an_inlined(_, _) -> error. @@ -400,4 +426,29 @@ mixed_matching_clauses(Config) when is_list(Config) -> end, ok. +unnecessary_building(Config) when is_list(Config) -> + Term1 = do_unnecessary_building_1(test_lib:id(a)), + [{a,a},{a,a}] = Term1, + 7 = erts_debug:size(Term1), + + %% The Input term should not be rebuilt (thus, it should + %% only be counted once in the size of the combined term). + Input = test_lib:id({a,b,c}), + Term2 = test_lib:id(do_unnecessary_building_2(Input)), + {b,[{a,b,c},none],x} = Term2, + 4+4+4+2 = erts_debug:size([Term2|Input]), + + ok. + +do_unnecessary_building_1(S) -> + %% The tuple must only be built once. + F0 = F1 = {S,S}, + [F0,F1]. 
+ +do_unnecessary_building_2({a,_,_}=T) -> + %% The T term should not be rebuilt. + {b, + [_,_] = [T,none], + x}. + id(I) -> I. diff --git a/lib/compiler/test/error_SUITE.erl b/lib/compiler/test/error_SUITE.erl index bd877bb528..0d23f12fb5 100644 --- a/lib/compiler/test/error_SUITE.erl +++ b/lib/compiler/test/error_SUITE.erl @@ -23,7 +23,7 @@ -export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, init_per_group/2,end_per_group/2, head_mismatch_line/1,warnings_as_errors/1, bif_clashes/1, - transforms/1,forbidden_maps/1,bad_utf8/1]). + transforms/1,maps_warnings/1,bad_utf8/1]). %% Used by transforms/1 test case. -export([parse_transform/2]). @@ -37,7 +37,7 @@ all() -> groups() -> [{p,test_lib:parallel(), [head_mismatch_line,warnings_as_errors,bif_clashes, - transforms,forbidden_maps,bad_utf8]}]. + transforms,maps_warnings,bad_utf8]}]. init_per_suite(Config) -> Config. @@ -241,17 +241,30 @@ parse_transform(_, _) -> error(too_bad). -forbidden_maps(Config) when is_list(Config) -> - Ts1 = [{map_illegal_use_of_pattern, +maps_warnings(Config) when is_list(Config) -> + Ts1 = [{map_ok_use_of_pattern, <<" - -export([t/0]). + -export([t/1]). + t(K) -> + #{K := 1 = V} = id(#{<<\"hi all\">> => 1}), + V. + id(I) -> I. + ">>, + [return], + []}, + {map_illegal_use_of_pattern, + <<" + -export([t/0,t/2]). + t(K,#{ K := V }) -> V. t() -> V = 32, #{<<\"hi\",V,\"all\">> := 1} = id(#{<<\"hi all\">> => 1}). id(I) -> I. ">>, [return], - {error,[{5,erl_lint,{illegal_map_key_variable,'V'}}], []}}], + {error,[{3,erl_lint,{unbound_var,'K'}}, + {6,erl_lint,illegal_map_key}],[]}} + ], [] = run2(Config, Ts1), ok. diff --git a/lib/compiler/test/float_SUITE.erl b/lib/compiler/test/float_SUITE.erl index afc04fd440..fb8da37f4f 100644 --- a/lib/compiler/test/float_SUITE.erl +++ b/lib/compiler/test/float_SUITE.erl @@ -118,6 +118,7 @@ math_functions(Config) when is_list(Config) -> ?line 0.0 = math:sinh(0), ?line 1.0 = math:cosh(0), ?line 0.0 = math:tanh(0), + 1.0 = math:log2(2), ?line 1.0 = math:log10(10), ?line -1.0 = math:cos(math:pi()), ?line 1.0 = math:exp(0), @@ -136,6 +137,7 @@ math_functions(Config) when is_list(Config) -> ?line 0.0 = math:sinh(id(0)), ?line 1.0 = math:cosh(id(0)), ?line 0.0 = math:tanh(id(0)), + 1.0 = math:log2(id(2)), ?line 1.0 = math:log10(id(10)), ?line 1.0 = math:exp(id(0)), ?line 0.0 = math:log(id(1)), diff --git a/lib/compiler/test/guard_SUITE.erl b/lib/compiler/test/guard_SUITE.erl index 34bfdeb1e5..08279d9408 100644 --- a/lib/compiler/test/guard_SUITE.erl +++ b/lib/compiler/test/guard_SUITE.erl @@ -30,7 +30,7 @@ old_guard_tests/1, build_in_guard/1,gbif/1, t_is_boolean/1,is_function_2/1, - tricky/1,rel_ops/1,literal_type_tests/1, + tricky/1,rel_ops/1,rel_op_combinations/1,literal_type_tests/1, basic_andalso_orelse/1,traverse_dcd/1, check_qlc_hrl/1,andalso_semi/1,t_tuple_size/1,binary_part/1, bad_constants/1,bad_guards/1]). @@ -42,12 +42,13 @@ all() -> [{group,p}]. groups() -> - [{p,test_lib:parallel(), + [{p,[parallel], [misc,const_cond,basic_not,complex_not,nested_nots, semicolon,complex_semicolon,comma,or_guard, more_or_guards,complex_or_guards,and_guard,xor_guard, more_xor_guards,build_in_guard,old_guard_tests,gbif, - t_is_boolean,is_function_2,tricky,rel_ops, + t_is_boolean,is_function_2,tricky, + rel_ops,rel_op_combinations, literal_type_tests,basic_andalso_orelse,traverse_dcd, check_qlc_hrl,andalso_semi,t_tuple_size,binary_part, bad_constants,bad_guards]}]. 
@@ -330,7 +331,15 @@ complex_semicolon(Config) when is_list(Config) -> ?line ok = csemi6({a,b}, 0), ?line ok = csemi6({}, 3), ?line ok = csemi6({a,b,c}, 3), - + + %% 7 + error = csemi7(#{a=>1}, 1, 0), + error = csemi7(<<>>, 1, 0), + ok = csemi7(#{a=>1}, 3, 0), + ok = csemi7(#{a=>1}, 0, 3), + ok = csemi7(#{a=>1}, 3, 3), + ok = csemi7(#{a=>1, b=>3}, 0, 0), + ok. csemi1(Type, Val) when is_list(Val), Type == float; @@ -442,6 +451,9 @@ csemi5(_, _) -> error. csemi6(A, B) when hd([tuple_size(A)]) > 1; abs(B) > 2 -> ok; csemi6(_, _) -> error. +csemi7(A, B, C) when A#{a:=B} > #{a=>1}; abs(C) > 2 -> ok; +csemi7(_, _, _) -> error. + comma(Config) when is_list(Config) -> %% ',' combinations of literal true/false. @@ -1122,6 +1134,231 @@ rel_ops(Config) when is_list(Config) -> -undef(TestOp). +rel_op_combinations(Config) when is_list(Config) -> + Digits0 = lists:seq(16#0030, 16#0039) ++ + lists:seq(16#0660, 16#0669) ++ + lists:seq(16#06F0, 16#06F9), + Digits = gb_sets:from_list(Digits0), + rel_op_combinations_1(16#0700, Digits), + + BrokenRange0 = lists:seq(3, 5) ++ + lists:seq(10, 12) ++ lists:seq(14, 20), + BrokenRange = gb_sets:from_list(BrokenRange0), + rel_op_combinations_2(30, BrokenRange), + + Red0 = [{I,2*I} || I <- lists:seq(0, 50)] ++ + [{I,5*I} || I <- lists:seq(51, 80)], + Red = gb_trees:from_orddict(Red0), + rel_op_combinations_3(100, Red). + +rel_op_combinations_1(0, _) -> + ok; +rel_op_combinations_1(N, Digits) -> + Bool = gb_sets:is_member(N, Digits), + Bool = is_digit_1(N), + Bool = is_digit_2(N), + Bool = is_digit_3(N), + Bool = is_digit_4(N), + Bool = is_digit_5(N), + Bool = is_digit_6(N), + Bool = is_digit_7(N), + Bool = is_digit_8(N), + rel_op_combinations_1(N-1, Digits). + +is_digit_1(X) when 16#0660 =< X, X =< 16#0669 -> true; +is_digit_1(X) when 16#0030 =< X, X =< 16#0039 -> true; +is_digit_1(X) when 16#06F0 =< X, X =< 16#06F9 -> true; +is_digit_1(_) -> false. + +is_digit_2(X) when (16#0030-1) < X, X =< 16#0039 -> true; +is_digit_2(X) when (16#0660-1) < X, X =< 16#0669 -> true; +is_digit_2(X) when (16#06F0-1) < X, X =< 16#06F9 -> true; +is_digit_2(_) -> false. + +is_digit_3(X) when 16#0660 =< X, X < (16#0669+1) -> true; +is_digit_3(X) when 16#0030 =< X, X < (16#0039+1) -> true; +is_digit_3(X) when 16#06F0 =< X, X < (16#06F9+1) -> true; +is_digit_3(_) -> false. + +is_digit_4(X) when (16#0660-1) < X, X < (16#0669+1) -> true; +is_digit_4(X) when (16#0030-1) < X, X < (16#0039+1) -> true; +is_digit_4(X) when (16#06F0-1) < X, X < (16#06F9+1) -> true; +is_digit_4(_) -> false. + +is_digit_5(X) when X >= 16#0660, X =< 16#0669 -> true; +is_digit_5(X) when X >= 16#0030, X =< 16#0039 -> true; +is_digit_5(X) when X >= 16#06F0, X =< 16#06F9 -> true; +is_digit_5(_) -> false. + +is_digit_6(X) when X > (16#0660-1), X =< 16#0669 -> true; +is_digit_6(X) when X > (16#0030-1), X =< 16#0039 -> true; +is_digit_6(X) when X > (16#06F0-1), X =< 16#06F9 -> true; +is_digit_6(_) -> false. + +is_digit_7(X) when 16#0660 =< X, X =< 16#0669 -> true; +is_digit_7(X) when 16#0030 =< X, X =< 16#003A, X =/= 16#003A -> true; +is_digit_7(X) when 16#06F0 =< X, X =< 16#06F9 -> true; +is_digit_7(_) -> false. + +is_digit_8(X) when X =< 16#0039, X > (16#0030-1) -> true; +is_digit_8(X) when X =< 16#06F9, X > (16#06F0-1) -> true; +is_digit_8(X) when X =< 16#0669, X > (16#0660-1) -> true; +is_digit_8(16#0670) -> false; +is_digit_8(_) -> false. 
+ +rel_op_combinations_2(0, _) -> + ok; +rel_op_combinations_2(N, Range) -> + Bool = gb_sets:is_member(N, Range), + Bool = broken_range_1(N), + Bool = broken_range_2(N), + Bool = broken_range_3(N), + Bool = broken_range_4(N), + Bool = broken_range_5(N), + Bool = broken_range_6(N), + Bool = broken_range_7(N), + Bool = broken_range_8(N), + Bool = broken_range_9(N), + Bool = broken_range_10(N), + Bool = broken_range_11(N), + Bool = broken_range_12(N), + Bool = broken_range_13(N), + rel_op_combinations_2(N-1, Range). + +broken_range_1(X) when X >= 10, X =< 20, X =/= 13 -> true; +broken_range_1(X) when X >= 3, X =< 5 -> true; +broken_range_1(_) -> false. + +broken_range_2(X) when X >= 10, X =< 12 -> true; +broken_range_2(X) when X >= 14, X =< 20 -> true; +broken_range_2(X) when X >= 3, X =< 5 -> true; +broken_range_2(_) -> false. + +broken_range_3(X) when X >= 10, X =< 12 -> true; +broken_range_3(X) when X >= 14, X < 21 -> true; +broken_range_3(3) -> true; +broken_range_3(4) -> true; +broken_range_3(5) -> true; +broken_range_3(_) -> false. + +broken_range_4(X) when X =< 5, X >= 3 -> true; +broken_range_4(X) when X >= 10, X =< 20, X =/= 13 -> true; +broken_range_4(X) when X =< 100 -> false; +broken_range_4(_) -> false. + +broken_range_5(X) when X >= 10, X =< 20, X =/= 13 -> true; +broken_range_5(X) when X > 2, X =< 5 -> true; +broken_range_5(_) -> false. + +broken_range_6(X) when X >= 10, X =< 20, X =/= 13 -> true; +broken_range_6(X) when X > 2, X < 6 -> true; +broken_range_6(_) -> false. + +broken_range_7(X) when X > 2, X < 6 -> true; +broken_range_7(X) when X >= 10, X =< 20, X =/= 13 -> true; +broken_range_7(X) when X > 30 -> false; +broken_range_7(_) -> false. + +broken_range_8(X) when X >= 10, X =< 20, X =/= 13 -> true; +broken_range_8(X) when X =:= 3 -> true; +broken_range_8(X) when X >= 3, X =< 5 -> true; +broken_range_8(_) -> false. + +broken_range_9(X) when X >= 10, X =< 20, X =/= 13 -> true; +broken_range_9(X) when X =:= 13 -> false; +broken_range_9(X) when X >= 3, X =< 5 -> true; +broken_range_9(_) -> false. + +broken_range_10(X) when X >= 3, X =< 5 -> true; +broken_range_10(X) when X >= 10, X =< 20, X =/= 13 -> true; +broken_range_10(X) when X =/= 13 -> false; +broken_range_10(_) -> false. + +broken_range_11(X) when X >= 10, X =< 20, X =/= 13 -> true; +broken_range_11(X) when is_tuple(X), X =:= 10 -> true; +broken_range_11(X) when X >= 3, X =< 5 -> true; +broken_range_11(_) -> false. + +broken_range_12(X) when X >= 3, X =< 5 -> true; +broken_range_12(X) when X >= 10, X =< 20, X =/= 13 -> true; +broken_range_12(X) when X < 30, X > 20 -> false; +broken_range_12(_) -> false. + +broken_range_13(X) when X >= 10, X =< 20, 13 =/= X -> true; +broken_range_13(X) when X >= 3, X =< 5 -> true; +broken_range_13(_) -> false. + +rel_op_combinations_3(0, _) -> + ok; +rel_op_combinations_3(N, Red) -> + Val = case gb_trees:lookup(N, Red) of + none -> none; + {value,V} -> V + end, + Val = redundant_1(N), + Val = redundant_2(N), + Val = redundant_3(N), + Val = redundant_4(N), + Val = redundant_5(N), + Val = redundant_6(N), + Val = redundant_7(N), + Val = redundant_8(N), + Val = redundant_9(N), + Val = redundant_10(N), + Val = redundant_11(N), + rel_op_combinations_3(N-1, Red). + +redundant_1(X) when X >= 51, X =< 80 -> 5*X; +redundant_1(X) when X < 51 -> 2*X; +redundant_1(_) -> none. + +redundant_2(X) when X < 51 -> 2*X; +redundant_2(X) when X >= 51, X =< 80 -> 5*X; +redundant_2(_) -> none. 
+ +redundant_3(X) when X < 51 -> 2*X; +redundant_3(X) when X =< 80, X >= 51 -> 5*X; +redundant_3(X) when X =/= 100 -> none; +redundant_3(_) -> none. + +redundant_4(X) when X < 51 -> 2*X; +redundant_4(X) when X =< 80, X > 50 -> 5*X; +redundant_4(X) when X =/= 100 -> none; +redundant_4(_) -> none. + +redundant_5(X) when X < 51 -> 2*X; +redundant_5(X) when X > 50, X < 81 -> 5*X; +redundant_5(X) when X =< 10 -> none; +redundant_5(_) -> none. + +redundant_6(X) when X > 50, X =< 80 -> 5*X; +redundant_6(X) when X < 51 -> 2*X; +redundant_6(_) -> none. + +redundant_7(X) when is_integer(X), X >= 51, X =< 80 -> 5*X; +redundant_7(X) when is_integer(X), X < 51 -> 2*X; +redundant_7(_) -> none. + +redundant_8(X) when X >= 51, X =< 80 -> 5*X; +redundant_8(X) when X < 51 -> 2*X; +redundant_8(_) -> none. + +redundant_9(X) when X >= 51, X =< 80 -> 5*X; +redundant_9(X) when X < 51 -> 2*X; +redundant_9(90) -> none; +redundant_9(X) when X =/= 90 -> none; +redundant_9(_) -> none. + +redundant_10(X) when X >= 51, X =< 80 -> 5*X; +redundant_10(X) when X < 51 -> 2*X; +redundant_10(90) -> none; +redundant_10(X) when X =:= 90 -> none; +redundant_10(_) -> none. + +redundant_11(X) when X < 51 -> 2*X; +redundant_11(X) when X =:= 10 -> 2*X; +redundant_11(X) when X >= 51, X =< 80 -> 5*X; +redundant_11(_) -> none. %% Test type tests on literal values. (From emulator test suites.) literal_type_tests(Config) when is_list(Config) -> @@ -1565,6 +1802,12 @@ bad_guards(Config) when is_list(Config) -> fc(catch bad_guards_2(#{a=>0,b=>0}, [x])), fc(catch bad_guards_2(not_a_map, [x])), fc(catch bad_guards_2(42, [x])), + + fc(catch bad_guards_3(#{a=>0,b=>0}, [])), + fc(catch bad_guards_3(#{a=>0,b=>0}, [x])), + fc(catch bad_guards_3(not_a_map, [x])), + fc(catch bad_guards_3(42, [x])), + ok. %% beam_bool used to produce GC BIF instructions whose @@ -1576,6 +1819,12 @@ bad_guards_1(X, [_]) when {{X}}, -X -> bad_guards_2(M, [_]) when M#{a := 0, b => 0}, map_size(M) -> ok. +%% beam_type used to produce an GC BIF instruction whose Live operand +%% included uninitialized registers. + +bad_guards_3(M, [_]) when is_map(M) andalso M#{a := 0, b => 0}, length(M) -> + ok. + %% Call this function to turn off constant propagation. id(I) -> I. diff --git a/lib/compiler/test/lc_SUITE.erl b/lib/compiler/test/lc_SUITE.erl index 398398a397..62bada1407 100644 --- a/lib/compiler/test/lc_SUITE.erl +++ b/lib/compiler/test/lc_SUITE.erl @@ -18,12 +18,12 @@ %% -module(lc_SUITE). --author('[email protected]'). -export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, init_per_group/2,end_per_group/2, init_per_testcase/2,end_per_testcase/2, basic/1,deeply_nested/1,no_generator/1, - empty_generator/1,no_export/1]). + empty_generator/1,no_export/1,shadow/1, + effect/1]). -include_lib("test_server/include/test_server.hrl"). @@ -31,10 +31,18 @@ suite() -> [{ct_hooks,[ts_install_cth]}]. all() -> test_lib:recompile(?MODULE), - [basic, deeply_nested, no_generator, empty_generator, no_export]. + [{group,p}]. groups() -> - []. + [{p,test_lib:parallel(), + [basic, + deeply_nested, + no_generator, + empty_generator, + no_export, + shadow, + effect + ]}]. init_per_suite(Config) -> Config. @@ -59,34 +67,34 @@ end_per_testcase(Case, Config) when is_atom(Case), is_list(Config) -> ok. 
basic(Config) when is_list(Config) -> - ?line L0 = lists:seq(1, 10), - ?line L1 = my_map(fun(X) -> {x,X} end, L0), - ?line L1 = [{x,X} || X <- L0], - ?line L0 = my_map(fun({x,X}) -> X end, L1), - ?line [1,2,3,4,5] = [X || X <- L0, X < 6], - ?line [4,5,6] = [X || X <- L0, X > 3, X < 7], - ?line [] = [X || X <- L0, X > 32, X < 7], - ?line [1,3,5,7,9] = [X || X <- L0, odd(X)], - ?line [2,4,6,8,10] = [X || X <- L0, not odd(X)], - ?line [1,3,5,9] = [X || X <- L0, odd(X), X =/= 7], - ?line [2,4,8,10] = [X || X <- L0, not odd(X), X =/= 6], + L0 = lists:seq(1, 10), + L1 = my_map(fun(X) -> {x,X} end, L0), + L1 = [{x,X} || X <- L0], + L0 = my_map(fun({x,X}) -> X end, L1), + [1,2,3,4,5] = [X || X <- L0, X < 6], + [4,5,6] = [X || X <- L0, X > 3, X < 7], + [] = [X || X <- L0, X > 32, X < 7], + [1,3,5,7,9] = [X || X <- L0, odd(X)], + [2,4,6,8,10] = [X || X <- L0, not odd(X)], + [1,3,5,9] = [X || X <- L0, odd(X), X =/= 7], + [2,4,8,10] = [X || X <- L0, not odd(X), X =/= 6], %% Append is specially handled. - ?line [1,3,5,9,2,4,8,10] = [X || X <- L0, odd(X), X =/= 7] ++ + [1,3,5,9,2,4,8,10] = [X || X <- L0, odd(X), X =/= 7] ++ [X || X <- L0, not odd(X), X =/= 6], %% Guards BIFs are evaluated in guard context. Weird, but true. - ?line [{a,b,true},{x,y,true,true}] = [X || X <- tuple_list(), element(3, X)], + [{a,b,true},{x,y,true,true}] = [X || X <- tuple_list(), element(3, X)], %% Filter expressions with andalso/orelse. - ?line "abc123" = alphanum("?abc123.;"), + "abc123" = alphanum("?abc123.;"), %% Error cases. - ?line [] = [{xx,X} || X <- L0, element(2, X) == no_no_no], - ?line {'EXIT',_} = (catch [X || X <- L1, list_to_atom(X) == dum]), - ?line [] = [X || X <- L1, X+1 < 2], - ?line {'EXIT',_} = (catch [X || X <- L1, odd(X)]), - ?line fc([x], catch [E || E <- id(x)]), + [] = [{xx,X} || X <- L0, element(2, X) == no_no_no], + {'EXIT',_} = (catch [X || X <- L1, list_to_atom(X) == dum]), + [] = [X || X <- L1, X+1 < 2], + {'EXIT',_} = (catch [X || X <- L1, odd(X)]), + fc([x], catch [E || E <- id(x)]), ok. tuple_list() -> @@ -116,12 +124,12 @@ deeply_nested_1() -> X16 <- [4],X17 <- [3],X18 <- [fun() -> X16+X17 end],X19 <- [2],X20 <- [1]]. no_generator(Config) when is_list(Config) -> - ?line Seq = lists:seq(-10, 17), - ?line [no_gen_verify(no_gen(A, B), A, B) || A <- Seq, B <- Seq], + Seq = lists:seq(-10, 17), + [no_gen_verify(no_gen(A, B), A, B) || A <- Seq, B <- Seq], %% Literal expression, for coverage. - ?line [a] = [a || true], - ?line [a,b,c] = [a || true] ++ [b,c], + [a] = [a || true], + [a,b,c] = [a || true] ++ [b,c], ok. no_gen(A, B) -> @@ -174,13 +182,51 @@ no_gen_eval(Fun, Res) -> no_gen_one_more(A, B) -> A + 1 =:= B. empty_generator(Config) when is_list(Config) -> - ?line [] = [X || {X} <- [], (false or (X/0 > 3))], + [] = [X || {X} <- [], (false or (X/0 > 3))], ok. no_export(Config) when is_list(Config) -> [] = [ _X = a || false ] ++ [ _X = a || false ], ok. +%% Test that variables in list comprehensions are +%% correctly shadowed. + +shadow(Config) when is_list(Config) -> + Shadowed = nomatch, + _ = id(Shadowed), %Eliminate warning. + L = [{Shadowed,Shadowed+1} || Shadowed <- lists:seq(7, 9)], + [{7,8},{8,9},{9,10}] = id(L), + [8,9] = id([Shadowed || {_,Shadowed} <- id(L), + Shadowed < 10]), + ok. + +effect(Config) when is_list(Config) -> + [{42,{a,b,c}}] = + do_effect(fun(F, L) -> + [F({V1,V2}) || + #{<<1:500>>:=V1,<<2:301>>:=V2} <- L], + ok + end, id([#{},x,#{<<1:500>>=>42,<<2:301>>=>{a,b,c}}])), + + %% Will trigger the time-trap timeout if not tail-recursive. 
+ case ?MODULE of + lc_SUITE -> + _ = [{'EXIT',{badarg,_}} = + (catch binary_to_atom(<<C/utf8>>, utf8)) || + C <- lists:seq(16#10000, 16#FFFFF)]; + _ -> + ok + end, + + ok. + +do_effect(Lc, L) -> + put(?MODULE, []), + F = fun(V) -> put(?MODULE, [V|get(?MODULE)]) end, + ok = Lc(F, L), + lists:reverse(erase(?MODULE)). + id(I) -> I. fc(Args, {'EXIT',{function_clause,[{?MODULE,_,Args,_}|_]}}) -> ok; diff --git a/lib/compiler/test/map_SUITE.erl b/lib/compiler/test/map_SUITE.erl index 403b7e8405..cfa8262701 100644 --- a/lib/compiler/test/map_SUITE.erl +++ b/lib/compiler/test/map_SUITE.erl @@ -21,6 +21,7 @@ ]). -export([ + %% literals t_build_and_match_literals/1, t_update_literals/1,t_match_and_update_literals/1, t_update_map_expressions/1, @@ -32,6 +33,15 @@ t_map_size/1, t_build_and_match_aliasing/1, + %% variables + t_build_and_match_variables/1, + t_update_assoc_variables/1,t_update_exact_variables/1, + t_nested_pattern_expressions/1, + t_guard_update_variables/1, + t_guard_sequence_variables/1, + t_guard_sequence_mixed/1, + t_frequency_table/1, + %% warnings t_warn_useless_build/1, t_warn_pair_key_overloaded/1, @@ -51,7 +61,10 @@ suite() -> []. -all() -> [ +all() -> + test_lib:recompile(?MODULE), + [ + %% literals t_build_and_match_literals, t_update_literals, t_match_and_update_literals, t_update_map_expressions, @@ -62,6 +75,15 @@ all() -> [ t_map_size, t_build_and_match_aliasing, + %% variables + t_build_and_match_variables, + t_update_assoc_variables,t_update_exact_variables, + t_nested_pattern_expressions, + t_guard_update_variables, + t_guard_sequence_variables, + t_guard_sequence_mixed, + t_frequency_table, + %% warnings t_warn_useless_build, t_warn_pair_key_overloaded, @@ -73,6 +95,7 @@ all() -> [ t_build_and_match_nil, t_build_and_match_structure, + %% errors in 17.0-rc1 t_update_values, t_expand_map_update, @@ -119,6 +142,11 @@ t_build_and_match_literals(Config) when is_list(Config) -> %% nil key #{[]:=ok,1:=2} = id(#{[]=>ok,1=>2}), + #{1:=2,[]:=ok,1:=2} = id(#{[]=>ok,1=>2}), + + %% pseudo literals + #{ -3 := yep } = id(#{ -3 => yep }), + #{ <<0:358>> := "three" } = id(#{<<0:358>> =>"three"}), %% error case {'EXIT',{{badmatch,_},_}} = (catch (#{x:=3,x:=2} = id(#{x=>3}))), @@ -126,10 +154,10 @@ t_build_and_match_literals(Config) when is_list(Config) -> {'EXIT',{{badmatch,_},_}} = (catch (#{x:=3} = id({a,b,c}))), {'EXIT',{{badmatch,_},_}} = (catch (#{x:=3} = id(#{y=>3}))), {'EXIT',{{badmatch,_},_}} = (catch (#{x:=3} = id(#{x=>"three"}))), - {'EXIT',{badarg,_}} = (catch id(#{<<0:258>> =>"three"})), {'EXIT',{{badmatch,_},_}} = (catch (#{#{"a"=>42} := 3}=id(#{#{"a"=>3}=>42}))), ok. 
+ t_build_and_match_aliasing(Config) when is_list(Config) -> M1 = id(#{a=>1,b=>2,c=>3,d=>4}), #{c:=C1=_=_=C2} = M1, @@ -143,6 +171,19 @@ t_build_and_match_aliasing(Config) when is_list(Config) -> M2 = id(#{"a"=>1,"b"=>2,"c"=>3,"d"=>4}), #{"a":=A2,"a":=A2,"a":=A2,"b":=B2,"b":=B2,"b":=2} = M2, #{"a":=_,"a":=_,"a":=_,"b":=_,"b":=_,"b":=2} = M2, + + #{a:=A1,a:=A1,a:=A1,b:=B1,b:=B1} = #{a:=A1,a:=A1,a:=A1,b:=B1,b:=B1,b:=2} = M1, + #{"a":=A3,"b":=B3} = #{"a":=A3,"a":=A3} = #{"b":=B3,"b":=2} = M2, + + #{"a":=1,"b":=2,"c":=3,"d":=4} = #{"a":=A4,"b":=B4} = #{"a":=A4,"a":=A4} = #{"b":=B4,"d":=4} = M2, + #{"a":=A5,"b":=B5} = #{"a":=A5,"a":=A5} = #{"b":=B5,"d":=4} = #{"a":=1,"b":=2,"c":=3,"d":=4} = M2, + #{"a":=_,"b":=_} = #{"a":=_,"a":=_} = #{"b":=_,"d":=4} = #{"a":=1,"b":=2,"c":=3,"d":=4} = M2, + + M3 = id(#{<<12:300>>=>1,<<13:300>>=>2}), + #{<<12:300>> := V1, <<13:300>> := V2} = #{<<13:300>> := V2, <<12:300>> := V1} = M3, + #{<<12:300>> := 1, <<13:300>> := 2} = #{<<13:300>> := _, <<12:300>> := _} = M3, + #{<<13:300>> := _, <<12:300>> := _} = #{<<12:300>> := 1, <<13:300>> := 2} = M3, + ok. t_map_size(Config) when is_list(Config) -> @@ -241,11 +282,14 @@ t_update_assoc(Config) when is_list(Config) -> #{1:=a,2:=b,3.0:=new,4:=d,5:=e} = M2, M2 = M0#{3.0:=wrong,3.0=>new}, + % Can't handle directly yet + Bin = <<0:257>>, + #{ Bin := val } = id(M0#{<<0:257>> => val}), %% binary limitation + %% Errors cases. BadMap = id(badmap), {'EXIT',{badarg,_}} = (catch BadMap#{nonexisting=>val}), {'EXIT',{badarg,_}} = (catch <<>>#{nonexisting=>val}), - {'EXIT',{badarg,_}} = (catch M0#{<<0:257>> => val}), %% limitation ok. t_update_exact(Config) when is_list(Config) -> @@ -275,14 +319,22 @@ t_update_exact(Config) when is_list(Config) -> {'EXIT',{badarg,_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}), {'EXIT',{badarg,_}} = (catch <<>>#{nonexisting:=val}), {'EXIT',{badarg,_}} = (catch M0#{<<0:257>> := val}), %% limitation + + %% A workaround for a bug allowed an empty map to be updated. + {'EXIT',{badarg,_}} = (catch (id(#{}))#{a:=1}), + {'EXIT',{badarg,_}} = (catch #{}#{a:=1}), + Empty = #{}, + {'EXIT',{badarg,_}} = (catch Empty#{a:=1}), ok. t_update_values(Config) when is_list(Config) -> V0 = id(1337), M0 = #{ a => 1, val => V0}, V1 = get_val(M0), - M1 = M0#{ val := [V0,V1], "wazzup" => 42 }, + M1 = id(M0#{ val := [V0,V1], "wazzup" => 42 }), [1337, {some_val, 1337}] = get_val(M1), + M2 = id(M1#{ <<42:333>> => 1337 }), + {bin_key,1337} = get_val(M2), N = 110, List = [{[I,1,2,3,I],{1,2,3,"wat",I}}|| I <- lists:seq(1,N)], @@ -308,6 +360,7 @@ t_export(Config) when is_list(Config) -> check_val(#{val1:=V1, val2:=V2},V1,V2) -> ok. +get_val(#{ <<42:333>> := V }) -> {bin_key, V}; get_val(#{ "wazzup" := _, val := V}) -> V; get_val(#{ val := V }) -> {some_val, V}. @@ -437,7 +490,10 @@ guard_receive_loop() -> t_list_comprehension(Config) when is_list(Config) -> - [#{k:=1},#{k:=2},#{k:=3}] = [#{k=>I} || I <- [1,2,3]], + [#{k:=1},#{k:=2},#{k:=3}] = id([#{k=>I} || I <- [1,2,3]]), + Ls = id([#{<<2:301>> => I, "wat" => I + 1} || I <- [1,2,3]]), + [#{<<2:301>>:=1,"wat":=2},#{<<2:301>>:=2,"wat":=3},#{<<2:301>>:=3,"wat":=4}] = Ls, + [{1,2},{2,3},{3,4}] = id([{I2,I1} || #{"wat" := I1, <<2:301>> := I2} <- Ls]), ok. t_guard_fun(Config) when is_list(Config) -> @@ -585,6 +641,7 @@ t_build_and_match_nil(Config) when is_list(Config) -> "treat" => V2, [] => V1 }), #{ [] := V3, [] := V3 } = id(#{ [] => V1, [] => V3 }), + #{ <<1>> := V3, [] := V1 } = id(#{ [] => V1, <<1>> => V3 }), ok. 
t_build_and_match_structure(Config) when is_list(Config) -> @@ -601,5 +658,325 @@ t_build_and_match_structure(Config) when is_list(Config) -> end, ok. +%% simple build and match variables +t_build_and_match_variables(Config) when is_list(Config) -> + K0 = id(#{}), + K1 = id(1), V1 = id(a), + K2 = id(2), V2 = id(b), + K3 = id(3), V3 = id("c"), + K4 = id("4"), V4 = id("d"), + K5 = id(<<"5">>), V5 = id(<<"e">>), + K6 = id({"6",7}), V6 = id("f"), + K7 = id(#{ "a" => 3 }), + #{K1:=V1} = id(#{K1=>V1}), + #{K1:=V1,K2:=V2} = id(#{K1=>V1,K2=>V2}), + #{K1:=V1,K2:=V2,K3:=V3} = id(#{K1=>V1,K2=>V2,K3=>V3}), + #{K1:=V1,K2:=V2,K3:=V3,K4:=V4} = id(#{K1=>V1,K2=>V2,K3=>V3,K4=>V4}), + #{K1:=V1,K2:=V2,K3:=V3,K4:=V4,K5:=V5} = id(#{K1=>V1,K2=>V2,K3=>V3,K4=>V4,K5=>V5}), + #{K1:=V1,K2:=V2,K3:=V3,K4:=V4,K5:=V5,K6:=V6} = id(#{K1=>V1,K2=>V2,K3=>V3,K4=>V4,K5=>V5,K6=>V6}), + + #{K5:=X,K5:=X=3,K4:=4} = id(#{K5=>3,K4=>4}), + #{K5:=X,<<"5">>:=X=3,K4:=4} = id(#{K5=>3,K4=>4}), + #{K5:=X,<<"5">>:=X=3,K4:=4} = id(#{<<"5">>=>3,K4=>4}), + + #{ K4:=#{ K3:=#{K1:=V1, K2:=V2}}, K5:=V5} = + id(#{ K5=>V5, K4=>#{ K3=>#{K2 => V2, K1 => V1}}}), + #{ K4 := #{ K5 := Res }, K6 := Res} = id(#{K4=>#{K5 => 99}, K6 => 99}), + + %% has keys + #{a :=_,b :=_,K1:=_,K2:=_,K3:=V3,K4:=ResKey,K4:=ResKey,"4":=ResKey,"4":="ok"} = + id(#{ a=>1, b=>1, K1=>V1, K2=>V2, K3=>V3, K4=>"nope", "4"=>"ok" }), + + %% function + ok = match_function_map_neg_keys(#{ -1 => a, -2 => b, -3 => c }), + + %% map key + #{ K0 := 42 } = id(#{ K0 => 42 }), + #{ K7 := 42 } = id(#{ K7 => 42 }), + + %% nil key + KNIL = id([]), + #{KNIL:=ok,1:=2} = id(#{KNIL=>ok,1=>2}), + + Bin = <<0:258>>, + #{ Bin := "three" } = id(#{<<0:258>> =>"three"}), + + %% error case + {'EXIT',{{badmatch,_},_}} = (catch (#{K5:=3,x:=2} = id(#{K5=>3}))), + {'EXIT',{{badmatch,_},_}} = (catch (#{K5:=2} = id(#{K5=>3}))), + {'EXIT',{{badmatch,_},_}} = (catch (#{K5:=3} = id({a,b,c}))), + {'EXIT',{{badmatch,_},_}} = (catch (#{K5:=3} = id(#{K6=>3}))), + {'EXIT',{{badmatch,_},_}} = (catch (#{K5:=3} = id(K7))), + {'EXIT',{{badmatch,_},_}} = (catch (#{K7:=3} = id(#{K7=>42}))), + ok. + + +match_function_map_neg_keys(#{ -1 := a, -2 := b, -3 := c }) -> ok. + +t_update_assoc_variables(Config) when is_list(Config) -> + K1 = id(1), + K2 = id(2), + K3 = id(3.0), + K4 = id(4), + K5 = id(5), + K6 = id(2.0), + + M0 = #{K1=>a,K2=>b,K3=>c,K4=>d,K5=>e}, + + M1 = M0#{K1=>42,K2=>100,K4=>[a,b,c]}, + #{1:=42,2:=100,3.0:=c,4:=[a,b,c],5:=e} = M1, + #{1:=42,2:=b,4:=d,5:=e,2.0:=100,K3:=c,4.0:=[a,b,c]} = M0#{1.0=>float,1:=42,2.0=>wrong,K6=>100,4.0=>[a,b,c]}, + + M2 = M0#{K3=>new}, + #{1:=a,2:=b,K3:=new,4:=d,5:=e} = M2, + M2 = M0#{3.0:=wrong,K3=>new}, + + #{ <<0:258>> := val } = id(M0#{<<0:258>> => val}), %% binary limitation + + %% Errors cases. + BadMap = id(badmap), + {'EXIT',{badarg,_}} = (catch BadMap#{nonexisting=>val}), + {'EXIT',{badarg,_}} = (catch <<>>#{nonexisting=>val}), + ok. 
+ +t_update_exact_variables(Config) when is_list(Config) -> + K1 = id(1), + K2 = id(2), + K3 = id(3.0), + K4 = id(4), + + M0 = id(#{1=>a,2=>b,3.0=>c,4=>d,5=>e}), + + M1 = M0#{K1:=42,K2:=100,K4:=[a,b,c]}, + #{1:=42,2:=100,3.0:=c,K4:=[a,b,c],5:=e} = M1, + M1 = M0#{K1:=wrong,1:=also_wrong,K1=>42,2=>wrong,K2:=100,4:=[a,b,c]}, + + M2 = M0#{K3:=new}, + #{1:=a,K2:=b,3.0:=new,K4:=d,5:=e} = M2, + M2 = M0#{3.0=>wrong,K3:=new}, + true = M2 =/= M0#{3=>right,3.0:=new}, + #{ 3 := right, 3.0 := new } = M0#{3=>right,K3:=new}, + + M3 = id(#{ 1 => val}), + #{1 := update2,1.0 := new_val4} = M3#{ + 1.0 => new_val1, K1 := update, K1=> update3, + K1 := update2, 1.0 := new_val2, 1.0 => new_val3, + 1.0 => new_val4 }, + + #{ "wat" := 3, 2 := a } = id(#{ "wat" => 1, K2 => 2 }#{ K2 := a, "wat" := 3 }), + + %% Errors cases. + {'EXIT',{badarg,_}} = (catch ((id(nil))#{ a := b })), + {'EXIT',{badarg,_}} = (catch M0#{nonexisting:=val}), + {'EXIT',{badarg,_}} = (catch M0#{1.0:=v,1.0=>v2}), + {'EXIT',{badarg,_}} = (catch M0#{42.0:=v,42:=v2}), + {'EXIT',{badarg,_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}), + {'EXIT',{badarg,_}} = (catch <<>>#{nonexisting:=val}), + {'EXIT',{badarg,_}} = (catch M0#{<<0:257>> := val}), %% limitation + ok. + +t_nested_pattern_expressions(Config) when is_list(Config) -> + K1 = id("hello"), + %K2 = id({ok}), + [_,_,#{ <<"hi">> := wat, K1 := 42 }|_] = id([k,k,#{<<"hi">> => wat, K1 => 42}]), + [_,_,#{ -1 := wat, K1 := 42 }|_] = id([k,k,#{-1 => wat, K1 => 42}]), + [_,_,{#{ -1 := #{ {-3,<<0:300>>} := V1 }, K1 := 42 },3}|_] = id([k,k,{#{-1 => #{{-3,<<0:300>>}=>"hi"}, K1 => 42},3}]), + "hi" = V1, + %[k,#{ {-1,K1,[]} := {wat,K1}, K2 := 42 }|_] = id([k,#{{-1,K1,[]} => {wat,K1}, K2 => 42}]), + %[k,#{ [-1,K2,[]] := {wat,K1}, K1 := 42 }|_] = id([k,#{[-1,K2,[]] => {wat,K1}, K1 => 42}]), + + M0 = id(#{ <<33:333>> => 1, <<332:333>> => ok, a => ok, wat => yep, watzor => ok }), + F0 = map_nested_pattern_funs(M0), + F1 = F0(wat), + F2 = F1(watzor), + {yep,ok} = F2(M0), + ok. + +map_nested_pattern_funs(M) -> + K0 = id(a), + fun(K1) -> + case M of + #{ K0 := ok, K1 := yep, <<33:333>> := 1 } -> + fun(K2) -> + case M of + #{ K2 := ok, K1 := yep, <<33:333>> := 1 } -> + fun + (#{ <<332:333>> := ok, K1 := V1, K2 := V2 }) -> + {V1,V2} + end + end + end + end + end. + +t_guard_update_variables(Config) when is_list(Config) -> + error = map_guard_update_variables(n,#{},#{}), + first = map_guard_update_variables(x,#{}, #{x=>first}), + second = map_guard_update_variables(x,#{y=>old}, #{x=>second,y=>old}), + third = map_guard_update_variables(x,#{x=>old,y=>old}, #{x=>third,y=>old}), + fourth = map_guard_update_variables(x,#{x=>old,y=>old}, #{x=>4,y=>new}), + ok. + +map_guard_update_variables(K,M1,M2) when M1#{K=>first} =:= M2 -> first; +map_guard_update_variables(K,M1,M2) when M1#{K=>second} =:= M2 -> second; +map_guard_update_variables(K,M1,M2) when M1#{K:=third} =:= M2 -> third; +map_guard_update_variables(K,M1,M2) when M1#{K:=4,y=>new} =:= M2 -> fourth; +map_guard_update_variables(_,_,_) -> error. 
+ +t_guard_sequence_variables(Config) when is_list(Config) -> + {1,"a"} = map_guard_sequence_var_1(a,#{seq=>1,a=>id("a"),b=>no}), + {2,"b"} = map_guard_sequence_var_1(b,#{seq=>2,b=>id("b"),a=>no}), + {3,"c"} = map_guard_sequence_var_1(a,#{seq=>3,a=>id("c"),b=>no}), + {4,"d"} = map_guard_sequence_var_1(b,#{seq=>4,b=>id("d"),a=>no}), + {4,4} = map_guard_sequence_var_1(seq,#{seq=>4}), + {4,4,y} = map_guard_sequence_var_1(seq,#{seq=>4,b=>id("d"),a=>y}), + {5,"d"} = map_guard_sequence_var_1(b,#{seq=>5,b=>id("d"),a=>y}), + + %% error case + {'EXIT',{{case_clause,_},_}} = (catch map_guard_sequence_var_1("a",#{seq=>4,val=>id("e")})), + ok. + + +map_guard_sequence_var_1(K,M) -> + case M of + #{seq:=1=Seq, K:=Val} -> {Seq,Val}; + #{seq:=2=Seq, K:=Val} -> {Seq,Val}; + #{seq:=3=Seq, K:=Val} -> {Seq,Val}; + #{K:=4=Seq, K:=Val1,a:=Val2} -> {Seq,Val1,Val2}; + #{seq:=4=Seq, K:=Val} -> {Seq,Val}; + #{K:=4=Seq, K:=Val} -> {Seq,Val}; + #{seq:=5=Seq, K:=Val} -> {Seq,Val} + end. + + +t_guard_sequence_mixed(Config) when is_list(Config) -> + M0 = id(#{ a=>1, b=>1, c=>1, d=>1, e=>1, f=>1, g=>1, h=>1 }), + M1 = id(M0#{ d := 3 }), + 1 = map_guard_sequence_mixed(a,d,M1), + M2 = id(M1#{ b := 2, d := 4, h := 2 }), + 2 = map_guard_sequence_mixed(a,d,M2), + M3 = id(M2#{ b := 3, e := 5, g := 3 }), + 3 = map_guard_sequence_mixed(a,e,M3), + M4 = id(M3#{ c := 4, e := 6, h := 1 }), + 4 = map_guard_sequence_mixed(a,e,M4), + M5 = id(M4#{ c := 5, f := 7, g := 2 }), + 5 = map_guard_sequence_mixed(a,f,M5), + M6 = id(M5#{ c := 6, f := 8, h := 3 }), + 6 = map_guard_sequence_mixed(a,f,M6), + + %% error case + {'EXIT',{{case_clause,_},_}} = (catch map_guard_sequence_mixed(a,b,M0)), + ok. + +map_guard_sequence_mixed(K1,K2,M) -> + case M of + #{ K1 := 1, b := 1, K2 := 3, g := 1} -> 1; + #{ K1 := 1, b := 2, K2 := 4, h := 2} -> 2; + #{ K1 := 1, b := 3, K2 := 5, g := 3} -> 3; + #{ K1 := 1, c := 4, K2 := 6, h := 1} -> 4; + #{ K1 := 1, c := 5, K2 := 7, g := 2} -> 5; + #{ K1 := 1, c := 6, K2 := 8, h := 3} -> 6 + end. + + + +t_frequency_table(Config) when is_list(Config) -> + random:seed({13,1337,54}), % pseudo random + N = 100000, + Ts = rand_terms(N), + #{ n:=N, tf := Tf } = frequency_table(Ts,#{ n=>0, tf => #{}}), + ok = check_frequency(Ts,Tf), + ok. + + +frequency_table([T|Ts], M) -> + case M of + #{ n := N, tf := #{ T := C } = F } -> + frequency_table(Ts,M#{ n := N + 1, tf := F#{ T := C + 1 }}); + #{ n := N, tf := F } -> + frequency_table(Ts,M#{ n := N + 1, tf := F#{ T => 1 }}) + end; +frequency_table([], M) -> M. + + +check_frequency(Ts,Tf) -> + check_frequency(Ts,Tf,dict:new()). + +check_frequency([T|Ts],Tf,D) -> + case dict:find(T,D) of + error -> check_frequency(Ts,Tf,dict:store(T,1,D)); + {ok,C} -> check_frequency(Ts,Tf,dict:store(T,C+1,D)) + end; +check_frequency([],Tf,D) -> + validate_frequency(dict:to_list(D),Tf). + +validate_frequency([{T,C}|Fs],Tf) -> + case Tf of + #{ T := C } -> validate_frequency(Fs,Tf); + _ -> error + end; +validate_frequency([], _) -> ok. + + +%% aux + +rand_terms(0) -> []; +rand_terms(N) -> [rand_term()|rand_terms(N-1)]. + +rand_term() -> + case random:uniform(6) of + 1 -> rand_binary(); + 2 -> rand_number(); + 3 -> rand_atom(); + 4 -> rand_tuple(); + 5 -> rand_list(); + 6 -> rand_map() + end. + +rand_binary() -> + case random:uniform(3) of + 1 -> <<>>; + 2 -> <<"hi">>; + 3 -> <<"message text larger than 64 bytes. yep, message text larger than 64 bytes.">> + end. 
+ +rand_number() -> + case random:uniform(3) of + 1 -> random:uniform(5); + 2 -> float(random:uniform(5)); + 3 -> 1 bsl (63 + random:uniform(3)) + end. + +rand_atom() -> + case random:uniform(3) of + 1 -> hi; + 2 -> some_atom; + 3 -> some_other_atom + end. + + +rand_tuple() -> + case random:uniform(3) of + 1 -> {ok, rand_term()}; % careful + 2 -> {1, 2, 3}; + 3 -> {<<"yep">>, 1337} + end. + +rand_list() -> + case random:uniform(3) of + 1 -> "hi"; + 2 -> [1,rand_term()]; % careful + 3 -> [improper|list] + end. + +rand_map() -> + case random:uniform(3) of + 1 -> #{ hi => 3 }; + 2 -> #{ wat => rand_term(), other => 3 }; % careful + 3 -> #{ hi => 42, other => 42, yet_anoter => 1337 } + end. + + + %% Use this function to avoid compile-time evaluation of an expression. id(I) -> I. diff --git a/lib/compiler/test/match_SUITE.erl b/lib/compiler/test/match_SUITE.erl index 1e778dca24..9aec0b3d4e 100644 --- a/lib/compiler/test/match_SUITE.erl +++ b/lib/compiler/test/match_SUITE.erl @@ -22,7 +22,8 @@ init_per_group/2,end_per_group/2, pmatch/1,mixed/1,aliases/1,match_in_call/1, untuplify/1,shortcut_boolean/1,letify_guard/1, - selectify/1,underscore/1,match_map/1,coverage/1]). + selectify/1,underscore/1,match_map/1,map_vars_used/1, + coverage/1]). -include_lib("test_server/include/test_server.hrl"). @@ -33,10 +34,10 @@ all() -> [{group,p}]. groups() -> - [{p,test_lib:parallel(), + [{p,[parallel], [pmatch,mixed,aliases,match_in_call,untuplify, shortcut_boolean,letify_guard,selectify, - underscore,match_map,coverage]}]. + underscore,match_map,map_vars_used,coverage]}]. init_per_suite(Config) -> @@ -140,6 +141,13 @@ aliases(Config) when is_list(Config) -> ?line {a,b} = list_alias2([a,b]), ?line {a,b} = list_alias3([a,b]), + %% Non-matching aliases. + none = mixed_aliases(<<42>>), + none = mixed_aliases([b]), + none = mixed_aliases([d]), + none = mixed_aliases({a,42}), + none = mixed_aliases(42), + ok. str_alias(V) -> @@ -243,6 +251,12 @@ list_alias2([X,Y]=[a,b]) -> list_alias3([X,b]=[a,Y]) -> {X,Y}. +mixed_aliases(<<X:8>> = x) -> {a,X}; +mixed_aliases([b] = <<X:8>>) -> {b,X}; +mixed_aliases(<<X:8>> = {a,X}) -> {c,X}; +mixed_aliases([X] = <<X:8>>) -> {d,X}; +mixed_aliases(_) -> none. + %% OTP-7018. match_in_call(Config) when is_list(Config) -> @@ -419,6 +433,18 @@ do_match_map_2(Map) -> Tuple end. +map_vars_used(Config) when is_list(Config) -> + {some,value} = do_map_vars_used(a, b, #{{a,b}=>42,v=>{some,value}}), + ok. + +do_map_vars_used(X, Y, Map) -> + case {X,Y} of + T -> + %% core_lib:is_var_used/2 would not consider T used. + #{T:=42,v:=Val} = Map, + Val + end. + coverage(Config) when is_list(Config) -> %% Cover beam_dead. ok = coverage_1(x, a), diff --git a/lib/compiler/test/misc_SUITE.erl b/lib/compiler/test/misc_SUITE.erl index 44c7161530..68a31f14d5 100644 --- a/lib/compiler/test/misc_SUITE.erl +++ b/lib/compiler/test/misc_SUITE.erl @@ -60,7 +60,7 @@ all() -> [{group,p}]. groups() -> - [{p,[],%%test_lib:parallel(), + [{p,[], [tobias,empty_string,md5,silly_coverage, confused_literals,integer_encoding,override_bif]}]. @@ -225,14 +225,15 @@ silly_coverage(Config) when is_list(Config) -> {label,2}|non_proper_list]}],99}, ?line expect_error(fun() -> beam_bool:module(BoolInput, []) end), - %% beam_dead + %% beam_dead. This is tricky. Our function must look OK to + %% beam_utils:clean_labels/1, but must crash beam_dead. 
     DeadInput = {?MODULE,[{foo,0}],[],
 	     [{function,foo,0,2,
 	       [{label,1},
 		{func_info,{atom,?MODULE},{atom,foo},0},
 		{label,2},
-		{jump,bad}]}],99},
-    ?line expect_error(fun() -> beam_block:module(DeadInput, []) end),
+		{test,is_eq_exact,{f,1},[bad,operands]}]}],99},
+    expect_error(fun() -> beam_dead:module(DeadInput, []) end),
 
     %% beam_clean
     CleanInput = {?MODULE,[{foo,0}],[],
@@ -279,6 +280,14 @@ silly_coverage(Config) when is_list(Config) ->
 	       {label,2}|non_proper_list]}],99},
     expect_error(fun() -> beam_z:module(BeamZInput, []) end),
 
+    %% beam_validator.
+    BeamValInput = {?MODULE,[{foo,0}],[],
+		    [{function,foo,0,2,
+		      [{label,1},
+		       {func_info,{atom,?MODULE},{atom,foo},0},
+		       {label,2}|non_proper_list]}],99},
+    expect_error(fun() -> beam_validator:module(BeamValInput, []) end),
+
     ok.
 
 expect_error(Fun) ->
diff --git a/lib/compiler/test/receive_SUITE.erl b/lib/compiler/test/receive_SUITE.erl
index 00a6e900d4..fb82bf6101 100644
--- a/lib/compiler/test/receive_SUITE.erl
+++ b/lib/compiler/test/receive_SUITE.erl
@@ -187,12 +187,13 @@ ref_opt(Config) when is_list(Config) ->
     end.
 
 ref_opt_1(Config) ->
-    ?line DataDir = ?config(data_dir, Config),
-    ?line PrivDir = ?config(priv_dir, Config),
+    DataDir = ?config(data_dir, Config),
+    PrivDir = ?config(priv_dir, Config),
     Sources = filelib:wildcard(filename:join([DataDir,"ref_opt","*.{erl,S}"])),
-    ?line test_lib:p_run(fun(Src) ->
-				 do_ref_opt(Src, PrivDir)
-			 end, Sources),
+    test_lib:p_run(fun(Src) ->
+			   do_ref_opt(Src, PrivDir)
+		   end, Sources),
+    cover_recv_instructions(),
     ok.
 
 do_ref_opt(Source, PrivDir) ->
@@ -202,9 +203,9 @@ do_ref_opt(Source, PrivDir) ->
 			       {outdir,PrivDir}] ++
 			      [from_asm || Ext =:= ".S" ]),
     Base = filename:rootname(filename:basename(Source), Ext),
-    code:purge(list_to_atom(Base)),
-    BeamFile = filename:join(PrivDir, Base),
-    code:load_abs(BeamFile),
+    code:purge(list_to_atom(Base)),
+    BeamFile = filename:join(PrivDir, Base),
+    code:load_abs(BeamFile),
     ok = Mod:Mod(),
     {beam_file,Mod,_,_,_,Code} = beam_disasm:file(BeamFile),
     case Base of
@@ -232,6 +233,27 @@ collect_recv_opt_instrs(Code) ->
 	  end] || {function,_,_,_,Is} <- Code],
     lists:append(L).
 
+cover_recv_instructions() ->
+    %% We want to cover the handling of recv_mark and recv_set in beam_utils.
+    %% Since those instructions are introduced in a late optimization pass,
+    %% beam_utils:live_opt() will not see them unless the compilation is
+    %% started from a .S file. The compile_SUITE:asm/1 test case will
+    %% compile all test suite files to .S and then run them through the
+    %% compiler again.
+    %%
+    %% Here we will ensure that this module contains recv_mark
+    %% and recv_set instructions.
+    Pid = spawn_link(fun() ->
+			     receive {Parent,Ref} ->
+				     Parent ! Ref
+			     end
+		     end),
+    Ref = make_ref(),
+    Pid ! {self(),Ref},
+    receive
+	Ref -> ok
+    end.
+
 export(Config) when is_list(Config) ->
     Ref = make_ref(),
     ?line self() ! {result,Ref,42},
diff --git a/lib/compiler/test/record_SUITE.erl b/lib/compiler/test/record_SUITE.erl
index f736e14bf6..8cc90026ec 100644
--- a/lib/compiler/test/record_SUITE.erl
+++ b/lib/compiler/test/record_SUITE.erl
@@ -246,6 +246,14 @@ record_test_2(Config) when is_list(Config) ->
     ?line Barf = update_barf(Barf0),
     ?line #barf{a="abc",b=1} = id(Barf),
 
+    %% Test optimization of is_record/3.
+    false = case id({a,b}) of
+		{_,_}=Tuple -> is_record(Tuple, foo)
+	    end,
+    false = case id(true) of
+		true=Bool -> is_record(Bool, foo)
+	    end,
+
     ok.
record_test_3(Config) when is_list(Config) -> diff --git a/lib/compiler/test/test_lib.erl b/lib/compiler/test/test_lib.erl index a8befbecd9..a5e2855f8c 100644 --- a/lib/compiler/test/test_lib.erl +++ b/lib/compiler/test/test_lib.erl @@ -18,11 +18,13 @@ %% -module(test_lib). --include("test_server.hrl"). +-include_lib("test_server/include/test_server.hrl"). -compile({no_auto_import,[binary_part/2]}). --export([recompile/1,parallel/0,uniq/0,opt_opts/1,get_data_dir/1, +-export([id/1,recompile/1,parallel/0,uniq/0,opt_opts/1,get_data_dir/1, smoke_disasm/1,p_run/2,binary_part/2]). +id(I) -> I. + recompile(Mod) when is_atom(Mod) -> case whereis(cover_server) of undefined -> ok; @@ -44,6 +46,10 @@ smoke_disasm(File) when is_list(File) -> Res = beam_disasm:file(File), {beam_file,_Mod} = {element(1, Res),element(2, Res)}. +%% If we are running cover, we don't want to run test cases that +%% invokes the compiler in parallel, as doing so would probably +%% be slower than running them sequentially. + parallel() -> case ?t:is_cover() orelse erlang:system_info(schedulers) =:= 1 of true -> []; @@ -90,13 +96,18 @@ get_data_dir(Config) -> %% Will fail the test case if there were any errors. p_run(Test, List) -> + S = erlang:system_info(schedulers), N = case ?t:is_cover() of false -> - erlang:system_info(schedulers); + S + 1; true -> - %% Cover is running. Using more than one process - %% will probably only slow down compilation. - 1 + %% Cover is running. Using too many processes + %% could slow us down. Measurements on my computer + %% showed that using 4 parallel processes was + %% slightly faster than using 3. Using more than + %% 4 would not buy us much and could actually be + %% slower. + max(S, 4) end, p_run_loop(Test, List, N, [], 0, 0). diff --git a/lib/compiler/test/trycatch_SUITE.erl b/lib/compiler/test/trycatch_SUITE.erl index 4530d08c77..80d93fbfa4 100644 --- a/lib/compiler/test/trycatch_SUITE.erl +++ b/lib/compiler/test/trycatch_SUITE.erl @@ -24,7 +24,8 @@ catch_oops/1,after_oops/1,eclectic/1,rethrow/1, nested_of/1,nested_catch/1,nested_after/1, nested_horrid/1,last_call_optimization/1,bool/1, - plain_catch_coverage/1,andalso_orelse/1,get_in_try/1]). + plain_catch_coverage/1,andalso_orelse/1,get_in_try/1, + hockey/1]). -include_lib("test_server/include/test_server.hrl"). @@ -35,11 +36,12 @@ all() -> [{group,p}]. groups() -> - [{p,test_lib:parallel(), + [{p,[parallel], [basic,lean_throw,try_of,try_after,catch_oops, after_oops,eclectic,rethrow,nested_of,nested_catch, nested_after,nested_horrid,last_call_optimization, - bool,plain_catch_coverage,andalso_orelse,get_in_try]}]. + bool,plain_catch_coverage,andalso_orelse,get_in_try, + hockey]}]. init_per_suite(Config) -> @@ -790,7 +792,6 @@ nested_after_1({X1,C1,V1}, nested_horrid(Config) when is_list(Config) -> - _V = {make_ref(),nested_horrid,4.711}, {[true,true],{[true,1.0],1.0}} = nested_horrid_1({true,void,void}, 1.0), ok. @@ -944,3 +945,14 @@ get_valid_line([_|T]=Path, Annotations) -> _:not_found -> get_valid_line(T, Annotations) end. + +hockey(_) -> + {'EXIT',{{badmatch,_},[_|_]}} = (catch hockey()), + ok. + +hockey() -> + %% beam_jump used to generate a call into the try block. + %% beam_validator disapproved. + receive _ -> (b = fun() -> ok end) + + hockey, +x after 0 -> ok end, try (a = fun() -> ok end) + hockey, + + y catch _ -> ok end. 
diff --git a/lib/compiler/test/warnings_SUITE.erl b/lib/compiler/test/warnings_SUITE.erl index 0637041873..d0b7c71be8 100644 --- a/lib/compiler/test/warnings_SUITE.erl +++ b/lib/compiler/test/warnings_SUITE.erl @@ -39,7 +39,7 @@ guard/1,bad_arith/1,bool_cases/1,bad_apply/1, files/1,effect/1,bin_opt_info/1,bin_construction/1, comprehensions/1,maps/1,redundant_boolean_clauses/1, - latin1_fallback/1]). + latin1_fallback/1,underscore/1,no_warnings/1]). % Default timetrap timeout (set in init_per_testcase). -define(default_timeout, ?t:minutes(2)). @@ -64,7 +64,8 @@ groups() -> [pattern,pattern2,pattern3,pattern4,guard, bad_arith,bool_cases,bad_apply,files,effect, bin_opt_info,bin_construction,comprehensions,maps, - redundant_boolean_clauses,latin1_fallback]}]. + redundant_boolean_clauses,latin1_fallback, + underscore,no_warnings]}]. init_per_suite(Config) -> Config. @@ -284,7 +285,7 @@ bad_arith(Config) when is_list(Config) -> {10,sys_core_fold,{eval_failure,badarith}}, {15,sys_core_fold,{eval_failure,badarith}} ] }}], - ?line [] = run(Config, Ts), + [] = run(Config, Ts), ok. bool_cases(Config) when is_list(Config) -> @@ -578,11 +579,11 @@ maps(Config) when is_list(Config) -> <<" t() -> M = {a,[]}, - {'EXIT',{badarg,_}} = (catch(M#{ a => 1})), + {'EXIT',{badarg,_}} = (catch(M#{ a => 1 })), ok. ">>, [], - {warnings,[{4,v3_kernel,bad_map}]}}, + {warnings,[{4,sys_core_fold,{eval_failure,badarg}}]}}, {bad_map_src2, <<" t() -> @@ -592,7 +593,7 @@ maps(Config) when is_list(Config) -> id(I) -> I. ">>, [inline], - {warnings,[{4,v3_kernel,bad_map}]}}, + []}, {bad_map_src3, <<" t() -> @@ -601,7 +602,7 @@ maps(Config) when is_list(Config) -> ">>, [], {warnings,[{3,v3_core,bad_map}]}}, - {bad_map_literal_key, + {ok_map_literal_key, <<" t() -> V = id(1), @@ -614,7 +615,7 @@ maps(Config) when is_list(Config) -> id(I) -> I. ">>, [], - {warnings,[{6,v3_core,nomatch}]}}], + []}], run(Config, Ts), ok. @@ -678,6 +679,66 @@ latin1_fallback(Conf) when is_list(Conf) -> ok. +underscore(Config) when is_list(Config) -> + S0 = <<"f(A) -> + _VAR1 = <<A>>, + _VAR2 = {ok,A}, + _VAR3 = [A], + ok. + g(A) -> + _VAR1 = A/0, + _VAR2 = date(), + ok. + h() -> + _VAR1 = fun() -> ok end, + ok. + i(A) -> + _VAR1 = #{A=>42}, + ok. + ">>, + Ts0 = [{underscore0, + S0, + [], + {warnings,[{2,sys_core_fold,useless_building}, + {3,sys_core_fold,useless_building}, + {4,sys_core_fold,useless_building}, + {7,sys_core_fold,result_ignored}, + {8,sys_core_fold,{no_effect,{erlang,date,0}}}, + {11,sys_core_fold,useless_building}, + {14,sys_core_fold,useless_building} + ]}}], + [] = run(Config, Ts0), + + %% Replace all "_VAR<digit>" variables with a plain underscore. + %% Now there should be no warnings. + S1 = re:replace(S0, "_VAR\\d+", "_", [global]), + io:format("~s\n", [S1]), + Ts1 = [{underscore1,S1,[],[]}], + [] = run(Config, Ts1), + + ok. + +no_warnings(Config) when is_list(Config) -> + Ts = [{no_warnings, + <<"-record(r, {s=ordsets:new(),a,b}). + + a() -> + R = #r{}, %No warning expected. + {R#r.a,R#r.b}. + + b(X) -> + T = true, + Var = [X], %No warning expected. + case T of + false -> Var; + true -> [] + end. + ">>, + [], + []}], + run(Config, Ts), + ok. + %%% %%% End of test cases. %%% @@ -699,10 +760,10 @@ run(Config, Tests) -> %% Compiles a test module and returns the list of errors and warnings. run_test(Conf, Test0, Warnings) -> - Mod = "warnings_"++test_lib:uniq(), - Filename = Mod ++ ".erl", + Module = "warnings_"++test_lib:uniq(), + Filename = Module ++ ".erl", ?line DataDir = ?privdir, - Test = ["-module(", Mod, "). 
", Test0], + Test = ["-module(", Module, "). ", Test0], ?line File = filename:join(DataDir, Filename), ?line Opts = [binary,export_all,return|Warnings], ?line ok = file:write_file(File, Test), diff --git a/lib/crypto/c_src/crypto.c b/lib/crypto/c_src/crypto.c index 26e2486dc2..aa99f2236e 100644 --- a/lib/crypto/c_src/crypto.c +++ b/lib/crypto/c_src/crypto.c @@ -54,6 +54,10 @@ #include <openssl/evp.h> #include <openssl/hmac.h> +#if OPENSSL_VERSION_NUMBER >= 0x1000000fL +#include <openssl/modes.h> +#endif + #include "crypto_callback.h" #if OPENSSL_VERSION_NUMBER >= 0x00908000L && !defined(OPENSSL_NO_SHA224) && defined(NID_sha224)\ @@ -85,13 +89,32 @@ # define HAVE_AES_IGE #endif +#if OPENSSL_VERSION_NUMBER >= 0x1000100fL +# define HAVE_GCM +#endif + +#if defined(NID_chacha20) && !defined(OPENSSL_NO_CHACHA) && !defined(OPENSSL_NO_POLY1305) +# define HAVE_CHACHA20_POLY1305 +#endif + #if defined(HAVE_EC) #include <openssl/ec.h> #include <openssl/ecdh.h> #include <openssl/ecdsa.h> #endif +#if defined(HAVE_CHACHA20_POLY1305) +#include <openssl/chacha.h> +#include <openssl/poly1305.h> +#if !defined(CHACHA20_NONCE_LEN) +# define CHACHA20_NONCE_LEN 8 +#endif +#if !defined(POLY1305_TAG_LEN) +# define POLY1305_TAG_LEN 16 +#endif + +#endif #ifdef VALGRIND # include <valgrind/memcheck.h> @@ -219,6 +242,7 @@ static ERL_NIF_TERM aes_cfb_8_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM static ERL_NIF_TERM aes_cfb_128_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]); static ERL_NIF_TERM aes_ctr_encrypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]); static ERL_NIF_TERM aes_ctr_stream_encrypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]); +static ERL_NIF_TERM aes_ecb_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]); static ERL_NIF_TERM rand_bytes_1(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]); static ERL_NIF_TERM strong_rand_bytes_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]); static ERL_NIF_TERM rand_bytes_3(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]); @@ -257,6 +281,11 @@ static ERL_NIF_TERM ecdh_compute_key_nif(ErlNifEnv* env, int argc, const ERL_NIF static ERL_NIF_TERM rand_seed_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]); +static ERL_NIF_TERM aes_gcm_encrypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]); +static ERL_NIF_TERM aes_gcm_decrypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]); + +static ERL_NIF_TERM chacha20_poly1305_encrypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]); +static ERL_NIF_TERM chacha20_poly1305_decrypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]); /* helpers */ static void init_algorithms_types(ErlNifEnv*); @@ -351,6 +380,7 @@ static ErlNifFunc nif_funcs[] = { {"aes_ctr_decrypt", 3, aes_ctr_encrypt}, {"aes_ctr_stream_encrypt", 2, aes_ctr_stream_encrypt}, {"aes_ctr_stream_decrypt", 2, aes_ctr_stream_encrypt}, + {"aes_ecb_crypt", 3, aes_ecb_crypt}, {"rand_bytes", 1, rand_bytes_1}, {"strong_rand_bytes_nif", 1, strong_rand_bytes_nif}, {"rand_bytes", 3, rand_bytes_3}, @@ -382,12 +412,20 @@ static ErlNifFunc nif_funcs[] = { {"bf_ecb_crypt", 3, bf_ecb_crypt}, {"blowfish_ofb64_encrypt", 3, blowfish_ofb64_encrypt}, - {"ec_key_generate", 1, ec_key_generate}, + {"ec_key_generate", 2, ec_key_generate}, {"ecdsa_sign_nif", 4, ecdsa_sign_nif}, {"ecdsa_verify_nif", 5, ecdsa_verify_nif}, {"ecdh_compute_key_nif", 3, ecdh_compute_key_nif}, - {"rand_seed_nif", 1, rand_seed_nif} + {"rand_seed_nif", 1, rand_seed_nif}, + + {"aes_gcm_encrypt", 4, aes_gcm_encrypt}, + {"aes_gcm_decrypt", 5, 
aes_gcm_decrypt}, + + {"chacha20_poly1305_encrypt", 4, chacha20_poly1305_encrypt}, + {"chacha20_poly1305_decrypt", 5, chacha20_poly1305_decrypt} + + }; ERL_NIF_INIT(crypto,nif_funcs,load,NULL,upgrade,unload) @@ -725,7 +763,7 @@ static ERL_NIF_TERM algo_hash[8]; /* increase when extending the list */ static int algo_pubkey_cnt; static ERL_NIF_TERM algo_pubkey[3]; /* increase when extending the list */ static int algo_cipher_cnt; -static ERL_NIF_TERM algo_cipher[2]; /* increase when extending the list */ +static ERL_NIF_TERM algo_cipher[4]; /* increase when extending the list */ static void init_algorithms_types(ErlNifEnv* env) { @@ -763,6 +801,12 @@ static void init_algorithms_types(ErlNifEnv* env) #ifdef HAVE_AES_IGE algo_cipher[algo_cipher_cnt++] = enif_make_atom(env,"aes_ige256"); #endif +#if defined(HAVE_GCM) + algo_cipher[algo_cipher_cnt++] = enif_make_atom(env,"aes_gcm"); +#endif +#if defined(HAVE_CHACHA20_POLY1305) + algo_cipher[algo_cipher_cnt++] = enif_make_atom(env,"chacha20_poly1305"); +#endif ASSERT(algo_hash_cnt <= sizeof(algo_hash)/sizeof(ERL_NIF_TERM)); ASSERT(algo_pubkey_cnt <= sizeof(algo_pubkey)/sizeof(ERL_NIF_TERM)); @@ -1762,6 +1806,268 @@ static ERL_NIF_TERM aes_ctr_stream_encrypt(ErlNifEnv* env, int argc, const ERL_N return ret; } +static ERL_NIF_TERM aes_gcm_encrypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) +{/* (Key,Iv,AAD,In) */ +#if defined(HAVE_GCM) + GCM128_CONTEXT *ctx = NULL; + ErlNifBinary key, iv, aad, in; + AES_KEY aes_key; + unsigned char *outp; + ERL_NIF_TERM out, out_tag; + + CHECK_OSE_CRYPTO(); + + if (!enif_inspect_iolist_as_binary(env, argv[0], &key) + || AES_set_encrypt_key(key.data, key.size*8, &aes_key) != 0 + || !enif_inspect_binary(env, argv[1], &iv) || iv.size == 0 + || !enif_inspect_iolist_as_binary(env, argv[2], &aad) + || !enif_inspect_iolist_as_binary(env, argv[3], &in)) { + return enif_make_badarg(env); + } + + if (!(ctx = CRYPTO_gcm128_new(&aes_key, (block128_f)AES_encrypt))) + return atom_error; + + CRYPTO_gcm128_setiv(ctx, iv.data, iv.size); + + if (CRYPTO_gcm128_aad(ctx, aad.data, aad.size)) + goto out_err; + + outp = enif_make_new_binary(env, in.size, &out); + + /* encrypt */ + if (CRYPTO_gcm128_encrypt(ctx, in.data, outp, in.size)) + goto out_err; + + /* calculate the tag */ + CRYPTO_gcm128_tag(ctx, enif_make_new_binary(env, EVP_GCM_TLS_TAG_LEN, &out_tag), EVP_GCM_TLS_TAG_LEN); + CRYPTO_gcm128_release(ctx); + + CONSUME_REDS(env, in); + + return enif_make_tuple2(env, out, out_tag); + +out_err: + CRYPTO_gcm128_release(ctx); + return atom_error; + +#else + return atom_notsup; +#endif +} + +static ERL_NIF_TERM aes_gcm_decrypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) +{/* (Key,Iv,AAD,In,Tag) */ +#if defined(HAVE_GCM) + GCM128_CONTEXT *ctx; + ErlNifBinary key, iv, aad, in, tag; + AES_KEY aes_key; + unsigned char *outp; + ERL_NIF_TERM out; + + CHECK_OSE_CRYPTO(); + + if (!enif_inspect_iolist_as_binary(env, argv[0], &key) + || AES_set_encrypt_key(key.data, key.size*8, &aes_key) != 0 + || !enif_inspect_binary(env, argv[1], &iv) || iv.size == 0 + || !enif_inspect_iolist_as_binary(env, argv[2], &aad) + || !enif_inspect_iolist_as_binary(env, argv[3], &in) + || !enif_inspect_iolist_as_binary(env, argv[4], &tag) || tag.size != EVP_GCM_TLS_TAG_LEN) { + return enif_make_badarg(env); + } + + if (!(ctx = CRYPTO_gcm128_new(&aes_key, (block128_f)AES_encrypt))) + return atom_error; + + CRYPTO_gcm128_setiv(ctx, iv.data, iv.size); + + if (CRYPTO_gcm128_aad(ctx, aad.data, aad.size)) + goto out_err; + + outp = enif_make_new_binary(env, 
in.size, &out); + + /* decrypt */ + if (CRYPTO_gcm128_decrypt(ctx, in.data, outp, in.size)) + goto out_err; + + /* calculate and check the tag */ + if (CRYPTO_gcm128_finish(ctx, tag.data, EVP_GCM_TLS_TAG_LEN)) + goto out_err; + + CRYPTO_gcm128_release(ctx); + CONSUME_REDS(env, in); + + return out; + +out_err: + CRYPTO_gcm128_release(ctx); + return atom_error; +#else + return atom_notsup; +#endif +} + +#if defined(HAVE_CHACHA20_POLY1305) +static void +poly1305_update_with_length(poly1305_state *poly1305, + const unsigned char *data, size_t data_len) +{ + size_t j = data_len; + unsigned char length_bytes[8]; + unsigned i; + + for (i = 0; i < sizeof(length_bytes); i++) { + length_bytes[i] = j; + j >>= 8; + } + + CRYPTO_poly1305_update(poly1305, data, data_len); + CRYPTO_poly1305_update(poly1305, length_bytes, sizeof(length_bytes)); +} +#endif + +static ERL_NIF_TERM chacha20_poly1305_encrypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) +{/* (Key,Iv,AAD,In) */ +#if defined(HAVE_CHACHA20_POLY1305) + ErlNifBinary key, iv, aad, in; + unsigned char *outp; + ERL_NIF_TERM out, out_tag; + ErlNifUInt64 in_len_64; + unsigned char poly1305_key[32]; + poly1305_state poly1305; + + CHECK_OSE_CRYPTO(); + + if (!enif_inspect_iolist_as_binary(env, argv[0], &key) || key.size != 32 + || !enif_inspect_binary(env, argv[1], &iv) || iv.size != CHACHA20_NONCE_LEN + || !enif_inspect_iolist_as_binary(env, argv[2], &aad) + || !enif_inspect_iolist_as_binary(env, argv[3], &in)) { + return enif_make_badarg(env); + } + + /* Take from OpenSSL patch set/LibreSSL: + * + * The underlying ChaCha implementation may not overflow the block + * counter into the second counter word. Therefore we disallow + * individual operations that work on more than 2TB at a time. + * in_len_64 is needed because, on 32-bit platforms, size_t is only + * 32-bits and this produces a warning because it's always false. + * Casting to uint64_t inside the conditional is not sufficient to stop + * the warning. 
*/ + in_len_64 = in.size; + if (in_len_64 >= (1ULL << 32) * 64 - 64) + return enif_make_badarg(env); + + memset(poly1305_key, 0, sizeof(poly1305_key)); + CRYPTO_chacha_20(poly1305_key, poly1305_key, sizeof(poly1305_key), key.data, iv.data, 0); + + outp = enif_make_new_binary(env, in.size, &out); + + CRYPTO_poly1305_init(&poly1305, poly1305_key); + poly1305_update_with_length(&poly1305, aad.data, aad.size); + CRYPTO_chacha_20(outp, in.data, in.size, key.data, iv.data, 1); + poly1305_update_with_length(&poly1305, outp, in.size); + + CRYPTO_poly1305_finish(&poly1305, enif_make_new_binary(env, POLY1305_TAG_LEN, &out_tag)); + + CONSUME_REDS(env, in); + + return enif_make_tuple2(env, out, out_tag); + +#else + return atom_notsup; +#endif +} + +static ERL_NIF_TERM chacha20_poly1305_decrypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) +{/* (Key,Iv,AAD,In,Tag) */ +#if defined(HAVE_CHACHA20_POLY1305) + ErlNifBinary key, iv, aad, in, tag; + unsigned char *outp; + ERL_NIF_TERM out; + ErlNifUInt64 in_len_64; + unsigned char poly1305_key[32]; + unsigned char mac[POLY1305_TAG_LEN]; + poly1305_state poly1305; + + CHECK_OSE_CRYPTO(); + + if (!enif_inspect_iolist_as_binary(env, argv[0], &key) || key.size != 32 + || !enif_inspect_binary(env, argv[1], &iv) || iv.size != CHACHA20_NONCE_LEN + || !enif_inspect_iolist_as_binary(env, argv[2], &aad) + || !enif_inspect_iolist_as_binary(env, argv[3], &in) + || !enif_inspect_iolist_as_binary(env, argv[4], &tag) || tag.size != POLY1305_TAG_LEN) { + return enif_make_badarg(env); + } + + /* Take from OpenSSL patch set/LibreSSL: + * + * The underlying ChaCha implementation may not overflow the block + * counter into the second counter word. Therefore we disallow + * individual operations that work on more than 2TB at a time. + * in_len_64 is needed because, on 32-bit platforms, size_t is only + * 32-bits and this produces a warning because it's always false. + * Casting to uint64_t inside the conditional is not sufficient to stop + * the warning. 
*/ + in_len_64 = in.size; + if (in_len_64 >= (1ULL << 32) * 64 - 64) + return enif_make_badarg(env); + + memset(poly1305_key, 0, sizeof(poly1305_key)); + CRYPTO_chacha_20(poly1305_key, poly1305_key, sizeof(poly1305_key), key.data, iv.data, 0); + + CRYPTO_poly1305_init(&poly1305, poly1305_key); + poly1305_update_with_length(&poly1305, aad.data, aad.size); + poly1305_update_with_length(&poly1305, in.data, in.size); + CRYPTO_poly1305_finish(&poly1305, mac); + + if (memcmp(mac, tag.data, POLY1305_TAG_LEN) != 0) + return atom_error; + + outp = enif_make_new_binary(env, in.size, &out); + + CRYPTO_chacha_20(outp, in.data, in.size, key.data, iv.data, 1); + + CONSUME_REDS(env, in); + + return out; +#else + return atom_notsup; +#endif +} + +static ERL_NIF_TERM aes_ecb_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) +{/* (Key, Data, IsEncrypt) */ + ErlNifBinary key_bin, data_bin; + AES_KEY aes_key; + int i; + unsigned char* ret_ptr; + ERL_NIF_TERM ret; + + CHECK_OSE_CRYPTO(); + + if (!enif_inspect_iolist_as_binary(env, argv[0], &key_bin) + || (key_bin.size != 16 && key_bin.size != 32) + || !enif_inspect_iolist_as_binary(env, argv[1], &data_bin) + || data_bin.size % 16 != 0) { + return enif_make_badarg(env); + } + + if (argv[2] == atom_true) { + i = AES_ENCRYPT; + AES_set_encrypt_key(key_bin.data, key_bin.size*8, &aes_key); + } + else { + i = AES_DECRYPT; + AES_set_decrypt_key(key_bin.data, key_bin.size*8, &aes_key); + } + + ret_ptr = enif_make_new_binary(env, data_bin.size, &ret); + AES_ecb_encrypt(data_bin.data, ret_ptr, &aes_key, i); + CONSUME_REDS(env,data_bin); + return ret; +} + static ERL_NIF_TERM rand_bytes_1(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) {/* (Bytes) */ unsigned bytes; @@ -2194,11 +2500,12 @@ done: static ERL_NIF_TERM aes_cbc_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) {/* (Key, IVec, Data, IsEncrypt) */ ErlNifBinary key_bin, ivec_bin, data_bin; - AES_KEY aes_key; unsigned char ivec[16]; - int i; + int enc, i = 0, outlen = 0; + EVP_CIPHER_CTX *ctx = NULL; + const EVP_CIPHER *cipher = NULL; unsigned char* ret_ptr; - ERL_NIF_TERM ret; + ERL_NIF_TERM ret; CHECK_OSE_CRYPTO(); @@ -2212,20 +2519,44 @@ static ERL_NIF_TERM aes_cbc_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM a return enif_make_badarg(env); } - if (argv[3] == atom_true) { - i = AES_ENCRYPT; - AES_set_encrypt_key(key_bin.data, key_bin.size*8, &aes_key); - } - else { - i = AES_DECRYPT; - AES_set_decrypt_key(key_bin.data, key_bin.size*8, &aes_key); - } + if (argv[3] == atom_true) + enc = 1; + else + enc = 0; + + if (!(ctx = EVP_CIPHER_CTX_new())) + return enif_make_badarg(env); + + if (key_bin.size == 16) + cipher = EVP_aes_128_cbc(); + else if (key_bin.size == 32) + cipher = EVP_aes_256_cbc(); + + memcpy(ivec, ivec_bin.data, 16); /* writeable copy */ + + /* openssl docs say we need to leave at least 3 blocks available + at the end of the buffer for EVP calls. 
let's be safe */ + ret_ptr = enif_make_new_binary(env, data_bin.size + 16*3, &ret); + + if (EVP_CipherInit_ex(ctx, cipher, NULL, key_bin.data, ivec, enc) != 1) + return enif_make_badarg(env); + + /* disable padding, we only handle whole blocks */ + EVP_CIPHER_CTX_set_padding(ctx, 0); + + if (EVP_CipherUpdate(ctx, ret_ptr, &i, data_bin.data, data_bin.size) != 1) + return enif_make_badarg(env); + outlen += i; + if (EVP_CipherFinal_ex(ctx, ret_ptr + outlen, &i) != 1) + return enif_make_badarg(env); + outlen += i; + + EVP_CIPHER_CTX_free(ctx); - ret_ptr = enif_make_new_binary(env, data_bin.size, &ret); - memcpy(ivec, ivec_bin.data, 16); /* writable copy */ - AES_cbc_encrypt(data_bin.data, ret_ptr, data_bin.size, &aes_key, ivec, i); CONSUME_REDS(env,data_bin); - return ret; + + /* the garbage collector is going to love this */ + return enif_make_sub_binary(env, ret, 0, outlen); } static ERL_NIF_TERM aes_ige_crypt_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) @@ -3419,32 +3750,37 @@ out: static ERL_NIF_TERM ec_key_generate(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) { #if defined(HAVE_EC) - EC_KEY *key = ec_key_new(env, argv[0]); + EC_KEY *key; + const EC_GROUP *group; + const EC_POINT *public_key; + ERL_NIF_TERM priv_key; + ERL_NIF_TERM pub_key = atom_undefined; CHECK_OSE_CRYPTO(); - if (key && EC_KEY_generate_key(key)) { - const EC_GROUP *group; - const EC_POINT *public_key; - ERL_NIF_TERM priv_key; - ERL_NIF_TERM pub_key = atom_undefined; - - group = EC_KEY_get0_group(key); - public_key = EC_KEY_get0_public_key(key); + if (!get_ec_key(env, argv[0], argv[1], atom_undefined, &key)) + goto badarg; - if (group && public_key) { - pub_key = point2term(env, group, public_key, - EC_KEY_get_conv_form(key)); - } - priv_key = bn2term(env, EC_KEY_get0_private_key(key)); - EC_KEY_free(key); - return enif_make_tuple2(env, pub_key, priv_key); + if (argv[1] == atom_undefined) { + if (!EC_KEY_generate_key(key)) + goto badarg; } - else { - if (key) - EC_KEY_free(key); - return enif_make_badarg(env); + + group = EC_KEY_get0_group(key); + public_key = EC_KEY_get0_public_key(key); + + if (group && public_key) { + pub_key = point2term(env, group, public_key, + EC_KEY_get_conv_form(key)); } + priv_key = bn2term(env, EC_KEY_get0_private_key(key)); + EC_KEY_free(key); + return enif_make_tuple2(env, pub_key, priv_key); + +badarg: + if (key) + EC_KEY_free(key); + return enif_make_badarg(env); #else return atom_notsup; #endif diff --git a/lib/crypto/doc/src/crypto.xml b/lib/crypto/doc/src/crypto.xml index 98384978a5..4a8ba5c1bf 100644 --- a/lib/crypto/doc/src/crypto.xml +++ b/lib/crypto/doc/src/crypto.xml @@ -41,7 +41,7 @@ </item> <item> <p>Block ciphers - <url href="http://csrc.nist.gov/groups/ST/toolkit/block_ciphers.html"> </url> DES and AES in - Block Cipher Modes - <url href="http://csrc.nist.gov/groups/ST/toolkit/BCM/index.html"> ECB, CBC, CFB, OFB and CTR </url></p> + Block Cipher Modes - <url href="http://csrc.nist.gov/groups/ST/toolkit/BCM/index.html"> ECB, CBC, CFB, OFB, CTR and GCM </url></p> </item> <item> <p><url href="http://www.ietf.org/rfc/rfc1321.txt"> RSA encryption RFC 1321 </url> </p> @@ -53,6 +53,12 @@ <item> <p><url href="http://www.ietf.org/rfc/rfc2945.txt"> Secure Remote Password Protocol (SRP - RFC 2945) </url></p> </item> + <item> + <p>gcm: Dworkin, M., "Recommendation for Block Cipher Modes of + Operation: Galois/Counter Mode (GCM) and GMAC", + National Institute of Standards and Technology SP 800- + 38D, November 2007.</p> + </item> </list> </description> @@ -132,6 +138,8 
@@ blowfish_cfb64 | des_cbc | des_cfb | des3_cbc | des3_cbf | des_ede3 | rc2_cbc </code></p> + <p><code>aead_cipher() = aes_gcm | chacha20_poly1305 </code></p> + <p><code>stream_key() = aes_key() | rc4_key() </code></p> <p><code>block_key() = aes_key() | blowfish_key() | des_key()| des3_key() </code></p> @@ -152,7 +160,7 @@ Note that both md4 and md5 are recommended only for compatibility with existing applications. </p> <p><code> cipher_algorithms() = des_cbc | des_cfb | des3_cbc | des3_cbf | des_ede3 | - blowfish_cbc | blowfish_cfb64 | aes_cbc128 | aes_cfb8 | aes_cfb128| aes_cbc256 | aes_ige256 | rc2_cbc | aes_ctr| rc4 </code> </p> + blowfish_cbc | blowfish_cfb64 | aes_cbc128 | aes_cfb8 | aes_cfb128| aes_cbc256 | aes_ige256 | aes_gcm | chacha20_poly1305 | rc2_cbc | aes_ctr| rc4 </code> </p> <p><code> public_key_algorithms() = rsa |dss | ecdsa | dh | ecdh | ec_gf2m</code> Note that ec_gf2m is not strictly a public key algorithm, but a restriction on what curves are supported with ecdsa and ecdh. @@ -161,18 +169,53 @@ </section> <funcs> - <func> + <func> + <name>block_encrypt(Type, Key, PlainText) -> CipherText</name> + <fsummary>Encrypt <c>PlainText</c> according to <c>Type</c> block cipher</fsummary> + <type> + <v>Type = des_ecb | blowfish_ecb | aes_ecb </v> + <v>Key = block_key() </v> + <v>PlainText = iodata() </v> + </type> + <desc> + <p>Encrypt <c>PlainText</c> according to <c>Type</c> block cipher.</p> + <p>May throw exception <c>notsup</c> in case the chosen <c>Type</c> + is not supported by the underlying OpenSSL implementation.</p> + </desc> + </func> + + <func> + <name>block_decrypt(Type, Key, CipherText) -> PlainText</name> + <fsummary>Decrypt <c>CipherText</c> according to <c>Type</c> block cipher</fsummary> + <type> + <v>Type = des_ecb | blowfish_ecb | aes_ecb </v> + <v>Key = block_key() </v> + <v>PlainText = iodata() </v> + </type> + <desc> + <p>Decrypt <c>CipherText</c> according to <c>Type</c> block cipher.</p> + <p>May throw exception <c>notsup</c> in case the chosen <c>Type</c> + is not supported by the underlying OpenSSL implementation.</p> + </desc> + </func> + + <func> <name>block_encrypt(Type, Key, Ivec, PlainText) -> CipherText</name> - <fsummary>Encrypt <c>PlainText</c>according to <c>Type</c> block cipher</fsummary> + <name>block_encrypt(AeadType, Key, Ivec, {AAD, PlainText}) -> {CipherText, CipherTag}</name> + <fsummary>Encrypt <c>PlainText</c> according to <c>Type</c> block cipher</fsummary> <type> <v>Type = block_cipher() </v> + <v>AeadType = aead_cipher() </v> <v>Key = block_key() </v> <v>PlainText = iodata() </v> - <v>IVec = CipherText = binary()</v> + <v>AAD = IVec = CipherText = CipherTag = binary()</v> </type> <desc> - <p>Encrypt <c>PlainText</c>according to <c>Type</c> block cipher. + <p>Encrypt <c>PlainText</c> according to <c>Type</c> block cipher. 
       <c>IVec</c> is an arbitrary initializing vector.</p>
+      <p>In AEAD (Authenticated Encryption with Associated Data) mode, encrypt
+      <c>PlainText</c> according to <c>Type</c> block cipher and calculate
+      <c>CipherTag</c> that also authenticates the <c>AAD</c> (Associated Authenticated Data).</p>
       <p>May throw exception <c>notsup</c> in case the chosen <c>Type</c>
       is not supported by the underlying OpenSSL implementation.</p>
     </desc>
@@ -180,16 +223,22 @@
     <func>
       <name>block_decrypt(Type, Key, Ivec, CipherText) -> PlainText</name>
-      <fsummary>Decrypt <c>CipherText</c>according to <c>Type</c> block cipher</fsummary>
+      <name>block_decrypt(AeadType, Key, Ivec, {AAD, CipherText, CipherTag}) -> PlainText | error</name>
+      <fsummary>Decrypt <c>CipherText</c> according to <c>Type</c> block cipher</fsummary>
       <type>
 	<v>Type = block_cipher() </v>
+	<v>AeadType = aead_cipher() </v>
 	<v>Key = block_key() </v>
 	<v>PlainText = iodata() </v>
-	<v>IVec = CipherText = binary()</v>
+	<v>AAD = IVec = CipherText = CipherTag = binary()</v>
       </type>
       <desc>
-        <p>Decrypt <c>CipherText</c>according to <c>Type</c> block cipher.
+        <p>Decrypt <c>CipherText</c> according to <c>Type</c> block cipher.
       <c>IVec</c> is an arbitrary initializing vector.</p>
+      <p>In AEAD (Authenticated Encryption with Associated Data) mode, decrypt
+      <c>CipherText</c> according to <c>Type</c> block cipher and check the authenticity
+      of the <c>PlainText</c> and <c>AAD</c> (Associated Authenticated Data) using the
+      <c>CipherTag</c>. May return <c>error</c> if the decryption or validation fails.</p>
       <p>May throw exception <c>notsup</c> in case the chosen <c>Type</c>
       is not supported by the underlying OpenSSL implementation.</p>
     </desc>
@@ -250,7 +299,7 @@
       <v>SrpUserParams = {user, [Generator::binary(), Prime::binary(), Version::atom()]}</v>
       <v>SrpHostParams = {host, [Verifier::binary(), Generator::binary(), Prime::binary(), Version::atom()]}</v>
       <v>PublicKey = dh_public() | ecdh_public() | srp_public() </v>
-      <v>PrivKeyIn = undefined | dh_private() | srp_private() </v>
+      <v>PrivKeyIn = undefined | dh_private() | ecdh_private() | srp_private() </v>
       <v>PrivKeyOut = dh_private() | ecdh_private() | srp_private() </v>
     </type>
     <desc>
diff --git a/lib/crypto/src/crypto.erl b/lib/crypto/src/crypto.erl
index e1fbbf9ab8..e8845ed52f 100644
--- a/lib/crypto/src/crypto.erl
+++ b/lib/crypto/src/crypto.erl
@@ -211,7 +211,7 @@ supports()->
     [{hashs, Hashs},
      {ciphers, [des_cbc, des_cfb, des3_cbc, des_ede3, blowfish_cbc,
 		blowfish_cfb64, blowfish_ofb64, blowfish_ecb, aes_cbc128, aes_cfb8, aes_cfb128,
-		aes_cbc256, rc2_cbc, aes_ctr, rc4] ++ Ciphers},
+		aes_cbc256, rc2_cbc, aes_ctr, rc4, aes_ecb] ++ Ciphers},
      {public_keys, [rsa, dss, dh, srp] ++ PubKeys}
     ].
 
@@ -282,7 +282,8 @@ hmac_final_n(_Context, _HashLen) -> ?nif_stub.
 
 -spec block_encrypt(des_cbc | des_cfb | des3_cbc | des3_cbf | des_ede3 | blowfish_cbc |
 		    blowfish_cfb64 | aes_cbc128 | aes_cfb8 | aes_cfb128 | aes_cbc256 | rc2_cbc,
-		    Key::iodata(), Ivec::binary(), Data::iodata()) -> binary().
+		    Key::iodata(), Ivec::binary(), Data::iodata()) -> binary();
+		   (aes_gcm | chacha20_poly1305, Key::iodata(), Ivec::binary(), {AAD::binary(), Data::iodata()}) -> {binary(), binary()}.
block_encrypt(des_cbc, Key, Ivec, Data) -> des_cbc_encrypt(Key, Ivec, Data); @@ -310,14 +311,25 @@ block_encrypt(aes_cfb8, Key, Ivec, Data) -> aes_cfb_8_encrypt(Key, Ivec, Data); block_encrypt(aes_cfb128, Key, Ivec, Data) -> aes_cfb_128_encrypt(Key, Ivec, Data); +block_encrypt(aes_gcm, Key, Ivec, {AAD, Data}) -> + case aes_gcm_encrypt(Key, Ivec, AAD, Data) of + notsup -> erlang:error(notsup); + Return -> Return + end; +block_encrypt(chacha20_poly1305, Key, Ivec, {AAD, Data}) -> + case chacha20_poly1305_encrypt(Key, Ivec, AAD, Data) of + notsup -> erlang:error(notsup); + Return -> Return + end; block_encrypt(rc2_cbc, Key, Ivec, Data) -> rc2_cbc_encrypt(Key, Ivec, Data). -spec block_decrypt(des_cbc | des_cfb | des3_cbc | des3_cbf | des_ede3 | blowfish_cbc | - blowfish_cfb64 | blowfish_ofb64 | aes_cbc128 | aes_cbc256 | aes_ige256 | - aes_cfb8 | aes_cfb128 | rc2_cbc, - Key::iodata(), Ivec::binary(), Data::iodata()) -> binary(). - + blowfish_cfb64 | blowfish_ofb64 | aes_cbc128 | aes_cbc256 | aes_ige256 | + aes_cfb8 | aes_cfb128 | rc2_cbc, + Key::iodata(), Ivec::binary(), Data::iodata()) -> binary(); + (aes_gcm | chacha20_poly1305, Key::iodata(), Ivec::binary(), + {AAD::binary(), Data::iodata(), Tag::binary()}) -> binary() | error. block_decrypt(des_cbc, Key, Ivec, Data) -> des_cbc_decrypt(Key, Ivec, Data); block_decrypt(des_cfb, Key, Ivec, Data) -> @@ -344,22 +356,36 @@ block_decrypt(aes_cfb8, Key, Ivec, Data) -> aes_cfb_8_decrypt(Key, Ivec, Data); block_decrypt(aes_cfb128, Key, Ivec, Data) -> aes_cfb_128_decrypt(Key, Ivec, Data); +block_decrypt(aes_gcm, Key, Ivec, {AAD, Data, Tag}) -> + case aes_gcm_decrypt(Key, Ivec, AAD, Data, Tag) of + notsup -> erlang:error(notsup); + Return -> Return + end; +block_decrypt(chacha20_poly1305, Key, Ivec, {AAD, Data, Tag}) -> + case chacha20_poly1305_decrypt(Key, Ivec, AAD, Data, Tag) of + notsup -> erlang:error(notsup); + Return -> Return + end; block_decrypt(rc2_cbc, Key, Ivec, Data) -> rc2_cbc_decrypt(Key, Ivec, Data). --spec block_encrypt(des_ecb | blowfish_ecb, Key::iodata(), Data::iodata()) -> binary(). +-spec block_encrypt(des_ecb | blowfish_ecb | aes_ecb, Key::iodata(), Data::iodata()) -> binary(). block_encrypt(des_ecb, Key, Data) -> des_ecb_encrypt(Key, Data); block_encrypt(blowfish_ecb, Key, Data) -> - blowfish_ecb_encrypt(Key, Data). + blowfish_ecb_encrypt(Key, Data); +block_encrypt(aes_ecb, Key, Data) -> + aes_ecb_encrypt(Key, Data). --spec block_decrypt(des_ecb | blowfish_ecb, Key::iodata(), Data::iodata()) -> binary(). +-spec block_decrypt(des_ecb | blowfish_ecb | aes_ecb, Key::iodata(), Data::iodata()) -> binary(). block_decrypt(des_ecb, Key, Data) -> des_ecb_decrypt(Key, Data); block_decrypt(blowfish_ecb, Key, Data) -> - blowfish_ecb_decrypt(Key, Data). + blowfish_ecb_decrypt(Key, Data); +block_decrypt(aes_ecb, Key, Data) -> + aes_ecb_decrypt(Key, Data). -spec next_iv(des_cbc | des3_cbc | aes_cbc | aes_ige, Data::iodata()) -> binary(). @@ -567,9 +593,8 @@ generate_key(srp, {user, [Generator, Prime, Version]}, PrivateArg) end, user_srp_gen_key(Private, Generator, Prime); -generate_key(ecdh, Curve, undefined) -> - ec_key_generate(nif_curve_params(Curve)). - +generate_key(ecdh, Curve, PrivKey) -> + ec_key_generate(nif_curve_params(Curve), ensure_int_as_bin(PrivKey)). compute_key(dh, OthersPublicKey, MyPrivateKey, DHParameters) -> case dh_compute_key_nif(ensure_int_as_bin(OthersPublicKey), @@ -1190,6 +1215,17 @@ aes_cfb_128_decrypt(Key, IVec, Data) -> aes_cfb_128_crypt(_Key, _IVec, _Data, _IsEncrypt) -> ?nif_stub. 
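%% Editorial sketch (not part of the patch): how the API surface added to
%% crypto.erl above is intended to be called. The key, nonce and curve values
%% below are illustrative placeholders only; aes_gcm and chacha20_poly1305
%% raise a notsup error when the underlying OpenSSL lacks support, and
%% secp256r1 merely stands in for any named curve known to crypto
%% (see ec_curves/0).
api_usage_sketch() ->
    Plain = <<"0123456789abcdef">>,               %One 16-byte block.
    AAD = <<"header">>,

    %% AEAD mode: encryption returns {CipherText, CipherTag}; decryption
    %% returns the plaintext, or 'error' if the tag does not authenticate.
    CKey = <<0:256>>,                             %256-bit key.
    Nonce = <<0:64>>,                             %8-byte nonce for this NIF.
    {C, Tag} = crypto:block_encrypt(chacha20_poly1305, CKey, Nonce, {AAD, Plain}),
    Plain = crypto:block_decrypt(chacha20_poly1305, CKey, Nonce, {AAD, C, Tag}),

    %% ECB mode: the new three-argument form takes no IVec; the key must be
    %% 128 or 256 bits and the data a whole number of 16-byte blocks.
    EKey = <<"0123456789abcdef">>,
    Ecb = crypto:block_encrypt(aes_ecb, EKey, Plain),
    Plain = crypto:block_decrypt(aes_ecb, EKey, Ecb),

    %% generate_key/3 for ecdh now also accepts an explicit private key;
    %% 'undefined' still means "generate one for me".
    {Pub, Priv} = crypto:generate_key(ecdh, secp256r1, undefined),
    {Pub, _} = crypto:generate_key(ecdh, secp256r1, Priv),
    ok.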
+
+%%
+%% AES - in Galois/Counter Mode (GCM)
+%%
+aes_gcm_encrypt(_Key, _Ivec, _AAD, _In) -> ?nif_stub.
+aes_gcm_decrypt(_Key, _Ivec, _AAD, _In, _Tag) -> ?nif_stub.
+
+%%
+%% ChaCha20/Poly1305
+%%
+chacha20_poly1305_encrypt(_Key, _Ivec, _AAD, _In) -> ?nif_stub.
+chacha20_poly1305_decrypt(_Key, _Ivec, _AAD, _In, _Tag) -> ?nif_stub.
 
 %%
 %% DES - in cipher block chaining mode (CBC)
@@ -1361,6 +1397,18 @@ aes_ctr_encrypt(_Key, _IVec, _Data) -> ?nif_stub.
 aes_ctr_decrypt(_Key, _IVec, _Cipher) -> ?nif_stub.
 
 %%
+%% AES - in electronic codebook mode (ECB)
+%%
+aes_ecb_encrypt(Key, Data) ->
+    aes_ecb_crypt(Key, Data, true).
+
+aes_ecb_decrypt(Key, Data) ->
+    aes_ecb_crypt(Key, Data, false).
+
+aes_ecb_crypt(_Key, _Data, _IsEncrypt) -> ?nif_stub.
+
+
+%%
 %% AES - in counter mode (CTR) with state maintained for multi-call streaming
 %%
 -type ctr_state() :: { iodata(), binary(), binary(), integer() }.
@@ -1523,7 +1571,7 @@ dh_compute_key(OthersPublicKey, MyPrivateKey, DHParameters) ->
 
 dh_compute_key_nif(_OthersPublicKey, _MyPrivateKey, _DHParameters) -> ?nif_stub.
 
-ec_key_generate(_Key) -> ?nif_stub.
+ec_key_generate(_Curve, _Key) -> ?nif_stub.
 
 ecdh_compute_key_nif(_Others, _Curve, _My) -> ?nif_stub.
 
diff --git a/lib/crypto/test/crypto_SUITE.erl b/lib/crypto/test/crypto_SUITE.erl
index 53e29af338..72944eea8e 100644
--- a/lib/crypto/test/crypto_SUITE.erl
+++ b/lib/crypto/test/crypto_SUITE.erl
@@ -58,10 +58,13 @@ all() ->
      {group, aes_cfb8},
      {group, aes_cfb128},
      {group, aes_cbc256},
+     {group, aes_ecb},
      {group, aes_ige256},
      {group, rc2_cbc},
      {group, rc4},
      {group, aes_ctr},
+     {group, aes_gcm},
+     {group, chacha20_poly1305},
      mod_pow,
      exor,
      rand_uniform
@@ -82,7 +85,7 @@ groups() ->
      {dss, [], [sign_verify]},
      {ecdsa, [], [sign_verify]},
      {dh, [], [generate_compute]},
-     {ecdh, [], [compute]},
+     {ecdh, [], [compute, generate]},
      {srp, [], [generate_compute]},
      {des_cbc, [], [block]},
      {des_cfb, [], [block]},
@@ -94,13 +97,16 @@ groups() ->
      {aes_cfb8,[], [block]},
      {aes_cfb128,[], [block]},
      {aes_cbc256,[], [block]},
+     {aes_ecb,[], [block]},
      {aes_ige256,[], [block]},
      {blowfish_cbc, [], [block]},
      {blowfish_ecb, [], [block]},
      {blowfish_cfb64, [], [block]},
      {blowfish_ofb64,[], [block]},
      {rc4, [], [stream]},
-     {aes_ctr, [], [stream]}
+     {aes_ctr, [], [stream]},
+     {aes_gcm, [], [aead]},
+     {chacha20_poly1305, [], [aead]}
     ].
 
 %%--------------------------------------------------------------------
@@ -203,6 +209,14 @@ stream(Config) when is_list(Config) ->
     lists:foreach(fun stream_cipher/1, stream_iolistify(Streams)),
     lists:foreach(fun stream_cipher_incment/1, stream_iolistify(Streams)).
 
+%%--------------------------------------------------------------------
+aead() ->
+    [{doc, "Test AEAD ciphers"}].
+aead(Config) when is_list(Config) ->
+    AEADs = lazy_eval(proplists:get_value(aead, Config)),
+
+    lists:foreach(fun aead_cipher/1, AEADs).
+
 %%--------------------------------------------------------------------
 sign_verify() ->
     [{doc, "Sign/verify digital signatures"}].
@@ -231,6 +245,12 @@ compute(Config) when is_list(Config) ->
     Gen = proplists:get_value(compute, Config),
     lists:foreach(fun do_compute/1, Gen).
 %%--------------------------------------------------------------------
+generate() ->
+    [{doc, " Test crypto:generate_key"}].
+generate(Config) when is_list(Config) ->
+    Gen = proplists:get_value(generate, Config),
+    lists:foreach(fun do_generate/1, Gen).
+%%--------------------------------------------------------------------
 mod_pow() ->
     [{doc, "mod_pow testing (A ^ M % P with bignums)"}].
mod_pow(Config) when is_list(Config) -> @@ -406,7 +426,22 @@ stream_cipher_incment(_State, OrigState, [], Acc, Plain) -> stream_cipher_incment(State0, OrigState, [PlainText | PlainTexts], Acc, Plain) -> {State, CipherText} = crypto:stream_encrypt(State0, PlainText), stream_cipher_incment(State, OrigState, PlainTexts, [CipherText | Acc], Plain). - + +aead_cipher({Type, Key, PlainText, IV, AAD, CipherText, CipherTag}) -> + Plain = iolist_to_binary(PlainText), + case crypto:block_encrypt(Type, Key, IV, {AAD, Plain}) of + {CipherText, CipherTag} -> + ok; + Other0 -> + ct:fail({{crypto, block_encrypt, [Plain, PlainText]}, {expected, {CipherText, CipherTag}}, {got, Other0}}) + end, + case crypto:block_decrypt(Type, Key, IV, {AAD, CipherText, CipherTag}) of + Plain -> + ok; + Other1 -> + ct:fail({{crypto, block_decrypt, [CipherText]}, {expected, Plain}, {got, Other1}}) + end. + do_sign_verify({Type, Hash, Public, Private, Msg}) -> Signature = crypto:sign(Type, Hash, Msg, Private), case crypto:verify(Type, Hash, Msg, Signature, Public) of @@ -467,6 +502,14 @@ do_compute({ecdh = Type, Pub, Priv, Curve, SharedSecret}) -> ct:fail({{crypto, compute_key, [Type, Pub, Priv, Curve]}, {expected, SharedSecret}, {got, Other}}) end. +do_generate({ecdh = Type, Curve, Priv, Pub}) -> + case crypto:generate_key(Type, Curve, Priv) of + {Pub, _} -> + ok; + {Other, _} -> + ct:fail({{crypto, generate_key, [Type, Priv, Curve]}, {expected, Pub}, {got, Other}}) + end. + hexstr2point(X, Y) -> <<4:8, (hexstr2bin(X))/binary, (hexstr2bin(Y))/binary>>. @@ -694,7 +737,8 @@ group_config(srp, Config) -> [{generate_compute, GenerateCompute} | Config]; group_config(ecdh, Config) -> Compute = ecdh(), - [{compute, Compute} | Config]; + Generate = ecc(), + [{compute, Compute}, {generate, Generate} | Config]; group_config(dh, Config) -> GenerateCompute = [dh()], [{generate_compute, GenerateCompute} | Config]; @@ -722,6 +766,9 @@ group_config(aes_cbc128, Config) -> group_config(aes_cbc256, Config) -> Block = aes_cbc256(), [{block, Block} | Config]; +group_config(aes_ecb, Config) -> + Block = aes_ecb(), + [{block, Block} | Config]; group_config(aes_ige256, Config) -> Block = aes_ige256(), [{block, Block} | Config]; @@ -749,6 +796,12 @@ group_config(rc4, Config) -> group_config(aes_ctr, Config) -> Stream = aes_ctr(), [{stream, Stream} | Config]; +group_config(aes_gcm, Config) -> + AEAD = aes_gcm(), + [{aead, AEAD} | Config]; +group_config(chacha20_poly1305, Config) -> + AEAD = chacha20_poly1305(), + [{aead, AEAD} | Config]; group_config(_, Config) -> Config. @@ -1150,6 +1203,106 @@ aes_cbc256() -> hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")} ]. 
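The aes_ecb() vectors that follow exercise the new two-argument ECB entry points added to crypto.erl above; no IV is involved. A hedged round-trip sketch built from the first vector, whose 16-byte ASCII key and plaintext form exactly one AES-128 block:

    %% Sketch only; both binaries are exactly 16 bytes (one AES-128 block).
    Key = <<"YELLOW SUBMARINE">>,
    Block = <<"YELLOW SUBMARINE">>,
    Cipher = crypto:block_encrypt(aes_ecb, Key, Block),
    Block = crypto:block_decrypt(aes_ecb, Key, Cipher).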
+aes_ecb() -> + [ + {aes_ecb, + <<"YELLOW SUBMARINE">>, + <<"YELLOW SUBMARINE">>}, + {aes_ecb, + <<"0000000000000000">>, + <<"0000000000000000">>}, + {aes_ecb, + <<"FFFFFFFFFFFFFFFF">>, + <<"FFFFFFFFFFFFFFFF">>}, + {aes_ecb, + <<"3000000000000000">>, + <<"1000000000000001">>}, + {aes_ecb, + <<"1111111111111111">>, + <<"1111111111111111">>}, + {aes_ecb, + <<"0123456789ABCDEF">>, + <<"1111111111111111">>}, + {aes_ecb, + <<"0000000000000000">>, + <<"0000000000000000">>}, + {aes_ecb, + <<"FEDCBA9876543210">>, + <<"0123456789ABCDEF">>}, + {aes_ecb, + <<"7CA110454A1A6E57">>, + <<"01A1D6D039776742">>}, + {aes_ecb, + <<"0131D9619DC1376E">>, + <<"5CD54CA83DEF57DA">>}, + {aes_ecb, + <<"07A1133E4A0B2686">>, + <<"0248D43806F67172">>}, + {aes_ecb, + <<"3849674C2602319E">>, + <<"51454B582DDF440A">>}, + {aes_ecb, + <<"04B915BA43FEB5B6">>, + <<"42FD443059577FA2">>}, + {aes_ecb, + <<"0113B970FD34F2CE">>, + <<"059B5E0851CF143A">>}, + {aes_ecb, + <<"0170F175468FB5E6">>, + <<"0756D8E0774761D2">>}, + {aes_ecb, + <<"43297FAD38E373FE">>, + <<"762514B829BF486A">>}, + {aes_ecb, + <<"07A7137045DA2A16">>, + <<"3BDD119049372802">>}, + {aes_ecb, + <<"04689104C2FD3B2F">>, + <<"26955F6835AF609A">>}, + {aes_ecb, + <<"37D06BB516CB7546">>, + <<"164D5E404F275232">>}, + {aes_ecb, + <<"1F08260D1AC2465E">>, + <<"6B056E18759F5CCA">>}, + {aes_ecb, + <<"584023641ABA6176">>, + <<"004BD6EF09176062">>}, + {aes_ecb, + <<"025816164629B007">>, + <<"480D39006EE762F2">>}, + {aes_ecb, + <<"49793EBC79B3258F">>, + <<"437540C8698F3CFA">>}, + {aes_ecb, + <<"018310DC409B26D6">>, + <<"1D9D5C5018F728C2">>}, + {aes_ecb, + <<"1C587F1C13924FEF">>, + <<"305532286D6F295A">>}, + {aes_ecb, + <<"0101010101010101">>, + <<"0123456789ABCDEF">>}, + {aes_ecb, + <<"1F1F1F1F0E0E0E0E">>, + <<"0123456789ABCDEF">>}, + {aes_ecb, + <<"E0FEE0FEF1FEF1FE">>, + <<"0123456789ABCDEF">>}, + {aes_ecb, + <<"0000000000000000">>, + <<"FFFFFFFFFFFFFFFF">>}, + {aes_ecb, + <<"FFFFFFFFFFFFFFFF">>, + <<"0000000000000000">>}, + {aes_ecb, + <<"0123456789ABCDEF">>, + <<"0000000000000000">>}, + {aes_ecb, + <<"FEDCBA9876543210">>, + <<"FFFFFFFFFFFFFFFF">>} + ]. + aes_ige256() -> [{aes_ige256, hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"), @@ -1442,6 +1595,269 @@ aes_ctr() -> long_msg()} ]. 
+ +%% AES GCM test vectors from http://csrc.nist.gov/groups/ST/toolkit/BCM/documents/proposedmodes/gcm/gcm-spec.pdf +aes_gcm() -> + [ + %% Test Case 1 + {aes_gcm, hexstr2bin("00000000000000000000000000000000"), %% Key + hexstr2bin(""), %% PlainText + hexstr2bin("000000000000000000000000"), %% IV + hexstr2bin(""), %% AAD + hexstr2bin(""), %% CipherText + hexstr2bin("58e2fccefa7e3061367f1d57a4e7455a")}, %% CipherTag + + %% Test Case 2 + {aes_gcm, hexstr2bin("00000000000000000000000000000000"), %% Key + hexstr2bin("00000000000000000000000000000000"), %% PlainText + hexstr2bin("000000000000000000000000"), %% IV + hexstr2bin(""), %% AAD + hexstr2bin("0388dace60b6a392f328c2b971b2fe78"), %% CipherText + hexstr2bin("ab6e47d42cec13bdf53a67b21257bddf")}, %% CipherTag + + %% Test Case 3 + {aes_gcm, hexstr2bin("feffe9928665731c6d6a8f9467308308"), %% Key + hexstr2bin("d9313225f88406e5a55909c5aff5269a" %% PlainText + "86a7a9531534f7da2e4c303d8a318a72" + "1c3c0c95956809532fcf0e2449a6b525" + "b16aedf5aa0de657ba637b391aafd255"), + hexstr2bin("cafebabefacedbaddecaf888"), %% IV + hexstr2bin(""), %% AAD + hexstr2bin("42831ec2217774244b7221b784d0d49c" %% CipherText + "e3aa212f2c02a4e035c17e2329aca12e" + "21d514b25466931c7d8f6a5aac84aa05" + "1ba30b396a0aac973d58e091473f5985"), + hexstr2bin("4d5c2af327cd64a62cf35abd2ba6fab4")}, %% CipherTag + + %% Test Case 4 + {aes_gcm, hexstr2bin("feffe9928665731c6d6a8f9467308308"), %% Key + hexstr2bin("d9313225f88406e5a55909c5aff5269a" %% PlainText + "86a7a9531534f7da2e4c303d8a318a72" + "1c3c0c95956809532fcf0e2449a6b525" + "b16aedf5aa0de657ba637b39"), + hexstr2bin("cafebabefacedbaddecaf888"), %% IV + hexstr2bin("feedfacedeadbeeffeedfacedeadbeef" %% AAD + "abaddad2"), + hexstr2bin("42831ec2217774244b7221b784d0d49c" %% CipherText + "e3aa212f2c02a4e035c17e2329aca12e" + "21d514b25466931c7d8f6a5aac84aa05" + "1ba30b396a0aac973d58e091"), + hexstr2bin("5bc94fbc3221a5db94fae95ae7121a47")}, %% CipherTag + + %% Test Case 5 + {aes_gcm, hexstr2bin("feffe9928665731c6d6a8f9467308308"), %% Key + hexstr2bin("d9313225f88406e5a55909c5aff5269a" %% PlainText + "86a7a9531534f7da2e4c303d8a318a72" + "1c3c0c95956809532fcf0e2449a6b525" + "b16aedf5aa0de657ba637b39"), + hexstr2bin("cafebabefacedbad"), %% IV + hexstr2bin("feedfacedeadbeeffeedfacedeadbeef" %% AAD + "abaddad2"), + hexstr2bin("61353b4c2806934a777ff51fa22a4755" %% CipherText + "699b2a714fcdc6f83766e5f97b6c7423" + "73806900e49f24b22b097544d4896b42" + "4989b5e1ebac0f07c23f4598"), + hexstr2bin("3612d2e79e3b0785561be14aaca2fccb")}, %% CipherTag + + %% Test Case 6" + {aes_gcm, hexstr2bin("feffe9928665731c6d6a8f9467308308"), %% Key + hexstr2bin("d9313225f88406e5a55909c5aff5269a" %% PlainText + "86a7a9531534f7da2e4c303d8a318a72" + "1c3c0c95956809532fcf0e2449a6b525" + "b16aedf5aa0de657ba637b39"), + hexstr2bin("9313225df88406e555909c5aff5269aa" %% IV + "6a7a9538534f7da1e4c303d2a318a728" + "c3c0c95156809539fcf0e2429a6b5254" + "16aedbf5a0de6a57a637b39b"), + hexstr2bin("feedfacedeadbeeffeedfacedeadbeef" %% AAD + "abaddad2"), + hexstr2bin("8ce24998625615b603a033aca13fb894" %% CipherText + "be9112a5c3a211a8ba262a3cca7e2ca7" + "01e4a9a4fba43c90ccdcb281d48c7c6f" + "d62875d2aca417034c34aee5"), + hexstr2bin("619cc5aefffe0bfa462af43c1699d050")}, %% CipherTag + + %% Test Case 7 + {aes_gcm, hexstr2bin("00000000000000000000000000000000" %% Key + "0000000000000000"), + hexstr2bin(""), %% PlainText + hexstr2bin("000000000000000000000000"), %% IV + hexstr2bin(""), %% AAD + hexstr2bin(""), %% CipherText + hexstr2bin("cd33b28ac773f74ba00ed1f312572435")}, %% CipherTag 
+ + %% Test Case 8 + {aes_gcm, hexstr2bin("00000000000000000000000000000000" %% Key + "0000000000000000"), + hexstr2bin("00000000000000000000000000000000"), %% PlainText + hexstr2bin("000000000000000000000000"), %% IV + hexstr2bin(""), %% AAD + hexstr2bin("98e7247c07f0fe411c267e4384b0f600"), %% CipherText + hexstr2bin("2ff58d80033927ab8ef4d4587514f0fb")}, %% CipherTag + + %% Test Case 9 + {aes_gcm, hexstr2bin("feffe9928665731c6d6a8f9467308308" %% Key + "feffe9928665731c"), + hexstr2bin("d9313225f88406e5a55909c5aff5269a" %% PlainText + "86a7a9531534f7da2e4c303d8a318a72" + "1c3c0c95956809532fcf0e2449a6b525" + "b16aedf5aa0de657ba637b391aafd255"), + hexstr2bin("cafebabefacedbaddecaf888"), %% IV + hexstr2bin(""), %% ADD + hexstr2bin("3980ca0b3c00e841eb06fac4872a2757" %% CipherText + "859e1ceaa6efd984628593b40ca1e19c" + "7d773d00c144c525ac619d18c84a3f47" + "18e2448b2fe324d9ccda2710acade256"), + hexstr2bin("9924a7c8587336bfb118024db8674a14")}, %% CipherTag + + %% Test Case 10 + {aes_gcm, hexstr2bin("feffe9928665731c6d6a8f9467308308" %% Key + "feffe9928665731c"), + hexstr2bin("d9313225f88406e5a55909c5aff5269a" %% PlainText + "86a7a9531534f7da2e4c303d8a318a72" + "1c3c0c95956809532fcf0e2449a6b525" + "b16aedf5aa0de657ba637b39"), + hexstr2bin("cafebabefacedbaddecaf888"), %% IV + hexstr2bin("feedfacedeadbeeffeedfacedeadbeef" %% AAD + "abaddad2"), + hexstr2bin("3980ca0b3c00e841eb06fac4872a2757" %% CipherText + "859e1ceaa6efd984628593b40ca1e19c" + "7d773d00c144c525ac619d18c84a3f47" + "18e2448b2fe324d9ccda2710"), + hexstr2bin("2519498e80f1478f37ba55bd6d27618c")}, %% CipherTag + + %% Test Case 11 + {aes_gcm, hexstr2bin("feffe9928665731c6d6a8f9467308308" %% Key + "feffe9928665731c"), + hexstr2bin("d9313225f88406e5a55909c5aff5269a" %% PlainText + "86a7a9531534f7da2e4c303d8a318a72" + "1c3c0c95956809532fcf0e2449a6b525" + "b16aedf5aa0de657ba637b39"), + hexstr2bin("cafebabefacedbad"), %% IV + hexstr2bin("feedfacedeadbeeffeedfacedeadbeef" %% AAD + "abaddad2"), + hexstr2bin("0f10f599ae14a154ed24b36e25324db8" %% CipherText + "c566632ef2bbb34f8347280fc4507057" + "fddc29df9a471f75c66541d4d4dad1c9" + "e93a19a58e8b473fa0f062f7"), + hexstr2bin("65dcc57fcf623a24094fcca40d3533f8")}, %% CipherTag + + %% Test Case 12 + {aes_gcm, hexstr2bin("feffe9928665731c6d6a8f9467308308" %% Key + "feffe9928665731c"), + hexstr2bin("d9313225f88406e5a55909c5aff5269a" %% PlainText + "86a7a9531534f7da2e4c303d8a318a72" + "1c3c0c95956809532fcf0e2449a6b525" + "b16aedf5aa0de657ba637b39"), + hexstr2bin("9313225df88406e555909c5aff5269aa" %% IV + "6a7a9538534f7da1e4c303d2a318a728" + "c3c0c95156809539fcf0e2429a6b5254" + "16aedbf5a0de6a57a637b39b"), + hexstr2bin("feedfacedeadbeeffeedfacedeadbeef" %% AAD + "abaddad2"), + hexstr2bin("d27e88681ce3243c4830165a8fdcf9ff" %% CipherText + "1de9a1d8e6b447ef6ef7b79828666e45" + "81e79012af34ddd9e2f037589b292db3" + "e67c036745fa22e7e9b7373b"), + hexstr2bin("dcf566ff291c25bbb8568fc3d376a6d9")}, %% CipherTag + + %% Test Case 13 + {aes_gcm, hexstr2bin("00000000000000000000000000000000" %% Key + "00000000000000000000000000000000"), + hexstr2bin(""), %% PlainText + hexstr2bin("000000000000000000000000"), %% IV + hexstr2bin(""), %% AAD + hexstr2bin(""), %% CipherText + hexstr2bin("530f8afbc74536b9a963b4f1c4cb738b")}, %% CipherTag + + %% Test Case 14 + {aes_gcm, hexstr2bin("00000000000000000000000000000000" %% Key + "00000000000000000000000000000000"), + hexstr2bin("00000000000000000000000000000000"), %% PlainText + hexstr2bin("000000000000000000000000"), %% IV + hexstr2bin(""), %% AAD + 
hexstr2bin("cea7403d4d606b6e074ec5d3baf39d18"), %% CipherText + hexstr2bin("d0d1c8a799996bf0265b98b5d48ab919")}, %% CipherTag + + %% Test Case 15 + {aes_gcm, hexstr2bin("feffe9928665731c6d6a8f9467308308" %% Key + "feffe9928665731c6d6a8f9467308308"), + hexstr2bin("d9313225f88406e5a55909c5aff5269a" %% PlainText + "86a7a9531534f7da2e4c303d8a318a72" + "1c3c0c95956809532fcf0e2449a6b525" + "b16aedf5aa0de657ba637b391aafd255"), + hexstr2bin("cafebabefacedbaddecaf888"), %% IV + hexstr2bin(""), %% AAD + hexstr2bin("522dc1f099567d07f47f37a32a84427d" %% CipherText + "643a8cdcbfe5c0c97598a2bd2555d1aa" + "8cb08e48590dbb3da7b08b1056828838" + "c5f61e6393ba7a0abcc9f662898015ad"), + hexstr2bin("b094dac5d93471bdec1a502270e3cc6c")}, %% CipherTag + + %% Test Case 16 + {aes_gcm, hexstr2bin("feffe9928665731c6d6a8f9467308308" %% Key + "feffe9928665731c6d6a8f9467308308"), + hexstr2bin("d9313225f88406e5a55909c5aff5269a" %% PlainText + "86a7a9531534f7da2e4c303d8a318a72" + "1c3c0c95956809532fcf0e2449a6b525" + "b16aedf5aa0de657ba637b39"), + hexstr2bin("cafebabefacedbaddecaf888"), %% IV + hexstr2bin("feedfacedeadbeeffeedfacedeadbeef" %% AAD + "abaddad2"), + hexstr2bin("522dc1f099567d07f47f37a32a84427d" %% CipherText + "643a8cdcbfe5c0c97598a2bd2555d1aa" + "8cb08e48590dbb3da7b08b1056828838" + "c5f61e6393ba7a0abcc9f662"), + hexstr2bin("76fc6ece0f4e1768cddf8853bb2d551b")}, %% CipherTag + + %% Test Case 17 + {aes_gcm, hexstr2bin("feffe9928665731c6d6a8f9467308308" %% Key + "feffe9928665731c6d6a8f9467308308"), + hexstr2bin("d9313225f88406e5a55909c5aff5269a" %% PlainText + "86a7a9531534f7da2e4c303d8a318a72" + "1c3c0c95956809532fcf0e2449a6b525" + "b16aedf5aa0de657ba637b39"), + hexstr2bin("cafebabefacedbad"), %% IV + hexstr2bin("feedfacedeadbeeffeedfacedeadbeef" %% AAD + "abaddad2"), + hexstr2bin("c3762df1ca787d32ae47c13bf19844cb" %% CipherText + "af1ae14d0b976afac52ff7d79bba9de0" + "feb582d33934a4f0954cc2363bc73f78" + "62ac430e64abe499f47c9b1f"), + hexstr2bin("3a337dbf46a792c45e454913fe2ea8f2")}, %% CipherTag + + %% Test Case 18 + {aes_gcm, hexstr2bin("feffe9928665731c6d6a8f9467308308" %% Key + "feffe9928665731c6d6a8f9467308308"), + hexstr2bin("d9313225f88406e5a55909c5aff5269a" %% PlainText + "86a7a9531534f7da2e4c303d8a318a72" + "1c3c0c95956809532fcf0e2449a6b525" + "b16aedf5aa0de657ba637b39"), + hexstr2bin("9313225df88406e555909c5aff5269aa" %% IV + "6a7a9538534f7da1e4c303d2a318a728" + "c3c0c95156809539fcf0e2429a6b5254" + "16aedbf5a0de6a57a637b39b"), + hexstr2bin("feedfacedeadbeeffeedfacedeadbeef" %% AAD + "abaddad2"), + hexstr2bin("5a8def2f0c9e53f1f75d7853659e2a20" %% CipherText + "eeb2b22aafde6419a058ab4f6f746bf4" + "0fc0c3b780f244452da3ebf1c5d82cde" + "a2418997200ef82e44ae7e3f"), + hexstr2bin("a44a8266ee1c8eb0c8b5d4cf5ae9f19a")} %% CipherTag + ]. + +%% http://tools.ietf.org/html/draft-agl-tls-chacha20poly1305-04 +chacha20_poly1305() -> + [ + {chacha20_poly1305, hexstr2bin("4290bcb154173531f314af57f3be3b500" %% Key + "6da371ece272afa1b5dbdd1100a1007"), + hexstr2bin("86d09974840bded2a5ca"), %% PlainText + hexstr2bin("cd7cf67be39c794a"), %% Nonce + hexstr2bin("87e229d4500845a079c0"), %% AAD + hexstr2bin("e3e446f7ede9a19b62a4"), %% CipherText + hexstr2bin("677dabf4e3d24b876bb284753896e1d6")} %% CipherTag + ]. + rsa_plain() -> <<"7896345786348756234 Hejsan Svejsan, erlang crypto debugger" "09812312908312378623487263487623412039812 huagasd">>. @@ -1736,6 +2152,27 @@ rsa_oaep() -> Msg = hexstr2bin("750c4047f547e8e41411856523298ac9bae245efaf1397fbe56f9dd5"), {rsa, Public, Private, Msg, rsa_pkcs1_oaep_padding}. 
+ecc() -> +%% http://point-at-infinity.org/ecc/nisttv +%% +%% Test vectors for the NIST elliptic curves P192, P224, P256, P384, P521, +%% B163, B233, B283, B409, B571, K163, K233, K283, K409 and K571. For more +%% information about the curves see +%% http://csrc.nist.gov/encryption/dss/ecdsa/NISTReCur.pdf +%% + [{ecdh,secp192r1,1, + hexstr2point("188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF1012", + "07192B95FFC8DA78631011ED6B24CDD573F977A11E794811")}, + {ecdh,secp192r1,2, + hexstr2point("DAFEBF5828783F2AD35534631588A3F629A70FB16982A888", + "DD6BDA0D993DA0FA46B27BBC141B868F59331AFA5C7E93AB")}, + {ecdh,secp192r1,3, + hexstr2point("76E32A2557599E6EDCD283201FB2B9AADFD0D359CBB263DA", + "782C37E372BA4520AA62E0FED121D49EF3B543660CFD05FD")}, + {ecdh,secp192r1,4, + hexstr2point("35433907297CC378B0015703374729D7A4FE46647084E4BA", + "A2649984F2135C301EA3ACB0776CD4F125389B311DB3BE32")}]. + no_padding() -> Public = [_, Mod] = rsa_public(), Private = rsa_private(), diff --git a/lib/debugger/test/map_SUITE.erl b/lib/debugger/test/map_SUITE.erl index 0076193725..e525484a8e 100644 --- a/lib/debugger/test/map_SUITE.erl +++ b/lib/debugger/test/map_SUITE.erl @@ -32,9 +32,30 @@ t_guard_receive/1, t_guard_fun/1, t_list_comprehension/1, t_map_sort_literals/1, - %t_size/1, t_map_size/1, - + t_build_and_match_aliasing/1, + + %% variables + t_build_and_match_variables/1, + t_update_assoc_variables/1,t_update_exact_variables/1, + t_nested_pattern_expressions/1, + t_guard_update_variables/1, + t_guard_sequence_variables/1, + t_guard_sequence_mixed/1, + t_frequency_table/1, + + %% not covered in 17.0-rc1 + t_build_and_match_over_alloc/1, + t_build_and_match_empty_val/1, + t_build_and_match_val/1, + t_build_and_match_nil/1, + t_build_and_match_structure/1, + + %% errors in 17.0-rc1 + t_update_values/1, + t_expand_map_update/1, + t_export/1, + %% Specific Map BIFs t_bif_map_get/1, t_bif_map_find/1, @@ -61,8 +82,7 @@ %% misc t_pdict/1, - t_ets/1, - t_dets/1 + t_ets/1 ]). -include_lib("stdlib/include/ms_transform.hrl"). @@ -77,7 +97,30 @@ all() -> [ t_guard_bifs, t_guard_sequence, t_guard_update, t_guard_receive,t_guard_fun, t_list_comprehension, t_map_sort_literals, - + t_build_and_match_aliasing, + + %% variables + t_build_and_match_variables, + t_update_assoc_variables,t_update_exact_variables, + t_nested_pattern_expressions, + t_guard_update_variables, + t_guard_sequence_variables, + t_guard_sequence_mixed, + t_frequency_table, + + %% not covered in 17.0-rc1 + t_build_and_match_over_alloc, + t_build_and_match_empty_val, + t_build_and_match_val, + t_build_and_match_nil, + t_build_and_match_structure, + + + %% errors in 17.0-rc1 + t_update_values, + t_expand_map_update, + t_export, + %% Specific Map BIFs t_bif_map_get,t_bif_map_find,t_bif_map_is_key, t_bif_map_keys, t_bif_map_merge, t_bif_map_new, @@ -94,7 +137,6 @@ all() -> [ t_maps_fold, t_maps_map, t_maps_size, t_maps_without, - %% Other functions t_pdict, t_ets @@ -147,18 +189,6 @@ t_build_and_match_literals(Config) when is_list(Config) -> {'EXIT',{{badmatch,_},_}} = (catch (#{x:=3} = id(#{x=>"three"}))), ok. - -%% Tests size(Map). -%% not implemented, perhaps it shouldn't be either - -%t_size(Config) when is_list(Config) -> -% 0 = size(#{}), -% 1 = size(#{a=>1}), -% 1 = size(#{a=>#{a=>1}}), -% 2 = size(#{a=>1, b=>2}), -% 3 = size(#{a=>1, b=>2, b=>"3"}), -% ok. - t_map_size(Config) when is_list(Config) -> 0 = map_size(id(#{})), 1 = map_size(id(#{a=>1})), @@ -268,6 +298,44 @@ t_update_exact(Config) when is_list(Config) -> ok. 
+t_update_values(Config) when is_list(Config) -> + V0 = id(1337), + M0 = #{ a => 1, val => V0}, + V1 = get_val(M0), + M1 = id(M0#{ val := [V0,V1], "wazzup" => 42 }), + [1337, {some_val, 1337}] = get_val(M1), + M2 = id(M1#{ <<42:333>> => 1337 }), + {bin_key,1337} = get_val(M2), + + N = 110, + List = [{[I,1,2,3,I],{1,2,3,"wat",I}}|| I <- lists:seq(1,N)], + + {_,_,#{val2 := {1,2,3,"wat",N}, val1 := [N,1,2,3,N]}} = lists:foldl(fun + ({V2,V3},{Old2,Old3,Mi}) -> + ok = check_val(Mi,Old2,Old3), + #{ val1 := Old2, val2 := Old3 } = Mi, + {V2,V3, Mi#{ val1 := id(V2), val2 := V1, val2 => id(V3)}} + end, {none, none, #{val1=>none,val2=>none}},List), + ok. + +t_expand_map_update(Config) when is_list(Config) -> + M = #{<<"hello">> => <<"world">>}#{<<"hello">> := <<"les gens">>}, + #{<<"hello">> := <<"les gens">>} = M, + ok. + +t_export(Config) when is_list(Config) -> + Raclette = id(#{}), + case brie of brie -> Fromage = Raclette end, + Raclette = Fromage#{}, + ok. + +check_val(#{val1:=V1, val2:=V2},V1,V2) -> ok. + +get_val(#{ <<42:333>> := V }) -> {bin_key, V}; +get_val(#{ "wazzup" := _, val := V}) -> V; +get_val(#{ val := V }) -> {some_val, V}. + + t_guard_bifs(Config) when is_list(Config) -> true = map_guard_head(#{a=>1}), false = map_guard_head([]), @@ -988,16 +1056,385 @@ t_ets(_Config) -> ets:delete(Tid), ok. -t_dets(_Config) -> +t_build_and_match_aliasing(Config) when is_list(Config) -> + M1 = id(#{a=>1,b=>2,c=>3,d=>4}), + #{c:=C1=_=_=C2} = M1, + true = C1 =:= C2, + #{a:=A,a:=A,a:=A,b:=B,b:=B} = M1, + #{a:=A,a:=A,a:=A,b:=B,b:=B,b:=2} = M1, + #{a:=A=1,a:=A,a:=A,b:=B=2,b:=B,b:=2} = M1, + #{c:=C1, c:=_, c:=3, c:=_, c:=C2} = M1, + #{c:=C=_=3=_=C} = M1, + + M2 = id(#{"a"=>1,"b"=>2,"c"=>3,"d"=>4}), + #{"a":=A2,"a":=A2,"a":=A2,"b":=B2,"b":=B2,"b":=2} = M2, + #{"a":=_,"a":=_,"a":=_,"b":=_,"b":=_,"b":=2} = M2, + ok. 
+ +%% simple build and match variables +t_build_and_match_variables(Config) when is_list(Config) -> + K0 = id(#{}), + K1 = id(1), V1 = id(a), + K2 = id(2), V2 = id(b), + K3 = id(3), V3 = id("c"), + K4 = id("4"), V4 = id("d"), + K5 = id(<<"5">>), V5 = id(<<"e">>), + K6 = id({"6",7}), V6 = id("f"), + K7 = id(#{ "a" => 3 }), + #{K1:=V1} = id(#{K1=>V1}), + #{K1:=V1,K2:=V2} = id(#{K1=>V1,K2=>V2}), + #{K1:=V1,K2:=V2,K3:=V3} = id(#{K1=>V1,K2=>V2,K3=>V3}), + #{K1:=V1,K2:=V2,K3:=V3,K4:=V4} = id(#{K1=>V1,K2=>V2,K3=>V3,K4=>V4}), + #{K1:=V1,K2:=V2,K3:=V3,K4:=V4,K5:=V5} = id(#{K1=>V1,K2=>V2,K3=>V3,K4=>V4,K5=>V5}), + #{K1:=V1,K2:=V2,K3:=V3,K4:=V4,K5:=V5,K6:=V6} = id(#{K1=>V1,K2=>V2,K3=>V3,K4=>V4,K5=>V5,K6=>V6}), + + #{K5:=X,K5:=X=3,K4:=4} = id(#{K5=>3,K4=>4}), + #{K5:=X,<<"5">>:=X=3,K4:=4} = id(#{K5=>3,K4=>4}), + #{K5:=X,<<"5">>:=X=3,K4:=4} = id(#{<<"5">>=>3,K4=>4}), + + #{ K4:=#{ K3:=#{K1:=V1, K2:=V2}}, K5:=V5} = + id(#{ K5=>V5, K4=>#{ K3=>#{K2 => V2, K1 => V1}}}), + #{ K4 := #{ K5 := Res }, K6 := Res} = id(#{K4=>#{K5 => 99}, K6 => 99}), + + %% has keys + #{a :=_,b :=_,K1:=_,K2:=_,K3:=V3,K4:=ResKey,K4:=ResKey,"4":=ResKey,"4":="ok"} = + id(#{ a=>1, b=>1, K1=>V1, K2=>V2, K3=>V3, K4=>"nope", "4"=>"ok" }), + + %% function + ok = match_function_map_neg_keys(#{ -1 => a, -2 => b, -3 => c }), + + %% map key + #{ K0 := 42 } = id(#{ K0 => 42 }), + #{ K7 := 42 } = id(#{ K7 => 42 }), + + %% nil key + KNIL = id([]), + #{KNIL:=ok,1:=2} = id(#{KNIL=>ok,1=>2}), + + Bin = <<0:258>>, + #{ Bin := "three" } = id(#{<<0:258>> =>"three"}), + + %% error case + {'EXIT',{{badmatch,_},_}} = (catch (#{K5:=3,x:=2} = id(#{K5=>3}))), + {'EXIT',{{badmatch,_},_}} = (catch (#{K5:=2} = id(#{K5=>3}))), + {'EXIT',{{badmatch,_},_}} = (catch (#{K5:=3} = id({a,b,c}))), + {'EXIT',{{badmatch,_},_}} = (catch (#{K5:=3} = id(#{K6=>3}))), + {'EXIT',{{badmatch,_},_}} = (catch (#{K5:=3} = id(K7))), + {'EXIT',{{badmatch,_},_}} = (catch (#{K7:=3} = id(#{K7=>42}))), + ok. + + +match_function_map_neg_keys(#{ -1 := a, -2 := b, -3 := c }) -> ok. + +t_update_assoc_variables(Config) when is_list(Config) -> + K1 = id(1), + K2 = id(2), + K3 = id(3.0), + K4 = id(4), + K5 = id(5), + K6 = id(2.0), + + M0 = #{K1=>a,K2=>b,K3=>c,K4=>d,K5=>e}, + + M1 = M0#{K1=>42,K2=>100,K4=>[a,b,c]}, + #{1:=42,2:=100,3.0:=c,4:=[a,b,c],5:=e} = M1, + #{1:=42,2:=b,4:=d,5:=e,2.0:=100,K3:=c,4.0:=[a,b,c]} = M0#{1.0=>float,1:=42,2.0=>wrong,K6=>100,4.0=>[a,b,c]}, + + M2 = M0#{K3=>new}, + #{1:=a,2:=b,K3:=new,4:=d,5:=e} = M2, + M2 = M0#{3.0:=wrong,K3=>new}, + + #{ <<0:258>> := val } = id(M0#{<<0:258>> => val}), %% binary limitation + + %% Errors cases. + BadMap = id(badmap), + {'EXIT',{{badarg,_},_}} = (catch BadMap#{nonexisting=>val}), + {'EXIT',{{badarg,_},_}} = (catch <<>>#{nonexisting=>val}), + ok. + +t_update_exact_variables(Config) when is_list(Config) -> + K1 = id(1), + K2 = id(2), + K3 = id(3.0), + K4 = id(4), + + M0 = id(#{1=>a,2=>b,3.0=>c,4=>d,5=>e}), + + M1 = M0#{K1:=42,K2:=100,K4:=[a,b,c]}, + #{1:=42,2:=100,3.0:=c,K4:=[a,b,c],5:=e} = M1, + M1 = M0#{K1:=wrong,1:=also_wrong,K1=>42,2=>wrong,K2:=100,4:=[a,b,c]}, + + M2 = M0#{K3:=new}, + #{1:=a,K2:=b,3.0:=new,K4:=d,5:=e} = M2, + M2 = M0#{3.0=>wrong,K3:=new}, + true = M2 =/= M0#{3=>right,3.0:=new}, + #{ 3 := right, 3.0 := new } = M0#{3=>right,K3:=new}, + + M3 = id(#{ 1 => val}), + #{1 := update2,1.0 := new_val4} = M3#{ + 1.0 => new_val1, K1 := update, K1=> update3, + K1 := update2, 1.0 := new_val2, 1.0 => new_val3, + 1.0 => new_val4 }, + + %% Errors cases. 
+ {'EXIT',{{badarg,_},_}} = (catch ((id(nil))#{ a := b })), + {'EXIT',{{badarg,_},_}} = (catch <<>>#{nonexisting:=val}), + + {'EXIT',{badarg,_}} = (catch M0#{nonexisting:=val}), + {'EXIT',{badarg,_}} = (catch M0#{1.0:=v,1.0=>v2}), + {'EXIT',{badarg,_}} = (catch M0#{42.0:=v,42:=v2}), + {'EXIT',{badarg,_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}), + {'EXIT',{badarg,_}} = (catch M0#{<<0:257>> := val}), %% limitation + ok. + +t_nested_pattern_expressions(Config) when is_list(Config) -> + K1 = id("hello"), + %K2 = id({ok}), + [_,_,#{ <<"hi">> := wat, K1 := 42 }|_] = id([k,k,#{<<"hi">> => wat, K1 => 42}]), + [_,_,#{ -1 := wat, K1 := 42 }|_] = id([k,k,#{-1 => wat, K1 => 42}]), + [_,_,{#{ -1 := #{ {-3,<<0:300>>} := V1 }, K1 := 42 },3}|_] = id([k,k,{#{-1 => #{{-3,<<0:300>>}=>"hi"}, K1 => 42},3}]), + "hi" = V1, + %[k,#{ {-1,K1,[]} := {wat,K1}, K2 := 42 }|_] = id([k,#{{-1,K1,[]} => {wat,K1}, K2 => 42}]), + %[k,#{ [-1,K2,[]] := {wat,K1}, K1 := 42 }|_] = id([k,#{[-1,K2,[]] => {wat,K1}, K1 => 42}]), + ok. + +t_guard_update_variables(Config) when is_list(Config) -> + error = map_guard_update_variables(n,#{},#{}), + first = map_guard_update_variables(x,#{}, #{x=>first}), + second = map_guard_update_variables(x,#{y=>old}, #{x=>second,y=>old}), + third = map_guard_update_variables(x,#{x=>old,y=>old}, #{x=>third,y=>old}), + fourth = map_guard_update_variables(x,#{x=>old,y=>old}, #{x=>4,y=>new}), + ok. + +map_guard_update_variables(K,M1,M2) when M1#{K=>first} =:= M2 -> first; +map_guard_update_variables(K,M1,M2) when M1#{K=>second} =:= M2 -> second; +map_guard_update_variables(K,M1,M2) when M1#{K:=third} =:= M2 -> third; +map_guard_update_variables(K,M1,M2) when M1#{K:=4,y=>new} =:= M2 -> fourth; +map_guard_update_variables(_,_,_) -> error. + +t_guard_sequence_variables(Config) when is_list(Config) -> + {1,"a"} = map_guard_sequence_var_1(a,#{seq=>1,a=>id("a"),b=>no}), + {2,"b"} = map_guard_sequence_var_1(b,#{seq=>2,b=>id("b"),a=>no}), + {3,"c"} = map_guard_sequence_var_1(a,#{seq=>3,a=>id("c"),b=>no}), + {4,"d"} = map_guard_sequence_var_1(b,#{seq=>4,b=>id("d"),a=>no}), + {4,4} = map_guard_sequence_var_1(seq,#{seq=>4}), + {4,4,y} = map_guard_sequence_var_1(seq,#{seq=>4,b=>id("d"),a=>y}), + {5,"d"} = map_guard_sequence_var_1(b,#{seq=>5,b=>id("d"),a=>y}), + + %% error case + {'EXIT',{{case_clause,_},_}} = (catch map_guard_sequence_var_1("a",#{seq=>4,val=>id("e")})), + ok. + + +map_guard_sequence_var_1(K,M) -> + case M of + #{seq:=1=Seq, K:=Val} -> {Seq,Val}; + #{seq:=2=Seq, K:=Val} -> {Seq,Val}; + #{seq:=3=Seq, K:=Val} -> {Seq,Val}; + #{K:=4=Seq, K:=Val1,a:=Val2} -> {Seq,Val1,Val2}; + #{seq:=4=Seq, K:=Val} -> {Seq,Val}; + #{K:=4=Seq, K:=Val} -> {Seq,Val}; + #{seq:=5=Seq, K:=Val} -> {Seq,Val} + end. + + +t_guard_sequence_mixed(Config) when is_list(Config) -> + M0 = id(#{ a=>1, b=>1, c=>1, d=>1, e=>1, f=>1, g=>1, h=>1 }), + M1 = id(M0#{ d := 3 }), + 1 = map_guard_sequence_mixed(a,d,M1), + M2 = id(M1#{ b := 2, d := 4, h := 2 }), + 2 = map_guard_sequence_mixed(a,d,M2), + M3 = id(M2#{ b := 3, e := 5, g := 3 }), + 3 = map_guard_sequence_mixed(a,e,M3), + M4 = id(M3#{ c := 4, e := 6, h := 1 }), + 4 = map_guard_sequence_mixed(a,e,M4), + M5 = id(M4#{ c := 5, f := 7, g := 2 }), + 5 = map_guard_sequence_mixed(a,f,M5), + M6 = id(M5#{ c := 6, f := 8, h := 3 }), + 6 = map_guard_sequence_mixed(a,f,M6), + + %% error case + {'EXIT',{{case_clause,_},_}} = (catch map_guard_sequence_mixed(a,b,M0)), + ok. 
+ +map_guard_sequence_mixed(K1,K2,M) -> + case M of + #{ K1 := 1, b := 1, K2 := 3, g := 1} -> 1; + #{ K1 := 1, b := 2, K2 := 4, h := 2} -> 2; + #{ K1 := 1, b := 3, K2 := 5, g := 3} -> 3; + #{ K1 := 1, c := 4, K2 := 6, h := 1} -> 4; + #{ K1 := 1, c := 5, K2 := 7, g := 2} -> 5; + #{ K1 := 1, c := 6, K2 := 8, h := 3} -> 6 + end. + + + +t_frequency_table(Config) when is_list(Config) -> + random:seed({13,1337,54}), % pseudo random + N = 100000, + Ts = rand_terms(N), + #{ n:=N, tf := Tf } = frequency_table(Ts,#{ n=>0, tf => #{}}), + ok = check_frequency(Ts,Tf), + ok. + + +frequency_table([T|Ts], M) -> + case M of + #{ n := N, tf := #{ T := C } = F } -> + frequency_table(Ts,M#{ n := N + 1, tf := F#{ T := C + 1 }}); + #{ n := N, tf := F } -> + frequency_table(Ts,M#{ n := N + 1, tf := F#{ T => 1 }}) + end; +frequency_table([], M) -> M. + + +check_frequency(Ts,Tf) -> + check_frequency(Ts,Tf,dict:new()). + +check_frequency([T|Ts],Tf,D) -> + case dict:find(T,D) of + error -> check_frequency(Ts,Tf,dict:store(T,1,D)); + {ok,C} -> check_frequency(Ts,Tf,dict:store(T,C+1,D)) + end; +check_frequency([],Tf,D) -> + validate_frequency(dict:to_list(D),Tf). + +validate_frequency([{T,C}|Fs],Tf) -> + case Tf of + #{ T := C } -> validate_frequency(Fs,Tf); + _ -> error + end; +validate_frequency([], _) -> ok. + + +%% aux + +rand_terms(0) -> []; +rand_terms(N) -> [rand_term()|rand_terms(N-1)]. + +rand_term() -> + case random:uniform(6) of + 1 -> rand_binary(); + 2 -> rand_number(); + 3 -> rand_atom(); + 4 -> rand_tuple(); + 5 -> rand_list(); + 6 -> rand_map() + end. + +rand_binary() -> + case random:uniform(3) of + 1 -> <<>>; + 2 -> <<"hi">>; + 3 -> <<"message text larger than 64 bytes. yep, message text larger than 64 bytes.">> + end. + +rand_number() -> + case random:uniform(3) of + 1 -> random:uniform(5); + 2 -> float(random:uniform(5)); + 3 -> 1 bsl (63 + random:uniform(3)) + end. + +rand_atom() -> + case random:uniform(3) of + 1 -> hi; + 2 -> some_atom; + 3 -> some_other_atom + end. + + +rand_tuple() -> + case random:uniform(3) of + 1 -> {ok, rand_term()}; % careful + 2 -> {1, 2, 3}; + 3 -> {<<"yep">>, 1337} + end. + +rand_list() -> + case random:uniform(3) of + 1 -> "hi"; + 2 -> [1,rand_term()]; % careful + 3 -> [improper|list] + end. + +rand_map() -> + case random:uniform(3) of + 1 -> #{ hi => 3 }; + 2 -> #{ wat => rand_term(), other => 3 }; % careful + 3 -> #{ hi => 42, other => 42, yet_anoter => 1337 } + end. + + +t_build_and_match_over_alloc(Config) when is_list(Config) -> + Ls = id([1,2,3]), + V0 = [a|Ls], + M0 = id(#{ "a" => V0 }), + #{ "a" := V1 } = M0, + V2 = id([c|Ls]), + M2 = id(#{ "a" => V2 }), + #{ "a" := V3 } = M2, + {[a,1,2,3],[c,1,2,3]} = id({V1,V3}), + ok. + +t_build_and_match_empty_val(Config) when is_list(Config) -> + F = fun(#{ "hi":=_,{1,2}:=_,1337:=_}) -> ok end, + ok = F(id(#{"hi"=>ok,{1,2}=>ok,1337=>ok})), + + %% error case + case (catch (F(id(#{"hi"=>ok})))) of + {'EXIT',{function_clause,_}} -> ok; + {'EXIT', {{case_clause,_},_}} -> {comment,inlined}; + Other -> + test_server:fail({no_match, Other}) + end. + +t_build_and_match_val(Config) when is_list(Config) -> + F = fun + (#{ "hi" := first, v := V}) -> {1,V}; + (#{ "hi" := second, v := V}) -> {2,V} + end, + + + {1,"hello"} = F(id(#{"hi"=>first,v=>"hello"})), + {2,"second"} = F(id(#{"hi"=>second,v=>"second"})), + + %% error case + case (catch (F(id(#{"hi"=>ok})))) of + {'EXIT',{function_clause,_}} -> ok; + {'EXIT', {{case_clause,_},_}} -> {comment,inlined}; + Other -> + test_server:fail({no_match, Other}) + end. 
+ +t_build_and_match_nil(Config) when is_list(Config) -> + %% literals removed the coverage + V1 = id(cookie), + V2 = id(cake), + V3 = id(crisps), + + #{ [] := V1, "treat" := V2, {your,treat} := V3 } = id(#{ + {your,treat} => V3, + "treat" => V2, + [] => V1 }), + #{ [] := V3, [] := V3 } = id(#{ [] => V1, [] => V3 }), + ok. + +t_build_and_match_structure(Config) when is_list(Config) -> + V2 = id("it"), + S = id([42,{"hi", "=)", #{ "a" => 42, any => any, val => "get_" ++ V2}}]), + + %% match deep map values + V2 = case S of + [42,{"hi",_, #{ "a" := 42, val := "get_" ++ V1, any := _ }}] -> V1 + end, + %% match deep map + ok = case S of + [42,{"hi",_, #{ }}] -> ok + end, ok. -getmsg(_Tracer) -> - receive V -> V after 100 -> timeout end. -trace_collector(Msg,Parent) -> - io:format("~p~n",[Msg]), - Parent ! Msg, - Parent. %% Use this function to avoid compile-time evaluation of an expression. id(I) -> I. diff --git a/lib/dialyzer/doc/src/dialyzer.xml b/lib/dialyzer/doc/src/dialyzer.xml index e482b1e6f8..b52c1edebf 100644 --- a/lib/dialyzer/doc/src/dialyzer.xml +++ b/lib/dialyzer/doc/src/dialyzer.xml @@ -139,7 +139,11 @@ <tag><c><![CDATA[-Wwarn]]></c></tag> <item>A family of options which selectively turn on/off warnings (for help on the names of warnings use - <c><![CDATA[dialyzer -Whelp]]></c>).</item> + <c><![CDATA[dialyzer -Whelp]]></c>). + Note that the options can also be given in the file with a + <c>-dialyzer()</c> attribute. See <seealso + marker="#suppression">Requesting or Suppressing Warnings in + Source Files</seealso> below for details.</item> <tag><c><![CDATA[--shell]]></c></tag> <item>Do not disable the Erlang shell while running the GUI.</item> <tag><c><![CDATA[--version]]></c> (or <c><![CDATA[-v]]></c>)</tag> @@ -269,6 +273,71 @@ given from the command line, so please refer to the sections above for a description of these.</p> </section> + + <section> + <marker id="suppression"></marker> + <title>Requesting or Suppressing Warnings in Source Files</title> + <p> + The <c>-dialyzer()</c> attribute can be used for turning off + warnings in a module by specifying functions or warning options. + For example, to turn off all warnings for the function + <c>f/0</c>, include the following line: + </p> +<code type="none"> +-dialyzer({nowarn_function, f/0}). +</code> + <p>To turn off warnings for improper lists, add the following line + to the source file: + </p> +<code type="none"> +-dialyzer(no_improper_lists). +</code> + <p>The <c>-dialyzer()</c> attribute is allowed after function + declarations. Lists of warning options or functions are allowed: + </p> +<code type="none"> +-dialyzer([{nowarn_function, [f/0]}, no_improper_lists]). +</code> + <p> + Warning options can be restricted to functions: + </p> +<code type="none"> +-dialyzer({no_improper_lists, g/0}). +</code> +<code type="none"> +-dialyzer({[no_return, no_match], [g/0, h/0]}). +</code> + <p> + For help on the warning options use <c>dialyzer -Whelp</c>. The + options are also enumerated <seealso + marker="#gui/1">below</seealso> (<c>WarnOpts</c>). + </p> + <note> + <p> + The <c>-dialyzer()</c> attribute is not checked by the Erlang + Compiler, but by the Dialyzer itself. + </p> + </note> + <note> + <p> + The warning option <c>-Wrace_conditions</c> has no effect when + set in source files. + </p> + </note> + <p> + The <c>-dialyzer()</c> attribute can also be used for turning on + warnings. 
For instance, if a module has been fixed regarding + unmatched returns, adding the line + </p> +<code type="none"> +-dialyzer(unmatched_returns). +</code> + <p> + can help in assuring that no new unmatched return warnings are + introduced. + </p> + </section> + <funcs> <func> <name>gui() -> ok | {error, Msg}</name> @@ -283,7 +352,7 @@ OptList :: [Option] Option :: {files, [Filename :: string()]} | {files_rec, [DirName :: string()]} - | {defines, [{Macro: atom(), Value : term()}]} + | {defines, [{Macro :: atom(), Value :: term()}]} | {from, src_code | byte_code} %% Defaults to byte_code | {init_plt, FileName :: string()} %% If changed from default | {plts, [FileName :: string()]} %% If changed from default diff --git a/lib/dialyzer/src/dialyzer.erl b/lib/dialyzer/src/dialyzer.erl index cec94a49fd..c9e7da9ef0 100644 --- a/lib/dialyzer/src/dialyzer.erl +++ b/lib/dialyzer/src/dialyzer.erl @@ -282,15 +282,17 @@ cl_check_log(none) -> cl_check_log(Output) -> io:format(" Check output file `~s' for details\n", [Output]). --spec format_warning(dial_warning()) -> string(). +-spec format_warning(raw_warning()) -> string(). format_warning(W) -> format_warning(W, basename). --spec format_warning(dial_warning(), fopt()) -> string(). +-spec format_warning(raw_warning() | dial_warning(), fopt()) -> string(). +format_warning({Tag, {File, Line, _MFA}, Msg}, FOpt) -> + format_warning({Tag, {File, Line}, Msg}, FOpt); format_warning({_Tag, {File, Line}, Msg}, FOpt) when is_list(File), - is_integer(Line) -> + is_integer(Line) -> F = case FOpt of fullpath -> File; basename -> filename:basename(File) diff --git a/lib/dialyzer/src/dialyzer.hrl b/lib/dialyzer/src/dialyzer.hrl index 9a25f86512..90addc35a8 100644 --- a/lib/dialyzer/src/dialyzer.hrl +++ b/lib/dialyzer/src/dialyzer.hrl @@ -84,6 +84,15 @@ -type dial_warning() :: {dial_warn_tag(), file_line(), {atom(), [term()]}}. %% +%% This is the representation of each warning before suppressions have +%% been applied +%% +-type m_or_mfa() :: module() % warnings not associated with any function + | mfa(). +-type warning_info() :: {file:filename(), non_neg_integer(), m_or_mfa()}. +-type raw_warning() :: {dial_warn_tag(), warning_info(), {atom(), [term()]}}. + +%% %% This is the representation of dialyzer's internal errors %% -type dial_error() :: any(). %% XXX: underspecified @@ -103,6 +112,7 @@ -type fopt() :: 'basename' | 'fullpath'. -type format() :: 'formatted' | 'raw'. -type label() :: non_neg_integer(). +-type dial_warn_tags():: ordsets:ordset(dial_warn_tag()). -type rep_mode() :: 'quiet' | 'normal' | 'verbose'. -type start_from() :: 'byte_code' | 'src_code'. -type mfa_or_funlbl() :: label() | mfa(). @@ -138,7 +148,7 @@ init_plts = [] :: [file:filename()], include_dirs = [] :: [file:filename()], output_plt = none :: 'none' | file:filename(), - legal_warnings = ordsets:new() :: ordsets:ordset(dial_warn_tag()), + legal_warnings = ordsets:new() :: dial_warn_tags(), report_mode = normal :: rep_mode(), erlang_mode = false :: boolean(), use_contracts = true :: boolean(), diff --git a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl index af1c2b7e3a..5ff7ad9c6f 100644 --- a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl +++ b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl @@ -39,8 +39,6 @@ one_file_result/0, compile_result/0]). --export_type([no_warn_unused/0]). - -include("dialyzer.hrl"). 
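The suppression section added to dialyzer.xml above explains which forms the -dialyzer() attribute takes and that it may appear after function declarations. The following hypothetical module fragment (all names invented, a sketch of the documented behaviour rather than code from the patch) shows how the module-wide and per-function forms can be combined:

    %% Hypothetical module; names are placeholders.
    -module(sample).
    -export([f/0, g/0]).

    %% Module-wide: request unmatched_returns warnings, suppress improper-list warnings.
    -dialyzer([unmatched_returns, no_improper_lists]).

    f() -> [1 | 2].

    %% Per-function suppression; -dialyzer() attributes may follow function declarations.
    -dialyzer({nowarn_function, g/0}).

    g() -> f().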
-record(analysis_state, @@ -50,8 +48,9 @@ defines = [] :: [dial_define()], doc_plt :: dialyzer_plt:plt(), include_dirs = [] :: [file:filename()], - no_warn_unused :: no_warn_unused(), parent :: pid(), + legal_warnings :: % command line options + [dial_warn_tag()], plt :: dialyzer_plt:plt(), start_from = byte_code :: start_from(), use_contracts = true :: boolean(), @@ -59,9 +58,10 @@ solvers :: [solver()] }). --record(server_state, {parent :: pid(), legal_warnings :: [dial_warn_tag()]}). - --type no_warn_unused() :: sets:set(mfa()). +-record(server_state, + { + parent :: pid() + }). %%-------------------------------------------------------------------- %% Main @@ -75,24 +75,24 @@ start(Parent, LegalWarnings, Analysis) -> Analysis0 = Analysis#analysis{race_detection = RacesOn, timing_server = TimingServer}, Analysis1 = expand_files(Analysis0), - Analysis2 = run_analysis(Analysis1), - State = #server_state{parent = Parent, legal_warnings = LegalWarnings}, + Analysis2 = run_analysis(Analysis1, LegalWarnings), + State = #server_state{parent = Parent}, loop(State, Analysis2, none), dialyzer_timing:stop(TimingServer). -run_analysis(Analysis) -> +run_analysis(Analysis, LegalWarnings) -> Self = self(), - Fun = fun() -> analysis_start(Self, Analysis) end, + Fun = fun() -> analysis_start(Self, Analysis, LegalWarnings) end, Analysis#analysis{analysis_pid = spawn_link(Fun)}. -loop(#server_state{parent = Parent, legal_warnings = LegalWarnings} = State, +loop(#server_state{parent = Parent} = State, #analysis{analysis_pid = AnalPid} = Analysis, ExtCalls) -> receive {AnalPid, log, LogMsg} -> send_log(Parent, LogMsg), loop(State, Analysis, ExtCalls); {AnalPid, warnings, Warnings} -> - case filter_warnings(LegalWarnings, Warnings) of + case Warnings of [] -> ok; SendWarnings -> send_warnings(Parent, SendWarnings) @@ -129,7 +129,7 @@ loop(#server_state{parent = Parent, legal_warnings = LegalWarnings} = State, %% The Analysis %%-------------------------------------------------------------------- -analysis_start(Parent, Analysis) -> +analysis_start(Parent, Analysis, LegalWarnings) -> CServer = dialyzer_codeserver:new(), Plt = Analysis#analysis.plt, State = #analysis_state{codeserver = CServer, @@ -139,13 +139,14 @@ analysis_start(Parent, Analysis) -> include_dirs = Analysis#analysis.include_dirs, plt = Plt, parent = Parent, + legal_warnings = LegalWarnings, start_from = Analysis#analysis.start_from, use_contracts = Analysis#analysis.use_contracts, timing_server = Analysis#analysis.timing_server, solvers = Analysis#analysis.solvers }, Files = ordsets:from_list(Analysis#analysis.files), - {Callgraph, NoWarn, TmpCServer0} = compile_and_store(Files, State), + {Callgraph, TmpCServer0} = compile_and_store(Files, State), %% Remote type postprocessing NewCServer = try @@ -177,7 +178,6 @@ analysis_start(Parent, Analysis) -> State0 = State#analysis_state{plt = NewPlt1}, dump_callgraph(Callgraph, State0, Analysis), State1 = State0#analysis_state{codeserver = NewCServer}, - State2 = State1#analysis_state{no_warn_unused = NoWarn}, %% Remove all old versions of the files being analyzed AllNodes = dialyzer_callgraph:all_nodes(Callgraph), Plt1 = dialyzer_plt:delete_list(NewPlt1, AllNodes), @@ -187,14 +187,14 @@ analysis_start(Parent, Analysis) -> true -> dialyzer_callgraph:put_race_detection(true, Callgraph); false -> Callgraph end, - State3 = analyze_callgraph(NewCallgraph, State2#analysis_state{plt = Plt1}), + State2 = analyze_callgraph(NewCallgraph, State1#analysis_state{plt = Plt1}), 
dialyzer_callgraph:dispose_race_server(NewCallgraph), rcv_and_send_ext_types(Parent), NonExports = sets:subtract(sets:from_list(AllNodes), Exports), NonExportsList = sets:to_list(NonExports), - Plt2 = dialyzer_plt:delete_list(State3#analysis_state.plt, NonExportsList), - send_codeserver_plt(Parent, CServer, State3#analysis_state.plt), - send_analysis_done(Parent, Plt2, State3#analysis_state.doc_plt). + Plt2 = dialyzer_plt:delete_list(State2#analysis_state.plt, NonExportsList), + send_codeserver_plt(Parent, CServer, State2#analysis_state.plt), + send_analysis_done(Parent, Plt2, State2#analysis_state.doc_plt). analyze_callgraph(Callgraph, #analysis_state{codeserver = Codeserver, doc_plt = DocPlt, @@ -210,11 +210,11 @@ analyze_callgraph(Callgraph, #analysis_state{codeserver = Codeserver, TimingServer, Solvers, Parent), {NewPlt0, DocPlt}; succ_typings -> - NoWarn = State#analysis_state.no_warn_unused, {Warnings, NewPlt0, NewDocPlt0} = dialyzer_succ_typings:get_warnings(Callgraph, Plt, DocPlt, Codeserver, - NoWarn, TimingServer, Solvers, Parent), - send_warnings(State#analysis_state.parent, Warnings), + TimingServer, Solvers, Parent), + Warnings1 = filter_warnings(Warnings, Codeserver), + send_warnings(State#analysis_state.parent, Warnings1), {NewPlt0, NewDocPlt0} end, dialyzer_callgraph:delete(Callgraph), @@ -230,19 +230,22 @@ analyze_callgraph(Callgraph, #analysis_state{codeserver = Codeserver, defines = [] :: [dial_define()], include_dirs = [] :: [file:filename()], start_from = byte_code :: start_from(), - use_contracts = true :: boolean() + use_contracts = true :: boolean(), + legal_warnings :: [dial_warn_tag()] }). make_compile_init(#analysis_state{codeserver = Codeserver, defines = Defs, include_dirs = Dirs, use_contracts = UseContracts, + legal_warnings = LegalWarnings, start_from = StartFrom}, Callgraph) -> #compile_init{callgraph = Callgraph, codeserver = Codeserver, defines = [{d, Macro, Val} || {Macro, Val} <- Defs], include_dirs = [{i, D} || D <- Dirs], use_contracts = UseContracts, + legal_warnings = LegalWarnings, start_from = StartFrom}. compile_and_store(Files, #analysis_state{codeserver = CServer, @@ -252,7 +255,7 @@ compile_and_store(Files, #analysis_state{codeserver = CServer, {T1, _} = statistics(wall_clock), Callgraph = dialyzer_callgraph:new(), CompileInit = make_compile_init(State, Callgraph), - {{Failed, NoWarn, Modules}, NextLabel} = + {{Failed, Modules}, NextLabel} = ?timing(Timing, "compile", _C1, dialyzer_coordinator:parallel_job(compile, Files, CompileInit, Timing)), @@ -281,34 +284,34 @@ compile_and_store(Files, #analysis_state{codeserver = CServer, {T3, _} = statistics(wall_clock), Msg2 = io_lib:format("done in ~.2f secs\n", [(T3-T2)/1000]), send_log(Parent, Msg2), - {Callgraph, sets:from_list(NoWarn), CServer2}. + {Callgraph, CServer2}. -type compile_init_data() :: #compile_init{}. -type error_reason() :: string(). --type compile_result() :: {[{file:filename(), error_reason()}], [mfa()], +-type compile_result() :: {[{file:filename(), error_reason()}], [module()]}. %%opaque -type one_file_result() :: {error, error_reason()} | {ok, [dialyzer_callgraph:callgraph_edge()], - [mfa_or_funlbl()], [mfa()], module()}. %%opaque --type compile_mid_data() :: {module(), cerl:cerl(), [mfa()], + [mfa_or_funlbl()], module()}. %%opaque +-type compile_mid_data() :: {module(), cerl:cerl(), dialyzer_callgraph:callgraph(), dialyzer_codeserver:codeserver()}. -spec compile_init_result() -> compile_result(). -compile_init_result() -> {[], [], []}. +compile_init_result() -> {[], []}. 
-spec add_to_result(file:filename(), one_file_result(), compile_result(), compile_init_data()) -> compile_result(). -add_to_result(File, NewData, {Failed, NoWarn, Mods}, InitData) -> +add_to_result(File, NewData, {Failed, Mods}, InitData) -> case NewData of {error, Reason} -> - {[{File, Reason}|Failed], NoWarn, Mods}; - {ok, V, E, NewNoWarn, Mod} -> + {[{File, Reason}|Failed], Mods}; + {ok, V, E, Mod} -> Callgraph = InitData#compile_init.callgraph, dialyzer_callgraph:add_edges(E, V, Callgraph), - {Failed, NewNoWarn ++ NoWarn, [Mod|Mods]} + {Failed, [Mod|Mods]} end. -spec start_compilation(file:filename(), compile_init_data()) -> @@ -318,12 +321,14 @@ start_compilation(File, #compile_init{callgraph = Callgraph, codeserver = Codeserver, defines = Defines, include_dirs = IncludeD, use_contracts = UseContracts, + legal_warnings = LegalWarnings, start_from = StartFrom}) -> case StartFrom of src_code -> - compile_src(File, IncludeD, Defines, Callgraph, Codeserver, UseContracts); + compile_src(File, IncludeD, Defines, Callgraph, Codeserver, + UseContracts, LegalWarnings); byte_code -> - compile_byte(File, Callgraph, Codeserver, UseContracts) + compile_byte(File, Callgraph, Codeserver, UseContracts, LegalWarnings) end. cleanup_callgraph(#analysis_state{plt = InitPlt, parent = Parent, @@ -357,88 +362,86 @@ cleanup_callgraph(#analysis_state{plt = InitPlt, parent = Parent, end, Callgraph1. -compile_src(File, Includes, Defines, Callgraph, CServer, UseContracts) -> +compile_src(File, Includes, Defines, Callgraph, CServer, UseContracts, + LegalWarnings) -> DefaultIncludes = default_includes(filename:dirname(File)), SrcCompOpts = dialyzer_utils:src_compiler_opts(), CompOpts = SrcCompOpts ++ Includes ++ Defines ++ DefaultIncludes, case dialyzer_utils:get_abstract_code_from_src(File, CompOpts) of {error, _Msg} = Error -> Error; {ok, AbstrCode} -> - compile_common(File, AbstrCode, CompOpts, Callgraph, CServer, UseContracts) + compile_common(File, AbstrCode, CompOpts, Callgraph, CServer, + UseContracts, LegalWarnings) end. -compile_byte(File, Callgraph, CServer, UseContracts) -> +compile_byte(File, Callgraph, CServer, UseContracts, LegalWarnings) -> case dialyzer_utils:get_abstract_code_from_beam(File) of error -> {error, " Could not get abstract code for: " ++ File ++ "\n" ++ " Recompile with +debug_info or analyze starting from source code"}; {ok, AbstrCode} -> - compile_byte(File, AbstrCode, Callgraph, CServer, UseContracts) + compile_byte(File, AbstrCode, Callgraph, CServer, UseContracts, + LegalWarnings) end. -compile_byte(File, AbstrCode, Callgraph, CServer, UseContracts) -> +compile_byte(File, AbstrCode, Callgraph, CServer, UseContracts, + LegalWarnings) -> case dialyzer_utils:get_compile_options_from_beam(File) of error -> {error, " Could not get compile options for: " ++ File ++ "\n" ++ " Recompile or analyze starting from source code"}; {ok, CompOpts} -> - compile_common(File, AbstrCode, CompOpts, Callgraph, CServer, UseContracts) + compile_common(File, AbstrCode, CompOpts, Callgraph, CServer, + UseContracts, LegalWarnings) end. 
-compile_common(File, AbstrCode, CompOpts, Callgraph, CServer, UseContracts) -> +compile_common(File, AbstrCode, CompOpts, Callgraph, CServer, + UseContracts, LegalWarnings) -> case dialyzer_utils:get_core_from_abstract_code(AbstrCode, CompOpts) of error -> {error, " Could not get core Erlang code for: " ++ File}; {ok, Core} -> Mod = cerl:concrete(cerl:module_name(Core)), - NoWarn = abs_get_nowarn(AbstrCode, Mod), case dialyzer_utils:get_record_and_type_info(AbstrCode) of {error, _} = Error -> Error; {ok, RecInfo} -> CServer1 = dialyzer_codeserver:store_temp_records(Mod, RecInfo, CServer), + MetaFunInfo = + dialyzer_utils:get_fun_meta_info(Mod, AbstrCode, LegalWarnings), + CServer2 = + dialyzer_codeserver:insert_fun_meta_info(MetaFunInfo, CServer1), case UseContracts of true -> case dialyzer_utils:get_spec_info(Mod, AbstrCode, RecInfo) of {error, _} = Error -> Error; {ok, SpecInfo, CallbackInfo} -> - CServer2 = + CServer3 = dialyzer_codeserver:store_temp_contracts(Mod, SpecInfo, CallbackInfo, - CServer1), - store_core(Mod, Core, NoWarn, Callgraph, CServer2) + CServer2), + store_core(Mod, Core, Callgraph, CServer3) end; false -> - store_core(Mod, Core, NoWarn, Callgraph, CServer1) + store_core(Mod, Core, Callgraph, CServer2) end end end. -store_core(Mod, Core, NoWarn, Callgraph, CServer) -> +store_core(Mod, Core, Callgraph, CServer) -> Exp = get_exports_from_core(Core), ExpTypes = get_exported_types_from_core(Core), CServer = dialyzer_codeserver:insert_exports(Exp, CServer), CServer = dialyzer_codeserver:insert_temp_exported_types(ExpTypes, CServer), CoreTree = cerl:from_records(Core), - {ok, cerl_trees:size(CoreTree), {Mod, CoreTree, NoWarn, Callgraph, CServer}}. + CoreSize = cerl_trees:size(CoreTree), + {ok, CoreSize, {Mod, CoreTree, Callgraph, CServer}}. -spec continue_compilation(integer(), compile_mid_data()) -> one_file_result(). -continue_compilation(NextLabel, {Mod, CoreTree, NoWarn, Callgraph, CServer}) -> +continue_compilation(NextLabel, {Mod, CoreTree, Callgraph, CServer}) -> {LabeledTree, _NewNextLabel} = cerl_trees:label(CoreTree, NextLabel), LabeledCore = cerl:to_records(LabeledTree), - store_code_and_build_callgraph(Mod, LabeledCore, Callgraph, NoWarn, CServer). - -abs_get_nowarn(Abs, M) -> - Opts = lists:flatten([C || {attribute, _, compile, C} <- Abs]), - Warn = erl_lint:bool_option(warn_unused_function, nowarn_unused_function, - true, Opts), - case Warn of - false -> - [{M, F, A} || {function, _, F, A, _} <- Abs]; % all functions - true -> - [{M, F, A} || {nowarn_unused_function, FAs} <- Opts, - {F, A} <- lists:flatten([FAs])] - end. + store_code_and_build_callgraph(Mod, LabeledCore, Callgraph, CServer). get_exported_types_from_core(Core) -> Attrs = cerl:module_attrs(Core), @@ -456,11 +459,11 @@ get_exports_from_core(Core) -> M = cerl:atom_val(cerl:module_name(Tree)), [{M, F, A} || {F, A} <- Exports2]. -store_code_and_build_callgraph(Mod, Core, Callgraph, NoWarn, CServer) -> +store_code_and_build_callgraph(Mod, Core, Callgraph, CServer) -> CoreTree = cerl:from_records(Core), {Vertices, Edges} = dialyzer_callgraph:scan_core_tree(CoreTree, Callgraph), CServer = dialyzer_codeserver:insert(Mod, CoreTree, CServer), - {ok, Vertices, Edges, NoWarn, Mod}. + {ok, Vertices, Edges, Mod}. %%-------------------------------------------------------------------- %% Utilities @@ -548,10 +551,19 @@ send_warnings(Parent, Warnings) -> Parent ! {self(), warnings, Warnings}, ok. 
-filter_warnings(LegalWarnings, Warnings) -> - [TIW || {Tag, _Id, _Warning} = TIW <- Warnings, - ordsets:is_element(Tag, LegalWarnings)]. +filter_warnings(Warnings, Codeserver) -> + [TWW || {Tag, WarningInfo, _Warning} = TWW <- Warnings, + is_ok_fun(WarningInfo, Codeserver), + is_ok_tag(Tag, WarningInfo, Codeserver)]. + +is_ok_fun({_F, _L, Module}, _Codeserver) when is_atom(Module) -> + true; +is_ok_fun({_Filename, _Line, {_M, _F, _A} = MFA}, Codeserver) -> + not dialyzer_utils:is_suppressed_fun(MFA, Codeserver). +is_ok_tag(Tag, {_F, _L, MorMFA}, Codeserver) -> + not dialyzer_utils:is_suppressed_tag(MorMFA, Tag, Codeserver). + send_analysis_done(Parent, Plt, DocPlt) -> Parent ! {self(), done, Plt, DocPlt}, ok. @@ -573,7 +585,8 @@ send_codeserver_plt(Parent, CServer, Plt ) -> ok. send_bad_calls(Parent, BadCalls, CodeServer) -> - send_warnings(Parent, format_bad_calls(BadCalls, CodeServer, [])). + FormattedBadCalls = format_bad_calls(BadCalls, CodeServer, []), + send_warnings(Parent, FormattedBadCalls). send_mod_deps(Parent, ModuleDeps) -> Parent ! {self(), mod_deps, ModuleDeps}, @@ -585,8 +598,9 @@ format_bad_calls([{{_, _, _}, {_, module_info, A}}|Left], CodeServer, Acc) format_bad_calls([{FromMFA, {M, F, A} = To}|Left], CodeServer, Acc) -> {_Var, FunCode} = dialyzer_codeserver:lookup_mfa_code(FromMFA, CodeServer), Msg = {call_to_missing, [M, F, A]}, - FileLine = find_call_file_and_line(FunCode, To), - NewAcc = [{?WARN_CALLGRAPH, FileLine, Msg}|Acc], + {File, Line} = find_call_file_and_line(FunCode, To), + WarningInfo = {File, Line, FromMFA}, + NewAcc = [{?WARN_CALLGRAPH, WarningInfo, Msg}|Acc], format_bad_calls(Left, CodeServer, NewAcc); format_bad_calls([], _CodeServer, Acc) -> Acc. diff --git a/lib/dialyzer/src/dialyzer_behaviours.erl b/lib/dialyzer/src/dialyzer_behaviours.erl index 1d458b49fc..19b63bd2c8 100644 --- a/lib/dialyzer/src/dialyzer_behaviours.erl +++ b/lib/dialyzer/src/dialyzer_behaviours.erl @@ -52,7 +52,7 @@ -spec check_callbacks(module(), [{cerl:cerl(), cerl:cerl()}], rectab(), dialyzer_plt:plt(), - dialyzer_codeserver:codeserver()) -> [dial_warning()]. + dialyzer_codeserver:codeserver()) -> [raw_warning()]. check_callbacks(Module, Attrs, Records, Plt, Codeserver) -> {Behaviours, BehLines} = get_behaviours(Attrs), @@ -65,7 +65,7 @@ check_callbacks(Module, Attrs, Records, Plt, Codeserver) -> State = #state{plt = Plt, filename = File, behlines = BehLines, codeserver = Codeserver, records = Records}, Warnings = get_warnings(Module, Behaviours, State), - [add_tag_file_line(Module, W, State) || W <- Warnings] + [add_tag_warning_info(Module, W, State) || W <- Warnings] end.
%%-------------------------------------------------------------------- @@ -102,14 +102,18 @@ check_all_callbacks(Module, Behaviour, [Cb|Rest], #state{plt = Plt, codeserver = Codeserver, records = Records} = State, Acc) -> {{Behaviour, Function, Arity}, - {{_BehFile, _BehLine}, Callback}} = Cb, + {{_BehFile, _BehLine}, Callback, Xtra}} = Cb, CbMFA = {Module, Function, Arity}, CbReturnType = dialyzer_contracts:get_contract_return(Callback), CbArgTypes = dialyzer_contracts:get_contract_args(Callback), Acc0 = Acc, Acc1 = case dialyzer_plt:lookup(Plt, CbMFA) of - 'none' -> [{callback_missing, [Behaviour, Function, Arity]}|Acc0]; + 'none' -> + case lists:member(optional_callback, Xtra) of + true -> Acc0; + false -> [{callback_missing, [Behaviour, Function, Arity]}|Acc0] + end; {'value', RetArgTypes} -> Acc00 = Acc0, {ReturnType, ArgTypes} = RetArgTypes, @@ -137,7 +141,7 @@ check_all_callbacks(Module, Behaviour, [Cb|Rest], Acc2 = case dialyzer_codeserver:lookup_mfa_contract(CbMFA, Codeserver) of 'error' -> Acc1; - {ok, {{File, Line}, Contract}} -> + {ok, {{File, Line}, Contract, _Xtra}} -> Acc10 = Acc1, SpecReturnType0 = dialyzer_contracts:get_contract_return(Contract), SpecArgTypes0 = dialyzer_contracts:get_contract_args(Contract), @@ -189,7 +193,7 @@ find_mismatching_args(Kind, [Type|Rest], [CbType|CbRest], Behaviour, Arity, Records, N+1, NewAcc) end. -add_tag_file_line(_Module, {Tag, [B|_R]} = Warn, State) +add_tag_warning_info(Module, {Tag, [B|_R]} = Warn, State) when Tag =:= callback_missing; Tag =:= callback_info_missing -> {B, Line} = lists:keyfind(B, 1, State#state.behlines), @@ -198,18 +202,18 @@ add_tag_file_line(_Module, {Tag, [B|_R]} = Warn, State) callback_missing -> ?WARN_BEHAVIOUR; callback_info_missing -> ?WARN_UNDEFINED_CALLBACK end, - {Category, {State#state.filename, Line}, Warn}; -add_tag_file_line(_Module, {Tag, [File, Line|R]}, _State) + {Category, {State#state.filename, Line, Module}, Warn}; +add_tag_warning_info(Module, {Tag, [File, Line|R]}, _State) when Tag =:= callback_spec_type_mismatch; Tag =:= callback_spec_arg_type_mismatch -> - {?WARN_BEHAVIOUR, {File, Line}, {Tag, R}}; -add_tag_file_line(Module, {_Tag, [_B, Fun, Arity|_R]} = Warn, State) -> + {?WARN_BEHAVIOUR, {File, Line, Module}, {Tag, R}}; +add_tag_warning_info(Module, {_Tag, [_B, Fun, Arity|_R]} = Warn, State) -> {_A, FunCode} = dialyzer_codeserver:lookup_mfa_code({Module, Fun, Arity}, State#state.codeserver), Anns = cerl:get_ann(FunCode), - FileLine = {get_file(Anns), get_line(Anns)}, - {?WARN_BEHAVIOUR, FileLine, Warn}. + WarningInfo = {get_file(Anns), get_line(Anns), {Module, Fun, Arity}}, + {?WARN_BEHAVIOUR, WarningInfo, Warn}. get_line([Line|_]) when is_integer(Line) -> Line; get_line([_|Tail]) -> get_line(Tail); diff --git a/lib/dialyzer/src/dialyzer_cl.erl b/lib/dialyzer/src/dialyzer_cl.erl index 3e7d9dfa99..debb78bd0b 100644 --- a/lib/dialyzer/src/dialyzer_cl.erl +++ b/lib/dialyzer/src/dialyzer_cl.erl @@ -48,7 +48,7 @@ plt_info = none :: 'none' | dialyzer_plt:plt_info(), report_mode = normal :: rep_mode(), return_status= ?RET_NOTHING_SUSPICIOUS :: dial_ret(), - stored_warnings = [] :: [dial_warning()], + stored_warnings = [] :: [raw_warning()], unknown_behaviours = [] :: [dialyzer_behaviours:behaviour()] }). @@ -627,7 +627,7 @@ format_log_cache(LogCache) -> Str = lists:append(lists:reverse(LogCache)), string:join(string:tokens(Str, "\n"), "\n "). --spec store_warnings(#cl_state{}, [dial_warning()]) -> #cl_state{}. +-spec store_warnings(#cl_state{}, [raw_warning()]) -> #cl_state{}. 
store_warnings(#cl_state{stored_warnings = StoredWarnings} = St, Warnings) -> St#cl_state{stored_warnings = StoredWarnings ++ Warnings}. @@ -685,16 +685,22 @@ return_value(State = #cl_state{erlang_mode = ErlangMode, unknown_behaviours(State); false -> [] end, + WarningInfo = {_Filename = "", _Line = 0, _MorMFA = ''}, UnknownWarnings = - [{?WARN_UNKNOWN, {_Filename = "", _Line = 0}, W} || W <- Unknown], + [{?WARN_UNKNOWN, WarningInfo, W} || W <- Unknown], AllWarnings = UnknownWarnings ++ process_warnings(StoredWarnings), - {RetValue, AllWarnings} + {RetValue, set_warning_id(AllWarnings)} end. unknown_functions(#cl_state{external_calls = Calls}) -> [{unknown_function, MFA} || MFA <- Calls]. +set_warning_id(Warnings) -> + lists:map(fun({Tag, {File, Line, _MorMFA}, Msg}) -> + {Tag, {File, Line}, Msg} + end, Warnings). + print_ext_calls(#cl_state{report_mode = quiet}) -> ok; print_ext_calls(#cl_state{output = Output, @@ -817,15 +823,16 @@ print_warnings(#cl_state{output = Output, formatted -> [dialyzer:format_warning(W, FOpt) || W <- PrWarnings]; raw -> - [io_lib:format("~p. \n", [W]) || W <- PrWarnings] + [io_lib:format("~p. \n", + [W]) || W <- set_warning_id(PrWarnings)] end, io:format(Output, "\n~s", [S]) end. --spec process_warnings([dial_warning()]) -> [dial_warning()]. +-spec process_warnings([raw_warning()]) -> [raw_warning()]. process_warnings(Warnings) -> - Warnings1 = lists:keysort(2, Warnings), %% Sort on file/line + Warnings1 = lists:keysort(2, Warnings), %% Sort on file/line (and m/mfa..) remove_duplicate_warnings(Warnings1, []). remove_duplicate_warnings([Duplicate, Duplicate|Left], Acc) -> diff --git a/lib/dialyzer/src/dialyzer_codeserver.erl b/lib/dialyzer/src/dialyzer_codeserver.erl index aab3d6add6..e0add00061 100644 --- a/lib/dialyzer/src/dialyzer_codeserver.erl +++ b/lib/dialyzer/src/dialyzer_codeserver.erl @@ -43,19 +43,21 @@ insert/3, insert_exports/2, insert_temp_exported_types/2, + insert_fun_meta_info/2, is_exported/2, lookup_mod_code/2, lookup_mfa_code/2, lookup_mod_records/2, lookup_mod_contracts/2, lookup_mfa_contract/2, + lookup_meta_info/2, new/0, set_next_core_label/2, set_temp_records/2, store_temp_records/3, store_temp_contracts/4]). --export_type([codeserver/0]). +-export_type([codeserver/0, fun_meta_info/0]). -include("dialyzer.hrl"). @@ -70,12 +72,19 @@ -type contracts() :: dict:dict(mfa(),dialyzer_contracts:file_contract()). -type mod_contracts() :: dict:dict(module(), contracts()). +%% A property-list of data compiled from -compile and -dialyzer attributes. +-type meta_info() :: [{{'nowarn_function' | dial_warn_tag()}, + 'mod' | 'func'}]. +-type fun_meta_info() :: [{mfa(), meta_info()} + | {module(), [dial_warn_tag()]}]. 
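%% The new fun_meta_info machinery works on raw_warning() terms, which carry
%% the owning module or MFA next to the file and line; set_warning_id/1 in the
%% dialyzer_cl hunk above strips that third element again before warnings are
%% handed to callers. A self-contained sketch of the shape change, not
%% dialyzer's own code:
-module(warning_id_sketch).
-export([strip/1]).

%% Drop the module/MFA from the location triple, keeping {File, Line}.
strip(RawWarnings) ->
    [{Tag, {File, Line}, Msg} ||
        {Tag, {File, Line, _MorMFA}, Msg} <- RawWarnings].

%% Example: strip([{Tag, {"m.erl", 7, {m, f, 1}}, Msg}]) gives
%% [{Tag, {"m.erl", 7}, Msg}].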
+ -record(codeserver, {next_core_label = 0 :: label(), code :: dict_ets(), exported_types :: set_ets(), % set(mfa()) records :: dict_ets(), contracts :: dict_ets(), callbacks :: dict_ets(), + fun_meta_info :: dict_ets(), % {mfa(), meta_info()} exports :: 'clean' | set_ets(), % set(mfa()) temp_exported_types :: 'clean' | set_ets(), % set(mfa()) temp_records :: 'clean' | dict_ets(), @@ -129,14 +138,17 @@ new() -> CodeOptions = [compressed, public, {read_concurrency, true}], Code = ets:new(dialyzer_codeserver_code, CodeOptions), TempOptions = [public, {write_concurrency, true}], - [Exports, TempExportedTypes, TempRecords, TempContracts, TempCallbacks] = + [Exports, FunMetaInfo, TempExportedTypes, TempRecords, TempContracts, + TempCallbacks] = [ets:new(Name, TempOptions) || Name <- - [dialyzer_codeserver_exports, dialyzer_codeserver_temp_exported_types, + [dialyzer_codeserver_exports, dialyzer_codeserver_fun_meta_info, + dialyzer_codeserver_temp_exported_types, dialyzer_codeserver_temp_records, dialyzer_codeserver_temp_contracts, dialyzer_codeserver_temp_callbacks]], #codeserver{code = Code, exports = Exports, + fun_meta_info = FunMetaInfo, temp_exported_types = TempExportedTypes, temp_records = TempRecords, temp_contracts = TempContracts, @@ -184,6 +196,12 @@ insert_exports(List, #codeserver{exports = Exports} = CS) -> true = ets_set_insert_list(List, Exports), CS. +-spec insert_fun_meta_info(fun_meta_info(), codeserver()) -> codeserver(). + +insert_fun_meta_info(List, #codeserver{fun_meta_info = FunMetaInfo} = CS) -> + true = ets:insert(FunMetaInfo, List), + CS. + -spec is_exported(mfa(), codeserver()) -> boolean(). is_exported(MFA, #codeserver{exports = Exports}) -> @@ -278,10 +296,10 @@ lookup_mod_contracts(Mod, #codeserver{contracts = ContDict}) case ets_dict_find(Mod, ContDict) of error -> dict:new(); {ok, Keys} -> - dict:from_list([get_contract_pair(Key, ContDict)|| Key <- Keys]) + dict:from_list([get_file_contract(Key, ContDict)|| Key <- Keys]) end. -get_contract_pair(Key, ContDict) -> +get_file_contract(Key, ContDict) -> {Key, ets:lookup_element(ContDict, Key, 2)}. -spec lookup_mfa_contract(mfa(), codeserver()) -> @@ -290,6 +308,14 @@ get_contract_pair(Key, ContDict) -> lookup_mfa_contract(MFA, #codeserver{contracts = ContDict}) -> ets_dict_find(MFA, ContDict). +-spec lookup_meta_info(module() | mfa(), codeserver()) -> meta_info(). + +lookup_meta_info(MorMFA, #codeserver{fun_meta_info = FunMetaInfo}) -> + case ets_dict_find(MorMFA, FunMetaInfo) of + error -> []; + {ok, PropList} -> PropList + end. + -spec get_contracts(codeserver()) -> mod_contracts(). get_contracts(#codeserver{contracts = ContDict}) -> diff --git a/lib/dialyzer/src/dialyzer_contracts.erl b/lib/dialyzer/src/dialyzer_contracts.erl index f27fc1a842..4a1ba9c539 100644 --- a/lib/dialyzer/src/dialyzer_contracts.erl +++ b/lib/dialyzer/src/dialyzer_contracts.erl @@ -2,7 +2,7 @@ %%----------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2007-2014. All Rights Reserved. +%% Copyright Ericsson AB 2007-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -43,7 +43,7 @@ %% Types used in other parts of the system below %%----------------------------------------------------------------------- --type file_contract() :: {file_line(), #contract{}}. +-type file_contract() :: {file_line(), #contract{}, Extra :: [_]}. 
-type plt_contracts() :: [{mfa(), #contract{}}]. % actually, an orddict() @@ -146,10 +146,10 @@ process_contract_remote_types(CodeServer) -> ExpTypes = dialyzer_codeserver:get_exported_types(CodeServer), RecordDict = dialyzer_codeserver:get_records(CodeServer), ContractFun = - fun({_M, _F, _A}, {File, #tmp_contract{contract_funs = CFuns, forms = Forms}}) -> + fun({_M, _F, _A}, {File, #tmp_contract{contract_funs = CFuns, forms = Forms}, Xtra}) -> NewCs = [CFun(ExpTypes, RecordDict) || CFun <- CFuns], Args = general_domain(NewCs), - {File, #contract{contracts = NewCs, args = Args, forms = Forms}} + {File, #contract{contracts = NewCs, args = Args, forms = Forms}, Xtra} end, ModuleFun = fun(_ModuleName, ContractDict) -> @@ -175,7 +175,7 @@ check_contracts(Contracts, Callgraph, FunTypes, FindOpaques) -> case dialyzer_callgraph:lookup_name(Label, Callgraph) of {ok, {M,F,A} = MFA} -> case orddict:find(MFA, Contracts) of - {ok, {_FileLine, Contract}} -> + {ok, {_FileLine, Contract, _Xtra}} -> Opaques = FindOpaques(M), case check_contract(Contract, Type, Opaques) of ok -> @@ -351,7 +351,7 @@ solve_constraints(Contract, Call, Constraints) -> %% Checks the contracts for functions that are not implemented -spec contracts_without_fun(contracts(), [_], dialyzer_callgraph:callgraph()) -> - [dial_warning()]. + [raw_warning()]. contracts_without_fun(Contracts, AllFuns0, Callgraph) -> AllFuns1 = [{dialyzer_callgraph:lookup_name(Label, Callgraph), Arity} @@ -362,8 +362,9 @@ contracts_without_fun(Contracts, AllFuns0, Callgraph) -> [warn_spec_missing_fun(MFA, Contracts) || MFA <- ErrorContractMFAs]. warn_spec_missing_fun({M, F, A} = MFA, Contracts) -> - {FileLine, _Contract} = dict:fetch(MFA, Contracts), - {?WARN_CONTRACT_SYNTAX, FileLine, {spec_missing_fun, [M, F, A]}}. + {{File, Line}, _Contract, _Xtra} = dict:fetch(MFA, Contracts), + WarningInfo = {File, Line, MFA}, + {?WARN_CONTRACT_SYNTAX, WarningInfo, {spec_missing_fun, [M, F, A]}}. %% This treats the "when" constraints. It will be extended, we hope. insert_constraints([{subtype, Type1, Type2}|Left], Dict) -> @@ -386,26 +387,29 @@ insert_constraints([], Dict) -> Dict. -type types() :: erl_types:type_table(). --spec store_tmp_contract(mfa(), file_line(), [_], contracts(), types()) -> +-type spec_data() :: {TypeSpec :: [_], Xtra:: [_]}. + +-spec store_tmp_contract(mfa(), file_line(), spec_data(), contracts(), types()) -> contracts(). -store_tmp_contract(MFA, FileLine, TypeSpec, SpecDict, RecordsDict) -> +store_tmp_contract(MFA, FileLine, {TypeSpec, Xtra}, SpecDict, RecordsDict) -> %% io:format("contract from form: ~p\n", [TypeSpec]), - TmpContract = contract_from_form(TypeSpec, RecordsDict, FileLine), + {Module, _, _} = MFA, + TmpContract = contract_from_form(TypeSpec, Module, RecordsDict, FileLine), %% io:format("contract: ~p\n", [TmpContract]), - dict:store(MFA, {FileLine, TmpContract}, SpecDict). + dict:store(MFA, {FileLine, TmpContract, Xtra}, SpecDict). -contract_from_form(Forms, RecDict, FileLine) -> - {CFuns, Forms1} = contract_from_form(Forms, RecDict, FileLine, [], []), +contract_from_form(Forms, Module, RecDict, FileLine) -> + {CFuns, Forms1} = contract_from_form(Forms, Module, RecDict, FileLine, [], []), #tmp_contract{contract_funs = CFuns, forms = Forms1}. 
-contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], RecDict, +contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], Module, RecDict, FileLine, TypeAcc, FormAcc) -> TypeFun = fun(ExpTypes, AllRecords) -> - Type = + NewType = try - erl_types:t_from_form(Form, RecDict) + erl_types:t_from_form(Form, ExpTypes, Module, AllRecords) catch throw:{error, Msg} -> {File, Line} = FileLine, @@ -413,61 +417,60 @@ contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], RecDict, Line, Msg]), throw({error, NewMsg}) end, - NewType = erl_types:t_solve_remote(Type, ExpTypes, AllRecords), NewTypeNoVars = erl_types:subst_all_vars_to_any(NewType), {NewTypeNoVars, []} end, NewTypeAcc = [TypeFun | TypeAcc], NewFormAcc = [{Form, []} | FormAcc], - contract_from_form(Left, RecDict, FileLine, NewTypeAcc, NewFormAcc); + contract_from_form(Left, Module, RecDict, FileLine, NewTypeAcc, NewFormAcc); contract_from_form([{type, _L1, bounded_fun, [{type, _L2, 'fun', [_, _]} = Form, Constr]}| Left], - RecDict, FileLine, TypeAcc, FormAcc) -> + Module, RecDict, FileLine, TypeAcc, FormAcc) -> TypeFun = fun(ExpTypes, AllRecords) -> {Constr1, VarDict} = - process_constraints(Constr, RecDict, ExpTypes, AllRecords), - Type = erl_types:t_from_form(Form, RecDict, VarDict), - NewType = erl_types:t_solve_remote(Type, ExpTypes, AllRecords), + process_constraints(Constr, Module, RecDict, ExpTypes, AllRecords), + NewType = erl_types:t_from_form(Form, ExpTypes, Module, AllRecords, + VarDict), NewTypeNoVars = erl_types:subst_all_vars_to_any(NewType), {NewTypeNoVars, Constr1} end, NewTypeAcc = [TypeFun | TypeAcc], NewFormAcc = [{Form, Constr} | FormAcc], - contract_from_form(Left, RecDict, FileLine, NewTypeAcc, NewFormAcc); -contract_from_form([], _RecDict, _FileLine, TypeAcc, FormAcc) -> + contract_from_form(Left, Module, RecDict, FileLine, NewTypeAcc, NewFormAcc); +contract_from_form([], _Module, _RecDict, _FileLine, TypeAcc, FormAcc) -> {lists:reverse(TypeAcc), lists:reverse(FormAcc)}. -process_constraints(Constrs, RecDict, ExpTypes, AllRecords) -> - Init0 = initialize_constraints(Constrs, RecDict, ExpTypes, AllRecords), +process_constraints(Constrs, Module, RecDict, ExpTypes, AllRecords) -> + Init0 = initialize_constraints(Constrs, Module, RecDict, ExpTypes, AllRecords), Init = remove_cycles(Init0), - constraints_fixpoint(Init, RecDict, ExpTypes, AllRecords). + constraints_fixpoint(Init, Module, RecDict, ExpTypes, AllRecords). -initialize_constraints(Constrs, RecDict, ExpTypes, AllRecords) -> - initialize_constraints(Constrs, RecDict, ExpTypes, AllRecords, []). +initialize_constraints(Constrs, Module, RecDict, ExpTypes, AllRecords) -> + initialize_constraints(Constrs, Module, RecDict, ExpTypes, AllRecords, []). 
-initialize_constraints([], _RecDict, _ExpTypes, _AllRecords, Acc) -> +initialize_constraints([], _Module, _RecDict, _ExpTypes, _AllRecords, Acc) -> Acc; -initialize_constraints([Constr|Rest], RecDict, ExpTypes, AllRecords, Acc) -> +initialize_constraints([Constr|Rest], Module, RecDict, ExpTypes, AllRecords, Acc) -> case Constr of {type, _, constraint, [{atom, _, is_subtype}, [Type1, Type2]]} -> - T1 = final_form(Type1, RecDict, ExpTypes, AllRecords, dict:new()), + T1 = final_form(Type1, Module, ExpTypes, AllRecords, dict:new()), Entry = {T1, Type2}, - initialize_constraints(Rest, RecDict, ExpTypes, AllRecords, [Entry|Acc]); + initialize_constraints(Rest, Module, RecDict, ExpTypes, AllRecords, [Entry|Acc]); {type, _, constraint, [{atom,_,Name}, List]} -> N = length(List), throw({error, io_lib:format("Unsupported type guard ~w/~w\n", [Name, N])}) end. -constraints_fixpoint(Constrs, RecDict, ExpTypes, AllRecords) -> +constraints_fixpoint(Constrs, Module, RecDict, ExpTypes, AllRecords) -> VarDict = - constraints_to_dict(Constrs, RecDict, ExpTypes, AllRecords, dict:new()), - constraints_fixpoint(VarDict, Constrs, RecDict, ExpTypes, AllRecords). + constraints_to_dict(Constrs, Module, RecDict, ExpTypes, AllRecords, dict:new()), + constraints_fixpoint(VarDict, Module, Constrs, RecDict, ExpTypes, AllRecords). -constraints_fixpoint(OldVarDict, Constrs, RecDict, ExpTypes, AllRecords) -> +constraints_fixpoint(OldVarDict, Module, Constrs, RecDict, ExpTypes, AllRecords) -> NewVarDict = - constraints_to_dict(Constrs, RecDict, ExpTypes, AllRecords, OldVarDict), + constraints_to_dict(Constrs, Module, RecDict, ExpTypes, AllRecords, OldVarDict), case NewVarDict of OldVarDict -> DictFold = @@ -477,25 +480,24 @@ constraints_fixpoint(OldVarDict, Constrs, RecDict, ExpTypes, AllRecords) -> FinalConstrs = dict:fold(DictFold, [], NewVarDict), {FinalConstrs, NewVarDict}; _Other -> - constraints_fixpoint(NewVarDict, Constrs, RecDict, ExpTypes, AllRecords) + constraints_fixpoint(NewVarDict, Module, Constrs, RecDict, ExpTypes, AllRecords) end. -final_form(Form, RecDict, ExpTypes, AllRecords, VarDict) -> - T1 = erl_types:t_from_form(Form, RecDict, VarDict), - erl_types:t_solve_remote(T1, ExpTypes, AllRecords). +final_form(Form, Module, ExpTypes, AllRecords, VarDict) -> + erl_types:t_from_form(Form, ExpTypes, Module, AllRecords, VarDict). -constraints_to_dict(Constrs, RecDict, ExpTypes, AllRecords, VarDict) -> +constraints_to_dict(Constrs, Module, RecDict, ExpTypes, AllRecords, VarDict) -> Subtypes = - constraints_to_subs(Constrs, RecDict, ExpTypes, AllRecords, VarDict, []), + constraints_to_subs(Constrs, Module, RecDict, ExpTypes, AllRecords, VarDict, []), insert_constraints(Subtypes, dict:new()). -constraints_to_subs([], _RecDict, _ExpTypes, _AllRecords, _VarDict, Acc) -> +constraints_to_subs([], _Module, _RecDict, _ExpTypes, _AllRecords, _VarDict, Acc) -> Acc; -constraints_to_subs([C|Rest], RecDict, ExpTypes, AllRecords, VarDict, Acc) -> +constraints_to_subs([C|Rest], Module, RecDict, ExpTypes, AllRecords, VarDict, Acc) -> {T1, Form2} = C, - T2 = final_form(Form2, RecDict, ExpTypes, AllRecords, VarDict), + T2 = final_form(Form2, Module, ExpTypes, AllRecords, VarDict), NewAcc = [{subtype, T1, T2}|Acc], - constraints_to_subs(Rest, RecDict, ExpTypes, AllRecords, VarDict, NewAcc). + constraints_to_subs(Rest, Module, RecDict, ExpTypes, AllRecords, VarDict, NewAcc). %% Replaces variables with '_' when necessary to break up cycles among %% the constraints. 
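%% The constraint code above handles bounded funs, i.e. specs with a "when"
%% part whose is_subtype constraints are iterated to a fixpoint. A hypothetical
%% module with such a spec, included only to show the kind of input involved:
-module(bounded_fun_sketch).
-export([lookup/2]).

-spec lookup(K, Tab) -> V when K :: atom(), Tab :: [{K, V}], V :: term().
lookup(Key, Tab) ->
    proplists:get_value(Key, Tab).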
@@ -583,7 +585,7 @@ general_domain([], AccSig) -> -spec get_invalid_contract_warnings([module()], dialyzer_codeserver:codeserver(), dialyzer_plt:plt(), - opaques_fun()) -> [dial_warning()]. + opaques_fun()) -> [raw_warning()]. get_invalid_contract_warnings(Modules, CodeServer, Plt, FindOpaques) -> get_invalid_contract_warnings_modules(Modules, CodeServer, Plt, FindOpaques, []). @@ -597,7 +599,7 @@ get_invalid_contract_warnings_modules([Mod|Mods], CodeServer, Plt, FindOpaques, get_invalid_contract_warnings_modules([], _CodeServer, _Plt, _FindOpaques, Acc) -> Acc. -get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract}}|Left], +get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract, _Xtra}}|Left], Plt, RecDict, FindOpaques, Acc) -> case dialyzer_plt:lookup(Plt, MFA) of none -> @@ -607,12 +609,14 @@ get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract}}|Left], Sig = erl_types:t_fun(Args, Ret), {M, _F, _A} = MFA, Opaques = FindOpaques(M), + {File, Line} = FileLine, + WarningInfo = {File, Line, MFA}, NewAcc = case check_contract(Contract, Sig, Opaques) of {error, invalid_contract} -> - [invalid_contract_warning(MFA, FileLine, Sig, RecDict)|Acc]; + [invalid_contract_warning(MFA, WarningInfo, Sig, RecDict)|Acc]; {error, {overlapping_contract, []}} -> - [overlapping_contract_warning(MFA, FileLine)|Acc]; + [overlapping_contract_warning(MFA, WarningInfo)|Acc]; {error, {extra_range, ExtraRanges, STRange}} -> Warn = case t_from_forms_without_remote(Contract#contract.forms, @@ -625,12 +629,12 @@ get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract}}|Left], end, case Warn of true -> - [extra_range_warning(MFA, FileLine, ExtraRanges, STRange)|Acc]; + [extra_range_warning(MFA, WarningInfo, ExtraRanges, STRange)|Acc]; false -> Acc end; {error, Msg} -> - [{?WARN_CONTRACT_SYNTAX, FileLine, Msg}|Acc]; + [{?WARN_CONTRACT_SYNTAX, WarningInfo, Msg}|Acc]; ok -> {M, F, A} = MFA, CSig0 = get_contract_signature(Contract), @@ -644,14 +648,14 @@ get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract}}|Left], BifSig = erl_types:t_fun(BifArgs, BifRet), case check_contract(Contract, BifSig, Opaques) of {error, _} -> - [invalid_contract_warning(MFA, FileLine, BifSig, RecDict) + [invalid_contract_warning(MFA, WarningInfo, BifSig, RecDict) |Acc]; ok -> - picky_contract_check(CSig, BifSig, MFA, FileLine, + picky_contract_check(CSig, BifSig, MFA, WarningInfo, Contract, RecDict, Acc) end; false -> - picky_contract_check(CSig, Sig, MFA, FileLine, Contract, + picky_contract_check(CSig, Sig, MFA, WarningInfo, Contract, RecDict, Acc) end end, @@ -660,20 +664,20 @@ get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract}}|Left], get_invalid_contract_warnings_funs([], _Plt, _RecDict, _FindOpaques, Acc) -> Acc. -invalid_contract_warning({M, F, A}, FileLine, SuccType, RecDict) -> +invalid_contract_warning({M, F, A}, WarningInfo, SuccType, RecDict) -> SuccTypeStr = dialyzer_utils:format_sig(SuccType, RecDict), - {?WARN_CONTRACT_TYPES, FileLine, {invalid_contract, [M, F, A, SuccTypeStr]}}. + {?WARN_CONTRACT_TYPES, WarningInfo, {invalid_contract, [M, F, A, SuccTypeStr]}}. -overlapping_contract_warning({M, F, A}, FileLine) -> - {?WARN_CONTRACT_TYPES, FileLine, {overlapping_contract, [M, F, A]}}. +overlapping_contract_warning({M, F, A}, WarningInfo) -> + {?WARN_CONTRACT_TYPES, WarningInfo, {overlapping_contract, [M, F, A]}}. 
-extra_range_warning({M, F, A}, FileLine, ExtraRanges, STRange) -> +extra_range_warning({M, F, A}, WarningInfo, ExtraRanges, STRange) -> ERangesStr = erl_types:t_to_string(ExtraRanges), STRangeStr = erl_types:t_to_string(STRange), - {?WARN_CONTRACT_SUPERTYPE, FileLine, + {?WARN_CONTRACT_SUPERTYPE, WarningInfo, {extra_range, [M, F, A, ERangesStr, STRangeStr]}}. -picky_contract_check(CSig0, Sig0, MFA, FileLine, Contract, RecDict, Acc) -> +picky_contract_check(CSig0, Sig0, MFA, WarningInfo, Contract, RecDict, Acc) -> CSig = erl_types:t_abstract_records(CSig0, RecDict), Sig = erl_types:t_abstract_records(Sig0, RecDict), case erl_types:t_is_equal(CSig, Sig) of @@ -683,7 +687,7 @@ picky_contract_check(CSig0, Sig0, MFA, FileLine, Contract, RecDict, Acc) -> erl_types:t_is_unit(erl_types:t_fun_range(CSig))) of true -> Acc; false -> - case extra_contract_warning(MFA, FileLine, Contract, + case extra_contract_warning(MFA, WarningInfo, Contract, CSig0, Sig0, RecDict) of no_warning -> Acc; {warning, Warning} -> [Warning|Acc] @@ -691,7 +695,7 @@ picky_contract_check(CSig0, Sig0, MFA, FileLine, Contract, RecDict, Acc) -> end end. -extra_contract_warning({M, F, A}, FileLine, Contract, CSig, Sig, RecDict) -> +extra_contract_warning({M, F, A}, WarningInfo, Contract, CSig, Sig, RecDict) -> %% We do not want to depend upon erl_types:t_to_string() possibly %% hiding the contents of opaque types. SigUnopaque = erl_types:t_unopaque(Sig), @@ -722,7 +726,7 @@ extra_contract_warning({M, F, A}, FileLine, Contract, CSig, Sig, RecDict) -> {?WARN_CONTRACT_NOT_EQUAL, {contract_diff, [M, F, A, ContractString, SigString]}} end, - {warning, {Tag, FileLine, Msg}} + {warning, {Tag, WarningInfo, Msg}} end. is_remote_types_related(Contract, CSig, Sig, RecDict) -> @@ -749,8 +753,7 @@ is_remote_types_related(Contract, CSig, Sig, RecDict) -> end. t_from_forms_without_remote([{FType, []}], RecDict) -> - Type0 = erl_types:t_from_form(FType, RecDict), - Type1 = erl_types:subst_all_remote(Type0, erl_types:t_none()), + Type1 = erl_types:t_from_form_without_remote(FType, RecDict), {ok, erl_types:subst_all_vars_to_any(Type1)}; t_from_forms_without_remote([{_FType, _Constrs}], _RecDict) -> %% 'When' constraints diff --git a/lib/dialyzer/src/dialyzer_dataflow.erl b/lib/dialyzer/src/dialyzer_dataflow.erl index 03005e689f..336b4641d4 100644 --- a/lib/dialyzer/src/dialyzer_dataflow.erl +++ b/lib/dialyzer/src/dialyzer_dataflow.erl @@ -2,7 +2,7 @@ %%-------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2006-2014. All Rights Reserved. +%% Copyright Ericsson AB 2006-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -28,14 +28,15 @@ -module(dialyzer_dataflow). --export([get_fun_types/4, get_warnings/5, format_args/3]). +-export([get_fun_types/5, get_warnings/5, format_args/3]). %% Data structure interfaces. -export([state__add_warning/2, state__cleanup/1, state__duplicate/1, dispose_state/1, state__get_callgraph/1, state__get_races/1, state__get_records/1, state__put_callgraph/2, - state__put_races/2, state__records_only/1]). + state__put_races/2, state__records_only/1, + state__find_function/2]). -export_type([state/0]). @@ -89,6 +90,8 @@ -type type() :: erl_types:erl_type(). -type types() :: erl_types:type_table(). +-type curr_fun() :: 'undefined' | 'top' | mfa_or_funlbl(). + -define(no_arg, no_arg). -define(TYPE_LIMIT, 3). 
@@ -96,17 +99,20 @@ -define(BITS, 128). -record(state, {callgraph :: dialyzer_callgraph:callgraph(), + codeserver :: dialyzer_codeserver:codeserver(), envs :: env_tab(), fun_tab :: fun_tab(), + fun_homes :: dict:dict(label(), mfa()), plt :: dialyzer_plt:plt(), opaques :: [type()], races = dialyzer_races:new() :: dialyzer_races:races(), records = dict:new() :: types(), tree_map :: dict:dict(label(), cerl:cerl()), warning_mode = false :: boolean(), - warnings = [] :: [dial_warning()], + warnings = [] :: [raw_warning()], work :: {[_], [_], sets:set()}, - module :: module() + module :: module(), + curr_fun :: curr_fun() }). -record(map, {dict = dict:new() :: type_tab(), @@ -115,7 +121,6 @@ modified_stack = [] :: [{[Key :: term()],reference()}], ref = undefined :: reference() | undefined}). --type nowarn() :: dialyzer_analysis_callgraph:no_warn_unused(). -type env_tab() :: dict:dict(label(), #map{}). -type fun_entry() :: {Args :: [type()], RetType :: type()}. -type fun_tab() :: dict:dict('top' | label(), @@ -133,22 +138,24 @@ -type fun_types() :: dict:dict(label(), type()). -spec get_warnings(cerl:c_module(), dialyzer_plt:plt(), - dialyzer_callgraph:callgraph(), types(), nowarn()) -> - {[dial_warning()], fun_types()}. - -get_warnings(Tree, Plt, Callgraph, Records, NoWarnUnused) -> - State1 = analyze_module(Tree, Plt, Callgraph, Records, true), - State2 = - state__renew_warnings(state__get_warnings(State1, NoWarnUnused), State1), + dialyzer_callgraph:callgraph(), + dialyzer_codeserver:codeserver(), + types()) -> + {[raw_warning()], fun_types()}. + +get_warnings(Tree, Plt, Callgraph, Codeserver, Records) -> + State1 = analyze_module(Tree, Plt, Callgraph, Codeserver, Records, true), + State2 = state__renew_warnings(state__get_warnings(State1), State1), State3 = state__get_race_warnings(State2), {State3#state.warnings, state__all_fun_types(State3)}. -spec get_fun_types(cerl:c_module(), dialyzer_plt:plt(), dialyzer_callgraph:callgraph(), + dialyzer_codeserver:codeserver(), types()) -> fun_types(). -get_fun_types(Tree, Plt, Callgraph, Records) -> - State = analyze_module(Tree, Plt, Callgraph, Records, false), +get_fun_types(Tree, Plt, Callgraph, Codeserver, Records) -> + State = analyze_module(Tree, Plt, Callgraph, Codeserver, Records, false), state__all_fun_types(State). 
%%% =========================================================================== @@ -157,11 +164,11 @@ get_fun_types(Tree, Plt, Callgraph, Records) -> %%% %%% =========================================================================== -analyze_module(Tree, Plt, Callgraph, Records, GetWarnings) -> +analyze_module(Tree, Plt, Callgraph, Codeserver, Records, GetWarnings) -> debug_pp(Tree, false), Module = cerl:atom_val(cerl:module_name(Tree)), TopFun = cerl:ann_c_fun([{label, top}], [], Tree), - State = state__new(Callgraph, TopFun, Plt, Module, Records), + State = state__new(Callgraph, Codeserver, TopFun, Plt, Module, Records), State1 = state__race_analysis(not GetWarnings, State), State2 = analyze_loop(State1), case GetWarnings of @@ -175,25 +182,26 @@ analyze_module(Tree, Plt, Callgraph, Records, GetWarnings) -> analyze_loop(State) -> case state__get_work(State) of - none -> State; - {Fun, NewState1} -> + none -> state__set_curr_fun(undefined, State); + {Fun, NewState0} -> + NewState1 = state__set_curr_fun(get_label(Fun), NewState0), {ArgTypes, IsCalled} = state__get_args_and_status(Fun, NewState1), case not IsCalled of true -> ?debug("Not handling (not called) ~w: ~s\n", - [state__lookup_name(get_label(Fun), State), + [NewState1#state.curr_fun, t_to_string(t_product(ArgTypes))]), analyze_loop(NewState1); false -> case state__fun_env(Fun, NewState1) of none -> ?debug("Not handling (no env) ~w: ~s\n", - [state__lookup_name(get_label(Fun), State), + [NewState1#state.curr_fun, t_to_string(t_product(ArgTypes))]), analyze_loop(NewState1); Map -> ?debug("Handling fun ~p: ~s\n", - [state__lookup_name(get_label(Fun), State), + [NewState1#state.curr_fun, t_to_string(state__fun_type(Fun, NewState1))]), Vars = cerl:fun_vars(Fun), Map1 = enter_type_lists(Vars, ArgTypes, Map), @@ -212,7 +220,7 @@ analyze_loop(State) -> {NewState4, _Map2, BodyType} = traverse(Body, Map1, NewState3), ?debug("Done analyzing: ~w:~s\n", - [state__lookup_name(get_label(Fun), State), + [NewState1#state.curr_fun, t_to_string(t_fun(ArgTypes, BodyType))]), NewState5 = case IsRaceAnalysisEnabled of @@ -2780,10 +2788,9 @@ filter_match_fail([]) -> %%% %%% =========================================================================== -state__new(Callgraph, Tree, Plt, Module, Records) -> - Opaques = erl_types:module_builtin_opaques(Module) ++ - erl_types:t_opaque_from_records(Records), - TreeMap = build_tree_map(Tree), +state__new(Callgraph, Codeserver, Tree, Plt, Module, Records) -> + Opaques = erl_types:t_opaque_from_records(Records), + {TreeMap, FunHomes} = build_tree_map(Tree, Callgraph), Funs = dict:fetch_keys(TreeMap), FunTab = init_fun_tab(Funs, dict:new(), TreeMap, Callgraph, Plt), ExportedFuns = @@ -2791,7 +2798,8 @@ state__new(Callgraph, Tree, Plt, Module, Records) -> Work = init_work(ExportedFuns), Env = lists:foldl(fun(Fun, Env) -> dict:store(Fun, map__new(), Env) end, dict:new(), Funs), - #state{callgraph = Callgraph, envs = Env, fun_tab = FunTab, opaques = Opaques, + #state{callgraph = Callgraph, codeserver = Codeserver, + envs = Env, fun_tab = FunTab, fun_homes = FunHomes, opaques = Opaques, plt = Plt, races = dialyzer_races:new(), records = Records, warning_mode = false, warnings = [], work = Work, tree_map = TreeMap, module = Module}. @@ -2830,7 +2838,7 @@ state__renew_race_list(RaceList, RaceListSize, state__renew_warnings(Warnings, State) -> State#state{warnings = Warnings}. --spec state__add_warning(dial_warning(), state()) -> state(). +-spec state__add_warning(raw_warning(), state()) -> state(). 
state__add_warning(Warn, #state{warnings = Warnings} = State) -> State#state{warnings = [Warn|Warnings]}. @@ -2845,29 +2853,45 @@ state__add_warning(#state{warnings = Warnings, warning_mode = true} = State, Ann = cerl:get_ann(Tree), case Force of true -> - Warn = {Tag, {get_file(Ann), abs(get_line(Ann))}, Msg}, + WarningInfo = {get_file(Ann), + abs(get_line(Ann)), + State#state.curr_fun}, + Warn = {Tag, WarningInfo, Msg}, ?debug("MSG ~s\n", [dialyzer:format_warning(Warn)]), State#state{warnings = [Warn|Warnings]}; false -> case is_compiler_generated(Ann) of - true -> State; - false -> - Warn = {Tag, {get_file(Ann), get_line(Ann)}, Msg}, + true -> State; + false -> + WarningInfo = {get_file(Ann), get_line(Ann), State#state.curr_fun}, + Warn = {Tag, WarningInfo, Msg}, ?debug("MSG ~s\n", [dialyzer:format_warning(Warn)]), - State#state{warnings = [Warn|Warnings]} + State#state{warnings = [Warn|Warnings]} end end. +-spec state__set_curr_fun(curr_fun(), state()) -> state(). + +state__set_curr_fun(undefined, State) -> + State#state{curr_fun = undefined}; +state__set_curr_fun(FunLbl, State) -> + State#state{curr_fun = find_function(FunLbl, State)}. + +-spec state__find_function(mfa_or_funlbl(), state()) -> mfa_or_funlbl(). + +state__find_function(FunLbl, State) -> + find_function(FunLbl, State). + state__get_race_warnings(#state{races = Races} = State) -> {Races1, State1} = dialyzer_races:get_race_warnings(Races, State), State1#state{races = Races1}. state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab, - callgraph = Callgraph, plt = Plt} = State, - NoWarnUnused) -> + callgraph = Callgraph, plt = Plt} = State) -> FoldFun = fun({top, _}, AccState) -> AccState; ({FunLbl, Fun}, AccState) -> + AccState1 = state__set_curr_fun(FunLbl, AccState), {NotCalled, Ret} = case dict:fetch(get_label(Fun), FunTab) of {not_handled, {_Args0, Ret0}} -> {true, Ret0}; @@ -2875,17 +2899,12 @@ state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab, end, case NotCalled of true -> - {Warn, Msg} = - case dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of - error -> {false, {}}; - {ok, {_M, F, A} = MFA} -> - {not sets:is_element(MFA, NoWarnUnused), - {unused_fun, [F, A]}} - end, - case Warn of - true -> state__add_warning(AccState, ?WARN_NOT_CALLED, Fun, Msg); - false -> AccState - end; + case dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of + error -> AccState1; + {ok, {_M, F, A}} -> + Msg = {unused_fun, [F, A]}, + state__add_warning(AccState1, ?WARN_NOT_CALLED, Fun, Msg) + end; false -> {Name, Contract} = case dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of @@ -2898,7 +2917,7 @@ state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab, %% Check if the function has a contract that allows this. 
Warn = case Contract of - none -> not parent_allows_this(FunLbl, State); + none -> not parent_allows_this(FunLbl, AccState1); {value, C} -> GenRet = dialyzer_contracts:get_contract_return(C), not t_is_unit(GenRet) @@ -2908,19 +2927,19 @@ state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab, case classify_returns(Fun) of no_match -> Msg = {no_return, [no_match|Name]}, - state__add_warning(AccState, ?WARN_RETURN_NO_RETURN, + state__add_warning(AccState1, ?WARN_RETURN_NO_RETURN, Fun, Msg); only_explicit -> Msg = {no_return, [only_explicit|Name]}, - state__add_warning(AccState, ?WARN_RETURN_ONLY_EXIT, + state__add_warning(AccState1, ?WARN_RETURN_ONLY_EXIT, Fun, Msg); only_normal -> Msg = {no_return, [only_normal|Name]}, - state__add_warning(AccState, ?WARN_RETURN_NO_RETURN, + state__add_warning(AccState1, ?WARN_RETURN_NO_RETURN, Fun, Msg); both -> Msg = {no_return, [both|Name]}, - state__add_warning(AccState, ?WARN_RETURN_NO_RETURN, + state__add_warning(AccState1, ?WARN_RETURN_NO_RETURN, Fun, Msg) end; false -> @@ -2958,8 +2977,10 @@ state__lookup_name(Fun, #state{callgraph = Callgraph}) -> state__lookup_record(Tag, Arity, #state{records = Records}) -> case erl_types:lookup_record(Tag, Arity, Records) of {ok, Fields} -> - {ok, t_tuple([t_atom(Tag)| - [FieldType || {_FieldName, FieldType} <- Fields]])}; + RecType = + t_tuple([t_atom(Tag)| + [FieldType || {_FieldName, _Abstr, FieldType} <- Fields]]), + {ok, RecType}; error -> error end. @@ -2971,17 +2992,31 @@ state__get_args_and_status(Tree, #state{fun_tab = FunTab}) -> {ok, {ArgTypes, _}} -> {ArgTypes, true} end. -build_tree_map(Tree) -> +build_tree_map(Tree, Callgraph) -> Fun = - fun(T, Dict) -> + fun(T, {Dict, Homes, FunLbls} = Acc) -> case cerl:is_c_fun(T) of true -> - dict:store(get_label(T), T, Dict); + FunLbl = get_label(T), + Dict1 = dict:store(FunLbl, T, Dict), + case catch dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of + {ok, MFA} -> + F2 = + fun(Lbl, Dict0) -> + dict:store(Lbl, MFA, Dict0) + end, + Homes1 = lists:foldl(F2, Homes, [FunLbl|FunLbls]), + {Dict1, Homes1, []}; + _ -> + {Dict1, Homes, [FunLbl|FunLbls]} + end; false -> - Dict + Acc end end, - cerl_trees:fold(Fun, dict:new(), Tree). + Dict0 = dict:new(), + {Dict, Homes, _} = cerl_trees:fold(Fun, {Dict0, Dict0, []}, Tree), + {Dict, Homes}. init_fun_tab([top|Left], Dict, TreeMap, Callgraph, Plt) -> NewDict = dict:store(top, {[], t_none()}, Dict), @@ -3439,6 +3474,13 @@ parent_allows_this(FunLbl, #state{callgraph = Callgraph, plt = Plt} =State) -> end end. +find_function({_, _, _} = MFA, _State) -> + MFA; +find_function(top, _State) -> + top; +find_function(FunLbl, #state{fun_homes = Homes}) -> + dict:fetch(FunLbl, Homes). + classify_returns(Tree) -> case find_terminals(cerl:fun_body(Tree)) of {false, false} -> no_match; diff --git a/lib/dialyzer/src/dialyzer_options.erl b/lib/dialyzer/src/dialyzer_options.erl index a92b8b1958..20971f1407 100644 --- a/lib/dialyzer/src/dialyzer_options.erl +++ b/lib/dialyzer/src/dialyzer_options.erl @@ -28,7 +28,7 @@ -module(dialyzer_options). --export([build/1]). +-export([build/1, build_warnings/2]). -include("dialyzer.hrl"). @@ -270,7 +270,7 @@ assert_solvers([v2|Terms]) -> assert_solvers([Term|_]) -> bad_option("Illegal value for solver", Term). --spec build_warnings([atom()], [dial_warning()]) -> [dial_warning()]. +-spec build_warnings([atom()], dial_warn_tags()) -> dial_warn_tags(). 
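%% find_function/2 in the dialyzer_dataflow hunk above resolves a core-Erlang
%% fun label to its enclosing MFA through the new fun_homes table, so warnings
%% raised inside anonymous funs are attributed to the named function that
%% contains them. A shape sketch with hypothetical labels:
Homes = dict:from_list([{12, {m, f, 1}},   %% label of f/1 itself
                        {13, {m, f, 1}},   %% label of a fun nested inside f/1
                        {20, {m, g, 0}}]).
{m, f, 1} = dict:fetch(13, Homes).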
build_warnings([Opt|Opts], Warnings) -> NewWarnings = diff --git a/lib/dialyzer/src/dialyzer_plt.erl b/lib/dialyzer/src/dialyzer_plt.erl index 63798f44b1..7c970daf41 100644 --- a/lib/dialyzer/src/dialyzer_plt.erl +++ b/lib/dialyzer/src/dialyzer_plt.erl @@ -158,9 +158,7 @@ lookup_contract(#mini_plt{contracts = ETSContracts}, ets_table_lookup(ETSContracts, MFA). -spec lookup_callbacks(plt(), module()) -> - 'none' | {'value', [{mfa(), {{Filename::string(), - Line::pos_integer()}, - #contract{}}}]}. + 'none' | {'value', [{mfa(), dialyzer_contracts:file_contract()}]}. lookup_callbacks(#mini_plt{callbacks = ETSCallbacks}, Mod) when is_atom(Mod) -> ets_table_lookup(ETSCallbacks, Mod). @@ -618,9 +616,7 @@ table_insert_list(Plt, [{Key, Val}|Left]) -> table_insert_list(Plt, []) -> Plt. -table_insert(Plt, Key, {_Ret, _Arg} = Obj) -> - dict:store(Key, Obj, Plt); -table_insert(Plt, Key, #contract{} = C) -> +table_insert(Plt, Key, {_File, #contract{}, _Xtra} = C) -> dict:store(Key, C, Plt). table_lookup(Plt, Obj) -> diff --git a/lib/dialyzer/src/dialyzer_races.erl b/lib/dialyzer/src/dialyzer_races.erl index 2a8aba5d8f..48eb331239 100644 --- a/lib/dialyzer/src/dialyzer_races.erl +++ b/lib/dialyzer/src/dialyzer_races.erl @@ -85,9 +85,9 @@ -type race_tag() :: 'whereis_register' | 'whereis_unregister' | 'ets_lookup_insert' | 'mnesia_dirty_read_write'. -%% The following type is similar to the dial_warning() type but has a +%% The following type is similar to the raw_warning() type but has a %% tag which is local to this module and is not propagated to outside --type dial_race_warning() :: {race_warn_tag(), file_line(), {atom(), [term()]}}. +-type dial_race_warning() :: {race_warn_tag(), warning_info(), {atom(), [term()]}}. -type race_warn_tag() :: ?WARN_WHEREIS_REGISTER | ?WARN_WHEREIS_UNREGISTER | ?WARN_ETS_LOOKUP_INSERT | ?WARN_MNESIA_DIRTY_READ_WRITE. @@ -312,10 +312,13 @@ race(State) -> DepList = fixup_race_list(RaceWarnTag, VarArgs, State1), {State2, RaceWarn} = get_race_warn(Fun, Args, ArgTypes, DepList, State), + {File, Line} = FileLine, + CurrMFA = dialyzer_dataflow:state__find_function(CurrFun, State), + WarningInfo = {File, Line, CurrMFA}, race( state__add_race_warning( state__renew_race_tags(T, State2), RaceWarn, RaceWarnTag, - FileLine)) + WarningInfo)) end, state__renew_race_tags([], RetState). @@ -2324,7 +2327,7 @@ get_race_warnings_helper(Warnings, State) -> [] -> {dialyzer_dataflow:state__get_races(State), State}; [H|T] -> - {RaceWarnTag, FileLine, {race_condition, [M, F, A, AT, S, DepList]}} = H, + {RaceWarnTag, WarningInfo, {race_condition, [M, F, A, AT, S, DepList]}} = H, Reason = case RaceWarnTag of ?WARN_WHEREIS_REGISTER -> @@ -2347,7 +2350,7 @@ get_race_warnings_helper(Warnings, State) -> "caused by its combination with ") end, W = - {?WARN_RACE_CONDITION, FileLine, + {?WARN_RACE_CONDITION, WarningInfo, {race_condition, [M, F, dialyzer_dataflow:format_args(A, AT, S), Reason]}}, get_race_warnings_helper(T, @@ -2377,12 +2380,12 @@ get_reason(DependencyList, Reason) -> end end. -state__add_race_warning(State, RaceWarn, RaceWarnTag, FileLine) -> +state__add_race_warning(State, RaceWarn, RaceWarnTag, WarningInfo) -> case RaceWarn of no_race -> State; _Else -> Races = dialyzer_dataflow:state__get_races(State), - Warn = {RaceWarnTag, FileLine, RaceWarn}, + Warn = {RaceWarnTag, WarningInfo, RaceWarn}, dialyzer_dataflow:state__put_races(add_race_warning(Warn, Races), State) end. 
diff --git a/lib/dialyzer/src/dialyzer_succ_typings.erl b/lib/dialyzer/src/dialyzer_succ_typings.erl index ef9b00e203..7ceb19e30a 100644 --- a/lib/dialyzer/src/dialyzer_succ_typings.erl +++ b/lib/dialyzer/src/dialyzer_succ_typings.erl @@ -29,7 +29,7 @@ -export([analyze_callgraph/3, analyze_callgraph/6, - get_warnings/8 + get_warnings/7 ]). -export([ @@ -69,10 +69,8 @@ -type scc() :: [mfa_or_funlbl()] | [module()]. - -record(st, {callgraph :: dialyzer_callgraph:callgraph(), codeserver :: dialyzer_codeserver:codeserver(), - no_warn_unused :: sets:set(mfa()), parent = none :: parent(), timing_server :: dialyzer_timing:timing_server(), solvers :: [solver()], @@ -137,18 +135,17 @@ get_refined_success_typings(SCCs, #st{callgraph = Callgraph, -type doc_plt() :: 'undefined' | dialyzer_plt:plt(). -spec get_warnings(dialyzer_callgraph:callgraph(), dialyzer_plt:plt(), - doc_plt(), dialyzer_codeserver:codeserver(), sets:set(mfa()), + doc_plt(), dialyzer_codeserver:codeserver(), dialyzer_timing:timing_server(), [solver()], pid()) -> - {[dial_warning()], dialyzer_plt:plt(), doc_plt()}. + {[raw_warning()], dialyzer_plt:plt(), doc_plt()}. get_warnings(Callgraph, Plt, DocPlt, Codeserver, - NoWarnUnused, TimingServer, Solvers, Parent) -> + TimingServer, Solvers, Parent) -> InitState = init_state_and_get_success_typings(Callgraph, Plt, Codeserver, TimingServer, Solvers, Parent), - NewState = InitState#st{no_warn_unused = NoWarnUnused}, - Mods = dialyzer_callgraph:modules(NewState#st.callgraph), - MiniPlt = NewState#st.plt, + Mods = dialyzer_callgraph:modules(InitState#st.callgraph), + MiniPlt = InitState#st.plt, FindOpaques = lookup_and_find_opaques_fun(Codeserver), CWarns = dialyzer_contracts:get_invalid_contract_warnings(Mods, Codeserver, @@ -156,31 +153,30 @@ get_warnings(Callgraph, Plt, DocPlt, Codeserver, MiniDocPlt = dialyzer_plt:get_mini_plt(DocPlt), ModWarns = ?timing(TimingServer, "warning", - get_warnings_from_modules(Mods, NewState, MiniDocPlt)), + get_warnings_from_modules(Mods, InitState, MiniDocPlt)), {postprocess_warnings(CWarns ++ ModWarns, Codeserver), dialyzer_plt:restore_full_plt(MiniPlt, Plt), dialyzer_plt:restore_full_plt(MiniDocPlt, DocPlt)}. get_warnings_from_modules(Mods, State, DocPlt) -> #st{callgraph = Callgraph, codeserver = Codeserver, - no_warn_unused = NoWarnUnused, plt = Plt, - timing_server = TimingServer} = State, - Init = {Codeserver, Callgraph, NoWarnUnused, Plt, DocPlt}, + plt = Plt, timing_server = TimingServer} = State, + Init = {Codeserver, Callgraph, Plt, DocPlt}, dialyzer_coordinator:parallel_job(warnings, Mods, Init, TimingServer). --spec collect_warnings(module(), warnings_init_data()) -> [dial_warning()]. +-spec collect_warnings(module(), warnings_init_data()) -> [raw_warning()]. 
-collect_warnings(M, {Codeserver, Callgraph, NoWarnUnused, Plt, DocPlt}) -> +collect_warnings(M, {Codeserver, Callgraph, Plt, DocPlt}) -> ModCode = dialyzer_codeserver:lookup_mod_code(M, Codeserver), Records = dialyzer_codeserver:lookup_mod_records(M, Codeserver), Contracts = dialyzer_codeserver:lookup_mod_contracts(M, Codeserver), AllFuns = collect_fun_info([ModCode]), %% Check if there are contracts for functions that do not exist - Warnings1 = + Warnings1 = dialyzer_contracts:contracts_without_fun(Contracts, AllFuns, Callgraph), {Warnings2, FunTypes} = - dialyzer_dataflow:get_warnings(ModCode, Plt, Callgraph, - Records, NoWarnUnused), + dialyzer_dataflow:get_warnings(ModCode, Plt, Callgraph, Codeserver, + Records), Attrs = cerl:module_attrs(ModCode), Warnings3 = dialyzer_behaviours:check_callbacks(M, Attrs, Records, Plt, Codeserver), @@ -197,17 +193,19 @@ postprocess_warnings(RawWarnings, Codeserver) -> postprocess_dataflow_warns([], _Callgraph, WAcc, Acc) -> lists:reverse(Acc, WAcc); -postprocess_dataflow_warns([{?WARN_CONTRACT_RANGE, {CallF, CallL}, Msg}|Rest], +postprocess_dataflow_warns([{?WARN_CONTRACT_RANGE, WarningInfo, Msg}|Rest], Codeserver, WAcc, Acc) -> + {CallF, CallL, _CallMFA} = WarningInfo, {contract_range, [Contract, M, F, A, ArgStrings, CRet]} = Msg, case dialyzer_codeserver:lookup_mfa_contract({M,F,A}, Codeserver) of - {ok, {{ContrF, _ContrL} = FileLine, _C}} -> + {ok, {{ContrF, ContrL}, _C, _X}} -> case CallF =:= ContrF of true -> NewMsg = {contract_range, [Contract, M, F, ArgStrings, CallL, CRet]}, - W = {?WARN_CONTRACT_RANGE, FileLine, NewMsg}, + WarningInfo2 = {ContrF, ContrL, {M, F, A}}, + W = {?WARN_CONTRACT_RANGE, WarningInfo2, NewMsg}, Filter = - fun({?WARN_CONTRACT_TYPES, FL, _}) when FL =:= FileLine -> false; + fun({?WARN_CONTRACT_TYPES, WI, _}) when WI =:= WarningInfo2 -> false; (_) -> true end, FilterWAcc = lists:filter(Filter, WAcc), @@ -219,7 +217,7 @@ postprocess_dataflow_warns([{?WARN_CONTRACT_RANGE, {CallF, CallL}, Msg}|Rest], %% The contract is not in a module that is currently under analysis. %% We display the warning in the file/line of the call. NewMsg = {contract_range, [Contract, M, F, ArgStrings, CallL, CRet]}, - W = {?WARN_CONTRACT_RANGE, {CallF, CallL}, NewMsg}, + W = {?WARN_CONTRACT_RANGE, WarningInfo, NewMsg}, postprocess_dataflow_warns(Rest, Codeserver, WAcc, [W|Acc]) end. @@ -262,7 +260,7 @@ refine_one_module(M, {CodeServer, Callgraph, Plt, _Solvers}) -> Records = dialyzer_codeserver:lookup_mod_records(M, CodeServer), FunTypes = get_fun_types_from_plt(AllFuns, Callgraph, Plt), NewFunTypes = - dialyzer_dataflow:get_fun_types(ModCode, Plt, Callgraph, Records), + dialyzer_dataflow:get_fun_types(ModCode, Plt, Callgraph, CodeServer, Records), Contracts1 = dialyzer_codeserver:lookup_mod_contracts(M, CodeServer), Contracts = orddict:from_list(dict:to_list(Contracts1)), FindOpaques = find_opaques_fun(Records), @@ -401,7 +399,7 @@ decorate_succ_typings(Contracts, Callgraph, FunTypes, FindOpaques) -> case dialyzer_callgraph:lookup_name(Label, Callgraph) of {ok, MFA} -> case orddict:find(MFA, Contracts) of - {ok, {_FileLine, Contract}} -> + {ok, {_FileLine, Contract, _Xtra}} -> Args = dialyzer_contracts:get_contract_args(Contract), Ret = dialyzer_contracts:get_contract_return(Contract), C = erl_types:t_fun(Args, Ret), @@ -422,10 +420,7 @@ lookup_and_find_opaques_fun(Codeserver) -> end. find_opaques_fun(Records) -> - fun(Module) -> - erl_types:module_builtin_opaques(Module) ++ - erl_types:t_opaque_from_records(Records) - end. 
+ fun(_Module) -> erl_types:t_opaque_from_records(Records) end. get_fun_types_from_plt(FunList, Callgraph, Plt) -> get_fun_types_from_plt(FunList, Callgraph, Plt, dict:new()). diff --git a/lib/dialyzer/src/dialyzer_typesig.erl b/lib/dialyzer/src/dialyzer_typesig.erl index 3d03ed3ab3..1737bfd3a9 100644 --- a/lib/dialyzer/src/dialyzer_typesig.erl +++ b/lib/dialyzer/src/dialyzer_typesig.erl @@ -2,7 +2,7 @@ %%----------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2006-2014. All Rights Reserved. +%% Copyright Ericsson AB 2006-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -3264,7 +3264,7 @@ lookup_record(Records, Tag, Arity) -> {ok, Fields} -> RecType = t_tuple([t_from_term(Tag)| - [FieldType || {_FieldName, FieldType} <- Fields]]), + [FieldType || {_FieldName, _Abstr, FieldType} <- Fields]]), {ok, RecType}; error -> error @@ -3275,7 +3275,7 @@ is_literal_record(Tree) -> lists:member(record, Ann). family(L) -> - sofs:to_external(sofs:rel2fam(sofs:relation(L))). + dialyzer_utils:family(L). %% ============================================================================ %% diff --git a/lib/dialyzer/src/dialyzer_utils.erl b/lib/dialyzer/src/dialyzer_utils.erl index 5297a3a7b4..1cc9528fed 100644 --- a/lib/dialyzer/src/dialyzer_utils.erl +++ b/lib/dialyzer/src/dialyzer_utils.erl @@ -2,7 +2,7 @@ %%----------------------------------------------------------------------- %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2006-2014. All Rights Reserved. +%% Copyright Ericsson AB 2006-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -40,12 +40,16 @@ get_core_from_src/2, get_record_and_type_info/1, get_spec_info/3, + get_fun_meta_info/3, + is_suppressed_fun/2, + is_suppressed_tag/3, merge_records/2, pp_hook/0, process_record_remote_types/1, sets_filter/2, src_compiler_opts/0, - parallelism/0 + parallelism/0, + family/1 ]). -include("dialyzer.hrl"). @@ -59,13 +63,13 @@ print_types(RecDict) -> print_types1([], _) -> ok; -print_types1([{type, _Name} = Key|T], RecDict) -> - {ok, {_Mod, Form, _Args}} = dict:find(Key, RecDict), - io:format("\n~w: ~w\n", [Key, erl_types:t_from_form(Form, RecDict)]), +print_types1([{type, _Name, _NArgs} = Key|T], RecDict) -> + {ok, {{_Mod, _Form, _Args}, Type}} = dict:find(Key, RecDict), + io:format("\n~w: ~w\n", [Key, Type]), print_types1(T, RecDict); -print_types1([{opaque, _Name} = Key|T], RecDict) -> - {ok, {_Mod, Form, _Args}} = dict:find(Key, RecDict), - io:format("\n~w: ~w\n", [Key, erl_types:t_from_form(Form, RecDict)]), +print_types1([{opaque, _Name, _NArgs} = Key|T], RecDict) -> + {ok, {{_Mod, _Form, _Args}, Type}} = dict:find(Key, RecDict), + io:format("\n~w: ~w\n", [Key, Type]), print_types1(T, RecDict); print_types1([{record, _Name} = Key|T], RecDict) -> {ok, [{_Arity, _Fields} = AF]} = dict:find(Key, RecDict), @@ -80,7 +84,9 @@ print_types1([{record, _Name} = Key|T], RecDict) -> -type abstract_code() :: [tuple()]. %% XXX: import from somewhere -type comp_options() :: [compile:option()]. --type mod_or_fname() :: atom() | file:filename(). +-type mod_or_fname() :: module() | file:filename(). +-type fa() :: {atom(), arity()}. +-type codeserver() :: dialyzer_codeserver:codeserver(). 
%% ============================================================================ %% @@ -215,28 +221,29 @@ get_record_and_type_info([{attribute, _, type, {{record, Name}, Fields0, []}} get_record_and_type_info([{attribute, _, Attr, {Name, TypeForm}}|Left], Module, Records, RecDict) when Attr =:= 'type'; Attr =:= 'opaque' -> - try - NewRecDict = add_new_type(Attr, Name, TypeForm, [], Module, RecDict), - get_record_and_type_info(Left, Module, Records, NewRecDict) + try add_new_type(Attr, Name, TypeForm, [], Module, RecDict) of + NewRecDict -> + get_record_and_type_info(Left, Module, Records, NewRecDict) catch throw:{error, _} = Error -> Error end; get_record_and_type_info([{attribute, _, Attr, {Name, TypeForm, Args}}|Left], Module, Records, RecDict) when Attr =:= 'type'; Attr =:= 'opaque' -> - try - NewRecDict = add_new_type(Attr, Name, TypeForm, Args, Module, RecDict), - get_record_and_type_info(Left, Module, Records, NewRecDict) + try add_new_type(Attr, Name, TypeForm, Args, Module, RecDict) of + NewRecDict -> + get_record_and_type_info(Left, Module, Records, NewRecDict) catch throw:{error, _} = Error -> Error end; get_record_and_type_info([_Other|Left], Module, Records, RecDict) -> get_record_and_type_info(Left, Module, Records, RecDict); get_record_and_type_info([], _Module, Records, RecDict) -> - case type_record_fields(lists:reverse(Records), RecDict) of - {ok, _NewRecDict} = Ok -> - ?debug(_NewRecDict), - Ok; + case + check_type_of_record_fields(lists:reverse(Records), RecDict) + of + ok -> + {ok, RecDict}; {error, Name, Error} -> {error, flat_format(" Error while parsing #~w{}: ~s\n", [Name, Error])} end. @@ -248,20 +255,21 @@ add_new_type(TypeOrOpaque, Name, TypeForm, ArgForms, Module, RecDict) -> Msg = flat_format("Type ~s/~w already defined\n", [Name, Arity]), throw({error, Msg}); false -> - ArgTypes = [erl_types:t_from_form(X) || X <- ArgForms], - case lists:all(fun erl_types:t_is_var/1, ArgTypes) of - true -> - ArgNames = [erl_types:t_var_name(X) || X <- ArgTypes], + try erl_types:t_var_names(ArgForms) of + ArgNames -> dict:store({TypeOrOpaque, Name, Arity}, - {Module, TypeForm, ArgNames}, RecDict); - false -> + {{Module, TypeForm, ArgNames}, + erl_types:t_any()}, RecDict) + catch + _:_ -> throw({error, flat_format("Type declaration for ~w does not " "have variables as parameters", [Name])}) end end. get_record_fields(Fields, RecDict) -> - get_record_fields(Fields, RecDict, []). + Fs = get_record_fields(Fields, RecDict, []), + {ok, [{Name, Form, erl_types:t_any()} || {Name, Form} <- Fs]}. get_record_fields([{typed_record_field, OrdRecField, TypeForm}|Left], RecDict, Acc) -> @@ -270,7 +278,7 @@ get_record_fields([{typed_record_field, OrdRecField, TypeForm}|Left], {record_field, _Line, Name0} -> erl_parse:normalise(Name0); {record_field, _Line, Name0, _Init} -> erl_parse:normalise(Name0) end, - get_record_fields(Left, RecDict, [{Name, TypeForm}|Acc]); + get_record_fields(Left, RecDict, [{Name, TypeForm}|Acc]); get_record_fields([{record_field, _Line, Name}|Left], RecDict, Acc) -> NewAcc = [{erl_parse:normalise(Name), {var, -1, '_'}}|Acc], get_record_fields(Left, RecDict, NewAcc); @@ -278,54 +286,66 @@ get_record_fields([{record_field, _Line, Name, _Init}|Left], RecDict, Acc) -> NewAcc = [{erl_parse:normalise(Name), {var, -1, '_'}}|Acc], get_record_fields(Left, RecDict, NewAcc); get_record_fields([], _RecDict, Acc) -> - {ok, lists:reverse(Acc)}. + lists:reverse(Acc). 
-type_record_fields([], RecDict) -> - {ok, RecDict}; -type_record_fields([RecKey|Recs], RecDict) -> - {ok, [{Arity, Fields}]} = dict:find(RecKey, RecDict), +%% Just check the local types. process_record_remote_types will add +%% the types later. +check_type_of_record_fields([], _RecDict) -> + ok; +check_type_of_record_fields([RecKey|Recs], RecDict) -> + {ok, [{_Arity, Fields}]} = dict:find(RecKey, RecDict), try - TypedFields = - [{FieldName, erl_types:t_from_form(FieldTypeForm, RecDict)} - || {FieldName, FieldTypeForm} <- Fields], - RecDict1 = dict:store(RecKey, [{Arity, TypedFields}], RecDict), - Fun = fun(OldOrdDict) -> - orddict:store(Arity, TypedFields, OldOrdDict) - end, - RecDict2 = dict:update(RecKey, Fun, RecDict1), - type_record_fields(Recs, RecDict2) + [erl_types:t_from_form_without_remote(FieldTypeForm, RecDict) + || {_FieldName, FieldTypeForm, _} <- Fields] + of + L when is_list(L) -> + check_type_of_record_fields(Recs, RecDict) catch throw:{error, Error} -> {record, Name} = RecKey, {error, Name, Error} end. --spec process_record_remote_types(dialyzer_codeserver:codeserver()) -> dialyzer_codeserver:codeserver(). +-spec process_record_remote_types(codeserver()) -> codeserver(). +%% The field types are cached. Used during analysis when handling records. process_record_remote_types(CServer) -> TempRecords = dialyzer_codeserver:get_temp_records(CServer), TempExpTypes = dialyzer_codeserver:get_temp_exported_types(CServer), - RecordFun = - fun(Key, Value) -> - case Key of - {record, _Name} -> - FieldFun = - fun(_Arity, Fields) -> - [{Name, erl_types:t_solve_remote(Field, TempExpTypes, - TempRecords)} - || {Name, Field} <- Fields] - end, - orddict:map(FieldFun, Value); - _Other -> Value - end - end, ModuleFun = - fun(_Module, Record) -> + fun(Module, Record) -> + RecordFun = + fun(Key, Value) -> + case Key of + {record, _Name} -> + FieldFun = + fun(_Arity, Fields) -> + [{Name, Field, + erl_types:t_from_form(Field, + TempExpTypes, + Module, + TempRecords)} + || {Name, Field, _} <- Fields] + end, + orddict:map(FieldFun, Value); + {opaque, _, _} -> + {{_Module, Form, _ArgNames}=F, _Type} = Value, + Type = erl_types:t_from_form(Form, TempExpTypes, Module, + TempRecords), + {F, Type}; + _Other -> Value + end + end, dict:map(RecordFun, Record) end, - NewRecords = dict:map(ModuleFun, TempRecords), - CServer1 = dialyzer_codeserver:finalize_records(NewRecords, CServer), - dialyzer_codeserver:finalize_exported_types(TempExpTypes, CServer1). + try dict:map(ModuleFun, TempRecords) of + NewRecords -> + CServer1 = dialyzer_codeserver:finalize_records(NewRecords, CServer), + dialyzer_codeserver:finalize_exported_types(TempExpTypes, CServer1) + catch + throw:{error, _RecName, _Error} = Error-> + Error + end. -spec merge_records(dict:dict(), dict:dict()) -> dict:dict(). @@ -341,12 +361,23 @@ merge_records(NewRecords, OldRecords) -> -type spec_dict() :: dict:dict(). -type callback_dict() :: dict:dict(). --spec get_spec_info(atom(), abstract_code(), dict:dict()) -> +-spec get_spec_info(module(), abstract_code(), dict:dict()) -> {'ok', spec_dict(), callback_dict()} | {'error', string()}. get_spec_info(ModName, AbstractCode, RecordsDict) -> + OptionalCallbacks0 = get_optional_callbacks(AbstractCode, ModName), + OptionalCallbacks = gb_sets:from_list(OptionalCallbacks0), get_spec_info(AbstractCode, dict:new(), dict:new(), - RecordsDict, ModName, "nofile"). + RecordsDict, ModName, OptionalCallbacks, "nofile"). 
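%% get_spec_info/3 now pre-computes which callbacks the module declares as
%% optional. A sketch of the abstract code it scans and of the MFAs that end up
%% in the gb_set; module name, callback name and line number are hypothetical:
Abs = [{attribute, 3, optional_callbacks, [{format_status, 1}]}].
[{my_beh, format_status, 1}] =
    [{my_beh, F, A} || {attribute, _, optional_callbacks, FAs} <- Abs,
                       {F, A} <- FAs].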
+ +get_optional_callbacks(Abs, ModName) -> + [{ModName, F, A} || {F, A} <- get_optional_callbacks(Abs)]. + +get_optional_callbacks(Abs) -> + L = [O || + {attribute, _, optional_callbacks, O} <- Abs, + is_fa_list(O)], + lists:append(L). %% TypeSpec is a list of conditional contracts for a function. %% Each contract is of the form {[Argument], Range, [Constraint]} where @@ -355,13 +386,14 @@ get_spec_info(ModName, AbstractCode, RecordsDict) -> %% are erl_types:erl_type() get_spec_info([{attribute, Ln, Contract, {Id, TypeSpec}}|Left], - SpecDict, CallbackDict, RecordsDict, ModName, File) + SpecDict, CallbackDict, RecordsDict, ModName, OptCb, File) when ((Contract =:= 'spec') or (Contract =:= 'callback')), is_list(TypeSpec) -> MFA = case Id of {_, _, _} = T -> T; {F, A} -> {ModName, F, A} end, + Xtra = [optional_callback || gb_sets:is_member(MFA, OptCb)], ActiveDict = case Contract of spec -> SpecDict; @@ -369,8 +401,9 @@ get_spec_info([{attribute, Ln, Contract, {Id, TypeSpec}}|Left], end, try dict:find(MFA, ActiveDict) of error -> + SpecData = {TypeSpec, Xtra}, NewActiveDict = - dialyzer_contracts:store_tmp_contract(MFA, {File, Ln}, TypeSpec, + dialyzer_contracts:store_tmp_contract(MFA, {File, Ln}, SpecData, ActiveDict, RecordsDict), {NewSpecDict, NewCallbackDict} = case Contract of @@ -378,8 +411,8 @@ get_spec_info([{attribute, Ln, Contract, {Id, TypeSpec}}|Left], callback -> {SpecDict, NewActiveDict} end, get_spec_info(Left, NewSpecDict, NewCallbackDict, - RecordsDict, ModName,File); - {ok, {{OtherFile, L},_C}} -> + RecordsDict, ModName, OptCb, File); + {ok, {{OtherFile, L}, _D}} -> {Mod, Fun, Arity} = MFA, Msg = flat_format(" Contract/callback for function ~w:~w/~w " "already defined in ~s:~w\n", @@ -391,15 +424,137 @@ get_spec_info([{attribute, Ln, Contract, {Id, TypeSpec}}|Left], [Ln, Error])} end; get_spec_info([{attribute, _, file, {IncludeFile, _}}|Left], - SpecDict, CallbackDict, RecordsDict, ModName, _File) -> + SpecDict, CallbackDict, RecordsDict, ModName, OptCb, _File) -> get_spec_info(Left, SpecDict, CallbackDict, - RecordsDict, ModName, IncludeFile); + RecordsDict, ModName, OptCb, IncludeFile); get_spec_info([_Other|Left], SpecDict, CallbackDict, - RecordsDict, ModName, File) -> - get_spec_info(Left, SpecDict, CallbackDict, RecordsDict, ModName, File); -get_spec_info([], SpecDict, CallbackDict, _RecordsDict, _ModName, _File) -> + RecordsDict, ModName, OptCb, File) -> + get_spec_info(Left, SpecDict, CallbackDict, + RecordsDict, ModName, OptCb, File); +get_spec_info([], SpecDict, CallbackDict, + _RecordsDict, _ModName, _OptCb, _File) -> {ok, SpecDict, CallbackDict}. +-spec get_fun_meta_info(module(), abstract_code(), [dial_warn_tag()]) -> + dialyzer_codeserver:fun_meta_info(). + +get_fun_meta_info(M, Abs, LegalWarnings) -> + NoWarn = get_nowarn_unused_function(M, Abs), + FuncSupp = get_func_suppressions(M, Abs), + Warnings0 = get_options(Abs, LegalWarnings), + Warnings = ordsets:to_list(Warnings0), + ModuleWarnings = [{M, W} || W <- Warnings], + RawProps = lists:append([NoWarn, FuncSupp, ModuleWarnings]), + process_options(dialyzer_utils:family(RawProps), Warnings0). 
+ +process_options([{M, _}=Mod|Left], Warnings) when is_atom(M) -> + [Mod|process_options(Left, Warnings)]; +process_options([{{_M, _F, _A}=MFA, Opts}|Left], Warnings) -> + WL = case lists:member(nowarn_function, Opts) of + true -> [{nowarn_function, func}]; % takes precedence + false -> + Ws = dialyzer_options:build_warnings(Opts, Warnings), + ModOnly = [{W, mod} || W <- ordsets:subtract(Warnings, Ws)], + FunOnly = [{W, func} || W <- ordsets:subtract(Ws, Warnings)], + ordsets:union(ModOnly, FunOnly) + end, + case WL of + [] -> process_options(Left, Warnings); + _ -> [{MFA, WL}|process_options(Left, Warnings)] + end; +process_options([], _Warnings) -> []. + +-spec get_nowarn_unused_function(module(), abstract_code()) -> + [{mfa(), 'no_unused'}]. + +get_nowarn_unused_function(M, Abs) -> + Opts = get_options_with_tag(compile, Abs), + Warn = erl_lint:bool_option(warn_unused_function, nowarn_unused_function, + true, Opts), + Functions = [{F, A} || {function, _, F, A, _} <- Abs], + AttrFile = collect_attribute(Abs, compile), + TagsFaList = check_fa_list(AttrFile, nowarn_unused_function, Functions), + FAs = case Warn of + false -> Functions; + true -> + [FA || {{nowarn_unused_function,_L,_File}, FA} <- TagsFaList] + end, + [{{M, F, A}, no_unused} || {F, A} <- FAs]. + +-spec get_func_suppressions(module(), abstract_code()) -> + [{mfa(), 'nowarn_function' | dial_warn_tag()}]. + +get_func_suppressions(M, Abs) -> + Functions = [{F, A} || {function, _, F, A, _} <- Abs], + AttrFile = collect_attribute(Abs, dialyzer), + TagsFAs = check_fa_list(AttrFile, '*', Functions), + %% Check the options: + Fun = fun({{nowarn_function, _L, _File}, _FA}) -> ok; + ({OptLFile, _FA}) -> + _ = get_options1([OptLFile], ordsets:new()) + end, + lists:foreach(Fun, TagsFAs), + [{{M, F, A}, W} || {{W, _L, _File}, {F, A}} <- TagsFAs]. + +-spec get_options(abstract_code(), [dial_warn_tag()]) -> + ordsets:ordset(dial_warn_tag()). + +get_options(Abs, LegalWarnings) -> + AttrFile = collect_attribute(Abs, dialyzer), + get_options1(AttrFile, LegalWarnings). + +get_options1([{Args, L, File}|Left], Warnings) -> + Opts = [O || + O <- lists:flatten([Args]), + is_atom(O)], + try dialyzer_options:build_warnings(Opts, Warnings) of + NewWarnings -> + get_options1(Left, NewWarnings) + catch + throw:{dialyzer_options_error, Msg} -> + Msg1 = flat_format(" ~s:~w: ~s", [File, L, Msg]), + throw({error, Msg1}) + end; +get_options1([], Warnings) -> + Warnings. + +-type collected_attribute() :: + {Args :: term(), erl_scan:line(), file:filename()}. + +collect_attribute(Abs, Tag) -> + collect_attribute(Abs, Tag, "nofile"). + +collect_attribute([{attribute, L, Tag, Args}|Left], Tag, File) -> + CollAttr = {Args, L, File}, + [CollAttr | collect_attribute(Left, Tag, File)]; +collect_attribute([{attribute, _, file, {IncludeFile, _}}|Left], Tag, _) -> + collect_attribute(Left, Tag, IncludeFile); +collect_attribute([_Other|Left], Tag, File) -> + collect_attribute(Left, Tag, File); +collect_attribute([], _Tag, _File) -> []. + +-spec is_suppressed_fun(mfa(), codeserver()) -> boolean(). + +is_suppressed_fun(MFA, CodeServer) -> + lookup_fun_property(MFA, nowarn_function, CodeServer). + +-spec is_suppressed_tag(mfa() | module(), dial_warn_tag(), codeserver()) -> + boolean(). + +is_suppressed_tag(MorMFA, Tag, Codeserver) -> + not lookup_fun_property(MorMFA, Tag, Codeserver). 
+ +lookup_fun_property({M, _F, _A}=MFA, Property, CodeServer) -> + MFAPropList = dialyzer_codeserver:lookup_meta_info(MFA, CodeServer), + case proplists:get_value(Property, MFAPropList, no) of + mod -> false; % suppressed in function + func -> true; % requested in function + no -> lookup_fun_property(M, Property, CodeServer) + end; +lookup_fun_property(M, Property, CodeServer) when is_atom(M) -> + MPropList = dialyzer_codeserver:lookup_meta_info(M, CodeServer), + proplists:is_defined(Property, MPropList). + %% ============================================================================ %% %% Exported types @@ -449,7 +604,6 @@ cleanup_compile_options(Opts) -> %% Using abstract, not asm or core. keep_compile_option(from_asm) -> false; -keep_compile_option(asm) -> false; keep_compile_option(from_core) -> false; %% The parse transform will already have been applied, may cause %% problems if it is re-applied. @@ -482,6 +636,57 @@ format_sig(Type, RecDict) -> flat_format(Fmt, Lst) -> lists:flatten(io_lib:format(Fmt, Lst)). +-spec get_options_with_tag(atom(), abstract_code()) -> [term()]. + +get_options_with_tag(Tag, Abs) -> + lists:flatten([O || {attribute, _, Tag0, O} <- Abs, Tag =:= Tag0]). + +%% Check F/A, and collect (unchecked) warning tags with line and file. +-spec check_fa_list([collected_attribute()], atom(), [fa()]) -> + [{{atom(), erl_scan:line(), file:filename()},fa()}]. + +check_fa_list(AttrFile, Tag, Functions) -> + FuncTab = gb_sets:from_list(Functions), + check_fa_list1(AttrFile, Tag, FuncTab). + +check_fa_list1([{Args, L, File}|Left], Tag, Funcs) -> + TermsL = [{{Tag0, L, File}, Term} || + {Tags, Terms0} <- lists:flatten([Args]), + Tag0 <- lists:flatten([Tags]), + Tag =:= '*' orelse Tag =:= Tag0, + Term <- lists:flatten([Terms0])], + case lists:dropwhile(fun({_, T}) -> is_fa(T) end, TermsL) of + [] -> ok; + [{_, Bad}|_] -> + Msg1 = flat_format(" Bad function ~w in line ~s:~w", + [Bad, File, L]), + throw({error, Msg1}) + end, + case lists:dropwhile(fun({_, FA}) -> is_known(FA, Funcs) end, TermsL) of + [] -> ok; + [{_, {F, A}}|_] -> + Msg2 = flat_format(" Unknown function ~w/~w in line ~s:~w", + [F, A, File, L]), + throw({error, Msg2}) + end, + TermsL ++ check_fa_list1(Left, Tag, Funcs); +check_fa_list1([], _Tag, _Funcs) -> []. + +is_known(FA, Funcs) -> + gb_sets:is_element(FA, Funcs). + +-spec is_fa_list(term()) -> boolean(). + +is_fa_list([E|L]) -> is_fa(E) andalso is_fa_list(L); +is_fa_list([]) -> true; +is_fa_list(_) -> false. + +-spec is_fa(term()) -> boolean(). + +is_fa({FuncName, Arity}) + when is_atom(FuncName), is_integer(Arity), Arity >= 0 -> true; +is_fa(_) -> false. + %%------------------------------------------------------------------- %% Author : Per Gustafsson <[email protected]> %% Description : Provides better printing of binaries. @@ -586,3 +791,8 @@ parallelism() -> CPUs = erlang:system_info(logical_processors_available), Schedulers = erlang:system_info(schedulers), min(CPUs, Schedulers). + +-spec family([{K,V}]) -> [{K,[V]}]. + +family(L) -> + sofs:to_external(sofs:rel2fam(sofs:relation(L))). 
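The per-function meta info collected above is driven by -dialyzer(...) module attributes. As a quick illustration (the module and function names below are made up; the test modules added later in this patch use the same attribute shapes):

    -module(example_suppressions).   %% hypothetical module, for illustration only
    -export([f/1, g/0]).

    %% Suppress every warning for f/1 (nowarn_function takes precedence).
    -dialyzer({nowarn_function, f/1}).

    %% Request a warning class for the whole module ...
    -dialyzer(unmatched_returns).

    %% ... or turn specific classes off for specific functions.
    -dialyzer({[no_return, no_match], [g/0]}).

    f(X) -> X.
    g() -> ok.

The new family/1 helper simply groups the collected pairs by key; for example, family([{m, a}, {m, b}, {n, c}]) returns [{m, [a, b]}, {n, [c]}].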
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/results/supervisor_incorrect_return b/lib/dialyzer/test/behaviour_SUITE_data/results/supervisor_incorrect_return index e89caf3cf7..4103a2d8b4 100644 --- a/lib/dialyzer/test/behaviour_SUITE_data/results/supervisor_incorrect_return +++ b/lib/dialyzer/test/behaviour_SUITE_data/results/supervisor_incorrect_return @@ -1,2 +1,2 @@ -supervisor_incorrect_return.erl:14: The inferred return type of init/1 ({'ok',{{'one_against_one',0,1},[{_,_,_,_,_,_},...]}}) has nothing in common with 'ignore' | {'ok',{{'one_for_all',non_neg_integer(),non_neg_integer()} | {'one_for_one',non_neg_integer(),non_neg_integer()} | {'rest_for_one',non_neg_integer(),non_neg_integer()} | {'simple_one_for_one',non_neg_integer(),non_neg_integer()},[{_,{atom() | tuple(),atom(),'undefined' | [any()]},'permanent' | 'temporary' | 'transient','brutal_kill' | 'infinity' | non_neg_integer(),'supervisor' | 'worker','dynamic' | [atom() | tuple()]}]}}, which is the expected return type for the callback of supervisor behaviour +supervisor_incorrect_return.erl:14: The inferred return type of init/1 ({'ok',{{'one_against_one',0,1},[{_,_,_,_,_,_},...]}}) has nothing in common with 'ignore' | {'ok',{{'one_for_all',non_neg_integer(),pos_integer()} | {'one_for_one',non_neg_integer(),pos_integer()} | {'rest_for_one',non_neg_integer(),pos_integer()} | {'simple_one_for_one',non_neg_integer(),pos_integer()} | #{},[{_,{atom() | tuple(),atom(),'undefined' | [any()]},'permanent' | 'temporary' | 'transient','brutal_kill' | 'infinity' | non_neg_integer(),'supervisor' | 'worker','dynamic' | [atom() | tuple()]} | #{}]}}, which is the expected return type for the callback of supervisor behaviour diff --git a/lib/dialyzer/test/behaviour_SUITE_data/src/custom_sup.erl b/lib/dialyzer/test/behaviour_SUITE_data/src/custom_sup.erl index 76da1fda70..401ee88eab 100644 --- a/lib/dialyzer/test/behaviour_SUITE_data/src/custom_sup.erl +++ b/lib/dialyzer/test/behaviour_SUITE_data/src/custom_sup.erl @@ -13,7 +13,7 @@ -export([init/1]). -spec init(atom()) -> - {ok, {{supervisor:strategy(), non_neg_integer(), non_neg_integer()}, + {ok, {{supervisor:strategy(), non_neg_integer(), pos_integer()}, [supervisor:child_spec()]}} | ignore. init(StorageName) -> diff --git a/lib/dialyzer/test/race_SUITE_data/src/ets_insert_args1_suppressed.erl b/lib/dialyzer/test/race_SUITE_data/src/ets_insert_args1_suppressed.erl new file mode 100644 index 0000000000..5134cc6f0b --- /dev/null +++ b/lib/dialyzer/test/race_SUITE_data/src/ets_insert_args1_suppressed.erl @@ -0,0 +1,19 @@ +%% This tests the presence of possible races due to an ets:lookup/ets:insert +%% combination. It takes into account the argument types of the calls. + +-module(ets_insert_args1_suppressed). +-export([start/0]). + +-dialyzer({nowarn_function,start/0}). + +start() -> + F = fun(T) -> [{_, N}] = ets:lookup(T, counter), + ets:insert(T, [{counter, N+1}]) + end, + io:format("Created ~w\n", [ets:new(foo, [named_table, public])]), + ets:insert(foo, {counter, 0}), + io:format("Inserted ~w\n", [{counter, 0}]), + F(foo), + io:format("Update complete\n", []), + ObjectList = ets:lookup(foo, counter), + io:format("Counter: ~w\n", [ObjectList]). 
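The warning suppressed in ets_insert_args1_suppressed.erl comes from the non-atomic ets:lookup/ets:insert pair. For comparison only (a sketch, not part of the patch), the same counter bump can be done without the lookup/insert window by using ets:update_counter/3:

    %% bump/1 is a made-up helper: atomically increments the {counter, N} object.
    bump(Table) ->
        ets:update_counter(Table, counter, 1).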
diff --git a/lib/dialyzer/test/small_SUITE_data/results/behaviour_info b/lib/dialyzer/test/small_SUITE_data/results/behaviour_info new file mode 100644 index 0000000000..2da4d26acb --- /dev/null +++ b/lib/dialyzer/test/small_SUITE_data/results/behaviour_info @@ -0,0 +1,2 @@ + +with_bad_format_status.erl:12: The inferred type for the 1st argument of format_status/2 ('bad_arg') is not a supertype of 'normal' | 'terminate', which is expected type for this argument in the callback of the gen_server behaviour diff --git a/lib/dialyzer/test/small_SUITE_data/results/blame_contract_range_suppressed b/lib/dialyzer/test/small_SUITE_data/results/blame_contract_range_suppressed new file mode 100644 index 0000000000..40733434f6 --- /dev/null +++ b/lib/dialyzer/test/small_SUITE_data/results/blame_contract_range_suppressed @@ -0,0 +1,2 @@ + +blame_contract_range_suppressed.erl:8: Function foo/0 has no local return diff --git a/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes b/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes index fbdd182358..a9fbfb6068 100644 --- a/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes +++ b/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes @@ -6,23 +6,27 @@ contracts_with_subtypes.erl:135: The call contracts_with_subtypes:rec2({'a','b'} contracts_with_subtypes.erl:136: The call contracts_with_subtypes:rec2({'b','a'}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab()) contracts_with_subtypes.erl:137: The call contracts_with_subtypes:rec2({'a',{'b','a'}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab()) contracts_with_subtypes.erl:138: The call contracts_with_subtypes:rec2({'b',{'a','b'}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab()) -contracts_with_subtypes.erl:171: The pattern 1 can never match the type string() -contracts_with_subtypes.erl:174: The pattern 'alpha' can never match the type {'ok',_} | {'ok',_,string()} -contracts_with_subtypes.erl:176: The pattern 42 can never match the type {'ok',_} | {'ok',_,string()} -contracts_with_subtypes.erl:192: The pattern 'alpha' can never match the type {'ok',_} -contracts_with_subtypes.erl:194: The pattern 42 can never match the type {'ok',_} -contracts_with_subtypes.erl:212: The pattern 'alpha' can never match the type {'ok',_} -contracts_with_subtypes.erl:214: The pattern 42 can never match the type {'ok',_} -contracts_with_subtypes.erl:231: The pattern 1 can never match the type string() -contracts_with_subtypes.erl:234: The pattern {'ok', _} can never match the type {'ok',_,string()} -contracts_with_subtypes.erl:235: The pattern 'alpha' can never match the type {'ok',_,string()} -contracts_with_subtypes.erl:236: The pattern {'ok', 42} can never match the type {'ok',_,string()} -contracts_with_subtypes.erl:237: The pattern 42 can never match the type {'ok',_,string()} +contracts_with_subtypes.erl:139: The call contracts_with_subtypes:rec2({'a',{'b',{'a','b'}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab()) +contracts_with_subtypes.erl:140: The call contracts_with_subtypes:rec2({'b',{'a',{'b','a'}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab()) +contracts_with_subtypes.erl:141: The call contracts_with_subtypes:rec2({'a',{'b',{'a',{'b','a'}}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab()) +contracts_with_subtypes.erl:142: The call contracts_with_subtypes:rec2({'b',{'a',{'b',{'a','b'}}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab()) +contracts_with_subtypes.erl:175: 
The pattern 1 can never match the type string() +contracts_with_subtypes.erl:178: The pattern 'alpha' can never match the type {'ok',_} | {'ok',_,string()} +contracts_with_subtypes.erl:180: The pattern 42 can never match the type {'ok',_} | {'ok',_,string()} +contracts_with_subtypes.erl:196: The pattern 'alpha' can never match the type {'ok',_} +contracts_with_subtypes.erl:198: The pattern 42 can never match the type {'ok',_} +contracts_with_subtypes.erl:216: The pattern 'alpha' can never match the type {'ok',_} +contracts_with_subtypes.erl:218: The pattern 42 can never match the type {'ok',_} +contracts_with_subtypes.erl:235: The pattern 1 can never match the type string() +contracts_with_subtypes.erl:238: The pattern {'ok', _} can never match the type {'ok',_,string()} +contracts_with_subtypes.erl:239: The pattern 'alpha' can never match the type {'ok',_,string()} contracts_with_subtypes.erl:23: Invalid type specification for function contracts_with_subtypes:extract2/0. The success typing is () -> 'something' -contracts_with_subtypes.erl:263: Function flat_ets_new_t/0 has no local return -contracts_with_subtypes.erl:264: The call contracts_with_subtypes:flat_ets_new(12,[]) breaks the contract (Name,Options) -> atom() when is_subtype(Name,atom()), is_subtype(Options,[Option]), is_subtype(Option,'set' | 'ordered_set' | 'bag' | 'duplicate_bag' | 'public' | 'protected' | 'private' | 'named_table' | {'keypos',integer()} | {'heir',pid(),term()} | {'heir','none'} | {'write_concurrency',boolean()} | {'read_concurrency',boolean()} | 'compressed') -contracts_with_subtypes.erl:290: Function factored_ets_new_t/0 has no local return -contracts_with_subtypes.erl:291: The call contracts_with_subtypes:factored_ets_new(12,[]) breaks the contract (Name,Options) -> atom() when is_subtype(Name,atom()), is_subtype(Options,[Option]), is_subtype(Option,Type | Access | 'named_table' | {'keypos',Pos} | {'heir',Pid::pid(),HeirData} | {'heir','none'} | Tweaks), is_subtype(Type,type()), is_subtype(Access,access()), is_subtype(Tweaks,{'write_concurrency',boolean()} | {'read_concurrency',boolean()} | 'compressed'), is_subtype(Pos,pos_integer()), is_subtype(HeirData,term()) +contracts_with_subtypes.erl:240: The pattern {'ok', 42} can never match the type {'ok',_,string()} +contracts_with_subtypes.erl:241: The pattern 42 can never match the type {'ok',_,string()} +contracts_with_subtypes.erl:267: Function flat_ets_new_t/0 has no local return +contracts_with_subtypes.erl:268: The call contracts_with_subtypes:flat_ets_new(12,[]) breaks the contract (Name,Options) -> atom() when is_subtype(Name,atom()), is_subtype(Options,[Option]), is_subtype(Option,'set' | 'ordered_set' | 'bag' | 'duplicate_bag' | 'public' | 'protected' | 'private' | 'named_table' | {'keypos',integer()} | {'heir',pid(),term()} | {'heir','none'} | {'write_concurrency',boolean()} | {'read_concurrency',boolean()} | 'compressed') +contracts_with_subtypes.erl:294: Function factored_ets_new_t/0 has no local return +contracts_with_subtypes.erl:295: The call contracts_with_subtypes:factored_ets_new(12,[]) breaks the contract (Name,Options) -> atom() when is_subtype(Name,atom()), is_subtype(Options,[Option]), is_subtype(Option,Type | Access | 'named_table' | {'keypos',Pos} | {'heir',Pid::pid(),HeirData} | {'heir','none'} | Tweaks), is_subtype(Type,type()), is_subtype(Access,access()), is_subtype(Tweaks,{'write_concurrency',boolean()} | {'read_concurrency',boolean()} | 'compressed'), is_subtype(Pos,pos_integer()), is_subtype(HeirData,term()) 
contracts_with_subtypes.erl:77: The call contracts_with_subtypes:foo1(5) breaks the contract (Arg1) -> Res when is_subtype(Arg1,atom()), is_subtype(Res,atom()) contracts_with_subtypes.erl:78: The call contracts_with_subtypes:foo2(5) breaks the contract (Arg1) -> Res when is_subtype(Arg1,Arg2), is_subtype(Arg2,atom()), is_subtype(Res,atom()) contracts_with_subtypes.erl:79: The call contracts_with_subtypes:foo3(5) breaks the contract (Arg1) -> Res when is_subtype(Arg2,atom()), is_subtype(Arg1,Arg2), is_subtype(Res,atom()) diff --git a/lib/dialyzer/test/small_SUITE_data/results/request1 b/lib/dialyzer/test/small_SUITE_data/results/request1 new file mode 100644 index 0000000000..0cf4017403 --- /dev/null +++ b/lib/dialyzer/test/small_SUITE_data/results/request1 @@ -0,0 +1,2 @@ + +request1.erl:8: Expression produces a value of type {'a','b'}, but this value is unmatched diff --git a/lib/dialyzer/test/small_SUITE_data/results/suppress_request b/lib/dialyzer/test/small_SUITE_data/results/suppress_request new file mode 100644 index 0000000000..18e82b7972 --- /dev/null +++ b/lib/dialyzer/test/small_SUITE_data/results/suppress_request @@ -0,0 +1,6 @@ + +suppress_request.erl:21: Expression produces a value of type {'a','b'}, but this value is unmatched +suppress_request.erl:25: Expression produces a value of type {'a','b'}, but this value is unmatched +suppress_request.erl:35: Function test3_b/0 has no local return +suppress_request.erl:39: Guard test 2 =:= A::fun((none()) -> no_return()) can never succeed +suppress_request.erl:7: Type specification suppress_request:test1('a' | 'b') -> 'ok' is a subtype of the success typing: suppress_request:test1('a' | 'b' | 'c') -> 'ok' diff --git a/lib/dialyzer/test/small_SUITE_data/src/behaviour_info/with_bad_format_status.erl b/lib/dialyzer/test/small_SUITE_data/src/behaviour_info/with_bad_format_status.erl new file mode 100644 index 0000000000..24591e08fa --- /dev/null +++ b/lib/dialyzer/test/small_SUITE_data/src/behaviour_info/with_bad_format_status.erl @@ -0,0 +1,12 @@ +-module(with_bad_format_status). + +-behaviour(gen_server). +-export([handle_call/3,handle_cast/2,handle_info/2, + code_change/3, init/1, terminate/2, format_status/2]). +handle_call(_, _, S) -> {noreply, S}. +handle_cast(_, S) -> {noreply, S}. +handle_info(_, S) -> {noreply, S}. +code_change(_, _, _) -> {error, not_implemented}. +init(_) -> {ok, state}. +terminate(_, _) -> ok. +format_status(bad_arg, _) -> ok. % optional callback diff --git a/lib/dialyzer/test/small_SUITE_data/src/behaviour_info/with_format_status.erl b/lib/dialyzer/test/small_SUITE_data/src/behaviour_info/with_format_status.erl new file mode 100644 index 0000000000..a56ff63d1d --- /dev/null +++ b/lib/dialyzer/test/small_SUITE_data/src/behaviour_info/with_format_status.erl @@ -0,0 +1,13 @@ +-module(with_format_status). + +-behaviour(gen_server). +-export([handle_call/3,handle_cast/2,handle_info/2, + code_change/3, init/1, terminate/2, format_status/2]). +-export([handle_call/3,handle_cast/2,handle_info/2]). +handle_call(_, _, S) -> {noreply, S}. +handle_cast(_, S) -> {noreply, S}. +handle_info(_, S) -> {noreply, S}. +code_change(_, _, _) -> {error, not_implemented}. +init(_) -> {ok, state}. +terminate(_, _) -> ok. +format_status(normal, _) -> ok. 
% optional callback diff --git a/lib/dialyzer/test/small_SUITE_data/src/big_external_type.erl b/lib/dialyzer/test/small_SUITE_data/src/big_external_type.erl new file mode 100644 index 0000000000..91a157b17f --- /dev/null +++ b/lib/dialyzer/test/small_SUITE_data/src/big_external_type.erl @@ -0,0 +1,528 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2001-2015. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +%%% A stripped version of erl_parse.yrl. +%%% +%%% A type for the abstract format with *external* types has been added. +%%% The type of the abstract format is not up-to-date, but it does not +%%% matter since the purpose of the type is to stress the conversion +%%% of type forms to erl_type(). + +-module(big_external_type). + +-export([parse_form/1,parse_exprs/1,parse_term/1]). +-export([normalise/1,tokens/1,tokens/2]). +-export([inop_prec/1,preop_prec/1,func_prec/0,max_prec/0]). + +-export_type([abstract_clause/0, abstract_expr/0, abstract_form/0, + error_info/0]). + +%% Start of Abstract Format + +-type line() :: erl_scan:line(). + +-export_type([af_record_index/0, af_record_field/1, af_record_name/0, + af_field_name/0, af_function_decl/0]). + +-export_type([af_module/0, af_export/0, af_import/0, af_fa_list/0, + af_compile/0, af_file/0, af_record_decl/0, + af_field_decl/0, af_wild_attribute/0, + af_record_update/1, af_catch/0, af_local_call/0, + af_remote_call/0, af_args/0, af_local_function/0, + af_remote_function/0, af_list_comprehension/0, + af_binary_comprehension/0, af_template/0, + af_qualifier_seq/0, af_qualifier/0, af_generator/0, + af_filter/0, af_block/0, af_if/0, af_case/0, af_try/0, + af_clause_seq/0, af_catch_clause_seq/0, af_receive/0, + af_local_fun/0, af_remote_fun/0, af_fun/0, af_query/0, + af_query_access/0, af_clause/0, + af_catch_clause/0, af_catch_pattern/0, af_catch_class/0, + af_body/0, af_guard_seq/0, af_guard/0, af_guard_test/0, + af_record_access/1, af_guard_call/0, + af_remote_guard_call/0, af_pattern/0, af_literal/0, + af_atom/0, af_lit_atom/1, af_integer/0, af_float/0, + af_string/0, af_match/1, af_variable/0, + af_anon_variable/0, af_tuple/1, af_nil/0, af_cons/1, + af_bin/1, af_binelement/1, af_binelement_size/0, + af_binary_op/1, af_binop/0, af_unary_op/1, af_unop/0]). + +-type abstract_form() :: ?MODULE:af_module() + | ?MODULE:af_export() + | ?MODULE:af_import() + | ?MODULE:af_compile() + | ?MODULE:af_file() + | ?MODULE:af_record_decl() + | ?MODULE:af_wild_attribute() + | ?MODULE:af_function_decl(). + +-type af_module() :: {attribute, line(), module, module()}. + +-type af_export() :: {attribute, line(), export, ?MODULE:af_fa_list()}. + +-type af_import() :: {attribute, line(), import, ?MODULE:af_fa_list()}. + +-type af_fa_list() :: [{function(), arity()}]. + +-type af_compile() :: {attribute, line(), compile, any()}. + +-type af_file() :: {attribute, line(), file, {string(), line()}}. 
+ +-type af_record_decl() :: + {attribute, line(), record, ?MODULE:af_record_name(), [?MODULE:af_field_decl()]}. + +-type af_field_decl() :: {record_field, line(), ?MODULE:af_atom()} + | {record_field, line(), ?MODULE:af_atom(), ?MODULE:abstract_expr()}. + +%% Types and specs, among other things... +-type af_wild_attribute() :: {attribute, line(), ?MODULE:af_atom(), any()}. + +-type af_function_decl() :: + {function, line(), function(), arity(), ?MODULE:af_clause_seq()}. + +-type abstract_expr() :: ?MODULE:af_literal() + | ?MODULE:af_match(?MODULE:abstract_expr()) + | ?MODULE:af_variable() + | ?MODULE:af_tuple(?MODULE:abstract_expr()) + | ?MODULE:af_nil() + | ?MODULE:af_cons(?MODULE:abstract_expr()) + | ?MODULE:af_bin(?MODULE:abstract_expr()) + | ?MODULE:af_binary_op(?MODULE:abstract_expr()) + | ?MODULE:af_unary_op(?MODULE:abstract_expr()) + | ?MODULE:af_record_access(?MODULE:abstract_expr()) + | ?MODULE:af_record_update(?MODULE:abstract_expr()) + | ?MODULE:af_record_index() + | ?MODULE:af_record_field(?MODULE:abstract_expr()) + | ?MODULE:af_catch() + | ?MODULE:af_local_call() + | ?MODULE:af_remote_call() + | ?MODULE:af_list_comprehension() + | ?MODULE:af_binary_comprehension() + | ?MODULE:af_block() + | ?MODULE:af_if() + | ?MODULE:af_case() + | ?MODULE:af_try() + | ?MODULE:af_receive() + | ?MODULE:af_local_fun() + | ?MODULE:af_remote_fun() + | ?MODULE:af_fun() + | ?MODULE:af_query() + | ?MODULE:af_query_access(). + +-type af_record_update(T) :: {record, + line(), + ?MODULE:abstract_expr(), + ?MODULE:af_record_name(), + [?MODULE:af_record_field(T)]}. + +-type af_catch() :: {'catch', line(), ?MODULE:abstract_expr()}. + +-type af_local_call() :: {call, line(), ?MODULE:af_local_function(), ?MODULE:af_args()}. + +-type af_remote_call() :: {call, line(), ?MODULE:af_remote_function(), ?MODULE:af_args()}. + +-type af_args() :: [?MODULE:abstract_expr()]. + +-type af_local_function() :: ?MODULE:abstract_expr(). + +-type af_remote_function() :: + {remote, line(), ?MODULE:abstract_expr(), ?MODULE:abstract_expr()}. + +-type af_list_comprehension() :: + {lc, line(), ?MODULE:af_template(), ?MODULE:af_qualifier_seq()}. + +-type af_binary_comprehension() :: + {bc, line(), ?MODULE:af_template(), ?MODULE:af_qualifier_seq()}. + +-type af_template() :: ?MODULE:abstract_expr(). + +-type af_qualifier_seq() :: [?MODULE:af_qualifier()]. + +-type af_qualifier() :: ?MODULE:af_generator() | ?MODULE:af_filter(). + +-type af_generator() :: {generate, line(), ?MODULE:af_pattern(), ?MODULE:abstract_expr()} + | {b_generate, line(), ?MODULE:af_pattern(), ?MODULE:abstract_expr()}. + +-type af_filter() :: ?MODULE:abstract_expr(). + +-type af_block() :: {block, line(), ?MODULE:af_body()}. + +-type af_if() :: {'if', line(), ?MODULE:af_clause_seq()}. + +-type af_case() :: {'case', line(), ?MODULE:abstract_expr(), ?MODULE:af_clause_seq()}. + +-type af_try() :: {'try', + line(), + ?MODULE:af_body(), + ?MODULE:af_clause_seq(), + ?MODULE:af_catch_clause_seq(), + ?MODULE:af_body()}. + +-type af_clause_seq() :: [?MODULE:af_clause(), ...]. + +-type af_catch_clause_seq() :: [?MODULE:af_clause(), ...]. + +-type af_receive() :: + {'receive', line(), ?MODULE:af_clause_seq()} + | {'receive', line(), ?MODULE:af_clause_seq(), ?MODULE:abstract_expr(), ?MODULE:af_body()}. + +-type af_local_fun() :: {'fun', line(), {function, function(), arity()}}. + +-type af_remote_fun() :: + {'fun', line(), {function, module(), function(), arity()}} + | {'fun', line(), {function, ?MODULE:af_atom(), ?MODULE:af_atom(), ?MODULE:af_integer()}}. 
+ +-type af_fun() :: {'fun', line(), {clauses, ?MODULE:af_clause_seq()}}. + +-type af_query() :: {'query', line(), ?MODULE:af_list_comprehension()}. + +-type af_query_access() :: + {record_field, line(), ?MODULE:abstract_expr(), ?MODULE:af_field_name()}. + +-type abstract_clause() :: ?MODULE:af_clause() | ?MODULE:af_catch_clause(). + +-type af_clause() :: + {clause, line(), [?MODULE:af_pattern()], ?MODULE:af_guard_seq(), ?MODULE:af_body()}. + +-type af_catch_clause() :: + {clause, line(), [?MODULE:af_catch_pattern()], ?MODULE:af_guard_seq(), ?MODULE:af_body()}. + +-type af_catch_pattern() :: + {?MODULE:af_catch_class(), ?MODULE:af_pattern(), ?MODULE:af_anon_variable()}. + +-type af_catch_class() :: + ?MODULE:af_variable() + | ?MODULE:af_lit_atom(throw) | ?MODULE:af_lit_atom(error) | ?MODULE:af_lit_atom(exit). + +-type af_body() :: [?MODULE:abstract_expr(), ...]. + +-type af_guard_seq() :: [?MODULE:af_guard()]. + +-type af_guard() :: [?MODULE:af_guard_test(), ...]. + +-type af_guard_test() :: ?MODULE:af_literal() + | ?MODULE:af_variable() + | ?MODULE:af_tuple(?MODULE:af_guard_test()) + | ?MODULE:af_nil() + | ?MODULE:af_cons(?MODULE:af_guard_test()) + | ?MODULE:af_bin(?MODULE:af_guard_test()) + | ?MODULE:af_binary_op(?MODULE:af_guard_test()) + | ?MODULE:af_unary_op(?MODULE:af_guard_test()) + | ?MODULE:af_record_access(?MODULE:af_guard_test()) + | ?MODULE:af_record_index() + | ?MODULE:af_record_field(?MODULE:af_guard_test()) + | ?MODULE:af_guard_call() + | ?MODULE:af_remote_guard_call(). + +-type af_record_access(T) :: + {record, line(), ?MODULE:af_record_name(), [?MODULE:af_record_field(T)]}. + +-type af_guard_call() :: {call, line(), function(), [?MODULE:af_guard_test()]}. + +-type af_remote_guard_call() :: + {call, line(), atom(), ?MODULE:af_lit_atom(erlang), [?MODULE:af_guard_test()]}. + +-type af_pattern() :: ?MODULE:af_literal() + | ?MODULE:af_match(?MODULE:af_pattern()) + | ?MODULE:af_variable() + | ?MODULE:af_anon_variable() + | ?MODULE:af_tuple(?MODULE:af_pattern()) + | ?MODULE:af_nil() + | ?MODULE:af_cons(?MODULE:af_pattern()) + | ?MODULE:af_bin(?MODULE:af_pattern()) + | ?MODULE:af_binary_op(?MODULE:af_pattern()) + | ?MODULE:af_unary_op(?MODULE:af_pattern()) + | ?MODULE:af_record_index() + | ?MODULE:af_record_field(?MODULE:af_pattern()). + +-type af_literal() :: ?MODULE:af_atom() | ?MODULE:af_integer() | ?MODULE:af_float() | ?MODULE:af_string(). + +-type af_atom() :: ?MODULE:af_lit_atom(atom()). + +-type af_lit_atom(A) :: {atom, line(), A}. + +-type af_integer() :: {integer, line(), non_neg_integer()}. + +-type af_float() :: {float, line(), float()}. + +-type af_string() :: {string, line(), [byte()]}. + +-type af_match(T) :: {match, line(), T, T}. + +-type af_variable() :: {var, line(), atom()}. + +-type af_anon_variable() :: {var, line(), '_'}. + +-type af_tuple(T) :: {tuple, line(), [T]}. + +-type af_nil() :: {nil, line()}. + +-type af_cons(T) :: {cons, line, T, T}. + +-type af_bin(T) :: {bin, line(), [?MODULE:af_binelement(T)]}. + +-type af_binelement(T) :: {bin_element, + line(), + T, + ?MODULE:af_binelement_size(), + type_specifier_list()}. + +-type af_binelement_size() :: default | ?MODULE:abstract_expr(). + +-type af_binary_op(T) :: {op, line(), T, ?MODULE:af_binop(), T}. + +-type af_binop() :: '/' | '*' | 'div' | 'rem' | 'band' | 'and' | '+' | '-' + | 'bor' | 'bxor' | 'bsl' | 'bsr' | 'or' | 'xor' | '++' + | '--' | '==' | '/=' | '=<' | '<' | '>=' | '>' | '=:=' + | '=/='. + +-type af_unary_op(T) :: {op, line(), ?MODULE:af_unop(), T}. 
+ +-type af_unop() :: '+' | '*' | 'bnot' | 'not'. + +%% See also lib/stdlib/{src/erl_bits.erl,include/erl_bits.hrl}. +-type type_specifier_list() :: default | [type_specifier(), ...]. + +-type type_specifier() :: af_type() + | af_signedness() + | af_endianness() + | af_unit(). + +-type af_type() :: integer + | float + | binary + | bytes + | bitstring + | bits + | utf8 + | utf16 + | utf32. + +-type af_signedness() :: signed | unsigned. + +-type af_endianness() :: big | little | native. + +-type af_unit() :: {unit, 1..256}. + +-type af_record_index() :: + {record_index, line(), af_record_name(), af_field_name()}. + +-type af_record_field(T) :: {record_field, line(), af_field_name(), T}. + +-type af_record_name() :: atom(). + +-type af_field_name() :: atom(). + +%% End of Abstract Format + +-type error_description() :: term(). +-type error_info() :: {erl_scan:line(), module(), error_description()}. +-type token() :: {Tag :: atom(), Line :: erl_scan:line()}. + +%% mkop(Op, Arg) -> {op,Line,Op,Arg}. +%% mkop(Left, Op, Right) -> {op,Line,Op,Left,Right}. + +-define(mkop2(L, OpPos, R), + begin + {Op,Pos} = OpPos, + {op,Pos,Op,L,R} + end). + +-define(mkop1(OpPos, A), + begin + {Op,Pos} = OpPos, + {op,Pos,Op,A} + end). + +%% keep track of line info in tokens +-define(line(Tup), element(2, Tup)). + +%% Entry points compatible to old erl_parse. +%% These really suck and are only here until Calle gets multiple +%% entry points working. + +-spec parse_form(Tokens) -> {ok, AbsForm} | {error, ErrorInfo} when + Tokens :: [token()], + AbsForm :: abstract_form(), + ErrorInfo :: error_info(). +parse_form([{'-',L1},{atom,L2,spec}|Tokens]) -> + parse([{'-',L1},{'spec',L2}|Tokens]); +parse_form([{'-',L1},{atom,L2,callback}|Tokens]) -> + parse([{'-',L1},{'callback',L2}|Tokens]); +parse_form(Tokens) -> + parse(Tokens). + +-spec parse_exprs(Tokens) -> {ok, ExprList} | {error, ErrorInfo} when + Tokens :: [token()], + ExprList :: [abstract_expr()], + ErrorInfo :: error_info(). +parse_exprs(Tokens) -> + case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of + {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],Exprs}]}} -> + {ok,Exprs}; + {error,_} = Err -> Err + end. + +-spec parse_term(Tokens) -> {ok, Term} | {error, ErrorInfo} when + Tokens :: [token()], + Term :: term(), + ErrorInfo :: error_info(). +parse_term(Tokens) -> + case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of + {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[Expr]}]}} -> + try normalise(Expr) of + Term -> {ok,Term} + catch + _:_R -> {error,{?line(Expr),?MODULE,"bad term"}} + end; + {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[_E1,E2|_Es]}]}} -> + {error,{?line(E2),?MODULE,"bad term"}}; + {error,_} = Err -> Err + end. + +%% Convert between the abstract form of a term and a term. + +-spec normalise(AbsTerm) -> Data when + AbsTerm :: abstract_expr(), + Data :: term(). +normalise({char,_,C}) -> C; +normalise({integer,_,I}) -> I; +normalise({float,_,F}) -> F; +normalise({atom,_,A}) -> A; +normalise({string,_,S}) -> S; +normalise({nil,_}) -> []; +normalise({bin,_,Fs}) -> + {value, B, _} = + eval_bits:expr_grp(Fs, [], + fun(E, _) -> + {value, normalise(E), []} + end, [], true), + B; +normalise({cons,_,Head,Tail}) -> + [normalise(Head)|normalise(Tail)]; +normalise({tuple,_,Args}) -> + list_to_tuple(normalise_list(Args)); +%% Atom dot-notation, as in 'foo.bar.baz' +%% Special case for unary +/-. 
+normalise({op,_,'+',{char,_,I}}) -> I; +normalise({op,_,'+',{integer,_,I}}) -> I; +normalise({op,_,'+',{float,_,F}}) -> F; +normalise({op,_,'-',{char,_,I}}) -> -I; %Weird, but compatible! +normalise({op,_,'-',{integer,_,I}}) -> -I; +normalise({op,_,'-',{float,_,F}}) -> -F; +normalise(X) -> erlang:error({badarg, X}). + +normalise_list([H|T]) -> + [normalise(H)|normalise_list(T)]; +normalise_list([]) -> + []. + +%% Generate a list of tokens representing the abstract term. + +-spec tokens(AbsTerm) -> Tokens when + AbsTerm :: abstract_expr(), + Tokens :: [token()]. +tokens(Abs) -> + tokens(Abs, []). + +-spec tokens(AbsTerm, MoreTokens) -> Tokens when + AbsTerm :: abstract_expr(), + MoreTokens :: [token()], + Tokens :: [token()]. +tokens({char,L,C}, More) -> [{char,L,C}|More]; +tokens({integer,L,N}, More) -> [{integer,L,N}|More]; +tokens({float,L,F}, More) -> [{float,L,F}|More]; +tokens({atom,L,A}, More) -> [{atom,L,A}|More]; +tokens({var,L,V}, More) -> [{var,L,V}|More]; +tokens({string,L,S}, More) -> [{string,L,S}|More]; +tokens({nil,L}, More) -> [{'[',L},{']',L}|More]; +tokens({cons,L,Head,Tail}, More) -> + [{'[',L}|tokens(Head, tokens_tail(Tail, More))]; +tokens({tuple,L,[]}, More) -> + [{'{',L},{'}',L}|More]; +tokens({tuple,L,[E|Es]}, More) -> + [{'{',L}|tokens(E, tokens_tuple(Es, ?line(E), More))]. + +tokens_tail({cons,L,Head,Tail}, More) -> + [{',',L}|tokens(Head, tokens_tail(Tail, More))]; +tokens_tail({nil,L}, More) -> + [{']',L}|More]; +tokens_tail(Other, More) -> + L = ?line(Other), + [{'|',L}|tokens(Other, [{']',L}|More])]. + +tokens_tuple([E|Es], Line, More) -> + [{',',Line}|tokens(E, tokens_tuple(Es, ?line(E), More))]; +tokens_tuple([], Line, More) -> + [{'}',Line}|More]. + +%% Give the relative precedences of operators. + +inop_prec('=') -> {150,100,100}; +inop_prec('!') -> {150,100,100}; +inop_prec('orelse') -> {160,150,150}; +inop_prec('andalso') -> {200,160,160}; +inop_prec('==') -> {300,200,300}; +inop_prec('/=') -> {300,200,300}; +inop_prec('=<') -> {300,200,300}; +inop_prec('<') -> {300,200,300}; +inop_prec('>=') -> {300,200,300}; +inop_prec('>') -> {300,200,300}; +inop_prec('=:=') -> {300,200,300}; +inop_prec('=/=') -> {300,200,300}; +inop_prec('++') -> {400,300,300}; +inop_prec('--') -> {400,300,300}; +inop_prec('+') -> {400,400,500}; +inop_prec('-') -> {400,400,500}; +inop_prec('bor') -> {400,400,500}; +inop_prec('bxor') -> {400,400,500}; +inop_prec('bsl') -> {400,400,500}; +inop_prec('bsr') -> {400,400,500}; +inop_prec('or') -> {400,400,500}; +inop_prec('xor') -> {400,400,500}; +inop_prec('*') -> {500,500,600}; +inop_prec('/') -> {500,500,600}; +inop_prec('div') -> {500,500,600}; +inop_prec('rem') -> {500,500,600}; +inop_prec('band') -> {500,500,600}; +inop_prec('and') -> {500,500,600}; +inop_prec('#') -> {800,700,800}; +inop_prec(':') -> {900,800,900}; +inop_prec('.') -> {900,900,1000}. + +-type pre_op() :: 'catch' | '+' | '-' | 'bnot' | 'not' | '#'. + +-spec preop_prec(pre_op()) -> {0 | 600 | 700, 100 | 700 | 800}. + +preop_prec('catch') -> {0,100}; +preop_prec('+') -> {600,700}; +preop_prec('-') -> {600,700}; +preop_prec('bnot') -> {600,700}; +preop_prec('not') -> {600,700}; +preop_prec('#') -> {700,800}. + +-spec func_prec() -> {800,700}. + +func_prec() -> {800,700}. + +-spec max_prec() -> 1000. + +max_prec() -> 1000. + +parse(T) -> + bar:foo(T). 
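big_external_type.erl (and its sibling big_local_type.erl below) exists only to stress the conversion of type forms into erl_type() structures. A minimal way to push such a module through that path from an Erlang shell might look like the following sketch; the file path is a placeholder and an already-built default PLT is assumed:

    %% Sketch only: analyze the stress module from source and print any warnings.
    Ws = dialyzer:run([{files, ["big_external_type.erl"]},  % placeholder path
                       {from, src_code}]),
    [io:format("~s", [dialyzer:format_warning(W)]) || W <- Ws].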
diff --git a/lib/dialyzer/test/small_SUITE_data/src/big_local_type.erl b/lib/dialyzer/test/small_SUITE_data/src/big_local_type.erl new file mode 100644 index 0000000000..6de263eda1 --- /dev/null +++ b/lib/dialyzer/test/small_SUITE_data/src/big_local_type.erl @@ -0,0 +1,525 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2001-2015. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +%%% A stripped version of erl_parse.yrl. +%%% +%%% A type for the abstract format with *local* types has been added. +%%% The type of the abstract format is not up-to-date, but it does not +%%% matter since the purpose of the type is to stress the conversion +%%% of type forms to erl_type(). + +-module(big_local_type). + +-export([parse_form/1,parse_exprs/1,parse_term/1]). +-export([normalise/1,tokens/1,tokens/2]). +-export([inop_prec/1,preop_prec/1,func_prec/0,max_prec/0]). + +-export_type([abstract_clause/0, abstract_expr/0, abstract_form/0, + error_info/0]). + +%% Start of Abstract Format + +-type line() :: erl_scan:line(). + +-export_type([af_module/0, af_export/0, af_import/0, af_fa_list/0, + af_compile/0, af_file/0, af_record_decl/0, + af_field_decl/0, af_wild_attribute/0, + af_record_update/1, af_catch/0, af_local_call/0, + af_remote_call/0, af_args/0, af_local_function/0, + af_remote_function/0, af_list_comprehension/0, + af_binary_comprehension/0, af_template/0, + af_qualifier_seq/0, af_qualifier/0, af_generator/0, + af_filter/0, af_block/0, af_if/0, af_case/0, af_try/0, + af_clause_seq/0, af_catch_clause_seq/0, af_receive/0, + af_local_fun/0, af_remote_fun/0, af_fun/0, af_query/0, + af_query_access/0, af_clause/0, + af_catch_clause/0, af_catch_pattern/0, af_catch_class/0, + af_body/0, af_guard_seq/0, af_guard/0, af_guard_test/0, + af_record_access/1, af_guard_call/0, + af_remote_guard_call/0, af_pattern/0, af_literal/0, + af_atom/0, af_lit_atom/1, af_integer/0, af_float/0, + af_string/0, af_match/1, af_variable/0, + af_anon_variable/0, af_tuple/1, af_nil/0, af_cons/1, + af_bin/1, af_binelement/1, af_binelement_size/0, + af_binary_op/1, af_binop/0, af_unary_op/1, af_unop/0]). + +-type abstract_form() :: af_module() + | af_export() + | af_import() + | af_compile() + | af_file() + | af_record_decl() + | af_wild_attribute() + | af_function_decl(). + +-type af_module() :: {attribute, line(), module, module()}. + +-type af_export() :: {attribute, line(), export, af_fa_list()}. + +-type af_import() :: {attribute, line(), import, af_fa_list()}. + +-type af_fa_list() :: [{function(), arity()}]. + +-type af_compile() :: {attribute, line(), compile, any()}. + +-type af_file() :: {attribute, line(), file, {string(), line()}}. + +-type af_record_decl() :: + {attribute, line(), record, af_record_name(), [af_field_decl()]}. + +-type af_field_decl() :: {record_field, line(), af_atom()} + | {record_field, line(), af_atom(), abstract_expr()}. + +%% Types and specs, among other things... 
+-type af_wild_attribute() :: {attribute, line(), af_atom(), any()}. + +-type af_function_decl() :: + {function, line(), function(), arity(), af_clause_seq()}. + +-type abstract_expr() :: af_literal() + | af_match(abstract_expr()) + | af_variable() + | af_tuple(abstract_expr()) + | af_nil() + | af_cons(abstract_expr()) + | af_bin(abstract_expr()) + | af_binary_op(abstract_expr()) + | af_unary_op(abstract_expr()) + | af_record_access(abstract_expr()) + | af_record_update(abstract_expr()) + | af_record_index() + | af_record_field(abstract_expr()) + | af_catch() + | af_local_call() + | af_remote_call() + | af_list_comprehension() + | af_binary_comprehension() + | af_block() + | af_if() + | af_case() + | af_try() + | af_receive() + | af_local_fun() + | af_remote_fun() + | af_fun() + | af_query() + | af_query_access(). + +-type af_record_update(T) :: {record, + line(), + abstract_expr(), + af_record_name(), + [af_record_field(T)]}. + +-type af_catch() :: {'catch', line(), abstract_expr()}. + +-type af_local_call() :: {call, line(), af_local_function(), af_args()}. + +-type af_remote_call() :: {call, line(), af_remote_function(), af_args()}. + +-type af_args() :: [abstract_expr()]. + +-type af_local_function() :: abstract_expr(). + +-type af_remote_function() :: + {remote, line(), abstract_expr(), abstract_expr()}. + +-type af_list_comprehension() :: + {lc, line(), af_template(), af_qualifier_seq()}. + +-type af_binary_comprehension() :: + {bc, line(), af_template(), af_qualifier_seq()}. + +-type af_template() :: abstract_expr(). + +-type af_qualifier_seq() :: [af_qualifier()]. + +-type af_qualifier() :: af_generator() | af_filter(). + +-type af_generator() :: {generate, line(), af_pattern(), abstract_expr()} + | {b_generate, line(), af_pattern(), abstract_expr()}. + +-type af_filter() :: abstract_expr(). + +-type af_block() :: {block, line(), af_body()}. + +-type af_if() :: {'if', line(), af_clause_seq()}. + +-type af_case() :: {'case', line(), abstract_expr(), af_clause_seq()}. + +-type af_try() :: {'try', + line(), + af_body(), + af_clause_seq(), + af_catch_clause_seq(), + af_body()}. + +-type af_clause_seq() :: [af_clause(), ...]. + +-type af_catch_clause_seq() :: [af_clause(), ...]. + +-type af_receive() :: + {'receive', line(), af_clause_seq()} + | {'receive', line(), af_clause_seq(), abstract_expr(), af_body()}. + +-type af_local_fun() :: {'fun', line(), {function, function(), arity()}}. + +-type af_remote_fun() :: + {'fun', line(), {function, module(), function(), arity()}} + | {'fun', line(), {function, af_atom(), af_atom(), af_integer()}}. + +-type af_fun() :: {'fun', line(), {clauses, af_clause_seq()}}. + +-type af_query() :: {'query', line(), af_list_comprehension()}. + +-type af_query_access() :: + {record_field, line(), abstract_expr(), af_field_name()}. + +-type abstract_clause() :: af_clause() | af_catch_clause(). + +-type af_clause() :: + {clause, line(), [af_pattern()], af_guard_seq(), af_body()}. + +-type af_catch_clause() :: + {clause, line(), [af_catch_pattern()], af_guard_seq(), af_body()}. + +-type af_catch_pattern() :: + {af_catch_class(), af_pattern(), af_anon_variable()}. + +-type af_catch_class() :: + af_variable() + | af_lit_atom(throw) | af_lit_atom(error) | af_lit_atom(exit). + +-type af_body() :: [abstract_expr(), ...]. + +-type af_guard_seq() :: [af_guard()]. + +-type af_guard() :: [af_guard_test(), ...]. 
+ +-type af_guard_test() :: af_literal() + | af_variable() + | af_tuple(af_guard_test()) + | af_nil() + | af_cons(af_guard_test()) + | af_bin(af_guard_test()) + | af_binary_op(af_guard_test()) + | af_unary_op(af_guard_test()) + | af_record_access(af_guard_test()) + | af_record_index() + | af_record_field(af_guard_test()) + | af_guard_call() + | af_remote_guard_call(). + +-type af_record_access(T) :: + {record, line(), af_record_name(), [af_record_field(T)]}. + +-type af_guard_call() :: {call, line(), function(), [af_guard_test()]}. + +-type af_remote_guard_call() :: + {call, line(), atom(), af_lit_atom(erlang), [af_guard_test()]}. + +-type af_pattern() :: af_literal() + | af_match(af_pattern()) + | af_variable() + | af_anon_variable() + | af_tuple(af_pattern()) + | af_nil() + | af_cons(af_pattern()) + | af_bin(af_pattern()) + | af_binary_op(af_pattern()) + | af_unary_op(af_pattern()) + | af_record_index() + | af_record_field(af_pattern()). + +-type af_literal() :: af_atom() | af_integer() | af_float() | af_string(). + +-type af_atom() :: af_lit_atom(atom()). + +-type af_lit_atom(A) :: {atom, line(), A}. + +-type af_integer() :: {integer, line(), non_neg_integer()}. + +-type af_float() :: {float, line(), float()}. + +-type af_string() :: {string, line(), [byte()]}. + +-type af_match(T) :: {match, line(), T, T}. + +-type af_variable() :: {var, line(), atom()}. + +-type af_anon_variable() :: {var, line(), '_'}. + +-type af_tuple(T) :: {tuple, line(), [T]}. + +-type af_nil() :: {nil, line()}. + +-type af_cons(T) :: {cons, line, T, T}. + +-type af_bin(T) :: {bin, line(), [af_binelement(T)]}. + +-type af_binelement(T) :: {bin_element, + line(), + T, + af_binelement_size(), + type_specifier_list()}. + +-type af_binelement_size() :: default | abstract_expr(). + +-type af_binary_op(T) :: {op, line(), T, af_binop(), T}. + +-type af_binop() :: '/' | '*' | 'div' | 'rem' | 'band' | 'and' | '+' | '-' + | 'bor' | 'bxor' | 'bsl' | 'bsr' | 'or' | 'xor' | '++' + | '--' | '==' | '/=' | '=<' | '<' | '>=' | '>' | '=:=' + | '=/='. + +-type af_unary_op(T) :: {op, line(), af_unop(), T}. + +-type af_unop() :: '+' | '*' | 'bnot' | 'not'. + +%% See also lib/stdlib/{src/erl_bits.erl,include/erl_bits.hrl}. +-type type_specifier_list() :: default | [type_specifier(), ...]. + +-type type_specifier() :: af_type() + | af_signedness() + | af_endianness() + | af_unit(). + +-type af_type() :: integer + | float + | binary + | bytes + | bitstring + | bits + | utf8 + | utf16 + | utf32. + +-type af_signedness() :: signed | unsigned. + +-type af_endianness() :: big | little | native. + +-type af_unit() :: {unit, 1..256}. + +-type af_record_index() :: + {record_index, line(), af_record_name(), af_field_name()}. + +-type af_record_field(T) :: {record_field, line(), af_field_name(), T}. + +-type af_record_name() :: atom(). + +-type af_field_name() :: atom(). + +%% End of Abstract Format + +-type error_description() :: term(). +-type error_info() :: {erl_scan:line(), module(), error_description()}. +-type token() :: {Tag :: atom(), Line :: erl_scan:line()}. + +%% mkop(Op, Arg) -> {op,Line,Op,Arg}. +%% mkop(Left, Op, Right) -> {op,Line,Op,Left,Right}. + +-define(mkop2(L, OpPos, R), + begin + {Op,Pos} = OpPos, + {op,Pos,Op,L,R} + end). + +-define(mkop1(OpPos, A), + begin + {Op,Pos} = OpPos, + {op,Pos,Op,A} + end). + +%% keep track of line info in tokens +-define(line(Tup), element(2, Tup)). + +%% Entry points compatible to old erl_parse. +%% These really suck and are only here until Calle gets multiple +%% entry points working. 
+ +-spec parse_form(Tokens) -> {ok, AbsForm} | {error, ErrorInfo} when + Tokens :: [token()], + AbsForm :: abstract_form(), + ErrorInfo :: error_info(). +parse_form([{'-',L1},{atom,L2,spec}|Tokens]) -> + parse([{'-',L1},{'spec',L2}|Tokens]); +parse_form([{'-',L1},{atom,L2,callback}|Tokens]) -> + parse([{'-',L1},{'callback',L2}|Tokens]); +parse_form(Tokens) -> + parse(Tokens). + +-spec parse_exprs(Tokens) -> {ok, ExprList} | {error, ErrorInfo} when + Tokens :: [token()], + ExprList :: [abstract_expr()], + ErrorInfo :: error_info(). +parse_exprs(Tokens) -> + case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of + {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],Exprs}]}} -> + {ok,Exprs}; + {error,_} = Err -> Err + end. + +-spec parse_term(Tokens) -> {ok, Term} | {error, ErrorInfo} when + Tokens :: [token()], + Term :: term(), + ErrorInfo :: error_info(). +parse_term(Tokens) -> + case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of + {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[Expr]}]}} -> + try normalise(Expr) of + Term -> {ok,Term} + catch + _:_R -> {error,{?line(Expr),?MODULE,"bad term"}} + end; + {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[_E1,E2|_Es]}]}} -> + {error,{?line(E2),?MODULE,"bad term"}}; + {error,_} = Err -> Err + end. + +%% Convert between the abstract form of a term and a term. + +-spec normalise(AbsTerm) -> Data when + AbsTerm :: abstract_expr(), + Data :: term(). +normalise({char,_,C}) -> C; +normalise({integer,_,I}) -> I; +normalise({float,_,F}) -> F; +normalise({atom,_,A}) -> A; +normalise({string,_,S}) -> S; +normalise({nil,_}) -> []; +normalise({bin,_,Fs}) -> + {value, B, _} = + eval_bits:expr_grp(Fs, [], + fun(E, _) -> + {value, normalise(E), []} + end, [], true), + B; +normalise({cons,_,Head,Tail}) -> + [normalise(Head)|normalise(Tail)]; +normalise({tuple,_,Args}) -> + list_to_tuple(normalise_list(Args)); +%% Atom dot-notation, as in 'foo.bar.baz' +%% Special case for unary +/-. +normalise({op,_,'+',{char,_,I}}) -> I; +normalise({op,_,'+',{integer,_,I}}) -> I; +normalise({op,_,'+',{float,_,F}}) -> F; +normalise({op,_,'-',{char,_,I}}) -> -I; %Weird, but compatible! +normalise({op,_,'-',{integer,_,I}}) -> -I; +normalise({op,_,'-',{float,_,F}}) -> -F; +normalise(X) -> erlang:error({badarg, X}). + +normalise_list([H|T]) -> + [normalise(H)|normalise_list(T)]; +normalise_list([]) -> + []. + +%% Generate a list of tokens representing the abstract term. + +-spec tokens(AbsTerm) -> Tokens when + AbsTerm :: abstract_expr(), + Tokens :: [token()]. +tokens(Abs) -> + tokens(Abs, []). + +-spec tokens(AbsTerm, MoreTokens) -> Tokens when + AbsTerm :: abstract_expr(), + MoreTokens :: [token()], + Tokens :: [token()]. +tokens({char,L,C}, More) -> [{char,L,C}|More]; +tokens({integer,L,N}, More) -> [{integer,L,N}|More]; +tokens({float,L,F}, More) -> [{float,L,F}|More]; +tokens({atom,L,A}, More) -> [{atom,L,A}|More]; +tokens({var,L,V}, More) -> [{var,L,V}|More]; +tokens({string,L,S}, More) -> [{string,L,S}|More]; +tokens({nil,L}, More) -> [{'[',L},{']',L}|More]; +tokens({cons,L,Head,Tail}, More) -> + [{'[',L}|tokens(Head, tokens_tail(Tail, More))]; +tokens({tuple,L,[]}, More) -> + [{'{',L},{'}',L}|More]; +tokens({tuple,L,[E|Es]}, More) -> + [{'{',L}|tokens(E, tokens_tuple(Es, ?line(E), More))]. + +tokens_tail({cons,L,Head,Tail}, More) -> + [{',',L}|tokens(Head, tokens_tail(Tail, More))]; +tokens_tail({nil,L}, More) -> + [{']',L}|More]; +tokens_tail(Other, More) -> + L = ?line(Other), + [{'|',L}|tokens(Other, [{']',L}|More])]. 
+ +tokens_tuple([E|Es], Line, More) -> + [{',',Line}|tokens(E, tokens_tuple(Es, ?line(E), More))]; +tokens_tuple([], Line, More) -> + [{'}',Line}|More]. + +%% Give the relative precedences of operators. + +inop_prec('=') -> {150,100,100}; +inop_prec('!') -> {150,100,100}; +inop_prec('orelse') -> {160,150,150}; +inop_prec('andalso') -> {200,160,160}; +inop_prec('==') -> {300,200,300}; +inop_prec('/=') -> {300,200,300}; +inop_prec('=<') -> {300,200,300}; +inop_prec('<') -> {300,200,300}; +inop_prec('>=') -> {300,200,300}; +inop_prec('>') -> {300,200,300}; +inop_prec('=:=') -> {300,200,300}; +inop_prec('=/=') -> {300,200,300}; +inop_prec('++') -> {400,300,300}; +inop_prec('--') -> {400,300,300}; +inop_prec('+') -> {400,400,500}; +inop_prec('-') -> {400,400,500}; +inop_prec('bor') -> {400,400,500}; +inop_prec('bxor') -> {400,400,500}; +inop_prec('bsl') -> {400,400,500}; +inop_prec('bsr') -> {400,400,500}; +inop_prec('or') -> {400,400,500}; +inop_prec('xor') -> {400,400,500}; +inop_prec('*') -> {500,500,600}; +inop_prec('/') -> {500,500,600}; +inop_prec('div') -> {500,500,600}; +inop_prec('rem') -> {500,500,600}; +inop_prec('band') -> {500,500,600}; +inop_prec('and') -> {500,500,600}; +inop_prec('#') -> {800,700,800}; +inop_prec(':') -> {900,800,900}; +inop_prec('.') -> {900,900,1000}. + +-type pre_op() :: 'catch' | '+' | '-' | 'bnot' | 'not' | '#'. + +-spec preop_prec(pre_op()) -> {0 | 600 | 700, 100 | 700 | 800}. + +preop_prec('catch') -> {0,100}; +preop_prec('+') -> {600,700}; +preop_prec('-') -> {600,700}; +preop_prec('bnot') -> {600,700}; +preop_prec('not') -> {600,700}; +preop_prec('#') -> {700,800}. + +-spec func_prec() -> {800,700}. + +func_prec() -> {800,700}. + +-spec max_prec() -> 1000. + +max_prec() -> 1000. + +parse(T) -> + bar:foo(T). diff --git a/lib/dialyzer/test/small_SUITE_data/src/blame_contract_range_suppressed.erl b/lib/dialyzer/test/small_SUITE_data/src/blame_contract_range_suppressed.erl new file mode 100644 index 0000000000..8b66d35083 --- /dev/null +++ b/lib/dialyzer/test/small_SUITE_data/src/blame_contract_range_suppressed.erl @@ -0,0 +1,15 @@ +%%----------------------------------------------------------------------- +%% Like ./blame_contract_range.erl, but warning is suppressed. +%%----------------------------------------------------------------------- +-module(blame_contract_range_suppressed). + +-export([foo/0]). + +foo() -> + bar(b). + +-dialyzer({nowarn_function, bar/1}). + +-spec bar(atom()) -> a. +bar(a) -> a; +bar(b) -> b. diff --git a/lib/dialyzer/test/small_SUITE_data/src/contracts_with_subtypes.erl b/lib/dialyzer/test/small_SUITE_data/src/contracts_with_subtypes.erl index d7dfd9752e..dbabd904c2 100644 --- a/lib/dialyzer/test/small_SUITE_data/src/contracts_with_subtypes.erl +++ b/lib/dialyzer/test/small_SUITE_data/src/contracts_with_subtypes.erl @@ -136,10 +136,14 @@ q(ab) -> rec2({a, b}); % breaks the contract q(ba) -> rec2({b, a}); % breaks the contract q(aba) -> rec2({a, {b, a}}); % breaks the contract q(bab) -> rec2({b, {a, b}}); % breaks the contract -q(abab) -> rec2({a, {b, {a, b}}}); -q(baba) -> rec2({b, {a, {b, a}}}); -q(ababa) -> rec2({a, {b, {a, {b, a}}}}); -q(babab) -> rec2({b, {a, {b, {a, b}}}}). 
+q(abab) -> rec2({a, {b, {a, b}}}); % breaks the contract +q(baba) -> rec2({b, {a, {b, a}}}); % breaks the contract +q(ababa) -> rec2({a, {b, {a, {b, a}}}}); % breaks the contract +q(babab) -> rec2({b, {a, {b, {a, b}}}}); % breaks the contract +q(ababab) -> rec2({a, {b, {a, {b, {a, b}}}}}); +q(bababa) -> rec2({b, {a, {b, {a, {b, a}}}}}); +q(abababa) -> rec2({a, {b, {a, {b, {a, {b, a}}}}}}); +q(bababab) -> rec2({b, {a, {b, {a, {b, {a, b}}}}}}). %=============================================================================== diff --git a/lib/dialyzer/test/small_SUITE_data/src/ditrap.erl b/lib/dialyzer/test/small_SUITE_data/src/ditrap.erl new file mode 100644 index 0000000000..2d75f25bd5 --- /dev/null +++ b/lib/dialyzer/test/small_SUITE_data/src/ditrap.erl @@ -0,0 +1,47 @@ +%% A bug reported by Tail-f Systems. The problem is that record types +%% are included without properly limiting their depth. + +-module(ditrap). + +-define(tref(T), ?MODULE:T). +-define(fref(T), ?MODULE:T). + +-export_type([ module_rec/0 + , typedef_rec/0 + , type_spec_fun/0 + ]). + +-record(type, { + base :: 'builtin' | external:random_type() | ?tref(typedef_rec()), + type_spec_fun :: ?fref(type_spec_fun()) + }). + +-record(typedef, {type :: #type{}}). + +-record(typedefs, { + map :: ?tref(typedef_rec()), + parent :: 'undefined' | #typedefs{} + }). + +-record(sn, { + module :: ?tref(module_rec()), + typedefs :: #typedefs{}, + type :: 'undefined' | #type{}, + keys :: 'undefined' | [#sn{}], + children = [] :: [#sn{}] + }). + +-record(augment, {children = [] :: [#sn{}]}). + +-record(module, { + submodules = [] :: [{#module{}, external:pos()}], + typedefs = #typedefs{} :: #typedefs{}, + children = [] :: [#sn{}], + remote_augments = [] :: [{ModuleName :: atom(), [#augment{}]}], + local_augments = [] :: [#augment{}] + }). + +-type typedef_rec() :: #typedef{}. +-type module_rec() :: #module{}. + +-type type_spec_fun() :: undefined | fun((#type{}, #module{}) -> any()). diff --git a/lib/dialyzer/test/small_SUITE_data/src/maps1.erl b/lib/dialyzer/test/small_SUITE_data/src/maps1.erl index 228ffe2c22..06ced5b69e 100644 --- a/lib/dialyzer/test/small_SUITE_data/src/maps1.erl +++ b/lib/dialyzer/test/small_SUITE_data/src/maps1.erl @@ -10,7 +10,6 @@ -export([recv/3, decode/1]). --export([get_my_map/0,is_my_map/1]). %-record(can_pkt, {id, data :: binary(), timestamp}). @@ -40,38 +39,3 @@ t2() -> ok. update(#{ id := Id, val := Val } = M, X) when is_integer(Id) -> M#{ val := [Val,X] }. - -%% key coalescing - --spec get_my_map() -> map(). - -get_my_map() -> - #{labels => [one, two], - number => 27, - [1,2,3] => wer, - {4,5,6} => sdf, - kvok => #{ - <<"wat">> => v, - a => qwe, - 2 => asd, - [1,2,3] => wer, - {4,5,6} => sdf, - "abc" => zxc - } - }. - --spec is_my_map(map()) -> 'ok'. - -is_my_map(#{labels := [one, two], - number := 27, - [1,2,3] := wer, - {4,5,6} := sdf, - kvok := #{ - <<"wat">> := v, - a := qwe, - 2 := asd, - [1,2,3] := wer, - {4,5,6} := sdf, - "abc" := zxc - } - }) -> ok. diff --git a/lib/dialyzer/test/small_SUITE_data/src/predef2.erl b/lib/dialyzer/test/small_SUITE_data/src/predef2.erl deleted file mode 100644 index b1d941a49a..0000000000 --- a/lib/dialyzer/test/small_SUITE_data/src/predef2.erl +++ /dev/null @@ -1,56 +0,0 @@ --module(predef2). - --export([array/1, dict/1, digraph/1, digraph2/1, gb_set/1, gb_tree/1, - queue/1, set/1, tid/0, tid2/0]). - --export_type([array/0, digraph/0, gb_set/0]). - --spec array(array()) -> array:array(). - -array(A) -> - array:relax(A). - --spec dict(dict()) -> dict:dict(). 
- -dict(D) -> - dict:store(1, a, D). - --spec digraph(digraph()) -> [digraph:edge()]. - -digraph(G) -> - digraph:edges(G). - --spec digraph2(digraph:graph()) -> [digraph:edge()]. - -digraph2(G) -> - digraph:edges(G). - --spec gb_set(gb_set()) -> gb_sets:set(). - -gb_set(S) -> - gb_sets:balance(S). - --spec gb_tree(gb_tree()) -> gb_trees:tree(). - -gb_tree(S) -> - gb_trees:balance(S). - --spec queue(queue()) -> queue:queue(). - -queue(Q) -> - queue:reverse(Q). - --spec set(set()) -> sets:set(). - -set(S) -> - sets:union([S]). - --spec tid() -> tid(). - -tid() -> - ets:new(tid, []). - --spec tid2() -> ets:tid(). - -tid2() -> - ets:new(tid, []). diff --git a/lib/dialyzer/test/small_SUITE_data/src/request1.erl b/lib/dialyzer/test/small_SUITE_data/src/request1.erl new file mode 100644 index 0000000000..a6c4ab8dbd --- /dev/null +++ b/lib/dialyzer/test/small_SUITE_data/src/request1.erl @@ -0,0 +1,12 @@ +-module(request1). + +-export([a/0]). + +-dialyzer(unmatched_returns). + +a() -> + b(), + 1. + +b() -> + {a, b}. diff --git a/lib/dialyzer/test/small_SUITE_data/src/suppress_request.erl b/lib/dialyzer/test/small_SUITE_data/src/suppress_request.erl new file mode 100644 index 0000000000..c4275fa110 --- /dev/null +++ b/lib/dialyzer/test/small_SUITE_data/src/suppress_request.erl @@ -0,0 +1,50 @@ +-module(suppress_request). + +-export([test1/1, test1_b/1, test2/0, test2_b/0, + test3/0, test3_b/0, test4/0, test4_b/0]). + +-dialyzer({[specdiffs], test1/1}). +-spec test1(a | b) -> ok. % spec is subtype +test1(A) -> + ok = test1_1(A). + +-spec test1_b(a | b) -> ok. % spec is subtype (suppressed by default) +test1_b(A) -> + ok = test1_1(A). + +-spec test1_1(a | b | c) -> ok. +test1_1(_) -> + ok. + +-dialyzer(unmatched_returns). +test2() -> + tuple(), % unmatched + ok. + +test2_b() -> + tuple(), % unmatched + ok. + +-dialyzer({[no_return, no_match], [test3/0]}). +test3() -> % no local return (suppressed) + A = fun(_) -> + 1 + end, + A = 2. % can never succeed (suppressed) + +test3_b() -> % no local return (requested by default) + A = fun(_) -> + 1 + end, + A = 2. % can never succeed (requested by default) + +-dialyzer(no_improper_lists). +test4() -> + [1 | 2]. % improper list (suppressed) + +-dialyzer({no_improper_lists, test4_b/0}). +test4_b() -> + [1 | 2]. % improper list (suppressed) + +tuple() -> + {a, b}. diff --git a/lib/dialyzer/test/small_SUITE_data/src/suppression1.erl b/lib/dialyzer/test/small_SUITE_data/src/suppression1.erl new file mode 100644 index 0000000000..00534704c3 --- /dev/null +++ b/lib/dialyzer/test/small_SUITE_data/src/suppression1.erl @@ -0,0 +1,33 @@ +-module(suppression1). + +-export([a/1, b/1, c/0]). + +-dialyzer({nowarn_function, a/1}). + +-spec a(_) -> integer(). + +a(_) -> + A = fun(_) -> + B = fun(_) -> + x = 7 + end, + B = 1 + end, + A. + +-spec b(_) -> integer(). + +-dialyzer({nowarn_function, b/1}). + +b(_) -> + A = fun(_) -> + 1 + end, + A = 2. + +-record(r, {a = a :: integer()}). + +-dialyzer({nowarn_function, c/0}). + +c() -> + #r{}. diff --git a/lib/dialyzer/test/small_SUITE_data/src/suppression2.erl b/lib/dialyzer/test/small_SUITE_data/src/suppression2.erl new file mode 100644 index 0000000000..4cba53fdce --- /dev/null +++ b/lib/dialyzer/test/small_SUITE_data/src/suppression2.erl @@ -0,0 +1,32 @@ +-module(suppression2). + +-export([a/1, b/1, c/0]). + +-dialyzer({nowarn_function, [a/1, b/1, c/0]}). +-dialyzer([no_undefined_callbacks]). + +-behaviour(not_a_behaviour). + +-spec a(_) -> integer(). + +a(_) -> + A = fun(_) -> + B = fun(_) -> + x = 7 + end, + B = 1 + end, + A. 
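Note on the suppression test modules added here (suppress_request.erl, suppression1.erl, suppression2.erl): they exercise Dialyzer's -dialyzer() module attribute for requesting or suppressing warnings per function or per module. A minimal sketch of the attribute forms involved, using the same forms as the test files (module and function names below are hypothetical, not part of the patch):

    -module(dialyzer_attr_sketch).
    -export([f/0, g/0, h/0]).

    %% Suppress every warning for f/0.
    -dialyzer({nowarn_function, f/0}).
    f() ->
        [1 | 2].               %% improper list; no warning is emitted

    %% Suppress only one warning class, and only for g/0.
    -dialyzer({no_improper_lists, g/0}).
    g() ->
        [1 | 2].

    %% Request an extra warning class for the whole module.
    -dialyzer(unmatched_returns).
    h() ->
        tuple(),               %% unmatched return; now reported
        ok.

    tuple() -> {a, b}.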
+ +-spec b(_) -> integer(). + +b(_) -> + A = fun(_) -> + 1 + end, + A = 2. + +-record(r, {a = a :: integer()}). + +c() -> + #r{}. diff --git a/lib/edoc/doc/overview.edoc b/lib/edoc/doc/overview.edoc index 2af425272e..3639bb43a5 100644 --- a/lib/edoc/doc/overview.edoc +++ b/lib/edoc/doc/overview.edoc @@ -76,11 +76,9 @@ The following are the main functions for running EDoc: <ul> <li>{@link edoc:application/2}: Creates documentation for a typical Erlang application.</li> - <li>{@link edoc:packages/2}: Creates documentation for one or - more packages, automatically locating source files.</li> <li>{@link edoc:files/2}: Creates documentation for a specified set of source files.</li> - <li>{@link edoc:run/3}: General interface function; the common + <li>{@link edoc:run/2}: General interface function; the common back-end for the above functions. Options are documented here.</li> </ul> @@ -184,7 +182,7 @@ The following tags can be used anywhere within a module: path (see {@link edoc:read_source/2}).</dd> <dt><a name="gtag-todo">`@todo' (or `@TODO')</a></dt> - <dd>Attaches a To-Do note to a function, module, package, or + <dd>Attaches a To-Do note to a function, module or overview-page. The content can be any XHTML text describing the issue, e.g.: ```%% @TODO Finish writing the documentation.''' @@ -338,7 +336,7 @@ The following tags can be used before a module declaration: <dt><a name="mtag-since">`@since'</a></dt> <dd>Specifies when the module was introduced, with respect to - the application, package, release or distribution it is part + the application, release or distribution it is part of. The content can be arbitrary text.</dd> <dt><a name="mtag-version">`@version'</a></dt> @@ -445,7 +443,6 @@ possible formats for references are: <table border="1" summary="reference syntax"> <tr><th>Reference syntax</th><th>Example</th><th>Scope</th></tr> <tr><td>`Module'</td><td>{@link edoc_run}, `erl.lang.list'</td><td>Global</td></tr> - <tr><td>`Package.*'</td><td>`erl.lang.*'</td><td>Global</td></tr> <tr><td>`Function/Arity'</td><td>`file/2'</td><td>Within module</td></tr> <tr><td>`Module:Function/Arity'</td><td>{@link edoc:application/2}</td><td>Global</td></tr> <tr><td>`Type()'</td><td>`filename()'</td><td>Within module</td></tr> @@ -531,7 +528,7 @@ after the empty line into separate paragraphs. For example: ```%% @doc This will all be part of the first paragraph. %% It can stretch over several lines and contain <em>any %% XHTML markup</em>. - %% + %% %% This is the second paragraph. The above line is %% regarded as "empty" by EDoc, even though it ends with %% a space.''' @@ -685,17 +682,6 @@ information. User-defined macros override predefined macros. <dd>Expands to the current date, as "<tt>Month Day Year</tt>", e.g. "{@date}".</dd> - <dt><a name="predefmacro-docRoot"><code>@{@docRoot}</code></a></dt> - <dd>Expands to the relative URL path (such as - `"../../.."') from the current page to the root - directory of the generated documentation. This can be used to - create XHTML references such as `<img - src="@{@docRoot}/images/logo.jpeg">' that are independent of how - deep down in a package structure they occur. If packages are not - used (i.e., if all modules are in the "empty" package), - <code>@{@docRoot}</code> will always resolve to the empty - string.</dd> - <dt><a name="predefmacro-link"><code>@{@link <em>reference</em>. <em>description</em>}</code></a></dt> <dd>This creates a hypertext link; cf. the @@ -710,9 +696,6 @@ information. User-defined macros override predefined macros. 
<dd>Expands to the name of the current module. Only defined when a module is being processed.</dd> - <dt><a name="predefmacro-package"><code>@{@package}</code></a></dt> - <dd>Expands to the name of the current package.</dd> - <dt><a name="predefmacro-section"><code>@{@section <em>heading</em>}</code></a></dt> <dd>Expands to a hypertext link to the specified section heading; diff --git a/lib/edoc/include/edoc_doclet.hrl b/lib/edoc/include/edoc_doclet.hrl index 60ec7f44e4..ac6763fb33 100644 --- a/lib/edoc/include/edoc_doclet.hrl +++ b/lib/edoc/include/edoc_doclet.hrl @@ -1,6 +1,6 @@ %% ===================================================================== %% Header file for EDoc doclet modules. -%% +%% %% Copyright (C) 2001-2004 Richard Carlsson %% %% This library is free software; you can redistribute it and/or modify @@ -43,16 +43,11 @@ %% @type doclet_gen() = #doclet_gen{sources = [string()], %% app = no_app() | atom(), -%% packages = [atom()], -%% modules = [atom()], -%% modules = [atom()], -%% filemap = function()} +%% modules = [atom()]} -record(doclet_gen, {sources = [], app = ?NO_APP, - packages = [], - modules = [], - filemap + modules = [] }). %% @type doclet_toc() = #doclet_gen{paths = [string()], diff --git a/lib/edoc/priv/edoc.dtd b/lib/edoc/priv/edoc.dtd index ba4ac0db28..4278a9e643 100644 --- a/lib/edoc/priv/edoc.dtd +++ b/lib/edoc/priv/edoc.dtd @@ -2,20 +2,13 @@ <!-- EDoc DTD Version 0.3 --> <!ELEMENT overview (title, description?, author*, copyright?, version?, - since?, see*, reference*, todo?, packages, modules)> + since?, see*, reference*, todo?, modules)> <!ATTLIST overview root CDATA #IMPLIED encoding CDATA #IMPLIED> <!ELEMENT title (#PCDATA)> -<!ELEMENT package (description?, author*, copyright?, version?, - since?, deprecated?, see*, reference*, todo?, - modules)> -<!ATTLIST package - name CDATA #REQUIRED - root CDATA #IMPLIED> - <!ELEMENT modules (module+)> diff --git a/lib/edoc/priv/stylesheet.css b/lib/edoc/priv/stylesheet.css index e426a90483..ab170c091f 100644 --- a/lib/edoc/priv/stylesheet.css +++ b/lib/edoc/priv/stylesheet.css @@ -27,10 +27,10 @@ div.spec { margin-left: 2em; background-color: #eeeeee; } -a.module,a.package { +a.module { text-decoration:none } -a.module:hover,a.package:hover { +a.module:hover { background-color: #eeeeee; } ul.definitions { diff --git a/lib/edoc/src/edoc.erl b/lib/edoc/src/edoc.erl index 983f04e8b6..88e7ab5346 100644 --- a/lib/edoc/src/edoc.erl +++ b/lib/edoc/src/edoc.erl @@ -24,12 +24,11 @@ %% TODO: option for ignoring functions matching some pattern ('..._test_'/0) %% TODO: @private_type tag, opaque unless generating private docs? %% TODO: document the record type syntax -%% TODO: some 'skip' option for ignoring particular modules/packages? -%% TODO: intermediate-level packages: document even if no local sources. +%% TODO: some 'skip' option for ignoring particular modules? %% TODO: multiline comment support (needs modified comment representation) %% TODO: config-file for default settings %% TODO: config: locations of all local docdirs; generate local doc-index page -%% TODO: config: URL:s of offline packages/apps +%% TODO: config: URL:s of offline apps %% TODO: config: default stylesheet %% TODO: config: default header/footer, etc. %% TODO: offline linkage @@ -45,10 +44,10 @@ -module(edoc). 
--export([packages/1, packages/2, files/1, files/2, +-export([files/1, files/2, application/1, application/2, application/3, toc/1, toc/2, toc/3, - run/3, + run/2, file/1, file/2, read/1, read/2, layout/1, layout/2, @@ -68,15 +67,15 @@ file(Name) -> file(Name, []). -%% @spec file(filename(), proplist()) -> ok +%% @spec file(filename(), proplist()) -> ok %% %% @type filename() = //kernel/file:filename() %% @type proplist() = [term()] %% %% @deprecated This is part of the old interface to EDoc and is mainly %% kept for backwards compatibility. The preferred way of generating -%% documentation is through one of the functions {@link application/2}, -%% {@link packages/2} and {@link files/2}. +%% documentation is through one of the functions {@link application/2} +%% and {@link files/2}. %% %% @doc Reads a source code file and outputs formatted documentation to %% a corresponding file. @@ -121,44 +120,24 @@ file(Name, Options) -> ?DEFAULT_FILE_SUFFIX), Dir = proplists:get_value(dir, Options, filename:dirname(Name)), Encoding = [{encoding, edoc_lib:read_encoding(Name, [])}], - edoc_lib:write_file(Text, Dir, BaseName ++ Suffix, '', Encoding). + edoc_lib:write_file(Text, Dir, BaseName ++ Suffix, Encoding). -%% TODO: better documentation of files/1/2, packages/1/2, application/1/2/3 +%% TODO: better documentation of files/1/2, application/1/2/3 -%% @spec (Files::[filename() | {package(), [filename()]}]) -> ok -%% @equiv packages(Packages, []) +%% @spec (Files::[filename()]) -> ok files(Files) -> files(Files, []). -%% @spec (Files::[filename() | {package(), [filename()]}], +%% @spec (Files::[filename()], %% Options::proplist()) -> ok -%% @doc Runs EDoc on a given set of source files. See {@link run/3} for +%% @doc Runs EDoc on a given set of source files. See {@link run/2} for %% details, including options. %% @equiv run([], Files, Options) files(Files, Options) -> - run([], Files, Options). - -%% @spec (Packages::[package()]) -> ok -%% @equiv packages(Packages, []) - -packages(Packages) -> - packages(Packages, []). - -%% @spec (Packages::[package()], Options::proplist()) -> ok -%% @type package() = atom() | string() -%% -%% @doc Runs EDoc on a set of packages. The `source_path' option is used -%% to locate the files; see {@link run/3} for details, including -%% options. This function automatically appends the current directory to -%% the source path. -%% -%% @equiv run(Packages, [], Options) - -packages(Packages, Options) -> - run(Packages, [], Options ++ [{source_path, [?CURRENT_DIR]}]). + run(Files, Options). %% @spec (Application::atom()) -> ok %% @equiv application(Application, []) @@ -194,7 +173,7 @@ application(App, Options) when is_atom(App) -> %% subdirectory, if it exists, or otherwise in the application %% directory itself. %% </li> -%% <li>The {@link run/3. `subpackages'} option is turned on. All found +%% <li>The {@link run/2. `subpackages'} option is turned on. All found %% source files will be processed. %% </li> %% <li>The `include' subdirectory is automatically added to the @@ -203,7 +182,7 @@ application(App, Options) when is_atom(App) -> %% </li> %% </ul> %% -%% See {@link run/3} for details, including options. +%% See {@link run/2} for details, including options. %% %% @see application/2 @@ -219,7 +198,7 @@ application(App, Dir, Options) when is_atom(App) -> {includes, [filename:join(Dir, "include")]}], Opts1 = set_app_default(App, Dir, Opts), %% Recursively document all subpackages of '' - i.e., everything. - run([''], [], [{application, App} | Opts1]). 
+ run([], [{application, App} | Opts1]). %% Try to set up a default application base URI in a smart way if the %% user has not specified it explicitly. @@ -240,31 +219,20 @@ set_app_default(App, Dir0, Opts) -> Opts end. -%% If no source files are found for a (specified) package, no package -%% documentation will be generated either (even if there is a -%% package-documentation file). This is the way it should be. For -%% specified files, use empty package (unless otherwise specified). The -%% assumed package is always used for creating the output. If the actual -%% module or package of the source differs from the assumption gathered -%% from the path and file name, a warning should be issued (since links -%% are likely to be incorrect). - opt_defaults() -> - [packages]. + []. opt_negations() -> [{no_preprocess, preprocess}, {no_subpackages, subpackages}, - {no_report_missing_types, report_missing_types}, - {no_packages, packages}]. + {no_report_missing_types, report_missing_types}]. -%% @spec run(Packages::[package()], -%% Files::[filename() | {package(), [filename()]}], +%% @spec run(Files::[filename()], %% Options::proplist()) -> ok -%% @doc Runs EDoc on a given set of source files and/or packages. Note +%% @doc Runs EDoc on a given set of source files. Note %% that the doclet plugin module has its own particular options; see the %% `doclet' option below. -%% +%% %% Also see {@link layout/2} for layout-related options, and %% {@link get_doc/2} for options related to reading source %% files. @@ -298,11 +266,6 @@ opt_negations() -> %% The default doclet module is {@link edoc_doclet}; see {@link %% edoc_doclet:run/2} for doclet-specific options. %% </dd> -%% <dt>{@type {exclude_packages, [package()]@}} -%% </dt> -%% <dd>Lists packages to be excluded from the documentation. Typically -%% used in conjunction with the `subpackages' option. -%% </dd> %% <dt>{@type {file_suffix, string()@}} %% </dt> %% <dd>Specifies the suffix used for output files. The default value is @@ -314,22 +277,6 @@ opt_negations() -> %% target directory will be ignored and overwritten. The default %% value is `false'. %% </dd> -%% <dt>{@type {packages, boolean()@}} -%% </dt> -%% <dd>If the value is `true', it it assumed that packages (module -%% namespaces) are being used, and that the source code directory -%% structure reflects this. The default value is `true'. (Usually, -%% this does the right thing even if all the modules belong to the -%% top-level "empty" package.) `no_packages' is an alias for -%% `{packages, false}'. See the `subpackages' option below for -%% further details. -%% -%% If the source code is organized in a hierarchy of -%% subdirectories although it does not use packages, use -%% `no_packages' together with the recursive-search `subpackages' -%% option (on by default) to automatically generate documentation -%% for all the modules. -%% </dd> %% <dt>{@type {source_path, [filename()]@}} %% </dt> %% <dd>Specifies a list of file system paths used to locate the source @@ -345,7 +292,7 @@ opt_negations() -> %% <dd>If the value is `true', all subpackages of specified packages %% will also be included in the documentation. The default value is %% `false'. `no_subpackages' is an alias for `{subpackages, -%% false}'. See also the `exclude_packages' option. +%% false}'. 
%% %% Subpackage source files are found by recursively searching %% for source code files in subdirectories of the known source code @@ -358,38 +305,31 @@ opt_negations() -> %% </dl> %% %% @see files/2 -%% @see packages/2 %% @see application/2 %% NEW-OPTIONS: source_path, application %% INHERIT-OPTIONS: init_context/1 %% INHERIT-OPTIONS: expand_sources/2 %% INHERIT-OPTIONS: target_dir_info/5 -%% INHERIT-OPTIONS: edoc_lib:find_sources/3 +%% INHERIT-OPTIONS: edoc_lib:find_sources/2 %% INHERIT-OPTIONS: edoc_lib:run_doclet/2 -%% INHERIT-OPTIONS: edoc_lib:get_doc_env/4 +%% INHERIT-OPTIONS: edoc_lib:get_doc_env/3 -run(Packages, Files, Opts0) -> +run(Files, Opts0) -> Opts = expand_opts(Opts0), Ctxt = init_context(Opts), Dir = Ctxt#context.dir, Path = proplists:append_values(source_path, Opts), - Ss = sources(Path, Packages, Opts), + Ss = sources(Path, Opts), {Ss1, Ms} = expand_sources(expand_files(Files) ++ Ss, Opts), - Ps = [P || {_, P, _, _} <- Ss1], App = proplists:get_value(application, Opts, ?NO_APP), - {App1, Ps1, Ms1} = target_dir_info(Dir, App, Ps, Ms, Opts), - %% The "empty package" is never included in the list of packages. - Ps2 = edoc_lib:unique(lists:sort(Ps1)) -- [''], + {App1, Ms1} = target_dir_info(Dir, App, Ms, Opts), Ms2 = edoc_lib:unique(lists:sort(Ms1)), - Fs = package_files(Path, Ps2), - Env = edoc_lib:get_doc_env(App1, Ps2, Ms2, Opts), + Env = edoc_lib:get_doc_env(App1, Ms2, Opts), Ctxt1 = Ctxt#context{env = Env}, Cmd = #doclet_gen{sources = Ss1, app = App1, - packages = Ps2, - modules = Ms2, - filemap = Fs + modules = Ms2 }, F = fun (M) -> M:run(Cmd, Ctxt1) @@ -401,42 +341,22 @@ expand_opts(Opts0) -> Opts0 ++ opt_defaults()). %% NEW-OPTIONS: dir -%% DEFER-OPTIONS: run/3 +%% DEFER-OPTIONS: run/2 init_context(Opts) -> #context{dir = proplists:get_value(dir, Opts, ?CURRENT_DIR), opts = Opts }. -%% INHERIT-OPTIONS: edoc_lib:find_sources/3 - -sources(Path, Packages, Opts) -> - lists:foldl(fun (P, Xs) -> - edoc_lib:find_sources(Path, P, Opts) ++ Xs - end, - [], Packages). - -package_files(Path, Packages) -> - Name = ?PACKAGE_FILE, % this is hard-coded for now - D = lists:foldl(fun (P, D) -> - F = edoc_lib:find_file(Path, P, Name), - dict:store(P, F, D) - end, - dict:new(), Packages), - fun (P) -> - case dict:find(P, D) of - {ok, F} -> F; - error -> "" - end - end. +%% INHERIT-OPTIONS: edoc_lib:find_sources/2 + +sources(Path, Opts) -> + edoc_lib:find_sources(Path, Opts). %% Expand user-specified sets of files. -expand_files([{P, Fs1} | Fs]) -> - [{P, filename:basename(F), filename:dirname(F)} || F <- Fs1] - ++ expand_files(Fs); expand_files([F | Fs]) -> - [{'', filename:basename(F), filename:dirname(F)} | + [{filename:basename(F), filename:dirname(F)} | expand_files(Fs)]; expand_files([]) -> []. @@ -444,26 +364,23 @@ expand_files([]) -> %% Create the (assumed) full module names. Keep only the first source %% for each module, but preserve the order of the list. -%% NEW-OPTIONS: source_suffix, packages -%% DEFER-OPTIONS: run/3 +%% NEW-OPTIONS: source_suffix +%% DEFER-OPTIONS: run/2 expand_sources(Ss, Opts) -> Suffix = proplists:get_value(source_suffix, Opts, ?DEFAULT_SOURCE_SUFFIX), - Ss1 = case proplists:get_bool(packages, Opts) of - true -> Ss; - false -> [{'',F,D} || {_P,F,D} <- Ss] - end, + Ss1 = [{F,D} || {F,D} <- Ss], expand_sources(Ss1, Suffix, sets:new(), [], []). 
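With package support removed, edoc:files/2 and edoc:application/2,3 both end up in the new run/2. A usage sketch from the Erlang shell (the paths, application name and options below are examples only):

    1> edoc:files(["src/foo.erl", "src/bar.erl"], [{dir, "doc"}]).
    ok
    2> edoc:application(my_app, "/path/to/my_app", [{preprocess, true}]).
    ok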
-expand_sources([{'', F, D} | Fs], Suffix, S, As, Ms) -> +expand_sources([{F, D} | Fs], Suffix, S, As, Ms) -> M = list_to_atom(filename:rootname(F, Suffix)), case sets:is_element(M, S) of true -> expand_sources(Fs, Suffix, S, As, Ms); false -> S1 = sets:add_element(M, S), - expand_sources(Fs, Suffix, S1, [{M, '', F, D} | As], + expand_sources(Fs, Suffix, S1, [{M, F, D} | As], [M | Ms]) end; expand_sources([], _Suffix, _S, As, Ms) -> @@ -471,16 +388,15 @@ expand_sources([], _Suffix, _S, As, Ms) -> %% NEW-OPTIONS: new -target_dir_info(Dir, App, Ps, Ms, Opts) -> +target_dir_info(Dir, App, Ms, Opts) -> case proplists:get_bool(new, Opts) of true -> - {App, Ps, Ms}; + {App, Ms}; false -> - {App1, Ps1, Ms1} = edoc_lib:read_info_file(Dir), + {App1, Ms1} = edoc_lib:read_info_file(Dir), {if App == ?NO_APP -> App1; true -> App end, - Ps ++ Ps1, Ms ++ Ms1} end. @@ -505,12 +421,12 @@ toc(Dir, Opts) -> %% INHERIT-OPTIONS: init_context/1 %% INHERIT-OPTIONS: edoc_lib:run_doclet/2 -%% INHERIT-OPTIONS: edoc_lib:get_doc_env/4 +%% INHERIT-OPTIONS: edoc_lib:get_doc_env/3 toc(Dir, Paths, Opts0) -> Opts = expand_opts(Opts0 ++ [{dir, Dir}]), Ctxt = init_context(Opts), - Env = edoc_lib:get_doc_env('', [], [], Opts), + Env = edoc_lib:get_doc_env('', [], Opts), Ctxt1 = Ctxt#context{env = Env}, F = fun (M) -> M:run(#doclet_toc{paths=Paths}, Ctxt1) @@ -562,7 +478,7 @@ layout(Doc) -> %% </dl> %% %% @see layout/1 -%% @see run/3 +%% @see run/2 %% @see read/2 %% @see file/2 @@ -853,16 +769,16 @@ get_doc(File) -> %% </dl> %% %% See {@link read_source/2}, {@link read_comments/2} and {@link -%% edoc_lib:get_doc_env/4} for further options. +%% edoc_lib:get_doc_env/3} for further options. %% %% @see get_doc/3 -%% @see run/3 +%% @see run/2 %% @see edoc_extract:source/5 %% @see read/2 %% @see layout/2 %% INHERIT-OPTIONS: get_doc/3 -%% INHERIT-OPTIONS: edoc_lib:get_doc_env/4 +%% INHERIT-OPTIONS: edoc_lib:get_doc_env/3 get_doc(File, Opts) -> Env = edoc_lib:get_doc_env(Opts), @@ -874,7 +790,7 @@ get_doc(File, Opts) -> %% %% @doc Like {@link get_doc/2}, but for a given environment %% parameter. `Env' is an environment created by {@link -%% edoc_lib:get_doc_env/4}. +%% edoc_lib:get_doc_env/3}. %% INHERIT-OPTIONS: read_source/2, read_comments/2, edoc_extract:source/5 %% DEFER-OPTIONS: get_doc/2 diff --git a/lib/edoc/src/edoc.hrl b/lib/edoc/src/edoc.hrl index 44c5d6fef4..5b0fb68cf9 100644 --- a/lib/edoc/src/edoc.hrl +++ b/lib/edoc/src/edoc.hrl @@ -1,6 +1,6 @@ %% ===================================================================== %% Header file for EDoc -%% +%% %% Copyright (C) 2001-2004 Richard Carlsson %% %% This library is free software; you can redistribute it and/or modify @@ -25,9 +25,7 @@ -define(APPLICATION, edoc). -define(INFO_FILE, "edoc-info"). --define(PACKAGE_FILE, "package.edoc"). -define(OVERVIEW_FILE, "overview.edoc"). --define(PACKAGE_SUMMARY, "package-summary"). -define(DEFAULT_SOURCE_SUFFIX, ".erl"). -define(DEFAULT_FILE_SUFFIX, ".html"). -define(DEFAULT_DOCLET, edoc_doclet). @@ -65,13 +63,10 @@ %% Environment for generating documentation data -record(env, {module = [], - package = [], root = "", file_suffix, - package_summary, apps, modules, - packages, app_default, macros = [], includes = [] diff --git a/lib/edoc/src/edoc_data.erl b/lib/edoc/src/edoc_data.erl index f88ba05f4b..b797d74a71 100644 --- a/lib/edoc/src/edoc_data.erl +++ b/lib/edoc/src/edoc_data.erl @@ -26,7 +26,7 @@ -module(edoc_data). --export([module/4, package/4, overview/4, type/2]). +-export([module/4, overview/4, type/2]). 
-export([hidden_filter/2, get_all_tags/1]). @@ -173,21 +173,34 @@ callbacks(Es, Module, Env, Opts) -> lists:keymember(callback, 1, Module#module.attributes) of true -> - try (Module#module.name):behaviour_info(callbacks) of - Fs -> - Fs1 = [{F,A} || {F,A} <- Fs, is_atom(F), is_integer(A)], - if Fs1 =:= [] -> - []; - true -> - [{callbacks, - [callback(F, Env, Opts) || F <- Fs1]}] - end - catch - _:_ -> [] - end; + M = Module#module.name, + Fs = get_callback_functions(M, callbacks), + Os1 = get_callback_functions(M, optional_callbacks), + Fs1 = [FA || FA <- Fs, not lists:member(FA, Os1)], + Req = if Fs1 =:= [] -> + []; + true -> + [{callbacks, + [callback(FA, Env, Opts) || FA <- Fs1]}] + end, + Opt = if Os1 =:= [] -> + []; + true -> + [{optional_callbacks, + [callback(FA, Env, Opts) || FA <- Os1]}] + end, + Req ++ Opt; false -> [] end. +get_callback_functions(M, Callbacks) -> + try + [FA || {F, A} = FA <- M:behaviour_info(Callbacks), + is_atom(F), is_integer(A), A >= 0] + catch + _:_ -> [] + end. + %% <!ELEMENT callback EMPTY> %% <!ATTLIST callback %% name CDATA #REQUIRED @@ -497,41 +510,14 @@ get_tags(_, []) -> []. type(T, Env) -> xmerl_lib:expand_element({type, [edoc_types:to_xml(T, Env)]}). -%% <!ELEMENT package (description?, author*, copyright?, version?, -%% since?, deprecated?, see*, reference*, todo?, -%% modules)> -%% <!ATTLIST package -%% name CDATA #REQUIRED -%% root CDATA #IMPLIED> -%% <!ELEMENT modules (module+)> - -package(Package, Tags, Env, Opts) -> - Env1 = Env#env{package = Package, - root = edoc_refs:relative_package_path('', Package)}, - xmerl_lib:expand_element(package_1(Package, Tags, Env1, Opts)). - -package_1(Package, Tags, Env, Opts) -> - {package, [{root, Env#env.root}], - ([{packageName, [atom_to_list(Package)]}] - ++ get_doc(Tags) - ++ authors(Tags) - ++ get_copyright(Tags) - ++ get_version(Tags) - ++ get_since(Tags) - ++ get_deprecated(Tags) - ++ sees(Tags, Env) - ++ references(Tags) - ++ todos(Tags, Opts)) - }. - %% <!ELEMENT overview (title, description?, author*, copyright?, version?, -%% since?, see*, reference*, todo?, packages, modules)> +%% since?, see*, reference*, todo?, modules)> %% <!ATTLIST overview %% root CDATA #IMPLIED> %% <!ELEMENT title (#PCDATA)> overview(Title, Tags, Env, Opts) -> - Env1 = Env#env{package = '', + Env1 = Env#env{ root = ""}, xmerl_lib:expand_element(overview_1(Title, Tags, Env1, Opts)). diff --git a/lib/edoc/src/edoc_doclet.erl b/lib/edoc/src/edoc_doclet.erl index 5653b5894b..5961ca8cc0 100644 --- a/lib/edoc/src/edoc_doclet.erl +++ b/lib/edoc/src/edoc_doclet.erl @@ -42,9 +42,7 @@ -define(DEFAULT_FILE_SUFFIX, ".html"). -define(INDEX_FILE, "index.html"). -define(OVERVIEW_FILE, "overview.edoc"). --define(PACKAGE_SUMMARY, "package-summary.html"). -define(OVERVIEW_SUMMARY, "overview-summary.html"). --define(PACKAGES_FRAME, "packages-frame.html"). -define(MODULES_FRAME, "modules-frame.html"). -define(STYLESHEET, "stylesheet.css"). -define(IMAGE, "erlang.png"). @@ -52,11 +50,10 @@ -include_lib("xmerl/include/xmerl.hrl"). -%% Sources is the list of inputs in the order they were found. Packages -%% and Modules are sorted lists of atoms without duplicates. (They +%% Sources is the list of inputs in the order they were found. +%% Modules are sorted lists of atoms without duplicates. (They %% usually include the data from the edoc-info file in the target -%% directory, if it exists.) Note that the "empty package" is never -%% included in Packages! +%% directory, if it exists.) 
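The callbacks/4 rework above splits required from optional callbacks as reported by behaviour_info/1. A sketch of a behaviour module that yields both lists (the module and callback names are hypothetical):

    -module(my_behaviour).

    -callback init(Args :: term()) -> {ok, State :: term()}.
    -callback handle(Event :: term(), State :: term()) -> {ok, NewState :: term()}.
    -callback format_status(State :: term()) -> term().
    -optional_callbacks([format_status/1]).

    %% The compiler derives behaviour_info/1 from the attributes:
    %%   my_behaviour:behaviour_info(callbacks)
    %%     -> [{init,1},{handle,2},{format_status,1}]
    %%   my_behaviour:behaviour_info(optional_callbacks)
    %%     -> [{format_status,1}]
    %% so the required set documented by EDoc is callbacks minus
    %% optional_callbacks, as computed in callbacks/4 above.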
%% @spec (Command::doclet_gen() | doclet_toc(), edoc_context()) -> ok %% @doc Main doclet entry point. See the file <a @@ -117,14 +114,12 @@ run(#doclet_gen{}=Cmd, Ctxt) -> gen(Cmd#doclet_gen.sources, Cmd#doclet_gen.app, - Cmd#doclet_gen.packages, Cmd#doclet_gen.modules, - Cmd#doclet_gen.filemap, Ctxt); run(#doclet_toc{}=Cmd, Ctxt) -> toc(Cmd#doclet_toc.paths, Ctxt). -gen(Sources, App, Packages, Modules, FileMap, Ctxt) -> +gen(Sources, App, Modules, Ctxt) -> Dir = Ctxt#context.dir, Env = Ctxt#context.env, Options = Ctxt#context.opts, @@ -132,11 +127,9 @@ gen(Sources, App, Packages, Modules, FileMap, Ctxt) -> CSS = stylesheet(Options), {Modules1, Error} = sources(Sources, Dir, Modules, Env, Options), modules_frame(Dir, Modules1, Title, CSS), - packages(Packages, Dir, FileMap, Env, Options), - packages_frame(Dir, Packages, Title, CSS), overview(Dir, Title, Env, Options), - index_file(Dir, length(Packages) > 1, Title), - edoc_lib:write_info_file(App, Packages, Modules1, Dir), + index_file(Dir, Title), + edoc_lib:write_info_file(App, Modules1, Dir), copy_stylesheet(Dir, Options), copy_image(Dir), %% handle postponed error during processing of source files @@ -182,19 +175,19 @@ sources(Sources, Dir, Modules, Env, Options) -> %% set if it was successful. Errors are just flagged at this stage, %% allowing all source files to be processed even if some of them fail. -source({M, P, Name, Path}, Dir, Suffix, Env, Set, Private, Hidden, +source({M, Name, Path}, Dir, Suffix, Env, Set, Private, Hidden, Error, Options) -> File = filename:join(Path, Name), case catch {ok, edoc:get_doc(File, Env, Options)} of {ok, {Module, Doc}} -> - check_name(Module, M, P, File), + check_name(Module, M, File), case ((not is_private(Doc)) orelse Private) andalso ((not is_hidden(Doc)) orelse Hidden) of true -> Text = edoc:layout(Doc, Options), Name1 = atom_to_list(M) ++ Suffix, Encoding = [{encoding,encoding(Doc)}], - edoc_lib:write_file(Text, Dir, Name1, P, Encoding), + edoc_lib:write_file(Text, Dir, Name1, Encoding), {sets:add_element(Module, Set), Error}; false -> {Set, Error} @@ -204,8 +197,7 @@ source({M, P, Name, Path}, Dir, Suffix, Env, Set, Private, Hidden, {Set, true} end. -check_name(M, M0, P0, File) -> - P = '', +check_name(M, M0, File) -> N = M, N0 = M0, case N of @@ -222,47 +214,12 @@ check_name(M, M0, P0, File) -> ok end end, - if P =/= P0 -> - warning("file '~ts' belongs to package '~s', not '~s'.", - [File, P, P0]); - true -> - ok - end. - - -%% Generating the summary files for packages. - -%% INHERIT-OPTIONS: read_file/4 -%% INHERIT-OPTIONS: edoc_lib:run_layout/2 - -packages(Packages, Dir, FileMap, Env, Options) -> - lists:foreach(fun (P) -> - package(P, Dir, FileMap, Env, Options) - end, - Packages). - -package(P, Dir, FileMap, Env, Opts) -> - Tags = case FileMap(P) of - "" -> - []; - File -> - read_file(File, package, Env, Opts) - end, - Data = edoc_data:package(P, Tags, Env, Opts), - F = fun (M) -> - M:package(Data, Opts) - end, - Text = edoc_lib:run_layout(F, Opts), - edoc_lib:write_file(Text, Dir, ?PACKAGE_SUMMARY, P). - + ok. %% Creating an index file, with some frames optional. 
%% TODO: get rid of frames, or change doctype to Frameset -index_file(Dir, Packages, Title) -> - Frame1 = {frame, [{src,?PACKAGES_FRAME}, - {name,"packagesFrame"},{title,""}], - []}, +index_file(Dir, Title) -> Frame2 = {frame, [{src,?MODULES_FRAME}, {name,"modulesFrame"},{title,""}], []}, @@ -270,16 +227,7 @@ index_file(Dir, Packages, Title) -> {name,"overviewFrame"},{title,""}], []}, Frameset = {frameset, [{cols,"20%,80%"}], - case Packages of - true -> - [?NL, - {frameset, [{rows,"30%,70%"}], - [?NL, Frame1, ?NL, Frame2, ?NL]} - ]; - false -> - [?NL, Frame2, ?NL] - end - ++ [?NL, Frame3, ?NL, + [?NL, Frame2, ?NL, ?NL, Frame3, ?NL, {noframes, [?NL, {h2, ["This page uses frames"]}, @@ -296,24 +244,6 @@ index_file(Dir, Packages, Title) -> Text = xmerl:export_simple([XML], xmerl_html, []), edoc_lib:write_file(Text, Dir, ?INDEX_FILE). -packages_frame(Dir, Ps, Title, CSS) -> - Body = [?NL, - {h2, [{class, "indextitle"}], ["Packages"]}, - ?NL, - {table, [{width, "100%"}, {border, 0}, - {summary, "list of packages"}], - lists:concat( - [[?NL, - {tr, [{td, [], [{a, [{href, package_ref(P)}, - {target,"overviewFrame"}, - {class, "package"}], - [atom_to_list(P)]}]}]}] - || P <- Ps])}, - ?NL], - XML = xhtml(Title, CSS, Body), - Text = xmerl:export_simple([XML], xmerl_html, []), - edoc_lib:write_file(Text, Dir, ?PACKAGES_FRAME). - modules_frame(Dir, Ms, Title, CSS) -> Body = [?NL, {h2, [{class, "indextitle"}], ["Modules"]}, @@ -334,11 +264,7 @@ modules_frame(Dir, Ms, Title, CSS) -> edoc_lib:write_file(Text, Dir, ?MODULES_FRAME). module_ref(M) -> - edoc_refs:relative_package_path(M, '') ++ ?DEFAULT_FILE_SUFFIX. - -package_ref(P) -> - edoc_lib:join_uri(edoc_refs:relative_package_path(P, ''), - ?PACKAGE_SUMMARY). + atom_to_list(M) ++ ?DEFAULT_FILE_SUFFIX. xhtml(Title, CSS, Content) -> xhtml_1(Title, CSS, {body, [{bgcolor, "white"}], Content}). @@ -372,7 +298,7 @@ overview(Dir, Title, Env, Opts) -> end, Text = edoc_lib:run_layout(F, Opts), EncOpts = [{encoding,Encoding}], - edoc_lib:write_file(Text, Dir, ?OVERVIEW_SUMMARY, '', EncOpts). + edoc_lib:write_file(Text, Dir, ?OVERVIEW_SUMMARY, EncOpts). copy_image(Dir) -> case code:priv_dir(?EDOC_APP) of @@ -505,7 +431,7 @@ app_index_file(Paths, Dir, Env, Options) -> % Priv = proplists:get_bool(private, Options), CSS = stylesheet(Options), Apps1 = [{filename:dirname(A),filename:basename(A)} || A <- Paths], - index_file(Dir, false, Title), + index_file(Dir, Title), application_frame(Dir, Apps1, Title, CSS), modules_frame(Dir, [], Title, CSS), overview(Dir, Title, Env, Options), diff --git a/lib/edoc/src/edoc_extract.erl b/lib/edoc/src/edoc_extract.erl index 67a95e80aa..758750083d 100644 --- a/lib/edoc/src/edoc_extract.erl +++ b/lib/edoc/src/edoc_extract.erl @@ -91,7 +91,7 @@ source(Forms, Comments, File, Env, Opts) -> %% type `form_list', or a list of syntax trees representing %% "program forms" (cf. {@link edoc:read_source/2}. %% `Env' is an environment created by {@link -%% edoc_lib:get_doc_env/4}. The `File' argument is used for +%% edoc_lib:get_doc_env/3}. The `File' argument is used for %% error reporting and output file name generation only. 
%% %% See {@link edoc:get_doc/2} for descriptions of the `def', @@ -121,10 +121,8 @@ source1(Tree, File0, Env, Opts, TypeDocs) -> Module = get_module_info(Tree, File), {Header, Footer, Entries} = collect(Forms, Module), Name = Module#module.name, - Package = '', Env1 = Env#env{module = Name, - package = Package, - root = edoc_refs:relative_package_path('', Package)}, + root = ""}, Env2 = add_macro_defs(module_macros(Env1), Opts, Env1), Entries1 = get_tags([Header, Footer | Entries], Env2, File, TypeDocs), Entries2 = edoc_specs:add_data(Entries1, Opts, File, Module), @@ -218,13 +216,13 @@ add_macro_defs(Defs0, Opts, Env) -> %% @spec file(File::filename(), Context, Env::edoc_env(), %% Options::proplist()) -> {ok, Tags} | {error, Reason} -%% Context = overview | package +%% Context = overview %% Tags = [term()] %% Reason = term() %% %% @doc Reads a text file and returns the list of tags in the file. Any %% lines of text before the first tag are ignored. `Env' is an -%% environment created by {@link edoc_lib:get_doc_env/4}. Upon error, +%% environment created by {@link edoc_lib:get_doc_env/3}. Upon error, %% `Reason' is an atom returned from the call to {@link %% //kernel/file:read_file/1} or the atom 'invalid_unicode'. %% @@ -249,12 +247,12 @@ file(File, Context, Env, Opts) -> %% @spec (Text::string(), Context, Env::edoc_env(), %% Options::proplist()) -> Tags -%% Context = overview | package +%% Context = overview %% Tags = [term()] %% %% @doc Returns the list of tags in the text. Any lines of text before %% the first tag are ignored. `Env' is an environment created by {@link -%% edoc_lib:get_doc_env/4}. +%% edoc_lib:get_doc_env/3}. %% %% See {@link source/4} for a description of the `def' option. @@ -353,8 +351,6 @@ preprocess_forms_2(F, Fs) -> [F | preprocess_forms_1(Fs)]; {function, _} -> [F | preprocess_forms_1(Fs)]; - {rule, _} -> - [F | preprocess_forms_1(Fs)]; {attribute, {module, _}} -> [F | preprocess_forms_1(Fs)]; text -> @@ -392,15 +388,6 @@ collect([F | Fs], Cs, Ss, Ts, As, Header, Mod) -> export = Export, data = {comment_text(Cs),Ss,Ts}} | As], Header, Mod); - {rule, Name} -> - L = erl_syntax:get_pos(F), - Export = ordsets:is_element(Name, Mod#module.exports), - Args = parameters(erl_syntax:rule_clauses(F)), - collect(Fs, [], [], [], - [#entry{name = Name, args = Args, line = L, - export = Export, - data = {comment_text(Cs),Ss,Ts}} | As], - Header, Mod); {attribute, {module, _}} when Header =:= undefined -> L = erl_syntax:get_pos(F), collect(Fs, [], [], [], As, diff --git a/lib/edoc/src/edoc_layout.erl b/lib/edoc/src/edoc_layout.erl index a102d432bc..6309e88475 100644 --- a/lib/edoc/src/edoc_layout.erl +++ b/lib/edoc/src/edoc_layout.erl @@ -27,7 +27,7 @@ -module(edoc_layout). --export([module/2, package/2, overview/2, type/1]). +-export([module/2, overview/2, type/1]). -import(edoc_report, [report/2]). @@ -701,6 +701,8 @@ deprecated(Es, S) -> end. 
behaviours(Es, Name) -> + CBs = get_content(callbacks, Es), + OCBs = get_content(optional_callbacks, Es), (case get_elem(behaviour, Es) of [] -> []; Es1 -> @@ -709,13 +711,24 @@ behaviours(Es, Name) -> ?NL] end ++ - case get_content(callbacks, Es) of - [] -> []; - Es1 -> + if CBs =:= [], OCBs =:= [] -> + []; + true -> + Req = if CBs =:= [] -> + []; + true -> + [br, " Required callback functions: "] + ++ seq(fun callback/1, CBs, ["."]) + end, + Opt = if OCBs =:= [] -> + []; + true -> + [br, " Optional callback functions: "] + ++ seq(fun callback/1, OCBs, ["."]) + end, [{p, ([{b, ["This module defines the ", {tt, [Name]}, - " behaviour."]}, - br, " Required callback functions: "] - ++ seq(fun callback/1, Es1, ["."]))}, + " behaviour."]}] + ++ Req ++ Opt)}, ?NL] end). @@ -965,9 +978,6 @@ get_text(Name, Es) -> local_label(R) -> "#" ++ R. -xhtml(Title, CSS, Body) -> - xhtml(Title, CSS, Body, "latin1"). - xhtml(Title, CSS, Body, Encoding) -> EncString = case Encoding of "latin1" -> "ISO-8859-1"; @@ -997,27 +1007,6 @@ type(E, Ds) -> xmerl:export_simple_content(t_utype_elem(E) ++ local_defs(Ds, Opts), ?HTML_EXPORT). -package(E=#xmlElement{name = package, content = Es}, Options) -> - Opts = init_opts(E, Options), - Name = get_text(packageName, Es), - Title = ["Package ", Name], - Desc = get_content(description, Es), -% ShortDesc = get_content(briefDescription, Desc), - FullDesc = get_content(fullDescription, Desc), - Body = ([?NL, {h1, [Title]}, ?NL] -% ++ ShortDesc - ++ copyright(Es) - ++ deprecated(Es, "package") - ++ version(Es) - ++ since(Es) - ++ authors(Es) - ++ references(Es) - ++ sees(Es) - ++ todos(Es) - ++ FullDesc), - XML = xhtml(Title, stylesheet(Opts), Body), - xmerl:export_simple(XML, ?HTML_EXPORT, []). - overview(E=#xmlElement{name = overview, content = Es}, Options) -> Opts = init_opts(E, Options), Title = [get_text(title, Es)], diff --git a/lib/edoc/src/edoc_lib.erl b/lib/edoc/src/edoc_lib.erl index c46338a2e1..c248964dc4 100644 --- a/lib/edoc/src/edoc_lib.erl +++ b/lib/edoc/src/edoc_lib.erl @@ -29,9 +29,9 @@ get_first_sentence/1, is_space/1, strip_space/1, parse_expr/2, parse_contact/2, escape_uri/1, join_uri/2, is_relative_uri/1, is_name/1, to_label/1, find_doc_dirs/0, find_sources/2, - find_sources/3, find_file/3, try_subdir/2, unique/1, - write_file/3, write_file/4, write_file/5, write_info_file/4, - read_info_file/1, get_doc_env/1, get_doc_env/4, copy_file/2, + find_file/2, try_subdir/2, unique/1, + write_file/3, write_file/4, write_info_file/3, + read_info_file/1, get_doc_env/1, get_doc_env/3, copy_file/2, uri_get/1, run_doclet/2, run_layout/2, simplify_path/1, timestr/1, datestr/1, read_encoding/2]). @@ -266,13 +266,6 @@ is_name_1([$_ | Cs]) -> is_name_1([]) -> true; is_name_1(_) -> false. -to_atom(A) when is_atom(A) -> A; -to_atom(S) when is_list(S) -> list_to_atom(S). - -to_list(A) when is_atom(A) -> atom_to_list(A); -to_list(S) when is_list(S) -> S. - - %% @private unique([X | Xs]) -> [X | unique(Xs, X)]; unique([]) -> []. @@ -674,7 +667,7 @@ simplify_path(P) -> try_subdir(Dir, Subdir) -> D = filename:join(Dir, Subdir), case filelib:is_dir(D) of - true -> D; + true -> D; false -> Dir end. @@ -686,19 +679,10 @@ try_subdir(Dir, Subdir) -> %% @private write_file(Text, Dir, Name) -> - write_file(Text, Dir, Name, ''). - -%% @spec (Text::deep_string(), Dir::edoc:filename(), -%% Name::edoc:filename(), Package::atom()|string()) -> ok -%% @doc Like {@link write_file/3}, but adds path components to the target -%% directory corresponding to the specified package. 
-%% @private + write_file(Text, Dir, Name, [{encoding,latin1}]). -write_file(Text, Dir, Name, Package) -> - write_file(Text, Dir, Name, Package, [{encoding,latin1}]). - -write_file(Text, Dir, Name, Package, Options) -> - File = filename:join([Dir, to_list(Package), Name]), +write_file(Text, Dir, Name, Options) -> + File = filename:join([Dir, Name]), ok = filelib:ensure_dir(File), case file:open(File, [write] ++ Options) of {ok, FD} -> @@ -711,15 +695,14 @@ write_file(Text, Dir, Name, Package, Options) -> end. %% @private -write_info_file(App, Packages, Modules, Dir) -> - Ts = [{packages, Packages}, - {modules, Modules}], +write_info_file(App, Modules, Dir) -> + Ts = [{modules, Modules}], Ts1 = if App =:= ?NO_APP -> Ts; true -> [{application, App} | Ts] end, S0 = [io_lib:fwrite("~p.\n", [T]) || T <- Ts1], S = ["%% encoding: UTF-8\n" | S0], - write_file(S, Dir, ?INFO_FILE, '', [{encoding,unicode}]). + write_file(S, Dir, ?INFO_FILE, [{encoding,unicode}]). %% @spec (Name::edoc:filename()) -> {ok, string()} | {error, Reason} %% @@ -744,9 +727,8 @@ read_file(File) -> info_file_data(Ts) -> App = proplists:get_value(application, Ts, ?NO_APP), - Ps = proplists:append_values(packages, Ts), Ms = proplists:append_values(modules, Ts), - {App, Ps, Ms}. + {App, Ms}. %% Local file access - don't complain if file does not exist. @@ -761,10 +743,10 @@ read_info_file(Dir) -> {error, R} -> R1 = file:format_error(R), warning("could not read '~ts': ~ts.", [File, R1]), - {?NO_APP, [], []} - end; + {?NO_APP, []} + end; false -> - {?NO_APP, [], []} + {?NO_APP, []} end. %% URI access @@ -776,7 +758,7 @@ uri_get_info_file(Base) -> parse_info_file(Text, URI); {error, Msg} -> warning("could not read '~ts': ~ts.", [URI, Msg]), - {?NO_APP, [], []} + {?NO_APP, []} end. parse_info_file(Text, Name) -> @@ -785,10 +767,10 @@ parse_info_file(Text, Name) -> info_file_data(Vs); {error, eof} -> warning("unexpected end of file in '~ts'.", [Name]), - {?NO_APP, [], []}; + {?NO_APP, []}; {error, {_Line,Module,R}} -> warning("~ts: ~ts.", [Module:format_error(R), Name]), - {?NO_APP, [], []} + {?NO_APP, []} end. parse_terms(Text) -> @@ -815,82 +797,67 @@ parse_terms_1([], _As, _Vs) -> %% --------------------------------------------------------------------- -%% Source files and packages +%% Source files +%% @doc See {@link edoc:run/2} for a description of the options +%% `subpackages', `source_suffix'. %% @private -find_sources(Path, Opts) -> - find_sources(Path, "", Opts). -%% @doc See {@link edoc:run/3} for a description of the options -%% `subpackages', `source_suffix' and `exclude_packages'. -%% @private +%% NEW-OPTIONS: subpackages, source_suffix +%% DEFER-OPTIONS: edoc:run/2 -%% NEW-OPTIONS: subpackages, source_suffix, exclude_packages -%% DEFER-OPTIONS: edoc:run/3 - -find_sources(Path, Pkg, Opts) -> +find_sources(Path, Opts) -> Rec = proplists:get_bool(subpackages, Opts), Ext = proplists:get_value(source_suffix, Opts, ?DEFAULT_SOURCE_SUFFIX), - find_sources(Path, Pkg, Rec, Ext, Opts). + find_sources(Path, Rec, Ext, Opts). -find_sources(Path, Pkg, Rec, Ext, Opts) -> - Skip = proplists:get_value(exclude_packages, Opts, []), - lists:flatten(find_sources_1(Path, to_atom(Pkg), Rec, Ext, Skip)). +find_sources(Path, Rec, Ext, _Opts) -> + lists:flatten(find_sources_1(Path, Rec, Ext)). 
-find_sources_1([P | Ps], Pkg, Rec, Ext, Skip) -> - Dir = filename:join(P, atom_to_list(Pkg)), - Fs1 = find_sources_1(Ps, Pkg, Rec, Ext, Skip), +find_sources_1([P | Ps], Rec, Ext) -> + Dir = P, + Fs1 = find_sources_1(Ps, Rec, Ext), case filelib:is_dir(Dir) of true -> - [find_sources_2(Dir, Pkg, Rec, Ext, Skip) | Fs1]; + [find_sources_2(Dir, Rec, Ext) | Fs1]; false -> Fs1 end; -find_sources_1([], _Pkg, _Rec, _Ext, _Skip) -> +find_sources_1([], _Rec, _Ext) -> []. -find_sources_2(Dir, Pkg, Rec, Ext, Skip) -> - case lists:member(Pkg, Skip) of - false -> - Es = list_dir(Dir, false), % just warn if listing fails - Es1 = [{Pkg, E, Dir} || E <- Es, is_source_file(E, Ext)], - case Rec of +find_sources_2(Dir, Rec, Ext) -> + Es = list_dir(Dir, false), % just warn if listing fails + Es1 = [{E, Dir} || E <- Es, is_source_file(E, Ext)], + case Rec of true -> - [find_sources_3(Es, Dir, Pkg, Rec, Ext, Skip) | Es1]; + [find_sources_3(Es, Dir, Rec, Ext) | Es1]; false -> - Es1 - end; - true -> - [] - end. + Es1 + end. -find_sources_3(Es, Dir, Pkg, Rec, Ext, Skip) -> +find_sources_3(Es, Dir, Rec, Ext) -> [find_sources_2(filename:join(Dir, E), - to_atom(join(Pkg, E)), Rec, Ext, Skip) - || E <- Es, is_package_dir(E, Dir)]. - -join('', E) -> E; -join(Pkg, E) -> filename:join(Pkg, E). + Rec, Ext) + || E <- Es, is_source_dir(E, Dir)]. is_source_file(Name, Ext) -> (filename:extension(Name) == Ext) andalso is_name(filename:rootname(Name, Ext)). -is_package_dir(Name, Dir) -> - is_name(filename:rootname(filename:basename(Name))) - andalso filelib:is_dir(filename:join(Dir, Name)). +is_source_dir(Name, Dir) -> + filelib:is_dir(filename:join(Dir, Name)). %% @private -find_file([P | Ps], []=Pkg, Name) -> - Pkg = [], +find_file([P | Ps], Name) -> File = filename:join(P, Name), case filelib:is_file(File) of true -> - File; + File; false -> - find_file(Ps, Pkg, Name) - end; -find_file([], [], _Name) -> + find_file(Ps, Name) + end; +find_file([], _Name) -> "". %% @private @@ -909,7 +876,7 @@ find_doc_dirs([P0 | Ps]) -> File = filename:join(Dir, ?INFO_FILE), case filelib:is_file(File) of true -> - [Dir | find_doc_dirs(Ps)]; + [Dir | find_doc_dirs(Ps)]; false -> find_doc_dirs(Ps) end; @@ -921,24 +888,23 @@ find_doc_dirs([]) -> %% implies that we use the default app-path. %% NEW-OPTIONS: doc_path -%% DEFER-OPTIONS: get_doc_env/4 +%% DEFER-OPTIONS: get_doc_env/3 -get_doc_links(App, Packages, Modules, Opts) -> +get_doc_links(App, Modules, Opts) -> Path = proplists:append_values(doc_path, Opts) ++ find_doc_dirs(), Ds = [{P, uri_get_info_file(P)} || P <- Path], - Ds1 = [{"", {App, Packages, Modules}} | Ds], + Ds1 = [{"", {App, Modules}} | Ds], D = dict:new(), - make_links(Ds1, D, D, D). + make_links(Ds1, D, D). -make_links([{Dir, {App, Ps, Ms}} | Ds], A, P, M) -> +make_links([{Dir, {App, Ms}} | Ds], A, M) -> A1 = if App == ?NO_APP -> A; true -> add_new(App, Dir, A) end, F = fun (K, D) -> add_new(K, Dir, D) end, - P1 = lists:foldl(F, P, Ps), M1 = lists:foldl(F, M, Ms), - make_links(Ds, A1, P1, M1); -make_links([], A, P, M) -> + make_links(Ds, A1, M1); +make_links([], A, M) -> F = fun (D) -> fun (K) -> case dict:find(K, D) of @@ -947,7 +913,7 @@ make_links([], A, P, M) -> end end end, - {F(A), F(P), F(M)}. + {F(A), F(M)}. add_new(K, V, D) -> case dict:is_key(K, D) of @@ -958,15 +924,14 @@ add_new(K, V, D) -> end. %% @spec (Options::proplist()) -> edoc_env() -%% @equiv get_doc_env([], [], [], Opts) +%% @equiv get_doc_env([], [], Opts) %% @private get_doc_env(Opts) -> - get_doc_env([], [], [], Opts). + get_doc_env([], [], Opts). 
-%% @spec (App, Packages, Modules, Options::proplist()) -> edoc_env() +%% @spec (App, Modules, Options::proplist()) -> edoc_env() %% App = [] | atom() -%% Packages = [atom()] %% Modules = [atom()] %% proplist() = [term()] %% @@ -975,7 +940,7 @@ get_doc_env(Opts) -> %% generating references. The data representation is not documented. %% %% @doc Creates an environment data structure used by parts of EDoc for -%% generating references, etc. See {@link edoc:run/3} for a description +%% generating references, etc. See {@link edoc:run/2} for a description %% of the options `file_suffix', `app_default' and `doc_path'. %% %% @see edoc_extract:source/4 @@ -983,19 +948,17 @@ get_doc_env(Opts) -> %% NEW-OPTIONS: file_suffix, app_default %% INHERIT-OPTIONS: get_doc_links/4 -%% DEFER-OPTIONS: edoc:run/3 +%% DEFER-OPTIONS: edoc:run/2 -get_doc_env(App, Packages, Modules, Opts) -> +get_doc_env(App, Modules, Opts) -> Suffix = proplists:get_value(file_suffix, Opts, ?DEFAULT_FILE_SUFFIX), AppDefault = proplists:get_value(app_default, Opts, ?APP_DEFAULT), Includes = proplists:append_values(includes, Opts), - {A, P, M} = get_doc_links(App, Packages, Modules, Opts), + {A, M} = get_doc_links(App, Modules, Opts), #env{file_suffix = Suffix, - package_summary = ?PACKAGE_SUMMARY ++ Suffix, apps = A, - packages = P, modules = M, app_default = AppDefault, includes = Includes @@ -1004,10 +967,10 @@ get_doc_env(App, Packages, Modules, Opts) -> %% --------------------------------------------------------------------- %% Plug-in modules -%% @doc See {@link edoc:run/3} for a description of the `doclet' option. +%% @doc See {@link edoc:run/2} for a description of the `doclet' option. %% NEW-OPTIONS: doclet -%% DEFER-OPTIONS: edoc:run/3 +%% DEFER-OPTIONS: edoc:run/2 %% @private run_doclet(Fun, Opts) -> diff --git a/lib/edoc/src/edoc_macros.erl b/lib/edoc/src/edoc_macros.erl index 8efbfd00c7..bdcb3fe81f 100644 --- a/lib/edoc/src/edoc_macros.erl +++ b/lib/edoc/src/edoc_macros.erl @@ -40,10 +40,6 @@ std_macros(Env) -> true -> [{module, atom_to_list(Env#env.module)}] end ++ - if Env#env.package =:= [] -> []; - true -> [{package, atom_to_list(Env#env.package)}] - end - ++ [{date, fun date_macro/3}, {docRoot, Env#env.root}, {link, fun link_macro/3}, diff --git a/lib/edoc/src/edoc_parser.yrl b/lib/edoc/src/edoc_parser.yrl index c6f8a04775..48c01c8dce 100644 --- a/lib/edoc/src/edoc_parser.yrl +++ b/lib/edoc/src/edoc_parser.yrl @@ -28,7 +28,7 @@ Nonterminals start spec func_type utype_list utype_tuple utypes utype ptypes ptype nutype function_name where_defs defs defs2 def typedef etype -throws qname ref aref mref lref pref var_list vars fields field +throws qname ref aref mref lref var_list vars fields field utype_map utype_map_fields utype_map_field futype_list bin_base_type bin_unit_type. @@ -207,14 +207,11 @@ typedef -> atom var_list '=' utype where_defs: ref -> aref: '$1'. ref -> mref: '$1'. ref -> lref: '$1'. -ref -> pref: '$1'. aref -> '//' atom: edoc_refs:app(tok_val('$2')). aref -> '//' atom '/' mref: edoc_refs:app(tok_val('$2'), '$4'). -aref -> '//' atom '/' pref: - edoc_refs:app(tok_val('$2'), '$4'). mref -> qname ':' atom '/' integer: edoc_refs:function(qname('$1'), tok_val('$3'), tok_val('$5')). @@ -223,9 +220,6 @@ mref -> qname ':' atom '(' ')': mref -> qname: edoc_refs:module(qname('$1')). -pref -> qname '.' '*': - edoc_refs:package(qname('$1')). - lref -> atom '/' integer: edoc_refs:function(tok_val('$1'), tok_val('$3')). 
lref -> atom '(' ')': @@ -399,7 +393,7 @@ parse_typedef_1(S, L) -> %% @doc Parses a <a %% href="overview-summary.html#References">reference</a> to a module, -%% package, function, type, or application +%% function, type, or application parse_ref(S, L) -> case edoc_scanner:string(S, L) of diff --git a/lib/edoc/src/edoc_refs.erl b/lib/edoc/src/edoc_refs.erl index ea439490ed..b9a9391053 100644 --- a/lib/edoc/src/edoc_refs.erl +++ b/lib/edoc/src/edoc_refs.erl @@ -27,10 +27,9 @@ -module(edoc_refs). --export([app/1, app/2, package/1, module/1, module/2, module/3, +-export([app/1, app/2, module/1, module/2, module/3, function/2, function/3, function/4, type/1, type/2, type/3, - to_string/1, to_label/1, get_uri/2, is_top/2, - relative_module_path/2, relative_package_path/2]). + to_string/1, to_label/1, get_uri/2, is_top/2]). -import(edoc_lib, [join_uri/2, escape_uri/1]). @@ -56,9 +55,6 @@ module(M, Ref) -> module(App, M, Ref) -> app(App, module(M, Ref)). -package(P) -> - {package, P}. - function(F, A) -> {function, F, A}. @@ -88,8 +84,6 @@ to_string({module, M}) -> atom_to_list(M) ; to_string({module, M, Ref}) -> atom_to_list(M) ++ ":" ++ to_string(Ref); -to_string({package, P}) -> - atom_to_list(P) ++ ".*"; to_string({function, F, A}) -> atom_to_list(F) ++ "/" ++ integer_to_list(A); to_string({type, T}) -> @@ -111,24 +105,19 @@ get_uri({module, M, Ref}, Env) -> module_ref(M, Env) ++ "#" ++ to_label(Ref); get_uri({module, M}, Env) -> module_ref(M, Env); -get_uri({package, P}, Env) -> - package_ref(P, Env); get_uri(Ref, _Env) -> "#" ++ to_label(Ref). abs_uri({module, M}, Env) -> module_absref(M, Env); abs_uri({module, M, Ref}, Env) -> - module_absref(M, Env) ++ "#" ++ to_label(Ref); -abs_uri({package, P}, Env) -> - package_absref(P, Env). + module_absref(M, Env) ++ "#" ++ to_label(Ref). module_ref(M, Env) -> case (Env#env.modules)(M) of "" -> File = atom_to_list(M) ++ Env#env.file_suffix, - Path = relative_module_path(M, Env#env.package), - join_uri(Path, escape_uri(File)); + escape_uri(File); Base -> join_uri(Base, module_absref(M, Env)) end. @@ -136,19 +125,6 @@ module_ref(M, Env) -> module_absref(M, Env) -> escape_uri(atom_to_list(M)) ++ escape_uri(Env#env.file_suffix). -package_ref(P, Env) -> - case (Env#env.packages)(P) of - "" -> - join_uri(relative_package_path(P, Env#env.package), - escape_uri(Env#env.package_summary)); - Base -> - join_uri(Base, package_absref(P, Env)) - end. - -package_absref(P, Env) -> - join_uri(escape_uri(atom_to_list(P)), - escape_uri(Env#env.package_summary)). - app_ref(A, Env) -> case (Env#env.apps)(A) of "" -> @@ -166,43 +142,3 @@ is_top({app, _App}, _Env) -> is_top(_Ref, _Env) -> false. -%% Each segment of a path must be separately escaped before joining. - -join_segments([S]) -> - escape_uri(S); -join_segments([S | Ss]) -> - join_uri(escape_uri(S), join_segments(Ss)). - -%% 'From' is always the "current package" here: - -%% The empty string is returned if the To module has only one segment, -%% implying a local reference. - -relative_module_path(_To, _From) -> - "". - -relative_package_path(To, From) -> - relative_path([atom_to_list(To)], [atom_to_list(From)]). - -%% This takes two lists of path segments (From, To). Note that an empty -%% string will be returned if the paths are the same. Empty leading -%% segments are stripped from both paths. - -relative_path(Ts, ["" | Fs]) -> - relative_path(Ts, Fs); -relative_path(["" | Ts], Fs) -> - relative_path(Ts, Fs); -relative_path(Ts, Fs) -> - relative_path_1(Ts, Fs). 
- -relative_path_1([T | Ts], [F | Fs]) when F == T -> - relative_path_1(Ts, Fs); -relative_path_1(Ts, Fs) -> - relative_path_2(Fs, Ts). - -relative_path_2([_F | Fs], Ts) -> - relative_path_2(Fs, [".." | Ts]); -relative_path_2([], []) -> - ""; -relative_path_2([], Ts) -> - join_segments(Ts). diff --git a/lib/edoc/src/edoc_run.erl b/lib/edoc/src/edoc_run.erl index b5a1ef713d..9a569d0879 100644 --- a/lib/edoc/src/edoc_run.erl +++ b/lib/edoc/src/edoc_run.erl @@ -17,7 +17,7 @@ %% @copyright 2003 Richard Carlsson %% @author Richard Carlsson <[email protected]> %% @see edoc -%% @end +%% @end %% ===================================================================== %% @doc Interface for calling EDoc from Erlang startup options. @@ -38,7 +38,7 @@ -module(edoc_run). --export([file/1, application/1, packages/1, files/1, toc/1]). +-export([file/1, application/1, files/1, toc/1]). -compile({no_auto_import,[error/1]}). @@ -92,28 +92,6 @@ files(Args) -> end, run(F). -%% @spec packages([string()]) -> none() -%% -%% @doc Calls {@link edoc:application/2} with the corresponding -%% arguments. The strings in the list are parsed as Erlang constant -%% terms. The list can be either `[Packages]' or `[Packages, Options]'. -%% In the first case {@link edoc:application/1} is called instead. -%% -%% The function call never returns; instead, the emulator is -%% automatically terminated when the call has completed, signalling -%% success or failure to the operating system. - -packages(Args) -> - F = fun () -> - case parse_args(Args) of - [Packages] -> edoc:packages(Packages); - [Packages, Opts] -> edoc:packages(Packages, Opts); - _ -> - invalid_args("edoc_run:packages/1", Args) - end - end, - run(F). - %% @hidden Not official yet toc(Args) -> F = fun () -> @@ -131,8 +109,8 @@ toc(Args) -> %% %% @deprecated This is part of the old interface to EDoc and is mainly %% kept for backwards compatibility. The preferred way of generating -%% documentation is through one of the functions {@link application/1}, -%% {@link packages/1} and {@link files/1}. +%% documentation is through one of the functions {@link application/1} +%% and {@link files/1}. %% %% @doc Calls {@link edoc:file/2} with the corresponding arguments. The %% strings in the list are parsed as Erlang constant terms. 
The list can diff --git a/lib/edoc/src/edoc_specs.erl b/lib/edoc/src/edoc_specs.erl index 211a354c74..3bf81c6503 100644 --- a/lib/edoc/src/edoc_specs.erl +++ b/lib/edoc/src/edoc_specs.erl @@ -362,7 +362,7 @@ d2e({type,_,map,any}) -> #t_map{ types = []}; d2e({type,_,map,Es}) -> #t_map{ types = d2e(Es) }; -d2e({type,_,map_field_assoc,K,V}) -> +d2e({type,_,map_field_assoc,[K,V]}) -> #t_map_field{ k_type = d2e(K), v_type=d2e(V) }; d2e({type,_,map_field_exact,K,V}) -> #t_map_field{ k_type = d2e(K), v_type=d2e(V) }; @@ -388,6 +388,9 @@ d2e({record_field,L,_Name}=F) -> d2e({type,_,Name,Types0}) -> Types = d2e(Types0), typevar_anno(#t_type{name = #t_name{name = Name}, args = Types}, Types); +d2e({user_type,_,Name,Types0}) -> + Types = d2e(Types0), + typevar_anno(#t_type{name = #t_name{name = Name}, args = Types}, Types); d2e({var,_,'_'}) -> #t_type{name = #t_name{name = ?TOP_TYPE}}; d2e({var,_,TypeName}) -> diff --git a/lib/edoc/src/edoc_tags.erl b/lib/edoc/src/edoc_tags.erl index 264a533a52..c1c453511a 100644 --- a/lib/edoc/src/edoc_tags.erl +++ b/lib/edoc/src/edoc_tags.erl @@ -42,7 +42,7 @@ %% Name = atom() %% Parser = text | xml | (Text,Line,Where) -> term() %% Flags = [Flag] -%% Flag = module | function | package | overview | single +%% Flag = module | function | overview | single %% %% Note that the pseudo-tag '@clear' is not listed here. %% (Cf. the function 'filter_tags'.) @@ -57,11 +57,11 @@ %% - @category (useless; superseded by keywords or free text search) tags() -> - All = [module,footer,function,package,overview], - [{author, fun parse_contact/4, [module,package,overview]}, - {copyright, text, [module,package,overview,single]}, - {deprecated, xml, [module,function,package,single]}, - {doc, xml, [module,function,package,overview,single]}, + All = [module,footer,function,overview], + [{author, fun parse_contact/4, [module,overview]}, + {copyright, text, [module,overview,single]}, + {deprecated, xml, [module,function,single]}, + {doc, xml, [module,function,overview,single]}, {docfile, fun parse_file/4, All}, {'end', text, All}, {equiv, fun parse_expr/4, [function,single]}, @@ -69,17 +69,17 @@ tags() -> {hidden, text, [module,function,single]}, {param, fun parse_param/4, [function]}, {private, text, [module,function,single]}, - {reference, xml, [module,footer,package,overview]}, + {reference, xml, [module,footer,overview]}, {returns, xml, [function,single]}, - {see, fun parse_see/4, [module,function,package,overview]}, - {since, text, [module,function,package,overview,single]}, + {see, fun parse_see/4, [module,function,overview]}, + {since, text, [module,function,overview,single]}, {spec, fun parse_spec/4, [function,single]}, {throws, fun parse_throws/4, [function,single]}, {title, text, [overview,single]}, {'TODO', xml, All}, {todo, xml, All}, {type, fun parse_typedef/4, [module,footer,function]}, - {version, text, [module,package,overview,single]}]. + {version, text, [module,overview,single]}]. 
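The map_field_assoc clause fixed in d2e/1 above follows the abstract format, where an association field in a map type carries its key and value types in a list. For example (shell output shown schematically; the exact line annotations depend on the input):

    1> {ok, Toks, _} = erl_scan:string("-type t() :: #{atom() => integer()}.").
    2> erl_parse:parse_form(Toks).
    {ok,{attribute,1,type,
            {t,{type,1,map,
                   [{type,1,map_field_assoc,
                        [{type,1,atom,[]},{type,1,integer,[]}]}]},
               []}}}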
aliases('TODO') -> todo; aliases(return) -> returns; @@ -329,10 +329,7 @@ parse_typedef(Data, Line, _Env, Where) -> NAs = length(As), case edoc_types:is_predefined(T, NAs) of true -> - case - edoc_types:is_new_predefined(T, NAs) - orelse edoc_types:is_predefined_otp_type(T, NAs) - of + case edoc_types:is_new_predefined(T, NAs) of false -> throw_error(Line, {"redefining built-in type '~w'.", [T]}); @@ -372,7 +369,7 @@ parse_header(Data, Line, Env, Where) when is_list(Where) -> {string, _, File} -> Dir = filename:dirname(Where), Path = Env#env.includes ++ [Dir], - case edoc_lib:find_file(Path, "", File) of + case edoc_lib:find_file(Path, File) of "" -> throw_error(Line, {file_not_found, File}); File1 -> @@ -499,7 +496,6 @@ check_used_type(#t_name{name = N, module = Mod}=Name, Args, P, LocalTypes) -> Mod =/= [] orelse lists:member(TypeName, ets:lookup(DT, Name)) orelse edoc_types:is_predefined(N, NArgs) - orelse edoc_types:is_predefined_otp_type(N, NArgs) orelse lists:member(TypeName, LocalTypes) of true -> diff --git a/lib/edoc/src/edoc_types.erl b/lib/edoc/src/edoc_types.erl index 8a6c8eb33e..65fba61a72 100644 --- a/lib/edoc/src/edoc_types.erl +++ b/lib/edoc/src/edoc_types.erl @@ -25,7 +25,7 @@ -module(edoc_types). --export([is_predefined/2, is_new_predefined/2, is_predefined_otp_type/2, +-export([is_predefined/2, is_new_predefined/2, to_ref/1, to_xml/2, to_label/1, arg_names/1, set_arg_names/2, arg_descs/1, range_desc/1]). @@ -34,67 +34,13 @@ -include("edoc_types.hrl"). -include_lib("xmerl/include/xmerl.hrl"). - -is_predefined(any, 0) -> true; -is_predefined(atom, 0) -> true; -is_predefined(binary, 0) -> true; -is_predefined(bool, 0) -> true; % kept for backwards compatibility -is_predefined(char, 0) -> true; is_predefined(cons, 2) -> true; is_predefined(deep_string, 0) -> true; -is_predefined(float, 0) -> true; -is_predefined(function, 0) -> true; -is_predefined(integer, 0) -> true; -is_predefined(list, 0) -> true; -is_predefined(list, 1) -> true; -is_predefined(nil, 0) -> true; -is_predefined(none, 0) -> true; -is_predefined(no_return, 0) -> true; -is_predefined(number, 0) -> true; -is_predefined(pid, 0) -> true; -is_predefined(port, 0) -> true; -is_predefined(reference, 0) -> true; -is_predefined(string, 0) -> true; -is_predefined(term, 0) -> true; -is_predefined(tuple, 0) -> true; -is_predefined(F, A) -> is_new_predefined(F, A). +is_predefined(F, A) -> erl_internal:is_type(F, A). -%% Should eventually be coalesced with is_predefined/2. -is_new_predefined(arity, 0) -> true; -is_new_predefined(bitstring, 0) -> true; -is_new_predefined(boolean, 0) -> true; -is_new_predefined(byte, 0) -> true; -is_new_predefined(iodata, 0) -> true; -is_new_predefined(iolist, 0) -> true; is_new_predefined(map, 0) -> true; -is_new_predefined(maybe_improper_list, 0) -> true; -is_new_predefined(maybe_improper_list, 2) -> true; -is_new_predefined(mfa, 0) -> true; -is_new_predefined(module, 0) -> true; -is_new_predefined(neg_integer, 0) -> true; -is_new_predefined(node, 0) -> true; -is_new_predefined(non_neg_integer, 0) -> true; -is_new_predefined(nonempty_improper_list, 2) -> true; -is_new_predefined(nonempty_list, 0) -> true; -is_new_predefined(nonempty_list, 1) -> true; -is_new_predefined(nonempty_maybe_improper_list, 0) -> true; -is_new_predefined(nonempty_maybe_improper_list, 2) -> true; -is_new_predefined(nonempty_string, 0) -> true; -is_new_predefined(pos_integer, 0) -> true; -is_new_predefined(timeout, 0) -> true; is_new_predefined(_, _) -> false. 
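The hand-written clause lists removed above are now delegated to erl_internal:is_type/2 in stdlib, with only the EDoc-specific names kept as local clauses. A rough sketch of the resulting behaviour (illustrative calls only, not part of the patch):

    %% Hypothetical sketch; results follow from the clauses above.
    true  = edoc_types:is_predefined(integer, 0),  %% delegated to erl_internal:is_type/2
    true  = edoc_types:is_predefined(cons, 2),     %% still an EDoc-specific special case
    false = edoc_types:is_predefined(gb_set, 0).   %% the old pseudo-built-in OTP types are gone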
-%% The following types will be removed later, but they are currently -%% kind of built-in. -is_predefined_otp_type(array, 0) -> true; -is_predefined_otp_type(dict, 0) -> true; -is_predefined_otp_type(digraph, 0) -> true; -is_predefined_otp_type(gb_set, 0) -> true; -is_predefined_otp_type(gb_tree, 0) -> true; -is_predefined_otp_type(queue, 0) -> true; -is_predefined_otp_type(set, 0) -> true; -is_predefined_otp_type(_, _) -> false. - to_ref(#t_typedef{name = N}) -> to_ref(N); to_ref(#t_def{name = N}) -> @@ -129,8 +75,7 @@ to_xml(#t_type{name = N, args = As}, Env) -> Predef = case N of #t_name{module = [], name = T} -> NArgs = length(As), - (is_predefined(T, NArgs) - orelse is_predefined_otp_type(T, NArgs)); + is_predefined(T, NArgs); _ -> false end, diff --git a/lib/edoc/src/otpsgml_layout.erl b/lib/edoc/src/otpsgml_layout.erl index 2c4cd919bb..052c75b9d4 100644 --- a/lib/edoc/src/otpsgml_layout.erl +++ b/lib/edoc/src/otpsgml_layout.erl @@ -28,7 +28,7 @@ -module(otpsgml_layout). --export([module/2, package/2, overview/2,type/1]). +-export([module/2, overview/2,type/1]). -import(edoc_report, [report/2]). @@ -811,27 +811,6 @@ xml(Title, CSS, Body) -> xmerl:export_simple_content(t_utype_elem(E) ++ local_defs(Ds), ?SGML_EXPORT). - -package(E=#xmlElement{name = package, content = Es}, Options) -> - Opts = init_opts(E, Options), - Name = get_text(packageName, Es), - Title = io_lib:fwrite("Package ~s", [Name]), - Desc = get_content(description, Es), -% ShortDesc = get_content(briefDescription, Desc), - FullDesc = get_content(fullDescription, Desc), - Body = ([?NL, {h1, [Title]}, ?NL] -% ++ ShortDesc - ++ copyright(Es) - ++ deprecated(Es, "package") - ++ version(Es) - ++ since(Es) - ++ authors(Es) - ++ references(Es) - ++ sees(Es) - ++ FullDesc), - XML = xml(Title, stylesheet(Opts), Body), - xmerl:export_simple([XML], ?SGML_EXPORT, []). - overview(E=#xmlElement{name = overview, content = Es}, Options) -> Opts = init_opts(E, Options), Title = get_text(title, Es), @@ -843,6 +822,7 @@ overview(E=#xmlElement{name = overview, content = Es}, Options) -> ++ copyright(Es) ++ version(Es) ++ since(Es) + ++ deprecated(Es, "application") ++ authors(Es) ++ references(Es) ++ sees(Es) diff --git a/lib/edoc/test/edoc_SUITE.erl b/lib/edoc/test/edoc_SUITE.erl index c63660c8c0..6b23054ce3 100644 --- a/lib/edoc/test/edoc_SUITE.erl +++ b/lib/edoc/test/edoc_SUITE.erl @@ -22,12 +22,12 @@ init_per_group/2,end_per_group/2]). %% Test cases --export([app/1,appup/1,build_std/1,build_map_module/1,otp_12008/1]). +-export([app/1,appup/1,build_std/1,build_map_module/1,otp_12008/1, build_app/1]). suite() -> [{ct_hooks,[ts_install_cth]}]. all() -> - [app,appup,build_std,build_map_module,otp_12008]. + [app,appup,build_std,build_map_module,otp_12008, build_app]. groups() -> []. @@ -95,3 +95,20 @@ otp_12008(Config) when is_list(Config) -> ok = edoc:files([Un2], Opts2), {'EXIT', error} = (catch edoc:files([Un3], Opts2)), ok. 
+ +build_app(suite) -> []; +build_app(doc) -> ["Build a local app with nested source directories"]; +build_app(Config) -> + DataDir = ?config(data_dir, Config), + PrivDir = ?config(priv_dir, Config), + OutDir = filename:join(PrivDir, "myapp"), + Src = filename:join(DataDir, "myapp"), + + ok = edoc:application(myapp, Src, [{dir, OutDir}, {subpackages, false}]), + true = filelib:is_regular(filename:join(OutDir, "a.html")), + false = filelib:is_regular(filename:join(OutDir, "b.html")), + + ok = edoc:application(myapp, Src, [{dir, OutDir}]), + true = filelib:is_regular(filename:join(OutDir, "a.html")), + true = filelib:is_regular(filename:join(OutDir, "b.html")), + ok. diff --git a/lib/jinterface/priv/.gitignore b/lib/edoc/test/edoc_SUITE_data/myapp/doc/.dummy index e69de29bb2..e69de29bb2 100644 --- a/lib/jinterface/priv/.gitignore +++ b/lib/edoc/test/edoc_SUITE_data/myapp/doc/.dummy diff --git a/lib/edoc/test/edoc_SUITE_data/myapp/src/a.erl b/lib/edoc/test/edoc_SUITE_data/myapp/src/a.erl new file mode 100644 index 0000000000..1b5b704551 --- /dev/null +++ b/lib/edoc/test/edoc_SUITE_data/myapp/src/a.erl @@ -0,0 +1 @@ +-module(a). diff --git a/lib/edoc/test/edoc_SUITE_data/myapp/src/src_1/b.erl b/lib/edoc/test/edoc_SUITE_data/myapp/src/src_1/b.erl new file mode 100644 index 0000000000..6d6f15dfe5 --- /dev/null +++ b/lib/edoc/test/edoc_SUITE_data/myapp/src/src_1/b.erl @@ -0,0 +1 @@ +-module(b). diff --git a/lib/eldap/asn1/ELDAPv3.asn1 b/lib/eldap/asn1/ELDAPv3.asn1 index 72b87d7221..3fe7e815cc 100644 --- a/lib/eldap/asn1/ELDAPv3.asn1 +++ b/lib/eldap/asn1/ELDAPv3.asn1 @@ -274,5 +274,17 @@ IntermediateResponse ::= [APPLICATION 25] SEQUENCE { responseName [0] LDAPOID OPTIONAL, responseValue [1] OCTET STRING OPTIONAL } +-- Extended syntax for Password Modify (RFC 3062, Section 2) + +-- passwdModifyOID OBJECT IDENTIFIER ::= 1.3.6.1.4.1.4203.1.11.1 + +PasswdModifyRequestValue ::= SEQUENCE { + userIdentity [0] OCTET STRING OPTIONAL, + oldPasswd [1] OCTET STRING OPTIONAL, + newPasswd [2] OCTET STRING OPTIONAL } + +PasswdModifyResponseValue ::= SEQUENCE { + genPasswd [0] OCTET STRING OPTIONAL } + END diff --git a/lib/eldap/doc/src/eldap.xml b/lib/eldap/doc/src/eldap.xml index c4b1ac36ca..b68115cd82 100644 --- a/lib/eldap/doc/src/eldap.xml +++ b/lib/eldap/doc/src/eldap.xml @@ -218,6 +218,46 @@ filter() See present/1, substrings/2, </desc> </func> <func> + <name>modify_password(Handle, Dn, NewPasswd) -> ok | {ok, GenPasswd} | {error, Reason}</name> + <fsummary>Modify the password of a user.</fsummary> + <type> + <v>Dn = string()</v> + <v>NewPasswd = string()</v> + </type> + <desc> + <p>Modify the password of a user. See <seealso marker="#modify_password/4">modify_password/4</seealso>.</p> + </desc> + </func> + <func> + <name>modify_password(Handle, Dn, NewPasswd, OldPasswd) -> ok | {ok, GenPasswd} | {error, Reason}</name> + <fsummary>Modify the password of a user.</fsummary> + <type> + <v>Dn = string()</v> + <v>NewPasswd = string()</v> + <v>OldPasswd = string()</v> + <v>GenPasswd = string()</v> + </type> + <desc> + <p>Modify the password of a user.</p> + <list type="bulleted"> + <item> + <p><c>Dn</c>. The user to modify. Should be "" if the + modify request is for the user of the LDAP session.</p> + </item> + <item> + <p><c>NewPasswd</c>. The new password to set. Should be "" + if the server is to generate the password. In this case, + the result will be <c>{ok, GenPasswd}</c>.</p> + </item> + <item> + <p><c>OldPasswd</c>. Sometimes required by server policy + for a user to change their password. 
If not required, use + <seealso marker="#modify_password/3">modify_password/3</seealso>.</p> + </item> + </list> + </desc> + </func> + <func> <name>modify_dn(Handle, Dn, NewRDN, DeleteOldRDN, NewSupDN) -> ok | {error, Reason}</name> <fsummary>Modify the DN of an entry.</fsummary> <type> diff --git a/lib/eldap/src/eldap.erl b/lib/eldap/src/eldap.erl index 80718bc106..ae47c815c9 100644 --- a/lib/eldap/src/eldap.erl +++ b/lib/eldap/src/eldap.erl @@ -12,6 +12,7 @@ -vc('$Id$ '). -export([open/1,open/2,simple_bind/3,controlling_process/2, start_tls/2, start_tls/3, + modify_password/3, modify_password/4, getopts/2, baseObject/0,singleLevel/0,wholeSubtree/0,close/1, equalityMatch/2,greaterOrEqual/2,lessOrEqual/2, @@ -94,6 +95,23 @@ start_tls(Handle, TlsOptions, Timeout) -> recv(Handle). %%% -------------------------------------------------------------------- +%%% Modify the password of a user. +%%% +%%% Dn - Name of the entry to modify. If empty, the session user. +%%% NewPasswd - New password. If empty, the server returns a new password. +%%% OldPasswd - Original password for server verification, may be empty. +%%% +%%% Returns: ok | {ok, GenPasswd} | {error, term()} +%%% -------------------------------------------------------------------- +modify_password(Handle, Dn, NewPasswd) -> + modify_password(Handle, Dn, NewPasswd, []). + +modify_password(Handle, Dn, NewPasswd, OldPasswd) + when is_pid(Handle), is_list(Dn), is_list(NewPasswd), is_list(OldPasswd) -> + send(Handle, {passwd_modify,optional(Dn),optional(NewPasswd),optional(OldPasswd)}), + recv(Handle). + +%%% -------------------------------------------------------------------- %%% Ask for option values on the socket. %%% Warning: This is an undocumented function for testing purposes only. %%% Use at own risk... @@ -507,6 +525,11 @@ loop(Cpid, Data) -> send(From,Res), ?MODULE:loop(Cpid, NewData); + {From, {passwd_modify,Dn,NewPasswd,OldPasswd}} -> + {Res,NewData} = do_passwd_modify(Data, Dn, NewPasswd, OldPasswd), + send(From, Res), + ?MODULE:loop(Cpid, NewData); + {_From, close} -> unlink(Cpid), exit(closed); @@ -797,6 +820,60 @@ do_modify_0(Data, Obj, Mod) -> check_reply(Data#eldap{id = Id}, Resp, modifyResponse). %%% -------------------------------------------------------------------- +%%% PasswdModifyRequest +%%% -------------------------------------------------------------------- + +-define(PASSWD_MODIFY_OID, "1.3.6.1.4.1.4203.1.11.1"). + +do_passwd_modify(Data, Dn, NewPasswd, OldPasswd) -> + case catch do_passwd_modify_0(Data, Dn, NewPasswd, OldPasswd) of + {error,Emsg} -> {ldap_closed_p(Data, Emsg),Data}; + {'EXIT',Error} -> {ldap_closed_p(Data, Error),Data}; + {ok,NewData} -> {ok,NewData}; + {ok,Passwd,NewData} -> {{ok, Passwd},NewData}; + Else -> {ldap_closed_p(Data, Else),Data} + end. + +do_passwd_modify_0(Data, Dn, NewPasswd, OldPasswd) -> + Req = #'PasswdModifyRequestValue'{userIdentity = Dn, + oldPasswd = OldPasswd, + newPasswd = NewPasswd}, + log2(Data, "modify password request = ~p~n", [Req]), + {ok, Bytes} = 'ELDAPv3':encode('PasswdModifyRequestValue', Req), + ExtReq = #'ExtendedRequest'{requestName = ?PASSWD_MODIFY_OID, + requestValue = Bytes}, + Id = bump_id(Data), + log2(Data, "extended request = ~p~n", [ExtReq]), + Reply = request(Data#eldap.fd, Data, Id, {extendedReq, ExtReq}), + log2(Data, "modify password reply = ~p~n", [Reply]), + exec_passwd_modify_reply(Data#eldap{id = Id}, Reply). 
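For orientation, a caller-side sketch of the password-modify API added above; the host, DNs and passwords are invented placeholders, and error handling is omitted:

    %% Hypothetical usage, not part of the patch.
    {ok, Handle} = eldap:open(["ldap.example.com"]),
    ok = eldap:simple_bind(Handle, "cn=admin,dc=example,dc=com", "secret"),

    %% Set a new password for a specific entry, supplying the old one:
    ok = eldap:modify_password(Handle, "uid=user,dc=example,dc=com",
                               "NewPw", "OldPw"),

    %% Let the server generate a password for the session user:
    {ok, _GenPasswd} = eldap:modify_password(Handle, "", ""),
    eldap:close(Handle).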
+ +exec_passwd_modify_reply(Data, {ok,Msg}) when + Msg#'LDAPMessage'.messageID == Data#eldap.id -> + case Msg#'LDAPMessage'.protocolOp of + {extendedResp, Result} -> + case Result#'ExtendedResponse'.resultCode of + success -> + case Result#'ExtendedResponse'.responseValue of + asn1_NOVALUE -> + {ok, Data}; + Value -> + case 'ELDAPv3':decode('PasswdModifyResponseValue', Value) of + {ok,#'PasswdModifyResponseValue'{genPasswd = Passwd}} -> + {ok, Passwd, Data}; + Error -> + throw(Error) + end + end; + Error -> + {error, {response,Error}} + end; + Other -> {error, Other} + end; +exec_passwd_modify_reply(_, Error) -> + {error, Error}. + +%%% -------------------------------------------------------------------- %%% modifyDNRequest %%% -------------------------------------------------------------------- diff --git a/lib/erl_docgen/priv/bin/specs_gen.escript b/lib/erl_docgen/priv/bin/specs_gen.escript index 156311565c..e8a8f14e3a 100644 --- a/lib/erl_docgen/priv/bin/specs_gen.escript +++ b/lib/erl_docgen/priv/bin/specs_gen.escript @@ -97,7 +97,7 @@ read_file(File, Opts) -> edoc:read_source(File, Opts). extract(File, Forms, Opts) -> - Env = edoc_lib:get_doc_env([], [], [], _Opts=[]), + Env = edoc_lib:get_doc_env([], [], _Opts=[]), {_Module, Doc} = edoc_extract:source(Forms, File, Env, Opts), Doc. diff --git a/lib/erl_docgen/priv/bin/xml_from_edoc.escript b/lib/erl_docgen/priv/bin/xml_from_edoc.escript index 65a580dca2..007546e7ba 100755 --- a/lib/erl_docgen/priv/bin/xml_from_edoc.escript +++ b/lib/erl_docgen/priv/bin/xml_from_edoc.escript @@ -117,7 +117,7 @@ users_guide(File, Args) -> Text = edoc_lib:run_layout(F, Opts), OutFile = "chapter" ++ Args#args.suffix, - edoc_lib:write_file(Text, ".", OutFile, '', Encoding); + edoc_lib:write_file(Text, ".", OutFile, Encoding); false -> io:format("~s: not a regular file\n", [File]), usage() diff --git a/lib/erl_docgen/src/docgen_otp_specs.erl b/lib/erl_docgen/src/docgen_otp_specs.erl index 1075c47801..e2eee2b3c0 100644 --- a/lib/erl_docgen/src/docgen_otp_specs.erl +++ b/lib/erl_docgen/src/docgen_otp_specs.erl @@ -616,7 +616,7 @@ ot_map(Es) -> {type,0,map,[ot_map_field(E) || E <- get_elem(map_field,Es)]}. ot_map_field(#xmlElement{content=[K,V]}) -> - {type,0,map_field_assoc, ot_utype_elem(K), ot_utype_elem(V)}. + {type,0,map_field_assoc,[ot_utype_elem(K),ot_utype_elem(V)]}. 
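Both this clause and the matching d2e/1 clause in edoc_specs.erl now use the abstract-format shape of a map association node, where the key and value forms sit in a single list argument. A minimal sketch of that shape for a made-up field type #{atom() => integer()} (anno fields shown as 0):

    %% Hypothetical abstract type form, not part of the patch:
    {type, 0, map,
     [{type, 0, map_field_assoc,
       [{type, 0, atom, []}, {type, 0, integer, []}]}]}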
ot_fun(Es) -> Range = ot_utype(get_elem(type, Es)), diff --git a/lib/hipe/cerl/erl_bif_types.erl b/lib/hipe/cerl/erl_bif_types.erl index 74e93bf098..5b1401b34a 100644 --- a/lib/hipe/cerl/erl_bif_types.erl +++ b/lib/hipe/cerl/erl_bif_types.erl @@ -1070,9 +1070,6 @@ type(hipe_bifs, find_na_or_make_stub, 2, Xs, Opaques) -> type(hipe_bifs, fun_to_address, 1, Xs, Opaques) -> strict(hipe_bifs, fun_to_address, 1, Xs, fun (_) -> t_integer() end, Opaques); -%% type(hipe_bifs, get_emu_address, 1, Xs, Opaques) -> -%% strict(hipe_bifs, get_emu_address, 1, Xs, -%% fun (_) -> t_integer() end, Opaques); % address type(hipe_bifs, get_fe, 2, Xs, Opaques) -> strict(hipe_bifs, get_fe, 2, Xs, fun (_) -> t_integer() end, Opaques); type(hipe_bifs, get_rts_param, 1, Xs, Opaques) -> @@ -1081,9 +1078,6 @@ type(hipe_bifs, get_rts_param, 1, Xs, Opaques) -> type(hipe_bifs, invalidate_funinfo_native_addresses, 1, Xs, Opaques) -> strict(hipe_bifs, invalidate_funinfo_native_addresses, 1, Xs, fun (_) -> t_nil() end, Opaques); -%% type(hipe_bifs, make_native_stub, 2, Xs, Opaques) -> -%% strict(hipe_bifs, make_native_stub, 2, Xs, -%% fun (_) -> t_integer() end, Opaques); % address type(hipe_bifs, mark_referred_from, 1, Xs, Opaques) -> strict(hipe_bifs, mark_referred_from, 1, Xs, fun (_) -> t_nil() end, Opaques); @@ -2462,16 +2456,12 @@ arg_types(hipe_bifs, find_na_or_make_stub, 2) -> [t_mfa(), t_boolean()]; arg_types(hipe_bifs, fun_to_address, 1) -> [t_mfa()]; -%% arg_types(hipe_bifs, get_emu_address, 1) -> -%% [t_mfa()]; arg_types(hipe_bifs, get_fe, 2) -> [t_atom(), t_tuple([t_integer(), t_integer(), t_integer()])]; arg_types(hipe_bifs, get_rts_param, 1) -> [t_fixnum()]; arg_types(hipe_bifs, invalidate_funinfo_native_addresses, 1) -> [t_list(t_mfa())]; -%% arg_types(hipe_bifs, make_native_stub, 2) -> -%% [t_integer(), t_arity()]; arg_types(hipe_bifs, mark_referred_from, 1) -> [t_mfa()]; arg_types(hipe_bifs, merge_term, 1) -> diff --git a/lib/hipe/cerl/erl_types.erl b/lib/hipe/cerl/erl_types.erl index 4215448c61..09dffe1280 100644 --- a/lib/hipe/cerl/erl_types.erl +++ b/lib/hipe/cerl/erl_types.erl @@ -40,7 +40,6 @@ any_none_or_unit/1, lookup_record/3, max/2, - module_builtin_opaques/1, min/2, number_max/1, number_max/2, number_min/1, number_min/2, @@ -79,10 +78,11 @@ t_non_neg_fixnum/0, t_pos_fixnum/0, t_float/0, + t_var_names/1, t_form_to_string/1, - t_from_form/1, - t_from_form/2, - t_from_form/3, + t_from_form/4, + t_from_form/5, + t_from_form_without_remote/2, t_from_range/2, t_from_range_unsafe/2, t_from_term/1, @@ -182,13 +182,11 @@ t_remote/3, t_string/0, t_struct_from_opaque/2, - t_solve_remote/3, t_subst/2, t_subtract/2, t_subtract_list/2, t_sup/1, t_sup/2, - t_tid/0, t_timeout/0, t_to_string/1, t_to_string/2, @@ -250,6 +248,8 @@ %% -define(REC_TYPE_LIMIT, 2). +-define(EXPAND_DEPTH, 16). +-define(EXPAND_LIMIT, 10000). -define(TUPLE_TAG_LIMIT, 5). -define(TUPLE_ARITY_LIMIT, 8). @@ -368,7 +368,7 @@ -type record_key() :: {'record', atom()}. -type type_key() :: {'type' | 'opaque', atom(), arity()}. --type record_value() :: orddict:orddict(). % XXX. To be refined +-type record_value() :: [{atom(), erl_parse:abstract_expr(), erl_type()}]. -type type_value() :: {module(), erl_type(), atom()}. -type type_table() :: dict:dict(record_key(), record_value()) | dict:dict(type_key(), type_value()). @@ -466,16 +466,6 @@ has_opaque_subtype(T) -> t_opaque_structure(?opaque(Elements)) -> t_sup([Struct || #opaque{struct = Struct} <- ordsets:to_list(Elements)]). --spec t_opaque_modules(erl_type()) -> [module()]. 
- -t_opaque_modules(?opaque(Elements)) -> - case ordsets:size(Elements) of - 1 -> - [#opaque{mod = Mod}] = set_to_list(Elements), - [Mod]; - _ -> throw({error, "Unexpected multiple opaque types"}) - end. - -spec t_contains_opaque(erl_type()) -> boolean(). t_contains_opaque(Type) -> @@ -759,7 +749,7 @@ t_opaque_from_records(RecDict) -> end end, RecDict), OpaqueTypeDict = - dict:map(fun({opaque, Name, _Arity}, {Module, _Type, ArgNames}) -> + dict:map(fun({opaque, Name, _Arity}, {{Module, _Form, ArgNames}, _Type}) -> %% Args = args_to_types(ArgNames), %% List = lists:zip(ArgNames, Args), %% TmpVarDict = dict:from_list(List), @@ -801,11 +791,6 @@ t_struct_from_opaque(Type, _Opaques) -> Type. list_struct_from_opaque(Types, Opaques) -> [t_struct_from_opaque(Type, Opaques) || Type <- Types]. --spec module_builtin_opaques(module()) -> [erl_type()]. - -module_builtin_opaques(Module) -> - [O || O <- all_opaque_builtins(), lists:member(Module, t_opaque_modules(O))]. - %%----------------------------------------------------------------------------- %% Remote types: these types are used for preprocessing; %% they should never reach the analysis stage. @@ -825,134 +810,6 @@ is_remote(_) -> false. -type mod_records() :: dict:dict(module(), type_table()). --spec t_solve_remote(erl_type(), sets:set(mfa()), mod_records()) -> erl_type(). - -t_solve_remote(Type, ExpTypes, Records) -> - {RT, _RR} = t_solve_remote(Type, ExpTypes, Records, []), - RT. - -t_solve_remote(?function(Domain, Range), ET, R, C) -> - {RT1, RR1} = t_solve_remote(Domain, ET, R, C), - {RT2, RR2} = t_solve_remote(Range, ET, R, C), - {?function(RT1, RT2), RR1 ++ RR2}; -t_solve_remote(?list(Types, Term, Size), ET, R, C) -> - {RT1, RR1} = t_solve_remote(Types, ET, R, C), - {RT2, RR2} = t_solve_remote(Term, ET, R, C), - {?list(RT1, RT2, Size), RR1 ++ RR2}; -t_solve_remote(?product(Types), ET, R, C) -> - {RL, RR} = list_solve_remote(Types, ET, R, C), - {?product(RL), RR}; -t_solve_remote(?opaque(Set), ET, R, C) -> - List = ordsets:to_list(Set), - {NewList, RR} = opaques_solve_remote(List, ET, R, C), - {?opaque(ordsets:from_list(NewList)), RR}; -t_solve_remote(?tuple(?any, _, _) = T, _ET, _R, _C) -> {T, []}; -t_solve_remote(?tuple(Types, _Arity, _Tag), ET, R, C) -> - {RL, RR} = list_solve_remote(Types, ET, R, C), - {t_tuple(RL), RR}; -t_solve_remote(?tuple_set(Set), ET, R, C) -> - {NewTuples, RR} = tuples_solve_remote(Set, ET, R, C), - {t_sup(NewTuples), RR}; -t_solve_remote(?remote(Set), ET, R, C) -> - RemoteList = ordsets:to_list(Set), - {RL, RR} = list_solve_remote_type(RemoteList, ET, R, C), - {t_sup(RL), RR}; -t_solve_remote(?union(List), ET, R, C) -> - {RL, RR} = list_solve_remote(List, ET, R, C), - {t_sup(RL), RR}; -t_solve_remote(T, _ET, _R, _C) -> {T, []}. - -t_solve_remote_type(#remote{mod = RemMod, name = Name, args = Args0} = RemType, - ET, R, C) -> - Args = lists:map(fun(A) -> - {Arg, _} = t_solve_remote(A, ET, R, C), - Arg - end, Args0), - ArgsLen = length(Args), - case dict:find(RemMod, R) of - error -> - self() ! 
{self(), ext_types, {RemMod, Name, ArgsLen}}, - {t_any(), []}; - {ok, RemDict} -> - MFA = {RemMod, Name, ArgsLen}, - case sets:is_element(MFA, ET) of - true -> - case lookup_type(Name, ArgsLen, RemDict) of - {type, {_Mod, Type, ArgNames}} -> - {NewType, NewCycle, NewRR} = - case can_unfold_more(RemType, C) of - true -> - List = lists:zip(ArgNames, Args), - TmpVarDict = dict:from_list(List), - {t_from_form(Type, RemDict, TmpVarDict), [RemType|C], []}; - false -> - {t_any(), C, [RemType]} - end, - {RT, RR} = t_solve_remote(NewType, ET, R, NewCycle), - RetRR = NewRR ++ RR, - RT1 = - case lists:member(RemType, RetRR) of - true -> t_limit(RT, ?REC_TYPE_LIMIT); - false -> RT - end, - {RT1, RetRR}; - {opaque, {Mod, Type, ArgNames}} -> - List = lists:zip(ArgNames, Args), - TmpVarDict = dict:from_list(List), - {Rep, NewCycle, NewRR} = - case can_unfold_more(RemType, C) of - true -> - {t_from_form(Type, RemDict, TmpVarDict), [RemType|C], []}; - false -> - {t_any(), C, [RemType]} - end, - {NewRep, RR} = t_solve_remote(Rep, ET, R, NewCycle), - RetRR = NewRR ++ RR, - RT1 = - case lists:member(RemType, RetRR) of - true -> t_limit(NewRep, ?REC_TYPE_LIMIT); - false -> NewRep - end, - {skip_opaque_alias(RT1, Mod, Name, Args), RetRR}; - error -> - Msg = io_lib:format("Unable to find remote type ~w:~w()\n", - [RemMod, Name]), - throw({error, Msg}) - end; - false -> - self() ! {self(), ext_types, {RemMod, Name, ArgsLen}}, - {t_any(), []} - end - end. - -list_solve_remote([], _ET, _R, _C) -> - {[], []}; -list_solve_remote([Type|Types], ET, R, C) -> - {RT, RR1} = t_solve_remote(Type, ET, R, C), - {RL, RR2} = list_solve_remote(Types, ET, R, C), - {[RT|RL], RR1 ++ RR2}. - -list_solve_remote_type([], _ET, _R, _C) -> - {[], []}; -list_solve_remote_type([Type|Types], ET, R, C) -> - {RT, RR1} = t_solve_remote_type(Type, ET, R, C), - {RL, RR2} = list_solve_remote_type(Types, ET, R, C), - {[RT|RL], RR1 ++ RR2}. - -opaques_solve_remote([], _ET, _R, _C) -> - {[], []}; -opaques_solve_remote([#opaque{struct = Struct} = Remote|Tail], ET, R, C) -> - {RT, RR1} = t_solve_remote(Struct, ET, R, C), - {LOp, RR2} = opaques_solve_remote(Tail, ET, R, C), - {[Remote#opaque{struct = RT}|LOp], RR1 ++ RR2}. - -tuples_solve_remote([], _ET, _R, _C) -> - {[], []}; -tuples_solve_remote([{_Sz, Tuples}|Tail], ET, R, C) -> - {RL, RR1} = list_solve_remote(Tuples, ET, R, C), - {LSzTpls, RR2} = tuples_solve_remote(Tail, ET, R, C), - {RL ++ LSzTpls, RR1 ++ RR2}. - %%----------------------------------------------------------------------------- %% Unit type. Signals non termination. %% @@ -1987,82 +1844,6 @@ t_parameterized_module() -> t_timeout() -> t_sup(t_non_neg_integer(), t_atom('infinity')). -%%----------------------------------------------------------------------------- -%% Some built-in opaque types -%% - --spec t_array() -> erl_type(). - -t_array() -> - t_opaque(array, array, [t_any()], - t_tuple([t_atom('array'), - t_sup([t_atom('undefined'), t_non_neg_integer()]), - t_sup([t_atom('undefined'), t_non_neg_integer()]), - t_any(), - t_any()])). - --spec t_dict() -> erl_type(). 
- -t_dict() -> - t_opaque(dict, dict, [t_any(), t_any()], - t_tuple([t_atom('dict'), - t_sup([t_atom('undefined'), t_non_neg_integer()]), - t_sup([t_atom('undefined'), t_non_neg_integer()]), - t_sup([t_atom('undefined'), t_non_neg_integer()]), - t_sup([t_atom('undefined'), t_non_neg_integer()]), - t_sup([t_atom('undefined'), t_non_neg_integer()]), - t_sup([t_atom('undefined'), t_non_neg_integer()]), - t_sup([t_atom('undefined'), t_tuple()]), - t_sup([t_atom('undefined'), t_tuple()])])). - --spec t_digraph() -> erl_type(). - -t_digraph() -> - t_opaque(digraph, digraph, [], - t_tuple([t_atom('digraph'), - t_sup(t_atom(), t_tid()), - t_sup(t_atom(), t_tid()), - t_sup(t_atom(), t_tid()), - t_boolean()])). - --spec t_gb_set() -> erl_type(). - -t_gb_set() -> - t_opaque(gb_sets, gb_set, [], - t_tuple([t_non_neg_integer(), t_sup(t_atom('nil'), t_tuple(3))])). - --spec t_gb_tree() -> erl_type(). - -t_gb_tree() -> - t_opaque(gb_trees, gb_tree, [], - t_tuple([t_non_neg_integer(), t_sup(t_atom('nil'), t_tuple(4))])). - --spec t_queue() -> erl_type(). - -t_queue() -> - t_opaque(queue, queue, [t_any()], t_tuple([t_list(), t_list()])). - --spec t_set() -> erl_type(). - -t_set() -> - t_opaque(sets, set, [t_any()], - t_tuple([t_atom('set'), t_non_neg_integer(), t_non_neg_integer(), - t_pos_integer(), t_non_neg_integer(), t_non_neg_integer(), - t_non_neg_integer(), - t_sup([t_atom('undefined'), t_tuple()]), - t_sup([t_atom('undefined'), t_tuple()])])). - --spec t_tid() -> erl_type(). - -t_tid() -> - t_opaque(ets, tid, [], t_integer()). - --spec all_opaque_builtins() -> [erl_type(),...]. - -all_opaque_builtins() -> - [t_array(), t_dict(), t_digraph(), t_gb_set(), - t_gb_tree(), t_queue(), t_set(), t_tid()]. - %%------------------------------------ %% ?none is allowed in products. A product of size 1 is not a product. @@ -2357,14 +2138,19 @@ expand_range_from_set(Range = ?int_range(From, To), Set) -> -spec t_sup([erl_type()]) -> erl_type(). -t_sup([?any|_]) -> - ?any; -t_sup([H1, H2|T]) -> - t_sup([t_sup(H1, H2)|T]); -t_sup([H]) -> - subst_all_vars_to_any(H); -t_sup([]) -> - ?none. +t_sup([]) -> ?none; +t_sup(Ts) -> + case lists:any(fun is_any/1, Ts) of + true -> ?any; + false -> + t_sup1(Ts, []) + end. + +t_sup1([H1, H2|T], L) -> + t_sup1(T, [t_sup(H1, H2)|L]); +t_sup1([T], []) -> subst_all_vars_to_any(T); +t_sup1(Ts, L) -> + t_sup1(Ts++L, []). -spec t_sup(erl_type(), erl_type()) -> erl_type(). @@ -3182,12 +2968,12 @@ t_subst_aux(T, _VarMap) -> subst_all_remote(Type0, Substitute) -> Map = fun(Type) -> - case erl_types:t_is_remote(Type) of + case t_is_remote(Type) of true -> Substitute; false -> Type end end, - erl_types:t_map(Map, Type0). + t_map(Map, Type0). %%----------------------------------------------------------------------------- %% Unification @@ -3317,8 +3103,8 @@ is_opaque_type2(#opaque{mod = Mod1, name = Name1, args = Args1}, Opaques) -> is_type_name(Mod, Name, Args1, Mod, Name, Args2) -> length(Args1) =:= length(Args2); -is_type_name(Mod1, Name1, Args1, Mod2, Name2, Args2) -> - is_same_type_name2(Mod1, Name1, Args1, Mod2, Name2, Args2). +is_type_name(_Mod1, _Name1, _Args1, _Mod2, _Name2, _Args2) -> + false. %% Two functions since t_unify is not symmetric. 
unify_tuple_set_and_tuple1(?tuple_set([{Arity, List}]), @@ -3869,7 +3655,7 @@ t_abstract_records(?tuple(Elements, Arity, ?atom(_) = Tag), RecDict) -> [TagAtom] = atom_vals(Tag), case lookup_record(TagAtom, Arity - 1, RecDict) of error -> t_tuple([t_abstract_records(E, RecDict) || E <- Elements]); - {ok, Fields} -> t_tuple([Tag|[T || {_Name, T} <- Fields]]) + {ok, Fields} -> t_tuple([Tag|[T || {_Name, _Abstr, T} <- Fields]]) end; t_abstract_records(?tuple(Elements, _Arity, _Tag), RecDict) -> t_tuple([t_abstract_records(E, RecDict) || E <- Elements]); @@ -4090,7 +3876,8 @@ record_to_string(Tag, [_|Fields], FieldNames, RecDict) -> FieldStrings = record_fields_to_string(Fields, FieldNames, RecDict, []), "#" ++ atom_to_string(Tag) ++ "{" ++ string:join(FieldStrings, ",") ++ "}". -record_fields_to_string([F|Fs], [{FName, _DefType}|FDefs], RecDict, Acc) -> +record_fields_to_string([F|Fs], [{FName, _Abstr, _DefType}|FDefs], + RecDict, Acc) -> NewAcc = case t_is_equal(F, t_any()) orelse t_is_any_atom('undefined', F) of true -> Acc; @@ -4116,7 +3903,7 @@ record_field_diffs_to_string(?tuple([_|Fs], Arity, Tag), RecDict) -> FieldDiffs = field_diffs(Fs, FieldNames, RecDict, []), string:join(FieldDiffs, " and "). -field_diffs([F|Fs], [{FName, DefType}|FDefs], RecDict, Acc) -> +field_diffs([F|Fs], [{FName, _Abstr, DefType}|FDefs], RecDict, Acc) -> %% Don't care about opaqueness for now. NewAcc = case not t_is_none(t_inf(F, DefType)) of @@ -4156,15 +3943,7 @@ opaque_name(Mod, Name, Extra) -> flat_format("~s(~s)", [S, Extra]). mod_name(Mod, Name) -> - case is_obsolete_opaque_builtin(Mod, Name) of - true -> flat_format("~w", [Name]); - false -> flat_format("~w:~w", [Mod, Name]) - end. - -is_obsolete_opaque_builtin(digraph, digraph) -> true; -is_obsolete_opaque_builtin(gb_sets, gb_set) -> true; -is_obsolete_opaque_builtin(gb_trees, gb_tree) -> true; -is_obsolete_opaque_builtin(_, _) -> false. + flat_format("~w:~w", [Mod, Name]). %%============================================================================= %% @@ -4172,374 +3951,476 @@ is_obsolete_opaque_builtin(_, _) -> false. %% %%============================================================================= --spec t_from_form(parse_form()) -> erl_type(). +-type type_names() :: [type_key() | record_key()]. -t_from_form(Form) -> - t_from_form(Form, dict:new()). +-spec t_from_form(parse_form(), sets:set(mfa()), + module(), mod_records()) -> erl_type(). --spec t_from_form(parse_form(), type_table()) -> erl_type(). +t_from_form(Form, ExpTypes, Module, RecDict) -> + t_from_form(Form, ExpTypes, Module, RecDict, dict:new()). -t_from_form(Form, RecDict) -> - t_from_form(Form, RecDict, dict:new()). +-spec t_from_form(parse_form(), sets:set(mfa()), + module(), mod_records(), var_table()) -> erl_type(). --spec t_from_form(parse_form(), type_table(), var_table()) -> erl_type(). +t_from_form(Form, ExpTypes, Module, RecDict, VarDict) -> + {T, _} = t_from_form1(Form, [], ExpTypes, Module, RecDict, VarDict), + T. + +%% Replace external types with with none(). +-spec t_from_form_without_remote(parse_form(), type_table()) -> erl_type(). -t_from_form(Form, RecDict, VarDict) -> - {T, _R} = t_from_form(Form, [], RecDict, VarDict), +t_from_form_without_remote(Form, TypeTable) -> + Module = mod, + RecDict = dict:from_list([{Module, TypeTable}]), + ExpTypes = replace_by_none, + {T, _} = t_from_form1(Form, [], ExpTypes, Module, RecDict, dict:new()), T. --type type_names() :: [type_key() | record_key()]. +%% REC_TYPE_LIMIT is used for limiting the depth of recursive types. 
+%% EXPAND_LIMIT is used for limiting the size of types by +%% limiting the number of elements of lists within one type form. +%% EXPAND_DEPTH is used in conjunction with EXPAND_LIMIT to make the +%% types balanced (unions will otherwise collapse to any()) by limiting +%% the depth the same way as t_limit/2 does. + +-type expand_limit() :: integer(). + +-type expand_depth() :: integer(). --spec t_from_form(parse_form(), type_names(), type_table(), var_table()) -> - {erl_type(), type_names()}. +t_from_form1(Form, TypeNames, ET, M, MR, V) -> + t_from_form1(Form, TypeNames, ET, M, MR, V, ?EXPAND_DEPTH). -t_from_form({var, _L, '_'}, _TypeNames, _RecDict, _VarDict) -> - {t_any(), []}; -t_from_form({var, _L, Name}, _TypeNames, _RecDict, VarDict) -> - case dict:find(Name, VarDict) of - error -> {t_var(Name), []}; - {ok, Val} -> {Val, []} +t_from_form1(Form, TypeNames, ET, M, MR, V, D) -> + L = ?EXPAND_LIMIT, + {T, L1} = t_from_form(Form, TypeNames, ET, M, MR, V, D, L), + if + L1 =< 0, D > 1 -> + D1 = D div 2, + t_from_form1(Form, TypeNames, ET, M, MR, V, D1); + true -> + {T, L1} + end. + +-spec t_from_form(parse_form(), type_names(), + sets:set(mfa()) | 'replace_by_none', + module(), mod_records(), var_table(), + expand_depth(), expand_limit()) + -> {erl_type(), expand_limit()}. + +%% If there is something wrong with parse_form() +%% throw({error, io_lib:chars()} is called; +%% for unknown remote types +%% self() ! {self(), ext_types, {RemMod, Name, ArgsLen}} +%% is called, unless 'replace_by_none' is given. +%% +%% It is assumed that M can be found in MR. + +t_from_form(_, _TypeNames, _ET, _M, _MR, _V, D, L) when D =< 0 ; L =< 0 -> + {t_any(), L}; +t_from_form({var, _L, '_'}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_any(), L}; +t_from_form({var, _L, Name}, _TypeNames, _ET, _M, _MR, V, _D, L) -> + case dict:find(Name, V) of + error -> {t_var(Name), L}; + {ok, Val} -> {Val, L} end; -t_from_form({ann_type, _L, [_Var, Type]}, TypeNames, RecDict, VarDict) -> - t_from_form(Type, TypeNames, RecDict, VarDict); -t_from_form({paren_type, _L, [Type]}, TypeNames, RecDict, VarDict) -> - t_from_form(Type, TypeNames, RecDict, VarDict); +t_from_form({ann_type, _L, [_Var, Type]}, TypeNames, ET, M, MR, V, D, L) -> + t_from_form(Type, TypeNames, ET, M, MR, V, D, L); +t_from_form({paren_type, _L, [Type]}, TypeNames, ET, M, MR, V, D, L) -> + t_from_form(Type, TypeNames, ET, M, MR, V, D, L); t_from_form({remote_type, _L, [{atom, _, Module}, {atom, _, Type}, Args]}, - TypeNames, RecDict, VarDict) -> - {L, R} = list_from_form(Args, TypeNames, RecDict, VarDict), - {t_remote(Module, Type, L), R}; -t_from_form({atom, _L, Atom}, _TypeNames, _RecDict, _VarDict) -> - {t_atom(Atom), []}; -t_from_form({integer, _L, Int}, _TypeNames, _RecDict, _VarDict) -> - {t_integer(Int), []}; -t_from_form({op, _L, _Op, _Arg} = Op, _TypeNames, _RecDict, _VarDict) -> + TypeNames, ET, M, MR, V, D, L) -> + remote_from_form(Module, Type, Args, TypeNames, ET, M, MR, V, D, L); +t_from_form({atom, _L, Atom}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_atom(Atom), L}; +t_from_form({integer, _L, Int}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_integer(Int), L}; +t_from_form({op, _L, _Op, _Arg} = Op, _TypeNames, _ET, _M, _MR, _V, _D, L) -> case erl_eval:partial_eval(Op) of {integer, _, Val} -> - {t_integer(Val), []}; + {t_integer(Val), L}; _ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Op])}) end; t_from_form({op, _L, _Op, _Arg1, _Arg2} = Op, _TypeNames, - _RecDict, _VarDict) -> + _ET, _M, _MR, _V, _D, L) -> case 
erl_eval:partial_eval(Op) of {integer, _, Val} -> - {t_integer(Val), []}; + {t_integer(Val), L}; _ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Op])}) end; -t_from_form({type, _L, any, []}, _TypeNames, _RecDict, _VarDict) -> - {t_any(), []}; -t_from_form({type, _L, arity, []}, _TypeNames, _RecDict, _VarDict) -> - {t_arity(), []}; -t_from_form({type, _L, array, []}, TypeNames, RecDict, VarDict) -> - builtin_type(array, t_array(), [], TypeNames, RecDict, VarDict); -t_from_form({type, _L, atom, []}, _TypeNames, _RecDict, _VarDict) -> - {t_atom(), []}; -t_from_form({type, _L, binary, []}, _TypeNames, _RecDict, _VarDict) -> - {t_binary(), []}; +t_from_form({type, _L, any, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_any(), L}; +t_from_form({type, _L, arity, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_arity(), L}; +t_from_form({type, _L, atom, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_atom(), L}; +t_from_form({type, _L, binary, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_binary(), L}; t_from_form({type, _L, binary, [Base, Unit]} = Type, - _TypeNames, _RecDict, _VarDict) -> + _TypeNames, _ET, _M, _MR, _V, _D, L) -> case {erl_eval:partial_eval(Base), erl_eval:partial_eval(Unit)} of {{integer, _, B}, {integer, _, U}} when B >= 0, U >= 0 -> - {t_bitstr(U, B), []}; + {t_bitstr(U, B), L}; _ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Type])}) end; -t_from_form({type, _L, bitstring, []}, _TypeNames, _RecDict, _VarDict) -> - {t_bitstr(), []}; -t_from_form({type, _L, bool, []}, _TypeNames, _RecDict, _VarDict) -> - {t_boolean(), []}; % XXX: Temporarily -t_from_form({type, _L, boolean, []}, _TypeNames, _RecDict, _VarDict) -> - {t_boolean(), []}; -t_from_form({type, _L, byte, []}, _TypeNames, _RecDict, _VarDict) -> - {t_byte(), []}; -t_from_form({type, _L, char, []}, _TypeNames, _RecDict, _VarDict) -> - {t_char(), []}; -t_from_form({type, _L, dict, []}, TypeNames, RecDict, VarDict) -> - builtin_type(dict, t_dict(), [], TypeNames, RecDict, VarDict); -t_from_form({type, _L, digraph, []}, TypeNames, RecDict, VarDict) -> - builtin_type(digraph, t_digraph(), [], TypeNames, RecDict, VarDict); -t_from_form({type, _L, float, []}, _TypeNames, _RecDict, _VarDict) -> - {t_float(), []}; -t_from_form({type, _L, function, []}, _TypeNames, _RecDict, _VarDict) -> - {t_fun(), []}; -t_from_form({type, _L, 'fun', []}, _TypeNames, _RecDict, _VarDict) -> - {t_fun(), []}; +t_from_form({type, _L, bitstring, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_bitstr(), L}; +t_from_form({type, _L, bool, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_boolean(), L}; % XXX: Temporarily +t_from_form({type, _L, boolean, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_boolean(), L}; +t_from_form({type, _L, byte, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_byte(), L}; +t_from_form({type, _L, char, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_char(), L}; +t_from_form({type, _L, float, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_float(), L}; +t_from_form({type, _L, function, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_fun(), L}; +t_from_form({type, _L, 'fun', []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_fun(), L}; t_from_form({type, _L, 'fun', [{type, _, any}, Range]}, TypeNames, - RecDict, VarDict) -> - {T, R} = t_from_form(Range, TypeNames, RecDict, VarDict), - {t_fun(T), R}; + ET, M, MR, V, D, L) -> + {T, L1} = t_from_form(Range, TypeNames, ET, M, MR, V, D - 1, L - 1), + {t_fun(T), L1}; t_from_form({type, _L, 'fun', [{type, _, product, Domain}, 
Range]}, - TypeNames, RecDict, VarDict) -> - {L, R1} = list_from_form(Domain, TypeNames, RecDict, VarDict), - {T, R2} = t_from_form(Range, TypeNames, RecDict, VarDict), - {t_fun(L, T), R1 ++ R2}; -t_from_form({type, _L, gb_set, []}, TypeNames, RecDict, VarDict) -> - builtin_type(gb_set, t_gb_set(), [], TypeNames, RecDict, VarDict); -t_from_form({type, _L, gb_tree, []}, TypeNames, RecDict, VarDict) -> - builtin_type(gb_tree, t_gb_tree(), [], TypeNames, RecDict, VarDict); -t_from_form({type, _L, identifier, []}, _TypeNames, _RecDict, _VarDict) -> - {t_identifier(), []}; -t_from_form({type, _L, integer, []}, _TypeNames, _RecDict, _VarDict) -> - {t_integer(), []}; -t_from_form({type, _L, iodata, []}, _TypeNames, _RecDict, _VarDict) -> - {t_iodata(), []}; -t_from_form({type, _L, iolist, []}, _TypeNames, _RecDict, _VarDict) -> - {t_iolist(), []}; -t_from_form({type, _L, list, []}, _TypeNames, _RecDict, _VarDict) -> - {t_list(), []}; -t_from_form({type, _L, list, [Type]}, TypeNames, RecDict, VarDict) -> - {T, R} = t_from_form(Type, TypeNames, RecDict, VarDict), - {t_list(T), R}; -t_from_form({type, _L, map, As0}, TypeNames, RecDict, VarDict) -> - As = case is_list(As0) of - true -> As0; - false -> [] - end, - builtin_type(map, t_map([]), As, TypeNames, RecDict, VarDict); -t_from_form({type, _L, mfa, []}, _TypeNames, _RecDict, _VarDict) -> - {t_mfa(), []}; -t_from_form({type, _L, module, []}, _TypeNames, _RecDict, _VarDict) -> - {t_module(), []}; -t_from_form({type, _L, nil, []}, _TypeNames, _RecDict, _VarDict) -> - {t_nil(), []}; -t_from_form({type, _L, neg_integer, []}, _TypeNames, _RecDict, _VarDict) -> - {t_neg_integer(), []}; -t_from_form({type, _L, non_neg_integer, []}, _TypeNames, _RecDict, - _VarDict) -> - {t_non_neg_integer(), []}; -t_from_form({type, _L, no_return, []}, _TypeNames, _RecDict, _VarDict) -> - {t_unit(), []}; -t_from_form({type, _L, node, []}, _TypeNames, _RecDict, _VarDict) -> - {t_node(), []}; -t_from_form({type, _L, none, []}, _TypeNames, _RecDict, _VarDict) -> - {t_none(), []}; -t_from_form({type, _L, nonempty_list, []}, _TypeNames, _RecDict, _VarDict) -> - {t_nonempty_list(), []}; -t_from_form({type, _L, nonempty_list, [Type]}, TypeNames, RecDict, VarDict) -> - {T, R} = t_from_form(Type, TypeNames, RecDict, VarDict), - {t_nonempty_list(T), R}; + TypeNames, ET, M, MR, V, D, L) -> + {Dom1, L1} = list_from_form(Domain, TypeNames, ET, M, MR, V, D, L), + {Ran1, L2} = t_from_form(Range, TypeNames, ET, M, MR, V, D - 1, L1), + {t_fun(Dom1, Ran1), L2}; +t_from_form({type, _L, identifier, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_identifier(), L}; +t_from_form({type, _L, integer, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_integer(), L}; +t_from_form({type, _L, iodata, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_iodata(), L}; +t_from_form({type, _L, iolist, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_iolist(), L}; +t_from_form({type, _L, list, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_list(), L}; +t_from_form({type, _L, list, [Type]}, TypeNames, ET, M, MR, V, D, L) -> + {T, L1} = t_from_form(Type, TypeNames, ET, M, MR, V, D - 1, L - 1), + {t_list(T), L1}; +t_from_form({type, _L, map, _}, TypeNames, ET, M, MR, V, D, L) -> + builtin_type(map, t_map([]), TypeNames, ET, M, MR, V, D, L); +t_from_form({type, _L, mfa, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_mfa(), L}; +t_from_form({type, _L, module, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_module(), L}; +t_from_form({type, _L, nil, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + 
{t_nil(), L}; +t_from_form({type, _L, neg_integer, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_neg_integer(), L}; +t_from_form({type, _L, non_neg_integer, []}, _TypeNames, _ET, _M, _MR, + _V, _D, L) -> + {t_non_neg_integer(), L}; +t_from_form({type, _L, no_return, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_unit(), L}; +t_from_form({type, _L, node, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_node(), L}; +t_from_form({type, _L, none, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_none(), L}; +t_from_form({type, _L, nonempty_list, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_nonempty_list(), L}; +t_from_form({type, _L, nonempty_list, [Type]}, TypeNames, ET, M, MR, V, D, L) -> + {T, L1} = t_from_form(Type, TypeNames, ET, M, MR, V, D, L - 1), + {t_nonempty_list(T), L1}; t_from_form({type, _L, nonempty_improper_list, [Cont, Term]}, TypeNames, - RecDict, VarDict) -> - {T1, R1} = t_from_form(Cont, TypeNames, RecDict, VarDict), - {T2, R2} = t_from_form(Term, TypeNames, RecDict, VarDict), - {t_cons(T1, T2), R1 ++ R2}; + ET, M, MR, V, D, L) -> + {T1, L1} = t_from_form(Cont, TypeNames, ET, M, MR, V, D, L - 1), + {T2, L2} = t_from_form(Term, TypeNames, ET, M, MR, V, D, L1), + {t_cons(T1, T2), L2}; t_from_form({type, _L, nonempty_maybe_improper_list, []}, _TypeNames, - _RecDict, _VarDict) -> - {t_cons(?any, ?any), []}; + _ET, _M, _MR, _V, _D, L) -> + {t_cons(?any, ?any), L}; t_from_form({type, _L, nonempty_maybe_improper_list, [Cont, Term]}, - TypeNames, RecDict, VarDict) -> - {T1, R1} = t_from_form(Cont, TypeNames, RecDict, VarDict), - {T2, R2} = t_from_form(Term, TypeNames, RecDict, VarDict), - {t_cons(T1, T2), R1 ++ R2}; -t_from_form({type, _L, nonempty_string, []}, _TypeNames, _RecDict, - _VarDict) -> - {t_nonempty_string(), []}; -t_from_form({type, _L, number, []}, _TypeNames, _RecDict, _VarDict) -> - {t_number(), []}; -t_from_form({type, _L, pid, []}, _TypeNames, _RecDict, _VarDict) -> - {t_pid(), []}; -t_from_form({type, _L, port, []}, _TypeNames, _RecDict, _VarDict) -> - {t_port(), []}; -t_from_form({type, _L, pos_integer, []}, _TypeNames, _RecDict, _VarDict) -> - {t_pos_integer(), []}; + TypeNames, ET, M, MR, V, D, L) -> + {T1, L1} = t_from_form(Cont, TypeNames, ET, M, MR, V, D, L - 1), + {T2, L2} = t_from_form(Term, TypeNames, ET, M, MR, V, D, L1), + {t_cons(T1, T2), L2}; +t_from_form({type, _L, nonempty_string, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_nonempty_string(), L}; +t_from_form({type, _L, number, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_number(), L}; +t_from_form({type, _L, pid, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_pid(), L}; +t_from_form({type, _L, port, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_port(), L}; +t_from_form({type, _L, pos_integer, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_pos_integer(), L}; t_from_form({type, _L, maybe_improper_list, []}, _TypeNames, - _RecDict, _VarDict) -> - {t_maybe_improper_list(), []}; + _ET, _M, _MR, _V, _D, L) -> + {t_maybe_improper_list(), L}; t_from_form({type, _L, maybe_improper_list, [Content, Termination]}, - TypeNames, RecDict, VarDict) -> - {T1, R1} = t_from_form(Content, TypeNames, RecDict, VarDict), - {T2, R2} = t_from_form(Termination, TypeNames, RecDict, VarDict), - {t_maybe_improper_list(T1, T2), R1 ++ R2}; -t_from_form({type, _L, product, Elements}, TypeNames, RecDict, VarDict) -> - {L, R} = list_from_form(Elements, TypeNames, RecDict, VarDict), - {t_product(L), R}; -t_from_form({type, _L, queue, []}, TypeNames, RecDict, VarDict) -> - builtin_type(queue, 
t_queue(), [], TypeNames, RecDict, VarDict); + TypeNames, ET, M, MR, V, D, L) -> + {T1, L1} = t_from_form(Content, TypeNames, ET, M, MR, V, D, L - 1), + {T2, L2} = t_from_form(Termination, TypeNames, ET, M, MR, V, D, L1), + {t_maybe_improper_list(T1, T2), L2}; +t_from_form({type, _L, product, Elements}, TypeNames, ET, M, MR, V, D, L) -> + {Lst, L1} = list_from_form(Elements, TypeNames, ET, M, MR, V, D - 1, L), + {t_product(Lst), L1}; t_from_form({type, _L, range, [From, To]} = Type, - _TypeNames, _RecDict, _VarDict) -> + _TypeNames, _ET, _M, _MR, _V, _D, L) -> case {erl_eval:partial_eval(From), erl_eval:partial_eval(To)} of {{integer, _, FromVal}, {integer, _, ToVal}} -> - {t_from_range(FromVal, ToVal), []}; + {t_from_range(FromVal, ToVal), L}; _ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Type])}) end; -t_from_form({type, _L, record, [Name|Fields]}, TypeNames, RecDict, VarDict) -> - record_from_form(Name, Fields, TypeNames, RecDict, VarDict); -t_from_form({type, _L, reference, []}, _TypeNames, _RecDict, _VarDict) -> - {t_reference(), []}; -t_from_form({type, _L, set, []}, TypeNames, RecDict, VarDict) -> - builtin_type(set, t_set(), [], TypeNames, RecDict, VarDict); -t_from_form({type, _L, string, []}, _TypeNames, _RecDict, _VarDict) -> - {t_string(), []}; -t_from_form({type, _L, term, []}, _TypeNames, _RecDict, _VarDict) -> - {t_any(), []}; -t_from_form({type, _L, tid, []}, TypeNames, RecDict, VarDict) -> - builtin_type(tid, t_tid(), [], TypeNames, RecDict, VarDict); -t_from_form({type, _L, timeout, []}, _TypeNames, _RecDict, _VarDict) -> - {t_timeout(), []}; -t_from_form({type, _L, tuple, any}, _TypeNames, _RecDict, _VarDict) -> - {t_tuple(), []}; -t_from_form({type, _L, tuple, Args}, TypeNames, RecDict, VarDict) -> - {L, R} = list_from_form(Args, TypeNames, RecDict, VarDict), - {t_tuple(L), R}; -t_from_form({type, _L, union, Args}, TypeNames, RecDict, VarDict) -> - {L, R} = list_from_form(Args, TypeNames, RecDict, VarDict), - {t_sup(L), R}; -t_from_form({type, _L, Name, Args}, TypeNames, RecDict, VarDict) -> - type_from_form(Name, Args, TypeNames, RecDict, VarDict); +t_from_form({type, _L, record, [Name|Fields]}, TypeNames, ET, M, MR, V, D, L) -> + record_from_form(Name, Fields, TypeNames, ET, M, MR, V, D, L); +t_from_form({type, _L, reference, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_reference(), L}; +t_from_form({type, _L, string, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_string(), L}; +t_from_form({type, _L, term, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_any(), L}; +t_from_form({type, _L, timeout, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_timeout(), L}; +t_from_form({type, _L, tuple, any}, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {t_tuple(), L}; +t_from_form({type, _L, tuple, Args}, TypeNames, ET, M, MR, V, D, L) -> + {Lst, L1} = list_from_form(Args, TypeNames, ET, M, MR, V, D - 1, L), + {t_tuple(Lst), L1}; +t_from_form({type, _L, union, Args}, TypeNames, ET, M, MR, V, D, L) -> + {Lst, L1} = list_from_form(Args, TypeNames, ET, M, MR, V, D, L), + {t_sup(Lst), L1}; +t_from_form({user_type, _L, Name, Args}, TypeNames, ET, M, MR, V, D, L) -> + type_from_form(Name, Args, TypeNames, ET, M, MR, V, D, L); +t_from_form({type, _L, Name, Args}, TypeNames, ET, M, MR, V, D, L) -> + %% Compatibility: modules compiled before Erlang/OTP 18.0. + type_from_form(Name, Args, TypeNames, ET, M, MR, V, D, L); t_from_form({opaque, _L, Name, {Mod, Args, Rep}}, _TypeNames, - _RecDict, _VarDict) -> - {t_opaque(Mod, Name, Args, Rep), []}. 
- -builtin_type(Name, Type, Args, TypeNames, RecDict, VarDict) -> - case lookup_type(Name, length(Args), RecDict) of - {_, {_M, _T, _A}} -> - type_from_form(Name, Args, TypeNames, RecDict, VarDict); + _ET, _M, _MR, _V, _D, L) -> + %% XXX. To be removed. + {t_opaque(Mod, Name, Args, Rep), L}. + +builtin_type(Name, Type, TypeNames, ET, M, MR, V, D, L) -> + case dict:find(M, MR) of + {ok, R} -> + case lookup_type(Name, 0, R) of + {_, {{_M, _F, _A}, _T}} -> + type_from_form(Name, [], TypeNames, ET, M, MR, V, D, L); + error -> + {Type, L} + end; error -> - {Type, []} + {Type, L} end. -type_from_form(Name, Args, TypeNames, RecDict, VarDict) -> +type_from_form(Name, Args, TypeNames, ET, M, MR, V, D, L) -> ArgsLen = length(Args), - ArgTypes = forms_to_types(Args, TypeNames, RecDict, VarDict), - case lookup_type(Name, ArgsLen, RecDict) of - {type, {_Module, Type, ArgNames}} -> - TypeName = {type, Name, ArgsLen}, + {ArgTypes, L1} = list_from_form(Args, TypeNames, ET, M, MR, V, D, L), + {ok, R} = dict:find(M, MR), + case lookup_type(Name, ArgsLen, R) of + {type, {{Module, Form, ArgNames}, _Type}} -> + TypeName = {type, Module, Name, ArgsLen}, case can_unfold_more(TypeName, TypeNames) of true -> List = lists:zip(ArgNames, ArgTypes), - TmpVarDict = dict:from_list(List), - {T, R} = t_from_form(Type, [TypeName|TypeNames], - RecDict, TmpVarDict), - case lists:member(TypeName, R) of - true -> {t_limit(T, ?REC_TYPE_LIMIT), R}; - false -> {T, R} - end; - false -> {t_any(), [TypeName]} + TmpV = dict:from_list(List), + t_from_form(Form, [TypeName|TypeNames], ET, M, MR, TmpV, D, L1); + false -> + {t_any(), L1} end; - {opaque, {Module, Type, ArgNames}} -> - TypeName = {opaque, Name, ArgsLen}, - {Rep, Rret} = + {opaque, {{Module, Form, ArgNames}, Type}} -> + TypeName = {opaque, Module, Name, ArgsLen}, + {Rep, L2} = case can_unfold_more(TypeName, TypeNames) of true -> List = lists:zip(ArgNames, ArgTypes), - TmpVarDict = dict:from_list(List), - {T, R} = t_from_form(Type, [TypeName|TypeNames], - RecDict, TmpVarDict), - case lists:member(TypeName, R) of - true -> {t_limit(T, ?REC_TYPE_LIMIT), R}; - false -> {T, R} - end; - false -> {t_any(), [TypeName]} + TmpV = dict:from_list(List), + t_from_form(Form, [TypeName|TypeNames], ET, M, MR, TmpV, D, L1); + false -> {t_any(), L1} end, + Rep1 = choose_opaque_type(Rep, Type), Args2 = [subst_all_vars_to_any(ArgType) || ArgType <- ArgTypes], - {skip_opaque_alias(Rep, Module, Name, Args2), Rret}; + {skip_opaque_alias(Rep1, Module, Name, Args2), L2}; error -> Msg = io_lib:format("Unable to find type ~w/~w\n", [Name, ArgsLen]), throw({error, Msg}) end. -forms_to_types(Forms, TypeNames, RecDict, VarDict) -> - {Types, _} = list_from_form(Forms, TypeNames, RecDict, VarDict), - Types. - skip_opaque_alias(?opaque(_) = T, _Mod, _Name, _Args) -> T; skip_opaque_alias(T, Module, Name, Args) -> t_opaque(Module, Name, Args, T). -record_from_form({atom, _, Name}, ModFields, TypeNames, RecDict, VarDict) -> +remote_from_form(RemMod, Name, Args, TypeNames, ET, M, MR, V, D, L) -> + {ArgTypes, L1} = list_from_form(Args, TypeNames, ET, M, MR, V, D, L), + if + ET =:= replace_by_none -> + {t_none(), L1}; + true -> + ArgsLen = length(Args), + case dict:find(RemMod, MR) of + error -> + self() ! 
{self(), ext_types, {RemMod, Name, ArgsLen}}, + {t_any(), L1}; + {ok, RemDict} -> + MFA = {RemMod, Name, ArgsLen}, + case sets:is_element(MFA, ET) of + true -> + case lookup_type(Name, ArgsLen, RemDict) of + {type, {{_Mod, Form, ArgNames}, _Type}} -> + RemType = {type, RemMod, Name, ArgsLen}, + case can_unfold_more(RemType, TypeNames) of + true -> + List = lists:zip(ArgNames, ArgTypes), + TmpVarDict = dict:from_list(List), + NewTypeNames = [RemType|TypeNames], + t_from_form(Form, NewTypeNames, ET, + RemMod, MR, TmpVarDict, D, L1); + false -> + {t_any(), L1} + end; + {opaque, {{Mod, Form, ArgNames}, Type}} -> + RemType = {opaque, RemMod, Name, ArgsLen}, + List = lists:zip(ArgNames, ArgTypes), + TmpVarDict = dict:from_list(List), + {NewRep, L2} = + case can_unfold_more(RemType, TypeNames) of + true -> + NewTypeNames = [RemType|TypeNames], + t_from_form(Form, NewTypeNames, ET, RemMod, MR, + TmpVarDict, D, L1); + false -> + {t_any(), L1} + end, + NewRep1 = choose_opaque_type(NewRep, Type), + {skip_opaque_alias(NewRep1, Mod, Name, ArgTypes), L2}; + error -> + Msg = io_lib:format("Unable to find remote type ~w:~w()\n", + [RemMod, Name]), + throw({error, Msg}) + end; + false -> + self() ! {self(), ext_types, {RemMod, Name, ArgsLen}}, + {t_any(), L1} + end + end + end. + +%% Opaque types (both local and remote) are problematic when it comes +%% to the limits (TypeNames, D, and L). The reason is that if any() is +%% substituted for a more specialized subtype of an opaque type, the +%% property stated along with decorate_with_opaque() (the type has to +%% be a subtype of the declared type) no longer holds. +%% +%% The less than perfect remedy: if the opaque type created from a +%% form is not a subset of the declared type, the declared type is +%% used instead, effectively bypassing the limits, and potentially +%% resulting in huge types. +choose_opaque_type(Type, DeclType) -> + case + t_is_subtype(subst_all_vars_to_any(Type), + subst_all_vars_to_any(DeclType)) + of + true -> Type; + false -> DeclType + end. + +record_from_form({atom, _, Name}, ModFields, TypeNames, ET, M, MR, V, D, L) -> case can_unfold_more({record, Name}, TypeNames) of true -> - case lookup_record(Name, RecDict) of + {ok, R} = dict:find(M, MR), + case lookup_record(Name, R) of {ok, DeclFields} -> - TypeNames1 = [{record, Name}|TypeNames], - AreTyped = [is_erl_type(FieldType) - || {_FieldName, FieldType} <- DeclFields], - {DeclFields1, R1} = - case lists:all(fun(Elem) -> Elem end, AreTyped) of - true -> {DeclFields, []}; - false -> fields_from_form(DeclFields, TypeNames1, - RecDict, dict:new()) - end, - {GetModRec, R2} = get_mod_record(ModFields, DeclFields1, - TypeNames1, - RecDict, VarDict), + NewTypeNames = [{record, Name}|TypeNames], + {GetModRec, L1} = get_mod_record(ModFields, DeclFields, + NewTypeNames, ET, M, MR, V, D, L), case GetModRec of {error, FieldName} -> throw({error, io_lib:format("Illegal declaration of #~w{~w}\n", [Name, FieldName])}); {ok, NewFields} -> - {t_tuple( - [t_atom(Name)|[Type || {_FieldName, Type} <- NewFields]]), - R1 ++ R2} + {NewFields1, L2} = + fields_from_form(NewFields, NewTypeNames, ET, M, MR, + dict:new(), D, L1), + Rec = t_tuple( + [t_atom(Name)|[Type + || {_FieldName, Type} <- NewFields1]]), + {Rec, L2} end; error -> throw({error, io_lib:format("Unknown record #~w{}\n", [Name])}) end; - false -> {t_any(), []} + false -> + {t_any(), L} end. 
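Tying the choose_opaque_type/2 comment above to a concrete (invented) case: if the expansion limits collapsed the representation to any(), the subtype test fails and the declared representation is kept instead.

    %% Hypothetical illustration; choose_opaque_type/2 is local to this module.
    Declared  = t_tuple([t_list(), t_list()]),   %% declared representation (invented)
    Collapsed = t_any(),                         %% what the expansion limits produced
    false     = t_is_subtype(Collapsed, Declared),
    Declared  = choose_opaque_type(Collapsed, Declared).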
-get_mod_record([], DeclFields, _TypeNames, _RecDict, _VarDict) -> - {{ok, DeclFields}, []}; -get_mod_record(ModFields, DeclFields, TypeNames, RecDict, VarDict) -> - DeclFieldsDict = orddict:from_list(DeclFields), - {ModFieldsDict, R} = build_field_dict(ModFields, TypeNames, - RecDict, VarDict), - case get_mod_record(DeclFieldsDict, ModFieldsDict, []) of - {error, _FieldName} = Error -> {Error, R}; - {ok, FinalOrdDict} -> - {{ok, [{FieldName, orddict:fetch(FieldName, FinalOrdDict)} - || {FieldName, _} <- DeclFields]}, - R} +get_mod_record([], DeclFields, _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {{ok, DeclFields}, L}; +get_mod_record(ModFields, DeclFields, TypeNames, ET, M, MR, V, D, L) -> + DeclFieldsDict = lists:keysort(1, DeclFields), + {ModFieldsDict, L1} = + build_field_dict(ModFields, TypeNames, ET, M, MR, V, D, L), + case get_mod_record_types(DeclFieldsDict, ModFieldsDict, []) of + {error, _FieldName} = Error -> {Error, L1}; + {ok, FinalKeyDict} -> + Fields = [lists:keyfind(FieldName, 1, FinalKeyDict) + || {FieldName, _, _} <- DeclFields], + {{ok, Fields}, L1} end. -build_field_dict(FieldTypes, TypeNames, RecDict, VarDict) -> - build_field_dict(FieldTypes, TypeNames, RecDict, VarDict, []). - -build_field_dict([{type, _, field_type, [{atom, _, Name}, Type]}|Left], - TypeNames, RecDict, VarDict, Acc) -> - {T, R1} = t_from_form(Type, TypeNames, RecDict, VarDict), - NewAcc = [{Name, T}|Acc], - {D, R2} = build_field_dict(Left, TypeNames, RecDict, VarDict, NewAcc), - {D, R1 ++ R2}; -build_field_dict([], _TypeNames, _RecDict, _VarDict, Acc) -> - {orddict:from_list(Acc), []}. - -get_mod_record([{FieldName, DeclType}|Left1], - [{FieldName, ModType}|Left2], Acc) -> - ModTypeNoVars = subst_all_vars_to_any(ModType), - case - contains_remote(ModTypeNoVars) - orelse contains_remote(DeclType) - orelse t_is_subtype(ModTypeNoVars, DeclType) - of +build_field_dict(FieldTypes, TypeNames, ET, M, MR, V, D, L) -> + build_field_dict(FieldTypes, TypeNames, ET, M, MR, V, D, L, []). + +build_field_dict([{type, _, field_type, [{atom, _, Name}, Type]}|Left], + TypeNames, ET, M, MR, V, D, L, Acc) -> + {T, L1} = t_from_form(Type, TypeNames, ET, M, MR, V, D, L - 1), + %% The cached record field type (DeclType) in + %% get_mod_record_types()), was created with a similar call as TT. + %% Using T for the subtype test does not work since any() is not + %% always a subset of the field type. + TT = t_from_form(Type, ET, M, MR, V), + NewAcc = [{Name, Type, T, TT}|Acc], + {Dict, L2} = + build_field_dict(Left, TypeNames, ET, M, MR, V, D, L1, NewAcc), + {Dict, L2}; +build_field_dict([], _TypeNames, _ET, _M, _MR, _V, _D, L, Acc) -> + {lists:keysort(1, Acc), L}. 
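As a made-up illustration of what the subtype test mentioned in the comment above ends up enforcing: a type form in a contract may only narrow a record field, never widen it. The record and specs below are hypothetical input, not part of the patch:

    -record(r, {f :: integer()}).

    -spec ok_spec(#r{f :: 1..10}) -> ok.     %% accepted: 1..10 narrows integer()
    -spec bad_spec(#r{f :: atom()}) -> ok.   %% rejected: "Illegal declaration of #r{f}"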
+ +get_mod_record_types([{FieldName, _Abstr, DeclType}|Left1], + [{FieldName, TypeForm, ModType, ModTypeTest}|Left2], + Acc) -> + ModTypeNoVars = subst_all_vars_to_any(ModTypeTest), + case t_is_subtype(ModTypeNoVars, DeclType) of false -> {error, FieldName}; - true -> get_mod_record(Left1, Left2, [{FieldName, ModType}|Acc]) + true -> get_mod_record_types(Left1, Left2, + [{FieldName, TypeForm, ModType}|Acc]) end; -get_mod_record([{FieldName1, _DeclType} = DT|Left1], - [{FieldName2, _ModType}|_] = List2, - Acc) when FieldName1 < FieldName2 -> - get_mod_record(Left1, List2, [DT|Acc]); -get_mod_record(DeclFields, [], Acc) -> - {ok, orddict:from_list(Acc ++ DeclFields)}; -get_mod_record(_, [{FieldName2, _ModType}|_], _Acc) -> +get_mod_record_types([{FieldName1, _Abstr, _DeclType} = DT|Left1], + [{FieldName2, _FormType, _ModType, _TT}|_] = List2, + Acc) when FieldName1 < FieldName2 -> + get_mod_record_types(Left1, List2, [DT|Acc]); +get_mod_record_types(Left1, [], Acc) -> + {ok, lists:keysort(1, Left1++Acc)}; +get_mod_record_types(_, [{FieldName2, _FormType, _ModType, _TT}|_], _Acc) -> {error, FieldName2}. -contains_remote(Type) -> - TypeNoRemote = subst_all_remote(Type, t_none()), - not t_is_equal(Type, TypeNoRemote). - -fields_from_form([], _TypeNames, _RecDict, _VarDict) -> - {[], []}; -fields_from_form([{Name, Type}|Tail], TypeNames, RecDict, - VarDict) -> - {T, R1} = t_from_form(Type, TypeNames, RecDict, VarDict), - {F, R2} = fields_from_form(Tail, TypeNames, RecDict, VarDict), - {[{Name, T}|F], R1 ++ R2}. - -list_from_form([], _TypeNames, _RecDict, _VarDict) -> - {[], []}; -list_from_form([H|Tail], TypeNames, RecDict, VarDict) -> - {T, R1} = t_from_form(H, TypeNames, RecDict, VarDict), - {L, R2} = list_from_form(Tail, TypeNames, RecDict, VarDict), - {[T|L], R1 ++ R2}. +%% It is important to create a limited version of the record type +%% since nested record types can otherwise easily result in huge +%% terms. +fields_from_form([], _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {[], L}; +fields_from_form([{Name, Abstr, _Type}|Tail], TypeNames, ET, M, MR, + V, D, L) -> + {T, L1} = t_from_form(Abstr, TypeNames, ET, M, MR, V, D, L), + {F, L2} = fields_from_form(Tail, TypeNames, ET, M, MR, V, D, L1), + {[{Name, T}|F], L2}. + +list_from_form([], _TypeNames, _ET, _M, _MR, _V, _D, L) -> + {[], L}; +list_from_form([H|Tail], TypeNames, ET, M, MR, V, D, L) -> + {H1, L1} = t_from_form(H, TypeNames, ET, M, MR, V, D, L - 1), + {T1, L2} = list_from_form(Tail, TypeNames, ET, M, MR, V, D, L1), + {[H1|T1], L2}. + +-spec t_var_names([erl_type()]) -> [atom()]. + +t_var_names([{var, _, Name}|L]) when L =/= '_' -> + [Name|t_var_names(L)]; +t_var_names([]) -> + []. -spec t_form_to_string(parse_form()) -> string(). 
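fields_from_form/8 and list_from_form/8 both thread the size budget L through the recursion, decrementing it per element, and t_from_form/8 degrades to t_any() once it is spent, which is what keeps nested record types from producing huge terms. A self-contained sketch of that budgeting pattern on a toy form representation (module and function names are illustrative, not part of the patch):

    -module(limit_sketch).
    -export([from_form/2]).

    %% Translate a nested form, spending budget as we descend. When the
    %% budget is exhausted the result degrades to 'any', so deeply nested
    %% forms stay bounded -- the same role the L parameter plays in
    %% t_from_form/8.
    from_form(_Form, L) when L =< 0 ->
        {any, 0};
    from_form({list, Elem}, L) ->
        {T, L1} = from_form(Elem, L - 1),
        {{list, T}, L1};
    from_form({tuple, Elems}, L) ->
        {Ts, L1} = elements_from_form(Elems, L - 1),
        {{tuple, Ts}, L1};
    from_form(Leaf, L) when is_atom(Leaf) ->
        {Leaf, L - 1}.

    %% Same shape as list_from_form/8 in the patch: each element is
    %% translated with whatever budget the previous one left over.
    elements_from_form([], L) ->
        {[], L};
    elements_from_form([H | T], L) ->
        {H1, L1} = from_form(H, L),
        {T1, L2} = elements_from_form(T, L1),
        {[H1 | T1], L2}.

For instance, from_form({tuple, [a, {list, b}]}, 3) evaluates to {{tuple, [a, {list, any}]}, 0}: once the earlier elements have used up the budget, the innermost type collapses to any() instead of growing the term further.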
@@ -4592,7 +4473,7 @@ t_form_to_string({type, _L, iodata, []}) -> "iodata()"; t_form_to_string({type, _L, iolist, []}) -> "iolist()"; t_form_to_string({type, _L, list, [Type]}) -> "[" ++ t_form_to_string(Type) ++ "]"; -t_form_to_string({type, _L, map, Args}) when not is_list(Args) -> +t_form_to_string({type, _L, map, _}) -> "#{}"; t_form_to_string({type, _L, mfa, []}) -> "mfa()"; t_form_to_string({type, _L, module, []}) -> "module()"; @@ -4623,12 +4504,21 @@ t_form_to_string({type, _L, tuple, Args}) -> t_form_to_string({type, _L, union, Args}) -> string:join(t_form_to_string_list(Args), " | "); t_form_to_string({type, _L, Name, []} = T) -> - try t_to_string(t_from_form(T)) + try + M = mod, + D0 = dict:new(), + MR = dict:from_list([{M, D0}]), + {T1, _} = + t_from_form(T, [], sets:new(), M, MR, D0, _Deep=1000, _ALot=100000), + t_to_string(T1) catch throw:{error, _} -> atom_to_string(Name) ++ "()" end; -t_form_to_string({type, _L, Name, List}) -> +t_form_to_string({user_type, _L, Name, List}) -> flat_format("~w(~s)", - [Name, string:join(t_form_to_string_list(List), ",")]). + [Name, string:join(t_form_to_string_list(List), ",")]); +t_form_to_string({type, L, Name, List}) -> + %% Compatibility: modules compiled before Erlang/OTP 18.0. + t_form_to_string({user_type, L, Name, List}). t_form_to_string_list(List) -> t_form_to_string_list(List, []). @@ -4671,7 +4561,7 @@ is_erl_type(#c{}) -> true; is_erl_type(_) -> false. -spec lookup_record(atom(), type_table()) -> - 'error' | {'ok', [{atom(), parse_form() | erl_type()}]}. + 'error' | {'ok', [{atom(), parse_form(), erl_type()}]}. lookup_record(Tag, RecDict) when is_atom(Tag) -> case dict:find({record, Tag}, RecDict) of @@ -4686,7 +4576,7 @@ lookup_record(Tag, RecDict) when is_atom(Tag) -> end. -spec lookup_record(atom(), arity(), type_table()) -> - 'error' | {'ok', [{atom(), erl_type()}]}. + 'error' | {'ok', [{atom(), parse_form(), erl_type()}]}. lookup_record(Tag, Arity, RecDict) when is_atom(Tag) -> case dict:find({record, Tag}, RecDict) of @@ -4741,27 +4631,14 @@ do_opaque(Type, _Opaques, Pred) -> is_same_type_name(ModNameArgs, ModNameArgs) -> true; is_same_type_name({Mod, Name, Args1}, {Mod, Name, Args2}) -> all_any(Args1) orelse all_any(Args2); -is_same_type_name({Mod1, Name1, Args1}, {Mod2, Name2, Args2}) -> - is_same_type_name2(Mod1, Name1, Args1, Mod2, Name2, Args2). +is_same_type_name(_ModNameArgs1, _ModNameArgs2) -> + false. all_any([]) -> true; all_any([T|L]) -> t_is_any(T) andalso all_any(L); all_any(_) -> false. -%% Compatibility. In Erlang/OTP 17 the pre-defined opaque types -%% digraph() and so on can be used, but there are also new types such -%% as digraph:graph() with the exact same meaning. In Erlang/OTP R18.0 -%% all but the last clause can be removed. - -is_same_type_name2(digraph, digraph, [], digraph, graph, []) -> true; -is_same_type_name2(digraph, graph, [], digraph, digraph, []) -> true; -is_same_type_name2(gb_sets, gb_set, [], gb_sets, set, [_]) -> true; -is_same_type_name2(gb_sets, set, [_], gb_sets, gb_set, []) -> true; -is_same_type_name2(gb_trees, gb_tree, [], gb_trees, tree, [_, _]) -> true; -is_same_type_name2(gb_trees, tree, [_, _], gb_trees, gb_tree, []) -> true; -is_same_type_name2(_, _, _, _, _, _) -> false. - map_keys(?map(Pairs)) -> [K || {K, _} <- Pairs]. 
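The added clause for {user_type, _, Name, List} together with the fallback clause for {type, _, Name, List} handles abstract code from modules compiled before Erlang/OTP 18.0, where calls to user-defined types were still tagged as plain type forms. A much simplified sketch of that dispatch (helper names are illustrative and the built-in-type clauses of the real t_form_to_string/1 are omitted):

    -module(user_type_compat_sketch).
    -export([form_to_string/1]).

    form_to_string({user_type, _Line, Name, Args}) ->
        format_user_type(Name, Args);
    form_to_string({type, _Line, Name, Args}) ->
        %% Compatibility: pre-OTP 18.0 abstract code tags user-defined
        %% types as 'type' forms, so format them the same way.
        format_user_type(Name, Args);
    form_to_string({atom, _Line, A}) ->
        lists:flatten(io_lib:format("~w", [A]));
    form_to_string({var, _Line, V}) ->
        atom_to_list(V).

    format_user_type(Name, Args) ->
        ArgStrings = [form_to_string(A) || A <- Args],
        lists:flatten(io_lib:format("~w(~s)",
                                    [Name, string:join(ArgStrings, ",")])).

For example, form_to_string({type, 1, my_set, [{atom, 1, foo}]}) gives "my_set(foo)", the same rendering the new clauses fall back to for old abstract code.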
diff --git a/lib/ic/test/java_client_erl_server_SUITE.erl b/lib/ic/test/java_client_erl_server_SUITE.erl index cbcf32515e..6ac08fd0fe 100644 --- a/lib/ic/test/java_client_erl_server_SUITE.erl +++ b/lib/ic/test/java_client_erl_server_SUITE.erl @@ -280,11 +280,7 @@ classpath(Dir) -> Dir++PS++ filename:join([code:lib_dir(ic),"priv","ic.jar"])++PS++ filename:join([code:lib_dir(jinterface),"priv","OtpErlang.jar"])++PS++ - case os:getenv("CLASSPATH") of - false -> ""; - Classpath -> Classpath - end. - + os:getenv("CLASSPATH", ""). cmd(Cmd) -> PortOpts = [{line,80},eof,exit_status,stderr_to_stdout], diff --git a/lib/inets/src/http_client/httpc_handler.erl b/lib/inets/src/http_client/httpc_handler.erl index 0bbd40d656..7f7328f1d9 100644 --- a/lib/inets/src/http_client/httpc_handler.erl +++ b/lib/inets/src/http_client/httpc_handler.erl @@ -316,8 +316,9 @@ handle_call(#request{address = Addr} = Request, _, {reply, ok, State} end; {error, Reason} -> - ?hcri("failed sending request", [{reason, Reason}]), - {reply, {pipeline_failed, Reason}, State0} + ?hcri("failed sending request", [{reason, Reason}]), + NewPipeline = queue:in(Request, State0#state.pipeline), + {stop, shutdown, {pipeline_failed, Reason}, State0#state{pipeline = NewPipeline}} end; handle_call(#request{address = Addr} = Request, _, @@ -355,25 +356,25 @@ handle_call(#request{address = Addr} = Request, _, ?hcrd("no current request", []), cancel_timer(Timers#timers.queue_timer, timeout_queue), + NewTimers = Timers#timers{queue_timer = undefined}, + State1 = State0#state{timers = NewTimers}, Address = handle_proxy(Addr, Proxy), case httpc_request:send(Address, Session, Request) of ok -> ?hcrd("request sent", []), %% Activate the request time out for the new request - State1 = - activate_request_timeout(State0#state{request = Request}), - NewTimers = State1#state.timers, + State2 = + activate_request_timeout(State1#state{request = Request}), NewSession = Session#session{queue_length = 1, client_close = ClientClose}, insert_session(NewSession, ProfileName), - State = init_wait_for_response_state(Request, State1#state{session = NewSession, - timers = NewTimers}), + State = init_wait_for_response_state(Request, State2#state{session = NewSession}), {reply, ok, State}; {error, Reason} -> ?hcri("failed sending request", [{reason, Reason}]), - {reply, {request_failed, Reason}, State0} + {stop, shutdown, {keepalive_failed, Reason}, State1} end end; @@ -1329,7 +1330,7 @@ handle_keep_alive_queue(#state{status = keep_alive, Session, <<>>, State#state{keep_alive = KeepAlive}); {error, Reason} -> - {reply, {keep_alive_failed, Reason}, State} + {stop, shutdown, {keepalive_failed, Reason}, State} end end end. @@ -1850,6 +1851,7 @@ update_session(ProfileName, #session{id = SessionId} = Session, Pos, Value) -> Session2 = erlang:setelement(Pos, Session, Value), insert_session(Session2, ProfileName); T:E -> + Stacktrace = erlang:get_stacktrace(), error_logger:error_msg("Failed updating session: " "~n ProfileName: ~p" "~n SessionId: ~p" @@ -1873,7 +1875,7 @@ update_session(ProfileName, #session{id = SessionId} = Session, Pos, Value) -> {value, Value}, {etype, T}, {error, E}, - {stacktrace, erlang:get_stacktrace()}]}) + {stacktrace, Stacktrace}]}) end. diff --git a/lib/inets/test/erl_make_certs.erl b/lib/inets/test/erl_make_certs.erl index 22dc951ac1..6c168a5704 100644 --- a/lib/inets/test/erl_make_certs.erl +++ b/lib/inets/test/erl_make_certs.erl @@ -204,7 +204,7 @@ issuer_der(Issuer) -> Subject. 
subject(undefined, IsRootCA) -> - User = if IsRootCA -> "RootCA"; true -> user() end, + User = if IsRootCA -> "RootCA"; true -> os:getenv("USER", "test_user") end, Opts = [{email, User ++ "@erlang.org"}, {name, User}, {city, "Stockholm"}, @@ -215,14 +215,6 @@ subject(undefined, IsRootCA) -> subject(Opts, _) -> subject(Opts). -user() -> - case os:getenv("USER") of - false -> - "test_user"; - User -> - User - end. - subject(SubjectOpts) when is_list(SubjectOpts) -> Encode = fun(Opt) -> {Type,Value} = subject_enc(Opt), diff --git a/lib/inets/test/httpc_SUITE.erl b/lib/inets/test/httpc_SUITE.erl index 21be7862cb..0e89e831fb 100644 --- a/lib/inets/test/httpc_SUITE.erl +++ b/lib/inets/test/httpc_SUITE.erl @@ -28,6 +28,7 @@ -include_lib("common_test/include/ct.hrl"). -include("inets_test_lib.hrl"). -include("http_internal.hrl"). +-include("httpc_internal.hrl"). %% Note: This directive should only be used in test suites. -compile(export_all). @@ -106,6 +107,7 @@ only_simulated() -> empty_response_header, remote_socket_close, remote_socket_close_async, + process_leak_on_keepalive, transfer_encoding, transfer_encoding_identity, redirect_loop, @@ -913,6 +915,33 @@ remote_socket_close_async(Config) when is_list(Config) -> %%------------------------------------------------------------------------- +process_leak_on_keepalive(Config) -> + {ok, ClosedSocket} = gen_tcp:listen(6666, [{active, false}]), + ok = gen_tcp:close(ClosedSocket), + Request = {url(group_name(Config), "/dummy.html", Config), []}, + HttpcHandlers0 = supervisor:which_children(httpc_handler_sup), + {ok, {{_, 200, _}, _, Body}} = httpc:request(get, Request, [], []), + HttpcHandlers1 = supervisor:which_children(httpc_handler_sup), + ChildrenCount = supervisor:count_children(httpc_handler_sup), + %% Assuming that the new handler will be selected for keep_alive + %% which could not be the case if other handlers existed + [{undefined, Pid, worker, [httpc_handler]}] = + ordsets:to_list( + ordsets:subtract(ordsets:from_list(HttpcHandlers1), + ordsets:from_list(HttpcHandlers0))), + sys:replace_state( + Pid, fun (State) -> + Session = element(3, State), + setelement(3, State, Session#session{socket=ClosedSocket}) + end), + {ok, {{_, 200, _}, _, Body}} = httpc:request(get, Request, [], []), + %% bad handler with the closed socket should get replaced by + %% the new one, so children count should stay the same + ChildrenCount = supervisor:count_children(httpc_handler_sup), + ok. 
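process_leak_on_keepalive uses sys:replace_state/2 to swap a closed socket into the #session{} of a live httpc_handler, re-issues a request, and asserts that supervisor:count_children/1 reports the same number of handlers, i.e. the broken handler is replaced rather than leaked. A generic sketch of that fault-injection idiom against a plain gen_server (the module, record, and function names below are made up for illustration; the real test patches the third element of the handler state):

    -module(fault_inject_sketch).
    -behaviour(gen_server).
    -export([start_link/0, get_socket/0, break_socket/1]).
    -export([init/1, handle_call/3, handle_cast/2, handle_info/2,
             terminate/2, code_change/3]).

    -record(state, {socket}).

    start_link() ->
        gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).

    init([]) ->
        {ok, LSock} = gen_tcp:listen(0, [{active, false}]),
        {ok, #state{socket = LSock}}.

    get_socket() ->
        gen_server:call(?MODULE, get_socket).

    %% Test helper: replace the live socket with an already closed one
    %% without restarting the server, the same way the test swaps the
    %% #session{} socket via sys:replace_state/2.
    break_socket(ClosedSocket) ->
        sys:replace_state(?MODULE,
                          fun(#state{} = S) ->
                                  S#state{socket = ClosedSocket}
                          end).

    handle_call(get_socket, _From, #state{socket = Sock} = State) ->
        {reply, Sock, State}.

    handle_cast(_Msg, State) -> {noreply, State}.
    handle_info(_Info, State) -> {noreply, State}.
    terminate(_Reason, _State) -> ok.
    code_change(_OldVsn, State, _Extra) -> {ok, State}.

A test can call break_socket/1 with a socket it has already closed and then assert on how the next operation fails, while the process under test keeps its identity, which is what lets the suite compare handler counts before and after the injected fault.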
+ +%%------------------------------------------------------------------------- + stream_to_pid(Config) when is_list(Config) -> ReceiverPid = create_receiver(pid), Receiver = ReceiverPid, diff --git a/lib/jinterface/.classpath b/lib/jinterface/.classpath new file mode 100644 index 0000000000..9785e55986 --- /dev/null +++ b/lib/jinterface/.classpath @@ -0,0 +1,6 @@ +<?xml version="1.0" encoding="UTF-8"?> +<classpath> + <classpathentry kind="src" path="java_src"/> + <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/> + <classpathentry kind="output" path="priv"/> +</classpath> diff --git a/lib/jinterface/.gitignore b/lib/jinterface/.gitignore new file mode 100644 index 0000000000..3418d354a9 --- /dev/null +++ b/lib/jinterface/.gitignore @@ -0,0 +1,2 @@ +priv/ + diff --git a/lib/jinterface/.project b/lib/jinterface/.project new file mode 100644 index 0000000000..450b96dc12 --- /dev/null +++ b/lib/jinterface/.project @@ -0,0 +1,17 @@ +<?xml version="1.0" encoding="UTF-8"?> +<projectDescription> + <name>jinterface</name> + <comment></comment> + <projects> + </projects> + <buildSpec> + <buildCommand> + <name>org.eclipse.jdt.core.javabuilder</name> + <arguments> + </arguments> + </buildCommand> + </buildSpec> + <natures> + <nature>org.eclipse.jdt.core.javanature</nature> + </natures> +</projectDescription> diff --git a/lib/jinterface/.settings/org.eclipse.jdt.core.prefs b/lib/jinterface/.settings/org.eclipse.jdt.core.prefs new file mode 100644 index 0000000000..e8b3772a8a --- /dev/null +++ b/lib/jinterface/.settings/org.eclipse.jdt.core.prefs @@ -0,0 +1,296 @@ +eclipse.preferences.version=1 +org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled +org.eclipse.jdt.core.compiler.codegen.methodParameters=do not generate +org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.5 +org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve +org.eclipse.jdt.core.compiler.compliance=1.5 +org.eclipse.jdt.core.compiler.debug.lineNumber=generate +org.eclipse.jdt.core.compiler.debug.localVariable=generate +org.eclipse.jdt.core.compiler.debug.sourceFile=generate +org.eclipse.jdt.core.compiler.problem.assertIdentifier=error +org.eclipse.jdt.core.compiler.problem.enumIdentifier=error +org.eclipse.jdt.core.compiler.source=1.5 +org.eclipse.jdt.core.formatter.align_type_members_on_columns=false +org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression=16 +org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation=0 +org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant=16 +org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call=16 +org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation=16 +org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression=16 +org.eclipse.jdt.core.formatter.alignment_for_assignment=0 +org.eclipse.jdt.core.formatter.alignment_for_binary_expression=16 +org.eclipse.jdt.core.formatter.alignment_for_compact_if=16 +org.eclipse.jdt.core.formatter.alignment_for_conditional_expression=80 +org.eclipse.jdt.core.formatter.alignment_for_enum_constants=0 +org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer=16 +org.eclipse.jdt.core.formatter.alignment_for_method_declaration=0 +org.eclipse.jdt.core.formatter.alignment_for_multiple_fields=16 +org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration=16 
+org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration=16 +org.eclipse.jdt.core.formatter.alignment_for_resources_in_try=80 +org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation=16 +org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration=16 +org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration=16 +org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration=16 +org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration=16 +org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16 +org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch=16 +org.eclipse.jdt.core.formatter.blank_lines_after_imports=1 +org.eclipse.jdt.core.formatter.blank_lines_after_package=1 +org.eclipse.jdt.core.formatter.blank_lines_before_field=0 +org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration=0 +org.eclipse.jdt.core.formatter.blank_lines_before_imports=1 +org.eclipse.jdt.core.formatter.blank_lines_before_member_type=1 +org.eclipse.jdt.core.formatter.blank_lines_before_method=1 +org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk=1 +org.eclipse.jdt.core.formatter.blank_lines_before_package=0 +org.eclipse.jdt.core.formatter.blank_lines_between_import_groups=1 +org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations=1 +org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration=end_of_line +org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration=end_of_line +org.eclipse.jdt.core.formatter.brace_position_for_array_initializer=end_of_line +org.eclipse.jdt.core.formatter.brace_position_for_block=end_of_line +org.eclipse.jdt.core.formatter.brace_position_for_block_in_case=end_of_line +org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration=end_of_line +org.eclipse.jdt.core.formatter.brace_position_for_enum_constant=end_of_line +org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration=end_of_line +org.eclipse.jdt.core.formatter.brace_position_for_lambda_body=end_of_line +org.eclipse.jdt.core.formatter.brace_position_for_method_declaration=end_of_line +org.eclipse.jdt.core.formatter.brace_position_for_switch=end_of_line +org.eclipse.jdt.core.formatter.brace_position_for_type_declaration=end_of_line +org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment=false +org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment=false +org.eclipse.jdt.core.formatter.comment.format_block_comments=true +org.eclipse.jdt.core.formatter.comment.format_header=false +org.eclipse.jdt.core.formatter.comment.format_html=true +org.eclipse.jdt.core.formatter.comment.format_javadoc_comments=true +org.eclipse.jdt.core.formatter.comment.format_line_comments=true +org.eclipse.jdt.core.formatter.comment.format_source_code=true +org.eclipse.jdt.core.formatter.comment.indent_parameter_description=true +org.eclipse.jdt.core.formatter.comment.indent_root_tags=true +org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags=insert +org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter=insert +org.eclipse.jdt.core.formatter.comment.line_length=80 +org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries=true +org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries=true +org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments=true 
+org.eclipse.jdt.core.formatter.compact_else_if=true +org.eclipse.jdt.core.formatter.continuation_indentation=2 +org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer=2 +org.eclipse.jdt.core.formatter.disabling_tag=@formatter\:off +org.eclipse.jdt.core.formatter.enabling_tag=@formatter\:on +org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line=false +org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column=true +org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header=true +org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header=true +org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header=true +org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header=true +org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases=true +org.eclipse.jdt.core.formatter.indent_empty_lines=false +org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true +org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true +org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true +org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=false +org.eclipse.jdt.core.formatter.indentation.size=4 +org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field=insert +org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable=insert +org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method=insert +org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package=insert +org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter=do not insert +org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type=insert +org.eclipse.jdt.core.formatter.insert_new_line_after_label=do not insert +org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert +org.eclipse.jdt.core.formatter.insert_new_line_after_type_annotation=do not insert +org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing=insert +org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert +org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert +org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert +org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement=do not insert +org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert +org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration=insert +org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration=insert +org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block=insert +org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant=insert +org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration=insert +org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body=insert +org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration=insert +org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter=insert +org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator=insert +org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration=do not insert 
+org.eclipse.jdt.core.formatter.insert_space_after_binary_operator=insert +org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments=insert +org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters=insert +org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block=insert +org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast=insert +org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert=insert +org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case=insert +org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional=insert +org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for=insert +org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert +org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression=insert +org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation=insert +org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer=insert +org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters=insert +org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws=insert +org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments=insert +org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert +org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments=insert +org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments=insert +org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits=insert +org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert +org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert +org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert +org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations=insert +org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations=insert +org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference=insert +org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces=insert +org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments=insert +org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters=insert +org.eclipse.jdt.core.formatter.insert_space_after_ellipsis=insert +org.eclipse.jdt.core.formatter.insert_space_after_lambda_arrow=insert +org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert +org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch=do not 
insert +org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional=insert +org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard=do not insert +org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for=insert +org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources=insert +org.eclipse.jdt.core.formatter.insert_space_after_unary_operator=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter=insert +org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator=insert +org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration=insert +org.eclipse.jdt.core.formatter.insert_space_before_binary_operator=insert +org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert +org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert 
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert=insert +org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional=insert +org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for=insert +org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_ellipsis=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_lambda_arrow=insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration=insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration=insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert 
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block=insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration=insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant=insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration=insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch=insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch=insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for=insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if=insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch=insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized=insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try=insert +org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while=insert +org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return=insert +org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw=insert +org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional=insert +org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_semicolon=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources=do not insert +org.eclipse.jdt.core.formatter.insert_space_before_unary_operator=do not insert +org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference=do not insert +org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert +org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression=do not insert +org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration=do not insert 
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration=do not insert +org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant=do not insert +org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert +org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert +org.eclipse.jdt.core.formatter.join_lines_in_comments=true +org.eclipse.jdt.core.formatter.join_wrapped_lines=true +org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line=false +org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line=false +org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line=false +org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line=false +org.eclipse.jdt.core.formatter.lineSplit=80 +org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column=false +org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column=false +org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body=0 +org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve=1 +org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line=true +org.eclipse.jdt.core.formatter.tabulation.char=space +org.eclipse.jdt.core.formatter.tabulation.size=4 +org.eclipse.jdt.core.formatter.use_on_off_tags=true +org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations=false +org.eclipse.jdt.core.formatter.wrap_before_binary_operator=true +org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch=true +org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested=true diff --git a/lib/jinterface/.settings/org.eclipse.jdt.ui.prefs b/lib/jinterface/.settings/org.eclipse.jdt.ui.prefs new file mode 100644 index 0000000000..9190d818bc --- /dev/null +++ b/lib/jinterface/.settings/org.eclipse.jdt.ui.prefs @@ -0,0 +1,121 @@ +cleanup.add_default_serial_version_id=true +cleanup.add_generated_serial_version_id=false +cleanup.add_missing_annotations=true +cleanup.add_missing_deprecated_annotations=true +cleanup.add_missing_methods=false +cleanup.add_missing_nls_tags=false +cleanup.add_missing_override_annotations=true +cleanup.add_missing_override_annotations_interface_methods=true +cleanup.add_serial_version_id=false +cleanup.always_use_blocks=true +cleanup.always_use_parentheses_in_expressions=false +cleanup.always_use_this_for_non_static_field_access=false +cleanup.always_use_this_for_non_static_method_access=false +cleanup.convert_functional_interfaces=false +cleanup.convert_to_enhanced_for_loop=false +cleanup.correct_indentation=false +cleanup.format_source_code=true +cleanup.format_source_code_changes_only=false +cleanup.insert_inferred_type_arguments=false +cleanup.make_local_variable_final=true +cleanup.make_parameters_final=true +cleanup.make_private_fields_final=true +cleanup.make_type_abstract_if_missing_method=false +cleanup.make_variable_declarations_final=true +cleanup.never_use_blocks=false +cleanup.never_use_parentheses_in_expressions=true +cleanup.organize_imports=true +cleanup.qualify_static_field_accesses_with_declaring_class=false +cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true +cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true +cleanup.qualify_static_member_accesses_with_declaring_class=true +cleanup.qualify_static_method_accesses_with_declaring_class=false +cleanup.remove_private_constructors=true 
+cleanup.remove_redundant_type_arguments=true +cleanup.remove_trailing_whitespaces=true +cleanup.remove_trailing_whitespaces_all=true +cleanup.remove_trailing_whitespaces_ignore_empty=false +cleanup.remove_unnecessary_casts=true +cleanup.remove_unnecessary_nls_tags=true +cleanup.remove_unused_imports=true +cleanup.remove_unused_local_variables=false +cleanup.remove_unused_private_fields=true +cleanup.remove_unused_private_members=false +cleanup.remove_unused_private_methods=true +cleanup.remove_unused_private_types=true +cleanup.sort_members=false +cleanup.sort_members_all=false +cleanup.use_anonymous_class_creation=false +cleanup.use_blocks=true +cleanup.use_blocks_only_for_return_and_throw=false +cleanup.use_lambda=true +cleanup.use_parentheses_in_expressions=true +cleanup.use_this_for_non_static_field_access=true +cleanup.use_this_for_non_static_field_access_only_if_necessary=true +cleanup.use_this_for_non_static_method_access=true +cleanup.use_this_for_non_static_method_access_only_if_necessary=true +cleanup.use_type_arguments=false +cleanup_profile=_jinterface +cleanup_settings_version=2 +eclipse.preferences.version=1 +editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true +formatter_profile=_jinterface +formatter_settings_version=12 +sp_cleanup.add_default_serial_version_id=true +sp_cleanup.add_generated_serial_version_id=false +sp_cleanup.add_missing_annotations=true +sp_cleanup.add_missing_deprecated_annotations=true +sp_cleanup.add_missing_methods=false +sp_cleanup.add_missing_nls_tags=false +sp_cleanup.add_missing_override_annotations=true +sp_cleanup.add_missing_override_annotations_interface_methods=true +sp_cleanup.add_serial_version_id=false +sp_cleanup.always_use_blocks=true +sp_cleanup.always_use_parentheses_in_expressions=false +sp_cleanup.always_use_this_for_non_static_field_access=false +sp_cleanup.always_use_this_for_non_static_method_access=false +sp_cleanup.convert_functional_interfaces=false +sp_cleanup.convert_to_enhanced_for_loop=false +sp_cleanup.correct_indentation=false +sp_cleanup.format_source_code=true +sp_cleanup.format_source_code_changes_only=false +sp_cleanup.insert_inferred_type_arguments=false +sp_cleanup.make_local_variable_final=true +sp_cleanup.make_parameters_final=true +sp_cleanup.make_private_fields_final=true +sp_cleanup.make_type_abstract_if_missing_method=false +sp_cleanup.make_variable_declarations_final=true +sp_cleanup.never_use_blocks=false +sp_cleanup.never_use_parentheses_in_expressions=true +sp_cleanup.on_save_use_additional_actions=true +sp_cleanup.organize_imports=true +sp_cleanup.qualify_static_field_accesses_with_declaring_class=false +sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true +sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true +sp_cleanup.qualify_static_member_accesses_with_declaring_class=true +sp_cleanup.qualify_static_method_accesses_with_declaring_class=false +sp_cleanup.remove_private_constructors=true +sp_cleanup.remove_redundant_type_arguments=false +sp_cleanup.remove_trailing_whitespaces=true +sp_cleanup.remove_trailing_whitespaces_all=true +sp_cleanup.remove_trailing_whitespaces_ignore_empty=false +sp_cleanup.remove_unnecessary_casts=true +sp_cleanup.remove_unnecessary_nls_tags=true +sp_cleanup.remove_unused_imports=true +sp_cleanup.remove_unused_local_variables=false +sp_cleanup.remove_unused_private_fields=true +sp_cleanup.remove_unused_private_members=false +sp_cleanup.remove_unused_private_methods=true 
+sp_cleanup.remove_unused_private_types=true +sp_cleanup.sort_members=false +sp_cleanup.sort_members_all=false +sp_cleanup.use_anonymous_class_creation=false +sp_cleanup.use_blocks=true +sp_cleanup.use_blocks_only_for_return_and_throw=false +sp_cleanup.use_lambda=false +sp_cleanup.use_parentheses_in_expressions=true +sp_cleanup.use_this_for_non_static_field_access=false +sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=true +sp_cleanup.use_this_for_non_static_method_access=false +sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=true +sp_cleanup.use_type_arguments=false diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java index b8a973753a..1b0fe3e2e6 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2010. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ @@ -27,26 +27,26 @@ import java.util.Random; * Maintains a connection between a Java process and a remote Erlang, Java or C * node. The object maintains connection state and allows data to be sent to and * received from the peer. - * + * * <p> * This abstract class provides the neccesary methods to maintain the actual * connection and encode the messages and headers in the proper format according * to the Erlang distribution protocol. Subclasses can use these methods to * provide a more or less transparent communication channel as desired. * </p> - * + * * <p> * Note that no receive methods are provided. Subclasses must provide methods * for message delivery, and may implement their own receive methods. * <p> - * + * * <p> * If an exception occurs in any of the methods in this class, the connection * will be closed and must be reopened in order to resume communication with the * peer. This will be indicated to the subclass by passing the exception to its * delivery() method. * </p> - * + * * <p> * The System property OtpConnection.trace can be used to change the initial * trace level setting for all connections. Normally the initial trace level is @@ -106,104 +106,104 @@ public abstract class AbstractConnection extends Thread { private int flags = 0; static { - // trace this connection? - final String trace = System.getProperties().getProperty( - "OtpConnection.trace"); - try { - if (trace != null) { - defaultLevel = Integer.valueOf(trace).intValue(); - } - } catch (final NumberFormatException e) { - defaultLevel = 0; - } - random = new Random(); + // trace this connection? 
+ final String trace = System.getProperties().getProperty( + "OtpConnection.trace"); + try { + if (trace != null) { + defaultLevel = Integer.valueOf(trace).intValue(); + } + } catch (final NumberFormatException e) { + defaultLevel = 0; + } + random = new Random(); } // private AbstractConnection() { // } /** - * Accept an incoming connection from a remote node. Used by {@link - * OtpSelf#accept() OtpSelf.accept()} to create a connection based on data - * received when handshaking with the peer node, when the remote node is the - * connection intitiator. - * - * @exception java.io.IOException if it was not possible to connect to the - * peer. - * - * @exception OtpAuthException if handshake resulted in an authentication - * error + * Accept an incoming connection from a remote node. Used by + * {@link OtpSelf#accept() OtpSelf.accept()} to create a connection based on + * data received when handshaking with the peer node, when the remote node + * is the connection intitiator. + * + * @exception java.io.IOException + * if it was not possible to connect to the peer. + * + * @exception OtpAuthException + * if handshake resulted in an authentication error */ protected AbstractConnection(final OtpLocalNode self, final Socket s) - throws IOException, OtpAuthException { - this.localNode = self; - peer = new OtpPeer(); - socket = s; - - socket.setTcpNoDelay(true); - - traceLevel = defaultLevel; - setDaemon(true); - - if (traceLevel >= handshakeThreshold) { - System.out.println("<- ACCEPT FROM " + s.getInetAddress() + ":" - + s.getPort()); - } - - // get his info - recvName(peer); - - // now find highest common dist value - if (peer.proto != self.proto || self.distHigh < peer.distLow - || self.distLow > peer.distHigh) { - close(); - throw new IOException( - "No common protocol found - cannot accept connection"); - } - // highest common version: min(peer.distHigh, self.distHigh) - peer.distChoose = peer.distHigh > self.distHigh ? self.distHigh - : peer.distHigh; - - doAccept(); - name = peer.node(); + throws IOException, OtpAuthException { + localNode = self; + peer = new OtpPeer(); + socket = s; + + socket.setTcpNoDelay(true); + + traceLevel = defaultLevel; + setDaemon(true); + + if (traceLevel >= handshakeThreshold) { + System.out.println("<- ACCEPT FROM " + s.getInetAddress() + ":" + + s.getPort()); + } + + // get his info + recvName(peer); + + // now find highest common dist value + if (peer.proto != self.proto || self.distHigh < peer.distLow + || self.distLow > peer.distHigh) { + close(); + throw new IOException( + "No common protocol found - cannot accept connection"); + } + // highest common version: min(peer.distHigh, self.distHigh) + peer.distChoose = peer.distHigh > self.distHigh ? self.distHigh + : peer.distHigh; + + doAccept(); + name = peer.node(); } /** * Intiate and open a connection to a remote node. - * - * @exception java.io.IOException if it was not possible to connect to the - * peer. - * - * @exception OtpAuthException if handshake resulted in an authentication - * error. + * + * @exception java.io.IOException + * if it was not possible to connect to the peer. + * + * @exception OtpAuthException + * if handshake resulted in an authentication error. 
*/ protected AbstractConnection(final OtpLocalNode self, final OtpPeer other) - throws IOException, OtpAuthException { - peer = other; - this.localNode = self; - socket = null; - int port; + throws IOException, OtpAuthException { + peer = other; + localNode = self; + socket = null; + int port; - traceLevel = defaultLevel; - setDaemon(true); + traceLevel = defaultLevel; + setDaemon(true); - // now get a connection between the two... - port = OtpEpmd.lookupPort(peer); + // now get a connection between the two... + port = OtpEpmd.lookupPort(peer); - // now find highest common dist value - if (peer.proto != self.proto || self.distHigh < peer.distLow - || self.distLow > peer.distHigh) { - throw new IOException("No common protocol found - cannot connect"); - } + // now find highest common dist value + if (peer.proto != self.proto || self.distHigh < peer.distLow + || self.distLow > peer.distHigh) { + throw new IOException("No common protocol found - cannot connect"); + } - // highest common version: min(peer.distHigh, self.distHigh) - peer.distChoose = peer.distHigh > self.distHigh ? self.distHigh - : peer.distHigh; + // highest common version: min(peer.distHigh, self.distHigh) + peer.distChoose = peer.distHigh > self.distHigh ? self.distHigh + : peer.distHigh; - doConnect(port); + doConnect(port); - name = peer.node(); - connected = true; + name = peer.node(); + connected = true; } /** @@ -218,91 +218,91 @@ public abstract class AbstractConnection extends Thread { /** * Send a pre-encoded message to a named process on a remote node. - * + * * @param dest * the name of the remote process. * @param payload * the encoded message to send. - * + * * @exception java.io.IOException * if the connection is not active or a communication error * occurs. */ protected void sendBuf(final OtpErlangPid from, final String dest, - final OtpOutputStream payload) throws IOException { - if (!connected) { - throw new IOException("Not connected"); - } - @SuppressWarnings("resource") - final OtpOutputStream header = new OtpOutputStream(headerLen); - - // preamble: 4 byte length + "passthrough" tag + version - header.write4BE(0); // reserve space for length - header.write1(passThrough); - header.write1(version); - - // header info - header.write_tuple_head(4); - header.write_long(regSendTag); - header.write_any(from); - if (sendCookie) { - header.write_atom(localNode.cookie()); - } else { - header.write_atom(""); - } - header.write_atom(dest); - - // version for payload - header.write1(version); - - // fix up length in preamble - header.poke4BE(0, header.size() + payload.size() - 4); - - do_send(header, payload); + final OtpOutputStream payload) throws IOException { + if (!connected) { + throw new IOException("Not connected"); + } + @SuppressWarnings("resource") + final OtpOutputStream header = new OtpOutputStream(headerLen); + + // preamble: 4 byte length + "passthrough" tag + version + header.write4BE(0); // reserve space for length + header.write1(passThrough); + header.write1(version); + + // header info + header.write_tuple_head(4); + header.write_long(regSendTag); + header.write_any(from); + if (sendCookie) { + header.write_atom(localNode.cookie()); + } else { + header.write_atom(""); + } + header.write_atom(dest); + + // version for payload + header.write1(version); + + // fix up length in preamble + header.poke4BE(0, header.size() + payload.size() - 4); + + do_send(header, payload); } /** * Send a pre-encoded message to a process on a remote node. - * + * * @param dest * the Erlang PID of the remote process. 
* @param payload * the encoded message to send. - * + * * @exception java.io.IOException * if the connection is not active or a communication error * occurs. */ protected void sendBuf(final OtpErlangPid from, final OtpErlangPid dest, - final OtpOutputStream payload) throws IOException { - if (!connected) { - throw new IOException("Not connected"); - } - @SuppressWarnings("resource") - final OtpOutputStream header = new OtpOutputStream(headerLen); - - // preamble: 4 byte length + "passthrough" tag + version - header.write4BE(0); // reserve space for length - header.write1(passThrough); - header.write1(version); - - // header info - header.write_tuple_head(3); - header.write_long(sendTag); - if (sendCookie) { - header.write_atom(localNode.cookie()); - } else { - header.write_atom(""); - } - header.write_any(dest); - - // version for payload - header.write1(version); - - // fix up length in preamble - header.poke4BE(0, header.size() + payload.size() - 4); - - do_send(header, payload); + final OtpOutputStream payload) throws IOException { + if (!connected) { + throw new IOException("Not connected"); + } + @SuppressWarnings("resource") + final OtpOutputStream header = new OtpOutputStream(headerLen); + + // preamble: 4 byte length + "passthrough" tag + version + header.write4BE(0); // reserve space for length + header.write1(passThrough); + header.write1(version); + + // header info + header.write_tuple_head(3); + header.write_long(sendTag); + if (sendCookie) { + header.write_atom(localNode.cookie()); + } else { + header.write_atom(""); + } + header.write_any(dest); + + // version for payload + header.write1(version); + + // fix up length in preamble + header.poke4BE(0, header.size() + payload.size() - 4); + + do_send(header, payload); } /* @@ -311,60 +311,60 @@ public abstract class AbstractConnection extends Thread { * otherwise */ private void cookieError(final OtpLocalNode local, - final OtpErlangAtom cookie) throws OtpAuthException { - try { - @SuppressWarnings("resource") - final OtpOutputStream header = new OtpOutputStream(headerLen); - - // preamble: 4 byte length + "passthrough" tag + version - header.write4BE(0); // reserve space for length - header.write1(passThrough); - header.write1(version); - - header.write_tuple_head(4); - header.write_long(regSendTag); - header.write_any(local.createPid()); // disposable pid - header.write_atom(cookie.atomValue()); // important: his cookie, - // not mine... - header.write_atom("auth"); - - // version for payload - header.write1(version); - - // the payload - - // the no_auth message (copied from Erlang) Don't change this - // (Erlang will crash) - // {$gen_cast, {print, "~n** Unauthorized cookie ~w **~n", - // [foo@aule]}} - final OtpErlangObject[] msg = new OtpErlangObject[2]; - final OtpErlangObject[] msgbody = new OtpErlangObject[3]; - - msgbody[0] = new OtpErlangAtom("print"); - msgbody[1] = new OtpErlangString("~n** Bad cookie sent to " + local - + " **~n"); - // Erlang will crash and burn if there is no third argument here... 
- msgbody[2] = new OtpErlangList(); // empty list - - msg[0] = new OtpErlangAtom("$gen_cast"); - msg[1] = new OtpErlangTuple(msgbody); - - @SuppressWarnings("resource") - final OtpOutputStream payload = new OtpOutputStream( - new OtpErlangTuple(msg)); - - // fix up length in preamble - header.poke4BE(0, header.size() + payload.size() - 4); - - try { - do_send(header, payload); - } catch (final IOException e) { - } // ignore - } finally { - close(); - } - throw new OtpAuthException("Remote cookie not authorized: " - + cookie.atomValue()); + final OtpErlangAtom cookie) throws OtpAuthException { + try { + @SuppressWarnings("resource") + final OtpOutputStream header = new OtpOutputStream(headerLen); + + // preamble: 4 byte length + "passthrough" tag + version + header.write4BE(0); // reserve space for length + header.write1(passThrough); + header.write1(version); + + header.write_tuple_head(4); + header.write_long(regSendTag); + header.write_any(local.createPid()); // disposable pid + header.write_atom(cookie.atomValue()); // important: his cookie, + // not mine... + header.write_atom("auth"); + + // version for payload + header.write1(version); + + // the payload + + // the no_auth message (copied from Erlang) Don't change this + // (Erlang will crash) + // {$gen_cast, {print, "~n** Unauthorized cookie ~w **~n", + // [foo@aule]}} + final OtpErlangObject[] msg = new OtpErlangObject[2]; + final OtpErlangObject[] msgbody = new OtpErlangObject[3]; + + msgbody[0] = new OtpErlangAtom("print"); + msgbody[1] = new OtpErlangString("~n** Bad cookie sent to " + local + + " **~n"); + // Erlang will crash and burn if there is no third argument here... + msgbody[2] = new OtpErlangList(); // empty list + + msg[0] = new OtpErlangAtom("$gen_cast"); + msg[1] = new OtpErlangTuple(msgbody); + + @SuppressWarnings("resource") + final OtpOutputStream payload = new OtpOutputStream( + new OtpErlangTuple(msg)); + + // fix up length in preamble + header.poke4BE(0, header.size() + payload.size() - 4); + + try { + do_send(header, payload); + } catch (final IOException e) { + } // ignore + } finally { + close(); + } + throw new OtpAuthException("Remote cookie not authorized: " + + cookie.atomValue()); } // link to pid @@ -374,364 +374,364 @@ public abstract class AbstractConnection extends Thread { * remote node. If the link is still active when the remote process * terminates, an exit signal will be sent to this connection. Use * {@link #sendUnlink unlink()} to remove the link. - * + * * @param dest * the Erlang PID of the remote process. - * + * * @exception java.io.IOException * if the connection is not active or a communication error * occurs. 
*/ protected void sendLink(final OtpErlangPid from, final OtpErlangPid dest) - throws IOException { - if (!connected) { - throw new IOException("Not connected"); - } - @SuppressWarnings("resource") - final OtpOutputStream header = new OtpOutputStream(headerLen); + throws IOException { + if (!connected) { + throw new IOException("Not connected"); + } + @SuppressWarnings("resource") + final OtpOutputStream header = new OtpOutputStream(headerLen); - // preamble: 4 byte length + "passthrough" tag - header.write4BE(0); // reserve space for length - header.write1(passThrough); - header.write1(version); + // preamble: 4 byte length + "passthrough" tag + header.write4BE(0); // reserve space for length + header.write1(passThrough); + header.write1(version); - // header - header.write_tuple_head(3); - header.write_long(linkTag); - header.write_any(from); - header.write_any(dest); + // header + header.write_tuple_head(3); + header.write_long(linkTag); + header.write_any(from); + header.write_any(dest); - // fix up length in preamble - header.poke4BE(0, header.size() - 4); + // fix up length in preamble + header.poke4BE(0, header.size() - 4); - do_send(header); + do_send(header); } /** * Remove a link between the local node and the specified process on the * remote node. This method deactivates links created with {@link #sendLink * link()}. - * + * * @param dest * the Erlang PID of the remote process. - * + * * @exception java.io.IOException * if the connection is not active or a communication error * occurs. */ protected void sendUnlink(final OtpErlangPid from, final OtpErlangPid dest) - throws IOException { - if (!connected) { - throw new IOException("Not connected"); - } - @SuppressWarnings("resource") - final OtpOutputStream header = new OtpOutputStream(headerLen); + throws IOException { + if (!connected) { + throw new IOException("Not connected"); + } + @SuppressWarnings("resource") + final OtpOutputStream header = new OtpOutputStream(headerLen); - // preamble: 4 byte length + "passthrough" tag - header.write4BE(0); // reserve space for length - header.write1(passThrough); - header.write1(version); + // preamble: 4 byte length + "passthrough" tag + header.write4BE(0); // reserve space for length + header.write1(passThrough); + header.write1(version); - // header - header.write_tuple_head(3); - header.write_long(unlinkTag); - header.write_any(from); - header.write_any(dest); + // header + header.write_tuple_head(3); + header.write_long(unlinkTag); + header.write_any(from); + header.write_any(dest); - // fix up length in preamble - header.poke4BE(0, header.size() - 4); + // fix up length in preamble + header.poke4BE(0, header.size() - 4); - do_send(header); + do_send(header); } /* used internally when "processes" terminate */ protected void sendExit(final OtpErlangPid from, final OtpErlangPid dest, - final OtpErlangObject reason) throws IOException { - sendExit(exitTag, from, dest, reason); + final OtpErlangObject reason) throws IOException { + sendExit(exitTag, from, dest, reason); } /** * Send an exit signal to a remote process. - * + * * @param dest * the Erlang PID of the remote process. * @param reason * an Erlang term describing the exit reason. - * + * * @exception java.io.IOException * if the connection is not active or a communication error * occurs. 
*/ protected void sendExit2(final OtpErlangPid from, final OtpErlangPid dest, - final OtpErlangObject reason) throws IOException { - sendExit(exit2Tag, from, dest, reason); + final OtpErlangObject reason) throws IOException { + sendExit(exit2Tag, from, dest, reason); } private void sendExit(final int tag, final OtpErlangPid from, - final OtpErlangPid dest, final OtpErlangObject reason) - throws IOException { - if (!connected) { - throw new IOException("Not connected"); - } - @SuppressWarnings("resource") - final OtpOutputStream header = new OtpOutputStream(headerLen); + final OtpErlangPid dest, final OtpErlangObject reason) + throws IOException { + if (!connected) { + throw new IOException("Not connected"); + } + @SuppressWarnings("resource") + final OtpOutputStream header = new OtpOutputStream(headerLen); - // preamble: 4 byte length + "passthrough" tag - header.write4BE(0); // reserve space for length - header.write1(passThrough); - header.write1(version); + // preamble: 4 byte length + "passthrough" tag + header.write4BE(0); // reserve space for length + header.write1(passThrough); + header.write1(version); - // header - header.write_tuple_head(4); - header.write_long(tag); - header.write_any(from); - header.write_any(dest); - header.write_any(reason); + // header + header.write_tuple_head(4); + header.write_long(tag); + header.write_any(from); + header.write_any(dest); + header.write_any(reason); - // fix up length in preamble - header.poke4BE(0, header.size() - 4); + // fix up length in preamble + header.poke4BE(0, header.size() - 4); - do_send(header); + do_send(header); } @SuppressWarnings("resource") @Override public void run() { - if (!connected) { - deliver(new IOException("Not connected")); - return; - } - - final byte[] lbuf = new byte[4]; - OtpInputStream ibuf; - OtpErlangObject traceobj; - int len; - final byte[] tock = { 0, 0, 0, 0 }; - - try { - receive_loop: while (!done) { - // don't return until we get a real message - // or a failure of some kind (e.g. EXIT) - // read length and read buffer must be atomic! - do { - // read 4 bytes - get length of incoming packet - // socket.getInputStream().read(lbuf); - readSock(socket, lbuf); - ibuf = new OtpInputStream(lbuf, flags); - len = ibuf.read4BE(); - - // received tick? send tock! 
- if (len == 0) { - synchronized (this) { - socket.getOutputStream().write(tock); - } - } - - } while (len == 0); // tick_loop - - // got a real message (maybe) - read len bytes - final byte[] tmpbuf = new byte[len]; - // i = socket.getInputStream().read(tmpbuf); - readSock(socket, tmpbuf); - ibuf.close(); - ibuf = new OtpInputStream(tmpbuf, flags); - - if (ibuf.read1() != passThrough) { - break receive_loop; - } - - // got a real message (really) - OtpErlangObject reason = null; - OtpErlangAtom cookie = null; - OtpErlangObject tmp = null; - OtpErlangTuple head = null; - OtpErlangAtom toName; - OtpErlangPid to; - OtpErlangPid from; - int tag; - - // decode the header - tmp = ibuf.read_any(); - if (!(tmp instanceof OtpErlangTuple)) { - break receive_loop; - } - - head = (OtpErlangTuple) tmp; - if (!(head.elementAt(0) instanceof OtpErlangLong)) { - break receive_loop; - } - - // lets see what kind of message this is - tag = (int) ((OtpErlangLong) head.elementAt(0)).longValue(); - - switch (tag) { - case sendTag: // { SEND, Cookie, ToPid } - case sendTTTag: // { SEND, Cookie, ToPid, TraceToken } - if (!cookieOk) { - // we only check this once, he can send us bad cookies - // later if he likes - if (!(head.elementAt(1) instanceof OtpErlangAtom)) { - break receive_loop; - } - cookie = (OtpErlangAtom) head.elementAt(1); - if (sendCookie) { - if (!cookie.atomValue().equals(localNode.cookie())) { - cookieError(localNode, cookie); - } - } else { - if (!cookie.atomValue().equals("")) { - cookieError(localNode, cookie); - } - } - cookieOk = true; - } - - if (traceLevel >= sendThreshold) { - System.out.println("<- " + headerType(head) + " " - + head); - - /* show received payload too */ - ibuf.mark(0); - traceobj = ibuf.read_any(); - - if (traceobj != null) { - System.out.println(" " + traceobj); - } else { - System.out.println(" (null)"); - } - ibuf.reset(); - } - - to = (OtpErlangPid) head.elementAt(2); - - deliver(new OtpMsg(to, ibuf)); - break; - - case regSendTag: // { REG_SEND, FromPid, Cookie, ToName } - case regSendTTTag: // { REG_SEND, FromPid, Cookie, ToName, - // TraceToken } - if (!cookieOk) { - // we only check this once, he can send us bad cookies - // later if he likes - if (!(head.elementAt(2) instanceof OtpErlangAtom)) { - break receive_loop; - } - cookie = (OtpErlangAtom) head.elementAt(2); - if (sendCookie) { - if (!cookie.atomValue().equals(localNode.cookie())) { - cookieError(localNode, cookie); - } - } else { - if (!cookie.atomValue().equals("")) { - cookieError(localNode, cookie); - } - } - cookieOk = true; - } - - if (traceLevel >= sendThreshold) { - System.out.println("<- " + headerType(head) + " " - + head); - - /* show received payload too */ - ibuf.mark(0); - traceobj = ibuf.read_any(); - - if (traceobj != null) { - System.out.println(" " + traceobj); - } else { - System.out.println(" (null)"); - } - ibuf.reset(); - } - - from = (OtpErlangPid) head.elementAt(1); - toName = (OtpErlangAtom) head.elementAt(3); - - deliver(new OtpMsg(from, toName.atomValue(), ibuf)); - break; - - case exitTag: // { EXIT, FromPid, ToPid, Reason } - case exit2Tag: // { EXIT2, FromPid, ToPid, Reason } - if (head.elementAt(3) == null) { - break receive_loop; - } - if (traceLevel >= ctrlThreshold) { - System.out.println("<- " + headerType(head) + " " - + head); - } - - from = (OtpErlangPid) head.elementAt(1); - to = (OtpErlangPid) head.elementAt(2); - reason = head.elementAt(3); - - deliver(new OtpMsg(tag, from, to, reason)); - break; - - case exitTTTag: // { EXIT, FromPid, ToPid, TraceToken, Reason 
} - case exit2TTTag: // { EXIT2, FromPid, ToPid, TraceToken, - // Reason - // } - // as above, but bifferent element number - if (head.elementAt(4) == null) { - break receive_loop; - } - if (traceLevel >= ctrlThreshold) { - System.out.println("<- " + headerType(head) + " " - + head); - } - - from = (OtpErlangPid) head.elementAt(1); - to = (OtpErlangPid) head.elementAt(2); - reason = head.elementAt(4); - - deliver(new OtpMsg(tag, from, to, reason)); - break; - - case linkTag: // { LINK, FromPid, ToPid} - case unlinkTag: // { UNLINK, FromPid, ToPid} - if (traceLevel >= ctrlThreshold) { - System.out.println("<- " + headerType(head) + " " - + head); - } - - from = (OtpErlangPid) head.elementAt(1); - to = (OtpErlangPid) head.elementAt(2); - - deliver(new OtpMsg(tag, from, to)); - break; - - // absolutely no idea what to do with these, so we ignore - // them... - case groupLeaderTag: // { GROUPLEADER, FromPid, ToPid} - // (just show trace) - if (traceLevel >= ctrlThreshold) { - System.out.println("<- " + headerType(head) + " " - + head); - } - break; - - default: - // garbage? - break receive_loop; - } - } // end receive_loop - - // this section reachable only with break - // we have received garbage from peer - deliver(new OtpErlangExit("Remote is sending garbage")); - - } // try - - catch (final OtpAuthException e) { - deliver(e); - } catch (final OtpErlangDecodeException e) { - deliver(new OtpErlangExit("Remote is sending garbage")); - } catch (final IOException e) { - deliver(new OtpErlangExit("Remote has closed connection")); - } finally { - close(); - } + if (!connected) { + deliver(new IOException("Not connected")); + return; + } + + final byte[] lbuf = new byte[4]; + OtpInputStream ibuf; + OtpErlangObject traceobj; + int len; + final byte[] tock = { 0, 0, 0, 0 }; + + try { + receive_loop: while (!done) { + // don't return until we get a real message + // or a failure of some kind (e.g. EXIT) + // read length and read buffer must be atomic! + do { + // read 4 bytes - get length of incoming packet + // socket.getInputStream().read(lbuf); + readSock(socket, lbuf); + ibuf = new OtpInputStream(lbuf, flags); + len = ibuf.read4BE(); + + // received tick? send tock! 
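                    // (an empty packet, i.e. len == 0, is the peer's keepalive "tick";
                    // the four zero bytes written below are the expected "tock" reply)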
+ if (len == 0) { + synchronized (this) { + socket.getOutputStream().write(tock); + } + } + + } while (len == 0); // tick_loop + + // got a real message (maybe) - read len bytes + final byte[] tmpbuf = new byte[len]; + // i = socket.getInputStream().read(tmpbuf); + readSock(socket, tmpbuf); + ibuf.close(); + ibuf = new OtpInputStream(tmpbuf, flags); + + if (ibuf.read1() != passThrough) { + break receive_loop; + } + + // got a real message (really) + OtpErlangObject reason = null; + OtpErlangAtom cookie = null; + OtpErlangObject tmp = null; + OtpErlangTuple head = null; + OtpErlangAtom toName; + OtpErlangPid to; + OtpErlangPid from; + int tag; + + // decode the header + tmp = ibuf.read_any(); + if (!(tmp instanceof OtpErlangTuple)) { + break receive_loop; + } + + head = (OtpErlangTuple) tmp; + if (!(head.elementAt(0) instanceof OtpErlangLong)) { + break receive_loop; + } + + // lets see what kind of message this is + tag = (int) ((OtpErlangLong) head.elementAt(0)).longValue(); + + switch (tag) { + case sendTag: // { SEND, Cookie, ToPid } + case sendTTTag: // { SEND, Cookie, ToPid, TraceToken } + if (!cookieOk) { + // we only check this once, he can send us bad cookies + // later if he likes + if (!(head.elementAt(1) instanceof OtpErlangAtom)) { + break receive_loop; + } + cookie = (OtpErlangAtom) head.elementAt(1); + if (sendCookie) { + if (!cookie.atomValue().equals(localNode.cookie())) { + cookieError(localNode, cookie); + } + } else { + if (!cookie.atomValue().equals("")) { + cookieError(localNode, cookie); + } + } + cookieOk = true; + } + + if (traceLevel >= sendThreshold) { + System.out.println("<- " + headerType(head) + " " + + head); + + /* show received payload too */ + ibuf.mark(0); + traceobj = ibuf.read_any(); + + if (traceobj != null) { + System.out.println(" " + traceobj); + } else { + System.out.println(" (null)"); + } + ibuf.reset(); + } + + to = (OtpErlangPid) head.elementAt(2); + + deliver(new OtpMsg(to, ibuf)); + break; + + case regSendTag: // { REG_SEND, FromPid, Cookie, ToName } + case regSendTTTag: // { REG_SEND, FromPid, Cookie, ToName, + // TraceToken } + if (!cookieOk) { + // we only check this once, he can send us bad cookies + // later if he likes + if (!(head.elementAt(2) instanceof OtpErlangAtom)) { + break receive_loop; + } + cookie = (OtpErlangAtom) head.elementAt(2); + if (sendCookie) { + if (!cookie.atomValue().equals(localNode.cookie())) { + cookieError(localNode, cookie); + } + } else { + if (!cookie.atomValue().equals("")) { + cookieError(localNode, cookie); + } + } + cookieOk = true; + } + + if (traceLevel >= sendThreshold) { + System.out.println("<- " + headerType(head) + " " + + head); + + /* show received payload too */ + ibuf.mark(0); + traceobj = ibuf.read_any(); + + if (traceobj != null) { + System.out.println(" " + traceobj); + } else { + System.out.println(" (null)"); + } + ibuf.reset(); + } + + from = (OtpErlangPid) head.elementAt(1); + toName = (OtpErlangAtom) head.elementAt(3); + + deliver(new OtpMsg(from, toName.atomValue(), ibuf)); + break; + + case exitTag: // { EXIT, FromPid, ToPid, Reason } + case exit2Tag: // { EXIT2, FromPid, ToPid, Reason } + if (head.elementAt(3) == null) { + break receive_loop; + } + if (traceLevel >= ctrlThreshold) { + System.out.println("<- " + headerType(head) + " " + + head); + } + + from = (OtpErlangPid) head.elementAt(1); + to = (OtpErlangPid) head.elementAt(2); + reason = head.elementAt(3); + + deliver(new OtpMsg(tag, from, to, reason)); + break; + + case exitTTTag: // { EXIT, FromPid, ToPid, TraceToken, Reason 
} + case exit2TTTag: // { EXIT2, FromPid, ToPid, TraceToken, + // Reason + // } + // as above, but bifferent element number + if (head.elementAt(4) == null) { + break receive_loop; + } + if (traceLevel >= ctrlThreshold) { + System.out.println("<- " + headerType(head) + " " + + head); + } + + from = (OtpErlangPid) head.elementAt(1); + to = (OtpErlangPid) head.elementAt(2); + reason = head.elementAt(4); + + deliver(new OtpMsg(tag, from, to, reason)); + break; + + case linkTag: // { LINK, FromPid, ToPid} + case unlinkTag: // { UNLINK, FromPid, ToPid} + if (traceLevel >= ctrlThreshold) { + System.out.println("<- " + headerType(head) + " " + + head); + } + + from = (OtpErlangPid) head.elementAt(1); + to = (OtpErlangPid) head.elementAt(2); + + deliver(new OtpMsg(tag, from, to)); + break; + + // absolutely no idea what to do with these, so we ignore + // them... + case groupLeaderTag: // { GROUPLEADER, FromPid, ToPid} + // (just show trace) + if (traceLevel >= ctrlThreshold) { + System.out.println("<- " + headerType(head) + " " + + head); + } + break; + + default: + // garbage? + break receive_loop; + } + } // end receive_loop + + // this section reachable only with break + // we have received garbage from peer + deliver(new OtpErlangExit("Remote is sending garbage")); + + } // try + + catch (final OtpAuthException e) { + deliver(e); + } catch (final OtpErlangDecodeException e) { + deliver(new OtpErlangExit("Remote is sending garbage")); + } catch (final IOException e) { + deliver(new OtpErlangExit("Remote has closed connection")); + } finally { + close(); + } } /** @@ -739,7 +739,7 @@ public abstract class AbstractConnection extends Thread { * Set the trace level for this connection. Normally tracing is off by * default unless System property OtpConnection.trace was set. * </p> - * + * * <p> * The following levels are valid: 0 turns off tracing completely, 1 shows * ordinary send and receive messages, 2 shows control messages such as link @@ -747,632 +747,640 @@ public abstract class AbstractConnection extends Thread { * communication with Epmd. Each level includes the information shown by the * lower ones. * </p> - * + * * @param level * the level to set. - * + * * @return the previous trace level. */ - public int setTraceLevel(int level) { - final int oldLevel = traceLevel; + public int setTraceLevel(final int level) { + final int oldLevel = traceLevel; - // pin the value - int theLevel = level; - if (level < 0) { - theLevel = 0; - } else if (level > 4) { - theLevel = 4; - } + // pin the value + int theLevel = level; + if (level < 0) { + theLevel = 0; + } else if (level > 4) { + theLevel = 4; + } - traceLevel = theLevel; + traceLevel = theLevel; - return oldLevel; + return oldLevel; } /** * Get the trace level for this connection. - * + * * @return the current trace level. */ public int getTraceLevel() { - return traceLevel; + return traceLevel; } /** * Close the connection to the remote node. 
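     *
     * <p>
     * A minimal usage sketch (illustrative only; {@code conn} stands for any
     * concrete connection subclass instance):
     * </p>
     *
     * <pre>{@code
     * try {
     *     // ... exchange messages over the connection ...
     * } finally {
     *     conn.close();               // always release the socket
     *     assert !conn.isConnected(); // close() marks the connection as down
     * }
     * }</pre>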
*/ public void close() { - done = true; - connected = false; - synchronized (this) { - try { - if (socket != null) { - if (traceLevel >= ctrlThreshold) { - System.out.println("-> CLOSE"); - } - socket.close(); - } - } catch (final IOException e) { /* ignore socket close errors */ - } finally { - socket = null; - } - } + done = true; + connected = false; + synchronized (this) { + try { + if (socket != null) { + if (traceLevel >= ctrlThreshold) { + System.out.println("-> CLOSE"); + } + socket.close(); + } + } catch (final IOException e) { /* ignore socket close errors */ + } finally { + socket = null; + } + } } @Override protected void finalize() { - close(); + close(); } /** * Determine if the connection is still alive. Note that this method only * reports the status of the connection, and that it is possible that there * are unread messages waiting in the receive queue. - * + * * @return true if the connection is alive. */ public boolean isConnected() { - return connected; + return connected; } // used by send and send_reg (message types with payload) protected synchronized void do_send(final OtpOutputStream header, - final OtpOutputStream payload) throws IOException { - try { - if (traceLevel >= sendThreshold) { - // Need to decode header and output buffer to show trace - // message! - // First make OtpInputStream, then decode. - try { - final OtpErlangObject h = header.getOtpInputStream(5) - .read_any(); - System.out.println("-> " + headerType(h) + " " + h); - - OtpErlangObject o = payload.getOtpInputStream(0).read_any(); - System.out.println(" " + o); - o = null; - } catch (final OtpErlangDecodeException e) { - System.out.println(" " + "can't decode output buffer:" - + e); - } - } - - header.writeTo(socket.getOutputStream()); - payload.writeTo(socket.getOutputStream()); - } catch (final IOException e) { - close(); - throw e; - } + final OtpOutputStream payload) throws IOException { + try { + if (traceLevel >= sendThreshold) { + // Need to decode header and output buffer to show trace + // message! + // First make OtpInputStream, then decode. 
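                // (offset 5 skips the 4-byte length prefix and the 1-byte
                //  pass-through tag written above, so decoding starts at the
                //  version-tagged control term)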
+ try { + final OtpErlangObject h = header.getOtpInputStream(5) + .read_any(); + System.out.println("-> " + headerType(h) + " " + h); + + OtpErlangObject o = payload.getOtpInputStream(0).read_any(); + System.out.println(" " + o); + o = null; + } catch (final OtpErlangDecodeException e) { + System.out.println(" " + "can't decode output buffer:" + + e); + } + } + + header.writeTo(socket.getOutputStream()); + payload.writeTo(socket.getOutputStream()); + } catch (final IOException e) { + close(); + throw e; + } } // used by the other message types protected synchronized void do_send(final OtpOutputStream header) - throws IOException { - try { - if (traceLevel >= ctrlThreshold) { - try { - final OtpErlangObject h = header.getOtpInputStream(5) - .read_any(); - System.out.println("-> " + headerType(h) + " " + h); - } catch (final OtpErlangDecodeException e) { - System.out.println(" " + "can't decode output buffer: " - + e); - } - } - header.writeTo(socket.getOutputStream()); - } catch (final IOException e) { - close(); - throw e; - } + throws IOException { + try { + if (traceLevel >= ctrlThreshold) { + try { + final OtpErlangObject h = header.getOtpInputStream(5) + .read_any(); + System.out.println("-> " + headerType(h) + " " + h); + } catch (final OtpErlangDecodeException e) { + System.out.println(" " + "can't decode output buffer: " + + e); + } + } + header.writeTo(socket.getOutputStream()); + } catch (final IOException e) { + close(); + throw e; + } } protected String headerType(final OtpErlangObject h) { - int tag = -1; + int tag = -1; - if (h instanceof OtpErlangTuple) { - tag = (int) ((OtpErlangLong) ((OtpErlangTuple) h).elementAt(0)) - .longValue(); - } + if (h instanceof OtpErlangTuple) { + tag = (int) ((OtpErlangLong) ((OtpErlangTuple) h).elementAt(0)) + .longValue(); + } - switch (tag) { - case linkTag: - return "LINK"; + switch (tag) { + case linkTag: + return "LINK"; - case sendTag: - return "SEND"; + case sendTag: + return "SEND"; - case exitTag: - return "EXIT"; + case exitTag: + return "EXIT"; - case unlinkTag: - return "UNLINK"; + case unlinkTag: + return "UNLINK"; - case regSendTag: - return "REG_SEND"; + case regSendTag: + return "REG_SEND"; - case groupLeaderTag: - return "GROUP_LEADER"; + case groupLeaderTag: + return "GROUP_LEADER"; - case exit2Tag: - return "EXIT2"; + case exit2Tag: + return "EXIT2"; - case sendTTTag: - return "SEND_TT"; + case sendTTTag: + return "SEND_TT"; - case exitTTTag: - return "EXIT_TT"; + case exitTTTag: + return "EXIT_TT"; - case regSendTTTag: - return "REG_SEND_TT"; + case regSendTTTag: + return "REG_SEND_TT"; - case exit2TTTag: - return "EXIT2_TT"; - } + case exit2TTTag: + return "EXIT2_TT"; + } - return "(unknown type)"; + return "(unknown type)"; } /* this method now throws exception if we don't get full read */ protected int readSock(final Socket s, final byte[] b) throws IOException { - int got = 0; - final int len = b.length; - int i; - - synchronized (this) { - if (s == null) { - throw new IOException("expected " + len - + " bytes, socket was closed"); - } - } - - while (got < len) { - i = s.getInputStream().read(b, got, len - got); - - if (i < 0) { - throw new IOException("expected " + len - + " bytes, got EOF after " + got + " bytes"); - } else if (i == 0 && len != 0) { - /* - * This is a corner case. According to - * http://java.sun.com/j2se/1.4.2/docs/api/ class InputStream - * is.read(,,l) can only return 0 if l==0. In other words it - * should not happen, but apparently did. 
- */ - throw new IOException("Remote connection closed"); - } else { - got += i; - } - } - return got; + int got = 0; + final int len = b.length; + int i; + + synchronized (this) { + if (s == null) { + throw new IOException("expected " + len + + " bytes, socket was closed"); + } + } + + while (got < len) { + i = s.getInputStream().read(b, got, len - got); + + if (i < 0) { + throw new IOException("expected " + len + + " bytes, got EOF after " + got + " bytes"); + } else if (i == 0 && len != 0) { + /* + * This is a corner case. According to + * http://java.sun.com/j2se/1.4.2/docs/api/ class InputStream + * is.read(,,l) can only return 0 if l==0. In other words it + * should not happen, but apparently did. + */ + throw new IOException("Remote connection closed"); + } else { + got += i; + } + } + return got; } protected void doAccept() throws IOException, OtpAuthException { - try { - sendStatus("ok"); - final int our_challenge = genChallenge(); - sendChallenge(peer.distChoose, localNode.flags, our_challenge); - final int her_challenge = recvChallengeReply(our_challenge); - final byte[] our_digest = genDigest(her_challenge, localNode.cookie()); - sendChallengeAck(our_digest); - connected = true; - cookieOk = true; - sendCookie = false; - } catch (final IOException ie) { - close(); - throw ie; - } catch (final OtpAuthException ae) { - close(); - throw ae; - } catch (final Exception e) { - final String nn = peer.node(); - close(); - IOException ioe = new IOException("Error accepting connection from " + nn); - ioe.initCause(e); - throw ioe; - } - if (traceLevel >= handshakeThreshold) { - System.out.println("<- MD5 ACCEPTED " + peer.host()); - } + try { + sendStatus("ok"); + final int our_challenge = genChallenge(); + sendChallenge(peer.distChoose, localNode.flags, our_challenge); + final int her_challenge = recvChallengeReply(our_challenge); + final byte[] our_digest = genDigest(her_challenge, + localNode.cookie()); + sendChallengeAck(our_digest); + connected = true; + cookieOk = true; + sendCookie = false; + } catch (final IOException ie) { + close(); + throw ie; + } catch (final OtpAuthException ae) { + close(); + throw ae; + } catch (final Exception e) { + final String nn = peer.node(); + close(); + final IOException ioe = new IOException( + "Error accepting connection from " + nn); + ioe.initCause(e); + throw ioe; + } + if (traceLevel >= handshakeThreshold) { + System.out.println("<- MD5 ACCEPTED " + peer.host()); + } } protected void doConnect(final int port) throws IOException, - OtpAuthException { - try { - socket = new Socket(peer.host(), port); - socket.setTcpNoDelay(true); - - if (traceLevel >= handshakeThreshold) { - System.out.println("-> MD5 CONNECT TO " + peer.host() + ":" - + port); - } - sendName(peer.distChoose, localNode.flags); - recvStatus(); - final int her_challenge = recvChallenge(); - final byte[] our_digest = genDigest(her_challenge, localNode.cookie()); - final int our_challenge = genChallenge(); - sendChallengeReply(our_challenge, our_digest); - recvChallengeAck(our_challenge); - cookieOk = true; - sendCookie = false; - } catch (final OtpAuthException ae) { - close(); - throw ae; - } catch (final Exception e) { - close(); - IOException ioe = new IOException("Cannot connect to peer node"); - ioe.initCause(e); - throw ioe; - } + OtpAuthException { + try { + socket = new Socket(peer.host(), port); + socket.setTcpNoDelay(true); + + if (traceLevel >= handshakeThreshold) { + System.out.println("-> MD5 CONNECT TO " + peer.host() + ":" + + port); + } + sendName(peer.distChoose, 
localNode.flags); + recvStatus(); + final int her_challenge = recvChallenge(); + final byte[] our_digest = genDigest(her_challenge, + localNode.cookie()); + final int our_challenge = genChallenge(); + sendChallengeReply(our_challenge, our_digest); + recvChallengeAck(our_challenge); + cookieOk = true; + sendCookie = false; + } catch (final OtpAuthException ae) { + close(); + throw ae; + } catch (final Exception e) { + close(); + final IOException ioe = new IOException( + "Cannot connect to peer node"); + ioe.initCause(e); + throw ioe; + } } // This is nooo good as a challenge, // XXX fix me. static protected int genChallenge() { - return random.nextInt(); + return random.nextInt(); } // Used to debug print a message digest static String hex0(final byte x) { - final char tab[] = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', - 'a', 'b', 'c', 'd', 'e', 'f' }; - int uint; - if (x < 0) { - uint = x & 0x7F; - uint |= 1 << 7; - } else { - uint = x; - } - return "" + tab[uint >>> 4] + tab[uint & 0xF]; + final char tab[] = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', + 'a', 'b', 'c', 'd', 'e', 'f' }; + int uint; + if (x < 0) { + uint = x & 0x7F; + uint |= 1 << 7; + } else { + uint = x; + } + return "" + tab[uint >>> 4] + tab[uint & 0xF]; } static String hex(final byte[] b) { - final StringBuffer sb = new StringBuffer(); - try { - int i; - for (i = 0; i < b.length; ++i) { - sb.append(hex0(b[i])); - } - } catch (final Exception e) { - // Debug function, ignore errors. - } - return sb.toString(); + final StringBuffer sb = new StringBuffer(); + try { + int i; + for (i = 0; i < b.length; ++i) { + sb.append(hex0(b[i])); + } + } catch (final Exception e) { + // Debug function, ignore errors. + } + return sb.toString(); } protected byte[] genDigest(final int challenge, final String cookie) { - int i; - long ch2; - - if (challenge < 0) { - ch2 = 1L << 31; - ch2 |= challenge & 0x7FFFFFFF; - } else { - ch2 = challenge; - } - final OtpMD5 context = new OtpMD5(); - context.update(cookie); - context.update("" + ch2); - - final int[] tmp = context.final_bytes(); - final byte[] res = new byte[tmp.length]; - for (i = 0; i < tmp.length; ++i) { - res[i] = (byte) (tmp[i] & 0xFF); - } - return res; + int i; + long ch2; + + if (challenge < 0) { + ch2 = 1L << 31; + ch2 |= challenge & 0x7FFFFFFF; + } else { + ch2 = challenge; + } + final OtpMD5 context = new OtpMD5(); + context.update(cookie); + context.update("" + ch2); + + final int[] tmp = context.final_bytes(); + final byte[] res = new byte[tmp.length]; + for (i = 0; i < tmp.length; ++i) { + res[i] = (byte) (tmp[i] & 0xFF); + } + return res; } - protected void sendName(final int dist, final int aflags) throws IOException { + protected void sendName(final int dist, final int aflags) + throws IOException { - @SuppressWarnings("resource") - final OtpOutputStream obuf = new OtpOutputStream(); - final String str = localNode.node(); - obuf.write2BE(str.length() + 7); // 7 bytes + nodename - obuf.write1(AbstractNode.NTYPE_R6); - obuf.write2BE(dist); - obuf.write4BE(aflags); - obuf.write(str.getBytes()); - - obuf.writeTo(socket.getOutputStream()); - - if (traceLevel >= handshakeThreshold) { - System.out.println("-> " + "HANDSHAKE sendName" + " flags=" + aflags - + " dist=" + dist + " local=" + localNode); - } + @SuppressWarnings("resource") + final OtpOutputStream obuf = new OtpOutputStream(); + final String str = localNode.node(); + obuf.write2BE(str.length() + 7); // 7 bytes + nodename + obuf.write1(AbstractNode.NTYPE_R6); + obuf.write2BE(dist); + 
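        // frame layout so far: 2-byte length (name length + 7), 1-byte node
        // type, 2-byte distribution version; next come the 4-byte capability
        // flags and the node name bytes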
obuf.write4BE(aflags); + obuf.write(str.getBytes()); + + obuf.writeTo(socket.getOutputStream()); + + if (traceLevel >= handshakeThreshold) { + System.out.println("-> " + "HANDSHAKE sendName" + " flags=" + + aflags + " dist=" + dist + " local=" + localNode); + } } protected void sendChallenge(final int dist, final int aflags, - final int challenge) throws IOException { + final int challenge) throws IOException { - @SuppressWarnings("resource") - final OtpOutputStream obuf = new OtpOutputStream(); - final String str = localNode.node(); - obuf.write2BE(str.length() + 11); // 11 bytes + nodename - obuf.write1(AbstractNode.NTYPE_R6); - obuf.write2BE(dist); - obuf.write4BE(aflags); - obuf.write4BE(challenge); - obuf.write(str.getBytes()); - - obuf.writeTo(socket.getOutputStream()); - - if (traceLevel >= handshakeThreshold) { - System.out.println("-> " + "HANDSHAKE sendChallenge" + " flags=" - + aflags + " dist=" + dist + " challenge=" + challenge - + " local=" + localNode); - } + @SuppressWarnings("resource") + final OtpOutputStream obuf = new OtpOutputStream(); + final String str = localNode.node(); + obuf.write2BE(str.length() + 11); // 11 bytes + nodename + obuf.write1(AbstractNode.NTYPE_R6); + obuf.write2BE(dist); + obuf.write4BE(aflags); + obuf.write4BE(challenge); + obuf.write(str.getBytes()); + + obuf.writeTo(socket.getOutputStream()); + + if (traceLevel >= handshakeThreshold) { + System.out.println("-> " + "HANDSHAKE sendChallenge" + " flags=" + + aflags + " dist=" + dist + " challenge=" + challenge + + " local=" + localNode); + } } protected byte[] read2BytePackage() throws IOException, - OtpErlangDecodeException { + OtpErlangDecodeException { - final byte[] lbuf = new byte[2]; - byte[] tmpbuf; + final byte[] lbuf = new byte[2]; + byte[] tmpbuf; - readSock(socket, lbuf); - @SuppressWarnings("resource") - final OtpInputStream ibuf = new OtpInputStream(lbuf, 0); - final int len = ibuf.read2BE(); - tmpbuf = new byte[len]; - readSock(socket, tmpbuf); - return tmpbuf; + readSock(socket, lbuf); + @SuppressWarnings("resource") + final OtpInputStream ibuf = new OtpInputStream(lbuf, 0); + final int len = ibuf.read2BE(); + tmpbuf = new byte[len]; + readSock(socket, tmpbuf); + return tmpbuf; } protected void recvName(final OtpPeer apeer) throws IOException { - String hisname = ""; - - try { - final byte[] tmpbuf = read2BytePackage(); - @SuppressWarnings("resource") - final OtpInputStream ibuf = new OtpInputStream(tmpbuf, 0); - byte[] tmpname; - final int len = tmpbuf.length; - apeer.ntype = ibuf.read1(); - if (apeer.ntype != AbstractNode.NTYPE_R6) { - throw new IOException("Unknown remote node type"); - } - apeer.distLow = apeer.distHigh = ibuf.read2BE(); - if (apeer.distLow < 5) { - throw new IOException("Unknown remote node type"); - } - apeer.flags = ibuf.read4BE(); - tmpname = new byte[len - 7]; - ibuf.readN(tmpname); - hisname = OtpErlangString.newString(tmpname); - // Set the old nodetype parameter to indicate hidden/normal status - // When the old handshake is removed, the ntype should also be. 
- if ((apeer.flags & AbstractNode.dFlagPublished) != 0) { - apeer.ntype = AbstractNode.NTYPE_R4_ERLANG; - } else { - apeer.ntype = AbstractNode.NTYPE_R4_HIDDEN; - } - - if ((apeer.flags & AbstractNode.dFlagExtendedReferences) == 0) { - throw new IOException( - "Handshake failed - peer cannot handle extended references"); - } - - if ((apeer.flags & AbstractNode.dFlagExtendedPidsPorts) == 0) { - throw new IOException( - "Handshake failed - peer cannot handle extended pids and ports"); - } - - } catch (final OtpErlangDecodeException e) { - throw new IOException("Handshake failed - not enough data"); - } - - final int i = hisname.indexOf('@', 0); - apeer.node = hisname; - apeer.alive = hisname.substring(0, i); - apeer.host = hisname.substring(i + 1, hisname.length()); - - if (traceLevel >= handshakeThreshold) { - System.out.println("<- " + "HANDSHAKE" + " ntype=" + apeer.ntype - + " dist=" + apeer.distHigh + " remote=" + apeer); - } + String hisname = ""; + + try { + final byte[] tmpbuf = read2BytePackage(); + @SuppressWarnings("resource") + final OtpInputStream ibuf = new OtpInputStream(tmpbuf, 0); + byte[] tmpname; + final int len = tmpbuf.length; + apeer.ntype = ibuf.read1(); + if (apeer.ntype != AbstractNode.NTYPE_R6) { + throw new IOException("Unknown remote node type"); + } + apeer.distLow = apeer.distHigh = ibuf.read2BE(); + if (apeer.distLow < 5) { + throw new IOException("Unknown remote node type"); + } + apeer.flags = ibuf.read4BE(); + tmpname = new byte[len - 7]; + ibuf.readN(tmpname); + hisname = OtpErlangString.newString(tmpname); + // Set the old nodetype parameter to indicate hidden/normal status + // When the old handshake is removed, the ntype should also be. + if ((apeer.flags & AbstractNode.dFlagPublished) != 0) { + apeer.ntype = AbstractNode.NTYPE_R4_ERLANG; + } else { + apeer.ntype = AbstractNode.NTYPE_R4_HIDDEN; + } + + if ((apeer.flags & AbstractNode.dFlagExtendedReferences) == 0) { + throw new IOException( + "Handshake failed - peer cannot handle extended references"); + } + + if ((apeer.flags & AbstractNode.dFlagExtendedPidsPorts) == 0) { + throw new IOException( + "Handshake failed - peer cannot handle extended pids and ports"); + } + + } catch (final OtpErlangDecodeException e) { + throw new IOException("Handshake failed - not enough data"); + } + + final int i = hisname.indexOf('@', 0); + apeer.node = hisname; + apeer.alive = hisname.substring(0, i); + apeer.host = hisname.substring(i + 1, hisname.length()); + + if (traceLevel >= handshakeThreshold) { + System.out.println("<- " + "HANDSHAKE" + " ntype=" + apeer.ntype + + " dist=" + apeer.distHigh + " remote=" + apeer); + } } protected int recvChallenge() throws IOException { - int challenge; - - try { - final byte[] buf = read2BytePackage(); - @SuppressWarnings("resource") - final OtpInputStream ibuf = new OtpInputStream(buf, 0); - peer.ntype = ibuf.read1(); - if (peer.ntype != AbstractNode.NTYPE_R6) { - throw new IOException("Unexpected peer type"); - } - peer.distLow = peer.distHigh = ibuf.read2BE(); - peer.flags = ibuf.read4BE(); - challenge = ibuf.read4BE(); - final byte[] tmpname = new byte[buf.length - 11]; - ibuf.readN(tmpname); - final String hisname = OtpErlangString.newString(tmpname); - if (!hisname.equals(peer.node)) { - throw new IOException( - "Handshake failed - peer has wrong name: " + hisname); - } - - if ((peer.flags & AbstractNode.dFlagExtendedReferences) == 0) { - throw new IOException( - "Handshake failed - peer cannot handle extended references"); - } - - if ((peer.flags & 
AbstractNode.dFlagExtendedPidsPorts) == 0) { - throw new IOException( - "Handshake failed - peer cannot handle extended pids and ports"); - } - - } catch (final OtpErlangDecodeException e) { - throw new IOException("Handshake failed - not enough data"); - } - - if (traceLevel >= handshakeThreshold) { - System.out.println("<- " + "HANDSHAKE recvChallenge" + " from=" - + peer.node + " challenge=" + challenge + " local=" + localNode); - } - - return challenge; + int challenge; + + try { + final byte[] buf = read2BytePackage(); + @SuppressWarnings("resource") + final OtpInputStream ibuf = new OtpInputStream(buf, 0); + peer.ntype = ibuf.read1(); + if (peer.ntype != AbstractNode.NTYPE_R6) { + throw new IOException("Unexpected peer type"); + } + peer.distLow = peer.distHigh = ibuf.read2BE(); + peer.flags = ibuf.read4BE(); + challenge = ibuf.read4BE(); + final byte[] tmpname = new byte[buf.length - 11]; + ibuf.readN(tmpname); + final String hisname = OtpErlangString.newString(tmpname); + if (!hisname.equals(peer.node)) { + throw new IOException( + "Handshake failed - peer has wrong name: " + hisname); + } + + if ((peer.flags & AbstractNode.dFlagExtendedReferences) == 0) { + throw new IOException( + "Handshake failed - peer cannot handle extended references"); + } + + if ((peer.flags & AbstractNode.dFlagExtendedPidsPorts) == 0) { + throw new IOException( + "Handshake failed - peer cannot handle extended pids and ports"); + } + + } catch (final OtpErlangDecodeException e) { + throw new IOException("Handshake failed - not enough data"); + } + + if (traceLevel >= handshakeThreshold) { + System.out.println("<- " + "HANDSHAKE recvChallenge" + " from=" + + peer.node + " challenge=" + challenge + " local=" + + localNode); + } + + return challenge; } protected void sendChallengeReply(final int challenge, final byte[] digest) - throws IOException { + throws IOException { - @SuppressWarnings("resource") - final OtpOutputStream obuf = new OtpOutputStream(); - obuf.write2BE(21); - obuf.write1(ChallengeReply); - obuf.write4BE(challenge); - obuf.write(digest); - obuf.writeTo(socket.getOutputStream()); - - if (traceLevel >= handshakeThreshold) { - System.out.println("-> " + "HANDSHAKE sendChallengeReply" - + " challenge=" + challenge + " digest=" + hex(digest) - + " local=" + localNode); - } + @SuppressWarnings("resource") + final OtpOutputStream obuf = new OtpOutputStream(); + obuf.write2BE(21); + obuf.write1(ChallengeReply); + obuf.write4BE(challenge); + obuf.write(digest); + obuf.writeTo(socket.getOutputStream()); + + if (traceLevel >= handshakeThreshold) { + System.out.println("-> " + "HANDSHAKE sendChallengeReply" + + " challenge=" + challenge + " digest=" + hex(digest) + + " local=" + localNode); + } } // Would use Array.equals in newer JDK... 
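    // (java.util.Arrays.equals(a, b) gives the same element-wise comparison,
    //  and additionally checks that both arrays have the same length)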
private boolean digests_equals(final byte[] a, final byte[] b) { - int i; - for (i = 0; i < 16; ++i) { - if (a[i] != b[i]) { - return false; - } - } - return true; + int i; + for (i = 0; i < 16; ++i) { + if (a[i] != b[i]) { + return false; + } + } + return true; } protected int recvChallengeReply(final int our_challenge) - throws IOException, OtpAuthException { - - int challenge; - final byte[] her_digest = new byte[16]; - - try { - final byte[] buf = read2BytePackage(); - @SuppressWarnings("resource") - final OtpInputStream ibuf = new OtpInputStream(buf, 0); - final int tag = ibuf.read1(); - if (tag != ChallengeReply) { - throw new IOException("Handshake protocol error"); - } - challenge = ibuf.read4BE(); - ibuf.readN(her_digest); - final byte[] our_digest = genDigest(our_challenge, localNode.cookie()); - if (!digests_equals(her_digest, our_digest)) { - throw new OtpAuthException("Peer authentication error."); - } - } catch (final OtpErlangDecodeException e) { - throw new IOException("Handshake failed - not enough data"); - } - - if (traceLevel >= handshakeThreshold) { - System.out.println("<- " + "HANDSHAKE recvChallengeReply" - + " from=" + peer.node + " challenge=" + challenge - + " digest=" + hex(her_digest) + " local=" + localNode); - } - - return challenge; + throws IOException, OtpAuthException { + + int challenge; + final byte[] her_digest = new byte[16]; + + try { + final byte[] buf = read2BytePackage(); + @SuppressWarnings("resource") + final OtpInputStream ibuf = new OtpInputStream(buf, 0); + final int tag = ibuf.read1(); + if (tag != ChallengeReply) { + throw new IOException("Handshake protocol error"); + } + challenge = ibuf.read4BE(); + ibuf.readN(her_digest); + final byte[] our_digest = genDigest(our_challenge, + localNode.cookie()); + if (!digests_equals(her_digest, our_digest)) { + throw new OtpAuthException("Peer authentication error."); + } + } catch (final OtpErlangDecodeException e) { + throw new IOException("Handshake failed - not enough data"); + } + + if (traceLevel >= handshakeThreshold) { + System.out.println("<- " + "HANDSHAKE recvChallengeReply" + + " from=" + peer.node + " challenge=" + challenge + + " digest=" + hex(her_digest) + " local=" + localNode); + } + + return challenge; } protected void sendChallengeAck(final byte[] digest) throws IOException { - @SuppressWarnings("resource") - final OtpOutputStream obuf = new OtpOutputStream(); - obuf.write2BE(17); - obuf.write1(ChallengeAck); - obuf.write(digest); + @SuppressWarnings("resource") + final OtpOutputStream obuf = new OtpOutputStream(); + obuf.write2BE(17); + obuf.write1(ChallengeAck); + obuf.write(digest); - obuf.writeTo(socket.getOutputStream()); + obuf.writeTo(socket.getOutputStream()); - if (traceLevel >= handshakeThreshold) { - System.out.println("-> " + "HANDSHAKE sendChallengeAck" - + " digest=" + hex(digest) + " local=" + localNode); - } + if (traceLevel >= handshakeThreshold) { + System.out.println("-> " + "HANDSHAKE sendChallengeAck" + + " digest=" + hex(digest) + " local=" + localNode); + } } protected void recvChallengeAck(final int our_challenge) - throws IOException, OtpAuthException { - - final byte[] her_digest = new byte[16]; - try { - final byte[] buf = read2BytePackage(); - @SuppressWarnings("resource") - final OtpInputStream ibuf = new OtpInputStream(buf, 0); - final int tag = ibuf.read1(); - if (tag != ChallengeAck) { - throw new IOException("Handshake protocol error"); - } - ibuf.readN(her_digest); - final byte[] our_digest = genDigest(our_challenge, localNode.cookie()); - if 
(!digests_equals(her_digest, our_digest)) { - throw new OtpAuthException("Peer authentication error."); - } - } catch (final OtpErlangDecodeException e) { - throw new IOException("Handshake failed - not enough data"); - } catch (final Exception e) { - throw new OtpAuthException("Peer authentication error."); - } - - if (traceLevel >= handshakeThreshold) { - System.out.println("<- " + "HANDSHAKE recvChallengeAck" + " from=" - + peer.node + " digest=" + hex(her_digest) + " local=" - + localNode); - } + throws IOException, OtpAuthException { + + final byte[] her_digest = new byte[16]; + try { + final byte[] buf = read2BytePackage(); + @SuppressWarnings("resource") + final OtpInputStream ibuf = new OtpInputStream(buf, 0); + final int tag = ibuf.read1(); + if (tag != ChallengeAck) { + throw new IOException("Handshake protocol error"); + } + ibuf.readN(her_digest); + final byte[] our_digest = genDigest(our_challenge, + localNode.cookie()); + if (!digests_equals(her_digest, our_digest)) { + throw new OtpAuthException("Peer authentication error."); + } + } catch (final OtpErlangDecodeException e) { + throw new IOException("Handshake failed - not enough data"); + } catch (final Exception e) { + throw new OtpAuthException("Peer authentication error."); + } + + if (traceLevel >= handshakeThreshold) { + System.out.println("<- " + "HANDSHAKE recvChallengeAck" + " from=" + + peer.node + " digest=" + hex(her_digest) + " local=" + + localNode); + } } protected void sendStatus(final String status) throws IOException { - @SuppressWarnings("resource") - final OtpOutputStream obuf = new OtpOutputStream(); - obuf.write2BE(status.length() + 1); - obuf.write1(ChallengeStatus); - obuf.write(status.getBytes()); + @SuppressWarnings("resource") + final OtpOutputStream obuf = new OtpOutputStream(); + obuf.write2BE(status.length() + 1); + obuf.write1(ChallengeStatus); + obuf.write(status.getBytes()); - obuf.writeTo(socket.getOutputStream()); + obuf.writeTo(socket.getOutputStream()); - if (traceLevel >= handshakeThreshold) { - System.out.println("-> " + "HANDSHAKE sendStatus" + " status=" - + status + " local=" + localNode); - } + if (traceLevel >= handshakeThreshold) { + System.out.println("-> " + "HANDSHAKE sendStatus" + " status=" + + status + " local=" + localNode); + } } protected void recvStatus() throws IOException { - try { - final byte[] buf = read2BytePackage(); - @SuppressWarnings("resource") - final OtpInputStream ibuf = new OtpInputStream(buf, 0); - final int tag = ibuf.read1(); - if (tag != ChallengeStatus) { - throw new IOException("Handshake protocol error"); - } - final byte[] tmpbuf = new byte[buf.length - 1]; - ibuf.readN(tmpbuf); - final String status = OtpErlangString.newString(tmpbuf); - - if (status.compareTo("ok") != 0) { - throw new IOException("Peer replied with status '" + status - + "' instead of 'ok'"); - } - } catch (final OtpErlangDecodeException e) { - throw new IOException("Handshake failed - not enough data"); - } - if (traceLevel >= handshakeThreshold) { - System.out.println("<- " + "HANDSHAKE recvStatus (ok)" + " local=" - + localNode); - } + try { + final byte[] buf = read2BytePackage(); + @SuppressWarnings("resource") + final OtpInputStream ibuf = new OtpInputStream(buf, 0); + final int tag = ibuf.read1(); + if (tag != ChallengeStatus) { + throw new IOException("Handshake protocol error"); + } + final byte[] tmpbuf = new byte[buf.length - 1]; + ibuf.readN(tmpbuf); + final String status = OtpErlangString.newString(tmpbuf); + + if (status.compareTo("ok") != 0) { + throw new 
IOException("Peer replied with status '" + status + + "' instead of 'ok'"); + } + } catch (final OtpErlangDecodeException e) { + throw new IOException("Handshake failed - not enough data"); + } + if (traceLevel >= handshakeThreshold) { + System.out.println("<- " + "HANDSHAKE recvStatus (ok)" + " local=" + + localNode); + } } public void setFlags(final int flags) { - this.flags = flags; + this.flags = flags; } public int getFlags() { - return flags; + return flags; } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractNode.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractNode.java index 3bb1bbbd18..6f07d8171e 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractNode.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractNode.java @@ -1,20 +1,20 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2014. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * - * %CopyrightEnd% + * + * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -29,7 +29,7 @@ import java.net.UnknownHostException; * <p> * Represents an OTP node. * </p> - * + * * <p> * About nodenames: Erlang nodenames consist of two components, an alivename and * a hostname separated by '@'. Additionally, there are two nodename formats: @@ -40,7 +40,7 @@ import java.net.UnknownHostException; * however Jinterface makes no distinction. See the Erlang documentation for * more information about nodenames. * </p> - * + * * <p> * The constructors for the AbstractNode classes will create names exactly as * you provide them as long as the name contains '@'. If the string you provide @@ -48,7 +48,7 @@ import java.net.UnknownHostException; * host will be appended, resulting in a shortname. Nodenames longer than 255 * characters will be truncated without warning. * </p> - * + * * <p> * Upon initialization, this class attempts to read the file .erlang.cookie in * the user's home directory, and uses the trimmed first line of the file as the @@ -58,7 +58,7 @@ import java.net.UnknownHostException; * using the system property "user.home", which may not be automatically set on * all platforms. * </p> - * + * * <p> * Instances of this class cannot be created directly, use one of the subclasses * instead. @@ -100,50 +100,50 @@ public class AbstractNode { int distLow = 5; // Cannot talk to nodes before R6 int creation = 0; int flags = dFlagExtendedReferences | dFlagExtendedPidsPorts - | dFlagBitBinaries | dFlagNewFloats | dFlagFunTags - | dflagNewFunTags | dFlagUtf8Atoms | dFlagMapTag; + | dFlagBitBinaries | dFlagNewFloats | dFlagFunTags + | dflagNewFunTags | dFlagUtf8Atoms | dFlagMapTag; /* initialize hostname and default cookie */ static { - try { - localHost = InetAddress.getLocalHost().getHostName(); - /* - * Make sure it's a short name, i.e. strip of everything after first - * '.' 
- */ - final int dot = localHost.indexOf("."); - if (dot != -1) { - localHost = localHost.substring(0, dot); - } - } catch (final UnknownHostException e) { - localHost = "localhost"; - } + try { + localHost = InetAddress.getLocalHost().getHostName(); + /* + * Make sure it's a short name, i.e. strip of everything after first + * '.' + */ + final int dot = localHost.indexOf("."); + if (dot != -1) { + localHost = localHost.substring(0, dot); + } + } catch (final UnknownHostException e) { + localHost = "localhost"; + } - final String homeDir = getHomeDir(); - final String dotCookieFilename = homeDir + File.separator + final String homeDir = getHomeDir(); + final String dotCookieFilename = homeDir + File.separator + ".erlang.cookie"; - BufferedReader br = null; + BufferedReader br = null; - try { - final File dotCookieFile = new File(dotCookieFilename); + try { + final File dotCookieFile = new File(dotCookieFilename); - br = new BufferedReader(new FileReader(dotCookieFile)); - final String line = br.readLine(); - if (line == null) { - defaultCookie = ""; - } else { - defaultCookie = line.trim(); - } - } catch (final IOException e) { - defaultCookie = ""; - } finally { - try { - if (br != null) { - br.close(); - } - } catch (final IOException e) { - } - } + br = new BufferedReader(new FileReader(dotCookieFile)); + final String line = br.readLine(); + if (line == null) { + defaultCookie = ""; + } else { + defaultCookie = line.trim(); + } + } catch (final IOException e) { + defaultCookie = ""; + } finally { + try { + if (br != null) { + br.close(); + } + } catch (final IOException e) { + } + } } protected AbstractNode() { @@ -153,119 +153,119 @@ public class AbstractNode { * Create a node with the given name and the default cookie. */ protected AbstractNode(final String node) { - this(node, defaultCookie); + this(node, defaultCookie); } /** * Create a node with the given name and cookie. */ protected AbstractNode(final String name, final String cookie) { - this.cookie = cookie; + this.cookie = cookie; - final int i = name.indexOf('@', 0); - if (i < 0) { - alive = name; - host = localHost; - } else { - alive = name.substring(0, i); - host = name.substring(i + 1, name.length()); - } + final int i = name.indexOf('@', 0); + if (i < 0) { + alive = name; + host = localHost; + } else { + alive = name.substring(0, i); + host = name.substring(i + 1, name.length()); + } - if (alive.length() > 0xff) { - alive = alive.substring(0, 0xff); - } + if (alive.length() > 0xff) { + alive = alive.substring(0, 0xff); + } - node = alive + "@" + host; + node = alive + "@" + host; } /** * Get the name of this node. - * + * * @return the name of the node represented by this object. */ public String node() { - return node; + return node; } /** * Get the hostname part of the nodename. Nodenames are composed of two * parts, an alivename and a hostname, separated by '@'. This method returns * the part of the nodename following the '@'. - * + * * @return the hostname component of the nodename. */ public String host() { - return host; + return host; } /** * Get the alivename part of the hostname. Nodenames are composed of two * parts, an alivename and a hostname, separated by '@'. This method returns * the part of the nodename preceding the '@'. - * + * * @return the alivename component of the nodename. */ public String alive() { - return alive; + return alive; } /** * Get the authorization cookie used by this node. - * + * * @return the authorization cookie used by this node. 
*/ public String cookie() { - return cookie; + return cookie; } // package scope int type() { - return ntype; + return ntype; } // package scope int distHigh() { - return distHigh; + return distHigh; } // package scope int distLow() { - return distLow; + return distLow; } // package scope: useless information? int proto() { - return proto; + return proto; } // package scope int creation() { - return creation; + return creation; } /** * Set the authorization cookie used by this node. - * + * * @return the previous authorization cookie used by this node. */ public String setCookie(final String cookie) { - final String prev = this.cookie; - this.cookie = cookie; - return prev; + final String prev = this.cookie; + this.cookie = cookie; + return prev; } @Override public String toString() { - return node(); + return node(); } private static String getHomeDir() { - final String home = System.getProperty("user.home"); - if (System.getProperty("os.name").toLowerCase().contains("windows")) { - final String drive = System.getenv("HOMEDRIVE"); - final String path = System.getenv("HOMEPATH"); - return (drive != null && path != null) ? drive + path : home; - } - return home; + final String home = System.getProperty("user.home"); + if (System.getProperty("os.name").toLowerCase().contains("windows")) { + final String drive = System.getenv("HOMEDRIVE"); + final String path = System.getenv("HOMEPATH"); + return drive != null && path != null ? drive + path : home; + } + return home; } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/GenericQueue.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/GenericQueue.java index 80bb02f16c..8a66190e6f 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/GenericQueue.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/GenericQueue.java @@ -1,19 +1,19 @@ -/* +/* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -34,128 +34,128 @@ public class GenericQueue { private int count; private void init() { - head = null; - tail = null; - count = 0; + head = null; + tail = null; + count = 0; } /** Create an empty queue */ public GenericQueue() { - init(); - status = open; + init(); + status = open; } /** Clear a queue */ public void flush() { - init(); + init(); } public void close() { - status = closing; + status = closing; } /** * Add an object to the tail of the queue. 
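     *
     * <p>
     * A minimal usage sketch (illustrative only), pairing {@link #put(Object)}
     * with the blocking {@link #get()} on another thread:
     * </p>
     *
     * <pre>{@code
     * GenericQueue q = new GenericQueue();
     * q.put("hello");               // producer side
     * Object next = q.get();        // consumer side: blocks until an item arrives
     * }</pre>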
- * + * * @param o - * Object to insert in the queue + * Object to insert in the queue */ public synchronized void put(final Object o) { - final Bucket b = new Bucket(o); - - if (tail != null) { - tail.setNext(b); - tail = b; - } else { - // queue was empty but has one element now - head = tail = b; - } - count++; - - // notify any waiting tasks - notify(); + final Bucket b = new Bucket(o); + + if (tail != null) { + tail.setNext(b); + tail = b; + } else { + // queue was empty but has one element now + head = tail = b; + } + count++; + + // notify any waiting tasks + notify(); } /** * Retrieve an object from the head of the queue, or block until one * arrives. - * + * * @return The object at the head of the queue. */ public synchronized Object get() { - Object o = null; - - while ((o = tryGet()) == null) { - try { - this.wait(); - } catch (final InterruptedException e) { - } - } - return o; + Object o = null; + + while ((o = tryGet()) == null) { + try { + this.wait(); + } catch (final InterruptedException e) { + } + } + return o; } /** * Retrieve an object from the head of the queue, blocking until one arrives * or until timeout occurs. - * + * * @param timeout - * Maximum time to block on queue, in ms. Use 0 to poll the - * queue. - * + * Maximum time to block on queue, in ms. Use 0 to poll the + * queue. + * * @exception InterruptedException - * if the operation times out. - * + * if the operation times out. + * * @return The object at the head of the queue, or null if none arrived in * time. */ public synchronized Object get(final long timeout) - throws InterruptedException { - if (status == closed) { - return null; - } - - long currentTime = System.currentTimeMillis(); - final long stopTime = currentTime + timeout; - Object o = null; - - while (true) { - if ((o = tryGet()) != null) { - return o; - } - - currentTime = System.currentTimeMillis(); - if (stopTime <= currentTime) { - throw new InterruptedException("Get operation timed out"); - } - - try { - this.wait(stopTime - currentTime); - } catch (final InterruptedException e) { - // ignore, but really should retry operation instead - } - } + throws InterruptedException { + if (status == closed) { + return null; + } + + long currentTime = System.currentTimeMillis(); + final long stopTime = currentTime + timeout; + Object o = null; + + while (true) { + if ((o = tryGet()) != null) { + return o; + } + + currentTime = System.currentTimeMillis(); + if (stopTime <= currentTime) { + throw new InterruptedException("Get operation timed out"); + } + + try { + this.wait(stopTime - currentTime); + } catch (final InterruptedException e) { + // ignore, but really should retry operation instead + } + } } // attempt to retrieve message from queue head public Object tryGet() { - Object o = null; + Object o = null; - if (head != null) { - o = head.getContents(); - head = head.getNext(); - count--; + if (head != null) { + o = head.getContents(); + head = head.getNext(); + count--; - if (head == null) { - tail = null; - count = 0; - } - } + if (head == null) { + tail = null; + count = 0; + } + } - return o; + return o; } public synchronized int getCount() { - return count; + return count; } /* @@ -163,24 +163,24 @@ public class GenericQueue { * The container holds the queued object and a reference to the next Bucket. 
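/*
 * Illustrative sketch (not part of the patch above): GenericQueue is a simple
 * blocking FIFO. get() blocks until an element arrives, while get(timeout)
 * throws InterruptedException if nothing shows up in time. Class and value
 * names below are made-up examples.
 */
import com.ericsson.otp.erlang.GenericQueue;

public class QueueSketch {
    public static void main(final String[] args) throws Exception {
        final GenericQueue q = new GenericQueue();

        // Producer thread: enqueue one message after a short delay.
        new Thread(() -> {
            try {
                Thread.sleep(100);
            } catch (final InterruptedException e) {
                Thread.currentThread().interrupt();
            }
            q.put("hello");
        }).start();

        System.out.println(q.get());      // blocks, then prints "hello"
        System.out.println(q.tryGet());   // queue is now empty: prints "null"
        try {
            q.get(50);                    // nothing arrives within 50 ms
        } catch (final InterruptedException e) {
            System.out.println("timed out: " + e.getMessage());
        }
    }
}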
*/ class Bucket { - private Bucket next; - private final Object contents; + private Bucket next; + private final Object contents; - public Bucket(final Object o) { - next = null; - contents = o; - } + public Bucket(final Object o) { + next = null; + contents = o; + } - public void setNext(final Bucket newNext) { - next = newNext; - } + public void setNext(final Bucket newNext) { + next = newNext; + } - public Bucket getNext() { - return next; - } + public Bucket getNext() { + return next; + } - public Object getContents() { - return contents; - } + public Object getContents() { + return contents; + } } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/Link.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/Link.java index c8b4fcebde..33ba94e53f 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/Link.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/Link.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -25,34 +25,34 @@ class Link { private int hashCodeValue = 0; public Link(final OtpErlangPid local, final OtpErlangPid remote) { - this.local = local; - this.remote = remote; + this.local = local; + this.remote = remote; } public OtpErlangPid local() { - return local; + return local; } public OtpErlangPid remote() { - return remote; + return remote; } public boolean contains(final OtpErlangPid pid) { - return local.equals(pid) || remote.equals(pid); + return local.equals(pid) || remote.equals(pid); } public boolean equals(final OtpErlangPid alocal, final OtpErlangPid aremote) { - return local.equals(alocal) && remote.equals(aremote) - || local.equals(aremote) && remote.equals(alocal); + return local.equals(alocal) && remote.equals(aremote) + || local.equals(aremote) && remote.equals(alocal); } - + @Override public int hashCode() { - if (hashCodeValue == 0) { - OtpErlangObject.Hash hash = new OtpErlangObject.Hash(5); - hash.combine(local.hashCode() + remote.hashCode()); - hashCodeValue = hash.valueOf(); - } - return hashCodeValue; + if (hashCodeValue == 0) { + final OtpErlangObject.Hash hash = new OtpErlangObject.Hash(5); + hash.combine(local.hashCode() + remote.hashCode()); + hashCodeValue = hash.valueOf(); + } + return hashCodeValue; } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/Links.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/Links.java index 0bb4a708a3..38517860ed 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/Links.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/Links.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. 
You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -24,100 +24,100 @@ class Links { int count; Links() { - this(10); + this(10); } Links(final int initialSize) { - links = new Link[initialSize]; - count = 0; + links = new Link[initialSize]; + count = 0; } synchronized void addLink(final OtpErlangPid local, - final OtpErlangPid remote) { - if (find(local, remote) == -1) { - if (count >= links.length) { - final Link[] tmp = new Link[count * 2]; - System.arraycopy(links, 0, tmp, 0, count); - links = tmp; - } - links[count++] = new Link(local, remote); - } + final OtpErlangPid remote) { + if (find(local, remote) == -1) { + if (count >= links.length) { + final Link[] tmp = new Link[count * 2]; + System.arraycopy(links, 0, tmp, 0, count); + links = tmp; + } + links[count++] = new Link(local, remote); + } } synchronized void removeLink(final OtpErlangPid local, - final OtpErlangPid remote) { - int i; + final OtpErlangPid remote) { + int i; - if ((i = find(local, remote)) != -1) { - count--; - links[i] = links[count]; - links[count] = null; - } + if ((i = find(local, remote)) != -1) { + count--; + links[i] = links[count]; + links[count] = null; + } } synchronized boolean exists(final OtpErlangPid local, - final OtpErlangPid remote) { - return find(local, remote) != -1; + final OtpErlangPid remote) { + return find(local, remote) != -1; } synchronized int find(final OtpErlangPid local, final OtpErlangPid remote) { - for (int i = 0; i < count; i++) { - if (links[i].equals(local, remote)) { - return i; - } - } - return -1; + for (int i = 0; i < count; i++) { + if (links[i].equals(local, remote)) { + return i; + } + } + return -1; } int count() { - return count; + return count; } /* all local pids get notified about broken connection */ synchronized OtpErlangPid[] localPids() { - OtpErlangPid[] ret = null; - if (count != 0) { - ret = new OtpErlangPid[count]; - for (int i = 0; i < count; i++) { - ret[i] = links[i].local(); - } - } - return ret; + OtpErlangPid[] ret = null; + if (count != 0) { + ret = new OtpErlangPid[count]; + for (int i = 0; i < count; i++) { + ret[i] = links[i].local(); + } + } + return ret; } /* all remote pids get notified about failed pid */ synchronized OtpErlangPid[] remotePids() { - OtpErlangPid[] ret = null; - if (count != 0) { - ret = new OtpErlangPid[count]; - for (int i = 0; i < count; i++) { - ret[i] = links[i].remote(); - } - } - return ret; + OtpErlangPid[] ret = null; + if (count != 0) { + ret = new OtpErlangPid[count]; + for (int i = 0; i < count; i++) { + ret[i] = links[i].remote(); + } + } + return ret; } /* clears the link table, returns a copy */ synchronized Link[] clearLinks() { - Link[] ret = null; - if (count != 0) { - ret = new Link[count]; - for (int i = 0; i < count; i++) { - ret[i] = links[i]; - links[i] = null; - } - count = 0; - } - return ret; + Link[] ret = null; + if (count != 0) { + ret = new Link[count]; + for (int i = 0; i < count; i++) { + ret[i] = links[i]; + links[i] = null; + } + count = 0; + } + return ret; } /* returns a copy of the link table */ synchronized Link[] links() { - Link[] ret = null; - if (count != 0) { - ret = new Link[count]; - 
System.arraycopy(links, 0, ret, 0, count); - } - return ret; + Link[] ret = null; + if (count != 0) { + ret = new Link[count]; + System.arraycopy(links, 0, ret, 0, count); + } + return ret; } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpAuthException.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpAuthException.java index 39d254d9fa..47646121c3 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpAuthException.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpAuthException.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -22,7 +22,7 @@ package com.ericsson.otp.erlang; * Exception raised when a node attempts to establish a communication channel * when it is not authorized to do so, or when a node sends a message containing * an invalid cookie on an established channel. - * + * * @see OtpConnection */ public class OtpAuthException extends OtpException { @@ -32,6 +32,6 @@ public class OtpAuthException extends OtpException { * Provides a detailed message. */ public OtpAuthException(final String s) { - super(s); + super(s); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpConnection.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpConnection.java index 9ad02506fd..2c9b7766bc 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpConnection.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpConnection.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -25,22 +25,22 @@ import java.net.Socket; * Maintains a connection between a Java process and a remote Erlang, Java or C * node. The object maintains connection state and allows data to be sent to and * received from the peer. - * + * * <p> * Once a connection is established between the local node and a remote node, * the connection object can be used to send and receive messages between the * nodes and make rpc calls (assuming that the remote node is a real Erlang * node). - * + * * <p> * The various receive methods are all blocking and will return only when a * valid message has been received or an exception is raised. 
- * + * * <p> * If an exception occurs in any of the methods in this class, the connection * will be closed and must be explicitely reopened in order to resume * communication with the peer. - * + * * <p> * It is not possible to create an instance of this class directly. * OtpConnection objects are returned by {@link OtpSelf#connect(OtpPeer) @@ -55,66 +55,66 @@ public class OtpConnection extends AbstractConnection { * OtpSelf#accept() OtpSelf.accept()} to create a connection based on data * received when handshaking with the peer node, when the remote node is the * connection intitiator. - * + * * @exception java.io.IOException if it was not possible to connect to the * peer. - * + * * @exception OtpAuthException if handshake resulted in an authentication * error */ // package scope OtpConnection(final OtpSelf self, final Socket s) throws IOException, - OtpAuthException { - super(self, s); - this.self = self; - queue = new GenericQueue(); - start(); + OtpAuthException { + super(self, s); + this.self = self; + queue = new GenericQueue(); + start(); } /* * Intiate and open a connection to a remote node. - * + * * @exception java.io.IOException if it was not possible to connect to the * peer. - * + * * @exception OtpAuthException if handshake resulted in an authentication * error. */ // package scope OtpConnection(final OtpSelf self, final OtpPeer other) throws IOException, - OtpAuthException { - super(self, other); - this.self = self; - queue = new GenericQueue(); - start(); + OtpAuthException { + super(self, other); + this.self = self; + queue = new GenericQueue(); + start(); } @Override public void deliver(final Exception e) { - queue.put(e); + queue.put(e); } @Override public void deliver(final OtpMsg msg) { - queue.put(msg); + queue.put(msg); } /** * Get information about the node at the peer end of this connection. - * + * * @return the {@link OtpPeer Node} representing the peer node. */ public OtpPeer peer() { - return peer; + return peer; } /** * Get information about the node at the local end of this connection. - * + * * @return the {@link OtpSelf Node} representing the local node. */ public OtpSelf self() { - return self; + return self; } /** @@ -122,416 +122,412 @@ public class OtpConnection extends AbstractConnection { * this connection. */ public int msgCount() { - return queue.getCount(); + return queue.getCount(); } /** * Receive a message from a remote process. This method blocks until a valid * message is received or an exception is raised. - * + * * <p> * If the remote node sends a message that cannot be decoded properly, the * connection is closed and the method throws an exception. - * + * * @return an object containing a single Erlang term. - * + * * @exception java.io.IOException - * if the connection is not active or a communication - * error occurs. - * + * if the connection is not active or a communication error + * occurs. + * * @exception OtpErlangExit - * if an exit signal is received from a process on the - * peer node. - * + * if an exit signal is received from a process on the peer + * node. + * * @exception OtpAuthException - * if the remote node sends a message containing an - * invalid cookie. + * if the remote node sends a message containing an invalid + * cookie. 
*/ public OtpErlangObject receive() throws IOException, OtpErlangExit, - OtpAuthException { - try { - return receiveMsg().getMsg(); - } catch (final OtpErlangDecodeException e) { - close(); - throw new IOException(e.getMessage()); - } + OtpAuthException { + try { + return receiveMsg().getMsg(); + } catch (final OtpErlangDecodeException e) { + close(); + throw new IOException(e.getMessage()); + } } /** * Receive a message from a remote process. This method blocks at most for * the specified time, until a valid message is received or an exception is * raised. - * + * * <p> * If the remote node sends a message that cannot be decoded properly, the * connection is closed and the method throws an exception. - * + * * @param timeout - * the time in milliseconds that this operation will block. - * Specify 0 to poll the queue. - * + * the time in milliseconds that this operation will block. + * Specify 0 to poll the queue. + * * @return an object containing a single Erlang term. - * + * * @exception java.io.IOException - * if the connection is not active or a communication - * error occurs. - * + * if the connection is not active or a communication error + * occurs. + * * @exception OtpErlangExit - * if an exit signal is received from a process on the - * peer node. - * + * if an exit signal is received from a process on the peer + * node. + * * @exception OtpAuthException - * if the remote node sends a message containing an - * invalid cookie. - * + * if the remote node sends a message containing an invalid + * cookie. + * * @exception InterruptedException - * if no message if the method times out before a message - * becomes available. + * if no message if the method times out before a message + * becomes available. */ public OtpErlangObject receive(final long timeout) - throws InterruptedException, IOException, OtpErlangExit, - OtpAuthException { - try { - return receiveMsg(timeout).getMsg(); - } catch (final OtpErlangDecodeException e) { - close(); - throw new IOException(e.getMessage()); - } + throws InterruptedException, IOException, OtpErlangExit, + OtpAuthException { + try { + return receiveMsg(timeout).getMsg(); + } catch (final OtpErlangDecodeException e) { + close(); + throw new IOException(e.getMessage()); + } } /** * Receive a raw (still encoded) message from a remote process. This message * blocks until a valid message is received or an exception is raised. - * + * * <p> * If the remote node sends a message that cannot be decoded properly, the * connection is closed and the method throws an exception. - * + * * @return an object containing a raw (still encoded) Erlang term. - * + * * @exception java.io.IOException - * if the connection is not active or a communication - * error occurs. - * + * if the connection is not active or a communication error + * occurs. + * * @exception OtpErlangExit - * if an exit signal is received from a process on the - * peer node, or if the connection is lost for any - * reason. - * + * if an exit signal is received from a process on the peer + * node, or if the connection is lost for any reason. + * * @exception OtpAuthException - * if the remote node sends a message containing an - * invalid cookie. + * if the remote node sends a message containing an invalid + * cookie. */ public OtpInputStream receiveBuf() throws IOException, OtpErlangExit, - OtpAuthException { - return receiveMsg().getMsgBuf(); + OtpAuthException { + return receiveMsg().getMsgBuf(); } /** * Receive a raw (still encoded) message from a remote process. 
This message * blocks at most for the specified time until a valid message is received * or an exception is raised. - * + * * <p> * If the remote node sends a message that cannot be decoded properly, the * connection is closed and the method throws an exception. - * + * * @param timeout - * the time in milliseconds that this operation will block. - * Specify 0 to poll the queue. - * + * the time in milliseconds that this operation will block. + * Specify 0 to poll the queue. + * * @return an object containing a raw (still encoded) Erlang term. - * + * * @exception java.io.IOException - * if the connection is not active or a communication - * error occurs. - * + * if the connection is not active or a communication error + * occurs. + * * @exception OtpErlangExit - * if an exit signal is received from a process on the - * peer node, or if the connection is lost for any - * reason. - * + * if an exit signal is received from a process on the peer + * node, or if the connection is lost for any reason. + * * @exception OtpAuthException - * if the remote node sends a message containing an - * invalid cookie. - * + * if the remote node sends a message containing an invalid + * cookie. + * * @exception InterruptedException - * if no message if the method times out before a message - * becomes available. + * if no message if the method times out before a message + * becomes available. */ public OtpInputStream receiveBuf(final long timeout) - throws InterruptedException, IOException, OtpErlangExit, - OtpAuthException { - return receiveMsg(timeout).getMsgBuf(); + throws InterruptedException, IOException, OtpErlangExit, + OtpAuthException { + return receiveMsg(timeout).getMsgBuf(); } /** * Receive a messge complete with sender and recipient information. - * + * * @return an {@link OtpMsg OtpMsg} containing the header information about * the sender and recipient, as well as the actual message contents. - * + * * @exception java.io.IOException - * if the connection is not active or a communication - * error occurs. - * + * if the connection is not active or a communication error + * occurs. + * * @exception OtpErlangExit - * if an exit signal is received from a process on the - * peer node, or if the connection is lost for any - * reason. - * + * if an exit signal is received from a process on the peer + * node, or if the connection is lost for any reason. + * * @exception OtpAuthException - * if the remote node sends a message containing an - * invalid cookie. + * if the remote node sends a message containing an invalid + * cookie. */ public OtpMsg receiveMsg() throws IOException, OtpErlangExit, - OtpAuthException { - final Object o = queue.get(); - - if (o instanceof OtpMsg) { - return (OtpMsg) o; - } else if (o instanceof IOException) { - throw (IOException) o; - } else if (o instanceof OtpErlangExit) { - throw (OtpErlangExit) o; - } else if (o instanceof OtpAuthException) { - throw (OtpAuthException) o; - } - - return null; + OtpAuthException { + final Object o = queue.get(); + + if (o instanceof OtpMsg) { + return (OtpMsg) o; + } else if (o instanceof IOException) { + throw (IOException) o; + } else if (o instanceof OtpErlangExit) { + throw (OtpErlangExit) o; + } else if (o instanceof OtpAuthException) { + throw (OtpAuthException) o; + } + + return null; } /** * Receive a messge complete with sender and recipient information. This * method blocks at most for the specified time. - * + * * @param timeout - * the time in milliseconds that this operation will block. - * Specify 0 to poll the queue. 
- * + * the time in milliseconds that this operation will block. + * Specify 0 to poll the queue. + * * @return an {@link OtpMsg OtpMsg} containing the header information about * the sender and recipient, as well as the actual message contents. - * + * * @exception java.io.IOException - * if the connection is not active or a communication - * error occurs. - * + * if the connection is not active or a communication error + * occurs. + * * @exception OtpErlangExit - * if an exit signal is received from a process on the - * peer node, or if the connection is lost for any - * reason. - * + * if an exit signal is received from a process on the peer + * node, or if the connection is lost for any reason. + * * @exception OtpAuthException - * if the remote node sends a message containing an - * invalid cookie. - * + * if the remote node sends a message containing an invalid + * cookie. + * * @exception InterruptedException - * if no message if the method times out before a message - * becomes available. + * if no message if the method times out before a message + * becomes available. */ public OtpMsg receiveMsg(final long timeout) throws InterruptedException, - IOException, OtpErlangExit, OtpAuthException { - final Object o = queue.get(timeout); - - if (o instanceof OtpMsg) { - return (OtpMsg) o; - } else if (o instanceof IOException) { - throw (IOException) o; - } else if (o instanceof OtpErlangExit) { - throw (OtpErlangExit) o; - } else if (o instanceof OtpAuthException) { - throw (OtpAuthException) o; - } - - return null; + IOException, OtpErlangExit, OtpAuthException { + final Object o = queue.get(timeout); + + if (o instanceof OtpMsg) { + return (OtpMsg) o; + } else if (o instanceof IOException) { + throw (IOException) o; + } else if (o instanceof OtpErlangExit) { + throw (OtpErlangExit) o; + } else if (o instanceof OtpAuthException) { + throw (OtpAuthException) o; + } + + return null; } /** * Send a message to a process on a remote node. - * + * * @param dest - * the Erlang PID of the remote process. + * the Erlang PID of the remote process. * @param msg - * the message to send. - * + * the message to send. + * * @exception java.io.IOException - * if the connection is not active or a communication - * error occurs. + * if the connection is not active or a communication error + * occurs. */ @SuppressWarnings("resource") public void send(final OtpErlangPid dest, final OtpErlangObject msg) - throws IOException { - // encode and send the message - super.sendBuf(self.pid(), dest, new OtpOutputStream(msg)); + throws IOException { + // encode and send the message + super.sendBuf(self.pid(), dest, new OtpOutputStream(msg)); } /** * Send a message to a named process on a remote node. - * + * * @param dest - * the name of the remote process. + * the name of the remote process. * @param msg - * the message to send. - * + * the message to send. + * * @exception java.io.IOException - * if the connection is not active or a communication - * error occurs. + * if the connection is not active or a communication error + * occurs. */ @SuppressWarnings("resource") public void send(final String dest, final OtpErlangObject msg) - throws IOException { - // encode and send the message - super.sendBuf(self.pid(), dest, new OtpOutputStream(msg)); + throws IOException { + // encode and send the message + super.sendBuf(self.pid(), dest, new OtpOutputStream(msg)); } /** * Send a pre-encoded message to a named process on a remote node. - * + * * @param dest - * the name of the remote process. 
+ * the name of the remote process. * @param payload - * the encoded message to send. - * + * the encoded message to send. + * * @exception java.io.IOException - * if the connection is not active or a communication - * error occurs. + * if the connection is not active or a communication error + * occurs. */ public void sendBuf(final String dest, final OtpOutputStream payload) - throws IOException { - super.sendBuf(self.pid(), dest, payload); + throws IOException { + super.sendBuf(self.pid(), dest, payload); } /** * Send a pre-encoded message to a process on a remote node. - * + * * @param dest - * the Erlang PID of the remote process. + * the Erlang PID of the remote process. * @param payload - * the encoded message to send. - * + * the encoded message to send. + * * @exception java.io.IOException - * if the connection is not active or a communication - * error occurs. + * if the connection is not active or a communication error + * occurs. */ public void sendBuf(final OtpErlangPid dest, final OtpOutputStream payload) - throws IOException { - super.sendBuf(self.pid(), dest, payload); + throws IOException { + super.sendBuf(self.pid(), dest, payload); } /** * Send an RPC request to the remote Erlang node. This convenience function * creates the following message and sends it to 'rex' on the remote node: - * + * * <pre> * { self, { call, Mod, Fun, Args, user } } * </pre> - * + * * <p> * Note that this method has unpredicatble results if the remote node is not * an Erlang node. * </p> - * + * * @param mod - * the name of the Erlang module containing the function to - * be called. + * the name of the Erlang module containing the function to be + * called. * @param fun - * the name of the function to call. + * the name of the function to call. * @param args - * an array of Erlang terms, to be used as arguments to the - * function. - * + * an array of Erlang terms, to be used as arguments to the + * function. + * * @exception java.io.IOException - * if the connection is not active or a communication - * error occurs. + * if the connection is not active or a communication error + * occurs. */ public void sendRPC(final String mod, final String fun, - final OtpErlangObject[] args) throws IOException { - sendRPC(mod, fun, new OtpErlangList(args)); + final OtpErlangObject[] args) throws IOException { + sendRPC(mod, fun, new OtpErlangList(args)); } /** * Send an RPC request to the remote Erlang node. This convenience function * creates the following message and sends it to 'rex' on the remote node: - * + * * <pre> * { self, { call, Mod, Fun, Args, user } } * </pre> - * + * * <p> * Note that this method has unpredicatble results if the remote node is not * an Erlang node. * </p> - * + * * @param mod - * the name of the Erlang module containing the function to - * be called. + * the name of the Erlang module containing the function to be + * called. * @param fun - * the name of the function to call. + * the name of the function to call. * @param args - * a list of Erlang terms, to be used as arguments to the - * function. - * + * a list of Erlang terms, to be used as arguments to the + * function. + * * @exception java.io.IOException - * if the connection is not active or a communication - * error occurs. + * if the connection is not active or a communication error + * occurs. 
*/ public void sendRPC(final String mod, final String fun, - final OtpErlangList args) throws IOException { - final OtpErlangObject[] rpc = new OtpErlangObject[2]; - final OtpErlangObject[] call = new OtpErlangObject[5]; + final OtpErlangList args) throws IOException { + final OtpErlangObject[] rpc = new OtpErlangObject[2]; + final OtpErlangObject[] call = new OtpErlangObject[5]; - /* {self, { call, Mod, Fun, Args, user}} */ + /* {self, { call, Mod, Fun, Args, user}} */ - call[0] = new OtpErlangAtom("call"); - call[1] = new OtpErlangAtom(mod); - call[2] = new OtpErlangAtom(fun); - call[3] = args; - call[4] = new OtpErlangAtom("user"); + call[0] = new OtpErlangAtom("call"); + call[1] = new OtpErlangAtom(mod); + call[2] = new OtpErlangAtom(fun); + call[3] = args; + call[4] = new OtpErlangAtom("user"); - rpc[0] = self.pid(); - rpc[1] = new OtpErlangTuple(call); + rpc[0] = self.pid(); + rpc[1] = new OtpErlangTuple(call); - send("rex", new OtpErlangTuple(rpc)); + send("rex", new OtpErlangTuple(rpc)); } /** * Receive an RPC reply from the remote Erlang node. This convenience * function receives a message from the remote node, and expects it to have * the following format: - * + * * <pre> * { rex, Term } * </pre> - * + * * @return the second element of the tuple if the received message is a * two-tuple, otherwise null. No further error checking is * performed. - * + * * @exception java.io.IOException - * if the connection is not active or a communication - * error occurs. - * + * if the connection is not active or a communication error + * occurs. + * * @exception OtpErlangExit - * if an exit signal is received from a process on the - * peer node. - * + * if an exit signal is received from a process on the peer + * node. + * * @exception OtpAuthException - * if the remote node sends a message containing an - * invalid cookie. + * if the remote node sends a message containing an invalid + * cookie. */ public OtpErlangObject receiveRPC() throws IOException, OtpErlangExit, - OtpAuthException { + OtpAuthException { - final OtpErlangObject msg = receive(); + final OtpErlangObject msg = receive(); - if (msg instanceof OtpErlangTuple) { - final OtpErlangTuple t = (OtpErlangTuple) msg; - if (t.arity() == 2) { - return t.elementAt(1); // obs: second element - } - } + if (msg instanceof OtpErlangTuple) { + final OtpErlangTuple t = (OtpErlangTuple) msg; + if (t.arity() == 2) { + return t.elementAt(1); // obs: second element + } + } - return null; + return null; } /** @@ -539,48 +535,48 @@ public class OtpConnection extends AbstractConnection { * remote node. If the link is still active when the remote process * terminates, an exit signal will be sent to this connection. Use * {@link #unlink unlink()} to remove the link. - * + * * @param dest - * the Erlang PID of the remote process. - * + * the Erlang PID of the remote process. + * * @exception java.io.IOException - * if the connection is not active or a communication - * error occurs. + * if the connection is not active or a communication error + * occurs. */ public void link(final OtpErlangPid dest) throws IOException { - super.sendLink(self.pid(), dest); + super.sendLink(self.pid(), dest); } /** * Remove a link between the local node and the specified process on the - * remote node. This method deactivates links created with - * {@link #link link()}. - * + * remote node. This method deactivates links created with {@link #link + * link()}. + * * @param dest - * the Erlang PID of the remote process. - * + * the Erlang PID of the remote process. 
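/*
 * Illustrative RPC round trip (not part of the patch above): sendRPC() wraps
 * the call in {self, {call, Mod, Fun, Args, user}} and sends it to the 'rex'
 * server on the peer, and receiveRPC() unpacks the {rex, Reply} answer. The
 * node names and cookie are made-up examples; the peer is assumed to be a
 * running Erlang node reachable from this host.
 */
import com.ericsson.otp.erlang.*;

public class RpcSketch {
    public static void main(final String[] args) throws Exception {
        final OtpSelf self = new OtpSelf("client", "secretcookie");
        final OtpPeer peer = new OtpPeer("server@myhost");
        final OtpConnection conn = self.connect(peer);

        // erlang:node() takes no arguments and returns the peer's node name.
        conn.sendRPC("erlang", "node", new OtpErlangObject[0]);
        final OtpErlangObject reply = conn.receiveRPC();
        System.out.println("peer node is " + reply);

        conn.close();
    }
}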
+ * * @exception java.io.IOException - * if the connection is not active or a communication - * error occurs. + * if the connection is not active or a communication error + * occurs. */ public void unlink(final OtpErlangPid dest) throws IOException { - super.sendUnlink(self.pid(), dest); + super.sendUnlink(self.pid(), dest); } /** * Send an exit signal to a remote process. - * + * * @param dest - * the Erlang PID of the remote process. + * the Erlang PID of the remote process. * @param reason - * an Erlang term describing the exit reason. - * + * an Erlang term describing the exit reason. + * * @exception java.io.IOException - * if the connection is not active or a communication - * error occurs. + * if the connection is not active or a communication error + * occurs. */ public void exit(final OtpErlangPid dest, final OtpErlangObject reason) - throws IOException { - super.sendExit2(self.pid(), dest, reason); + throws IOException { + super.sendExit2(self.pid(), dest, reason); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpCookedConnection.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpCookedConnection.java index 43b0cad222..4d80f61d52 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpCookedConnection.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpCookedConnection.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -27,29 +27,29 @@ import java.net.Socket; * node. The object maintains connection state and allows data to be sent to and * received from the peer. * </p> - * + * * <p> * Once a connection is established between the local node and a remote node, * the connection object can be used to send and receive messages between the * nodes. * </p> - * + * * <p> * The various receive methods are all blocking and will return only when a * valid message has been received or an exception is raised. * </p> - * + * * <p> * If an exception occurs in any of the methods in this class, the connection * will be closed and must be reopened in order to resume communication with the * peer. * </p> - * + * * <p> * The message delivery methods in this class deliver directly to * {@link OtpMbox mailboxes} in the {@link OtpNode OtpNode} class. * </p> - * + * * <p> * It is not possible to create an instance of this class directly. * OtpCookedConnection objects are created as needed by the underlying mailbox @@ -70,45 +70,45 @@ public class OtpCookedConnection extends AbstractConnection { * OtpSelf#accept() OtpSelf.accept()} to create a connection based on data * received when handshaking with the peer node, when the remote node is the * connection intitiator. - * + * * @exception java.io.IOException if it was not possible to connect to the * peer. 
- * + * * @exception OtpAuthException if handshake resulted in an authentication * error */ // package scope OtpCookedConnection(final OtpNode self, final Socket s) throws IOException, - OtpAuthException { - super(self, s); - this.self = self; - links = new Links(25); - start(); + OtpAuthException { + super(self, s); + this.self = self; + links = new Links(25); + start(); } /* * Intiate and open a connection to a remote node. - * + * * @exception java.io.IOException if it was not possible to connect to the * peer. - * + * * @exception OtpAuthException if handshake resulted in an authentication * error. */ // package scope OtpCookedConnection(final OtpNode self, final OtpPeer other) - throws IOException, OtpAuthException { - super(self, other); - this.self = self; - links = new Links(25); - start(); + throws IOException, OtpAuthException { + super(self, other); + this.self = self; + links = new Links(25); + start(); } // pass the error to the node @Override public void deliver(final Exception e) { - self.deliverError(this, e); - return; + self.deliverError(this, e); + return; } /* @@ -118,32 +118,32 @@ public class OtpCookedConnection extends AbstractConnection { */ @Override public void deliver(final OtpMsg msg) { - final boolean delivered = self.deliver(msg); - - switch (msg.type()) { - case OtpMsg.linkTag: - if (delivered) { - links.addLink(msg.getRecipientPid(), msg.getSenderPid()); - } else { - try { - // no such pid - send exit to sender - super.sendExit(msg.getRecipientPid(), msg.getSenderPid(), - new OtpErlangAtom("noproc")); - } catch (final IOException e) { - } - } - break; - - case OtpMsg.unlinkTag: - case OtpMsg.exitTag: - links.removeLink(msg.getRecipientPid(), msg.getSenderPid()); - break; - - case OtpMsg.exit2Tag: - break; - } - - return; + final boolean delivered = self.deliver(msg); + + switch (msg.type()) { + case OtpMsg.linkTag: + if (delivered) { + links.addLink(msg.getRecipientPid(), msg.getSenderPid()); + } else { + try { + // no such pid - send exit to sender + super.sendExit(msg.getRecipientPid(), msg.getSenderPid(), + new OtpErlangAtom("noproc")); + } catch (final IOException e) { + } + } + break; + + case OtpMsg.unlinkTag: + case OtpMsg.exitTag: + links.removeLink(msg.getRecipientPid(), msg.getSenderPid()); + break; + + case OtpMsg.exit2Tag: + break; + } + + return; } /* @@ -151,9 +151,9 @@ public class OtpCookedConnection extends AbstractConnection { */ @SuppressWarnings("resource") void send(final OtpErlangPid from, final OtpErlangPid dest, - final OtpErlangObject msg) throws IOException { - // encode and send the message - sendBuf(from, dest, new OtpOutputStream(msg)); + final OtpErlangObject msg) throws IOException { + // encode and send the message + sendBuf(from, dest, new OtpOutputStream(msg)); } /* @@ -162,66 +162,66 @@ public class OtpCookedConnection extends AbstractConnection { */ @SuppressWarnings("resource") void send(final OtpErlangPid from, final String dest, - final OtpErlangObject msg) throws IOException { - // encode and send the message - sendBuf(from, dest, new OtpOutputStream(msg)); + final OtpErlangObject msg) throws IOException { + // encode and send the message + sendBuf(from, dest, new OtpOutputStream(msg)); } @Override public void close() { - super.close(); - breakLinks(); + super.close(); + breakLinks(); } @Override protected void finalize() { - close(); + close(); } /* * this one called by dying/killed process */ void exit(final OtpErlangPid from, final OtpErlangPid to, - final OtpErlangObject reason) { - try { - super.sendExit(from, to, 
reason); - } catch (final Exception e) { - } + final OtpErlangObject reason) { + try { + super.sendExit(from, to, reason); + } catch (final Exception e) { + } } /* * this one called explicitely by user code => use exit2 */ void exit2(final OtpErlangPid from, final OtpErlangPid to, - final OtpErlangObject reason) { - try { - super.sendExit2(from, to, reason); - } catch (final Exception e) { - } + final OtpErlangObject reason) { + try { + super.sendExit2(from, to, reason); + } catch (final Exception e) { + } } /* * snoop for outgoing links and update own table */ synchronized void link(final OtpErlangPid from, final OtpErlangPid to) - throws OtpErlangExit { - try { - super.sendLink(from, to); - links.addLink(from, to); - } catch (final IOException e) { - throw new OtpErlangExit("noproc", to); - } + throws OtpErlangExit { + try { + super.sendLink(from, to); + links.addLink(from, to); + } catch (final IOException e) { + throw new OtpErlangExit("noproc", to); + } } /* * snoop for outgoing unlinks and update own table */ synchronized void unlink(final OtpErlangPid from, final OtpErlangPid to) { - links.removeLink(from, to); - try { - super.sendUnlink(from, to); - } catch (final IOException e) { - } + links.removeLink(from, to); + try { + super.sendUnlink(from, to); + } catch (final IOException e) { + } } /* @@ -229,18 +229,18 @@ public class OtpCookedConnection extends AbstractConnection { * through this connection */ synchronized void breakLinks() { - if (links != null) { - final Link[] l = links.clearLinks(); - - if (l != null) { - final int len = l.length; - - for (int i = 0; i < len; i++) { - // send exit "from" remote pids to local ones - self.deliver(new OtpMsg(OtpMsg.exitTag, l[i].remote(), l[i] - .local(), new OtpErlangAtom("noconnection"))); - } - } - } + if (links != null) { + final Link[] l = links.clearLinks(); + + if (l != null) { + final int len = l.length; + + for (int i = 0; i < len; i++) { + // send exit "from" remote pids to local ones + self.deliver(new OtpMsg(OtpMsg.exitTag, l[i].remote(), l[i] + .local(), new OtpErlangAtom("noconnection"))); + } + } + } } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpEpmd.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpEpmd.java index 8a8ba785d9..796babee1b 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpEpmd.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpEpmd.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2013. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -29,12 +29,12 @@ import java.net.Socket; * information about the port on which incoming connections are accepted, as * well as which versions of the Erlang communication protocolt the node * supports. 
- * + * * <p> * Nodes wishing to contact other nodes must first request information from Epmd * before a connection can be set up, however this is done automatically by * {@link OtpSelf#connect(OtpPeer) OtpSelf.connect()} when necessary. - * + * * <p> * The methods {@link #publishPort(OtpLocalNode) publishPort()} and * {@link #unPublishPort(OtpLocalNode) unPublishPort()} will fail if an Epmd @@ -42,32 +42,33 @@ import java.net.Socket; * {@link #lookupPort(AbstractNode) lookupPort()} will fail if there is no Epmd * process running on the host where the specified node is running. See the * Erlang documentation for information about starting Epmd. - * + * * <p> * This class contains only static methods, there are no constructors. */ public class OtpEpmd { private static class EpmdPort { - private static int epmdPort = 0; - - public static int get() { - if (epmdPort == 0) { - String env; - try { - env = System.getenv("ERL_EPMD_PORT"); - } - catch (java.lang.SecurityException e) { - env = null; - } - epmdPort = (env != null) ? Integer.parseInt(env) : 4369; - } - return epmdPort; - } - public static void set(int port) { - epmdPort = port; - } + private static int epmdPort = 0; + + public static int get() { + if (epmdPort == 0) { + String env; + try { + env = System.getenv("ERL_EPMD_PORT"); + } catch (final java.lang.SecurityException e) { + env = null; + } + epmdPort = env != null ? Integer.parseInt(env) : 4369; + } + return epmdPort; + } + + public static void set(final int port) { + epmdPort = port; + } } + // common values private static final byte stopReq = (byte) 115; @@ -81,16 +82,16 @@ public class OtpEpmd { private static final int traceThreshold = 4; static { - // debug this connection? - final String trace = System.getProperties().getProperty( - "OtpConnection.trace"); - try { - if (trace != null) { - traceLevel = Integer.valueOf(trace).intValue(); - } - } catch (final NumberFormatException e) { - traceLevel = 0; - } + // debug this connection? + final String trace = System.getProperties().getProperty( + "OtpConnection.trace"); + try { + if (trace != null) { + traceLevel = Integer.valueOf(trace).intValue(); + } + } catch (final NumberFormatException e) { + traceLevel = 0; + } } // only static methods: no public constructors @@ -98,51 +99,50 @@ public class OtpEpmd { private OtpEpmd() { } - /** - * Set the port number to be used to contact the epmd process. - * Only needed when the default port is not desired and system environment - * variable ERL_EPMD_PORT can not be read (applet). + * Set the port number to be used to contact the epmd process. Only needed + * when the default port is not desired and system environment variable + * ERL_EPMD_PORT can not be read (applet). */ - public static void useEpmdPort(int port) { - EpmdPort.set(port); + public static void useEpmdPort(final int port) { + EpmdPort.set(port); } /** * Determine what port a node listens for incoming connections on. - * + * * @return the listen port for the specified node, or 0 if the node was not * registered with Epmd. - * + * * @exception java.io.IOException * if there was no response from the name server. */ public static int lookupPort(final AbstractNode node) throws IOException { - return r4_lookupPort(node); + return r4_lookupPort(node); } /** * Register with Epmd, so that other nodes are able to find and connect to * it. - * + * * @param node * the server node that should be registered with Epmd. - * + * * @return true if the operation was successful. False if the node was * already registered. 
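/*
 * Illustrative sketch (not part of the patch above): asking epmd which port a
 * registered node listens on. The node name is a made-up example; as the
 * javadoc above states, lookupPort() returns 0 if the node is not registered
 * with epmd.
 */
import com.ericsson.otp.erlang.OtpEpmd;
import com.ericsson.otp.erlang.OtpPeer;

public class EpmdLookupSketch {
    public static void main(final String[] args) throws Exception {
        final int port = OtpEpmd.lookupPort(new OtpPeer("server@myhost"));
        if (port == 0) {
            System.out.println("server@myhost is not registered with epmd");
        } else {
            System.out.println("server@myhost listens on port " + port);
        }
    }
}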
- * + * * @exception java.io.IOException * if there was no response from the name server. */ public static boolean publishPort(final OtpLocalNode node) - throws IOException { - Socket s = null; + throws IOException { + Socket s = null; - s = r4_publish(node); + s = r4_publish(node); - node.setEpmd(s); + node.setEpmd(s); - return s != null; + return s != null; } // Ask epmd to close his end of the connection. @@ -151,275 +151,274 @@ public class OtpEpmd { /** * Unregister from Epmd. Other nodes wishing to connect will no longer be * able to. - * + * * <p> * This method does not report any failures. */ public static void unPublishPort(final OtpLocalNode node) { - Socket s = null; - - try { - s = new Socket((String) null, EpmdPort.get()); - @SuppressWarnings("resource") - final OtpOutputStream obuf = new OtpOutputStream(); - obuf.write2BE(node.alive().length() + 1); - obuf.write1(stopReq); - obuf.writeN(node.alive().getBytes()); - obuf.writeTo(s.getOutputStream()); - // don't even wait for a response (is there one?) - if (traceLevel >= traceThreshold) { - System.out.println("-> UNPUBLISH " + node + " port=" - + node.port()); - System.out.println("<- OK (assumed)"); - } - } catch (final Exception e) {/* ignore all failures */ - } finally { - try { - if (s != null) { - s.close(); - } - } catch (final IOException e) { /* ignore close failure */ - } - s = null; - } + Socket s = null; + + try { + s = new Socket((String) null, EpmdPort.get()); + @SuppressWarnings("resource") + final OtpOutputStream obuf = new OtpOutputStream(); + obuf.write2BE(node.alive().length() + 1); + obuf.write1(stopReq); + obuf.writeN(node.alive().getBytes()); + obuf.writeTo(s.getOutputStream()); + // don't even wait for a response (is there one?) + if (traceLevel >= traceThreshold) { + System.out.println("-> UNPUBLISH " + node + " port=" + + node.port()); + System.out.println("<- OK (assumed)"); + } + } catch (final Exception e) {/* ignore all failures */ + } finally { + try { + if (s != null) { + s.close(); + } + } catch (final IOException e) { /* ignore close failure */ + } + s = null; + } } private static int r4_lookupPort(final AbstractNode node) - throws IOException { - int port = 0; - Socket s = null; - - try { - @SuppressWarnings("resource") - final OtpOutputStream obuf = new OtpOutputStream(); - s = new Socket(node.host(), EpmdPort.get()); - - // build and send epmd request - // length[2], tag[1], alivename[n] (length = n+1) - obuf.write2BE(node.alive().length() + 1); - obuf.write1(port4req); - obuf.writeN(node.alive().getBytes()); - - // send request - obuf.writeTo(s.getOutputStream()); - - if (traceLevel >= traceThreshold) { - System.out.println("-> LOOKUP (r4) " + node); - } - - // receive and decode reply - // resptag[1], result[1], port[2], ntype[1], proto[1], - // disthigh[2], distlow[2], nlen[2], alivename[n], - // elen[2], edata[m] - final byte[] tmpbuf = new byte[100]; - - final int n = s.getInputStream().read(tmpbuf); - - if (n < 0) { - s.close(); - throw new IOException("Nameserver not responding on " - + node.host() + " when looking up " + node.alive()); - } - - @SuppressWarnings("resource") - final OtpInputStream ibuf = new OtpInputStream(tmpbuf, 0); - - final int response = ibuf.read1(); - if (response == port4resp) { - final int result = ibuf.read1(); - if (result == 0) { - port = ibuf.read2BE(); - - node.ntype = ibuf.read1(); - node.proto = ibuf.read1(); - node.distHigh = ibuf.read2BE(); - node.distLow = ibuf.read2BE(); - // ignore rest of fields - } - } - } catch (final IOException e) { - if 
(traceLevel >= traceThreshold) { - System.out.println("<- (no response)"); - } - throw new IOException("Nameserver not responding on " + node.host() - + " when looking up " + node.alive()); - } catch (final OtpErlangDecodeException e) { - if (traceLevel >= traceThreshold) { - System.out.println("<- (invalid response)"); - } - throw new IOException("Nameserver not responding on " + node.host() - + " when looking up " + node.alive()); - } finally { - try { - if (s != null) { - s.close(); - } - } catch (final IOException e) { /* ignore close errors */ - } - s = null; - } - - if (traceLevel >= traceThreshold) { - if (port == 0) { - System.out.println("<- NOT FOUND"); - } else { - System.out.println("<- PORT " + port); - } - } - return port; + throws IOException { + int port = 0; + Socket s = null; + + try { + @SuppressWarnings("resource") + final OtpOutputStream obuf = new OtpOutputStream(); + s = new Socket(node.host(), EpmdPort.get()); + + // build and send epmd request + // length[2], tag[1], alivename[n] (length = n+1) + obuf.write2BE(node.alive().length() + 1); + obuf.write1(port4req); + obuf.writeN(node.alive().getBytes()); + + // send request + obuf.writeTo(s.getOutputStream()); + + if (traceLevel >= traceThreshold) { + System.out.println("-> LOOKUP (r4) " + node); + } + + // receive and decode reply + // resptag[1], result[1], port[2], ntype[1], proto[1], + // disthigh[2], distlow[2], nlen[2], alivename[n], + // elen[2], edata[m] + final byte[] tmpbuf = new byte[100]; + + final int n = s.getInputStream().read(tmpbuf); + + if (n < 0) { + s.close(); + throw new IOException("Nameserver not responding on " + + node.host() + " when looking up " + node.alive()); + } + + @SuppressWarnings("resource") + final OtpInputStream ibuf = new OtpInputStream(tmpbuf, 0); + + final int response = ibuf.read1(); + if (response == port4resp) { + final int result = ibuf.read1(); + if (result == 0) { + port = ibuf.read2BE(); + + node.ntype = ibuf.read1(); + node.proto = ibuf.read1(); + node.distHigh = ibuf.read2BE(); + node.distLow = ibuf.read2BE(); + // ignore rest of fields + } + } + } catch (final IOException e) { + if (traceLevel >= traceThreshold) { + System.out.println("<- (no response)"); + } + throw new IOException("Nameserver not responding on " + node.host() + + " when looking up " + node.alive()); + } catch (final OtpErlangDecodeException e) { + if (traceLevel >= traceThreshold) { + System.out.println("<- (invalid response)"); + } + throw new IOException("Nameserver not responding on " + node.host() + + " when looking up " + node.alive()); + } finally { + try { + if (s != null) { + s.close(); + } + } catch (final IOException e) { /* ignore close errors */ + } + s = null; + } + + if (traceLevel >= traceThreshold) { + if (port == 0) { + System.out.println("<- NOT FOUND"); + } else { + System.out.println("<- PORT " + port); + } + } + return port; } /* - * this function will get an exception if it tries to talk to a - * very old epmd, or if something else happens that it cannot - * forsee. In both cases we return an exception. We no longer - * support r3, so the exception is fatal. If we manage to - * successfully communicate with an r4 epmd, we return either the - * socket, or null, depending on the result. + * this function will get an exception if it tries to talk to a very old + * epmd, or if something else happens that it cannot forsee. In both cases + * we return an exception. We no longer support r3, so the exception is + * fatal. 
If we manage to successfully communicate with an r4 epmd, we + * return either the socket, or null, depending on the result. */ private static Socket r4_publish(final OtpLocalNode node) - throws IOException { - Socket s = null; - - try { - @SuppressWarnings("resource") - final OtpOutputStream obuf = new OtpOutputStream(); - s = new Socket((String) null, EpmdPort.get()); - - obuf.write2BE(node.alive().length() + 13); - - obuf.write1(publish4req); - obuf.write2BE(node.port()); - - obuf.write1(node.type()); - - obuf.write1(node.proto()); - obuf.write2BE(node.distHigh()); - obuf.write2BE(node.distLow()); - - obuf.write2BE(node.alive().length()); - obuf.writeN(node.alive().getBytes()); - obuf.write2BE(0); // No extra - - // send request - obuf.writeTo(s.getOutputStream()); - - if (traceLevel >= traceThreshold) { - System.out.println("-> PUBLISH (r4) " + node + " port=" - + node.port()); - } - - // get reply - final byte[] tmpbuf = new byte[100]; - final int n = s.getInputStream().read(tmpbuf); - - if (n < 0) { - s.close(); - throw new IOException("Nameserver not responding on " - + node.host() + " when publishing " + node.alive()); - } - - @SuppressWarnings("resource") - final OtpInputStream ibuf = new OtpInputStream(tmpbuf, 0); - - final int response = ibuf.read1(); - if (response == publish4resp) { - final int result = ibuf.read1(); - if (result == 0) { - node.creation = ibuf.read2BE(); - if (traceLevel >= traceThreshold) { - System.out.println("<- OK"); - } - return s; // success - } - } - } catch (final IOException e) { - // epmd closed the connection = fail - if (s != null) { - s.close(); - } - if (traceLevel >= traceThreshold) { - System.out.println("<- (no response)"); - } - throw new IOException("Nameserver not responding on " + node.host() - + " when publishing " + node.alive()); - } catch (final OtpErlangDecodeException e) { - s.close(); - if (traceLevel >= traceThreshold) { - System.out.println("<- (invalid response)"); - } - throw new IOException("Nameserver not responding on " + node.host() - + " when publishing " + node.alive()); - } - - s.close(); - return null; + throws IOException { + Socket s = null; + + try { + @SuppressWarnings("resource") + final OtpOutputStream obuf = new OtpOutputStream(); + s = new Socket((String) null, EpmdPort.get()); + + obuf.write2BE(node.alive().length() + 13); + + obuf.write1(publish4req); + obuf.write2BE(node.port()); + + obuf.write1(node.type()); + + obuf.write1(node.proto()); + obuf.write2BE(node.distHigh()); + obuf.write2BE(node.distLow()); + + obuf.write2BE(node.alive().length()); + obuf.writeN(node.alive().getBytes()); + obuf.write2BE(0); // No extra + + // send request + obuf.writeTo(s.getOutputStream()); + + if (traceLevel >= traceThreshold) { + System.out.println("-> PUBLISH (r4) " + node + " port=" + + node.port()); + } + + // get reply + final byte[] tmpbuf = new byte[100]; + final int n = s.getInputStream().read(tmpbuf); + + if (n < 0) { + s.close(); + throw new IOException("Nameserver not responding on " + + node.host() + " when publishing " + node.alive()); + } + + @SuppressWarnings("resource") + final OtpInputStream ibuf = new OtpInputStream(tmpbuf, 0); + + final int response = ibuf.read1(); + if (response == publish4resp) { + final int result = ibuf.read1(); + if (result == 0) { + node.creation = ibuf.read2BE(); + if (traceLevel >= traceThreshold) { + System.out.println("<- OK"); + } + return s; // success + } + } + } catch (final IOException e) { + // epmd closed the connection = fail + if (s != null) { + s.close(); + } + if 
(traceLevel >= traceThreshold) { + System.out.println("<- (no response)"); + } + throw new IOException("Nameserver not responding on " + node.host() + + " when publishing " + node.alive()); + } catch (final OtpErlangDecodeException e) { + s.close(); + if (traceLevel >= traceThreshold) { + System.out.println("<- (invalid response)"); + } + throw new IOException("Nameserver not responding on " + node.host() + + " when publishing " + node.alive()); + } + + s.close(); + return null; } public static String[] lookupNames() throws IOException { - return lookupNames(InetAddress.getByName(null)); + return lookupNames(InetAddress.getByName(null)); } public static String[] lookupNames(final InetAddress address) - throws IOException { - Socket s = null; - - try { - @SuppressWarnings("resource") - final OtpOutputStream obuf = new OtpOutputStream(); - try { - s = new Socket(address, EpmdPort.get()); - - obuf.write2BE(1); - obuf.write1(names4req); - // send request - obuf.writeTo(s.getOutputStream()); - - if (traceLevel >= traceThreshold) { - System.out.println("-> NAMES (r4) "); - } - - // get reply - final byte[] buffer = new byte[256]; - final ByteArrayOutputStream out = new ByteArrayOutputStream(256); - while (true) { - final int bytesRead = s.getInputStream().read(buffer); - if (bytesRead == -1) { - break; - } - out.write(buffer, 0, bytesRead); - } - final byte[] tmpbuf = out.toByteArray(); - @SuppressWarnings("resource") - final OtpInputStream ibuf = new OtpInputStream(tmpbuf, 0); - ibuf.read4BE(); // read port int - // final int port = ibuf.read4BE(); - // check if port = epmdPort - - final int n = tmpbuf.length; - final byte[] buf = new byte[n - 4]; - System.arraycopy(tmpbuf, 4, buf, 0, n - 4); - final String all = OtpErlangString.newString(buf); - return all.split("\n"); - } finally { - if (s != null) { - s.close(); - } - } - - } catch (final IOException e) { - if (traceLevel >= traceThreshold) { - System.out.println("<- (no response)"); - } - throw new IOException( - "Nameserver not responding when requesting names"); - } catch (final OtpErlangDecodeException e) { - if (traceLevel >= traceThreshold) { - System.out.println("<- (invalid response)"); - } - throw new IOException( - "Nameserver not responding when requesting names"); - } + throws IOException { + Socket s = null; + + try { + @SuppressWarnings("resource") + final OtpOutputStream obuf = new OtpOutputStream(); + try { + s = new Socket(address, EpmdPort.get()); + + obuf.write2BE(1); + obuf.write1(names4req); + // send request + obuf.writeTo(s.getOutputStream()); + + if (traceLevel >= traceThreshold) { + System.out.println("-> NAMES (r4) "); + } + + // get reply + final byte[] buffer = new byte[256]; + final ByteArrayOutputStream out = new ByteArrayOutputStream(256); + while (true) { + final int bytesRead = s.getInputStream().read(buffer); + if (bytesRead == -1) { + break; + } + out.write(buffer, 0, bytesRead); + } + final byte[] tmpbuf = out.toByteArray(); + @SuppressWarnings("resource") + final OtpInputStream ibuf = new OtpInputStream(tmpbuf, 0); + ibuf.read4BE(); // read port int + // final int port = ibuf.read4BE(); + // check if port = epmdPort + + final int n = tmpbuf.length; + final byte[] buf = new byte[n - 4]; + System.arraycopy(tmpbuf, 4, buf, 0, n - 4); + final String all = OtpErlangString.newString(buf); + return all.split("\n"); + } finally { + if (s != null) { + s.close(); + } + } + + } catch (final IOException e) { + if (traceLevel >= traceThreshold) { + System.out.println("<- (no response)"); + } + throw new IOException( + 
"Nameserver not responding when requesting names"); + } catch (final OtpErlangDecodeException e) { + if (traceLevel >= traceThreshold) { + System.out.println("<- (invalid response)"); + } + throw new IOException( + "Nameserver not responding when requesting names"); + } } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangAtom.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangAtom.java index bff3e2c0e3..5b2a2baad5 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangAtom.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangAtom.java @@ -1,24 +1,23 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2013. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; - /** * Provides a Java representation of Erlang atoms. Atoms can be created from * strings whose length is not more than {@link #maxAtomLength maxAtomLength} @@ -35,72 +34,71 @@ public class OtpErlangAtom extends OtpErlangObject { /** * Create an atom from the given string. - * + * * @param atom - * the string to create the atom from. - * + * the string to create the atom from. + * * @exception java.lang.IllegalArgumentException - * if the string is null or contains more than - * {@link #maxAtomLength maxAtomLength} characters. + * if the string is null or contains more than + * {@link #maxAtomLength maxAtomLength} characters. */ public OtpErlangAtom(final String atom) { - if (atom == null) { - throw new java.lang.IllegalArgumentException( - "null string value"); - } - - if (atom.codePointCount(0, atom.length()) > maxAtomLength) { - throw new java.lang.IllegalArgumentException("Atom may not exceed " - + maxAtomLength + " characters: " + atom); - } - this.atom = atom; + if (atom == null) { + throw new java.lang.IllegalArgumentException("null string value"); + } + + if (atom.codePointCount(0, atom.length()) > maxAtomLength) { + throw new java.lang.IllegalArgumentException("Atom may not exceed " + + maxAtomLength + " characters: " + atom); + } + this.atom = atom; } /** * Create an atom from a stream containing an atom encoded in Erlang * external format. - * + * * @param buf - * the stream containing the encoded atom. - * + * the stream containing the encoded atom. + * * @exception OtpErlangDecodeException - * if the buffer does not contain a valid external - * representation of an Erlang atom. + * if the buffer does not contain a valid external + * representation of an Erlang atom. */ public OtpErlangAtom(final OtpInputStream buf) - throws OtpErlangDecodeException { - atom = buf.read_atom(); + throws OtpErlangDecodeException { + atom = buf.read_atom(); } /** * Create an atom whose value is "true" or "false". */ public OtpErlangAtom(final boolean t) { - atom = String.valueOf(t); + atom = String.valueOf(t); } /** * Get the actual string contained in this object. - * + * * @return the raw string contained in this object, without regard to Erlang * quoting rules. 
- * + * * @see #toString */ public String atomValue() { - return atom; + return atom; } /** * The boolean value of this atom. - * + * * @return the value of this atom expressed as a boolean value. If the atom * consists of the characters "true" (independent of case) the value * will be true. For any other values, the value will be false. - * + * */ public boolean booleanValue() { - return Boolean.valueOf(atomValue()).booleanValue(); + return Boolean.valueOf(atomValue()).booleanValue(); } /** @@ -108,92 +106,91 @@ public class OtpErlangAtom extends OtpErlangObject { * between this method and {link #atomValue atomValue()} is that the * printname is quoted and escaped where necessary, according to the Erlang * rules for atom naming. - * + * * @return the printname representation of this atom object. - * + * * @see #atomValue */ @Override public String toString() { - if (atomNeedsQuoting(atom)) { - return "'" + escapeSpecialChars(atom) + "'"; - } - return atom; + if (atomNeedsQuoting(atom)) { + return "'" + escapeSpecialChars(atom) + "'"; + } + return atom; } /** * Determine if two atoms are equal. - * + * * @param o - * the other object to compare to. - * + * the other object to compare to. + * * @return true if the atoms are equal, false otherwise. */ @Override public boolean equals(final Object o) { - if (!(o instanceof OtpErlangAtom)) { - return false; - } + if (!(o instanceof OtpErlangAtom)) { + return false; + } - final OtpErlangAtom other = (OtpErlangAtom) o; - return this.atom.compareTo(other.atom) == 0; + final OtpErlangAtom other = (OtpErlangAtom) o; + return atom.compareTo(other.atom) == 0; } - + @Override protected int doHashCode() { - return atom.hashCode(); + return atom.hashCode(); } /** * Convert this atom to the equivalent Erlang external representation. - * + * * @param buf - * an output stream to which the encoded atom should be - * written. + * an output stream to which the encoded atom should be written. */ @Override public void encode(final OtpOutputStream buf) { - buf.write_atom(atom); + buf.write_atom(atom); } /* the following four predicates are helpers for the toString() method */ private boolean isErlangDigit(final char c) { - return c >= '0' && c <= '9'; + return c >= '0' && c <= '9'; } private boolean isErlangUpper(final char c) { - return c >= 'A' && c <= 'Z' || c == '_'; + return c >= 'A' && c <= 'Z' || c == '_'; } private boolean isErlangLower(final char c) { - return c >= 'a' && c <= 'z'; + return c >= 'a' && c <= 'z'; } private boolean isErlangLetter(final char c) { - return isErlangLower(c) || isErlangUpper(c); + return isErlangLower(c) || isErlangUpper(c); } // true if the atom should be displayed with quotation marks private boolean atomNeedsQuoting(final String s) { - char c; - - if (s.length() == 0) { - return true; - } - if (!isErlangLower(s.charAt(0))) { - return true; - } - - final int len = s.length(); - for (int i = 1; i < len; i++) { - c = s.charAt(i); - - if (!isErlangLetter(c) && !isErlangDigit(c) && c != '@') { - return true; - } - } - return false; + char c; + + if (s.length() == 0) { + return true; + } + if (!isErlangLower(s.charAt(0))) { + return true; + } + + final int len = s.length(); + for (int i = 1; i < len; i++) { + c = s.charAt(i); + + if (!isErlangLetter(c) && !isErlangDigit(c) && c != '@') { + return true; + } + } + return false; } /* @@ -202,80 +199,80 @@ public class OtpErlangAtom extends OtpErlangObject { * printable. 
*/ private String escapeSpecialChars(final String s) { - char c; - final StringBuffer so = new StringBuffer(); - - final int len = s.length(); - for (int i = 0; i < len; i++) { - c = s.charAt(i); - - /* - * note that some of these escape sequences are unique to Erlang, - * which is why the corresponding 'case' values use octal. The - * resulting string is, of course, in Erlang format. - */ - - switch (c) { - // some special escape sequences - case '\b': - so.append("\\b"); - break; - - case 0177: - so.append("\\d"); - break; - - case 033: - so.append("\\e"); - break; - - case '\f': - so.append("\\f"); - break; - - case '\n': - so.append("\\n"); - break; - - case '\r': - so.append("\\r"); - break; - - case '\t': - so.append("\\t"); - break; - - case 013: - so.append("\\v"); - break; - - case '\\': - so.append("\\\\"); - break; - - case '\'': - so.append("\\'"); - break; - - case '\"': - so.append("\\\""); - break; - - default: - // some other character classes - if (c < 027) { - // control chars show as "\^@", "\^A" etc - so.append("\\^" + (char) ('A' - 1 + c)); - } else if (c > 126) { - // 8-bit chars show as \345 \344 \366 etc - so.append("\\" + Integer.toOctalString(c)); - } else { - // character is printable without modification! - so.append(c); - } - } - } - return new String(so); + char c; + final StringBuffer so = new StringBuffer(); + + final int len = s.length(); + for (int i = 0; i < len; i++) { + c = s.charAt(i); + + /* + * note that some of these escape sequences are unique to Erlang, + * which is why the corresponding 'case' values use octal. The + * resulting string is, of course, in Erlang format. + */ + + switch (c) { + // some special escape sequences + case '\b': + so.append("\\b"); + break; + + case 0177: + so.append("\\d"); + break; + + case 033: + so.append("\\e"); + break; + + case '\f': + so.append("\\f"); + break; + + case '\n': + so.append("\\n"); + break; + + case '\r': + so.append("\\r"); + break; + + case '\t': + so.append("\\t"); + break; + + case 013: + so.append("\\v"); + break; + + case '\\': + so.append("\\\\"); + break; + + case '\'': + so.append("\\'"); + break; + + case '\"': + so.append("\\\""); + break; + + default: + // some other character classes + if (c < 027) { + // control chars show as "\^@", "\^A" etc + so.append("\\^" + (char) ('A' - 1 + c)); + } else if (c > 126) { + // 8-bit chars show as \345 \344 \366 etc + so.append("\\" + Integer.toOctalString(c)); + } else { + // character is printable without modification! + so.append(c); + } + } + } + return new String(so); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangBinary.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangBinary.java index 0891781f8d..c86a7bb05b 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangBinary.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangBinary.java @@ -1,24 +1,23 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. 
See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; - /** * Provides a Java representation of Erlang binaries. Anything that can be * represented as a sequence of bytes can be made into an Erlang binary. @@ -29,58 +28,58 @@ public class OtpErlangBinary extends OtpErlangBitstr { /** * Create a binary from a byte array - * + * * @param bin - * the array of bytes from which to create the binary. + * the array of bytes from which to create the binary. */ public OtpErlangBinary(final byte[] bin) { - super(bin); + super(bin); } /** * Create a binary from a stream containing a binary encoded in Erlang * external format. - * + * * @param buf - * the stream containing the encoded binary. - * + * the stream containing the encoded binary. + * * @exception OtpErlangDecodeException - * if the buffer does not contain a valid external - * representation of an Erlang binary. + * if the buffer does not contain a valid external + * representation of an Erlang binary. */ public OtpErlangBinary(final OtpInputStream buf) - throws OtpErlangDecodeException { - super(new byte[0]); - bin = buf.read_binary(); - pad_bits = 0; + throws OtpErlangDecodeException { + super(new byte[0]); + bin = buf.read_binary(); + pad_bits = 0; } /** * Create a binary from an arbitrary Java Object. The object must implement * java.io.Serializable or java.io.Externalizable. - * + * * @param o - * the object to serialize and create this binary from. + * the object to serialize and create this binary from. */ public OtpErlangBinary(final Object o) { - super(o); + super(o); } /** * Convert this binary to the equivalent Erlang external representation. - * + * * @param buf - * an output stream to which the encoded binary should be - * written. + * an output stream to which the encoded binary should be + * written. */ @Override public void encode(final OtpOutputStream buf) { - buf.write_binary(bin); + buf.write_binary(bin); } @Override public Object clone() { - final OtpErlangBinary that = (OtpErlangBinary) super.clone(); - return that; + final OtpErlangBinary that = (OtpErlangBinary) super.clone(); + return that; } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangBitstr.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangBitstr.java index 8cb4e0e685..7724892bd3 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangBitstr.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangBitstr.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2007-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -35,249 +35,249 @@ public class OtpErlangBitstr extends OtpErlangObject { /** * Create a bitstr from a byte array - * + * * @param bin - * the array of bytes from which to create the bitstr. + * the array of bytes from which to create the bitstr. 
*/ public OtpErlangBitstr(final byte[] bin) { - this.bin = new byte[bin.length]; - System.arraycopy(bin, 0, this.bin, 0, bin.length); - pad_bits = 0; + this.bin = new byte[bin.length]; + System.arraycopy(bin, 0, this.bin, 0, bin.length); + pad_bits = 0; } /** * Create a bitstr with pad bits from a byte array. - * + * * @param bin - * the array of bytes from which to create the bitstr. + * the array of bytes from which to create the bitstr. * @param pad_bits - * the number of unused bits in the low end of the last byte. + * the number of unused bits in the low end of the last byte. */ public OtpErlangBitstr(final byte[] bin, final int pad_bits) { - this.bin = new byte[bin.length]; - System.arraycopy(bin, 0, this.bin, 0, bin.length); - this.pad_bits = pad_bits; + this.bin = new byte[bin.length]; + System.arraycopy(bin, 0, this.bin, 0, bin.length); + this.pad_bits = pad_bits; - check_bitstr(this.bin, this.pad_bits); + check_bitstr(this.bin, this.pad_bits); } private void check_bitstr(final byte[] abin, final int a_pad_bits) { - if (a_pad_bits < 0 || 7 < a_pad_bits) { - throw new java.lang.IllegalArgumentException( - "Padding must be in range 0..7"); - } - if (a_pad_bits != 0 && abin.length == 0) { - throw new java.lang.IllegalArgumentException( - "Padding on zero length bitstr"); - } - if (abin.length != 0) { - // Make sure padding is zero - abin[abin.length - 1] &= ~((1 << a_pad_bits) - 1); - } + if (a_pad_bits < 0 || 7 < a_pad_bits) { + throw new java.lang.IllegalArgumentException( + "Padding must be in range 0..7"); + } + if (a_pad_bits != 0 && abin.length == 0) { + throw new java.lang.IllegalArgumentException( + "Padding on zero length bitstr"); + } + if (abin.length != 0) { + // Make sure padding is zero + abin[abin.length - 1] &= ~((1 << a_pad_bits) - 1); + } } /** * Create a bitstr from a stream containing a bitstr encoded in Erlang * external format. - * + * * @param buf - * the stream containing the encoded bitstr. - * + * the stream containing the encoded bitstr. + * * @exception OtpErlangDecodeException - * if the buffer does not contain a valid external - * representation of an Erlang bitstr. + * if the buffer does not contain a valid external + * representation of an Erlang bitstr. */ public OtpErlangBitstr(final OtpInputStream buf) - throws OtpErlangDecodeException { - final int pbs[] = { 0 }; // This is ugly just to get a value-result - // parameter - bin = buf.read_bitstr(pbs); - pad_bits = pbs[0]; + throws OtpErlangDecodeException { + final int pbs[] = { 0 }; // This is ugly just to get a value-result + // parameter + bin = buf.read_bitstr(pbs); + pad_bits = pbs[0]; - check_bitstr(bin, pad_bits); + check_bitstr(bin, pad_bits); } /** * Create a bitstr from an arbitrary Java Object. The object must implement * java.io.Serializable or java.io.Externalizable. - * + * * @param o - * the object to serialize and create this bitstr from. + * the object to serialize and create this bitstr from. 
*/ public OtpErlangBitstr(final Object o) { - try { - bin = toByteArray(o); - pad_bits = 0; - } catch (final IOException e) { - throw new java.lang.IllegalArgumentException( - "Object must implement Serializable"); - } + try { + bin = toByteArray(o); + pad_bits = 0; + } catch (final IOException e) { + throw new java.lang.IllegalArgumentException( + "Object must implement Serializable"); + } } private static byte[] toByteArray(final Object o) - throws java.io.IOException { + throws java.io.IOException { - if (o == null) { - return null; - } + if (o == null) { + return null; + } - /* need to synchronize use of the shared baos */ - final java.io.ByteArrayOutputStream baos = new ByteArrayOutputStream(); - final java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( - baos); + /* need to synchronize use of the shared baos */ + final java.io.ByteArrayOutputStream baos = new ByteArrayOutputStream(); + final java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream( + baos); - oos.writeObject(o); - oos.flush(); + oos.writeObject(o); + oos.flush(); - return baos.toByteArray(); + return baos.toByteArray(); } private static Object fromByteArray(final byte[] buf) { - if (buf == null) { - return null; - } + if (buf == null) { + return null; + } - try { - final java.io.ByteArrayInputStream bais = new java.io.ByteArrayInputStream( - buf); - final java.io.ObjectInputStream ois = new java.io.ObjectInputStream( - bais); - return ois.readObject(); - } catch (final java.lang.ClassNotFoundException e) { - } catch (final java.io.IOException e) { - } + try { + final java.io.ByteArrayInputStream bais = new java.io.ByteArrayInputStream( + buf); + final java.io.ObjectInputStream ois = new java.io.ObjectInputStream( + bais); + return ois.readObject(); + } catch (final java.lang.ClassNotFoundException e) { + } catch (final java.io.IOException e) { + } - return null; + return null; } /** * Get the byte array from a bitstr, padded with zero bits in the little end * of the last byte. - * + * * @return the byte array containing the bytes for this bitstr. */ public byte[] binaryValue() { - return bin; + return bin; } /** * Get the size in whole bytes of the bitstr, rest bits in the last byte not * counted. - * + * * @return the number of bytes contained in the bintstr. */ public int size() { - if (pad_bits == 0) { - return bin.length; - } - if (bin.length == 0) { - throw new java.lang.IllegalStateException("Impossible length"); - } - return bin.length - 1; + if (pad_bits == 0) { + return bin.length; + } + if (bin.length == 0) { + throw new java.lang.IllegalStateException("Impossible length"); + } + return bin.length - 1; } /** * Get the number of pad bits in the last byte of the bitstr. The pad bits * are zero and in the little end. - * + * * @return the number of pad bits in the bitstr. */ public int pad_bits() { - return pad_bits; + return pad_bits; } /** * Get the java Object from the bitstr. If the bitstr contains a serialized * Java object, then this method will recreate the object. - * - * + * + * * @return the java Object represented by this bitstr, or null if the bitstr * does not represent a Java Object. */ public Object getObject() { - if (pad_bits != 0) { - return null; - } - return fromByteArray(bin); + if (pad_bits != 0) { + return null; + } + return fromByteArray(bin); } /** * Get the string representation of this bitstr object. A bitstr is printed * as #Bin<N>, where N is the number of bytes contained in the object * or #bin<N-M> if there are M pad bits. 
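A small usage sketch (not part of the diff) of the padding behaviour described in this comment, using only constructors and accessors visible in this hunk: a single byte with four pad bits counts as zero whole bytes and prints in the byte/pad-bit form.

    import com.ericsson.otp.erlang.OtpErlangBitstr;

    public class BitstrPaddingSketch {
        public static void main(final String[] args) {
            // A 4-bit bitstring: one byte whose 4 low bits are unused padding.
            final OtpErlangBitstr bits =
                new OtpErlangBitstr(new byte[] { (byte) 0xf0 }, 4);

            System.out.println(bits.size());      // 0 -- whole bytes only
            System.out.println(bits.pad_bits());  // 4
            System.out.println(bits);             // #Bin<1-4>
        }
    }
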
- * + * * @return the Erlang string representation of this bitstr. */ @Override public String toString() { - if (pad_bits == 0) { - return "#Bin<" + bin.length + ">"; - } - if (bin.length == 0) { - throw new java.lang.IllegalStateException("Impossible length"); - } - return "#Bin<" + bin.length + "-" + pad_bits + ">"; + if (pad_bits == 0) { + return "#Bin<" + bin.length + ">"; + } + if (bin.length == 0) { + throw new java.lang.IllegalStateException("Impossible length"); + } + return "#Bin<" + bin.length + "-" + pad_bits + ">"; } /** * Convert this bitstr to the equivalent Erlang external representation. - * + * * @param buf - * an output stream to which the encoded bitstr should be - * written. + * an output stream to which the encoded bitstr should be + * written. */ @Override public void encode(final OtpOutputStream buf) { - buf.write_bitstr(bin, pad_bits); + buf.write_bitstr(bin, pad_bits); } /** * Determine if two bitstrs are equal. Bitstrs are equal if they have the * same byte length and tail length, and the array of bytes is identical. - * + * * @param o - * the bitstr to compare to. - * + * the bitstr to compare to. + * * @return true if the bitstrs contain the same bits, false otherwise. */ @Override public boolean equals(final Object o) { - if (!(o instanceof OtpErlangBitstr)) { - return false; - } + if (!(o instanceof OtpErlangBitstr)) { + return false; + } - final OtpErlangBitstr that = (OtpErlangBitstr) o; - if (pad_bits != that.pad_bits) { - return false; - } + final OtpErlangBitstr that = (OtpErlangBitstr) o; + if (pad_bits != that.pad_bits) { + return false; + } - final int len = bin.length; - if (len != that.bin.length) { - return false; - } + final int len = bin.length; + if (len != that.bin.length) { + return false; + } - for (int i = 0; i < len; i++) { - if (bin[i] != that.bin[i]) { - return false; // early exit - } - } + for (int i = 0; i < len; i++) { + if (bin[i] != that.bin[i]) { + return false; // early exit + } + } - return true; + return true; } - + @Override protected int doHashCode() { - OtpErlangObject.Hash hash = new OtpErlangObject.Hash(15); - hash.combine(bin); - hash.combine(pad_bits); - return hash.valueOf(); + final OtpErlangObject.Hash hash = new OtpErlangObject.Hash(15); + hash.combine(bin); + hash.combine(pad_bits); + return hash.valueOf(); } - + @Override public Object clone() { - final OtpErlangBitstr that = (OtpErlangBitstr) super.clone(); - that.bin = bin.clone(); - that.pad_bits = pad_bits; - return that; + final OtpErlangBitstr that = (OtpErlangBitstr) super.clone(); + that.bin = bin.clone(); + that.pad_bits = pad_bits; + return that; } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangBoolean.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangBoolean.java index eecd2ea288..3f15317a94 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangBoolean.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangBoolean.java @@ -1,24 +1,23 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. 
See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; - /** * Provides a Java representation of Erlang booleans, which are special cases of * atoms with values 'true' and 'false'. @@ -29,12 +28,12 @@ public class OtpErlangBoolean extends OtpErlangAtom { /** * Create a boolean from the given value - * + * * @param t - * the boolean value to represent as an atom. + * the boolean value to represent as an atom. */ public OtpErlangBoolean(final boolean t) { - super(t); + super(t); } /** @@ -42,13 +41,13 @@ public class OtpErlangBoolean extends OtpErlangAtom { * external format. The value of the boolean will be true if the atom * represented by the stream is "true" without regard to case. For other * atom values, the boolean will have the value false. - * + * * @exception OtpErlangDecodeException - * if the buffer does not contain a valid external - * representation of an Erlang atom. + * if the buffer does not contain a valid external + * representation of an Erlang atom. */ public OtpErlangBoolean(final OtpInputStream buf) - throws OtpErlangDecodeException { - super(buf); + throws OtpErlangDecodeException { + super(buf); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangByte.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangByte.java index eb6f3d8aba..622e31fa3b 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangByte.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangByte.java @@ -1,24 +1,23 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; - /** * Provides a Java representation of Erlang integral types. */ @@ -28,32 +27,32 @@ public class OtpErlangByte extends OtpErlangLong { /** * Create an Erlang integer from the given value. - * + * * @param b - * the byte value to use. + * the byte value to use. */ public OtpErlangByte(final byte b) { - super(b); + super(b); } /** * Create an Erlang integer from a stream containing an integer encoded in * Erlang external format. - * + * * @param buf - * the stream containing the encoded value. - * + * the stream containing the encoded value. + * * @exception OtpErlangDecodeException - * if the buffer does not contain a valid external - * representation of an Erlang integer. - * + * if the buffer does not contain a valid external + * representation of an Erlang integer. + * * @exception OtpErlangRangeException - * if the value is too large to be represented as a byte. + * if the value is too large to be represented as a byte. 
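To make the range check concrete, here is a sketch (not part of the diff) that encodes the integer 1000 in external format and then tries to read it back through this constructor. It assumes OtpOutputStream exposes ByteArrayOutputStream's toByteArray(), which the writeTo() calls in the OtpEpmd hunks earlier in this commit suggest.

    import com.ericsson.otp.erlang.OtpErlangByte;
    import com.ericsson.otp.erlang.OtpErlangInt;
    import com.ericsson.otp.erlang.OtpErlangRangeException;
    import com.ericsson.otp.erlang.OtpInputStream;
    import com.ericsson.otp.erlang.OtpOutputStream;

    public class ByteRangeSketch {
        public static void main(final String[] args) throws Exception {
            // Encode the integer 1000 in Erlang external format ...
            final OtpOutputStream out = new OtpOutputStream();
            new OtpErlangInt(1000).encode(out);

            // ... and try to read it back as a byte-sized integer.
            try {
                new OtpErlangByte(new OtpInputStream(out.toByteArray(), 0));
            } catch (final OtpErlangRangeException e) {
                System.out.println("out of range: " + e.getMessage());
            }
        }
    }
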
*/ public OtpErlangByte(final OtpInputStream buf) - throws OtpErlangRangeException, OtpErlangDecodeException { - super(buf); + throws OtpErlangRangeException, OtpErlangDecodeException { + super(buf); - byteValue(); + byteValue(); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangChar.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangChar.java index e7c6dd8ad4..1401716839 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangChar.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangChar.java @@ -1,24 +1,23 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; - /** * Provides a Java representation of Erlang integral types. */ @@ -28,32 +27,32 @@ public class OtpErlangChar extends OtpErlangLong { /** * Create an Erlang integer from the given value. - * + * * @param c - * the char value to use. + * the char value to use. */ public OtpErlangChar(final char c) { - super(c); + super(c); } /** * Create an Erlang integer from a stream containing an integer encoded in * Erlang external format. - * + * * @param buf - * the stream containing the encoded value. - * + * the stream containing the encoded value. + * * @exception OtpErlangDecodeException - * if the buffer does not contain a valid external - * representation of an Erlang integer. - * + * if the buffer does not contain a valid external + * representation of an Erlang integer. + * * @exception OtpErlangRangeException - * if the value is too large to be represented as a char. + * if the value is too large to be represented as a char. */ public OtpErlangChar(final OtpInputStream buf) - throws OtpErlangRangeException, OtpErlangDecodeException { - super(buf); + throws OtpErlangRangeException, OtpErlangDecodeException { + super(buf); - charValue(); + charValue(); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangDecodeException.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangDecodeException.java index 6986e26908..a7a9e71a08 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangDecodeException.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangDecodeException.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. 
- * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -22,7 +22,7 @@ package com.ericsson.otp.erlang; * Exception raised when an attempt is made to create an Erlang term by decoding * a sequence of bytes that does not represent the type of term that was * requested. - * + * * @see OtpInputStream */ public class OtpErlangDecodeException extends OtpErlangException { @@ -32,6 +32,6 @@ public class OtpErlangDecodeException extends OtpErlangException { * Provides a detailed message. */ public OtpErlangDecodeException(final String msg) { - super(msg); + super(msg); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangDouble.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangDouble.java index e92ce11431..bf0b7d5c11 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangDouble.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangDouble.java @@ -1,24 +1,23 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; - /** * Provides a Java representation of Erlang floats and doubles. Erlang defines * only one floating point numeric type, however this class and its subclass @@ -35,96 +34,95 @@ public class OtpErlangDouble extends OtpErlangObject { * Create an Erlang float from the given double value. */ public OtpErlangDouble(final double d) { - this.d = d; + this.d = d; } /** * Create an Erlang float from a stream containing a double encoded in * Erlang external format. - * + * * @param buf - * the stream containing the encoded value. - * + * the stream containing the encoded value. + * * @exception OtpErlangDecodeException - * if the buffer does not contain a valid external - * representation of an Erlang float. + * if the buffer does not contain a valid external + * representation of an Erlang float. */ public OtpErlangDouble(final OtpInputStream buf) - throws OtpErlangDecodeException { - d = buf.read_double(); + throws OtpErlangDecodeException { + d = buf.read_double(); } /** * Get the value, as a double. - * + * * @return the value of this object, as a double. */ public double doubleValue() { - return d; + return d; } /** * Get the value, as a float. - * + * * @return the value of this object, as a float. - * + * * @exception OtpErlangRangeException - * if the value cannot be represented as a float. + * if the value cannot be represented as a float. */ public float floatValue() throws OtpErlangRangeException { - final float f = (float) d; + final float f = (float) d; - if (f != d) { - throw new OtpErlangRangeException("Value too large for float: " + d); - } + if (f != d) { + throw new OtpErlangRangeException("Value too large for float: " + d); + } - return f; + return f; } /** * Get the string representation of this double. - * + * * @return the string representation of this double. 
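The floatValue() contract earlier in this hunk is easy to show with a short sketch (not part of the diff): 0.5 survives the round trip to float exactly, while 1.0e200 overflows and raises the range exception.

    import com.ericsson.otp.erlang.OtpErlangDouble;
    import com.ericsson.otp.erlang.OtpErlangRangeException;

    public class DoubleRangeSketch {
        public static void main(final String[] args) {
            try {
                // 0.5 is exactly representable as a float ...
                System.out.println(new OtpErlangDouble(0.5).floatValue());
                // ... but 1.0e200 is not, so this line throws.
                System.out.println(new OtpErlangDouble(1.0e200).floatValue());
            } catch (final OtpErlangRangeException e) {
                System.out.println("not a float: " + e.getMessage());
            }
        }
    }
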
*/ @Override public String toString() { - return "" + d; + return "" + d; } /** * Convert this double to the equivalent Erlang external representation. - * + * * @param buf - * an output stream to which the encoded value should be - * written. + * an output stream to which the encoded value should be written. */ @Override public void encode(final OtpOutputStream buf) { - buf.write_double(d); + buf.write_double(d); } /** * Determine if two floats are equal. Floats are equal if they contain the * same value. - * + * * @param o - * the float to compare to. - * + * the float to compare to. + * * @return true if the floats have the same value. */ @Override public boolean equals(final Object o) { - if (!(o instanceof OtpErlangDouble)) { - return false; - } + if (!(o instanceof OtpErlangDouble)) { + return false; + } - final OtpErlangDouble other = (OtpErlangDouble) o; - return this.d == other.d; + final OtpErlangDouble other = (OtpErlangDouble) o; + return d == other.d; } - + @Override protected int doHashCode() { - Double v = new Double(d); - return v.hashCode(); + final Double v = new Double(d); + return v.hashCode(); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangException.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangException.java index 5b111a56a8..2e250488fa 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangException.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangException.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -28,13 +28,13 @@ public class OtpErlangException extends OtpException { * Provides no message. */ public OtpErlangException() { - super(); + super(); } /** * Provides a detailed message. */ public OtpErlangException(final String msg) { - super(msg); + super(msg); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangExit.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangExit.java index 6b9015c0e5..f4c6f21207 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangExit.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangExit.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. 
- * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -21,13 +21,13 @@ package com.ericsson.otp.erlang; /** * Exception raised when a communication channel is broken. This can be caused * for a number of reasons, for example: - * + * * <ul> - * <li> an error in communication has occurred - * <li> a remote process has sent an exit signal - * <li> a linked process has exited + * <li>an error in communication has occurred + * <li>a remote process has sent an exit signal + * <li>a linked process has exited * </ul> - * + * * @see OtpConnection */ @@ -39,13 +39,13 @@ public class OtpErlangExit extends OtpErlangException { /** * Create an OtpErlangExit exception with the given reason. - * + * * @param reason - * the reason this exit signal has been sent. + * the reason this exit signal has been sent. */ public OtpErlangExit(final OtpErlangObject reason) { - super(reason.toString()); - this.reason = reason; + super(reason.toString()); + this.reason = reason; } /** @@ -53,29 +53,29 @@ public class OtpErlangExit extends OtpErlangException { * Equivalent to <code>OtpErlangExit(new * OtpErlangAtom(reason)</code>. * </p> - * + * * @param reason - * the reason this exit signal has been sent. - * + * the reason this exit signal has been sent. + * * @see #OtpErlangExit(OtpErlangObject) */ public OtpErlangExit(final String reason) { - this(new OtpErlangAtom(reason)); + this(new OtpErlangAtom(reason)); } /** * Create an OtpErlangExit exception with the given reason and sender pid. - * + * * @param reason - * the reason this exit signal has been sent. - * + * the reason this exit signal has been sent. + * * @param pid - * the pid that sent this exit. + * the pid that sent this exit. */ public OtpErlangExit(final OtpErlangObject reason, final OtpErlangPid pid) { - super(reason.toString()); - this.reason = reason; - this.pid = pid; + super(reason.toString()); + this.reason = reason; + this.pid = pid; } /** @@ -83,30 +83,30 @@ public class OtpErlangExit extends OtpErlangException { * Equivalent to <code>OtpErlangExit(new OtpErlangAtom(reason), * pid)</code>. * </p> - * + * * @param reason - * the reason this exit signal has been sent. - * + * the reason this exit signal has been sent. + * * @param pid - * the pid that sent this exit. - * + * the pid that sent this exit. + * * @see #OtpErlangExit(OtpErlangObject, OtpErlangPid) */ public OtpErlangExit(final String reason, final OtpErlangPid pid) { - this(new OtpErlangAtom(reason), pid); + this(new OtpErlangAtom(reason), pid); } /** * Get the reason associated with this exit signal. */ public OtpErlangObject reason() { - return reason; + return reason; } /** * Get the pid that sent this exit. */ public OtpErlangPid pid() { - return pid; + return pid; } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangExternalFun.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangExternalFun.java index 09f36b1ff4..80751cae53 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangExternalFun.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangExternalFun.java @@ -1,20 +1,20 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. 
- * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * - * %CopyrightEnd% + * + * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -27,47 +27,47 @@ public class OtpErlangExternalFun extends OtpErlangObject { private final int arity; public OtpErlangExternalFun(final String module, final String function, - final int arity) { - super(); - this.module = module; - this.function = function; - this.arity = arity; + final int arity) { + super(); + this.module = module; + this.function = function; + this.arity = arity; } public OtpErlangExternalFun(final OtpInputStream buf) - throws OtpErlangDecodeException { - final OtpErlangExternalFun f = buf.read_external_fun(); - module = f.module; - function = f.function; - arity = f.arity; + throws OtpErlangDecodeException { + final OtpErlangExternalFun f = buf.read_external_fun(); + module = f.module; + function = f.function; + arity = f.arity; } @Override public void encode(final OtpOutputStream buf) { - buf.write_external_fun(module, function, arity); + buf.write_external_fun(module, function, arity); } @Override public boolean equals(final Object o) { - if (!(o instanceof OtpErlangExternalFun)) { - return false; - } - final OtpErlangExternalFun f = (OtpErlangExternalFun) o; - return module.equals(f.module) && function.equals(f.function) - && arity == f.arity; + if (!(o instanceof OtpErlangExternalFun)) { + return false; + } + final OtpErlangExternalFun f = (OtpErlangExternalFun) o; + return module.equals(f.module) && function.equals(f.function) + && arity == f.arity; } @Override protected int doHashCode() { - OtpErlangObject.Hash hash = new OtpErlangObject.Hash(14); - hash.combine(module.hashCode(), function.hashCode()); - hash.combine(arity); - return hash.valueOf(); + final OtpErlangObject.Hash hash = new OtpErlangObject.Hash(14); + hash.combine(module.hashCode(), function.hashCode()); + hash.combine(arity); + return hash.valueOf(); } - + @Override public String toString() { - return "#Fun<" + module + "." + function + "." + arity + ">"; + return "#Fun<" + module + "." + function + "." + arity + ">"; } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangFloat.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangFloat.java index 7d48f848f0..6dcf3e7c3a 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangFloat.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangFloat.java @@ -1,24 +1,23 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; - /** * Provides a Java representation of Erlang floats and doubles. */ @@ -30,27 +29,27 @@ public class OtpErlangFloat extends OtpErlangDouble { * Create an Erlang float from the given float value. 
*/ public OtpErlangFloat(final float f) { - super(f); + super(f); } /** * Create an Erlang float from a stream containing a float encoded in Erlang * external format. - * + * * @param buf - * the stream containing the encoded value. - * + * the stream containing the encoded value. + * * @exception OtpErlangDecodeException - * if the buffer does not contain a valid external - * representation of an Erlang float. - * + * if the buffer does not contain a valid external + * representation of an Erlang float. + * * @exception OtpErlangRangeException - * if the value cannot be represented as a Java float. + * if the value cannot be represented as a Java float. */ public OtpErlangFloat(final OtpInputStream buf) - throws OtpErlangDecodeException, OtpErlangRangeException { - super(buf); + throws OtpErlangDecodeException, OtpErlangRangeException { + super(buf); - floatValue(); + floatValue(); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangFun.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangFun.java index 05fa0cbb23..2de284029b 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangFun.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangFun.java @@ -1,20 +1,20 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. 
- * - * %CopyrightEnd% + * + * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -34,97 +34,97 @@ public class OtpErlangFun extends OtpErlangObject { private final byte[] md5; public OtpErlangFun(final OtpInputStream buf) - throws OtpErlangDecodeException { - final OtpErlangFun f = buf.read_fun(); - pid = f.pid; - module = f.module; - arity = f.arity; - md5 = f.md5; - index = f.index; - old_index = f.old_index; - uniq = f.uniq; - freeVars = f.freeVars; + throws OtpErlangDecodeException { + final OtpErlangFun f = buf.read_fun(); + pid = f.pid; + module = f.module; + arity = f.arity; + md5 = f.md5; + index = f.index; + old_index = f.old_index; + uniq = f.uniq; + freeVars = f.freeVars; } public OtpErlangFun(final OtpErlangPid pid, final String module, - final long index, final long uniq, final OtpErlangObject[] freeVars) { - this.pid = pid; - this.module = module; - arity = -1; - md5 = null; - this.index = index; - old_index = 0; - this.uniq = uniq; - this.freeVars = freeVars; + final long index, final long uniq, final OtpErlangObject[] freeVars) { + this.pid = pid; + this.module = module; + arity = -1; + md5 = null; + this.index = index; + old_index = 0; + this.uniq = uniq; + this.freeVars = freeVars; } public OtpErlangFun(final OtpErlangPid pid, final String module, - final int arity, final byte[] md5, final int index, - final long old_index, final long uniq, - final OtpErlangObject[] freeVars) { - this.pid = pid; - this.module = module; - this.arity = arity; - this.md5 = md5; - this.index = index; - this.old_index = old_index; - this.uniq = uniq; - this.freeVars = freeVars; + final int arity, final byte[] md5, final int index, + final long old_index, final long uniq, + final OtpErlangObject[] freeVars) { + this.pid = pid; + this.module = module; + this.arity = arity; + this.md5 = md5; + this.index = index; + this.old_index = old_index; + this.uniq = uniq; + this.freeVars = freeVars; } @Override public void encode(final OtpOutputStream buf) { - buf - .write_fun(pid, module, old_index, arity, md5, index, uniq, - freeVars); + buf.write_fun(pid, module, old_index, arity, md5, index, uniq, freeVars); } @Override public boolean equals(final Object o) { - if (!(o instanceof OtpErlangFun)) { - return false; - } - final OtpErlangFun f = (OtpErlangFun) o; - if (!pid.equals(f.pid) || !module.equals(f.module) || arity != f.arity) { - return false; - } - if (md5 == null) { - if (f.md5 != null) { - return false; - } - } else { - if (!Arrays.equals(md5, f.md5)) { - return false; - } - } - if (index != f.index || uniq != f.uniq) { - return false; - } - if (freeVars == null) { - return f.freeVars == null; - } - return Arrays.equals(freeVars, f.freeVars); + if (!(o instanceof OtpErlangFun)) { + return false; + } + final OtpErlangFun f = (OtpErlangFun) o; + if (!pid.equals(f.pid) || !module.equals(f.module) || arity != f.arity) { + return false; + } + if (md5 == null) { + if (f.md5 != null) { + return false; + } + } else { + if (!Arrays.equals(md5, f.md5)) { + return false; + } + } + if (index != f.index || uniq != f.uniq) { + return false; + } + if (freeVars == null) { + return f.freeVars == null; + } + return Arrays.equals(freeVars, f.freeVars); } - + @Override protected int doHashCode() { - OtpErlangObject.Hash hash = new OtpErlangObject.Hash(1); - hash.combine(pid.hashCode(), module.hashCode()); - hash.combine(arity); - if (md5 != null) hash.combine(md5); - hash.combine(index); - hash.combine(uniq); - if (freeVars != null) { - for (OtpErlangObject o: freeVars) { - hash.combine(o.hashCode(), 1); - } - 
} - return hash.valueOf(); + final OtpErlangObject.Hash hash = new OtpErlangObject.Hash(1); + hash.combine(pid.hashCode(), module.hashCode()); + hash.combine(arity); + if (md5 != null) { + hash.combine(md5); + } + hash.combine(index); + hash.combine(uniq); + if (freeVars != null) { + for (final OtpErlangObject o : freeVars) { + hash.combine(o.hashCode(), 1); + } + } + return hash.valueOf(); } - + @Override public String toString() { - return "#Fun<" + module + "." + old_index + "." + uniq + ">"; + return "#Fun<" + module + "." + old_index + "." + uniq + ">"; } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangInt.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangInt.java index 741fc29dd0..628e3f6e6e 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangInt.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangInt.java @@ -1,24 +1,23 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; - /** * Provides a Java representation of Erlang integral types. */ @@ -28,32 +27,32 @@ public class OtpErlangInt extends OtpErlangLong { /** * Create an Erlang integer from the given value. - * + * * @param i - * the int value to use. + * the int value to use. */ public OtpErlangInt(final int i) { - super(i); + super(i); } /** * Create an Erlang integer from a stream containing an integer encoded in * Erlang external format. - * + * * @param buf - * the stream containing the encoded value. - * + * the stream containing the encoded value. + * * @exception OtpErlangDecodeException - * if the buffer does not contain a valid external - * representation of an Erlang integer. - * + * if the buffer does not contain a valid external + * representation of an Erlang integer. + * * @exception OtpErlangRangeException - * if the value is too large to be represented as an int. + * if the value is too large to be represented as an int. */ public OtpErlangInt(final OtpInputStream buf) - throws OtpErlangRangeException, OtpErlangDecodeException { - super(buf); + throws OtpErlangRangeException, OtpErlangDecodeException { + super(buf); - intValue(); + intValue(); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangList.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangList.java index 9f7c5f5499..990e50ddcd 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangList.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangList.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. 
If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -24,12 +24,12 @@ import java.util.NoSuchElementException; /** * Provides a Java representation of Erlang lists. Lists are created from zero * or more arbitrary Erlang terms. - * + * * <p> * The arity of the list is the number of elements it contains. */ public class OtpErlangList extends OtpErlangObject implements - Iterable<OtpErlangObject> { + Iterable<OtpErlangObject> { // don't change this! private static final long serialVersionUID = 5999112769036676548L; @@ -43,69 +43,69 @@ public class OtpErlangList extends OtpErlangObject implements * Create an empty list. */ public OtpErlangList() { - elems = NO_ELEMENTS; + elems = NO_ELEMENTS; } /** - * Create a list of Erlang integers representing Unicode codePoints. - * This method does not check if the string contains valid code points. - * + * Create a list of Erlang integers representing Unicode codePoints. This + * method does not check if the string contains valid code points. + * * @param str * the characters from which to create the list. */ public OtpErlangList(final String str) { - if (str == null || str.length() == 0) { - elems = NO_ELEMENTS; - } else { - final int[] codePoints = OtpErlangString.stringToCodePoints(str); - elems = new OtpErlangObject[codePoints.length]; - for (int i = 0; i < elems.length; i++) { - elems[i] = new OtpErlangInt(codePoints[i]); - } - } + if (str == null || str.length() == 0) { + elems = NO_ELEMENTS; + } else { + final int[] codePoints = OtpErlangString.stringToCodePoints(str); + elems = new OtpErlangObject[codePoints.length]; + for (int i = 0; i < elems.length; i++) { + elems[i] = new OtpErlangInt(codePoints[i]); + } + } } /** * Create a list containing one element. - * + * * @param elem * the elememet to make the list from. */ public OtpErlangList(final OtpErlangObject elem) { - elems = new OtpErlangObject[] { elem }; + elems = new OtpErlangObject[] { elem }; } /** * Create a list from an array of arbitrary Erlang terms. - * + * * @param elems * the array of terms from which to create the list. */ public OtpErlangList(final OtpErlangObject[] elems) { - this(elems, 0, elems.length); + this(elems, 0, elems.length); } /** * Create a list from an array of arbitrary Erlang terms. Tail can be * specified, if not null, the list will not be proper. - * + * * @param elems * array of terms from which to create the list * @param lastTail * @throws OtpErlangException */ public OtpErlangList(final OtpErlangObject[] elems, - final OtpErlangObject lastTail) throws OtpErlangException { - this(elems, 0, elems.length); - if (elems.length == 0 && lastTail != null) { - throw new OtpErlangException("Bad list, empty head, non-empty tail"); - } - this.lastTail = lastTail; + final OtpErlangObject lastTail) throws OtpErlangException { + this(elems, 0, elems.length); + if (elems.length == 0 && lastTail != null) { + throw new OtpErlangException("Bad list, empty head, non-empty tail"); + } + this.lastTail = lastTail; } /** * Create a list from an array of arbitrary Erlang terms. - * + * * @param elems * the array of terms from which to create the list. 
* @param start @@ -114,152 +114,152 @@ public class OtpErlangList extends OtpErlangObject implements * the number of terms to insert. */ public OtpErlangList(final OtpErlangObject[] elems, final int start, - final int count) { - if (elems != null && count > 0) { - this.elems = new OtpErlangObject[count]; - System.arraycopy(elems, start, this.elems, 0, count); - } else { - this.elems = NO_ELEMENTS; - } + final int count) { + if (elems != null && count > 0) { + this.elems = new OtpErlangObject[count]; + System.arraycopy(elems, start, this.elems, 0, count); + } else { + this.elems = NO_ELEMENTS; + } } /** * Create a list from a stream containing an list encoded in Erlang external * format. - * + * * @param buf * the stream containing the encoded list. - * + * * @exception OtpErlangDecodeException * if the buffer does not contain a valid external * representation of an Erlang list. */ public OtpErlangList(final OtpInputStream buf) - throws OtpErlangDecodeException { - final int arity = buf.read_list_head(); - if (arity > 0) { - elems = new OtpErlangObject[arity]; - for (int i = 0; i < arity; i++) { - elems[i] = buf.read_any(); - } - /* discard the terminating nil (empty list) or read tail */ - if (buf.peek1() == OtpExternal.nilTag) { - buf.read_nil(); - } else { - lastTail = buf.read_any(); - } - } else { - elems = NO_ELEMENTS; - } + throws OtpErlangDecodeException { + final int arity = buf.read_list_head(); + if (arity > 0) { + elems = new OtpErlangObject[arity]; + for (int i = 0; i < arity; i++) { + elems[i] = buf.read_any(); + } + /* discard the terminating nil (empty list) or read tail */ + if (buf.peek1() == OtpExternal.nilTag) { + buf.read_nil(); + } else { + lastTail = buf.read_any(); + } + } else { + elems = NO_ELEMENTS; + } } /** * Get the arity of the list. - * + * * @return the number of elements contained in the list. */ public int arity() { - return elems.length; + return elems.length; } /** * Get the specified element from the list. - * + * * @param i * the index of the requested element. List elements are numbered * as array elements, starting at 0. - * + * * @return the requested element, of null if i is not a valid element index. */ public OtpErlangObject elementAt(final int i) { - if (i >= arity() || i < 0) { - return null; - } - return elems[i]; + if (i >= arity() || i < 0) { + return null; + } + return elems[i]; } /** * Get all the elements from the list as an array. - * + * * @return an array containing all of the list's elements. */ public OtpErlangObject[] elements() { - if (arity() == 0) { - return NO_ELEMENTS; + if (arity() == 0) { + return NO_ELEMENTS; + } + final OtpErlangObject[] res = new OtpErlangObject[arity()]; + System.arraycopy(elems, 0, res, 0, res.length); + return res; } - final OtpErlangObject[] res = new OtpErlangObject[arity()]; - System.arraycopy(elems, 0, res, 0, res.length); - return res; - } /** * Get the string representation of the list. - * + * * @return the string representation of the list. 
*/ @Override public String toString() { - return toString(0); + return toString(0); } protected String toString(final int start) { - final StringBuffer s = new StringBuffer(); - s.append("["); - - for (int i = start; i < arity(); i++) { - if (i > start) { - s.append(","); - } - s.append(elems[i].toString()); - } - if (lastTail != null) { - s.append("|").append(lastTail.toString()); - } - s.append("]"); - - return s.toString(); + final StringBuffer s = new StringBuffer(); + s.append("["); + + for (int i = start; i < arity(); i++) { + if (i > start) { + s.append(","); + } + s.append(elems[i].toString()); + } + if (lastTail != null) { + s.append("|").append(lastTail.toString()); + } + s.append("]"); + + return s.toString(); } /** * Convert this list to the equivalent Erlang external representation. Note * that this method never encodes lists as strings, even when it is possible * to do so. - * + * * @param buf * An output stream to which the encoded list should be written. - * + * */ @Override public void encode(final OtpOutputStream buf) { - encode(buf, 0); + encode(buf, 0); } protected void encode(final OtpOutputStream buf, final int start) { - final int arity = arity() - start; - - if (arity > 0) { - buf.write_list_head(arity); - - for (int i = start; i < arity + start; i++) { - buf.write_any(elems[i]); - } - } - if (lastTail == null) { - buf.write_nil(); - } else { - buf.write_any(lastTail); - } + final int arity = arity() - start; + + if (arity > 0) { + buf.write_list_head(arity); + + for (int i = start; i < arity + start; i++) { + buf.write_any(elems[i]); + } + } + if (lastTail == null) { + buf.write_nil(); + } else { + buf.write_any(lastTail); + } } /** * Determine if two lists are equal. Lists are equal if they have the same * arity and all of the elements are equal. - * + * * @param o * the list to compare to. - * + * * @return true if the lists have the same arity and all the elements are * equal. 
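A minimal usage sketch for the OtpErlangList API reformatted in the hunk above (array constructor, tail constructor, arity(), isProper(), toString()). It uses only constructors and methods visible in this diff and is illustrative, not part of the patch.

import com.ericsson.otp.erlang.*;

public class ListSketch {
    public static void main(String[] args) throws OtpErlangException {
        // A proper list [1,2,3] built from an array of terms.
        OtpErlangObject[] elems = {
            new OtpErlangLong(1), new OtpErlangLong(2), new OtpErlangLong(3)
        };
        OtpErlangList proper = new OtpErlangList(elems);
        System.out.println(proper);              // [1,2,3]
        System.out.println(proper.arity());      // 3
        System.out.println(proper.isProper());   // true: the last tail is nil

        // An improper list [1,2,3|4]; the tail constructor throws
        // OtpErlangException for an empty head with a non-empty tail.
        OtpErlangList improper = new OtpErlangList(elems, new OtpErlangLong(4));
        System.out.println(improper);            // [1,2,3|4]
        System.out.println(improper.isProper()); // false
    }
}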
*/ @@ -267,236 +267,234 @@ public class OtpErlangList extends OtpErlangObject implements @Override public boolean equals(final Object o) { - /* - * Be careful to use methods even for "this", so that equals work also - * for sublists - */ - - if (!(o instanceof OtpErlangList)) { - return false; - } - - final OtpErlangList l = (OtpErlangList) o; - - final int a = arity(); - if (a != l.arity()) { - return false; - } - for (int i = 0; i < a; i++) { - if (!elementAt(i).equals(l.elementAt(i))) { - return false; // early exit - } - } - final OtpErlangObject otherTail = l.getLastTail(); - if (getLastTail() == null && otherTail == null) { - return true; - } - if (getLastTail() == null) { - return false; - } - return getLastTail().equals(l.getLastTail()); + /* + * Be careful to use methods even for "this", so that equals work also + * for sublists + */ + + if (!(o instanceof OtpErlangList)) { + return false; + } + + final OtpErlangList l = (OtpErlangList) o; + + final int a = arity(); + if (a != l.arity()) { + return false; + } + for (int i = 0; i < a; i++) { + if (!elementAt(i).equals(l.elementAt(i))) { + return false; // early exit + } + } + final OtpErlangObject otherTail = l.getLastTail(); + if (getLastTail() == null && otherTail == null) { + return true; + } + if (getLastTail() == null) { + return false; + } + return getLastTail().equals(l.getLastTail()); } public OtpErlangObject getLastTail() { - return lastTail; + return lastTail; } - + @Override protected int doHashCode() { - OtpErlangObject.Hash hash = new OtpErlangObject.Hash(4); - final int a = arity(); - if (a == 0) { - return (int)3468870702L; - } - for (int i = 0; i < a; i++) { - hash.combine(elementAt(i).hashCode()); - } - final OtpErlangObject t = getLastTail(); - if (t != null) { - int h = t.hashCode(); - hash.combine(h, h); - } - return hash.valueOf(); + final OtpErlangObject.Hash hash = new OtpErlangObject.Hash(4); + final int a = arity(); + if (a == 0) { + return (int) 3468870702L; + } + for (int i = 0; i < a; i++) { + hash.combine(elementAt(i).hashCode()); + } + final OtpErlangObject t = getLastTail(); + if (t != null) { + final int h = t.hashCode(); + hash.combine(h, h); + } + return hash.valueOf(); } - + @Override public Object clone() { - try { - return new OtpErlangList(elements(), getLastTail()); - } catch (final OtpErlangException e) { - throw new AssertionError(this); - } + try { + return new OtpErlangList(elements(), getLastTail()); + } catch (final OtpErlangException e) { + throw new AssertionError(this); + } } public Iterator<OtpErlangObject> iterator() { - return iterator(0); + return iterator(0); } private Iterator<OtpErlangObject> iterator(final int start) { - return new Itr(start); + return new Itr(start); } /** * @return true if the list is proper, i.e. 
the last tail is nil */ public boolean isProper() { - return lastTail == null; + return lastTail == null; } public OtpErlangObject getHead() { - if (arity() > 0) { - return elems[0]; - } - return null; + if (arity() > 0) { + return elems[0]; + } + return null; } public OtpErlangObject getTail() { - return getNthTail(1); + return getNthTail(1); } public OtpErlangObject getNthTail(final int n) { - final int arity = arity(); - if (arity >= n) { - if (arity == n && lastTail != null) { - return lastTail; - } - return new SubList(this, n); - } - return null; + final int arity = arity(); + if (arity >= n) { + if (arity == n && lastTail != null) { + return lastTail; + } + return new SubList(this, n); + } + return null; } /** - * Convert a list of integers into a Unicode string, - * interpreting each integer as a Unicode code point value. - * - * @return A java.lang.String object created through its - * constructor String(int[], int, int). + * Convert a list of integers into a Unicode string, interpreting each + * integer as a Unicode code point value. + * + * @return A java.lang.String object created through its constructor + * String(int[], int, int). * * @exception OtpErlangException - * for non-proper and non-integer lists. + * for non-proper and non-integer lists. * * @exception OtpErlangRangeException - * if any integer does not fit into a Java int. + * if any integer does not fit into a Java int. * * @exception java.security.InvalidParameterException - * if any integer is not within the Unicode range. + * if any integer is not within the Unicode range. * * @see String#String(int[], int, int) * */ public String stringValue() throws OtpErlangException { - if (! isProper()) { - throw new OtpErlangException("Non-proper list: " + this); - } - final int[] values = new int[arity()]; - for (int i = 0; i < values.length; ++i) { - final OtpErlangObject o = elementAt(i); - if (! 
(o instanceof OtpErlangLong)) { - throw new OtpErlangException("Non-integer term: " + o); - } - final OtpErlangLong l = (OtpErlangLong) o; - values[i] = l.intValue(); - } - return new String(values, 0, values.length); + if (!isProper()) { + throw new OtpErlangException("Non-proper list: " + this); + } + final int[] values = new int[arity()]; + for (int i = 0; i < values.length; ++i) { + final OtpErlangObject o = elementAt(i); + if (!(o instanceof OtpErlangLong)) { + throw new OtpErlangException("Non-integer term: " + o); + } + final OtpErlangLong l = (OtpErlangLong) o; + values[i] = l.intValue(); + } + return new String(values, 0, values.length); } - - public static class SubList extends OtpErlangList { - private static final long serialVersionUID = OtpErlangList.serialVersionUID; - - private final int start; - - private final OtpErlangList parent; - - private SubList(final OtpErlangList parent, final int start) { - super(); - this.parent = parent; - this.start = start; - } - - @Override - public int arity() { - return parent.arity() - start; - } - - @Override - public OtpErlangObject elementAt(final int i) { - return parent.elementAt(i + start); - } - - @Override - public OtpErlangObject[] elements() { - final int n = parent.arity() - start; - final OtpErlangObject[] res = new OtpErlangObject[n]; - for (int i = 0; i < res.length; i++) { - res[i] = parent.elementAt(i + start); - } - return res; - } - - @Override - public boolean isProper() { - return parent.isProper(); - } - - @Override - public OtpErlangObject getHead() { - return parent.elementAt(start); - } - - @Override - public OtpErlangObject getNthTail(final int n) { - return parent.getNthTail(n + start); - } - - @Override - public String toString() { - return parent.toString(start); - } - - @Override - public void encode(final OtpOutputStream stream) { - parent.encode(stream, start); - } - - @Override - public OtpErlangObject getLastTail() { - return parent.getLastTail(); - } - - @Override - public Iterator<OtpErlangObject> iterator() { - return parent.iterator(start); - } + private static final long serialVersionUID = OtpErlangList.serialVersionUID; + + private final int start; + + private final OtpErlangList parent; + + private SubList(final OtpErlangList parent, final int start) { + super(); + this.parent = parent; + this.start = start; + } + + @Override + public int arity() { + return parent.arity() - start; + } + + @Override + public OtpErlangObject elementAt(final int i) { + return parent.elementAt(i + start); + } + + @Override + public OtpErlangObject[] elements() { + final int n = parent.arity() - start; + final OtpErlangObject[] res = new OtpErlangObject[n]; + for (int i = 0; i < res.length; i++) { + res[i] = parent.elementAt(i + start); + } + return res; + } + + @Override + public boolean isProper() { + return parent.isProper(); + } + + @Override + public OtpErlangObject getHead() { + return parent.elementAt(start); + } + + @Override + public OtpErlangObject getNthTail(final int n) { + return parent.getNthTail(n + start); + } + + @Override + public String toString() { + return parent.toString(start); + } + + @Override + public void encode(final OtpOutputStream stream) { + parent.encode(stream, start); + } + + @Override + public OtpErlangObject getLastTail() { + return parent.getLastTail(); + } + + @Override + public Iterator<OtpErlangObject> iterator() { + return parent.iterator(start); + } } private class Itr implements Iterator<OtpErlangObject> { - /** - * Index of element to be returned by subsequent call to next. 
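The Iterable implementation and stringValue() handled in this hunk can be exercised as below; a sketch only, assuming a proper, all-integer list (otherwise stringValue() throws OtpErlangException, as in the code above).

import com.ericsson.otp.erlang.*;

public class IterSketch {
    public static void main(String[] args) throws OtpErlangException {
        // new OtpErlangList(String) builds a list of Unicode code points.
        OtpErlangList chars = new OtpErlangList("abc");
        for (OtpErlangObject o : chars) {         // read-only iterator;
            System.out.println(o);                // remove() is unsupported
        }                                         // prints 97, 98, 99
        System.out.println(chars.stringValue());  // "abc" again
    }
}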
- */ - private int cursor; - - private Itr(final int cursor) { - this.cursor = cursor; - } - - public boolean hasNext() { - return cursor < elems.length; - } - - public OtpErlangObject next() { - try { - return elems[cursor++]; - } catch (final IndexOutOfBoundsException e) { - throw new NoSuchElementException(); - } - } - - public void remove() { - throw new UnsupportedOperationException( - "OtpErlangList cannot be modified!"); - } + /** + * Index of element to be returned by subsequent call to next. + */ + private int cursor; + + private Itr(final int cursor) { + this.cursor = cursor; + } + + public boolean hasNext() { + return cursor < elems.length; + } + + public OtpErlangObject next() { + try { + return elems[cursor++]; + } catch (final IndexOutOfBoundsException e) { + throw new NoSuchElementException(); + } + } + + public void remove() { + throw new UnsupportedOperationException( + "OtpErlangList cannot be modified!"); + } } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangLong.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangLong.java index c6021a6ae1..47a691224b 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangLong.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangLong.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -23,11 +23,11 @@ import java.math.BigInteger; /** * Provides a Java representation of Erlang integral types. Erlang does not * distinguish between different integral types, however this class and its - * subclasses {@link OtpErlangByte}, {@link OtpErlangChar}, - * {@link OtpErlangInt}, and {@link OtpErlangShort} attempt to map the Erlang - * types onto the various Java integral types. Two additional classes, - * {@link OtpErlangUInt} and {@link OtpErlangUShort} are provided for Corba - * compatibility. See the documentation for IC for more information. + * subclasses {@link OtpErlangByte}, {@link OtpErlangChar}, {@link OtpErlangInt} + * , and {@link OtpErlangShort} attempt to map the Erlang types onto the various + * Java integral types. Two additional classes, {@link OtpErlangUInt} and + * {@link OtpErlangUShort} are provided for Corba compatibility. See the + * documentation for IC for more information. */ public class OtpErlangLong extends OtpErlangObject { // don't change this! @@ -38,354 +38,353 @@ public class OtpErlangLong extends OtpErlangObject { /** * Create an Erlang integer from the given value. - * + * * @param l - * the long value to use. + * the long value to use. */ public OtpErlangLong(final long l) { - val = l; + val = l; } /** * Create an Erlang integer from the given value. - * + * * @param v - * the big integer value to use. + * the big integer value to use. 
*/ public OtpErlangLong(final BigInteger v) { - if (v == null) { - throw new java.lang.NullPointerException(); - } - if (v.bitLength() < 64) { - val = v.longValue(); - } else { - bigVal = v; - } + if (v == null) { + throw new java.lang.NullPointerException(); + } + if (v.bitLength() < 64) { + val = v.longValue(); + } else { + bigVal = v; + } } /** * Create an Erlang integer from a stream containing an integer encoded in * Erlang external format. - * + * * @param buf - * the stream containing the encoded value. - * + * the stream containing the encoded value. + * * @exception OtpErlangDecodeException - * if the buffer does not contain a valid external - * representation of an Erlang integer. + * if the buffer does not contain a valid external + * representation of an Erlang integer. */ public OtpErlangLong(final OtpInputStream buf) - throws OtpErlangDecodeException { - final byte[] b = buf.read_integer_byte_array(); - try { - val = OtpInputStream.byte_array_to_long(b, false); - } catch (final OtpErlangDecodeException e) { - bigVal = new BigInteger(b); - } + throws OtpErlangDecodeException { + final byte[] b = buf.read_integer_byte_array(); + try { + val = OtpInputStream.byte_array_to_long(b, false); + } catch (final OtpErlangDecodeException e) { + bigVal = new BigInteger(b); + } } /** * Get this number as a BigInteger. - * + * * @return the value of this number, as a BigInteger. */ public BigInteger bigIntegerValue() { - if (bigVal != null) { - return bigVal; - } - return BigInteger.valueOf(val); + if (bigVal != null) { + return bigVal; + } + return BigInteger.valueOf(val); } /** * Get this number as a long, or rather truncate all but the least * significant 64 bits from the 2's complement representation of this number * and return them as a long. - * + * * @return the value of this number, as a long. */ public long longValue() { - if (bigVal != null) { - return bigVal.longValue(); - } - return val; + if (bigVal != null) { + return bigVal.longValue(); + } + return val; } /** * Determine if this value can be represented as a long without truncation. - * + * * @return true if this value fits in a long, false otherwise. */ public boolean isLong() { - // To just chech this.bigVal is a wee bit to simple, since - // there just might have be a mean bignum that arrived on - // a stream, and was a long disguised as more than 8 byte integer. - if (bigVal != null) { - return bigVal.bitLength() < 64; - } - return true; + // To just chech this.bigVal is a wee bit to simple, since + // there just might have be a mean bignum that arrived on + // a stream, and was a long disguised as more than 8 byte integer. + if (bigVal != null) { + return bigVal.bitLength() < 64; + } + return true; } /** * Determine if this value can be represented as an unsigned long without * truncation, that is if the value is non-negative and its bit pattern * completely fits in a long. - * + * * @return true if this value is non-negative and fits in a long false * otherwise. */ public boolean isULong() { - // Here we have the same problem as for isLong(), plus - // the whole range 1<<63 .. (1<<64-1) is allowed. - if (bigVal != null) { - return bigVal.signum() >= 0 && bigVal.bitLength() <= 64; - } - return val >= 0; + // Here we have the same problem as for isLong(), plus + // the whole range 1<<63 .. (1<<64-1) is allowed. 
+ if (bigVal != null) { + return bigVal.signum() >= 0 && bigVal.bitLength() <= 64; + } + return val >= 0; } /** * Returns the number of bits in the minimal two's-complement representation * of this BigInteger, excluding a sign bit. - * + * * @return number of bits in the minimal two's-complement representation of * this BigInteger, excluding a sign bit. */ public int bitLength() { - if (bigVal != null) { - return bigVal.bitLength(); - } - if (val == 0 || val == -1) { - return 0; + if (bigVal != null) { + return bigVal.bitLength(); + } + if (val == 0 || val == -1) { + return 0; + } + // Binary search for bit length + int i = 32; // mask length + long m = (1L << i) - 1; // AND mask with ones in little end + if (val < 0) { + m = ~m; // OR mask with ones in big end + for (int j = i >> 1; j > 0; j >>= 1) { // mask delta + if ((val | m) == val) { // mask >= enough + i -= j; + m >>= j; // try less bits + } else { + i += j; + m <<= j; // try more bits + } + } + if ((val | m) != val) { + i++; // mask < enough + } + } else { + for (int j = i >> 1; j > 0; j >>= 1) { // mask delta + if ((val & m) == val) { // mask >= enough + i -= j; + m >>= j; // try less bits + } else { + i += j; + m = m << j | m; // try more bits + } + } + if ((val & m) != val) { + i++; // mask < enough + } + } + return i; } - // Binary search for bit length - int i = 32; // mask length - long m = (1L << i) - 1; // AND mask with ones in little end - if (val < 0) { - m = ~m; // OR mask with ones in big end - for (int j = i >> 1; j > 0; j >>= 1) { // mask delta - if ((val | m) == val) { // mask >= enough - i -= j; - m >>= j; // try less bits - } else { - i += j; - m <<= j; // try more bits - } - } - if ((val | m) != val) { - i++; // mask < enough - } - } else { - for (int j = i >> 1; j > 0; j >>= 1) { // mask delta - if ((val & m) == val) { // mask >= enough - i -= j; - m >>= j; // try less bits - } else { - i += j; - m = m << j | m; // try more bits - } - } - if ((val & m) != val) { - i++; // mask < enough - } - } - return i; - } /** * Return the signum function of this object. - * + * * @return -1, 0 or 1 as the value is negative, zero or positive. */ public int signum() { - if (bigVal != null) { - return bigVal.signum(); - } - return val > 0 ? 1 : val < 0 ? -1 : 0; + if (bigVal != null) { + return bigVal.signum(); + } + return val > 0 ? 1 : val < 0 ? -1 : 0; } /** * Get this number as an int. - * + * * @return the value of this number, as an int. - * + * * @exception OtpErlangRangeException - * if the value is too large to be represented as an int. + * if the value is too large to be represented as an int. */ public int intValue() throws OtpErlangRangeException { - final long l = longValue(); - final int i = (int) l; + final long l = longValue(); + final int i = (int) l; - if (i != l) { - throw new OtpErlangRangeException("Value too large for int: " + val); - } + if (i != l) { + throw new OtpErlangRangeException("Value too large for int: " + val); + } - return i; + return i; } /** * Get this number as a non-negative int. - * + * * @return the value of this number, as an int. - * + * * @exception OtpErlangRangeException - * if the value is too large to be represented as an int, - * or if the value is negative. + * if the value is too large to be represented as an int, or + * if the value is negative. 
*/ public int uIntValue() throws OtpErlangRangeException { - final long l = longValue(); - final int i = (int) l; + final long l = longValue(); + final int i = (int) l; - if (i != l) { - throw new OtpErlangRangeException("Value too large for int: " + val); - } else if (i < 0) { - throw new OtpErlangRangeException("Value not positive: " + val); - } + if (i != l) { + throw new OtpErlangRangeException("Value too large for int: " + val); + } else if (i < 0) { + throw new OtpErlangRangeException("Value not positive: " + val); + } - return i; + return i; } /** * Get this number as a short. - * + * * @return the value of this number, as a short. - * + * * @exception OtpErlangRangeException - * if the value is too large to be represented as a - * short. + * if the value is too large to be represented as a short. */ public short shortValue() throws OtpErlangRangeException { - final long l = longValue(); - final short i = (short) l; + final long l = longValue(); + final short i = (short) l; - if (i != l) { - throw new OtpErlangRangeException("Value too large for short: " - + val); - } + if (i != l) { + throw new OtpErlangRangeException("Value too large for short: " + + val); + } - return i; + return i; } /** * Get this number as a non-negative short. - * + * * @return the value of this number, as a short. - * + * * @exception OtpErlangRangeException - * if the value is too large to be represented as a - * short, or if the value is negative. + * if the value is too large to be represented as a short, or + * if the value is negative. */ public short uShortValue() throws OtpErlangRangeException { - final long l = longValue(); - final short i = (short) l; + final long l = longValue(); + final short i = (short) l; - if (i != l) { - throw new OtpErlangRangeException("Value too large for short: " - + val); - } else if (i < 0) { - throw new OtpErlangRangeException("Value not positive: " + val); - } + if (i != l) { + throw new OtpErlangRangeException("Value too large for short: " + + val); + } else if (i < 0) { + throw new OtpErlangRangeException("Value not positive: " + val); + } - return i; + return i; } /** * Get this number as a char. - * + * * @return the char value of this number. - * + * * @exception OtpErlangRangeException - * if the value is too large to be represented as a char. + * if the value is too large to be represented as a char. */ public char charValue() throws OtpErlangRangeException { - final long l = longValue(); - final char i = (char) l; + final long l = longValue(); + final char i = (char) l; - if (i != l) { - throw new OtpErlangRangeException("Value too large for char: " - + val); - } + if (i != l) { + throw new OtpErlangRangeException("Value too large for char: " + + val); + } - return i; + return i; } /** * Get this number as a byte. - * + * * @return the byte value of this number. - * + * * @exception OtpErlangRangeException - * if the value is too large to be represented as a byte. + * if the value is too large to be represented as a byte. */ public byte byteValue() throws OtpErlangRangeException { - final long l = longValue(); - final byte i = (byte) l; + final long l = longValue(); + final byte i = (byte) l; - if (i != l) { - throw new OtpErlangRangeException("Value too large for byte: " - + val); - } + if (i != l) { + throw new OtpErlangRangeException("Value too large for byte: " + + val); + } - return i; + return i; } /** * Get the string representation of this number. - * + * * @return the string representation of this number. 
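A short sketch of the range predicates and conversions in OtpErlangLong shown above; the concrete values are made up for illustration.

import com.ericsson.otp.erlang.*;
import java.math.BigInteger;

public class LongSketch {
    public static void main(String[] args) {
        // 2^64 - 1 does not fit a signed long, but does fit an unsigned one.
        OtpErlangLong big =
            new OtpErlangLong(new BigInteger("18446744073709551615"));
        System.out.println(big.isLong());    // false
        System.out.println(big.isULong());   // true
        System.out.println(big.bitLength()); // 64

        try {
            new OtpErlangLong(300).byteValue();  // out of range for a byte
        } catch (OtpErlangRangeException e) {
            System.out.println(e.getMessage());  // Value too large for byte: 300
        }
    }
}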
*/ @Override public String toString() { - if (bigVal != null) { - return "" + bigVal; - } - return "" + val; + if (bigVal != null) { + return "" + bigVal; + } + return "" + val; } /** * Convert this number to the equivalent Erlang external representation. - * + * * @param buf - * an output stream to which the encoded number should be - * written. + * an output stream to which the encoded number should be + * written. */ @Override public void encode(final OtpOutputStream buf) { - if (bigVal != null) { - buf.write_big_integer(bigVal); - } else { - buf.write_long(val); - } + if (bigVal != null) { + buf.write_big_integer(bigVal); + } else { + buf.write_long(val); + } } /** * Determine if two numbers are equal. Numbers are equal if they contain the * same value. - * + * * @param o - * the number to compare to. - * + * the number to compare to. + * * @return true if the numbers have the same value. */ @Override public boolean equals(final Object o) { - if (!(o instanceof OtpErlangLong)) { - return false; - } - - final OtpErlangLong that = (OtpErlangLong) o; - - if (bigVal != null && that.bigVal != null) { - return bigVal.equals(that.bigVal); - } else if (bigVal == null && that.bigVal == null) { - return val == that.val; - } - return false; + if (!(o instanceof OtpErlangLong)) { + return false; + } + + final OtpErlangLong that = (OtpErlangLong) o; + + if (bigVal != null && that.bigVal != null) { + return bigVal.equals(that.bigVal); + } else if (bigVal == null && that.bigVal == null) { + return val == that.val; + } + return false; } - + @Override protected int doHashCode() { - if (bigVal != null) { - return bigVal.hashCode(); - } - return BigInteger.valueOf(val).hashCode(); + if (bigVal != null) { + return bigVal.hashCode(); + } + return BigInteger.valueOf(val).hashCode(); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangMap.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangMap.java index 7f1a64b87d..7f2621923a 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangMap.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangMap.java @@ -18,15 +18,14 @@ */ package com.ericsson.otp.erlang; - /** * Provides a Java representation of Erlang maps. Maps are created from one or * more arbitrary Erlang terms. - * + * * <p> * The arity of the map is the number of elements it contains. The keys and * values can be retrieved as arrays and the value for a key can be queried. - * + * */ public class OtpErlangMap extends OtpErlangObject { // don't change this! @@ -39,23 +38,23 @@ public class OtpErlangMap extends OtpErlangObject { /** * Create a map from an array of keys and an array of values. - * + * * @param keys * the array of terms to create the map keys from. * @param values * the array of terms to create the map values from. - * + * * @exception java.lang.IllegalArgumentException * if any array is empty (null) or contains null elements. */ public OtpErlangMap(final OtpErlangObject[] keys, - final OtpErlangObject[] values) { - this(keys, 0, keys.length, values, 0, values.length); + final OtpErlangObject[] values) { + this(keys, 0, keys.length, values, 0, values.length); } /** * Create a map from an array of terms. - * + * * @param keys * the array of terms to create the map from. * @param kstart @@ -68,228 +67,228 @@ public class OtpErlangMap extends OtpErlangObject { * the offset of the first value to insert. * @param vcount * the number of values to insert. 
- * + * * @exception java.lang.IllegalArgumentException * if any array is empty (null) or contains null elements. * @exception java.lang.IllegalArgumentException * if kcount and vcount differ. */ public OtpErlangMap(final OtpErlangObject[] keys, final int kstart, - final int kcount, final OtpErlangObject[] values, final int vstart, - final int vcount) { - if (keys == null || values == null) { - throw new java.lang.IllegalArgumentException( - "Map content can't be null"); - } else if (kcount != vcount) { - throw new java.lang.IllegalArgumentException( - "Map keys and values must have same arity"); - } else if (vcount < 1) { - this.keys = NO_ELEMENTS; - this.values = NO_ELEMENTS; - } else { - this.keys = new OtpErlangObject[vcount]; - for (int i = 0; i < vcount; i++) { - if (keys[kstart + i] != null) { - this.keys[i] = keys[kstart + i]; - } else { - throw new java.lang.IllegalArgumentException( - "Map key cannot be null (element" + (kstart + i) - + ")"); - } - } - this.values = new OtpErlangObject[vcount]; - for (int i = 0; i < vcount; i++) { - if (values[vstart + i] != null) { - this.values[i] = values[vstart + i]; - } else { - throw new java.lang.IllegalArgumentException( - "Map value cannot be null (element" + (vstart + i) - + ")"); - } - } - } + final int kcount, final OtpErlangObject[] values, final int vstart, + final int vcount) { + if (keys == null || values == null) { + throw new java.lang.IllegalArgumentException( + "Map content can't be null"); + } else if (kcount != vcount) { + throw new java.lang.IllegalArgumentException( + "Map keys and values must have same arity"); + } else if (vcount < 1) { + this.keys = NO_ELEMENTS; + this.values = NO_ELEMENTS; + } else { + this.keys = new OtpErlangObject[vcount]; + for (int i = 0; i < vcount; i++) { + if (keys[kstart + i] != null) { + this.keys[i] = keys[kstart + i]; + } else { + throw new java.lang.IllegalArgumentException( + "Map key cannot be null (element" + (kstart + i) + + ")"); + } + } + this.values = new OtpErlangObject[vcount]; + for (int i = 0; i < vcount; i++) { + if (values[vstart + i] != null) { + this.values[i] = values[vstart + i]; + } else { + throw new java.lang.IllegalArgumentException( + "Map value cannot be null (element" + (vstart + i) + + ")"); + } + } + } } /** * Create a map from a stream containing a map encoded in Erlang external * format. - * + * * @param buf * the stream containing the encoded map. - * + * * @exception OtpErlangDecodeException * if the buffer does not contain a valid external * representation of an Erlang map. */ public OtpErlangMap(final OtpInputStream buf) - throws OtpErlangDecodeException { - final int arity = buf.read_map_head(); + throws OtpErlangDecodeException { + final int arity = buf.read_map_head(); - if (arity > 0) { - keys = new OtpErlangObject[arity]; - values = new OtpErlangObject[arity]; + if (arity > 0) { + keys = new OtpErlangObject[arity]; + values = new OtpErlangObject[arity]; - for (int i = 0; i < arity; i++) { - keys[i] = buf.read_any(); - values[i] = buf.read_any(); - } - } else { - keys = NO_ELEMENTS; - values = NO_ELEMENTS; - } + for (int i = 0; i < arity; i++) { + keys[i] = buf.read_any(); + values[i] = buf.read_any(); + } + } else { + keys = NO_ELEMENTS; + values = NO_ELEMENTS; + } } /** * Get the arity of the map. - * + * * @return the number of elements contained in the map. */ public int arity() { - return keys.length; + return keys.length; } /** * Get the specified value from the map. - * + * * @param key * the key of the requested value. 
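A usage sketch for the map constructor and get() documented above. OtpErlangAtom is assumed to be available from the same package (it is not part of this hunk); everything else comes from the surrounding code.

import com.ericsson.otp.erlang.*;

public class MapSketch {
    public static void main(String[] args) {
        OtpErlangObject[] keys   = { new OtpErlangAtom("a"), new OtpErlangAtom("b") };
        OtpErlangObject[] values = { new OtpErlangLong(1),   new OtpErlangLong(2)   };
        OtpErlangMap map = new OtpErlangMap(keys, values);

        System.out.println(map);                             // #{a => 1,b => 2}
        System.out.println(map.get(new OtpErlangAtom("b"))); // 2 (lookup via equals())
        System.out.println(map.get(new OtpErlangAtom("c"))); // null for a missing key
    }
}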
- * + * * @return the requested value, of null if key is not a valid key. */ public OtpErlangObject get(final OtpErlangObject key) { - if (key == null) { - return null; - } - for (int i = 0; i < keys.length; i++) { - if (key.equals(keys[i])) { - return values[i]; - } - } - return null; + if (key == null) { + return null; + } + for (int i = 0; i < keys.length; i++) { + if (key.equals(keys[i])) { + return values[i]; + } + } + return null; } /** * Get all the keys from the map as an array. - * + * * @return an array containing all of the map's keys. */ public OtpErlangObject[] keys() { - final OtpErlangObject[] res = new OtpErlangObject[arity()]; - System.arraycopy(keys, 0, res, 0, res.length); - return res; + final OtpErlangObject[] res = new OtpErlangObject[arity()]; + System.arraycopy(keys, 0, res, 0, res.length); + return res; } /** * Get all the values from the map as an array. - * + * * @return an array containing all of the map's values. */ public OtpErlangObject[] values() { - final OtpErlangObject[] res = new OtpErlangObject[arity()]; - System.arraycopy(values, 0, res, 0, res.length); - return res; + final OtpErlangObject[] res = new OtpErlangObject[arity()]; + System.arraycopy(values, 0, res, 0, res.length); + return res; } /** * Get the string representation of the map. - * + * * @return the string representation of the map. */ @Override public String toString() { - int i; - final StringBuffer s = new StringBuffer(); - final int arity = values.length; + int i; + final StringBuffer s = new StringBuffer(); + final int arity = values.length; - s.append("#{"); + s.append("#{"); - for (i = 0; i < arity; i++) { - if (i > 0) { - s.append(","); - } - s.append(keys[i].toString()); - s.append(" => "); - s.append(values[i].toString()); - } + for (i = 0; i < arity; i++) { + if (i > 0) { + s.append(","); + } + s.append(keys[i].toString()); + s.append(" => "); + s.append(values[i].toString()); + } - s.append("}"); + s.append("}"); - return s.toString(); + return s.toString(); } /** * Convert this map to the equivalent Erlang external representation. - * + * * @param buf * an output stream to which the encoded map should be written. */ @Override public void encode(final OtpOutputStream buf) { - final int arity = values.length; + final int arity = values.length; - buf.write_map_head(arity); + buf.write_map_head(arity); - for (int i = 0; i < arity; i++) { - buf.write_any(keys[i]); - buf.write_any(values[i]); - } + for (int i = 0; i < arity; i++) { + buf.write_any(keys[i]); + buf.write_any(values[i]); + } } /** * Determine if two maps are equal. Maps are equal if they have the same * arity and all of the elements are equal. - * + * * @param o * the map to compare to. - * + * * @return true if the maps have the same arity and all the elements are * equal. 
*/ @Override public boolean equals(final Object o) { - if (!(o instanceof OtpErlangMap)) { - return false; - } + if (!(o instanceof OtpErlangMap)) { + return false; + } - final OtpErlangMap t = (OtpErlangMap) o; - final int a = arity(); + final OtpErlangMap t = (OtpErlangMap) o; + final int a = arity(); - if (a != t.arity()) { - return false; - } + if (a != t.arity()) { + return false; + } - for (int i = 0; i < a; i++) { - if (!keys[i].equals(t.keys[i])) { - return false; // early exit - } - } - for (int i = 0; i < a; i++) { - if (!values[i].equals(t.values[i])) { - return false; // early exit - } - } + for (int i = 0; i < a; i++) { + if (!keys[i].equals(t.keys[i])) { + return false; // early exit + } + } + for (int i = 0; i < a; i++) { + if (!values[i].equals(t.values[i])) { + return false; // early exit + } + } - return true; + return true; } @Override protected int doHashCode() { - final OtpErlangObject.Hash hash = new OtpErlangObject.Hash(9); - final int a = arity(); - hash.combine(a); - for (int i = 0; i < a; i++) { - hash.combine(keys[i].hashCode()); - } - for (int i = 0; i < a; i++) { - hash.combine(values[i].hashCode()); - } - return hash.valueOf(); + final OtpErlangObject.Hash hash = new OtpErlangObject.Hash(9); + final int a = arity(); + hash.combine(a); + for (int i = 0; i < a; i++) { + hash.combine(keys[i].hashCode()); + } + for (int i = 0; i < a; i++) { + hash.combine(values[i].hashCode()); + } + return hash.valueOf(); } @Override public Object clone() { - final OtpErlangMap newMap = (OtpErlangMap) super.clone(); - newMap.values = values.clone(); - return newMap; + final OtpErlangMap newMap = (OtpErlangMap) super.clone(); + newMap.values = values.clone(); + return newMap; } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangObject.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangObject.java index 5215e5887b..7ab160bcdd 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangObject.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangObject.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -26,7 +26,7 @@ import java.io.Serializable; */ public abstract class OtpErlangObject implements Serializable, Cloneable { protected int hashCodeValue = 0; - + // don't change this! static final long serialVersionUID = -8435938572339430044L; @@ -42,10 +42,9 @@ public abstract class OtpErlangObject implements Serializable, Cloneable { * Convert the object according to the rules of the Erlang external format. * This is mainly used for sending Erlang terms in messages, however it can * also be used for storing terms to disk. - * + * * @param buf - * an output stream to which the encoded term should be - * written. + * an output stream to which the encoded term should be written. 
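The abstract encode() and static decode() above pair up into the usual round trip through the external format. This sketch assumes the OtpOutputStream()/OtpInputStream(byte[]) constructors and the inherited toByteArray(), which jinterface provides via its byte-array stream base classes.

import com.ericsson.otp.erlang.*;

public class RoundTripSketch {
    public static void main(String[] args) throws OtpErlangDecodeException {
        OtpErlangObject term = new OtpErlangList(new OtpErlangObject[] {
            new OtpErlangLong(42), new OtpErlangLong(7) });

        OtpOutputStream out = new OtpOutputStream();
        term.encode(out);                        // term -> external format bytes

        OtpInputStream in = new OtpInputStream(out.toByteArray());
        OtpErlangObject copy = OtpErlangObject.decode(in);

        System.out.println(term.equals(copy));   // true: equal component-wise
    }
}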
*/ public abstract void encode(OtpOutputStream buf); @@ -54,137 +53,150 @@ public abstract class OtpErlangObject implements Serializable, Cloneable { * corresponding Erlang data type object. This method is normally used when * Erlang terms are received in messages, however it can also be used for * reading terms from disk. - * + * * @param buf - * an input stream containing one or more encoded Erlang - * terms. - * + * an input stream containing one or more encoded Erlang terms. + * * @return an object representing one of the Erlang data types. - * + * * @exception OtpErlangDecodeException - * if the stream does not contain a valid representation - * of an Erlang term. + * if the stream does not contain a valid representation of + * an Erlang term. */ public static OtpErlangObject decode(final OtpInputStream buf) - throws OtpErlangDecodeException { - return buf.read_any(); + throws OtpErlangDecodeException { + return buf.read_any(); } /** * Determine if two Erlang objects are equal. In general, Erlang objects are * equal if the components they consist of are equal. - * + * * @param o - * the object to compare to. - * + * the object to compare to. + * * @return true if the objects are identical. */ @Override public abstract boolean equals(Object o); - + @Override public int hashCode() { - if (hashCodeValue == 0) { - hashCodeValue = doHashCode(); - } - return hashCodeValue; + if (hashCodeValue == 0) { + hashCodeValue = doHashCode(); + } + return hashCodeValue; } - + protected int doHashCode() { - return super.hashCode(); + return super.hashCode(); } - + @Override public Object clone() { - try { - return super.clone(); - } catch (final CloneNotSupportedException e) { - /* cannot happen */ - throw new InternalError(e.toString()); - } + try { + return super.clone(); + } catch (final CloneNotSupportedException e) { + /* cannot happen */ + throw new InternalError(e.toString()); + } } protected final static class Hash { - int abc[] = {0, 0, 0}; - - /* Hash function suggested by Bob Jenkins. - * The same as in the Erlang VM (beam); utils.c. 
- */ - - private final static int HASH_CONST[] = { - 0, // not used - 0x9e3779b9, // the golden ratio; an arbitrary value - 0x3c6ef372, // (hashHConst[1] * 2) % (1<<32) - 0xdaa66d2b, // 1 3 - 0x78dde6e4, // 1 4 - 0x1715609d, // 1 5 - 0xb54cda56, // 1 6 - 0x5384540f, // 1 7 - 0xf1bbcdc8, // 1 8 - 0x8ff34781, // 1 9 - 0x2e2ac13a, // 1 10 - 0xcc623af3, // 1 11 - 0x6a99b4ac, // 1 12 - 0x08d12e65, // 1 13 - 0xa708a81e, // 1 14 - 0x454021d7, // 1 15 - }; - - protected Hash(int i) { - abc[0] = abc[1] = HASH_CONST[i]; - abc[2] = 0; - } - - //protected Hash() { - // Hash(1); - //} - - private void mix() { - abc[0] -= abc[1]; abc[0] -= abc[2]; abc[0] ^= (abc[2]>>>13); - abc[1] -= abc[2]; abc[1] -= abc[0]; abc[1] ^= (abc[0]<<8); - abc[2] -= abc[0]; abc[2] -= abc[1]; abc[2] ^= (abc[1]>>>13); - abc[0] -= abc[1]; abc[0] -= abc[2]; abc[0] ^= (abc[2]>>>12); - abc[1] -= abc[2]; abc[1] -= abc[0]; abc[1] ^= (abc[0]<<16); - abc[2] -= abc[0]; abc[2] -= abc[1]; abc[2] ^= (abc[1]>>>5); - abc[0] -= abc[1]; abc[0] -= abc[2]; abc[0] ^= (abc[2]>>>3); - abc[1] -= abc[2]; abc[1] -= abc[0]; abc[1] ^= (abc[0]<<10); - abc[2] -= abc[0]; abc[2] -= abc[1]; abc[2] ^= (abc[1]>>>15); - } - - protected void combine(int a) { - abc[0] += a; - mix(); - } - - protected void combine(long a) { - combine((int)(a >>> 32), (int) a); - } - - protected void combine(int a, int b) { - abc[0] += a; - abc[1] += b; - mix(); - } - - protected void combine(byte b[]) { - int j, k; - for (j = 0, k = 0; - j + 4 < b.length; - j += 4, k += 1, k %= 3) { - abc[k] += (b[j+0] & 0xFF) + (b[j+1]<<8 & 0xFF00) - + (b[j+2]<<16 & 0xFF0000) + (b[j+3]<<24); - mix(); - } - for (int n = 0, m = 0xFF; - j < b.length; - j++, n += 8, m <<= 8) { - abc[k] += b[j]<<n & m; - } - mix(); - } - - protected int valueOf() { - return abc[2]; - } + int abc[] = { 0, 0, 0 }; + + /* + * Hash function suggested by Bob Jenkins. The same as in the Erlang VM + * (beam); utils.c. 
+ */ + + private final static int HASH_CONST[] = { 0, // not used + 0x9e3779b9, // the golden ratio; an arbitrary value + 0x3c6ef372, // (hashHConst[1] * 2) % (1<<32) + 0xdaa66d2b, // 1 3 + 0x78dde6e4, // 1 4 + 0x1715609d, // 1 5 + 0xb54cda56, // 1 6 + 0x5384540f, // 1 7 + 0xf1bbcdc8, // 1 8 + 0x8ff34781, // 1 9 + 0x2e2ac13a, // 1 10 + 0xcc623af3, // 1 11 + 0x6a99b4ac, // 1 12 + 0x08d12e65, // 1 13 + 0xa708a81e, // 1 14 + 0x454021d7, // 1 15 + }; + + protected Hash(final int i) { + abc[0] = abc[1] = HASH_CONST[i]; + abc[2] = 0; + } + + // protected Hash() { + // Hash(1); + // } + + private void mix() { + abc[0] -= abc[1]; + abc[0] -= abc[2]; + abc[0] ^= abc[2] >>> 13; + abc[1] -= abc[2]; + abc[1] -= abc[0]; + abc[1] ^= abc[0] << 8; + abc[2] -= abc[0]; + abc[2] -= abc[1]; + abc[2] ^= abc[1] >>> 13; + abc[0] -= abc[1]; + abc[0] -= abc[2]; + abc[0] ^= abc[2] >>> 12; + abc[1] -= abc[2]; + abc[1] -= abc[0]; + abc[1] ^= abc[0] << 16; + abc[2] -= abc[0]; + abc[2] -= abc[1]; + abc[2] ^= abc[1] >>> 5; + abc[0] -= abc[1]; + abc[0] -= abc[2]; + abc[0] ^= abc[2] >>> 3; + abc[1] -= abc[2]; + abc[1] -= abc[0]; + abc[1] ^= abc[0] << 10; + abc[2] -= abc[0]; + abc[2] -= abc[1]; + abc[2] ^= abc[1] >>> 15; + } + + protected void combine(final int a) { + abc[0] += a; + mix(); + } + + protected void combine(final long a) { + combine((int) (a >>> 32), (int) a); + } + + protected void combine(final int a, final int b) { + abc[0] += a; + abc[1] += b; + mix(); + } + + protected void combine(final byte b[]) { + int j, k; + for (j = 0, k = 0; j + 4 < b.length; j += 4, k += 1, k %= 3) { + abc[k] += (b[j + 0] & 0xFF) + (b[j + 1] << 8 & 0xFF00) + + (b[j + 2] << 16 & 0xFF0000) + (b[j + 3] << 24); + mix(); + } + for (int n = 0, m = 0xFF; j < b.length; j++, n += 8, m <<= 8) { + abc[k] += b[j] << n & m; + } + mix(); + } + + protected int valueOf() { + return abc[2]; + } } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangPid.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangPid.java index 4c9f5c78a3..0f6ba8c538 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangPid.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangPid.java @@ -1,24 +1,23 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; - /** * Provides a Java representation of Erlang PIDs. PIDs represent Erlang * processes and consist of a nodename and a number of integers. @@ -34,172 +33,170 @@ public class OtpErlangPid extends OtpErlangObject implements Comparable<Object> /** * Create a unique Erlang PID belonging to the local node. - * + * * @param self - * the local node. - * + * the local node. 
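The component constructor for OtpErlangPid masks its arguments to 15/13/2 bits, as documented in the hunk below; a small sketch of what that means in practice.

import com.ericsson.otp.erlang.*;

public class PidSketch {
    public static void main(String[] args) {
        OtpErlangPid pid = new OtpErlangPid("node@host", 1, 2, 3);
        System.out.println(pid);             // #Pid<node@host.1.2>
        System.out.println(pid.creation());  // 3 (the 2-bit mask keeps 3)

        // Only the low 15 bits of id survive: 0x18001 & 0x7fff == 1.
        System.out.println(new OtpErlangPid("node@host", 0x18001, 2, 3).id()); // 1
    }
}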
+ * * @deprecated use OtpLocalNode:createPid() instead */ @Deprecated public OtpErlangPid(final OtpLocalNode self) { - final OtpErlangPid p = self.createPid(); + final OtpErlangPid p = self.createPid(); - id = p.id; - serial = p.serial; - creation = p.creation; - node = p.node; + id = p.id; + serial = p.serial; + creation = p.creation; + node = p.node; } /** * Create an Erlang PID from a stream containing a PID encoded in Erlang * external format. - * + * * @param buf - * the stream containing the encoded PID. - * + * the stream containing the encoded PID. + * * @exception OtpErlangDecodeException - * if the buffer does not contain a valid external - * representation of an Erlang PID. + * if the buffer does not contain a valid external + * representation of an Erlang PID. */ public OtpErlangPid(final OtpInputStream buf) - throws OtpErlangDecodeException { - final OtpErlangPid p = buf.read_pid(); + throws OtpErlangDecodeException { + final OtpErlangPid p = buf.read_pid(); - node = p.node(); - id = p.id(); - serial = p.serial(); - creation = p.creation(); + node = p.node(); + id = p.id(); + serial = p.serial(); + creation = p.creation(); } /** * Create an Erlang pid from its components. - * + * * @param node - * the nodename. - * + * the nodename. + * * @param id - * an arbitrary number. Only the low order 15 bits will be - * used. - * + * an arbitrary number. Only the low order 15 bits will be used. + * * @param serial - * another arbitrary number. Only the low order 13 bits will - * be used. - * + * another arbitrary number. Only the low order 13 bits will be + * used. + * * @param creation - * yet another arbitrary number. Only the low order 2 bits - * will be used. + * yet another arbitrary number. Only the low order 2 bits will + * be used. */ public OtpErlangPid(final String node, final int id, final int serial, - final int creation) { - this.node = node; - this.id = id & 0x7fff; // 15 bits - this.serial = serial & 0x1fff; // 13 bits - this.creation = creation & 0x03; // 2 bits + final int creation) { + this.node = node; + this.id = id & 0x7fff; // 15 bits + this.serial = serial & 0x1fff; // 13 bits + this.creation = creation & 0x03; // 2 bits } /** * Get the serial number from the PID. - * + * * @return the serial number from the PID. */ public int serial() { - return serial; + return serial; } /** * Get the id number from the PID. - * + * * @return the id number from the PID. */ public int id() { - return id; + return id; } /** * Get the creation number from the PID. - * + * * @return the creation number from the PID. */ public int creation() { - return creation; + return creation; } /** * Get the node name from the PID. - * + * * @return the node name from the PID. */ public String node() { - return node; + return node; } /** * Get the string representation of the PID. Erlang PIDs are printed as * #Pid<node.id.serial> - * + * * @return the string representation of the PID. */ @Override public String toString() { - return "#Pid<" + node.toString() + "." + id + "." + serial + ">"; + return "#Pid<" + node.toString() + "." + id + "." + serial + ">"; } /** * Convert this PID to the equivalent Erlang external representation. - * + * * @param buf - * an output stream to which the encoded PID should be - * written. + * an output stream to which the encoded PID should be written. */ @Override public void encode(final OtpOutputStream buf) { - buf.write_pid(node, id, serial, creation); + buf.write_pid(node, id, serial, creation); } /** * Determine if two PIDs are equal. 
PIDs are equal if their components are * equal. - * + * * @param o - * the other PID to compare to. - * + * the other PID to compare to. + * * @return true if the PIDs are equal, false otherwise. */ @Override public boolean equals(final Object o) { - if (!(o instanceof OtpErlangPid)) { - return false; - } + if (!(o instanceof OtpErlangPid)) { + return false; + } - final OtpErlangPid pid = (OtpErlangPid) o; + final OtpErlangPid pid = (OtpErlangPid) o; - return creation == pid.creation && serial == pid.serial && id == pid.id - && node.compareTo(pid.node) == 0; + return creation == pid.creation && serial == pid.serial && id == pid.id + && node.compareTo(pid.node) == 0; } - + @Override protected int doHashCode() { - OtpErlangObject.Hash hash = new OtpErlangObject.Hash(5); - hash.combine(creation, serial); - hash.combine(id, node.hashCode()); - return hash.valueOf(); + final OtpErlangObject.Hash hash = new OtpErlangObject.Hash(5); + hash.combine(creation, serial); + hash.combine(id, node.hashCode()); + return hash.valueOf(); } - + public int compareTo(final Object o) { - if (!(o instanceof OtpErlangPid)) { - return -1; - } - - final OtpErlangPid pid = (OtpErlangPid) o; - if (creation == pid.creation) { - if (serial == pid.serial) { - if (id == pid.id) { - return node.compareTo(pid.node); + if (!(o instanceof OtpErlangPid)) { + return -1; + } + + final OtpErlangPid pid = (OtpErlangPid) o; + if (creation == pid.creation) { + if (serial == pid.serial) { + if (id == pid.id) { + return node.compareTo(pid.node); + } + return id - pid.id; + } + return serial - pid.serial; } - return id - pid.id; - } - return serial - pid.serial; - } - return creation - pid.creation; - } + return creation - pid.creation; } +} diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangPort.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangPort.java index 8557e17325..fc7345aaff 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangPort.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangPort.java @@ -1,24 +1,23 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; - /** * Provides a Java representation of Erlang ports. */ @@ -33,136 +32,134 @@ public class OtpErlangPort extends OtpErlangObject { /* * Create a unique Erlang port belonging to the local node. Since it isn't * meaninful to do so, this constructor is private... - * + * * @param self the local node. - * + * * @deprecated use OtpLocalNode:createPort() instead */ @SuppressWarnings("unused") private OtpErlangPort(final OtpSelf self) { - final OtpErlangPort p = self.createPort(); + final OtpErlangPort p = self.createPort(); - id = p.id; - creation = p.creation; - node = p.node; + id = p.id; + creation = p.creation; + node = p.node; } /** * Create an Erlang port from a stream containing a port encoded in Erlang * external format. 
- * + * * @param buf - * the stream containing the encoded port. - * + * the stream containing the encoded port. + * * @exception OtpErlangDecodeException - * if the buffer does not contain a valid external - * representation of an Erlang port. + * if the buffer does not contain a valid external + * representation of an Erlang port. */ public OtpErlangPort(final OtpInputStream buf) - throws OtpErlangDecodeException { - final OtpErlangPort p = buf.read_port(); + throws OtpErlangDecodeException { + final OtpErlangPort p = buf.read_port(); - node = p.node(); - id = p.id(); - creation = p.creation(); + node = p.node(); + id = p.id(); + creation = p.creation(); } /** * Create an Erlang port from its components. - * + * * @param node - * the nodename. - * + * the nodename. + * * @param id - * an arbitrary number. Only the low order 28 bits will be - * used. - * + * an arbitrary number. Only the low order 28 bits will be used. + * * @param creation - * another arbitrary number. Only the low order 2 bits will - * be used. + * another arbitrary number. Only the low order 2 bits will be + * used. */ public OtpErlangPort(final String node, final int id, final int creation) { - this.node = node; - this.id = id & 0xfffffff; // 28 bits - this.creation = creation & 0x03; // 2 bits + this.node = node; + this.id = id & 0xfffffff; // 28 bits + this.creation = creation & 0x03; // 2 bits } /** * Get the id number from the port. - * + * * @return the id number from the port. */ public int id() { - return id; + return id; } /** * Get the creation number from the port. - * + * * @return the creation number from the port. */ public int creation() { - return creation; + return creation; } /** * Get the node name from the port. - * + * * @return the node name from the port. */ public String node() { - return node; + return node; } /** * Get the string representation of the port. Erlang ports are printed as * #Port<node.id>. - * + * * @return the string representation of the port. */ @Override public String toString() { - return "#Port<" + node + "." + id + ">"; + return "#Port<" + node + "." + id + ">"; } /** * Convert this port to the equivalent Erlang external representation. - * + * * @param buf - * an output stream to which the encoded port should be - * written. + * an output stream to which the encoded port should be written. */ @Override public void encode(final OtpOutputStream buf) { - buf.write_port(node, id, creation); + buf.write_port(node, id, creation); } /** * Determine if two ports are equal. Ports are equal if their components are * equal. - * + * * @param o - * the other port to compare to. - * + * the other port to compare to. + * * @return true if the ports are equal, false otherwise. 
*/ @Override public boolean equals(final Object o) { - if (!(o instanceof OtpErlangPort)) { - return false; - } + if (!(o instanceof OtpErlangPort)) { + return false; + } - final OtpErlangPort port = (OtpErlangPort) o; + final OtpErlangPort port = (OtpErlangPort) o; - return creation == port.creation && id == port.id - && node.compareTo(port.node) == 0; + return creation == port.creation && id == port.id + && node.compareTo(port.node) == 0; } - + @Override protected int doHashCode() { - OtpErlangObject.Hash hash = new OtpErlangObject.Hash(6); - hash.combine(creation); - hash.combine(id, node.hashCode()); - return hash.valueOf(); + final OtpErlangObject.Hash hash = new OtpErlangObject.Hash(6); + hash.combine(creation); + hash.combine(id, node.hashCode()); + return hash.valueOf(); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangRangeException.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangRangeException.java index a78b6df6ef..21732717d3 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangRangeException.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangRangeException.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -21,7 +21,7 @@ package com.ericsson.otp.erlang; /** * Exception raised when an attempt is made to create an Erlang term with data * that is out of range for the term in question. - * + * * @see OtpErlangByte * @see OtpErlangChar * @see OtpErlangInt @@ -37,6 +37,6 @@ public class OtpErlangRangeException extends OtpErlangException { * Provides a detailed message. */ public OtpErlangRangeException(final String msg) { - super(msg); + super(msg); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangRef.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangRef.java index 13a83333fa..f8031fb2e6 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangRef.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangRef.java @@ -1,24 +1,23 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; - /** * Provides a Java representation of Erlang refs. 
There are two styles of Erlang * refs, old style (one id value) and new style (array of id values). This class @@ -37,203 +36,201 @@ public class OtpErlangRef extends OtpErlangObject { /** * Create a unique Erlang ref belonging to the local node. - * + * * @param self - * the local node. - * + * the local node. + * * @deprecated use OtpLocalNode:createRef() instead */ @Deprecated public OtpErlangRef(final OtpLocalNode self) { - final OtpErlangRef r = self.createRef(); + final OtpErlangRef r = self.createRef(); - ids = r.ids; - creation = r.creation; - node = r.node; + ids = r.ids; + creation = r.creation; + node = r.node; } /** * Create an Erlang ref from a stream containing a ref encoded in Erlang * external format. - * + * * @param buf - * the stream containing the encoded ref. - * + * the stream containing the encoded ref. + * * @exception OtpErlangDecodeException - * if the buffer does not contain a valid external - * representation of an Erlang ref. + * if the buffer does not contain a valid external + * representation of an Erlang ref. */ public OtpErlangRef(final OtpInputStream buf) - throws OtpErlangDecodeException { - final OtpErlangRef r = buf.read_ref(); + throws OtpErlangDecodeException { + final OtpErlangRef r = buf.read_ref(); - node = r.node(); - creation = r.creation(); + node = r.node(); + creation = r.creation(); - ids = r.ids(); + ids = r.ids(); } /** * Create an old style Erlang ref from its components. - * + * * @param node - * the nodename. - * + * the nodename. + * * @param id - * an arbitrary number. Only the low order 18 bits will be - * used. - * + * an arbitrary number. Only the low order 18 bits will be used. + * * @param creation - * another arbitrary number. Only the low order 2 bits will - * be used. + * another arbitrary number. Only the low order 2 bits will be + * used. */ public OtpErlangRef(final String node, final int id, final int creation) { - this.node = node; - ids = new int[1]; - ids[0] = id & 0x3ffff; // 18 bits - this.creation = creation & 0x03; // 2 bits + this.node = node; + ids = new int[1]; + ids[0] = id & 0x3ffff; // 18 bits + this.creation = creation & 0x03; // 2 bits } /** * Create a new style Erlang ref from its components. - * + * * @param node - * the nodename. - * + * the nodename. + * * @param ids - * an array of arbitrary numbers. Only the low order 18 bits - * of the first number will be used. If the array contains - * only one number, an old style ref will be written instead. - * At most three numbers will be read from the array. - * + * an array of arbitrary numbers. Only the low order 18 bits of + * the first number will be used. If the array contains only one + * number, an old style ref will be written instead. At most + * three numbers will be read from the array. + * * @param creation - * another arbitrary number. Only the low order 2 bits will - * be used. + * another arbitrary number. Only the low order 2 bits will be + * used. 
*/ public OtpErlangRef(final String node, final int[] ids, final int creation) { - this.node = node; - this.creation = creation & 0x03; // 2 bits + this.node = node; + this.creation = creation & 0x03; // 2 bits - // use at most 82 bits (18 + 32 + 32) - int len = ids.length; - this.ids = new int[3]; - this.ids[0] = 0; - this.ids[1] = 0; - this.ids[2] = 0; + // use at most 82 bits (18 + 32 + 32) + int len = ids.length; + this.ids = new int[3]; + this.ids[0] = 0; + this.ids[1] = 0; + this.ids[2] = 0; - if (len > 3) { - len = 3; - } - System.arraycopy(ids, 0, this.ids, 0, len); - this.ids[0] &= 0x3ffff; // only 18 significant bits in first number + if (len > 3) { + len = 3; + } + System.arraycopy(ids, 0, this.ids, 0, len); + this.ids[0] &= 0x3ffff; // only 18 significant bits in first number } /** * Get the id number from the ref. Old style refs have only one id number. * If this is a new style ref, the first id number is returned. - * + * * @return the id number from the ref. */ public int id() { - return ids[0]; + return ids[0]; } /** * Get the array of id numbers from the ref. If this is an old style ref, * the array is of length 1. If this is a new style ref, the array has * length 3. - * + * * @return the array of id numbers from the ref. */ public int[] ids() { - return ids; + return ids; } /** * Determine whether this is a new style ref. - * + * * @return true if this ref is a new style ref, false otherwise. */ public boolean isNewRef() { - return ids.length > 1; + return ids.length > 1; } /** * Get the creation number from the ref. - * + * * @return the creation number from the ref. */ public int creation() { - return creation; + return creation; } /** * Get the node name from the ref. - * + * * @return the node name from the ref. */ public String node() { - return node; + return node; } /** * Get the string representation of the ref. Erlang refs are printed as * #Ref<node.id> - * + * * @return the string representation of the ref. */ @Override public String toString() { - String s = "#Ref<" + node; + String s = "#Ref<" + node; - for (int i = 0; i < ids.length; i++) { - s += "." + ids[i]; - } + for (int i = 0; i < ids.length; i++) { + s += "." + ids[i]; + } - s += ">"; + s += ">"; - return s; + return s; } /** * Convert this ref to the equivalent Erlang external representation. - * + * * @param buf - * an output stream to which the encoded ref should be - * written. + * an output stream to which the encoded ref should be written. */ @Override public void encode(final OtpOutputStream buf) { - buf.write_ref(node, ids, creation); + buf.write_ref(node, ids, creation); } /** * Determine if two refs are equal. Refs are equal if their components are * equal. New refs and old refs are considered equal if the node, creation * and first id numnber are equal. - * + * * @param o - * the other ref to compare to. - * + * the other ref to compare to. + * * @return true if the refs are equal, false otherwise. 
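A short sketch contrasting the old-style and new-style ref constructors above; the node name and id values are arbitrary and only illustrate isNewRef() and the 18-bit mask on the first id.

import com.ericsson.otp.erlang.OtpErlangRef;

public class RefSketch {
    public static void main(String[] args) {
        OtpErlangRef oldRef = new OtpErlangRef("gurka@localhost", 17, 0);
        OtpErlangRef newRef = new OtpErlangRef("gurka@localhost",
                new int[] { 0x7ffff, 42, 4711 }, 0);
        System.out.println(oldRef.isNewRef());   // false: a single id value
        System.out.println(newRef.isNewRef());   // true: three id values
        System.out.println(newRef.id());         // 262143 (0x3ffff): only 18 bits of the first id survive
        System.out.println(newRef.ids().length); // 3
    }
}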
*/ @Override public boolean equals(final Object o) { - if (!(o instanceof OtpErlangRef)) { - return false; - } + if (!(o instanceof OtpErlangRef)) { + return false; + } - final OtpErlangRef ref = (OtpErlangRef) o; + final OtpErlangRef ref = (OtpErlangRef) o; - if (!(node.equals(ref.node()) && creation == ref.creation())) { - return false; - } + if (!(node.equals(ref.node()) && creation == ref.creation())) { + return false; + } - if (isNewRef() && ref.isNewRef()) { - return ids[0] == ref.ids[0] && ids[1] == ref.ids[1] - && ids[2] == ref.ids[2]; - } - return ids[0] == ref.ids[0]; + if (isNewRef() && ref.isNewRef()) { + return ids[0] == ref.ids[0] && ids[1] == ref.ids[1] + && ids[2] == ref.ids[2]; + } + return ids[0] == ref.ids[0]; } /** @@ -245,18 +242,18 @@ public class OtpErlangRef extends OtpErlangObject { @Override protected int doHashCode() { - OtpErlangObject.Hash hash = new OtpErlangObject.Hash(7); - hash.combine(creation, ids[0]); - if (isNewRef()) { - hash.combine(ids[1], ids[2]); - } - return hash.valueOf(); + final OtpErlangObject.Hash hash = new OtpErlangObject.Hash(7); + hash.combine(creation, ids[0]); + if (isNewRef()) { + hash.combine(ids[1], ids[2]); + } + return hash.valueOf(); } - + @Override public Object clone() { - final OtpErlangRef newRef = (OtpErlangRef) super.clone(); - newRef.ids = ids.clone(); - return newRef; + final OtpErlangRef newRef = (OtpErlangRef) super.clone(); + newRef.ids = ids.clone(); + return newRef; } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangShort.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangShort.java index 6ef56defbd..0083066141 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangShort.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangShort.java @@ -1,24 +1,23 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; - /** * Provides a Java representation of Erlang integral types. */ @@ -28,34 +27,33 @@ public class OtpErlangShort extends OtpErlangLong { /** * Create an Erlang integer from the given value. - * + * * @param s - * the short value to use. + * the short value to use. */ public OtpErlangShort(final short s) { - super(s); + super(s); } /** * Create an Erlang integer from a stream containing an integer encoded in * Erlang external format. - * + * * @param buf - * the stream containing the encoded value. - * + * the stream containing the encoded value. + * * @exception OtpErlangDecodeException - * if the buffer does not contain a valid external - * representation of an Erlang integer. - * + * if the buffer does not contain a valid external + * representation of an Erlang integer. + * * @exception OtpErlangRangeException - * if the value is too large to be represented as a - * short. + * if the value is too large to be represented as a short. 
*/ public OtpErlangShort(final OtpInputStream buf) - throws OtpErlangRangeException, OtpErlangDecodeException { - super(buf); + throws OtpErlangRangeException, OtpErlangDecodeException { + super(buf); - shortValue(); + shortValue(); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangString.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangString.java index 1bccfcc567..9e5450ca75 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangString.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangString.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2012. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -33,75 +33,74 @@ public class OtpErlangString extends OtpErlangObject { * Create an Erlang string from the given string. */ public OtpErlangString(final String str) { - this.str = str; + this.str = str; } /** * Create an Erlang string from a list of integers. * * @throws OtpErlangException - * for non-proper and non-integer lists. + * for non-proper and non-integer lists. * @throws OtpErlangRangeException - * if an integer in the list is not - * a valid Unicode code point according to Erlang. + * if an integer in the list is not a valid Unicode code point + * according to Erlang. */ - public OtpErlangString(final OtpErlangList list) - throws OtpErlangException { - String s = list.stringValue(); - final int n = s.length(); - for (int i = 0; i < n; i = s.offsetByCodePoints(i, 1)) { - int cp = s.codePointAt(i); - if (! isValidCodePoint(cp)) { - throw new OtpErlangRangeException("Invalid CodePoint: " + cp); - } - } - str = s; + public OtpErlangString(final OtpErlangList list) throws OtpErlangException { + final String s = list.stringValue(); + final int n = s.length(); + for (int i = 0; i < n; i = s.offsetByCodePoints(i, 1)) { + final int cp = s.codePointAt(i); + if (!isValidCodePoint(cp)) { + throw new OtpErlangRangeException("Invalid CodePoint: " + cp); + } + } + str = s; } /** * Create an Erlang string from a stream containing a string encoded in * Erlang external format. - * + * * @param buf * the stream containing the encoded string. - * + * * @exception OtpErlangDecodeException * if the buffer does not contain a valid external * representation of an Erlang string. */ public OtpErlangString(final OtpInputStream buf) - throws OtpErlangDecodeException { - str = buf.read_string(); + throws OtpErlangDecodeException { + str = buf.read_string(); } /** * Get the actual string contained in this object. - * + * * @return the raw string contained in this object, without regard to Erlang * quoting rules. - * + * * @see #toString */ public String stringValue() { - return str; + return str; } /** * Get the printable version of the string contained in this object. - * + * * @return the string contained in this object, quoted. 
- * + * * @see #stringValue */ @Override public String toString() { - return "\"" + str + "\""; + return "\"" + str + "\""; } /** * Convert this string to the equivalent Erlang external representation. - * + * * @param buf * an output stream to which the encoded string should be * written. @@ -109,48 +108,48 @@ public class OtpErlangString extends OtpErlangObject { @Override public void encode(final OtpOutputStream buf) { - buf.write_string(str); + buf.write_string(str); } /** * Determine if two strings are equal. They are equal if they represent the * same sequence of characters. This method can be used to compare * OtpErlangStrings with each other and with Strings. - * + * * @param o * the OtpErlangString or String to compare to. - * + * * @return true if the strings consist of the same sequence of characters, * false otherwise. */ @Override public boolean equals(final Object o) { - if (o instanceof String) { - return str.compareTo((String) o) == 0; - } else if (o instanceof OtpErlangString) { - return str.compareTo(((OtpErlangString) o).str) == 0; - } + if (o instanceof String) { + return str.compareTo((String) o) == 0; + } else if (o instanceof OtpErlangString) { + return str.compareTo(((OtpErlangString) o).str) == 0; + } - return false; + return false; } - + @Override protected int doHashCode() { - return str.hashCode(); + return str.hashCode(); } /** * Create Unicode code points from a String. - * - * @param s - * a String to convert to an Unicode code point array * - * @return the corresponding array of integers representing - * Unicode code points + * @param s + * a String to convert to an Unicode code point array + * + * @return the corresponding array of integers representing Unicode code + * points */ - public static int[] stringToCodePoints(final String s) { + public static int[] stringToCodePoints(final String s) { final int m = s.codePointCount(0, s.length()); final int[] codePoints = new int[m]; int j = 0; @@ -163,34 +162,34 @@ public class OtpErlangString extends OtpErlangObject { } /** - * Validate a code point according to Erlang definition; Unicode 3.0. - * That is; valid in the range U+0..U+10FFFF, but not in the range - * U+D800..U+DFFF (surrogat pairs). + * Validate a code point according to Erlang definition; Unicode 3.0. That + * is; valid in the range U+0..U+10FFFF, but not in the range U+D800..U+DFFF + * (surrogat pairs). * - * @param cp - * the code point value to validate + * @param cp + * the code point value to validate * - * @return true if the code point is valid, - * false otherwise. + * @return true if the code point is valid, false otherwise. */ public static boolean isValidCodePoint(final int cp) { - // Erlang definition of valid Unicode code points; - // Unicode 3.0, XML, et.al. - return (cp>>>16) <= 0x10 // in 0..10FFFF; Unicode range - && (cp & ~0x7FF) != 0xD800; // not in D800..DFFF; surrogate range + // Erlang definition of valid Unicode code points; + // Unicode 3.0, XML, et.al. + return cp >>> 16 <= 0x10 // in 0..10FFFF; Unicode range + && (cp & ~0x7FF) != 0xD800; // not in D800..DFFF; surrogate + // range } /** - * Construct a String from a Latin-1 (ISO-8859-1) encoded byte array, - * if Latin-1 is available, otherwise use the default encoding. + * Construct a String from a Latin-1 (ISO-8859-1) encoded byte array, if + * Latin-1 is available, otherwise use the default encoding. 
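A small sketch of the two static helpers above; the sample code points are arbitrary and only show which values pass the Unicode 3.0 range check.

import com.ericsson.otp.erlang.OtpErlangString;

public class CodePointSketch {
    public static void main(String[] args) {
        System.out.println(OtpErlangString.isValidCodePoint(0x1F600)); // true: inside U+0..U+10FFFF
        System.out.println(OtpErlangString.isValidCodePoint(0xD800));  // false: surrogate range
        int[] cps = OtpErlangString.stringToCodePoints("ok");
        System.out.println(cps.length + " " + cps[0] + " " + cps[1]);  // 2 111 107
    }
}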
* */ public static String newString(final byte[] bytes) { - try { - return new String(bytes, "ISO-8859-1"); - } catch (final UnsupportedEncodingException e) { - } - return new String(bytes); + try { + return new String(bytes, "ISO-8859-1"); + } catch (final UnsupportedEncodingException e) { + } + return new String(bytes); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangTuple.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangTuple.java index dffaa530cd..af2559e62e 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangTuple.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangTuple.java @@ -1,28 +1,27 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2013. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; - /** * Provides a Java representation of Erlang tuples. Tuples are created from one * or more arbitrary Erlang terms. - * + * * <p> * The arity of the tuple is the number of elements it contains. Elements are * indexed from 0 to (arity-1) and can be retrieved individually by using the @@ -38,222 +37,219 @@ public class OtpErlangTuple extends OtpErlangObject { /** * Create a unary tuple containing the given element. - * + * * @param elem - * the element to create the tuple from. - * + * the element to create the tuple from. + * * @exception java.lang.IllegalArgumentException - * if the element is null. + * if the element is null. */ public OtpErlangTuple(final OtpErlangObject elem) { - if (elem == null) { - throw new java.lang.IllegalArgumentException( - "Tuple element cannot be null"); - } - elems = new OtpErlangObject[] { elem }; + if (elem == null) { + throw new java.lang.IllegalArgumentException( + "Tuple element cannot be null"); + } + elems = new OtpErlangObject[] { elem }; } /** * Create a tuple from an array of terms. - * + * * @param elems - * the array of terms to create the tuple from. - * + * the array of terms to create the tuple from. + * * @exception java.lang.IllegalArgumentException - * if the array is empty (null) or contains null - * elements. + * if the array is empty (null) or contains null elements. */ public OtpErlangTuple(final OtpErlangObject[] elems) { - this(elems, 0, elems.length); + this(elems, 0, elems.length); } /** * Create a tuple from an array of terms. - * + * * @param elems - * the array of terms to create the tuple from. + * the array of terms to create the tuple from. * @param start - * the offset of the first term to insert. + * the offset of the first term to insert. * @param count - * the number of terms to insert. - * + * the number of terms to insert. + * * @exception java.lang.IllegalArgumentException - * if the array is empty (null) or contains null - * elements. + * if the array is empty (null) or contains null elements. 
*/ public OtpErlangTuple(final OtpErlangObject[] elems, final int start, - final int count) { - if (elems == null) { - throw new java.lang.IllegalArgumentException( - "Tuple content can't be null"); - } else if (count < 1) { - this.elems = NO_ELEMENTS; - } else { - this.elems = new OtpErlangObject[count]; - for (int i = 0; i < count; i++) { - if (elems[start + i] != null) { - this.elems[i] = elems[start + i]; - } else { - throw new java.lang.IllegalArgumentException( - "Tuple element cannot be null (element" - + (start + i) + ")"); - } - } - } + final int count) { + if (elems == null) { + throw new java.lang.IllegalArgumentException( + "Tuple content can't be null"); + } else if (count < 1) { + this.elems = NO_ELEMENTS; + } else { + this.elems = new OtpErlangObject[count]; + for (int i = 0; i < count; i++) { + if (elems[start + i] != null) { + this.elems[i] = elems[start + i]; + } else { + throw new java.lang.IllegalArgumentException( + "Tuple element cannot be null (element" + + (start + i) + ")"); + } + } + } } /** * Create a tuple from a stream containing an tuple encoded in Erlang * external format. - * + * * @param buf - * the stream containing the encoded tuple. - * + * the stream containing the encoded tuple. + * * @exception OtpErlangDecodeException - * if the buffer does not contain a valid external - * representation of an Erlang tuple. + * if the buffer does not contain a valid external + * representation of an Erlang tuple. */ public OtpErlangTuple(final OtpInputStream buf) - throws OtpErlangDecodeException { - final int arity = buf.read_tuple_head(); - - if (arity > 0) { - elems = new OtpErlangObject[arity]; - - for (int i = 0; i < arity; i++) { - elems[i] = buf.read_any(); - } - } else { - elems = NO_ELEMENTS; - } + throws OtpErlangDecodeException { + final int arity = buf.read_tuple_head(); + + if (arity > 0) { + elems = new OtpErlangObject[arity]; + + for (int i = 0; i < arity; i++) { + elems[i] = buf.read_any(); + } + } else { + elems = NO_ELEMENTS; + } } /** * Get the arity of the tuple. - * + * * @return the number of elements contained in the tuple. */ public int arity() { - return elems.length; + return elems.length; } /** * Get the specified element from the tuple. - * + * * @param i - * the index of the requested element. Tuple elements are - * numbered as array elements, starting at 0. - * + * the index of the requested element. Tuple elements are + * numbered as array elements, starting at 0. + * * @return the requested element, of null if i is not a valid element index. */ public OtpErlangObject elementAt(final int i) { - if (i >= arity() || i < 0) { - return null; - } - return elems[i]; + if (i >= arity() || i < 0) { + return null; + } + return elems[i]; } /** * Get all the elements from the tuple as an array. - * + * * @return an array containing all of the tuple's elements. */ public OtpErlangObject[] elements() { - final OtpErlangObject[] res = new OtpErlangObject[arity()]; - System.arraycopy(elems, 0, res, 0, res.length); - return res; + final OtpErlangObject[] res = new OtpErlangObject[arity()]; + System.arraycopy(elems, 0, res, 0, res.length); + return res; } /** * Get the string representation of the tuple. - * + * * @return the string representation of the tuple. 
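A brief sketch of building a tuple from an array of terms and reading it back with arity() and elementAt(); OtpErlangAtom is assumed from the same package, as it is not touched by this diff.

import com.ericsson.otp.erlang.*;

public class TupleSketch {
    public static void main(String[] args) {
        OtpErlangObject[] elems = {
            new OtpErlangAtom("ok"),        // assumed constructor from the same package
            new OtpErlangString("hello")
        };
        OtpErlangTuple t = new OtpErlangTuple(elems);
        System.out.println(t.arity());      // 2
        System.out.println(t.elementAt(0)); // ok
        System.out.println(t.elementAt(5)); // null: out-of-range indexes return null
        System.out.println(t);              // {ok,"hello"}
    }
}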
*/ @Override public String toString() { - int i; - final StringBuffer s = new StringBuffer(); - final int arity = elems.length; + int i; + final StringBuffer s = new StringBuffer(); + final int arity = elems.length; - s.append("{"); + s.append("{"); - for (i = 0; i < arity; i++) { - if (i > 0) { - s.append(","); - } - s.append(elems[i].toString()); - } + for (i = 0; i < arity; i++) { + if (i > 0) { + s.append(","); + } + s.append(elems[i].toString()); + } - s.append("}"); + s.append("}"); - return s.toString(); + return s.toString(); } /** * Convert this tuple to the equivalent Erlang external representation. - * + * * @param buf - * an output stream to which the encoded tuple should be - * written. + * an output stream to which the encoded tuple should be written. */ @Override public void encode(final OtpOutputStream buf) { - final int arity = elems.length; + final int arity = elems.length; - buf.write_tuple_head(arity); + buf.write_tuple_head(arity); - for (int i = 0; i < arity; i++) { - buf.write_any(elems[i]); - } + for (int i = 0; i < arity; i++) { + buf.write_any(elems[i]); + } } /** * Determine if two tuples are equal. Tuples are equal if they have the same * arity and all of the elements are equal. - * + * * @param o - * the tuple to compare to. - * + * the tuple to compare to. + * * @return true if the tuples have the same arity and all the elements are * equal. */ @Override public boolean equals(final Object o) { - if (!(o instanceof OtpErlangTuple)) { - return false; - } + if (!(o instanceof OtpErlangTuple)) { + return false; + } - final OtpErlangTuple t = (OtpErlangTuple) o; - final int a = arity(); + final OtpErlangTuple t = (OtpErlangTuple) o; + final int a = arity(); - if (a != t.arity()) { - return false; - } + if (a != t.arity()) { + return false; + } - for (int i = 0; i < a; i++) { - if (!elems[i].equals(t.elems[i])) { - return false; // early exit - } - } + for (int i = 0; i < a; i++) { + if (!elems[i].equals(t.elems[i])) { + return false; // early exit + } + } - return true; + return true; } - + @Override protected int doHashCode() { - OtpErlangObject.Hash hash = new OtpErlangObject.Hash(9); - final int a = arity(); - hash.combine(a); - for (int i = 0; i < a; i++) { - hash.combine(elems[i].hashCode()); - } - return hash.valueOf(); + final OtpErlangObject.Hash hash = new OtpErlangObject.Hash(9); + final int a = arity(); + hash.combine(a); + for (int i = 0; i < a; i++) { + hash.combine(elems[i].hashCode()); + } + return hash.valueOf(); } - + @Override public Object clone() { - final OtpErlangTuple newTuple = (OtpErlangTuple) super.clone(); - newTuple.elems = elems.clone(); - return newTuple; + final OtpErlangTuple newTuple = (OtpErlangTuple) super.clone(); + newTuple.elems = elems.clone(); + return newTuple; } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangUInt.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangUInt.java index a02996e437..f45cce87b2 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangUInt.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangUInt.java @@ -1,24 +1,23 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. 
- * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; - /** * Provides a Java representation of Erlang integral types. */ @@ -28,38 +27,38 @@ public class OtpErlangUInt extends OtpErlangLong { /** * Create an Erlang integer from the given value. - * + * * @param i - * the non-negative int value to use. - * + * the non-negative int value to use. + * * @exception OtpErlangRangeException - * if the value is negative. + * if the value is negative. */ public OtpErlangUInt(final int i) throws OtpErlangRangeException { - super(i); + super(i); - uIntValue(); + uIntValue(); } /** * Create an Erlang integer from a stream containing an integer encoded in * Erlang external format. - * + * * @param buf - * the stream containing the encoded value. - * + * the stream containing the encoded value. + * * @exception OtpErlangDecodeException - * if the buffer does not contain a valid external - * representation of an Erlang integer. - * + * if the buffer does not contain a valid external + * representation of an Erlang integer. + * * @exception OtpErlangRangeException - * if the value is too large to be represented as an int, - * or the value is negative. + * if the value is too large to be represented as an int, or + * the value is negative. */ public OtpErlangUInt(final OtpInputStream buf) - throws OtpErlangRangeException, OtpErlangDecodeException { - super(buf); + throws OtpErlangRangeException, OtpErlangDecodeException { + super(buf); - uIntValue(); + uIntValue(); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangUShort.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangUShort.java index e9d251f815..96f6ed807b 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangUShort.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangUShort.java @@ -1,24 +1,23 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; - /** * Provides a Java representation of Erlang integral types. */ @@ -28,38 +27,38 @@ public class OtpErlangUShort extends OtpErlangLong { /** * Create an Erlang integer from the given value. - * + * * @param s - * the non-negative short value to use. - * + * the non-negative short value to use. + * * @exception OtpErlangRangeException - * if the value is negative. + * if the value is negative. */ public OtpErlangUShort(final short s) throws OtpErlangRangeException { - super(s); + super(s); - uShortValue(); + uShortValue(); } /** * Create an Erlang integer from a stream containing an integer encoded in * Erlang external format. - * + * * @param buf - * the stream containing the encoded value. 
- * + * the stream containing the encoded value. + * * @exception OtpErlangDecodeException - * if the buffer does not contain a valid external - * representation of an Erlang integer. - * + * if the buffer does not contain a valid external + * representation of an Erlang integer. + * * @exception OtpErlangRangeException - * if the value is too large to be represented as a - * short, or the value is negative. + * if the value is too large to be represented as a short, or + * the value is negative. */ public OtpErlangUShort(final OtpInputStream buf) - throws OtpErlangRangeException, OtpErlangDecodeException { - super(buf); + throws OtpErlangRangeException, OtpErlangDecodeException { + super(buf); - uShortValue(); + uShortValue(); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpException.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpException.java index 874c7da104..0a8323c635 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpException.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpException.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -28,13 +28,13 @@ public abstract class OtpException extends Exception { * Provides no message. */ public OtpException() { - super(); + super(); } /** * Provides a detailed message. */ public OtpException(final String msg) { - super(msg); + super(msg); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpExternal.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpExternal.java index fa0fe18e95..eeb40462dc 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpExternal.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpExternal.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2013. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpInputStream.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpInputStream.java index bab0629382..2762c83494 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpInputStream.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpInputStream.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2013. 
All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -25,7 +25,7 @@ import java.util.Arrays; /** * Provides a stream for decoding Erlang terms from external format. - * + * * <p> * Note that this class is not synchronized, if you need synchronization you * must provide it yourself. @@ -40,1211 +40,1211 @@ public class OtpInputStream extends ByteArrayInputStream { * @param buf */ public OtpInputStream(final byte[] buf) { - this(buf, 0); + this(buf, 0); } /** * Create a stream from a buffer containing encoded Erlang terms. - * + * * @param flags */ public OtpInputStream(final byte[] buf, final int flags) { - super(buf); - this.flags = flags; + super(buf); + this.flags = flags; } /** * Create a stream from a buffer containing encoded Erlang terms at the * given offset and length. - * + * * @param flags */ public OtpInputStream(final byte[] buf, final int offset, final int length, - final int flags) { - super(buf, offset, length); - this.flags = flags; + final int flags) { + super(buf, offset, length); + this.flags = flags; } /** * Get the current position in the stream. - * + * * @return the current position in the stream. */ public int getPos() { - return super.pos; + return super.pos; } /** * Set the current position in the stream. - * + * * @param pos * the position to move to in the stream. If pos indicates a * position beyond the end of the stream, the position is move to * the end of the stream instead. If pos is negative, the * position is moved to the beginning of the stream instead. - * + * * @return the previous position in the stream. */ public int setPos(final int pos) { - final int oldpos = super.pos; + final int oldpos = super.pos; - int apos = pos; - if (pos > super.count) { - apos = super.count; - } else if (pos < 0) { - apos = 0; - } + int apos = pos; + if (pos > super.count) { + apos = super.count; + } else if (pos < 0) { + apos = 0; + } - super.pos = apos; + super.pos = apos; - return oldpos; + return oldpos; } /** * Read an array of bytes from the stream. The method reads at most * buf.length bytes from the input stream. - * + * * @return the number of bytes read. - * + * * @exception OtpErlangDecodeException * if the next byte cannot be read. */ public int readN(final byte[] abuf) throws OtpErlangDecodeException { - return this.readN(abuf, 0, abuf.length); + return this.readN(abuf, 0, abuf.length); } /** * Read an array of bytes from the stream. The method reads at most len * bytes from the input stream into offset off of the buffer. - * + * * @return the number of bytes read. - * + * * @exception OtpErlangDecodeException * if the next byte cannot be read. 
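A minimal decoding sketch for the big-endian reader documented above, using a hand-filled buffer rather than real wire data.

import com.ericsson.otp.erlang.OtpInputStream;

public class Read2BESketch {
    public static void main(String[] args) throws Exception {
        byte[] data = { (byte) 0x12, (byte) 0x34, (byte) 0xAB, (byte) 0xCD };
        OtpInputStream in = new OtpInputStream(data);
        System.out.println(Integer.toHexString(in.read2BE())); // 1234
        System.out.println(Integer.toHexString(in.read2BE())); // abcd
        System.out.println(in.getPos());                       // 4: all bytes consumed
    }
}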
*/ public int readN(final byte[] abuf, final int off, final int len) - throws OtpErlangDecodeException { - if (len == 0 && available() == 0) { - return 0; - } - final int i = super.read(abuf, off, len); - if (i < 0) { - throw new OtpErlangDecodeException("Cannot read from input stream"); - } - return i; + throws OtpErlangDecodeException { + if (len == 0 && available() == 0) { + return 0; + } + final int i = super.read(abuf, off, len); + if (i < 0) { + throw new OtpErlangDecodeException("Cannot read from input stream"); + } + return i; } /** * Alias for peek1() */ public int peek() throws OtpErlangDecodeException { - return peek1(); + return peek1(); } /** * Look ahead one position in the stream without consuming the byte found * there. - * + * * @return the next byte in the stream, as an integer. - * + * * @exception OtpErlangDecodeException * if the next byte cannot be read. */ public int peek1() throws OtpErlangDecodeException { - int i; - try { - i = super.buf[super.pos]; - if (i < 0) { - i += 256; - } - - return i; - } catch (final Exception e) { - throw new OtpErlangDecodeException("Cannot read from input stream"); - } + int i; + try { + i = super.buf[super.pos]; + if (i < 0) { + i += 256; + } + + return i; + } catch (final Exception e) { + throw new OtpErlangDecodeException("Cannot read from input stream"); + } } public int peek1skip_version() throws OtpErlangDecodeException { - int i = peek1(); - if (i == OtpExternal.versionTag) { - read1(); - i = peek1(); - } - return i; + int i = peek1(); + if (i == OtpExternal.versionTag) { + read1(); + i = peek1(); + } + return i; } /** * Read a one byte integer from the stream. - * + * * @return the byte read, as an integer. - * + * * @exception OtpErlangDecodeException * if the next byte cannot be read. */ public int read1() throws OtpErlangDecodeException { - int i; - i = super.read(); + int i; + i = super.read(); - if (i < 0) { - throw new OtpErlangDecodeException("Cannot read from input stream"); - } + if (i < 0) { + throw new OtpErlangDecodeException("Cannot read from input stream"); + } - return i; + return i; } public int read1skip_version() throws OtpErlangDecodeException { - int tag = read1(); - if (tag == OtpExternal.versionTag) { - tag = read1(); - } - return tag; + int tag = read1(); + if (tag == OtpExternal.versionTag) { + tag = read1(); + } + return tag; } /** * Read a two byte big endian integer from the stream. - * + * * @return the bytes read, converted from big endian to an integer. - * + * * @exception OtpErlangDecodeException * if the next byte cannot be read. */ public int read2BE() throws OtpErlangDecodeException { - final byte[] b = new byte[2]; - try { - super.read(b); - } catch (final IOException e) { - throw new OtpErlangDecodeException("Cannot read from input stream"); - } - return (b[0] << 8 & 0xff00) + (b[1] & 0xff); + final byte[] b = new byte[2]; + try { + super.read(b); + } catch (final IOException e) { + throw new OtpErlangDecodeException("Cannot read from input stream"); + } + return (b[0] << 8 & 0xff00) + (b[1] & 0xff); } /** * Read a four byte big endian integer from the stream. - * + * * @return the bytes read, converted from big endian to an integer. - * + * * @exception OtpErlangDecodeException * if the next byte cannot be read. 
*/ public int read4BE() throws OtpErlangDecodeException { - final byte[] b = new byte[4]; - try { - super.read(b); - } catch (final IOException e) { - throw new OtpErlangDecodeException("Cannot read from input stream"); - } - return (b[0] << 24 & 0xff000000) + (b[1] << 16 & 0xff0000) - + (b[2] << 8 & 0xff00) + (b[3] & 0xff); + final byte[] b = new byte[4]; + try { + super.read(b); + } catch (final IOException e) { + throw new OtpErlangDecodeException("Cannot read from input stream"); + } + return (b[0] << 24 & 0xff000000) + (b[1] << 16 & 0xff0000) + + (b[2] << 8 & 0xff00) + (b[3] & 0xff); } /** * Read a two byte little endian integer from the stream. - * + * * @return the bytes read, converted from little endian to an integer. - * + * * @exception OtpErlangDecodeException * if the next byte cannot be read. */ public int read2LE() throws OtpErlangDecodeException { - final byte[] b = new byte[2]; - try { - super.read(b); - } catch (final IOException e) { - throw new OtpErlangDecodeException("Cannot read from input stream"); - } - return (b[1] << 8 & 0xff00) + (b[0] & 0xff); + final byte[] b = new byte[2]; + try { + super.read(b); + } catch (final IOException e) { + throw new OtpErlangDecodeException("Cannot read from input stream"); + } + return (b[1] << 8 & 0xff00) + (b[0] & 0xff); } /** * Read a four byte little endian integer from the stream. - * + * * @return the bytes read, converted from little endian to an integer. - * + * * @exception OtpErlangDecodeException * if the next byte cannot be read. */ public int read4LE() throws OtpErlangDecodeException { - final byte[] b = new byte[4]; - try { - super.read(b); - } catch (final IOException e) { - throw new OtpErlangDecodeException("Cannot read from input stream"); - } - return (b[3] << 24 & 0xff000000) + (b[2] << 16 & 0xff0000) - + (b[1] << 8 & 0xff00) + (b[0] & 0xff); + final byte[] b = new byte[4]; + try { + super.read(b); + } catch (final IOException e) { + throw new OtpErlangDecodeException("Cannot read from input stream"); + } + return (b[3] << 24 & 0xff000000) + (b[2] << 16 & 0xff0000) + + (b[1] << 8 & 0xff00) + (b[0] & 0xff); } /** * Read a little endian integer from the stream. - * + * * @param n * the number of bytes to read - * + * * @return the bytes read, converted from little endian to an integer. - * + * * @exception OtpErlangDecodeException * if the next byte cannot be read. */ public long readLE(final int n) throws OtpErlangDecodeException { - final byte[] b = new byte[n]; - try { - super.read(b); - } catch (final IOException e) { - throw new OtpErlangDecodeException("Cannot read from input stream"); - } - long v = 0; - int i = n; - while (i-- > 0) { - v = v << 8 | (long) b[i] & 0xff; - } - return v; + final byte[] b = new byte[n]; + try { + super.read(b); + } catch (final IOException e) { + throw new OtpErlangDecodeException("Cannot read from input stream"); + } + long v = 0; + int i = n; + while (i-- > 0) { + v = v << 8 | (long) b[i] & 0xff; + } + return v; } /** * Read a bigendian integer from the stream. - * + * * @param n * the number of bytes to read - * + * * @return the bytes read, converted from big endian to an integer. - * + * * @exception OtpErlangDecodeException * if the next byte cannot be read. 
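A sketch contrasting readBE(n) and readLE(n) on the same four bytes; setPos(0), shown earlier in this class, rewinds the stream between the two reads.

import com.ericsson.otp.erlang.OtpInputStream;

public class EndianSketch {
    public static void main(String[] args) throws Exception {
        OtpInputStream in = new OtpInputStream(new byte[] { 1, 2, 3, 4 });
        System.out.println(in.readBE(4)); // 16909060 (0x01020304)
        in.setPos(0);                     // reinterpret the same bytes
        System.out.println(in.readLE(4)); // 67305985 (0x04030201)
    }
}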
*/ public long readBE(final int n) throws OtpErlangDecodeException { - final byte[] b = new byte[n]; - try { - super.read(b); - } catch (final IOException e) { - throw new OtpErlangDecodeException("Cannot read from input stream"); - } - long v = 0; - for (int i = 0; i < n; i++) { - v = v << 8 | (long) b[i] & 0xff; - } - return v; + final byte[] b = new byte[n]; + try { + super.read(b); + } catch (final IOException e) { + throw new OtpErlangDecodeException("Cannot read from input stream"); + } + long v = 0; + for (int i = 0; i < n; i++) { + v = v << 8 | (long) b[i] & 0xff; + } + return v; } /** * Read an Erlang atom from the stream and interpret the value as a boolean. - * + * * @return true if the atom at the current position in the stream contains * the value 'true' (ignoring case), false otherwise. - * + * * @exception OtpErlangDecodeException * if the next term in the stream is not an atom. */ public boolean read_boolean() throws OtpErlangDecodeException { - return Boolean.valueOf(read_atom()).booleanValue(); + return Boolean.valueOf(read_atom()).booleanValue(); } /** * Read an Erlang atom from the stream. - * + * * @return a String containing the value of the atom. - * + * * @exception OtpErlangDecodeException * if the next term in the stream is not an atom. */ @SuppressWarnings("fallthrough") public String read_atom() throws OtpErlangDecodeException { - int tag; - int len = -1; - byte[] strbuf; - String atom; - - tag = read1skip_version(); - - switch (tag) { - - case OtpExternal.atomTag: - len = read2BE(); - strbuf = new byte[len]; - this.readN(strbuf); - try { - atom = new String(strbuf, "ISO-8859-1"); - } catch (final java.io.UnsupportedEncodingException e) { - throw new OtpErlangDecodeException( - "Failed to decode ISO-8859-1 atom"); - } - if (atom.length() > OtpExternal.maxAtomLength) { - /* - * Throwing an exception would be better I think, - * but truncation seems to be the way it has - * been done in other parts of OTP... - */ - atom = atom.substring(0, OtpExternal.maxAtomLength); - } - break; - - case OtpExternal.smallAtomUtf8Tag: - len = read1(); - // fall-through - case OtpExternal.atomUtf8Tag: - if (len < 0) { - len = read2BE(); - } - strbuf = new byte[len]; - this.readN(strbuf); - try { - atom = new String(strbuf, "UTF-8"); - } catch (final java.io.UnsupportedEncodingException e) { - throw new OtpErlangDecodeException( - "Failed to decode UTF-8 atom"); - } - if (atom.codePointCount(0, atom.length()) > OtpExternal.maxAtomLength) { - /* - * Throwing an exception would be better I think, - * but truncation seems to be the way it has - * been done in other parts of OTP... - */ - final int[] cps = OtpErlangString.stringToCodePoints(atom); - atom = new String(cps, 0, OtpExternal.maxAtomLength); - } - break; - - default: - throw new OtpErlangDecodeException( - "wrong tag encountered, expected " + OtpExternal.atomTag - + ", or " + OtpExternal.atomUtf8Tag + ", got " + tag); - } - - return atom; + int tag; + int len = -1; + byte[] strbuf; + String atom; + + tag = read1skip_version(); + + switch (tag) { + + case OtpExternal.atomTag: + len = read2BE(); + strbuf = new byte[len]; + this.readN(strbuf); + try { + atom = new String(strbuf, "ISO-8859-1"); + } catch (final java.io.UnsupportedEncodingException e) { + throw new OtpErlangDecodeException( + "Failed to decode ISO-8859-1 atom"); + } + if (atom.length() > OtpExternal.maxAtomLength) { + /* + * Throwing an exception would be better I think, but truncation + * seems to be the way it has been done in other parts of OTP... 
+ */ + atom = atom.substring(0, OtpExternal.maxAtomLength); + } + break; + + case OtpExternal.smallAtomUtf8Tag: + len = read1(); + // fall-through + case OtpExternal.atomUtf8Tag: + if (len < 0) { + len = read2BE(); + } + strbuf = new byte[len]; + this.readN(strbuf); + try { + atom = new String(strbuf, "UTF-8"); + } catch (final java.io.UnsupportedEncodingException e) { + throw new OtpErlangDecodeException( + "Failed to decode UTF-8 atom"); + } + if (atom.codePointCount(0, atom.length()) > OtpExternal.maxAtomLength) { + /* + * Throwing an exception would be better I think, but truncation + * seems to be the way it has been done in other parts of OTP... + */ + final int[] cps = OtpErlangString.stringToCodePoints(atom); + atom = new String(cps, 0, OtpExternal.maxAtomLength); + } + break; + + default: + throw new OtpErlangDecodeException( + "wrong tag encountered, expected " + OtpExternal.atomTag + + ", or " + OtpExternal.atomUtf8Tag + ", got " + + tag); + } + + return atom; } /** * Read an Erlang binary from the stream. - * + * * @return a byte array containing the value of the binary. - * + * * @exception OtpErlangDecodeException * if the next term in the stream is not a binary. */ public byte[] read_binary() throws OtpErlangDecodeException { - int tag; - int len; - byte[] bin; + int tag; + int len; + byte[] bin; - tag = read1skip_version(); + tag = read1skip_version(); - if (tag != OtpExternal.binTag) { - throw new OtpErlangDecodeException( - "Wrong tag encountered, expected " + OtpExternal.binTag - + ", got " + tag); - } + if (tag != OtpExternal.binTag) { + throw new OtpErlangDecodeException( + "Wrong tag encountered, expected " + OtpExternal.binTag + + ", got " + tag); + } - len = read4BE(); + len = read4BE(); - bin = new byte[len]; - this.readN(bin); + bin = new byte[len]; + this.readN(bin); - return bin; + return bin; } /** * Read an Erlang bitstr from the stream. - * + * * @param pad_bits * an int array whose first element will be set to the number of * pad bits in the last byte. - * + * * @return a byte array containing the value of the bitstr. - * + * * @exception OtpErlangDecodeException * if the next term in the stream is not a bitstr. 
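A sketch that decodes an atom from a hand-built buffer following the layout read_atom() parses above (optional version tag, atom tag, two-byte big-endian length, Latin-1 bytes); the literal bytes are assembled here purely for illustration.

import com.ericsson.otp.erlang.OtpExternal;
import com.ericsson.otp.erlang.OtpInputStream;

public class AtomSketch {
    public static void main(String[] args) throws Exception {
        byte[] buf = {
            (byte) OtpExternal.versionTag, // skipped by read1skip_version()
            (byte) OtpExternal.atomTag,    // classic Latin-1 atom encoding
            (byte) 0, (byte) 2,            // two-byte big-endian length
            (byte) 'o', (byte) 'k'
        };
        OtpInputStream in = new OtpInputStream(buf);
        System.out.println(in.read_atom()); // ok
    }
}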
*/ public byte[] read_bitstr(final int pad_bits[]) - throws OtpErlangDecodeException { - int tag; - int len; - byte[] bin; - - tag = read1skip_version(); - - if (tag != OtpExternal.bitBinTag) { - throw new OtpErlangDecodeException( - "Wrong tag encountered, expected " + OtpExternal.bitBinTag - + ", got " + tag); - } - - len = read4BE(); - bin = new byte[len]; - final int tail_bits = read1(); - if (tail_bits < 0 || 7 < tail_bits) { - throw new OtpErlangDecodeException( - "Wrong tail bit count in bitstr: " + tail_bits); - } - if (len == 0 && tail_bits != 0) { - throw new OtpErlangDecodeException( - "Length 0 on bitstr with tail bit count: " + tail_bits); - } - this.readN(bin); - - pad_bits[0] = 8 - tail_bits; - return bin; + throws OtpErlangDecodeException { + int tag; + int len; + byte[] bin; + + tag = read1skip_version(); + + if (tag != OtpExternal.bitBinTag) { + throw new OtpErlangDecodeException( + "Wrong tag encountered, expected " + OtpExternal.bitBinTag + + ", got " + tag); + } + + len = read4BE(); + bin = new byte[len]; + final int tail_bits = read1(); + if (tail_bits < 0 || 7 < tail_bits) { + throw new OtpErlangDecodeException( + "Wrong tail bit count in bitstr: " + tail_bits); + } + if (len == 0 && tail_bits != 0) { + throw new OtpErlangDecodeException( + "Length 0 on bitstr with tail bit count: " + tail_bits); + } + this.readN(bin); + + pad_bits[0] = 8 - tail_bits; + return bin; } /** * Read an Erlang float from the stream. - * + * * @return the float value. - * + * * @exception OtpErlangDecodeException * if the next term in the stream is not a float. */ public float read_float() throws OtpErlangDecodeException { - final double d = read_double(); - return (float) d; + final double d = read_double(); + return (float) d; } /** * Read an Erlang float from the stream. - * + * * @return the float value, as a double. - * + * * @exception OtpErlangDecodeException * if the next term in the stream is not a float. 
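A usage sketch for the pad_bits out-parameter of read_bitstr(); it assumes a stream that is already positioned at an encoded bit string, so no wire bytes are shown.

import com.ericsson.otp.erlang.OtpErlangDecodeException;
import com.ericsson.otp.erlang.OtpInputStream;

public class BitstrSketch {
    // assumed: `in` is already positioned at an encoded bit string term
    static byte[] readPadded(OtpInputStream in) throws OtpErlangDecodeException {
        int[] padBits = new int[1];               // out-parameter; index 0 is filled in
        byte[] payload = in.read_bitstr(padBits);
        // padBits[0] is the number of unused (pad) bits in the last byte of payload
        System.out.println(payload.length + " bytes, " + padBits[0] + " pad bits");
        return payload;
    }
}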
*/ public double read_double() throws OtpErlangDecodeException { - int tag; - - // parse the stream - tag = read1skip_version(); - - switch (tag) { - case OtpExternal.newFloatTag: { - return Double.longBitsToDouble(readBE(8)); - } - case OtpExternal.floatTag: { - BigDecimal val; - int epos; - int exp; - final byte[] strbuf = new byte[31]; - String str; - - // get the string - this.readN(strbuf); - str = OtpErlangString.newString(strbuf); - - // find the exponent prefix 'e' in the string - epos = str.indexOf('e', 0); - - if (epos < 0) { - throw new OtpErlangDecodeException("Invalid float format: '" - + str + "'"); - } - - // remove the sign from the exponent, if positive - String estr = str.substring(epos + 1).trim(); - - if (estr.substring(0, 1).equals("+")) { - estr = estr.substring(1); - } - - // now put the mantissa and exponent together - exp = Integer.valueOf(estr).intValue(); - val = new BigDecimal(str.substring(0, epos)).movePointRight(exp); - - return val.doubleValue(); - } - default: - throw new OtpErlangDecodeException( - "Wrong tag encountered, expected " - + OtpExternal.newFloatTag + ", got " + tag); - } + int tag; + + // parse the stream + tag = read1skip_version(); + + switch (tag) { + case OtpExternal.newFloatTag: { + return Double.longBitsToDouble(readBE(8)); + } + case OtpExternal.floatTag: { + BigDecimal val; + int epos; + int exp; + final byte[] strbuf = new byte[31]; + String str; + + // get the string + this.readN(strbuf); + str = OtpErlangString.newString(strbuf); + + // find the exponent prefix 'e' in the string + epos = str.indexOf('e', 0); + + if (epos < 0) { + throw new OtpErlangDecodeException("Invalid float format: '" + + str + "'"); + } + + // remove the sign from the exponent, if positive + String estr = str.substring(epos + 1).trim(); + + if (estr.substring(0, 1).equals("+")) { + estr = estr.substring(1); + } + + // now put the mantissa and exponent together + exp = Integer.valueOf(estr).intValue(); + val = new BigDecimal(str.substring(0, epos)).movePointRight(exp); + + return val.doubleValue(); + } + default: + throw new OtpErlangDecodeException( + "Wrong tag encountered, expected " + + OtpExternal.newFloatTag + ", got " + tag); + } } /** * Read one byte from the stream. - * + * * @return the byte read. - * + * * @exception OtpErlangDecodeException * if the next byte cannot be read. */ public byte read_byte() throws OtpErlangDecodeException { - final long l = this.read_long(false); - final byte i = (byte) l; + final long l = this.read_long(false); + final byte i = (byte) l; - if (l != i) { - throw new OtpErlangDecodeException("Value does not fit in byte: " - + l); - } + if (l != i) { + throw new OtpErlangDecodeException("Value does not fit in byte: " + + l); + } - return i; + return i; } /** * Read a character from the stream. - * + * * @return the character value. - * + * * @exception OtpErlangDecodeException * if the next term in the stream is not an integer that can * be represented as a char. */ public char read_char() throws OtpErlangDecodeException { - final long l = this.read_long(true); - final char i = (char) l; + final long l = this.read_long(true); + final char i = (char) l; - if (l != (i & 0xffffL)) { - throw new OtpErlangDecodeException("Value does not fit in char: " - + l); - } + if (l != (i & 0xffffL)) { + throw new OtpErlangDecodeException("Value does not fit in char: " + + l); + } - return i; + return i; } /** * Read an unsigned integer from the stream. - * + * * @return the integer value. 
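The floatTag branch above decodes the legacy external float format, a fixed-width ASCII string in scientific notation, by splitting at 'e' and recombining mantissa and exponent with BigDecimal. A standalone sketch of just that arithmetic; the sample string is hypothetical (trailing padding of the 31-byte payload trimmed):

    import java.math.BigDecimal;

    public class LegacyFloatParse {
        public static void main(final String[] args) {
            final String str = "1.50000000000000000000e+01"; // hypothetical payload
            final int epos = str.indexOf('e');
            String estr = str.substring(epos + 1).trim();
            if (estr.startsWith("+")) {
                estr = estr.substring(1); // drop an explicit plus sign
            }
            final int exp = Integer.parseInt(estr);
            final double d = new BigDecimal(str.substring(0, epos))
                    .movePointRight(exp).doubleValue();
            System.out.println(d); // 15.0
        }
    }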
- * + * * @exception OtpErlangDecodeException * if the next term in the stream can not be represented as a * positive integer. */ public int read_uint() throws OtpErlangDecodeException { - final long l = this.read_long(true); - final int i = (int) l; + final long l = this.read_long(true); + final int i = (int) l; - if (l != (i & 0xFFFFffffL)) { - throw new OtpErlangDecodeException("Value does not fit in uint: " - + l); - } + if (l != (i & 0xFFFFffffL)) { + throw new OtpErlangDecodeException("Value does not fit in uint: " + + l); + } - return i; + return i; } /** * Read an integer from the stream. - * + * * @return the integer value. - * + * * @exception OtpErlangDecodeException * if the next term in the stream can not be represented as * an integer. */ public int read_int() throws OtpErlangDecodeException { - final long l = this.read_long(false); - final int i = (int) l; + final long l = this.read_long(false); + final int i = (int) l; - if (l != i) { - throw new OtpErlangDecodeException("Value does not fit in int: " - + l); - } + if (l != i) { + throw new OtpErlangDecodeException("Value does not fit in int: " + + l); + } - return i; + return i; } /** * Read an unsigned short from the stream. - * + * * @return the short value. - * + * * @exception OtpErlangDecodeException * if the next term in the stream can not be represented as a * positive short. */ public short read_ushort() throws OtpErlangDecodeException { - final long l = this.read_long(true); - final short i = (short) l; + final long l = this.read_long(true); + final short i = (short) l; - if (l != (i & 0xffffL)) { - throw new OtpErlangDecodeException("Value does not fit in ushort: " - + l); - } + if (l != (i & 0xffffL)) { + throw new OtpErlangDecodeException("Value does not fit in ushort: " + + l); + } - return i; + return i; } /** * Read a short from the stream. - * + * * @return the short value. - * + * * @exception OtpErlangDecodeException * if the next term in the stream can not be represented as a * short. */ public short read_short() throws OtpErlangDecodeException { - final long l = this.read_long(false); - final short i = (short) l; + final long l = this.read_long(false); + final short i = (short) l; - if (l != i) { - throw new OtpErlangDecodeException("Value does not fit in short: " - + l); - } + if (l != i) { + throw new OtpErlangDecodeException("Value does not fit in short: " + + l); + } - return i; + return i; } /** * Read an unsigned long from the stream. - * + * * @return the long value. - * + * * @exception OtpErlangDecodeException * if the next term in the stream can not be represented as a * positive long. */ public long read_ulong() throws OtpErlangDecodeException { - return this.read_long(true); + return this.read_long(true); } /** * Read a long from the stream. - * + * * @return the long value. - * + * * @exception OtpErlangDecodeException * if the next term in the stream can not be represented as a * long. */ public long read_long() throws OtpErlangDecodeException { - return this.read_long(false); + return this.read_long(false); } public long read_long(final boolean unsigned) - throws OtpErlangDecodeException { - final byte[] b = read_integer_byte_array(); - return OtpInputStream.byte_array_to_long(b, unsigned); + throws OtpErlangDecodeException { + final byte[] b = read_integer_byte_array(); + return OtpInputStream.byte_array_to_long(b, unsigned); } /** * Read an integer from the stream. - * + * * @return the value as a big endian 2's complement byte array. 
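All of the fixed-width readers above share one pattern: read the value as a long (read_long(true) already rejects negatives for the unsigned variants), cast it down, and verify that the cast round-trips; the mask in the unsigned cases lets the full unsigned range through (e.g. 0xFFFF for read_ushort). A compact sketch of that check for an unsigned 16-bit value, not part of the patch:

    public class NarrowingCheck {
        // Same shape as the range test in read_ushort: accept 0..65535, reject the rest.
        static short toUshort(final long l) {
            final short i = (short) l;
            if (l != (i & 0xffffL)) {
                throw new IllegalArgumentException("Value does not fit in ushort: " + l);
            }
            return i;
        }

        public static void main(final String[] args) {
            System.out.println(toUshort(0xFFFF) & 0xffff); // 65535
            try {
                toUshort(-1);
            } catch (final IllegalArgumentException e) {
                System.out.println(e.getMessage()); // rejected
            }
        }
    }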
- * + * * @exception OtpErlangDecodeException * if the next term in the stream is not an integer. */ public byte[] read_integer_byte_array() throws OtpErlangDecodeException { - int tag; - byte[] nb; - - tag = read1skip_version(); - - switch (tag) { - case OtpExternal.smallIntTag: - nb = new byte[2]; - nb[0] = 0; - nb[1] = (byte) read1(); - break; - - case OtpExternal.intTag: - nb = new byte[4]; - if (this.readN(nb) != 4) { // Big endian - throw new OtpErlangDecodeException( - "Cannot read from intput stream"); - } - break; - - case OtpExternal.smallBigTag: - case OtpExternal.largeBigTag: - int arity; - int sign; - if (tag == OtpExternal.smallBigTag) { - arity = read1(); - sign = read1(); - } else { - arity = read4BE(); - sign = read1(); - if (arity + 1 < 0) { - throw new OtpErlangDecodeException( - "Value of largeBig does not fit in BigInteger, arity " - + arity + " sign " + sign); - } - } - nb = new byte[arity + 1]; - // Value is read as little endian. The big end is augumented - // with one zero byte to make the value 2's complement positive. - if (this.readN(nb, 0, arity) != arity) { - throw new OtpErlangDecodeException( - "Cannot read from intput stream"); - } - // Reverse the array to make it big endian. - for (int i = 0, j = nb.length; i < j--; i++) { - // Swap [i] with [j] - final byte b = nb[i]; - nb[i] = nb[j]; - nb[j] = b; - } - if (sign != 0) { - // 2's complement negate the big endian value in the array - int c = 1; // Carry - for (int j = nb.length; j-- > 0;) { - c = (~nb[j] & 0xFF) + c; - nb[j] = (byte) c; - c >>= 8; - } - } - break; - - default: - throw new OtpErlangDecodeException("Not valid integer tag: " + tag); - } - - return nb; + int tag; + byte[] nb; + + tag = read1skip_version(); + + switch (tag) { + case OtpExternal.smallIntTag: + nb = new byte[2]; + nb[0] = 0; + nb[1] = (byte) read1(); + break; + + case OtpExternal.intTag: + nb = new byte[4]; + if (this.readN(nb) != 4) { // Big endian + throw new OtpErlangDecodeException( + "Cannot read from intput stream"); + } + break; + + case OtpExternal.smallBigTag: + case OtpExternal.largeBigTag: + int arity; + int sign; + if (tag == OtpExternal.smallBigTag) { + arity = read1(); + sign = read1(); + } else { + arity = read4BE(); + sign = read1(); + if (arity + 1 < 0) { + throw new OtpErlangDecodeException( + "Value of largeBig does not fit in BigInteger, arity " + + arity + " sign " + sign); + } + } + nb = new byte[arity + 1]; + // Value is read as little endian. The big end is augumented + // with one zero byte to make the value 2's complement positive. + if (this.readN(nb, 0, arity) != arity) { + throw new OtpErlangDecodeException( + "Cannot read from intput stream"); + } + // Reverse the array to make it big endian. 
+ for (int i = 0, j = nb.length; i < j--; i++) { + // Swap [i] with [j] + final byte b = nb[i]; + nb[i] = nb[j]; + nb[j] = b; + } + if (sign != 0) { + // 2's complement negate the big endian value in the array + int c = 1; // Carry + for (int j = nb.length; j-- > 0;) { + c = (~nb[j] & 0xFF) + c; + nb[j] = (byte) c; + c >>= 8; + } + } + break; + + default: + throw new OtpErlangDecodeException("Not valid integer tag: " + tag); + } + + return nb; } public static long byte_array_to_long(final byte[] b, final boolean unsigned) - throws OtpErlangDecodeException { - long v; - switch (b.length) { - case 0: - v = 0; - break; - case 2: - v = ((b[0] & 0xFF) << 8) + (b[1] & 0xFF); - v = (short) v; // Sign extend - if (v < 0 && unsigned) { - throw new OtpErlangDecodeException("Value not unsigned: " + v); - } - break; - case 4: - v = ((b[0] & 0xFF) << 24) + ((b[1] & 0xFF) << 16) - + ((b[2] & 0xFF) << 8) + (b[3] & 0xFF); - v = (int) v; // Sign extend - if (v < 0 && unsigned) { - throw new OtpErlangDecodeException("Value not unsigned: " + v); - } - break; - default: - int i = 0; - final byte c = b[i]; - // Skip non-essential leading bytes - if (unsigned) { - if (c < 0) { - throw new OtpErlangDecodeException("Value not unsigned: " - + Arrays.toString(b)); - } - while (b[i] == 0) { - i++; // Skip leading zero sign bytes - } - } else { - if (c == 0 || c == -1) { // Leading sign byte - i = 1; - // Skip all leading sign bytes - while (i < b.length && b[i] == c) { - i++; - } - if (i < b.length) { - // Check first non-sign byte to see if its sign - // matches the whole number's sign. If not one more - // byte is needed to represent the value. - if (((c ^ b[i]) & 0x80) != 0) { - i--; - } - } - } - } - if (b.length - i > 8) { - // More than 64 bits of value - throw new OtpErlangDecodeException( - "Value does not fit in long: " + Arrays.toString(b)); - } - // Convert the necessary bytes - for (v = c < 0 ? -1 : 0; i < b.length; i++) { - v = v << 8 | b[i] & 0xFF; - } - } - return v; + throws OtpErlangDecodeException { + long v; + switch (b.length) { + case 0: + v = 0; + break; + case 2: + v = ((b[0] & 0xFF) << 8) + (b[1] & 0xFF); + v = (short) v; // Sign extend + if (v < 0 && unsigned) { + throw new OtpErlangDecodeException("Value not unsigned: " + v); + } + break; + case 4: + v = ((b[0] & 0xFF) << 24) + ((b[1] & 0xFF) << 16) + + ((b[2] & 0xFF) << 8) + (b[3] & 0xFF); + v = (int) v; // Sign extend + if (v < 0 && unsigned) { + throw new OtpErlangDecodeException("Value not unsigned: " + v); + } + break; + default: + int i = 0; + final byte c = b[i]; + // Skip non-essential leading bytes + if (unsigned) { + if (c < 0) { + throw new OtpErlangDecodeException("Value not unsigned: " + + Arrays.toString(b)); + } + while (b[i] == 0) { + i++; // Skip leading zero sign bytes + } + } else { + if (c == 0 || c == -1) { // Leading sign byte + i = 1; + // Skip all leading sign bytes + while (i < b.length && b[i] == c) { + i++; + } + if (i < b.length) { + // Check first non-sign byte to see if its sign + // matches the whole number's sign. If not one more + // byte is needed to represent the value. + if (((c ^ b[i]) & 0x80) != 0) { + i--; + } + } + } + } + if (b.length - i > 8) { + // More than 64 bits of value + throw new OtpErlangDecodeException( + "Value does not fit in long: " + Arrays.toString(b)); + } + // Convert the necessary bytes + for (v = c < 0 ? -1 : 0; i < b.length; i++) { + v = v << 8 | b[i] & 0xFF; + } + } + return v; } /** * Read a list header from the stream. - * + * * @return the arity of the list. 
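read_integer_byte_array always hands back a big-endian two's-complement buffer (bignums are reversed from little-endian and, when the sign byte is set, negated into that form), which is exactly the representation that java.math.BigInteger's byte-array constructor expects. A hedged cross-check sketch, assuming in is an OtpInputStream positioned at an integer term:

    import java.math.BigInteger;
    import com.ericsson.otp.erlang.OtpErlangDecodeException;
    import com.ericsson.otp.erlang.OtpInputStream;

    public class IntegerBytesCheck {
        static void check(final OtpInputStream in) throws OtpErlangDecodeException {
            final byte[] nb = in.read_integer_byte_array(); // big-endian 2's complement
            final BigInteger big = new BigInteger(nb);
            // For values that fit in 64 bits the two decodings agree;
            // anything larger makes byte_array_to_long throw.
            final long asLong = OtpInputStream.byte_array_to_long(nb, false);
            System.out.println(big + " == " + asLong);
        }
    }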
- * + * * @exception OtpErlangDecodeException * if the next term in the stream is not a list. */ public int read_list_head() throws OtpErlangDecodeException { - int arity = 0; - final int tag = read1skip_version(); + int arity = 0; + final int tag = read1skip_version(); - switch (tag) { - case OtpExternal.nilTag: - arity = 0; - break; + switch (tag) { + case OtpExternal.nilTag: + arity = 0; + break; - case OtpExternal.stringTag: - arity = read2BE(); - break; + case OtpExternal.stringTag: + arity = read2BE(); + break; - case OtpExternal.listTag: - arity = read4BE(); - break; + case OtpExternal.listTag: + arity = read4BE(); + break; - default: - throw new OtpErlangDecodeException("Not valid list tag: " + tag); - } + default: + throw new OtpErlangDecodeException("Not valid list tag: " + tag); + } - return arity; + return arity; } /** * Read a tuple header from the stream. - * + * * @return the arity of the tuple. - * + * * @exception OtpErlangDecodeException * if the next term in the stream is not a tuple. */ public int read_tuple_head() throws OtpErlangDecodeException { - int arity = 0; - final int tag = read1skip_version(); + int arity = 0; + final int tag = read1skip_version(); - // decode the tuple header and get arity - switch (tag) { - case OtpExternal.smallTupleTag: - arity = read1(); - break; + // decode the tuple header and get arity + switch (tag) { + case OtpExternal.smallTupleTag: + arity = read1(); + break; - case OtpExternal.largeTupleTag: - arity = read4BE(); - break; + case OtpExternal.largeTupleTag: + arity = read4BE(); + break; - default: - throw new OtpErlangDecodeException("Not valid tuple tag: " + tag); - } + default: + throw new OtpErlangDecodeException("Not valid tuple tag: " + tag); + } - return arity; + return arity; } /** * Read an empty list from the stream. - * + * * @return zero (the arity of the list). - * + * * @exception OtpErlangDecodeException * if the next term in the stream is not an empty list. */ public int read_nil() throws OtpErlangDecodeException { - int arity = 0; - final int tag = read1skip_version(); + int arity = 0; + final int tag = read1skip_version(); - switch (tag) { - case OtpExternal.nilTag: - arity = 0; - break; + switch (tag) { + case OtpExternal.nilTag: + arity = 0; + break; - default: - throw new OtpErlangDecodeException("Not valid nil tag: " + tag); - } + default: + throw new OtpErlangDecodeException("Not valid nil tag: " + tag); + } - return arity; + return arity; } /** * Read an Erlang PID from the stream. - * + * * @return the value of the PID. - * + * * @exception OtpErlangDecodeException * if the next term in the stream is not an Erlang PID. 
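read_tuple_head only consumes the tag and arity; the caller then reads that many element terms itself, which is essentially what the OtpErlangTuple(OtpInputStream) constructor used by read_any does. A minimal sketch decoding a hand-built {ok, 42} term; the byte values follow the external term format:

    import com.ericsson.otp.erlang.OtpErlangDecodeException;
    import com.ericsson.otp.erlang.OtpErlangObject;
    import com.ericsson.otp.erlang.OtpInputStream;

    public class ReadTupleExample {
        public static void main(final String[] args) throws OtpErlangDecodeException {
            // {ok, 42}: 131 version, 104 small tuple, arity 2,
            // 100 atom, 2-byte length, 'o' 'k', 97 small int, 42.
            final byte[] buf = { (byte) 131, 104, 2, 100, 0, 2, 'o', 'k', 97, 42 };
            final OtpInputStream in = new OtpInputStream(buf, 0);
            final int arity = in.read_tuple_head();
            final OtpErlangObject[] elems = new OtpErlangObject[arity];
            for (int i = 0; i < arity; i++) {
                elems[i] = in.read_any();
            }
            System.out.println(elems[0] + ", " + elems[1]); // ok, 42
        }
    }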
*/ public OtpErlangPid read_pid() throws OtpErlangDecodeException { - String node; - int id; - int serial; - int creation; - int tag; - - tag = read1skip_version(); - - if (tag != OtpExternal.pidTag) { - throw new OtpErlangDecodeException( - "Wrong tag encountered, expected " + OtpExternal.pidTag - + ", got " + tag); - } - - node = read_atom(); - id = read4BE() & 0x7fff; // 15 bits - serial = read4BE() & 0x1fff; // 13 bits - creation = read1() & 0x03; // 2 bits - - return new OtpErlangPid(node, id, serial, creation); + String node; + int id; + int serial; + int creation; + int tag; + + tag = read1skip_version(); + + if (tag != OtpExternal.pidTag) { + throw new OtpErlangDecodeException( + "Wrong tag encountered, expected " + OtpExternal.pidTag + + ", got " + tag); + } + + node = read_atom(); + id = read4BE() & 0x7fff; // 15 bits + serial = read4BE() & 0x1fff; // 13 bits + creation = read1() & 0x03; // 2 bits + + return new OtpErlangPid(node, id, serial, creation); } /** * Read an Erlang port from the stream. - * + * * @return the value of the port. - * + * * @exception OtpErlangDecodeException * if the next term in the stream is not an Erlang port. */ public OtpErlangPort read_port() throws OtpErlangDecodeException { - String node; - int id; - int creation; - int tag; + String node; + int id; + int creation; + int tag; - tag = read1skip_version(); + tag = read1skip_version(); - if (tag != OtpExternal.portTag) { - throw new OtpErlangDecodeException( - "Wrong tag encountered, expected " + OtpExternal.portTag - + ", got " + tag); - } + if (tag != OtpExternal.portTag) { + throw new OtpErlangDecodeException( + "Wrong tag encountered, expected " + OtpExternal.portTag + + ", got " + tag); + } - node = read_atom(); - id = read4BE() & 0xfffffff; // 28 bits - creation = read1() & 0x03; // 2 bits + node = read_atom(); + id = read4BE() & 0xfffffff; // 28 bits + creation = read1() & 0x03; // 2 bits - return new OtpErlangPort(node, id, creation); + return new OtpErlangPort(node, id, creation); } /** * Read an Erlang reference from the stream. - * + * * @return the value of the reference - * + * * @exception OtpErlangDecodeException * if the next term in the stream is not an Erlang reference. 
*/ public OtpErlangRef read_ref() throws OtpErlangDecodeException { - String node; - int id; - int creation; - int tag; - - tag = read1skip_version(); - - switch (tag) { - case OtpExternal.refTag: - node = read_atom(); - id = read4BE() & 0x3ffff; // 18 bits - creation = read1() & 0x03; // 2 bits - return new OtpErlangRef(node, id, creation); - - case OtpExternal.newRefTag: - final int arity = read2BE(); - node = read_atom(); - creation = read1() & 0x03; // 2 bits - - final int[] ids = new int[arity]; - for (int i = 0; i < arity; i++) { - ids[i] = read4BE(); - } - ids[0] &= 0x3ffff; // first id gets truncated to 18 bits - return new OtpErlangRef(node, ids, creation); - - default: - throw new OtpErlangDecodeException( - "Wrong tag encountered, expected ref, got " + tag); - } + String node; + int id; + int creation; + int tag; + + tag = read1skip_version(); + + switch (tag) { + case OtpExternal.refTag: + node = read_atom(); + id = read4BE() & 0x3ffff; // 18 bits + creation = read1() & 0x03; // 2 bits + return new OtpErlangRef(node, id, creation); + + case OtpExternal.newRefTag: + final int arity = read2BE(); + node = read_atom(); + creation = read1() & 0x03; // 2 bits + + final int[] ids = new int[arity]; + for (int i = 0; i < arity; i++) { + ids[i] = read4BE(); + } + ids[0] &= 0x3ffff; // first id gets truncated to 18 bits + return new OtpErlangRef(node, ids, creation); + + default: + throw new OtpErlangDecodeException( + "Wrong tag encountered, expected ref, got " + tag); + } } public OtpErlangFun read_fun() throws OtpErlangDecodeException { - final int tag = read1skip_version(); - if (tag == OtpExternal.funTag) { - final int nFreeVars = read4BE(); - final OtpErlangPid pid = read_pid(); - final String module = read_atom(); - final long index = read_long(); - final long uniq = read_long(); - final OtpErlangObject[] freeVars = new OtpErlangObject[nFreeVars]; - for (int i = 0; i < nFreeVars; ++i) { - freeVars[i] = read_any(); - } - return new OtpErlangFun(pid, module, index, uniq, freeVars); - } else if (tag == OtpExternal.newFunTag) { - read4BE(); - final int arity = read1(); - final byte[] md5 = new byte[16]; - readN(md5); - final int index = read4BE(); - final int nFreeVars = read4BE(); - final String module = read_atom(); - final long oldIndex = read_long(); - final long uniq = read_long(); - final OtpErlangPid pid = read_pid(); - final OtpErlangObject[] freeVars = new OtpErlangObject[nFreeVars]; - for (int i = 0; i < nFreeVars; ++i) { - freeVars[i] = read_any(); - } - return new OtpErlangFun(pid, module, arity, md5, index, oldIndex, - uniq, freeVars); - } else { - throw new OtpErlangDecodeException( - "Wrong tag encountered, expected fun, got " + tag); - } + final int tag = read1skip_version(); + if (tag == OtpExternal.funTag) { + final int nFreeVars = read4BE(); + final OtpErlangPid pid = read_pid(); + final String module = read_atom(); + final long index = read_long(); + final long uniq = read_long(); + final OtpErlangObject[] freeVars = new OtpErlangObject[nFreeVars]; + for (int i = 0; i < nFreeVars; ++i) { + freeVars[i] = read_any(); + } + return new OtpErlangFun(pid, module, index, uniq, freeVars); + } else if (tag == OtpExternal.newFunTag) { + read4BE(); + final int arity = read1(); + final byte[] md5 = new byte[16]; + readN(md5); + final int index = read4BE(); + final int nFreeVars = read4BE(); + final String module = read_atom(); + final long oldIndex = read_long(); + final long uniq = read_long(); + final OtpErlangPid pid = read_pid(); + final OtpErlangObject[] freeVars = new 
OtpErlangObject[nFreeVars]; + for (int i = 0; i < nFreeVars; ++i) { + freeVars[i] = read_any(); + } + return new OtpErlangFun(pid, module, arity, md5, index, oldIndex, + uniq, freeVars); + } else { + throw new OtpErlangDecodeException( + "Wrong tag encountered, expected fun, got " + tag); + } } public OtpErlangExternalFun read_external_fun() - throws OtpErlangDecodeException { - final int tag = read1skip_version(); - if (tag != OtpExternal.externalFunTag) { - throw new OtpErlangDecodeException( - "Wrong tag encountered, expected external fun, got " + tag); - } - final String module = read_atom(); - final String function = read_atom(); - final int arity = (int) read_long(); - return new OtpErlangExternalFun(module, function, arity); + throws OtpErlangDecodeException { + final int tag = read1skip_version(); + if (tag != OtpExternal.externalFunTag) { + throw new OtpErlangDecodeException( + "Wrong tag encountered, expected external fun, got " + tag); + } + final String module = read_atom(); + final String function = read_atom(); + final int arity = (int) read_long(); + return new OtpErlangExternalFun(module, function, arity); } /** * Read a string from the stream. - * + * * @return the value of the string. - * + * * @exception OtpErlangDecodeException * if the next term in the stream is not a string. */ public String read_string() throws OtpErlangDecodeException { - int tag; - int len; - byte[] strbuf; - int[] intbuf; - tag = read1skip_version(); - switch (tag) { - case OtpExternal.stringTag: - len = read2BE(); - strbuf = new byte[len]; - this.readN(strbuf); - return OtpErlangString.newString(strbuf); - case OtpExternal.nilTag: - return ""; - case OtpExternal.listTag: // List when unicode + - len = read4BE(); - intbuf = new int[len]; - for (int i = 0; i < len; i++) { - intbuf[i] = read_int(); - if (! OtpErlangString.isValidCodePoint(intbuf[i])) { - throw new OtpErlangDecodeException - ("Invalid CodePoint: " + intbuf[i]); - } - } - read_nil(); - return new String(intbuf, 0, intbuf.length); - default: - throw new OtpErlangDecodeException( - "Wrong tag encountered, expected " + OtpExternal.stringTag - + " or " + OtpExternal.listTag + ", got " + tag); - } + int tag; + int len; + byte[] strbuf; + int[] intbuf; + tag = read1skip_version(); + switch (tag) { + case OtpExternal.stringTag: + len = read2BE(); + strbuf = new byte[len]; + this.readN(strbuf); + return OtpErlangString.newString(strbuf); + case OtpExternal.nilTag: + return ""; + case OtpExternal.listTag: // List when unicode + + len = read4BE(); + intbuf = new int[len]; + for (int i = 0; i < len; i++) { + intbuf[i] = read_int(); + if (!OtpErlangString.isValidCodePoint(intbuf[i])) { + throw new OtpErlangDecodeException("Invalid CodePoint: " + + intbuf[i]); + } + } + read_nil(); + return new String(intbuf, 0, intbuf.length); + default: + throw new OtpErlangDecodeException( + "Wrong tag encountered, expected " + OtpExternal.stringTag + + " or " + OtpExternal.listTag + ", got " + tag); + } } /** * Read a compressed term from the stream - * + * * @return the resulting uncompressed term. - * + * * @exception OtpErlangDecodeException * if the next term in the stream is not a compressed term. 
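read_string accepts three encodings: stringTag (a length-prefixed byte list), nilTag (the empty string), and listTag (a list of integer code points, used when the characters do not fit in bytes), validating every code point in the last case. A small sketch decoding the compact stringTag form from hand-built bytes:

    import com.ericsson.otp.erlang.OtpErlangDecodeException;
    import com.ericsson.otp.erlang.OtpInputStream;

    public class ReadStringExample {
        public static void main(final String[] args) throws OtpErlangDecodeException {
            // "hi" as STRING_EXT: 131 version, 107 string tag, 2-byte length, the bytes.
            final byte[] buf = { (byte) 131, 107, 0, 2, 'h', 'i' };
            final OtpInputStream in = new OtpInputStream(buf, 0);
            System.out.println(in.read_string()); // hi
        }
    }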
*/ public OtpErlangObject read_compressed() throws OtpErlangDecodeException { - final int tag = read1skip_version(); - - if (tag != OtpExternal.compressedTag) { - throw new OtpErlangDecodeException( - "Wrong tag encountered, expected " - + OtpExternal.compressedTag + ", got " + tag); - } - - final int size = read4BE(); - final byte[] abuf = new byte[size]; - final java.util.zip.InflaterInputStream is = - new java.util.zip.InflaterInputStream(this, new java.util.zip.Inflater(), size); - int curPos = 0; - try { - int curRead; - while(curPos < size && (curRead = is.read(abuf, curPos, size - curPos)) != -1) { - curPos += curRead; - } - if (curPos != size) { - throw new OtpErlangDecodeException("Decompression gave " - + curPos + " bytes, not " + size); - } - } catch (final IOException e) { - throw new OtpErlangDecodeException("Cannot read from input stream"); - } - - @SuppressWarnings("resource") - final OtpInputStream ois = new OtpInputStream(abuf, flags); - return ois.read_any(); + final int tag = read1skip_version(); + + if (tag != OtpExternal.compressedTag) { + throw new OtpErlangDecodeException( + "Wrong tag encountered, expected " + + OtpExternal.compressedTag + ", got " + tag); + } + + final int size = read4BE(); + final byte[] abuf = new byte[size]; + final java.util.zip.InflaterInputStream is = new java.util.zip.InflaterInputStream( + this, new java.util.zip.Inflater(), size); + int curPos = 0; + try { + int curRead; + while (curPos < size + && (curRead = is.read(abuf, curPos, size - curPos)) != -1) { + curPos += curRead; + } + if (curPos != size) { + throw new OtpErlangDecodeException("Decompression gave " + + curPos + " bytes, not " + size); + } + } catch (final IOException e) { + throw new OtpErlangDecodeException("Cannot read from input stream"); + } + + @SuppressWarnings("resource") + final OtpInputStream ois = new OtpInputStream(abuf, flags); + return ois.read_any(); } /** * Read an arbitrary Erlang term from the stream. - * + * * @return the Erlang term. - * + * * @exception OtpErlangDecodeException * if the stream does not contain a known Erlang type at the * next position. 
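read_any peeks at the next tag and dispatches to the matching OtpErlang* stream constructor, so callers rarely need the typed read_* methods directly; compressed terms (compressedTag) are inflated and decoded recursively through a nested OtpInputStream, and with the DECODE_INT_LISTS_AS_STRINGS flag set, integer lists are first tried as strings. A hedged sketch of the generic entry point:

    import com.ericsson.otp.erlang.OtpErlangDecodeException;
    import com.ericsson.otp.erlang.OtpErlangObject;
    import com.ericsson.otp.erlang.OtpInputStream;

    public class ReadAnyExample {
        // Decode whatever single term the buffer holds, compressed or not.
        static OtpErlangObject decode(final byte[] buf)
                throws OtpErlangDecodeException {
            final OtpInputStream in = new OtpInputStream(buf, 0);
            return in.read_any();
        }

        public static void main(final String[] args) throws OtpErlangDecodeException {
            final byte[] buf = { (byte) 131, 97, 7 }; // the small integer 7
            System.out.println(decode(buf)); // 7
        }
    }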
*/ public OtpErlangObject read_any() throws OtpErlangDecodeException { - // calls one of the above functions, depending on o - final int tag = peek1skip_version(); + // calls one of the above functions, depending on o + final int tag = peek1skip_version(); - switch (tag) { - case OtpExternal.smallIntTag: - case OtpExternal.intTag: - case OtpExternal.smallBigTag: - case OtpExternal.largeBigTag: - return new OtpErlangLong(this); + switch (tag) { + case OtpExternal.smallIntTag: + case OtpExternal.intTag: + case OtpExternal.smallBigTag: + case OtpExternal.largeBigTag: + return new OtpErlangLong(this); - case OtpExternal.atomTag: - case OtpExternal.smallAtomUtf8Tag: - case OtpExternal.atomUtf8Tag: - return new OtpErlangAtom(this); + case OtpExternal.atomTag: + case OtpExternal.smallAtomUtf8Tag: + case OtpExternal.atomUtf8Tag: + return new OtpErlangAtom(this); - case OtpExternal.floatTag: - case OtpExternal.newFloatTag: - return new OtpErlangDouble(this); + case OtpExternal.floatTag: + case OtpExternal.newFloatTag: + return new OtpErlangDouble(this); - case OtpExternal.refTag: - case OtpExternal.newRefTag: - return new OtpErlangRef(this); + case OtpExternal.refTag: + case OtpExternal.newRefTag: + return new OtpErlangRef(this); case OtpExternal.mapTag: return new OtpErlangMap(this); - case OtpExternal.portTag: - return new OtpErlangPort(this); + case OtpExternal.portTag: + return new OtpErlangPort(this); - case OtpExternal.pidTag: - return new OtpErlangPid(this); + case OtpExternal.pidTag: + return new OtpErlangPid(this); - case OtpExternal.stringTag: - return new OtpErlangString(this); + case OtpExternal.stringTag: + return new OtpErlangString(this); - case OtpExternal.listTag: - case OtpExternal.nilTag: - if ((flags & DECODE_INT_LISTS_AS_STRINGS) != 0) { - final int savePos = getPos(); - try { - return new OtpErlangString(this); - } catch (final OtpErlangDecodeException e) { - } - setPos(savePos); - } - return new OtpErlangList(this); + case OtpExternal.listTag: + case OtpExternal.nilTag: + if ((flags & DECODE_INT_LISTS_AS_STRINGS) != 0) { + final int savePos = getPos(); + try { + return new OtpErlangString(this); + } catch (final OtpErlangDecodeException e) { + } + setPos(savePos); + } + return new OtpErlangList(this); - case OtpExternal.smallTupleTag: - case OtpExternal.largeTupleTag: - return new OtpErlangTuple(this); + case OtpExternal.smallTupleTag: + case OtpExternal.largeTupleTag: + return new OtpErlangTuple(this); - case OtpExternal.binTag: - return new OtpErlangBinary(this); + case OtpExternal.binTag: + return new OtpErlangBinary(this); - case OtpExternal.bitBinTag: - return new OtpErlangBitstr(this); + case OtpExternal.bitBinTag: + return new OtpErlangBitstr(this); - case OtpExternal.compressedTag: - return read_compressed(); + case OtpExternal.compressedTag: + return read_compressed(); - case OtpExternal.newFunTag: - case OtpExternal.funTag: - return new OtpErlangFun(this); + case OtpExternal.newFunTag: + case OtpExternal.funTag: + return new OtpErlangFun(this); - default: - throw new OtpErlangDecodeException("Uknown data type: " + tag); - } + default: + throw new OtpErlangDecodeException("Uknown data type: " + tag); + } } public int read_map_head() throws OtpErlangDecodeException { diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpLocalNode.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpLocalNode.java index fbd0eb4073..b996ba6f6c 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpLocalNode.java +++ 
b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpLocalNode.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -32,86 +32,86 @@ public class OtpLocalNode extends AbstractNode { protected java.net.Socket epmd; protected OtpLocalNode() { - super(); - init(); + super(); + init(); } /** * Create a node with the given name and the default cookie. */ protected OtpLocalNode(final String node) { - super(node); - init(); + super(node); + init(); } /** * Create a node with the given name and cookie. */ protected OtpLocalNode(final String node, final String cookie) { - super(node, cookie); - init(); + super(node, cookie); + init(); } private void init() { - serial = 0; - pidCount = 1; - portCount = 1; - refId = new int[3]; - refId[0] = 1; - refId[1] = 0; - refId[2] = 0; + serial = 0; + pidCount = 1; + portCount = 1; + refId = new int[3]; + refId[0] = 1; + refId[1] = 0; + refId[2] = 0; } /** * Get the port number used by this node. - * + * * @return the port number this server node is accepting connections on. */ public int port() { - return port; + return port; } /** * Set the Epmd socket after publishing this nodes listen port to Epmd. - * + * * @param s - * The socket connecting this node to Epmd. + * The socket connecting this node to Epmd. */ protected void setEpmd(final java.net.Socket s) { - epmd = s; + epmd = s; } /** * Get the Epmd socket. - * + * * @return The socket connecting this node to Epmd. */ protected java.net.Socket getEpmd() { - return epmd; + return epmd; } /** * Create an Erlang {@link OtpErlangPid pid}. Erlang pids are based upon * some node specific information; this method creates a pid using the * information in this node. Each call to this method produces a unique pid. - * + * * @return an Erlang pid. */ public synchronized OtpErlangPid createPid() { - final OtpErlangPid p = new OtpErlangPid(node, pidCount, serial, - creation); + final OtpErlangPid p = new OtpErlangPid(node, pidCount, serial, + creation); - pidCount++; - if (pidCount > 0x7fff) { - pidCount = 0; + pidCount++; + if (pidCount > 0x7fff) { + pidCount = 0; - serial++; - if (serial > 0x1fff) { /* 13 bits */ - serial = 0; - } - } + serial++; + if (serial > 0x1fff) { /* 13 bits */ + serial = 0; + } + } - return p; + return p; } /** @@ -120,18 +120,18 @@ public class OtpLocalNode extends AbstractNode { * information in this node. Each call to this method produces a unique * port. It may not be meaningful to create a port in a non-Erlang * environment, but this method is provided for completeness. - * + * * @return an Erlang port. 
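createPid above combines the node name with a 15-bit pid counter, a 13-bit serial and the node's creation value, bumping the serial each time the counter wraps; createPort and createRef (below) maintain their own 28-bit and 18+32+32-bit counters in the same way. A hedged usage sketch, assuming an OtpSelf (a concrete OtpLocalNode) can be constructed for a local node name; the name is illustrative:

    import java.io.IOException;
    import com.ericsson.otp.erlang.OtpErlangPid;
    import com.ericsson.otp.erlang.OtpSelf;

    public class CreatePidExample {
        public static void main(final String[] args) throws IOException {
            final OtpSelf self = new OtpSelf("pidmaker"); // illustrative node name
            final OtpErlangPid p1 = self.createPid();
            final OtpErlangPid p2 = self.createPid();
            // Each call yields a distinct pid on this node.
            System.out.println(p1 + " " + p2 + " equal=" + p1.equals(p2)); // equal=false
        }
    }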
*/ public synchronized OtpErlangPort createPort() { - final OtpErlangPort p = new OtpErlangPort(node, portCount, creation); + final OtpErlangPort p = new OtpErlangPort(node, portCount, creation); - portCount++; - if (portCount > 0xfffffff) { /* 28 bits */ - portCount = 0; - } + portCount++; + if (portCount > 0xfffffff) { /* 28 bits */ + portCount = 0; + } - return p; + return p; } /** @@ -139,23 +139,23 @@ public class OtpLocalNode extends AbstractNode { * based upon some node specific information; this method creates a * reference using the information in this node. Each call to this method * produces a unique reference. - * + * * @return an Erlang reference. */ public synchronized OtpErlangRef createRef() { - final OtpErlangRef r = new OtpErlangRef(node, refId, creation); + final OtpErlangRef r = new OtpErlangRef(node, refId, creation); - // increment ref ids (3 ints: 18 + 32 + 32 bits) - refId[0]++; - if (refId[0] > 0x3ffff) { - refId[0] = 0; + // increment ref ids (3 ints: 18 + 32 + 32 bits) + refId[0]++; + if (refId[0] > 0x3ffff) { + refId[0] = 0; - refId[1]++; - if (refId[1] == 0) { - refId[2]++; - } - } + refId[1]++; + if (refId[1] == 0) { + refId[2]++; + } + } - return r; + return r; } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMD5.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMD5.java index a5a4d86602..41be523eb2 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMD5.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMD5.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. 
- * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -46,37 +46,37 @@ class OtpMD5 { */ private final long state[] = { 0x67452301L, 0xefcdab89L, 0x98badcfeL, - 0x10325476L }; + 0x10325476L }; private final long count[] = { 0L, 0L }; private final int buffer[]; public OtpMD5() { - buffer = new int[64]; - int i; - for (i = 0; i < 64; ++i) { - buffer[i] = 0; - } + buffer = new int[64]; + int i; + for (i = 0; i < 64; ++i) { + buffer[i] = 0; + } } private int[] to_bytes(final String s) { - final char tmp[] = s.toCharArray(); - final int ret[] = new int[tmp.length]; - int i; - - for (i = 0; i < tmp.length; ++i) { - ret[i] = tmp[i] & 0xFF; - } - return ret; + final char tmp[] = s.toCharArray(); + final int ret[] = new int[tmp.length]; + int i; + + for (i = 0; i < tmp.length; ++i) { + ret[i] = tmp[i] & 0xFF; + } + return ret; } private int[] clean_bytes(final int bytes[]) { - final int ret[] = new int[bytes.length]; - int i; + final int ret[] = new int[bytes.length]; + int i; - for (i = 0; i < bytes.length; ++i) { - ret[i] = bytes[i] & 0xFF; - } - return ret; + for (i = 0; i < bytes.length; ++i) { + ret[i] = bytes[i] & 0xFF; + } + return ret; } /* @@ -84,83 +84,83 @@ class OtpMD5 { */ private long shl(final long what, final int steps) { - return what << steps & 0xFFFFFFFFL; + return what << steps & 0xFFFFFFFFL; } private long shr(final long what, final int steps) { - return what >>> steps; + return what >>> steps; } private long plus(final long a, final long b) { - return a + b & 0xFFFFFFFFL; + return a + b & 0xFFFFFFFFL; } - private long not(long x) { - return ~x & 0xFFFFFFFFL; + private long not(final long x) { + return ~x & 0xFFFFFFFFL; } - private void to_buffer(int to_start, final int[] from, int from_start, - int num) { - int ix = num; - int to_ix = to_start; - int from_ix = from_start; - while (ix-- > 0) { - buffer[to_ix++] = from[from_ix++]; - } + private void to_buffer(final int to_start, final int[] from, + final int from_start, final int num) { + int ix = num; + int to_ix = to_start; + int from_ix = from_start; + while (ix-- > 0) { + buffer[to_ix++] = from[from_ix++]; + } } private void do_update(final int bytes[]) { - int index = (int) (count[0] >>> 3 & 0x3F); - final long inlen = bytes.length; - final long addcount = shl(inlen, 3); - final long partlen = 64 - index; - int i; + int index = (int) (count[0] >>> 3 & 0x3F); + final long inlen = bytes.length; + final long addcount = shl(inlen, 3); + final long partlen = 64 - index; + int i; - count[0] = plus(count[0], addcount); + count[0] = plus(count[0], addcount); - if (count[0] < addcount) { - ++count[1]; - } + if (count[0] < addcount) { + ++count[1]; + } - count[1] = plus(count[1], shr(inlen, 29)); + count[1] = plus(count[1], shr(inlen, 29)); - // dumpstate(); + // dumpstate(); - if (inlen >= partlen) { - to_buffer(index, bytes, 0, (int) partlen); - transform(buffer, 0); + if (inlen >= partlen) { + to_buffer(index, bytes, 0, (int) partlen); + transform(buffer, 0); - for (i = (int) partlen; i + 63 < inlen; i += 64) { - transform(bytes, i); - } + for (i = (int) partlen; i + 63 < inlen; i += 64) { + transform(bytes, i); + } - index = 0; - } else { - i = 0; - } + index = 0; + } else { + i = 0; + } - /* dumpstate(); */ + /* dumpstate(); */ - to_buffer(index, bytes, i, (int) inlen - i); + to_buffer(index, bytes, i, (int) inlen - i); - /* dumpstate(); */ + /* dumpstate(); */ } @SuppressWarnings("unused") private void dumpstate() { - System.out.println("state = {" + state[0] + ", " + state[1] + ", " - + state[2] + ", " + state[3] + 
"}"); - System.out.println("count = {" + count[0] + ", " + count[1] + "}"); - System.out.print("buffer = {"); - int i; - for (i = 0; i < 64; ++i) { - if (i > 0) { - System.out.print(", "); - } - System.out.print(buffer[i]); - } - System.out.println("}"); + System.out.println("state = {" + state[0] + ", " + state[1] + ", " + + state[2] + ", " + state[3] + "}"); + System.out.println("count = {" + count[0] + ", " + count[1] + "}"); + System.out.print("buffer = {"); + int i; + for (i = 0; i < 64; ++i) { + if (i > 0) { + System.out.print(", "); + } + System.out.print(buffer[i]); + } + System.out.println("}"); } /* @@ -168,191 +168,191 @@ class OtpMD5 { */ private long F(final long x, final long y, final long z) { - return x & y | not(x) & z; + return x & y | not(x) & z; } private long G(final long x, final long y, final long z) { - return x & z | y & not(z); + return x & z | y & not(z); } private long H(final long x, final long y, final long z) { - return x ^ y ^ z; + return x ^ y ^ z; } private long I(final long x, final long y, final long z) { - return y ^ (x | not(z)); + return y ^ (x | not(z)); } private long ROTATE_LEFT(final long x, final long n) { - return shl(x, (int) n) | shr(x, (int) (32 - n)); + return shl(x, (int) n) | shr(x, (int) (32 - n)); } - private long FF(long a, final long b, final long c, final long d, - final long x, final long s, final long ac) { - long tmp = plus(a, plus(plus(F(b, c, d), x), ac)); - tmp = ROTATE_LEFT(tmp, s); - return plus(tmp, b); + private long FF(final long a, final long b, final long c, final long d, + final long x, final long s, final long ac) { + long tmp = plus(a, plus(plus(F(b, c, d), x), ac)); + tmp = ROTATE_LEFT(tmp, s); + return plus(tmp, b); } - private long GG(long a, final long b, final long c, final long d, - final long x, final long s, final long ac) { - long tmp = plus(a, plus(plus(G(b, c, d), x), ac)); - tmp = ROTATE_LEFT(tmp, s); - return plus(tmp, b); + private long GG(final long a, final long b, final long c, final long d, + final long x, final long s, final long ac) { + long tmp = plus(a, plus(plus(G(b, c, d), x), ac)); + tmp = ROTATE_LEFT(tmp, s); + return plus(tmp, b); } - private long HH(long a, final long b, final long c, final long d, - final long x, final long s, final long ac) { - long tmp = plus(a, plus(plus(H(b, c, d), x), ac)); - tmp = ROTATE_LEFT(tmp, s); - return plus(tmp, b); + private long HH(final long a, final long b, final long c, final long d, + final long x, final long s, final long ac) { + long tmp = plus(a, plus(plus(H(b, c, d), x), ac)); + tmp = ROTATE_LEFT(tmp, s); + return plus(tmp, b); } - private long II(long a, final long b, final long c, final long d, - final long x, final long s, final long ac) { - long tmp = plus(a, plus(plus(I(b, c, d), x), ac)); - tmp = ROTATE_LEFT(tmp, s); - return plus(tmp, b); + private long II(final long a, final long b, final long c, final long d, + final long x, final long s, final long ac) { + long tmp = plus(a, plus(plus(I(b, c, d), x), ac)); + tmp = ROTATE_LEFT(tmp, s); + return plus(tmp, b); } private void decode(final long output[], final int input[], - final int in_from, final int len) { - int i, j; - - for (i = 0, j = 0; j < len; i++, j += 4) { - output[i] = input[j + in_from] | shl(input[j + in_from + 1], 8) - | shl(input[j + in_from + 2], 16) - | shl(input[j + in_from + 3], 24); - } + final int in_from, final int len) { + int i, j; + + for (i = 0, j = 0; j < len; i++, j += 4) { + output[i] = input[j + in_from] | shl(input[j + in_from + 1], 8) + | shl(input[j + in_from + 
2], 16) + | shl(input[j + in_from + 3], 24); + } } private void transform(final int block[], final int from) { - long a = state[0]; - long b = state[1]; - long c = state[2]; - long d = state[3]; - final long x[] = new long[16]; - - decode(x, block, from, 64); - - a = FF(a, b, c, d, x[0], S11, 0xd76aa478L); /* 1 */ - d = FF(d, a, b, c, x[1], S12, 0xe8c7b756L); /* 2 */ - c = FF(c, d, a, b, x[2], S13, 0x242070dbL); /* 3 */ - b = FF(b, c, d, a, x[3], S14, 0xc1bdceeeL); /* 4 */ - a = FF(a, b, c, d, x[4], S11, 0xf57c0fafL); /* 5 */ - d = FF(d, a, b, c, x[5], S12, 0x4787c62aL); /* 6 */ - c = FF(c, d, a, b, x[6], S13, 0xa8304613L); /* 7 */ - b = FF(b, c, d, a, x[7], S14, 0xfd469501L); /* 8 */ - a = FF(a, b, c, d, x[8], S11, 0x698098d8L); /* 9 */ - d = FF(d, a, b, c, x[9], S12, 0x8b44f7afL); /* 10 */ - c = FF(c, d, a, b, x[10], S13, 0xffff5bb1L); /* 11 */ - b = FF(b, c, d, a, x[11], S14, 0x895cd7beL); /* 12 */ - a = FF(a, b, c, d, x[12], S11, 0x6b901122L); /* 13 */ - d = FF(d, a, b, c, x[13], S12, 0xfd987193L); /* 14 */ - c = FF(c, d, a, b, x[14], S13, 0xa679438eL); /* 15 */ - b = FF(b, c, d, a, x[15], S14, 0x49b40821L); /* 16 */ - - /* Round 2 */ - a = GG(a, b, c, d, x[1], S21, 0xf61e2562L); /* 17 */ - d = GG(d, a, b, c, x[6], S22, 0xc040b340L); /* 18 */ - c = GG(c, d, a, b, x[11], S23, 0x265e5a51L); /* 19 */ - b = GG(b, c, d, a, x[0], S24, 0xe9b6c7aaL); /* 20 */ - a = GG(a, b, c, d, x[5], S21, 0xd62f105dL); /* 21 */ - d = GG(d, a, b, c, x[10], S22, 0x2441453L); /* 22 */ - c = GG(c, d, a, b, x[15], S23, 0xd8a1e681L); /* 23 */ - b = GG(b, c, d, a, x[4], S24, 0xe7d3fbc8L); /* 24 */ - a = GG(a, b, c, d, x[9], S21, 0x21e1cde6L); /* 25 */ - d = GG(d, a, b, c, x[14], S22, 0xc33707d6L); /* 26 */ - c = GG(c, d, a, b, x[3], S23, 0xf4d50d87L); /* 27 */ - b = GG(b, c, d, a, x[8], S24, 0x455a14edL); /* 28 */ - a = GG(a, b, c, d, x[13], S21, 0xa9e3e905L); /* 29 */ - d = GG(d, a, b, c, x[2], S22, 0xfcefa3f8L); /* 30 */ - c = GG(c, d, a, b, x[7], S23, 0x676f02d9L); /* 31 */ - b = GG(b, c, d, a, x[12], S24, 0x8d2a4c8aL); /* 32 */ - - /* Round 3 */ - a = HH(a, b, c, d, x[5], S31, 0xfffa3942L); /* 33 */ - d = HH(d, a, b, c, x[8], S32, 0x8771f681L); /* 34 */ - c = HH(c, d, a, b, x[11], S33, 0x6d9d6122L); /* 35 */ - b = HH(b, c, d, a, x[14], S34, 0xfde5380cL); /* 36 */ - a = HH(a, b, c, d, x[1], S31, 0xa4beea44L); /* 37 */ - d = HH(d, a, b, c, x[4], S32, 0x4bdecfa9L); /* 38 */ - c = HH(c, d, a, b, x[7], S33, 0xf6bb4b60L); /* 39 */ - b = HH(b, c, d, a, x[10], S34, 0xbebfbc70L); /* 40 */ - a = HH(a, b, c, d, x[13], S31, 0x289b7ec6L); /* 41 */ - d = HH(d, a, b, c, x[0], S32, 0xeaa127faL); /* 42 */ - c = HH(c, d, a, b, x[3], S33, 0xd4ef3085L); /* 43 */ - b = HH(b, c, d, a, x[6], S34, 0x4881d05L); /* 44 */ - a = HH(a, b, c, d, x[9], S31, 0xd9d4d039L); /* 45 */ - d = HH(d, a, b, c, x[12], S32, 0xe6db99e5L); /* 46 */ - c = HH(c, d, a, b, x[15], S33, 0x1fa27cf8L); /* 47 */ - b = HH(b, c, d, a, x[2], S34, 0xc4ac5665L); /* 48 */ - - /* Round 4 */ - a = II(a, b, c, d, x[0], S41, 0xf4292244L); /* 49 */ - d = II(d, a, b, c, x[7], S42, 0x432aff97L); /* 50 */ - c = II(c, d, a, b, x[14], S43, 0xab9423a7L); /* 51 */ - b = II(b, c, d, a, x[5], S44, 0xfc93a039L); /* 52 */ - a = II(a, b, c, d, x[12], S41, 0x655b59c3L); /* 53 */ - d = II(d, a, b, c, x[3], S42, 0x8f0ccc92L); /* 54 */ - c = II(c, d, a, b, x[10], S43, 0xffeff47dL); /* 55 */ - b = II(b, c, d, a, x[1], S44, 0x85845dd1L); /* 56 */ - a = II(a, b, c, d, x[8], S41, 0x6fa87e4fL); /* 57 */ - d = II(d, a, b, c, x[15], S42, 0xfe2ce6e0L); /* 58 */ - c = II(c, d, a, b, x[6], S43, 
0xa3014314L); /* 59 */ - b = II(b, c, d, a, x[13], S44, 0x4e0811a1L); /* 60 */ - a = II(a, b, c, d, x[4], S41, 0xf7537e82L); /* 61 */ - d = II(d, a, b, c, x[11], S42, 0xbd3af235L); /* 62 */ - c = II(c, d, a, b, x[2], S43, 0x2ad7d2bbL); /* 63 */ - b = II(b, c, d, a, x[9], S44, 0xeb86d391L); /* 64 */ - - state[0] = plus(state[0], a); - state[1] = plus(state[1], b); - state[2] = plus(state[2], c); - state[3] = plus(state[3], d); + long a = state[0]; + long b = state[1]; + long c = state[2]; + long d = state[3]; + final long x[] = new long[16]; + + decode(x, block, from, 64); + + a = FF(a, b, c, d, x[0], S11, 0xd76aa478L); /* 1 */ + d = FF(d, a, b, c, x[1], S12, 0xe8c7b756L); /* 2 */ + c = FF(c, d, a, b, x[2], S13, 0x242070dbL); /* 3 */ + b = FF(b, c, d, a, x[3], S14, 0xc1bdceeeL); /* 4 */ + a = FF(a, b, c, d, x[4], S11, 0xf57c0fafL); /* 5 */ + d = FF(d, a, b, c, x[5], S12, 0x4787c62aL); /* 6 */ + c = FF(c, d, a, b, x[6], S13, 0xa8304613L); /* 7 */ + b = FF(b, c, d, a, x[7], S14, 0xfd469501L); /* 8 */ + a = FF(a, b, c, d, x[8], S11, 0x698098d8L); /* 9 */ + d = FF(d, a, b, c, x[9], S12, 0x8b44f7afL); /* 10 */ + c = FF(c, d, a, b, x[10], S13, 0xffff5bb1L); /* 11 */ + b = FF(b, c, d, a, x[11], S14, 0x895cd7beL); /* 12 */ + a = FF(a, b, c, d, x[12], S11, 0x6b901122L); /* 13 */ + d = FF(d, a, b, c, x[13], S12, 0xfd987193L); /* 14 */ + c = FF(c, d, a, b, x[14], S13, 0xa679438eL); /* 15 */ + b = FF(b, c, d, a, x[15], S14, 0x49b40821L); /* 16 */ + + /* Round 2 */ + a = GG(a, b, c, d, x[1], S21, 0xf61e2562L); /* 17 */ + d = GG(d, a, b, c, x[6], S22, 0xc040b340L); /* 18 */ + c = GG(c, d, a, b, x[11], S23, 0x265e5a51L); /* 19 */ + b = GG(b, c, d, a, x[0], S24, 0xe9b6c7aaL); /* 20 */ + a = GG(a, b, c, d, x[5], S21, 0xd62f105dL); /* 21 */ + d = GG(d, a, b, c, x[10], S22, 0x2441453L); /* 22 */ + c = GG(c, d, a, b, x[15], S23, 0xd8a1e681L); /* 23 */ + b = GG(b, c, d, a, x[4], S24, 0xe7d3fbc8L); /* 24 */ + a = GG(a, b, c, d, x[9], S21, 0x21e1cde6L); /* 25 */ + d = GG(d, a, b, c, x[14], S22, 0xc33707d6L); /* 26 */ + c = GG(c, d, a, b, x[3], S23, 0xf4d50d87L); /* 27 */ + b = GG(b, c, d, a, x[8], S24, 0x455a14edL); /* 28 */ + a = GG(a, b, c, d, x[13], S21, 0xa9e3e905L); /* 29 */ + d = GG(d, a, b, c, x[2], S22, 0xfcefa3f8L); /* 30 */ + c = GG(c, d, a, b, x[7], S23, 0x676f02d9L); /* 31 */ + b = GG(b, c, d, a, x[12], S24, 0x8d2a4c8aL); /* 32 */ + + /* Round 3 */ + a = HH(a, b, c, d, x[5], S31, 0xfffa3942L); /* 33 */ + d = HH(d, a, b, c, x[8], S32, 0x8771f681L); /* 34 */ + c = HH(c, d, a, b, x[11], S33, 0x6d9d6122L); /* 35 */ + b = HH(b, c, d, a, x[14], S34, 0xfde5380cL); /* 36 */ + a = HH(a, b, c, d, x[1], S31, 0xa4beea44L); /* 37 */ + d = HH(d, a, b, c, x[4], S32, 0x4bdecfa9L); /* 38 */ + c = HH(c, d, a, b, x[7], S33, 0xf6bb4b60L); /* 39 */ + b = HH(b, c, d, a, x[10], S34, 0xbebfbc70L); /* 40 */ + a = HH(a, b, c, d, x[13], S31, 0x289b7ec6L); /* 41 */ + d = HH(d, a, b, c, x[0], S32, 0xeaa127faL); /* 42 */ + c = HH(c, d, a, b, x[3], S33, 0xd4ef3085L); /* 43 */ + b = HH(b, c, d, a, x[6], S34, 0x4881d05L); /* 44 */ + a = HH(a, b, c, d, x[9], S31, 0xd9d4d039L); /* 45 */ + d = HH(d, a, b, c, x[12], S32, 0xe6db99e5L); /* 46 */ + c = HH(c, d, a, b, x[15], S33, 0x1fa27cf8L); /* 47 */ + b = HH(b, c, d, a, x[2], S34, 0xc4ac5665L); /* 48 */ + + /* Round 4 */ + a = II(a, b, c, d, x[0], S41, 0xf4292244L); /* 49 */ + d = II(d, a, b, c, x[7], S42, 0x432aff97L); /* 50 */ + c = II(c, d, a, b, x[14], S43, 0xab9423a7L); /* 51 */ + b = II(b, c, d, a, x[5], S44, 0xfc93a039L); /* 52 */ + a = II(a, b, c, d, x[12], S41, 0x655b59c3L); /* 
53 */ + d = II(d, a, b, c, x[3], S42, 0x8f0ccc92L); /* 54 */ + c = II(c, d, a, b, x[10], S43, 0xffeff47dL); /* 55 */ + b = II(b, c, d, a, x[1], S44, 0x85845dd1L); /* 56 */ + a = II(a, b, c, d, x[8], S41, 0x6fa87e4fL); /* 57 */ + d = II(d, a, b, c, x[15], S42, 0xfe2ce6e0L); /* 58 */ + c = II(c, d, a, b, x[6], S43, 0xa3014314L); /* 59 */ + b = II(b, c, d, a, x[13], S44, 0x4e0811a1L); /* 60 */ + a = II(a, b, c, d, x[4], S41, 0xf7537e82L); /* 61 */ + d = II(d, a, b, c, x[11], S42, 0xbd3af235L); /* 62 */ + c = II(c, d, a, b, x[2], S43, 0x2ad7d2bbL); /* 63 */ + b = II(b, c, d, a, x[9], S44, 0xeb86d391L); /* 64 */ + + state[0] = plus(state[0], a); + state[1] = plus(state[1], b); + state[2] = plus(state[2], c); + state[3] = plus(state[3], d); } public void update(final int bytes[]) { - do_update(clean_bytes(bytes)); + do_update(clean_bytes(bytes)); } public void update(final String s) { - do_update(to_bytes(s)); + do_update(to_bytes(s)); } private int[] encode(final long[] input, final int len) { - final int output[] = new int[len]; - int i, j; - for (i = 0, j = 0; j < len; i++, j += 4) { - output[j] = (int) (input[i] & 0xff); - output[j + 1] = (int) (input[i] >>> 8 & 0xff); - output[j + 2] = (int) (input[i] >>> 16 & 0xff); - output[j + 3] = (int) (input[i] >>> 24 & 0xff); - } - return output; + final int output[] = new int[len]; + int i, j; + for (i = 0, j = 0; j < len; i++, j += 4) { + output[j] = (int) (input[i] & 0xff); + output[j + 1] = (int) (input[i] >>> 8 & 0xff); + output[j + 2] = (int) (input[i] >>> 16 & 0xff); + output[j + 3] = (int) (input[i] >>> 24 & 0xff); + } + return output; } public int[] final_bytes() { - final int bits[] = encode(count, 8); - int index, padlen; - int padding[], i; - int[] digest; + final int bits[] = encode(count, 8); + int index, padlen; + int padding[], i; + int[] digest; - index = (int) (count[0] >>> 3 & 0x3f); - padlen = index < 56 ? 56 - index : 120 - index; - /* padlen > 0 */ - padding = new int[padlen]; - padding[0] = 0x80; - for (i = 1; i < padlen; ++i) { - padding[i] = 0; - } + index = (int) (count[0] >>> 3 & 0x3f); + padlen = index < 56 ? 56 - index : 120 - index; + /* padlen > 0 */ + padding = new int[padlen]; + padding[0] = 0x80; + for (i = 1; i < padlen; ++i) { + padding[i] = 0; + } - do_update(padding); + do_update(padding); - do_update(bits); + do_update(bits); - digest = encode(state, 16); + digest = encode(state, 16); - return digest; + return digest; } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMbox.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMbox.java index fc592c222c..872dba6dab 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMbox.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMbox.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2012. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. 
- * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -23,7 +23,7 @@ package com.ericsson.otp.erlang; * Provides a simple mechanism for exchanging messages with Erlang processes or * other instances of this class. * </p> - * + * * <p> * Each mailbox is associated with a unique {@link OtpErlangPid pid} that * contains information necessary for delivery of messages. When sending @@ -33,7 +33,7 @@ package com.ericsson.otp.erlang; * message contents. The sender can determine his own pid by calling * {@link #self() self()}. * </p> - * + * * <p> * Mailboxes can be named, either at creation or later. Messages can be sent to * named mailboxes and named Erlang processes without knowing the @@ -41,7 +41,7 @@ package com.ericsson.otp.erlang; * order to set up initial communication between parts of an application. Each * mailbox can have at most one name. * </p> - * + * * <p> * Since this class was intended for communication with Erlang, all of the send * methods take {@link OtpErlangObject OtpErlangObject} arguments. However this @@ -49,14 +49,14 @@ package com.ericsson.otp.erlang; * implement one of java.io.Serializable or java.io.Externalizable) by * encapsulating the object in a {@link OtpErlangBinary OtpErlangBinary}. * </p> - * + * * <p> * Messages to remote nodes are externalized for transmission, and as a result * the recipient receives a <b>copy</b> of the original Java object. To ensure * consistent behaviour when messages are sent between local mailboxes, such * messages are cloned before delivery. * </p> - * + * * <p> * Additionally, mailboxes can be linked in much the same way as Erlang * processes. If a link is active when a mailbox is {@link #close closed}, any @@ -68,14 +68,14 @@ package com.ericsson.otp.erlang; * close mailboxes if you are using links instead of relying on finalization to * notify other parties in a timely manner. * </p> - * + * * <p> * When retrieving messages from a mailbox that has received an exit signal, an * {@link OtpErlangExit OtpErlangExit} exception will be raised. Note that the * exception is queued in the mailbox along with other messages, and will not be * raised until it reaches the head of the queue and is about to be retrieved. * </p> - * + * */ public class OtpMbox { OtpNode home; @@ -87,17 +87,17 @@ public class OtpMbox { // package constructor: called by OtpNode:createMbox(name) // to create a named mbox OtpMbox(final OtpNode home, final OtpErlangPid self, final String name) { - this.self = self; - this.home = home; - this.name = name; - queue = new GenericQueue(); - links = new Links(10); + this.self = self; + this.home = home; + this.name = name; + queue = new GenericQueue(); + links = new Links(10); } // package constructor: called by OtpNode:createMbox() // to create an anonymous OtpMbox(final OtpNode home, final OtpErlangPid self) { - this(home, self, null); + this(home, self, null); } /** @@ -105,18 +105,18 @@ public class OtpMbox { * Get the identifying {@link OtpErlangPid pid} associated with this * mailbox. * </p> - * + * * <p> * The {@link OtpErlangPid pid} associated with this mailbox uniquely * identifies the mailbox and can be used to address the mailbox. You can * send the {@link OtpErlangPid pid} to a remote communicating part so that * he can know where to send his response. * </p> - * + * * @return the self pid for this mailbox. 
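A hedged sketch of the local round trip the class comment above describes, with two mailboxes on one OtpNode; the node and mailbox names are illustrative, and constructing an OtpNode is assumed to require a reachable epmd daemon for registration:

    import com.ericsson.otp.erlang.OtpErlangAtom;
    import com.ericsson.otp.erlang.OtpErlangObject;
    import com.ericsson.otp.erlang.OtpMbox;
    import com.ericsson.otp.erlang.OtpNode;

    public class MboxRoundTrip {
        public static void main(final String[] args) throws Exception {
            final OtpNode node = new OtpNode("mboxdemo"); // illustrative node name
            final OtpMbox ping = node.createMbox("ping");
            final OtpMbox pong = node.createMbox("pong");

            ping.send(pong.self(), new OtpErlangAtom("hello"));
            final OtpErlangObject msg = pong.receive(1000); // null on timeout
            System.out.println(msg); // hello

            node.close();
        }
    }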
*/ public OtpErlangPid self() { - return self; + return self; } /** @@ -127,305 +127,305 @@ public class OtpMbox { * name; if the mailbox already had a name, calling this method will * supercede that name. * </p> - * + * * @param aname - * the name to register for the mailbox. Specify null to - * unregister the existing name from this mailbox. - * + * the name to register for the mailbox. Specify null to + * unregister the existing name from this mailbox. + * * @return true if the name was available, or false otherwise. */ public synchronized boolean registerName(final String aname) { - return home.registerName(aname, this); + return home.registerName(aname, this); } /** * Get the registered name of this mailbox. - * + * * @return the registered name of this mailbox, or null if the mailbox had * no registered name. */ public String getName() { - return name; + return name; } /** * Block until a message arrives for this mailbox. - * + * * @return an {@link OtpErlangObject OtpErlangObject} representing the body * of the next message waiting in this mailbox. - * + * * @exception OtpErlangDecodeException - * if the message can not be decoded. - * + * if the message can not be decoded. + * * @exception OtpErlangExit - * if a linked {@link OtpErlangPid pid} has exited or has - * sent an exit signal to this mailbox. + * if a linked {@link OtpErlangPid pid} has exited or has + * sent an exit signal to this mailbox. */ public OtpErlangObject receive() throws OtpErlangExit, - OtpErlangDecodeException { - try { - return receiveMsg().getMsg(); - } catch (final OtpErlangExit e) { - throw e; - } catch (final OtpErlangDecodeException f) { - throw f; - } + OtpErlangDecodeException { + try { + return receiveMsg().getMsg(); + } catch (final OtpErlangExit e) { + throw e; + } catch (final OtpErlangDecodeException f) { + throw f; + } } /** * Wait for a message to arrive for this mailbox. - * + * * @param timeout - * the time, in milliseconds, to wait for a message before - * returning null. - * + * the time, in milliseconds, to wait for a message before + * returning null. + * * @return an {@link OtpErlangObject OtpErlangObject} representing the body * of the next message waiting in this mailbox. - * + * * @exception OtpErlangDecodeException - * if the message can not be decoded. - * + * if the message can not be decoded. + * * @exception OtpErlangExit - * if a linked {@link OtpErlangPid pid} has exited or has - * sent an exit signal to this mailbox. + * if a linked {@link OtpErlangPid pid} has exited or has + * sent an exit signal to this mailbox. */ public OtpErlangObject receive(final long timeout) throws OtpErlangExit, - OtpErlangDecodeException { - try { - final OtpMsg m = receiveMsg(timeout); - if (m != null) { - return m.getMsg(); - } - } catch (final OtpErlangExit e) { - throw e; - } catch (final OtpErlangDecodeException f) { - throw f; - } catch (final InterruptedException g) { - } - return null; + OtpErlangDecodeException { + try { + final OtpMsg m = receiveMsg(timeout); + if (m != null) { + return m.getMsg(); + } + } catch (final OtpErlangExit e) { + throw e; + } catch (final OtpErlangDecodeException f) { + throw f; + } catch (final InterruptedException g) { + } + return null; } /** * Block until a message arrives for this mailbox. - * + * * @return a byte array representing the still-encoded body of the next * message waiting in this mailbox. - * + * * @exception OtpErlangExit - * if a linked {@link OtpErlangPid pid} has exited or has - * sent an exit signal to this mailbox. 
- * + * if a linked {@link OtpErlangPid pid} has exited or has + * sent an exit signal to this mailbox. + * */ public OtpInputStream receiveBuf() throws OtpErlangExit { - return receiveMsg().getMsgBuf(); + return receiveMsg().getMsgBuf(); } /** * Wait for a message to arrive for this mailbox. - * + * * @param timeout - * the time, in milliseconds, to wait for a message before - * returning null. - * + * the time, in milliseconds, to wait for a message before + * returning null. + * * @return a byte array representing the still-encoded body of the next * message waiting in this mailbox. - * + * * @exception OtpErlangExit - * if a linked {@link OtpErlangPid pid} has exited or has - * sent an exit signal to this mailbox. - * + * if a linked {@link OtpErlangPid pid} has exited or has + * sent an exit signal to this mailbox. + * * @exception InterruptedException - * if no message if the method times out before a message - * becomes available. + * if no message if the method times out before a message + * becomes available. */ public OtpInputStream receiveBuf(final long timeout) - throws InterruptedException, OtpErlangExit { - final OtpMsg m = receiveMsg(timeout); - if (m != null) { - return m.getMsgBuf(); - } + throws InterruptedException, OtpErlangExit { + final OtpMsg m = receiveMsg(timeout); + if (m != null) { + return m.getMsgBuf(); + } - return null; + return null; } /** * Block until a message arrives for this mailbox. - * + * * @return an {@link OtpMsg OtpMsg} containing the header information as * well as the body of the next message waiting in this mailbox. - * + * * @exception OtpErlangExit - * if a linked {@link OtpErlangPid pid} has exited or has - * sent an exit signal to this mailbox. - * + * if a linked {@link OtpErlangPid pid} has exited or has + * sent an exit signal to this mailbox. + * */ public OtpMsg receiveMsg() throws OtpErlangExit { - final OtpMsg m = (OtpMsg) queue.get(); - - switch (m.type()) { - case OtpMsg.exitTag: - case OtpMsg.exit2Tag: - try { - final OtpErlangObject o = m.getMsg(); - throw new OtpErlangExit(o, m.getSenderPid()); - } catch (final OtpErlangDecodeException e) { - throw new OtpErlangExit("unknown", m.getSenderPid()); - } - - default: - return m; - } + final OtpMsg m = (OtpMsg) queue.get(); + + switch (m.type()) { + case OtpMsg.exitTag: + case OtpMsg.exit2Tag: + try { + final OtpErlangObject o = m.getMsg(); + throw new OtpErlangExit(o, m.getSenderPid()); + } catch (final OtpErlangDecodeException e) { + throw new OtpErlangExit("unknown", m.getSenderPid()); + } + + default: + return m; + } } /** * Wait for a message to arrive for this mailbox. - * + * * @param timeout - * the time, in milliseconds, to wait for a message. - * + * the time, in milliseconds, to wait for a message. + * * @return an {@link OtpMsg OtpMsg} containing the header information as * well as the body of the next message waiting in this mailbox. - * + * * @exception OtpErlangExit - * if a linked {@link OtpErlangPid pid} has exited or has - * sent an exit signal to this mailbox. - * + * if a linked {@link OtpErlangPid pid} has exited or has + * sent an exit signal to this mailbox. + * * @exception InterruptedException - * if no message if the method times out before a message - * becomes available. + * if no message if the method times out before a message + * becomes available. 
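As the Javadoc above notes, the timed receive() returns null when nothing arrives in time and raises OtpErlangExit when a linked pid has exited. A minimal, hypothetical consumer sketch; the node name "demo@localhost" and the mailbox name "listener" are invented for illustration:

    import com.ericsson.otp.erlang.*;

    public class ReceiveDemo {
        public static void main(final String[] args) throws Exception {
            // assumed names, for illustration only
            final OtpNode node = new OtpNode("demo@localhost");
            final OtpMbox mbox = node.createMbox("listener");
            try {
                // wait at most five seconds for one message
                final OtpErlangObject msg = mbox.receive(5000);
                if (msg == null) {
                    System.out.println("timed out, no message");
                } else {
                    System.out.println("got: " + msg);
                }
            } catch (final OtpErlangExit e) {
                System.out.println("a linked process exited: " + e);
            } catch (final OtpErlangDecodeException e) {
                System.out.println("could not decode message: " + e.getMessage());
            } finally {
                node.close();
            }
        }
    }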
*/ public OtpMsg receiveMsg(final long timeout) throws InterruptedException, - OtpErlangExit { - final OtpMsg m = (OtpMsg) queue.get(timeout); - - if (m == null) { - return null; - } - - switch (m.type()) { - case OtpMsg.exitTag: - case OtpMsg.exit2Tag: - try { - final OtpErlangObject o = m.getMsg(); - throw new OtpErlangExit(o, m.getSenderPid()); - } catch (final OtpErlangDecodeException e) { - throw new OtpErlangExit("unknown", m.getSenderPid()); - } - - default: - return m; - } + OtpErlangExit { + final OtpMsg m = (OtpMsg) queue.get(timeout); + + if (m == null) { + return null; + } + + switch (m.type()) { + case OtpMsg.exitTag: + case OtpMsg.exit2Tag: + try { + final OtpErlangObject o = m.getMsg(); + throw new OtpErlangExit(o, m.getSenderPid()); + } catch (final OtpErlangDecodeException e) { + throw new OtpErlangExit("unknown", m.getSenderPid()); + } + + default: + return m; + } } /** * Send a message to a remote {@link OtpErlangPid pid}, representing either * another {@link OtpMbox mailbox} or an Erlang process. - * + * * @param to - * the {@link OtpErlangPid pid} identifying the intended - * recipient of the message. - * + * the {@link OtpErlangPid pid} identifying the intended + * recipient of the message. + * * @param msg - * the body of the message to send. - * + * the body of the message to send. + * */ public void send(final OtpErlangPid to, final OtpErlangObject msg) { - try { - final String node = to.node(); - if (node.equals(home.node())) { - home.deliver(new OtpMsg(to, (OtpErlangObject) msg.clone())); - } else { - final OtpCookedConnection conn = home.getConnection(node); - if (conn == null) { - return; - } - conn.send(self, to, msg); - } - } catch (final Exception e) { - } + try { + final String node = to.node(); + if (node.equals(home.node())) { + home.deliver(new OtpMsg(to, (OtpErlangObject) msg.clone())); + } else { + final OtpCookedConnection conn = home.getConnection(node); + if (conn == null) { + return; + } + conn.send(self, to, msg); + } + } catch (final Exception e) { + } } /** * Send a message to a named mailbox created from the same node as this * mailbox. - * + * * @param aname - * the registered name of recipient mailbox. - * + * the registered name of recipient mailbox. + * * @param msg - * the body of the message to send. - * + * the body of the message to send. + * */ public void send(final String aname, final OtpErlangObject msg) { - home.deliver(new OtpMsg(self, aname, (OtpErlangObject) msg.clone())); + home.deliver(new OtpMsg(self, aname, (OtpErlangObject) msg.clone())); } /** * Send a message to a named mailbox created from another node. - * + * * @param aname - * the registered name of recipient mailbox. - * + * the registered name of recipient mailbox. + * * @param node - * the name of the remote node where the recipient mailbox is - * registered. - * + * the name of the remote node where the recipient mailbox is + * registered. + * * @param msg - * the body of the message to send. - * + * the body of the message to send. 
+ * */ public void send(final String aname, final String node, - final OtpErlangObject msg) { - try { - final String currentNode = home.node(); - if (node.equals(currentNode)) { - send(aname, msg); - } else if (node.indexOf('@', 0) < 0 - && node.equals(currentNode.substring(0, currentNode - .indexOf('@', 0)))) { - send(aname, msg); - } else { - // other node - final OtpCookedConnection conn = home.getConnection(node); - if (conn == null) { - return; - } - conn.send(self, aname, msg); - } - } catch (final Exception e) { - } + final OtpErlangObject msg) { + try { + final String currentNode = home.node(); + if (node.equals(currentNode)) { + send(aname, msg); + } else if (node.indexOf('@', 0) < 0 + && node.equals(currentNode.substring(0, + currentNode.indexOf('@', 0)))) { + send(aname, msg); + } else { + // other node + final OtpCookedConnection conn = home.getConnection(node); + if (conn == null) { + return; + } + conn.send(self, aname, msg); + } + } catch (final Exception e) { + } } /** * Close this mailbox with the given reason. - * + * * <p> * After this operation, the mailbox will no longer be able to receive * messages. Any delivered but as yet unretrieved messages can still be * retrieved however. * </p> - * + * * <p> * If there are links from this mailbox to other {@link OtpErlangPid pids}, * they will be broken when this method is called and exit signals will be * sent. * </p> - * + * * @param reason - * an Erlang term describing the reason for the exit. + * an Erlang term describing the reason for the exit. */ public void exit(final OtpErlangObject reason) { - home.closeMbox(this, reason); + home.closeMbox(this, reason); } /** * Equivalent to <code>exit(new OtpErlangAtom(reason))</code>. - * + * * @see #exit(OtpErlangObject) */ public void exit(final String reason) { - exit(new OtpErlangAtom(reason)); + exit(new OtpErlangAtom(reason)); } /** @@ -434,17 +434,17 @@ public class OtpMbox { * does not cause any links to be broken, except indirectly if the remote * {@link OtpErlangPid pid} exits as a result of this exit signal. * </p> - * + * * @param to - * the {@link OtpErlangPid pid} to which the exit signal - * should be sent. - * + * the {@link OtpErlangPid pid} to which the exit signal should + * be sent. + * * @param reason - * an Erlang term indicating the reason for the exit. + * an Erlang term indicating the reason for the exit. */ // it's called exit, but it sends exit2 public void exit(final OtpErlangPid to, final OtpErlangObject reason) { - exit(2, to, reason); + exit(2, to, reason); } /** @@ -452,38 +452,38 @@ public class OtpMbox { * Equivalent to <code>exit(to, new * OtpErlangAtom(reason))</code>. * </p> - * + * * @see #exit(OtpErlangPid, OtpErlangObject) */ public void exit(final OtpErlangPid to, final String reason) { - exit(to, new OtpErlangAtom(reason)); + exit(to, new OtpErlangAtom(reason)); } // this function used internally when "process" dies // since Erlang discerns between exit and exit/2. 
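The three send() variants above address a pid, a locally registered name, or a name on another node. A hedged sketch of the last form; the remote node "server@otherhost" and the registered name "echo" are assumptions, not names from this patch:

    import com.ericsson.otp.erlang.*;

    public class SendDemo {
        public static void main(final String[] args) throws Exception {
            final OtpNode node = new OtpNode("client@localhost"); // assumed name
            final OtpMbox mbox = node.createMbox();

            // {self(), "hello"}: include our pid so the receiver can reply
            final OtpErlangObject[] elems = {
                mbox.self(),
                new OtpErlangString("hello")
            };
            mbox.send("echo", "server@otherhost", new OtpErlangTuple(elems));

            node.close();
        }
    }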
private void exit(final int arity, final OtpErlangPid to, - final OtpErlangObject reason) { - try { - final String node = to.node(); - if (node.equals(home.node())) { - home.deliver(new OtpMsg(OtpMsg.exitTag, self, to, reason)); - } else { - final OtpCookedConnection conn = home.getConnection(node); - if (conn == null) { - return; - } - switch (arity) { - case 1: - conn.exit(self, to, reason); - break; - - case 2: - conn.exit2(self, to, reason); - break; - } - } - } catch (final Exception e) { - } + final OtpErlangObject reason) { + try { + final String node = to.node(); + if (node.equals(home.node())) { + home.deliver(new OtpMsg(OtpMsg.exitTag, self, to, reason)); + } else { + final OtpCookedConnection conn = home.getConnection(node); + if (conn == null) { + return; + } + switch (arity) { + case 1: + conn.exit(self, to, reason); + break; + + case 2: + conn.exit2(self, to, reason); + break; + } + } + } catch (final Exception e) { + } } /** @@ -492,7 +492,7 @@ public class OtpMbox { * this method multiple times will not result in more than one link being * created. * </p> - * + * * <p> * If the remote process subsequently exits or the mailbox is closed, a * subsequent attempt to retrieve a message through this mailbox will cause @@ -500,42 +500,42 @@ public class OtpMbox { * if the sending mailbox is closed, the linked mailbox or process will * receive an exit signal. * </p> - * + * * <p> * If the remote process cannot be reached in order to set the link, the * exception is raised immediately. * </p> - * + * * @param to - * the {@link OtpErlangPid pid} representing the object to - * link to. - * + * the {@link OtpErlangPid pid} representing the object to link + * to. + * * @exception OtpErlangExit - * if the {@link OtpErlangPid pid} referred to does not - * exist or could not be reached. - * + * if the {@link OtpErlangPid pid} referred to does not exist + * or could not be reached. + * */ public void link(final OtpErlangPid to) throws OtpErlangExit { - try { - final String node = to.node(); - if (node.equals(home.node())) { - if (!home.deliver(new OtpMsg(OtpMsg.linkTag, self, to))) { - throw new OtpErlangExit("noproc", to); - } - } else { - final OtpCookedConnection conn = home.getConnection(node); - if (conn != null) { - conn.link(self, to); - } else { - throw new OtpErlangExit("noproc", to); - } - } - } catch (final OtpErlangExit e) { - throw e; - } catch (final Exception e) { - } - - links.addLink(self, to); + try { + final String node = to.node(); + if (node.equals(home.node())) { + if (!home.deliver(new OtpMsg(OtpMsg.linkTag, self, to))) { + throw new OtpErlangExit("noproc", to); + } + } else { + final OtpCookedConnection conn = home.getConnection(node); + if (conn != null) { + conn.link(self, to); + } else { + throw new OtpErlangExit("noproc", to); + } + } + } catch (final OtpErlangExit e) { + throw e; + } catch (final Exception e) { + } + + links.addLink(self, to); } /** @@ -545,58 +545,58 @@ public class OtpMbox { * this method once will remove all links between this mailbox and the * remote {@link OtpErlangPid pid}. * </p> - * + * * @param to - * the {@link OtpErlangPid pid} representing the object to - * unlink from. - * + * the {@link OtpErlangPid pid} representing the object to unlink + * from. 
+ * */ public void unlink(final OtpErlangPid to) { - links.removeLink(self, to); - - try { - final String node = to.node(); - if (node.equals(home.node())) { - home.deliver(new OtpMsg(OtpMsg.unlinkTag, self, to)); - } else { - final OtpCookedConnection conn = home.getConnection(node); - if (conn != null) { - conn.unlink(self, to); - } - } - } catch (final Exception e) { - } + links.removeLink(self, to); + + try { + final String node = to.node(); + if (node.equals(home.node())) { + home.deliver(new OtpMsg(OtpMsg.unlinkTag, self, to)); + } else { + final OtpCookedConnection conn = home.getConnection(node); + if (conn != null) { + conn.unlink(self, to); + } + } + } catch (final Exception e) { + } } /** * <p> * Create a connection to a remote node. * </p> - * + * * <p> * Strictly speaking, this method is not necessary simply to set up a * connection, since connections are created automatically first time a * message is sent to a {@link OtpErlangPid pid} on the remote node. * </p> - * + * * <p> * This method makes it possible to wait for a node to come up, however, or * check that a node is still alive. * </p> - * + * * <p> * This method calls a method with the same name in {@link OtpNode#ping * Otpnode} but is provided here for convenience. * </p> - * + * * @param node - * the name of the node to ping. - * + * the name of the node to ping. + * * @param timeout - * the time, in milliseconds, before reporting failure. + * the time, in milliseconds, before reporting failure. */ public boolean ping(final String node, final long timeout) { - return home.ping(node, timeout); + return home.ping(node, timeout); } /** @@ -604,78 +604,78 @@ public class OtpMbox { * Get a list of all known registered names on the same {@link OtpNode node} * as this mailbox. * </p> - * + * * <p> * This method calls a method with the same name in {@link OtpNode#getNames * Otpnode} but is provided here for convenience. * </p> - * + * * @return an array of Strings containing all registered names on this * {@link OtpNode node}. */ public String[] getNames() { - return home.getNames(); + return home.getNames(); } /** * Determine the {@link OtpErlangPid pid} corresponding to a registered name * on this {@link OtpNode node}. - * + * * <p> * This method calls a method with the same name in {@link OtpNode#whereis * Otpnode} but is provided here for convenience. * </p> - * + * * @return the {@link OtpErlangPid pid} corresponding to the registered * name, or null if the name is not known on this node. */ public OtpErlangPid whereis(final String aname) { - return home.whereis(aname); + return home.whereis(aname); } /** * Close this mailbox. - * + * * <p> * After this operation, the mailbox will no longer be able to receive * messages. Any delivered but as yet unretrieved messages can still be * retrieved however. * </p> - * + * * <p> * If there are links from this mailbox to other {@link OtpErlangPid pids}, * they will be broken when this method is called and exit signals with * reason 'normal' will be sent. * </p> - * + * * <p> * This is equivalent to {@link #exit(String) exit("normal")}. * </p> */ public void close() { - home.closeMbox(this); + home.closeMbox(this); } @Override protected void finalize() { - close(); - queue.flush(); + close(); + queue.flush(); } /** * Determine if two mailboxes are equal. - * + * * @return true if both Objects are mailboxes with the same identifying * {@link OtpErlangPid pids}. 
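Links work as described above: link() raises OtpErlangExit right away if the remote pid cannot be reached, and a later exit of the linked process is delivered through the mailbox. A speculative helper sketch; the peer pid is assumed to have been obtained elsewhere, for example via whereis() or from a received message:

    import com.ericsson.otp.erlang.*;

    public class LinkDemo {
        // try to link; returns false if the pid does not exist or is unreachable
        static boolean linkTo(final OtpMbox mbox, final OtpErlangPid peer) {
            try {
                mbox.link(peer);
                return true;
            } catch (final OtpErlangExit e) {
                System.out.println("could not link: " + e);
                return false;
            }
        }

        // remove all links between the mailbox and the peer
        static void unlinkFrom(final OtpMbox mbox, final OtpErlangPid peer) {
            mbox.unlink(peer);
        }
    }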
*/ @Override public boolean equals(final Object o) { - if (!(o instanceof OtpMbox)) { - return false; - } + if (!(o instanceof OtpMbox)) { + return false; + } - final OtpMbox m = (OtpMbox) o; - return m.self.equals(self); + final OtpMbox m = (OtpMbox) o; + return m.self.equals(self); } @Override @@ -685,43 +685,43 @@ public class OtpMbox { /* * called by OtpNode to deliver message to this mailbox. - * + * * About exit and exit2: both cause exception to be raised upon receive(). * However exit (not 2) causes any link to be removed as well, while exit2 * leaves any links intact. */ void deliver(final OtpMsg m) { - switch (m.type()) { - case OtpMsg.linkTag: - links.addLink(self, m.getSenderPid()); - break; - - case OtpMsg.unlinkTag: - links.removeLink(self, m.getSenderPid()); - break; - - case OtpMsg.exitTag: - links.removeLink(self, m.getSenderPid()); - queue.put(m); - break; - - case OtpMsg.exit2Tag: - default: - queue.put(m); - break; - } + switch (m.type()) { + case OtpMsg.linkTag: + links.addLink(self, m.getSenderPid()); + break; + + case OtpMsg.unlinkTag: + links.removeLink(self, m.getSenderPid()); + break; + + case OtpMsg.exitTag: + links.removeLink(self, m.getSenderPid()); + queue.put(m); + break; + + case OtpMsg.exit2Tag: + default: + queue.put(m); + break; + } } // used to break all known links to this mbox void breakLinks(final OtpErlangObject reason) { - final Link[] l = links.clearLinks(); + final Link[] l = links.clearLinks(); - if (l != null) { - final int len = l.length; + if (l != null) { + final int len = l.length; - for (int i = 0; i < len; i++) { - exit(1, l[i].remote(), reason); - } - } + for (int i = 0; i < len; i++) { + exit(1, l[i].remote(), reason); + } + } } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMsg.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMsg.java index 7c5bc69361..fb750d8afe 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMsg.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMsg.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2010. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -22,23 +22,25 @@ package com.ericsson.otp.erlang; * <p> * Provides a carrier for Erlang messages. * </p> - * + * * <p> * Instances of this class are created to package header and payload information * in received Erlang messages so that the recipient can obtain both parts with * a single call to {@link OtpMbox#receiveMsg receiveMsg()}. * </p> - * + * * <p> - * The header information that is available is as follows: <ul> - * <li> a tag indicating the type of message - * <li> the intended recipient of the message, either as a - * {@link OtpErlangPid pid} or as a String, but never both. - * <li> (sometimes) the sender of the message. 
Due to some eccentric + * The header information that is available is as follows: + * <ul> + * <li>a tag indicating the type of message + * <li>the intended recipient of the message, either as a {@link OtpErlangPid + * pid} or as a String, but never both. + * <li>(sometimes) the sender of the message. Due to some eccentric * characteristics of the Erlang distribution protocol, not all messages have * information about the sending process. In particular, only messages whose tag - * is {@link OtpMsg#regSendTag regSendTag} contain sender information. </ul> - * + * is {@link OtpMsg#regSendTag regSendTag} contain sender information. + * </ul> + * * <p> * Message are sent using the Erlang external format (see separate * documentation). When a message is received and delivered to the recipient @@ -68,87 +70,87 @@ public class OtpMsg { // send has receiver pid but no sender information OtpMsg(final OtpErlangPid to, final OtpInputStream paybuf) { - tag = sendTag; - from = null; - this.to = to; - toName = null; - this.paybuf = paybuf; - payload = null; + tag = sendTag; + from = null; + this.to = to; + toName = null; + this.paybuf = paybuf; + payload = null; } // send has receiver pid but no sender information OtpMsg(final OtpErlangPid to, final OtpErlangObject payload) { - tag = sendTag; - from = null; - this.to = to; - toName = null; - paybuf = null; - this.payload = payload; + tag = sendTag; + from = null; + this.to = to; + toName = null; + paybuf = null; + this.payload = payload; } // send_reg has sender pid and receiver name OtpMsg(final OtpErlangPid from, final String toName, - final OtpInputStream paybuf) { - tag = regSendTag; - this.from = from; - this.toName = toName; - to = null; - this.paybuf = paybuf; - payload = null; + final OtpInputStream paybuf) { + tag = regSendTag; + this.from = from; + this.toName = toName; + to = null; + this.paybuf = paybuf; + payload = null; } // send_reg has sender pid and receiver name OtpMsg(final OtpErlangPid from, final String toName, - final OtpErlangObject payload) { - tag = regSendTag; - this.from = from; - this.toName = toName; - to = null; - paybuf = null; - this.payload = payload; + final OtpErlangObject payload) { + tag = regSendTag; + this.from = from; + this.toName = toName; + to = null; + paybuf = null; + this.payload = payload; } // exit (etc) has from, to, reason OtpMsg(final int tag, final OtpErlangPid from, final OtpErlangPid to, - final OtpErlangObject reason) { - this.tag = tag; - this.from = from; - this.to = to; - paybuf = null; - payload = reason; + final OtpErlangObject reason) { + this.tag = tag; + this.from = from; + this.to = to; + paybuf = null; + payload = reason; } // special case when reason is an atom (i.e. 
most of the time) OtpMsg(final int tag, final OtpErlangPid from, final OtpErlangPid to, - final String reason) { - this.tag = tag; - this.from = from; - this.to = to; - paybuf = null; - payload = new OtpErlangAtom(reason); + final String reason) { + this.tag = tag; + this.from = from; + this.to = to; + paybuf = null; + payload = new OtpErlangAtom(reason); } // other message types (link, unlink) OtpMsg(final int tag, final OtpErlangPid from, final OtpErlangPid to) { - // convert TT-tags to equiv non-TT versions - int atag = tag; - if (tag > 10) { - atag -= 10; - } + // convert TT-tags to equiv non-TT versions + int atag = tag; + if (tag > 10) { + atag -= 10; + } - this.tag = atag; - this.from = from; - this.to = to; + this.tag = atag; + this.from = from; + this.to = to; } /** * Get the payload from this message without deserializing it. - * + * * @return the serialized Erlang term contained in this message. - * + * */ OtpInputStream getMsgBuf() { - return paybuf; + return paybuf; } /** @@ -157,36 +159,37 @@ public class OtpMsg { * type of message. Valid values are the ``tag'' constants defined in this * class. * </p> - * + * * <p> * The tab identifies not only the type of message but also the content of * the OtpMsg object, since different messages have different components, as * follows: * </p> - * + * * <ul> - * <li> sendTag identifies a "normal" message. The recipient is a - * {@link OtpErlangPid Pid} and it is available through {@link - * #getRecipientPid getRecipientPid()}. Sender information is not available. - * The message body can be retrieved with {@link #getMsg getMsg()}. </li> - * - * <li> regSendTag also identifies a "normal" message. The recipient here is + * <li>sendTag identifies a "normal" message. The recipient is a + * {@link OtpErlangPid Pid} and it is available through + * {@link #getRecipientPid getRecipientPid()}. Sender information is not + * available. The message body can be retrieved with {@link #getMsg + * getMsg()}.</li> + * + * <li>regSendTag also identifies a "normal" message. The recipient here is * a String and it is available through {@link #getRecipientName * getRecipientName()}. Sender information is available through * #getSenderPid getSenderPid()}. The message body can be retrieved with - * {@link #getMsg getMsg()}. </li> - * - * <li> linkTag identifies a link request. The Pid of the sender is - * available, as well as the Pid to which the link should be made. </li> - * - * <li> exitTag and exit2Tag messages are sent as a result of broken links. + * {@link #getMsg getMsg()}.</li> + * + * <li>linkTag identifies a link request. The Pid of the sender is + * available, as well as the Pid to which the link should be made.</li> + * + * <li>exitTag and exit2Tag messages are sent as a result of broken links. * Both sender and recipient Pids and are available through the * corresponding methods, and the "reason" is available through - * {@link #getMsg getMsg()}. </li> + * {@link #getMsg getMsg()}.</li> * </ul> */ public int type() { - return tag; + return tag; } /** @@ -194,42 +197,42 @@ public class OtpMsg { * Deserialize and return a new copy of the message contained in this * OtpMsg. * </p> - * + * * <p> * The first time this method is called the actual payload is deserialized * and the Erlang term is created. Calling this method subsequent times will * not cuase the message to be deserialized additional times, instead the * same Erlang term object will be returned. * </p> - * + * * @return an Erlang term. 
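Because an OtpMsg carries the header separately from the still-encoded body, the tag can be checked before getMsg() deserializes the payload. A hypothetical dispatch sketch using the tag constants documented above:

    import com.ericsson.otp.erlang.*;

    public class MsgDemo {
        // handle one queued message; the mailbox is assumed to exist already
        static void handleOne(final OtpMbox mbox) throws Exception {
            final OtpMsg m = mbox.receiveMsg();   // header plus still-encoded body
            switch (m.type()) {
            case OtpMsg.sendTag:                  // addressed to our pid
                System.out.println("to " + m.getRecipientPid() + ": " + m.getMsg());
                break;
            case OtpMsg.regSendTag:               // addressed to our registered name
                System.out.println("from " + m.getSenderPid() + ": " + m.getMsg());
                break;
            default:
                System.out.println("other message: " + m.getMsg());
            }
        }
    }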
- * + * * @exception OtpErlangDecodeException - * if the byte stream could not be deserialized. - * + * if the byte stream could not be deserialized. + * */ public OtpErlangObject getMsg() throws OtpErlangDecodeException { - if (payload == null) { - payload = paybuf.read_any(); - } - return payload; + if (payload == null) { + payload = paybuf.read_any(); + } + return payload; } /** * <p> * Get the name of the recipient for this message. * </p> - * + * * <p> * Messages are sent to Pids or names. If this message was sent to a name * then the name is returned by this method. * </p> - * + * * @return the name of the recipient, or null if the recipient was in fact a * Pid. */ public String getRecipientName() { - return toName; + return toName; } /** @@ -237,18 +240,18 @@ public class OtpMsg { * Get the Pid of the recipient for this message, if it is a sendTag * message. * </p> - * + * * <p> * Messages are sent to Pids or names. If this message was sent to a Pid * then the Pid is returned by this method. The recipient Pid is also * available for link, unlink and exit messages. * </p> - * + * * @return the Pid of the recipient, or null if the recipient was in fact a * name. */ public OtpErlangPid getRecipientPid() { - return to; + return to; } /** @@ -256,36 +259,36 @@ public class OtpMsg { * Get the name of the recipient for this message, if it is a regSendTag * message. * </p> - * + * * <p> * Messages are sent to Pids or names. If this message was sent to a name * then the name is returned by this method. * </p> - * + * * @return the Pid of the recipient, or null if the recipient was in fact a * name. */ public Object getRecipient() { - if (toName != null) { - return toName; - } - return to; + if (toName != null) { + return toName; + } + return to; } /** * <p> * Get the Pid of the sender of this message. * </p> - * + * * <p> * For messages sent to names, the Pid of the sender is included with the * message. The sender Pid is also available for link, unlink and exit * messages. It is not available for sendTag messages sent to Pids. * </p> - * + * * @return the Pid of the sender, or null if it was not available. */ public OtpErlangPid getSenderPid() { - return from; + return from; } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpNode.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpNode.java index 68addb9f2c..d5edd135cf 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpNode.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpNode.java @@ -1,19 +1,19 @@ -/* +/* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2012. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -36,21 +36,21 @@ import java.util.Iterator; * communication mechanism is automatic and hidden from the application * programmer. * </p> - * + * * <p> * Once an instance of this class has been created, obtain one or more mailboxes * in order to send or receive messages. 
The first message sent to a given node * will cause a connection to be set up to that node. Any messages received will * be delivered to the appropriate mailboxes. * </p> - * + * * <p> * To shut down the node, call {@link #close close()}. This will prevent the * node from accepting additional connections and it will cause all existing * connections to be closed. Any unread messages in existing mailboxes can still * be read, however no new messages will be delivered to the mailboxes. * </p> - * + * * <p> * Note that the use of this class requires that Epmd (Erlang Port Mapper * Daemon) is running on each cooperating host. This class does not start Epmd @@ -83,74 +83,74 @@ public class OtpNode extends OtpLocalNode { * directory. The home directory is obtained from the System property * "user.home". * </p> - * + * * <p> * If the file does not exist, an empty string is used. This method makes no * attempt to create the file. * </p> - * + * * @param node * the name of this node. - * + * * @exception IOException * if communication could not be initialized. - * + * */ public OtpNode(final String node) throws IOException { - this(node, defaultCookie, 0); + this(node, defaultCookie, 0); } /** * Create a node. - * + * * @param node * the name of this node. - * + * * @param cookie * the authorization cookie that will be used by this node when * it communicates with other nodes. - * + * * @exception IOException * if communication could not be initialized. - * + * */ public OtpNode(final String node, final String cookie) throws IOException { - this(node, cookie, 0); + this(node, cookie, 0); } /** * Create a node. - * + * * @param node * the name of this node. - * + * * @param cookie * the authorization cookie that will be used by this node when * it communicates with other nodes. - * + * * @param port * the port number you wish to use for incoming connections. * Specifying 0 lets the system choose an available port. - * + * * @exception IOException * if communication could not be initialized. - * + * */ public OtpNode(final String node, final String cookie, final int port) - throws IOException { - super(node, cookie); + throws IOException { + super(node, cookie); - init(port); + init(port); } private synchronized void init(final int aport) throws IOException { - if (!initDone) { - connections = new Hashtable<String, OtpCookedConnection>(17, - (float) 0.95); - mboxes = new Mailboxes(); - acceptor = new Acceptor(aport); - initDone = true; - } + if (!initDone) { + connections = new Hashtable<String, OtpCookedConnection>(17, + (float) 0.95); + mboxes = new Mailboxes(); + acceptor = new Acceptor(aport); + initDone = true; + } } /** @@ -158,24 +158,24 @@ public class OtpNode extends OtpLocalNode { * and close all existing connections. 
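The constructors above take a node name, an optional cookie and an optional listen port, where 0 lets the system pick a port. A minimal start-up and shut-down sketch; the node name and cookie are invented, and as the class description notes, epmd must already be running on the host:

    import com.ericsson.otp.erlang.*;

    public class NodeDemo {
        public static void main(final String[] args) throws Exception {
            // assumed name and cookie; port 0 means "choose any free port"
            final OtpNode node = new OtpNode("demo@localhost", "secretcookie", 0);
            try {
                System.out.println("node up: " + node.node());
            } finally {
                node.close();  // stop accepting connections and close existing ones
            }
        }
    }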
*/ public synchronized void close() { - acceptor.quit(); - OtpCookedConnection conn; - final Collection<OtpCookedConnection> coll = connections.values(); - final Iterator<OtpCookedConnection> it = coll.iterator(); - - mboxes.clear(); - - while (it.hasNext()) { - conn = it.next(); - it.remove(); - conn.close(); - } - initDone = false; + acceptor.quit(); + OtpCookedConnection conn; + final Collection<OtpCookedConnection> coll = connections.values(); + final Iterator<OtpCookedConnection> it = coll.iterator(); + + mboxes.clear(); + + while (it.hasNext()) { + conn = it.next(); + it.remove(); + conn.close(); + } + initDone = false; } @Override protected void finalize() { - close(); + close(); } /** @@ -183,65 +183,65 @@ public class OtpNode extends OtpLocalNode { * receive messages with other, similar mailboxes and with Erlang processes. * Messages can be sent to this mailbox by using its associated * {@link OtpMbox#self() pid}. - * + * * @return a mailbox. */ public OtpMbox createMbox() { - return mboxes.create(); + return mboxes.create(); } /** * Close the specified mailbox with reason 'normal'. - * + * * @param mbox * the mailbox to close. - * + * * <p> * After this operation, the mailbox will no longer be able to * receive messages. Any delivered but as yet unretrieved * messages can still be retrieved however. * </p> - * + * * <p> * If there are links from the mailbox to other * {@link OtpErlangPid pids}, they will be broken when this * method is called and exit signals with reason 'normal' will be * sent. * </p> - * + * */ public void closeMbox(final OtpMbox mbox) { - closeMbox(mbox, new OtpErlangAtom("normal")); + closeMbox(mbox, new OtpErlangAtom("normal")); } /** * Close the specified mailbox with the given reason. - * + * * @param mbox * the mailbox to close. * @param reason * an Erlang term describing the reason for the termination. - * + * * <p> * After this operation, the mailbox will no longer be able to * receive messages. Any delivered but as yet unretrieved * messages can still be retrieved however. * </p> - * + * * <p> * If there are links from the mailbox to other * {@link OtpErlangPid pids}, they will be broken when this * method is called and exit signals with the given reason will * be sent. * </p> - * + * */ public void closeMbox(final OtpMbox mbox, final OtpErlangObject reason) { - if (mbox != null) { - mboxes.remove(mbox); - mbox.name = null; - mbox.breakLinks(reason); - } + if (mbox != null) { + mboxes.remove(mbox); + mbox.name = null; + mbox.breakLinks(reason); + } } /** @@ -249,16 +249,16 @@ public class OtpNode extends OtpLocalNode { * with other, similar mailboxes and with Erlang processes. Messages can be * sent to this mailbox by using its registered name or the associated * {@link OtpMbox#self() pid}. - * + * * @param name * a name to register for this mailbox. The name must be unique * within this OtpNode. - * + * * @return a mailbox, or null if the name was already in use. - * + * */ public OtpMbox createMbox(final String name) { - return mboxes.create(name); + return mboxes.create(name); } /** @@ -269,58 +269,58 @@ public class OtpNode extends OtpLocalNode { * name; if the mailbox already had a name, calling this method will * supercede that name. * </p> - * + * * @param name * the name to register for the mailbox. Specify null to * unregister the existing name from this mailbox. - * + * * @param mbox * the mailbox to associate with the name. - * + * * @return true if the name was available, or false otherwise. 
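createMbox(name), together with registerName(), whereis() and getNames() just below, gives a mailbox a node-unique name that other parties can address. A speculative sketch with an invented mailbox name:

    import com.ericsson.otp.erlang.*;

    public class NamedMboxDemo {
        public static void main(final String[] args) throws Exception {
            final OtpNode node = new OtpNode("registry@localhost"); // assumed name

            final OtpMbox mbox = node.createMbox("worker");  // null if the name is taken
            if (mbox == null) {
                System.out.println("name already in use");
            } else {
                final OtpErlangPid pid = node.whereis("worker"); // resolves to mbox.self()
                System.out.println("worker is registered as " + pid);
                node.closeMbox(mbox);  // breaks any links with reason 'normal'
            }
            node.close();
        }
    }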
*/ public boolean registerName(final String name, final OtpMbox mbox) { - return mboxes.register(name, mbox); + return mboxes.register(name, mbox); } /** * Get a list of all known registered names on this node. - * + * * @return an array of Strings, containins all known registered names on * this node. */ public String[] getNames() { - return mboxes.names(); + return mboxes.names(); } /** * Determine the {@link OtpErlangPid pid} corresponding to a registered name * on this node. - * + * * @return the {@link OtpErlangPid pid} corresponding to the registered * name, or null if the name is not known on this node. */ public OtpErlangPid whereis(final String name) { - final OtpMbox m = mboxes.get(name); - if (m != null) { - return m.self(); - } - return null; + final OtpMbox m = mboxes.get(name); + if (m != null) { + return m.self(); + } + return null; } /** * Register interest in certain system events. The {@link OtpNodeStatus * OtpNodeStatus} handler object contains callback methods, that will be * called when certain events occur. - * + * * @param ahandler * the callback object to register. To clear the handler, specify * null as the handler to use. - * + * */ public synchronized void registerStatusHandler(final OtpNodeStatus ahandler) { - this.handler = ahandler; + handler = ahandler; } /** @@ -329,7 +329,7 @@ public class OtpNode extends OtpLocalNode { * setting up a connection to the remote node (if possible). Only a single * outgoing message is sent; the timeout is how long to wait for a response. * </p> - * + * * <p> * Only a single attempt is made to connect to the remote node, so for * example it is not possible to specify an extremely long timeout and @@ -337,74 +337,73 @@ public class OtpNode extends OtpLocalNode { * wait for a remote node to be started, the following construction may be * useful: * </p> - * + * * <pre> * // ping every 2 seconds until positive response * while (!me.ping(him, 2000)) * ; * </pre> - * + * * @param anode * the name of the node to ping. - * + * * @param timeout * the time, in milliseconds, to wait for response before * returning false. - * + * * @return true if the node was alive and the correct ping response was * returned. false if the correct response was not returned on time. */ /* * internal info about the message formats... 
- * + * * the request: -> REG_SEND {6,#Pid<[email protected]>,'',net_kernel} * {'$gen_call',{#Pid<[email protected]>,#Ref<[email protected]>},{is_auth,bingo@aule}} - * + * * the reply: <- SEND {2,'',#Pid<[email protected]>} {#Ref<[email protected]>,yes} */ public boolean ping(final String anode, final long timeout) { - if (anode.equals(this.node)) { - return true; - } else if (anode.indexOf('@', 0) < 0 - && anode.equals(this.node - .substring(0, this.node.indexOf('@', 0)))) { - return true; - } - - // other node - OtpMbox mbox = null; - try { - mbox = createMbox(); - mbox.send("net_kernel", anode, getPingTuple(mbox)); - final OtpErlangObject reply = mbox.receive(timeout); - - final OtpErlangTuple t = (OtpErlangTuple) reply; - final OtpErlangAtom a = (OtpErlangAtom) t.elementAt(1); - return "yes".equals(a.atomValue()); - } catch (final Exception e) { - } finally { - closeMbox(mbox); - } - return false; + if (anode.equals(node)) { + return true; + } else if (anode.indexOf('@', 0) < 0 + && anode.equals(node.substring(0, node.indexOf('@', 0)))) { + return true; + } + + // other node + OtpMbox mbox = null; + try { + mbox = createMbox(); + mbox.send("net_kernel", anode, getPingTuple(mbox)); + final OtpErlangObject reply = mbox.receive(timeout); + + final OtpErlangTuple t = (OtpErlangTuple) reply; + final OtpErlangAtom a = (OtpErlangAtom) t.elementAt(1); + return "yes".equals(a.atomValue()); + } catch (final Exception e) { + } finally { + closeMbox(mbox); + } + return false; } /* create the outgoing ping message */ private OtpErlangTuple getPingTuple(final OtpMbox mbox) { - final OtpErlangObject[] ping = new OtpErlangObject[3]; - final OtpErlangObject[] pid = new OtpErlangObject[2]; - final OtpErlangObject[] anode = new OtpErlangObject[2]; + final OtpErlangObject[] ping = new OtpErlangObject[3]; + final OtpErlangObject[] pid = new OtpErlangObject[2]; + final OtpErlangObject[] anode = new OtpErlangObject[2]; - pid[0] = mbox.self(); - pid[1] = createRef(); + pid[0] = mbox.self(); + pid[1] = createRef(); - anode[0] = new OtpErlangAtom("is_auth"); - anode[1] = new OtpErlangAtom(node()); + anode[0] = new OtpErlangAtom("is_auth"); + anode[1] = new OtpErlangAtom(node()); - ping[0] = new OtpErlangAtom("$gen_call"); - ping[1] = new OtpErlangTuple(pid); - ping[2] = new OtpErlangTuple(anode); + ping[0] = new OtpErlangAtom("$gen_call"); + ping[1] = new OtpErlangTuple(pid); + ping[2] = new OtpErlangTuple(anode); - return new OtpErlangTuple(ping); + return new OtpErlangTuple(ping); } /* @@ -412,27 +411,27 @@ public class OtpNode extends OtpLocalNode { * pings. 
*/ private boolean netKernel(final OtpMsg m) { - OtpMbox mbox = null; - try { - final OtpErlangTuple t = (OtpErlangTuple) m.getMsg(); - final OtpErlangTuple req = (OtpErlangTuple) t.elementAt(1); // actual - // request - - final OtpErlangPid pid = (OtpErlangPid) req.elementAt(0); // originating - // pid - - final OtpErlangObject[] pong = new OtpErlangObject[2]; - pong[0] = req.elementAt(1); // his #Ref - pong[1] = new OtpErlangAtom("yes"); - - mbox = createMbox(); - mbox.send(pid, new OtpErlangTuple(pong)); - return true; - } catch (final Exception e) { - } finally { - closeMbox(mbox); - } - return false; + OtpMbox mbox = null; + try { + final OtpErlangTuple t = (OtpErlangTuple) m.getMsg(); + final OtpErlangTuple req = (OtpErlangTuple) t.elementAt(1); // actual + // request + + final OtpErlangPid pid = (OtpErlangPid) req.elementAt(0); // originating + // pid + + final OtpErlangObject[] pong = new OtpErlangObject[2]; + pong[0] = req.elementAt(1); // his #Ref + pong[1] = new OtpErlangAtom("yes"); + + mbox = createMbox(); + mbox.send(pid, new OtpErlangTuple(pong)); + return true; + } catch (final Exception e) { + } finally { + closeMbox(mbox); + } + return false; } /* @@ -440,31 +439,31 @@ public class OtpNode extends OtpLocalNode { * delivered successfully, or false otherwise. */ boolean deliver(final OtpMsg m) { - OtpMbox mbox = null; - - try { - final int t = m.type(); - - if (t == OtpMsg.regSendTag) { - final String name = m.getRecipientName(); - /* special case for netKernel requests */ - if (name.equals("net_kernel")) { - return netKernel(m); - } - mbox = mboxes.get(name); - } else { - mbox = mboxes.get(m.getRecipientPid()); - } - - if (mbox == null) { - return false; - } - mbox.deliver(m); - } catch (final Exception e) { - return false; - } - - return true; + OtpMbox mbox = null; + + try { + final int t = m.type(); + + if (t == OtpMsg.regSendTag) { + final String name = m.getRecipientName(); + /* special case for netKernel requests */ + if (name.equals("net_kernel")) { + return netKernel(m); + } + mbox = mboxes.get(name); + } else { + mbox = mboxes.get(m.getRecipientPid()); + } + + if (mbox == null) { + return false; + } + mbox.deliver(m); + } catch (final Exception e) { + return false; + } + + return true; } /* @@ -472,86 +471,86 @@ public class OtpNode extends OtpLocalNode { * specified by the application */ void deliverError(final OtpCookedConnection conn, final Exception e) { - removeConnection(conn); - remoteStatus(conn.name, false, e); + removeConnection(conn); + remoteStatus(conn.name, false, e); } /* * find or create a connection to the given node */ OtpCookedConnection getConnection(final String anode) { - OtpPeer peer = null; - OtpCookedConnection conn = null; - - synchronized (connections) { - // first just try looking up the name as-is - conn = connections.get(anode); - - if (conn == null) { - // in case node had no '@' add localhost info and try again - peer = new OtpPeer(anode); - conn = connections.get(peer.node()); - - if (conn == null) { - try { - conn = new OtpCookedConnection(this, peer); - conn.setFlags(connFlags); - addConnection(conn); - } catch (final Exception e) { - /* false = outgoing */ - connAttempt(peer.node(), false, e); - } - } - } - return conn; - } + OtpPeer peer = null; + OtpCookedConnection conn = null; + + synchronized (connections) { + // first just try looking up the name as-is + conn = connections.get(anode); + + if (conn == null) { + // in case node had no '@' add localhost info and try again + peer = new OtpPeer(anode); + conn = 
connections.get(peer.node()); + + if (conn == null) { + try { + conn = new OtpCookedConnection(this, peer); + conn.setFlags(connFlags); + addConnection(conn); + } catch (final Exception e) { + /* false = outgoing */ + connAttempt(peer.node(), false, e); + } + } + } + return conn; + } } void addConnection(final OtpCookedConnection conn) { - if (conn != null && conn.name != null) { - connections.put(conn.name, conn); - remoteStatus(conn.name, true, null); - } + if (conn != null && conn.name != null) { + connections.put(conn.name, conn); + remoteStatus(conn.name, true, null); + } } private void removeConnection(final OtpCookedConnection conn) { - if (conn != null && conn.name != null) { - connections.remove(conn.name); - } + if (conn != null && conn.name != null) { + connections.remove(conn.name); + } } /* use these wrappers to call handler functions */ - private synchronized void remoteStatus(final String anode, final boolean up, - final Object info) { - if (handler == null) { - return; - } - try { - handler.remoteStatus(anode, up, info); - } catch (final Exception e) { - } + private synchronized void remoteStatus(final String anode, + final boolean up, final Object info) { + if (handler == null) { + return; + } + try { + handler.remoteStatus(anode, up, info); + } catch (final Exception e) { + } } synchronized void localStatus(final String anode, final boolean up, - final Object info) { - if (handler == null) { - return; - } - try { - handler.localStatus(anode, up, info); - } catch (final Exception e) { - } + final Object info) { + if (handler == null) { + return; + } + try { + handler.localStatus(anode, up, info); + } catch (final Exception e) { + } } synchronized void connAttempt(final String anode, final boolean incoming, - final Object info) { - if (handler == null) { - return; - } - try { - handler.connAttempt(anode, incoming, info); - } catch (final Exception e) { - } + final Object info) { + if (handler == null) { + return; + } + try { + handler.connAttempt(anode, incoming, info); + } catch (final Exception e) { + } } /* @@ -559,248 +558,248 @@ public class OtpNode extends OtpLocalNode { * references */ public class Mailboxes { - // mbox pids here - private Hashtable<OtpErlangPid, WeakReference<OtpMbox>> byPid = null; - // mbox names here - private Hashtable<String, WeakReference<OtpMbox>> byName = null; - - public Mailboxes() { - byPid = new Hashtable<OtpErlangPid, WeakReference<OtpMbox>>(17, - (float) 0.95); - byName = new Hashtable<String, WeakReference<OtpMbox>>(17, - (float) 0.95); - } - - public OtpMbox create(final String name) { - OtpMbox m = null; - - synchronized (byName) { - if (get(name) != null) { - return null; - } - final OtpErlangPid pid = createPid(); - m = new OtpMbox(OtpNode.this, pid, name); - byPid.put(pid, new WeakReference<OtpMbox>(m)); - byName.put(name, new WeakReference<OtpMbox>(m)); - } - return m; - } - - public OtpMbox create() { - final OtpErlangPid pid = createPid(); - final OtpMbox m = new OtpMbox(OtpNode.this, pid); - byPid.put(pid, new WeakReference<OtpMbox>(m)); - return m; - } - - public void clear() { - byPid.clear(); - byName.clear(); - } - - public String[] names() { - String allnames[] = null; - - synchronized (byName) { - final int n = byName.size(); - final Enumeration<String> keys = byName.keys(); - allnames = new String[n]; - - int i = 0; - while (keys.hasMoreElements()) { - allnames[i++] = keys.nextElement(); - } - } - return allnames; - } - - public boolean register(final String name, final OtpMbox mbox) { - if (name == null) { - if 
(mbox.name != null) { - byName.remove(mbox.name); - mbox.name = null; - } - } else { - synchronized (byName) { - if (get(name) != null) { - return false; - } - byName.put(name, new WeakReference<OtpMbox>(mbox)); - mbox.name = name; - } - } - return true; - } - - /* - * look up a mailbox based on its name. If the mailbox has gone out of - * scope we also remove the reference from the hashtable so we don't - * find it again. - */ - public OtpMbox get(final String name) { - final WeakReference<OtpMbox> wr = byName.get(name); - - if (wr != null) { - final OtpMbox m = wr.get(); - - if (m != null) { - return m; - } - byName.remove(name); - } - return null; - } - - /* - * look up a mailbox based on its pid. If the mailbox has gone out of - * scope we also remove the reference from the hashtable so we don't - * find it again. - */ - public OtpMbox get(final OtpErlangPid pid) { - final WeakReference<OtpMbox> wr = byPid.get(pid); - - if (wr != null) { - final OtpMbox m = wr.get(); - - if (m != null) { - return m; - } - byPid.remove(pid); - } - return null; - } - - public void remove(final OtpMbox mbox) { - byPid.remove(mbox.self); - if (mbox.name != null) { - byName.remove(mbox.name); - } - } + // mbox pids here + private Hashtable<OtpErlangPid, WeakReference<OtpMbox>> byPid = null; + // mbox names here + private Hashtable<String, WeakReference<OtpMbox>> byName = null; + + public Mailboxes() { + byPid = new Hashtable<OtpErlangPid, WeakReference<OtpMbox>>(17, + (float) 0.95); + byName = new Hashtable<String, WeakReference<OtpMbox>>(17, + (float) 0.95); + } + + public OtpMbox create(final String name) { + OtpMbox m = null; + + synchronized (byName) { + if (get(name) != null) { + return null; + } + final OtpErlangPid pid = createPid(); + m = new OtpMbox(OtpNode.this, pid, name); + byPid.put(pid, new WeakReference<OtpMbox>(m)); + byName.put(name, new WeakReference<OtpMbox>(m)); + } + return m; + } + + public OtpMbox create() { + final OtpErlangPid pid = createPid(); + final OtpMbox m = new OtpMbox(OtpNode.this, pid); + byPid.put(pid, new WeakReference<OtpMbox>(m)); + return m; + } + + public void clear() { + byPid.clear(); + byName.clear(); + } + + public String[] names() { + String allnames[] = null; + + synchronized (byName) { + final int n = byName.size(); + final Enumeration<String> keys = byName.keys(); + allnames = new String[n]; + + int i = 0; + while (keys.hasMoreElements()) { + allnames[i++] = keys.nextElement(); + } + } + return allnames; + } + + public boolean register(final String name, final OtpMbox mbox) { + if (name == null) { + if (mbox.name != null) { + byName.remove(mbox.name); + mbox.name = null; + } + } else { + synchronized (byName) { + if (get(name) != null) { + return false; + } + byName.put(name, new WeakReference<OtpMbox>(mbox)); + mbox.name = name; + } + } + return true; + } + + /* + * look up a mailbox based on its name. If the mailbox has gone out of + * scope we also remove the reference from the hashtable so we don't + * find it again. + */ + public OtpMbox get(final String name) { + final WeakReference<OtpMbox> wr = byName.get(name); + + if (wr != null) { + final OtpMbox m = wr.get(); + + if (m != null) { + return m; + } + byName.remove(name); + } + return null; + } + + /* + * look up a mailbox based on its pid. If the mailbox has gone out of + * scope we also remove the reference from the hashtable so we don't + * find it again. 
+ */ + public OtpMbox get(final OtpErlangPid pid) { + final WeakReference<OtpMbox> wr = byPid.get(pid); + + if (wr != null) { + final OtpMbox m = wr.get(); + + if (m != null) { + return m; + } + byPid.remove(pid); + } + return null; + } + + public void remove(final OtpMbox mbox) { + byPid.remove(mbox.self); + if (mbox.name != null) { + byName.remove(mbox.name); + } + } } /* * this thread simply listens for incoming connections */ public class Acceptor extends Thread { - private final ServerSocket sock; - private final int acceptorPort; - private volatile boolean done = false; - - Acceptor(final int port) throws IOException { - sock = new ServerSocket(port); - this.acceptorPort = sock.getLocalPort(); - OtpNode.this.port = this.acceptorPort; - - setDaemon(true); - setName("acceptor"); - publishPort(); - start(); - } - - private boolean publishPort() throws IOException { - if (getEpmd() != null) { - return false; // already published - } - OtpEpmd.publishPort(OtpNode.this); - return true; - } - - private void unPublishPort() { - // unregister with epmd - OtpEpmd.unPublishPort(OtpNode.this); - - // close the local descriptor (if we have one) - closeSock(epmd); - epmd = null; - } - - public void quit() { - unPublishPort(); - done = true; - closeSock(sock); - localStatus(node, false, null); - } - - private void closeSock(final ServerSocket s) { - try { - if (s != null) { - s.close(); - } - } catch (final Exception e) { - } - } - - private void closeSock(final Socket s) { - try { - if (s != null) { - s.close(); - } - } catch (final Exception e) { - } - } - - public int port() { - return acceptorPort; - } - - @Override - public void run() { - Socket newsock = null; - OtpCookedConnection conn = null; - - localStatus(node, true, null); - - accept_loop: while (!done) { - conn = null; - - try { - newsock = sock.accept(); - } catch (final Exception e) { - // Problem in java1.2.2: accept throws SocketException - // when socket is closed. This will happen when - // acceptor.quit() - // is called. acceptor.quit() will call localStatus(...), so - // we have to check if that's where we come from. 
- if (!done) { - localStatus(node, false, e); - } - break accept_loop; - } - - try { - synchronized (connections) { - conn = new OtpCookedConnection(OtpNode.this, newsock); - conn.setFlags(connFlags); - addConnection(conn); - } - } catch (final OtpAuthException e) { - if (conn != null && conn.name != null) { - connAttempt(conn.name, true, e); - } else { - connAttempt("unknown", true, e); - } - closeSock(newsock); - } catch (final IOException e) { - if (conn != null && conn.name != null) { - connAttempt(conn.name, true, e); - } else { - connAttempt("unknown", true, e); - } - closeSock(newsock); - } catch (final Exception e) { - closeSock(newsock); - closeSock(sock); - localStatus(node, false, e); - break accept_loop; - } - } // while - - // if we have exited loop we must do this too - unPublishPort(); - } + private final ServerSocket sock; + private final int acceptorPort; + private volatile boolean done = false; + + Acceptor(final int port) throws IOException { + sock = new ServerSocket(port); + acceptorPort = sock.getLocalPort(); + OtpNode.this.port = acceptorPort; + + setDaemon(true); + setName("acceptor"); + publishPort(); + start(); + } + + private boolean publishPort() throws IOException { + if (getEpmd() != null) { + return false; // already published + } + OtpEpmd.publishPort(OtpNode.this); + return true; + } + + private void unPublishPort() { + // unregister with epmd + OtpEpmd.unPublishPort(OtpNode.this); + + // close the local descriptor (if we have one) + closeSock(epmd); + epmd = null; + } + + public void quit() { + unPublishPort(); + done = true; + closeSock(sock); + localStatus(node, false, null); + } + + private void closeSock(final ServerSocket s) { + try { + if (s != null) { + s.close(); + } + } catch (final Exception e) { + } + } + + private void closeSock(final Socket s) { + try { + if (s != null) { + s.close(); + } + } catch (final Exception e) { + } + } + + public int port() { + return acceptorPort; + } + + @Override + public void run() { + Socket newsock = null; + OtpCookedConnection conn = null; + + localStatus(node, true, null); + + accept_loop: while (!done) { + conn = null; + + try { + newsock = sock.accept(); + } catch (final Exception e) { + // Problem in java1.2.2: accept throws SocketException + // when socket is closed. This will happen when + // acceptor.quit() + // is called. acceptor.quit() will call localStatus(...), so + // we have to check if that's where we come from. 
+ if (!done) { + localStatus(node, false, e); + } + break accept_loop; + } + + try { + synchronized (connections) { + conn = new OtpCookedConnection(OtpNode.this, newsock); + conn.setFlags(connFlags); + addConnection(conn); + } + } catch (final OtpAuthException e) { + if (conn != null && conn.name != null) { + connAttempt(conn.name, true, e); + } else { + connAttempt("unknown", true, e); + } + closeSock(newsock); + } catch (final IOException e) { + if (conn != null && conn.name != null) { + connAttempt(conn.name, true, e); + } else { + connAttempt("unknown", true, e); + } + closeSock(newsock); + } catch (final Exception e) { + closeSock(newsock); + closeSock(sock); + localStatus(node, false, e); + break accept_loop; + } + } // while + + // if we have exited loop we must do this too + unPublishPort(); + } } public void setFlags(final int flags) { - this.connFlags = flags; + connFlags = flags; } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpNodeStatus.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpNodeStatus.java index aee1f8b67a..889f1d1b1f 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpNodeStatus.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpNodeStatus.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -25,13 +25,13 @@ package com.ericsson.otp.erlang; * with your {@link OtpNode OtpNode} when you wish to be notified about such * status changes and other similar events. * </p> - * + * * <p> * This class provides default handers that ignore all events. Applications are * expected to extend this class in order to act on events that are deemed * interesting. * </p> - * + * * <p> * <b> Note that this class is likely to change in the near future </b> * </p> @@ -42,59 +42,57 @@ public class OtpNodeStatus { /** * Notify about remote node status changes. - * + * * @param node - * the node whose status change is being indicated by this - * call. - * + * the node whose status change is being indicated by this call. + * * @param up - * true if the node has come up, false if it has gone down. - * + * true if the node has come up, false if it has gone down. + * * @param info - * additional info that may be available, for example an - * exception that was raised causing the event in question - * (may be null). - * + * additional info that may be available, for example an + * exception that was raised causing the event in question (may + * be null). + * */ public void remoteStatus(final String node, final boolean up, - final Object info) { + final Object info) { } /** * Notify about local node exceptions. - * + * * @param node - * the node whose status change is being indicated by this - * call. - * + * the node whose status change is being indicated by this call. + * * @param up - * true if the node has come up, false if it has gone down. 
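The OtpNodeStatus callbacks being reformatted here are meant to be overridden by applications that want node up/down and connection-attempt notifications. A short sketch of hooking them up, assuming OtpNode exposes a registerStatusHandler method for installing the handler (that method is not part of this diff):

    import com.ericsson.otp.erlang.*;

    public class LoggingStatus extends OtpNodeStatus {
        @Override
        public void remoteStatus(String node, boolean up, Object info) {
            System.out.println("remote " + node + (up ? " up " : " down ") + info);
        }

        @Override
        public void connAttempt(String node, boolean incoming, Object info) {
            System.out.println((incoming ? "incoming" : "outgoing")
                    + " connection attempt, peer " + node + ": " + info);
        }

        public static void main(String[] args) throws Exception {
            // node name and cookie are illustrative
            OtpNode node = new OtpNode("status@localhost", "secretcookie");
            node.registerStatusHandler(new LoggingStatus()); // assumed registration hook
            Thread.sleep(60000); // keep the node alive long enough to observe events
            node.close();
        }
    }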
- * + * true if the node has come up, false if it has gone down. + * * @param info - * additional info that may be available, for example an - * exception that was raised causing the event in question - * (may be null). + * additional info that may be available, for example an + * exception that was raised causing the event in question (may + * be null). */ public void localStatus(final String node, final boolean up, - final Object info) { + final Object info) { } /** * Notify about failed connection attempts. - * + * * @param node - * The name of the remote node - * + * The name of the remote node + * * @param incoming - * The direction of the connection attempt, i.e. true for - * incoming, false for outgoing. - * + * The direction of the connection attempt, i.e. true for + * incoming, false for outgoing. + * * @param info - * additional info that may be available, for example an - * exception that was raised causing the event in question - * (may be null). + * additional info that may be available, for example an + * exception that was raised causing the event in question (may + * be null). */ public void connAttempt(final String node, final boolean incoming, - final Object info) { + final Object info) { } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpOutputStream.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpOutputStream.java index ef60a9f38a..b8493b57ff 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpOutputStream.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpOutputStream.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2013. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -30,17 +30,20 @@ import java.util.zip.Deflater; /** * Provides a stream for encoding Erlang terms to external format, for * transmission or storage. - * + * * <p> * Note that this class is not synchronized, if you need synchronization you * must provide it yourself. - * + * */ public class OtpOutputStream extends ByteArrayOutputStream { /** The default initial size of the stream. * */ public static final int defaultInitialSize = 2048; - /** The default increment used when growing the stream (increment at least this much). * */ + /** + * The default increment used when growing the stream (increment at least + * this much). * + */ public static final int defaultIncrement = 2048; // static formats, used to encode floats and doubles @@ -57,66 +60,66 @@ public class OtpOutputStream extends ByteArrayOutputStream { * Create a stream with the default initial size (2048 bytes). */ public OtpOutputStream() { - this(defaultInitialSize); + this(defaultInitialSize); } /** * Create a stream with the specified initial size. */ public OtpOutputStream(final int size) { - super(size); + super(size); } /** * Create a stream containing the encoded version of the given Erlang term. 
*/ public OtpOutputStream(final OtpErlangObject o) { - this(); - write_any(o); + this(); + write_any(o); } // package scope /* * Get the contents of the output stream as an input stream instead. This is * used internally in {@link OtpCconnection} for tracing outgoing packages. - * + * * @param offset where in the output stream to read data from when creating * the input stream. The offset is necessary because header contents start 5 * bytes into the header buffer, whereas payload contents start at the * beginning - * + * * @return an input stream containing the same raw data. */ OtpInputStream getOtpInputStream(final int offset) { - return new OtpInputStream(super.buf, offset, super.count - offset, 0); + return new OtpInputStream(super.buf, offset, super.count - offset, 0); } /** * Get the current position in the stream. - * + * * @return the current position in the stream. */ public int getPos() { - return super.count; + return super.count; } /** * Trims the capacity of this <tt>OtpOutputStream</tt> instance to be the - * buffer's current size. An application can use this operation to minimize + * buffer's current size. An application can use this operation to minimize * the storage of an <tt>OtpOutputStream</tt> instance. */ public void trimToSize() { - resize(super.count); + resize(super.count); } - private void resize(int size) { - if (size < super.buf.length) { - final byte[] tmp = new byte[size]; - System.arraycopy(super.buf, 0, tmp, 0, size); - super.buf = tmp; - } else if (size > super.buf.length) { - ensureCapacity(size); - } + private void resize(final int size) { + if (size < super.buf.length) { + final byte[] tmp = new byte[size]; + System.arraycopy(super.buf, 0, tmp, 0, size); + super.buf = tmp; + } else if (size > super.buf.length) { + ensureCapacity(size); + } } /** @@ -124,228 +127,237 @@ public class OtpOutputStream extends ByteArrayOutputStream { * necessary, to ensure that it can hold at least the number of elements * specified by the minimum capacity argument. * - * @param minCapacity the desired minimum capacity + * @param minCapacity + * the desired minimum capacity */ - public void ensureCapacity(int minCapacity) { - if (minCapacity > fixedSize) { - throw new IllegalArgumentException("Trying to increase fixed-size buffer"); - } - int oldCapacity = super.buf.length; - if (minCapacity > oldCapacity) { - int newCapacity = (oldCapacity * 3)/2 + 1; - if (newCapacity < oldCapacity + defaultIncrement) - newCapacity = oldCapacity + defaultIncrement; - if (newCapacity < minCapacity) - newCapacity = minCapacity; - newCapacity = Math.min(fixedSize, newCapacity); - // minCapacity is usually close to size, so this is a win: - final byte[] tmp = new byte[newCapacity]; - System.arraycopy(super.buf, 0, tmp, 0, super.count); - super.buf = tmp; - } + public void ensureCapacity(final int minCapacity) { + if (minCapacity > fixedSize) { + throw new IllegalArgumentException( + "Trying to increase fixed-size buffer"); + } + final int oldCapacity = super.buf.length; + if (minCapacity > oldCapacity) { + int newCapacity = oldCapacity * 3 / 2 + 1; + if (newCapacity < oldCapacity + defaultIncrement) { + newCapacity = oldCapacity + defaultIncrement; + } + if (newCapacity < minCapacity) { + newCapacity = minCapacity; + } + newCapacity = Math.min(fixedSize, newCapacity); + // minCapacity is usually close to size, so this is a win: + final byte[] tmp = new byte[newCapacity]; + System.arraycopy(super.buf, 0, tmp, 0, super.count); + super.buf = tmp; + } } /** * Write one byte to the stream. 
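A small sketch of the buffer-sizing behaviour reindented above: the stream starts at defaultInitialSize, ensureCapacity grows by roughly 1.5x but by at least defaultIncrement, and trimToSize shrinks the buffer back to the written content. The byte counts in the comments follow from that code and are illustrative:

    import com.ericsson.otp.erlang.OtpOutputStream;

    public class SizingDemo {
        public static void main(String[] args) {
            OtpOutputStream out = new OtpOutputStream(); // capacity defaultInitialSize
            System.out.println(out.length());            // 2048: capacity, not content size
            out.write(new byte[3000]);                    // forces ensureCapacity(3000)
            System.out.println(out.length());            // 4096: 1.5x growth bumped to old + defaultIncrement
            System.out.println(out.getPos());            // 3000: bytes actually written
            out.trimToSize();                             // shrink capacity to the content size
            System.out.println(out.length());            // 3000
        }
    }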
- * + * * @param b * the byte to write. - * + * */ public void write(final byte b) { - ensureCapacity(super.count + 1); - super.buf[super.count++] = b; + ensureCapacity(super.count + 1); + super.buf[super.count++] = b; } - /* (non-Javadoc) + /* + * (non-Javadoc) + * * @see java.io.ByteArrayOutputStream#write(byte[]) */ @Override public void write(final byte[] abuf) { - // don't assume that super.write(byte[]) calls write(buf, 0, buf.length) - write(abuf, 0, abuf.length); + // don't assume that super.write(byte[]) calls write(buf, 0, buf.length) + write(abuf, 0, abuf.length); } - /* (non-Javadoc) + /* + * (non-Javadoc) + * * @see java.io.ByteArrayOutputStream#write(int) */ @Override - public synchronized void write(int b) { - ensureCapacity(super.count + 1); - super.buf[super.count] = (byte) b; - count += 1; + public synchronized void write(final int b) { + ensureCapacity(super.count + 1); + super.buf[super.count] = (byte) b; + count += 1; } - /* (non-Javadoc) + /* + * (non-Javadoc) + * * @see java.io.ByteArrayOutputStream#write(byte[], int, int) */ @Override - public synchronized void write(byte[] b, int off, int len) { - if ((off < 0) || (off > b.length) || (len < 0) - || ((off + len) - b.length > 0)) { - throw new IndexOutOfBoundsException(); - } - ensureCapacity(super.count + len); - System.arraycopy(b, off, super.buf, super.count, len); - super.count += len; + public synchronized void write(final byte[] b, final int off, final int len) { + if (off < 0 || off > b.length || len < 0 || off + len - b.length > 0) { + throw new IndexOutOfBoundsException(); + } + ensureCapacity(super.count + len); + System.arraycopy(b, off, super.buf, super.count, len); + super.count += len; } /** * Write the low byte of a value to the stream. - * + * * @param n * the value to use. - * + * */ public void write1(final long n) { - write((byte) (n & 0xff)); + write((byte) (n & 0xff)); } /** * Write an array of bytes to the stream. - * + * * @param bytes * the array of bytes to write. - * + * */ public void writeN(final byte[] bytes) { - write(bytes); + write(bytes); } /** * Get the current capacity of the stream. As bytes are added the capacity * of the stream is increased automatically, however this method returns the * current size. - * + * * @return the size of the internal buffer used by the stream. */ public int length() { - return super.buf.length; + return super.buf.length; } /** * Get the number of bytes in the stream. - * + * * @return the number of bytes in the stream. - * + * * @deprecated As of Jinterface 1.4, replaced by super.size(). * @see #size() */ @Deprecated public int count() { - return count; + return count; } /** * Write the low two bytes of a value to the stream in big endian order. - * + * * @param n * the value to use. */ public void write2BE(final long n) { - write((byte) ((n & 0xff00) >> 8)); - write((byte) (n & 0xff)); + write((byte) ((n & 0xff00) >> 8)); + write((byte) (n & 0xff)); } /** * Write the low four bytes of a value to the stream in big endian order. - * + * * @param n * the value to use. */ public void write4BE(final long n) { - write((byte) ((n & 0xff000000) >> 24)); - write((byte) ((n & 0xff0000) >> 16)); - write((byte) ((n & 0xff00) >> 8)); - write((byte) (n & 0xff)); + write((byte) ((n & 0xff000000) >> 24)); + write((byte) ((n & 0xff0000) >> 16)); + write((byte) ((n & 0xff00) >> 8)); + write((byte) (n & 0xff)); } /** * Write the low eight (all) bytes of a value to the stream in big endian * order. - * + * * @param n * the value to use. 
*/ public void write8BE(final long n) { - write((byte) (n >> 56 & 0xff)); - write((byte) (n >> 48 & 0xff)); - write((byte) (n >> 40 & 0xff)); - write((byte) (n >> 32 & 0xff)); - write((byte) (n >> 24 & 0xff)); - write((byte) (n >> 16 & 0xff)); - write((byte) (n >> 8 & 0xff)); - write((byte) (n & 0xff)); + write((byte) (n >> 56 & 0xff)); + write((byte) (n >> 48 & 0xff)); + write((byte) (n >> 40 & 0xff)); + write((byte) (n >> 32 & 0xff)); + write((byte) (n >> 24 & 0xff)); + write((byte) (n >> 16 & 0xff)); + write((byte) (n >> 8 & 0xff)); + write((byte) (n & 0xff)); } /** * Write any number of bytes in little endian format. - * + * * @param n * the value to use. * @param b * the number of bytes to write from the little end. */ public void writeLE(final long n, final int b) { - long v = n; - for (int i = 0; i < b; i++) { - write((byte) (v & 0xff)); - v >>= 8; - } + long v = n; + for (int i = 0; i < b; i++) { + write((byte) (v & 0xff)); + v >>= 8; + } } /** * Write the low two bytes of a value to the stream in little endian order. - * + * * @param n * the value to use. */ public void write2LE(final long n) { - write((byte) (n & 0xff)); - write((byte) ((n & 0xff00) >> 8)); + write((byte) (n & 0xff)); + write((byte) ((n & 0xff00) >> 8)); } /** * Write the low four bytes of a value to the stream in little endian order. - * + * * @param n * the value to use. */ public void write4LE(final long n) { - write((byte) (n & 0xff)); - write((byte) ((n & 0xff00) >> 8)); - write((byte) ((n & 0xff0000) >> 16)); - write((byte) ((n & 0xff000000) >> 24)); + write((byte) (n & 0xff)); + write((byte) ((n & 0xff00) >> 8)); + write((byte) ((n & 0xff0000) >> 16)); + write((byte) ((n & 0xff000000) >> 24)); } /** * Write the low eight bytes of a value to the stream in little endian * order. - * + * * @param n * the value to use. */ public void write8LE(final long n) { - write((byte) (n & 0xff)); - write((byte) (n >> 8 & 0xff)); - write((byte) (n >> 16 & 0xff)); - write((byte) (n >> 24 & 0xff)); - write((byte) (n >> 32 & 0xff)); - write((byte) (n >> 40 & 0xff)); - write((byte) (n >> 48 & 0xff)); - write((byte) (n >> 56 & 0xff)); + write((byte) (n & 0xff)); + write((byte) (n >> 8 & 0xff)); + write((byte) (n >> 16 & 0xff)); + write((byte) (n >> 24 & 0xff)); + write((byte) (n >> 32 & 0xff)); + write((byte) (n >> 40 & 0xff)); + write((byte) (n >> 48 & 0xff)); + write((byte) (n >> 56 & 0xff)); } /** * Write the low four bytes of a value to the stream in bif endian order, at * the specified position. If the position specified is beyond the end of * the stream, this method will have no effect. - * + * * Normally this method should be used in conjunction with {@link #size() * size()}, when is is necessary to insert data into the stream before it is * known what the actual value should be. For example: - * + * * <pre> * int pos = s.size(); * s.write4BE(0); // make space for length data, @@ -354,501 +366,495 @@ public class OtpOutputStream extends ByteArrayOutputStream { * // later... when we know the length value * s.poke4BE(pos, length); * </pre> - * - * + * + * * @param offset * the position in the stream. * @param n * the value to use. 
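The 2/4/8-byte helpers above differ only in byte order. A tiny check of the layouts they produce (toByteArray comes from the ByteArrayOutputStream superclass); expected output is noted in the comments:

    import com.ericsson.otp.erlang.OtpOutputStream;

    public class ByteOrderDemo {
        public static void main(String[] args) {
            OtpOutputStream be = new OtpOutputStream();
            be.write4BE(0x11223344L);                 // big endian: 11 22 33 44
            OtpOutputStream le = new OtpOutputStream();
            le.write4LE(0x11223344L);                 // little endian: 44 33 22 11
            dump(be.toByteArray());
            dump(le.toByteArray());
        }

        private static void dump(byte[] bytes) {
            StringBuilder sb = new StringBuilder();
            for (byte b : bytes) {
                sb.append(String.format("%02x ", b));
            }
            System.out.println(sb.toString().trim());
        }
    }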
*/ public void poke4BE(final int offset, final long n) { - if (offset < super.count) { - buf[offset + 0] = (byte) ((n & 0xff000000) >> 24); - buf[offset + 1] = (byte) ((n & 0xff0000) >> 16); - buf[offset + 2] = (byte) ((n & 0xff00) >> 8); - buf[offset + 3] = (byte) (n & 0xff); - } + if (offset < super.count) { + buf[offset + 0] = (byte) ((n & 0xff000000) >> 24); + buf[offset + 1] = (byte) ((n & 0xff0000) >> 16); + buf[offset + 2] = (byte) ((n & 0xff00) >> 8); + buf[offset + 3] = (byte) (n & 0xff); + } } /** * Write a string to the stream as an Erlang atom. - * + * * @param atom * the string to write. */ public void write_atom(final String atom) { - String enc_atom; - byte[] bytes; - boolean isLatin1 = true; - - if (atom.codePointCount(0, atom.length()) <= OtpExternal.maxAtomLength) { - enc_atom = atom; - } - else { - /* - * Throwing an exception would be better I think, - * but truncation seems to be the way it has - * been done in other parts of OTP... - */ - enc_atom = new String(OtpErlangString.stringToCodePoints(atom), - 0, OtpExternal.maxAtomLength); - } - - for (int offset = 0; offset < enc_atom.length();) { - final int cp = enc_atom.codePointAt(offset); - if ((cp & ~0xFF) != 0) { - isLatin1 = false; - break; - } - offset += Character.charCount(cp); - } - try { - if (isLatin1) { - bytes = enc_atom.getBytes("ISO-8859-1"); - write1(OtpExternal.atomTag); - write2BE(bytes.length); - } - else { - bytes = enc_atom.getBytes("UTF-8"); - final int length = bytes.length; - if (length < 256) { - write1(OtpExternal.smallAtomUtf8Tag); - write1(length); - } - else { - write1(OtpExternal.atomUtf8Tag); - write2BE(length); - } - } - writeN(bytes); - } catch (final java.io.UnsupportedEncodingException e) { - /* - * Sigh, why didn't the API designer add an - * OtpErlangEncodeException to these encoding - * functions?!? Instead of changing the API we - * write an invalid atom and let it fail for - * whoever trying to decode this... Sigh, - * again... - */ - write1(OtpExternal.smallAtomUtf8Tag); - write1(2); - write2BE(0xffff); /* Invalid UTF-8 */ - } + String enc_atom; + byte[] bytes; + boolean isLatin1 = true; + + if (atom.codePointCount(0, atom.length()) <= OtpExternal.maxAtomLength) { + enc_atom = atom; + } else { + /* + * Throwing an exception would be better I think, but truncation + * seems to be the way it has been done in other parts of OTP... + */ + enc_atom = new String(OtpErlangString.stringToCodePoints(atom), 0, + OtpExternal.maxAtomLength); + } + + for (int offset = 0; offset < enc_atom.length();) { + final int cp = enc_atom.codePointAt(offset); + if ((cp & ~0xFF) != 0) { + isLatin1 = false; + break; + } + offset += Character.charCount(cp); + } + try { + if (isLatin1) { + bytes = enc_atom.getBytes("ISO-8859-1"); + write1(OtpExternal.atomTag); + write2BE(bytes.length); + } else { + bytes = enc_atom.getBytes("UTF-8"); + final int length = bytes.length; + if (length < 256) { + write1(OtpExternal.smallAtomUtf8Tag); + write1(length); + } else { + write1(OtpExternal.atomUtf8Tag); + write2BE(length); + } + } + writeN(bytes); + } catch (final java.io.UnsupportedEncodingException e) { + /* + * Sigh, why didn't the API designer add an OtpErlangEncodeException + * to these encoding functions?!? Instead of changing the API we + * write an invalid atom and let it fail for whoever trying to + * decode this... Sigh, again... + */ + write1(OtpExternal.smallAtomUtf8Tag); + write1(2); + write2BE(0xffff); /* Invalid UTF-8 */ + } } /** * Write an array of bytes to the stream as an Erlang binary. 
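write_atom above chooses its encoding from the atom's code points: pure Latin-1 atoms get the plain atomTag with a two-byte length, anything else is emitted as UTF-8 with smallAtomUtf8Tag or atomUtf8Tag, and over-long atoms are truncated to OtpExternal.maxAtomLength. A quick way to see which tag was chosen; the tag bytes are read from the produced buffer rather than hard-coded:

    import com.ericsson.otp.erlang.OtpOutputStream;

    public class AtomTagDemo {
        public static void main(String[] args) {
            OtpOutputStream latin1 = new OtpOutputStream();
            latin1.write_atom("ok");                 // all code points <= 0xFF -> atomTag
            OtpOutputStream utf8 = new OtpOutputStream();
            utf8.write_atom("atom_\u4e2d");          // contains a non-Latin-1 code point -> smallAtomUtf8Tag
            System.out.printf("latin-1 tag: %d, utf-8 tag: %d%n",
                    latin1.toByteArray()[0], utf8.toByteArray()[0]);
        }
    }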
- * + * * @param bin * the array of bytes to write. */ public void write_binary(final byte[] bin) { - write1(OtpExternal.binTag); - write4BE(bin.length); - writeN(bin); + write1(OtpExternal.binTag); + write4BE(bin.length); + writeN(bin); } /** * Write an array of bytes to the stream as an Erlang bitstr. - * + * * @param bin * the array of bytes to write. * @param pad_bits * the number of zero pad bits at the low end of the last byte */ public void write_bitstr(final byte[] bin, final int pad_bits) { - if (pad_bits == 0) { - write_binary(bin); - return; - } - write1(OtpExternal.bitBinTag); - write4BE(bin.length); - write1(8 - pad_bits); - writeN(bin); + if (pad_bits == 0) { + write_binary(bin); + return; + } + write1(OtpExternal.bitBinTag); + write4BE(bin.length); + write1(8 - pad_bits); + writeN(bin); } /** * Write a boolean value to the stream as the Erlang atom 'true' or 'false'. - * + * * @param b * the boolean value to write. */ public void write_boolean(final boolean b) { - write_atom(String.valueOf(b)); + write_atom(String.valueOf(b)); } /** * Write a single byte to the stream as an Erlang integer. The byte is * really an IDL 'octet', that is, unsigned. - * + * * @param b * the byte to use. */ public void write_byte(final byte b) { - this.write_long(b & 0xffL, true); + this.write_long(b & 0xffL, true); } /** * Write a character to the stream as an Erlang integer. The character may * be a 16 bit character, kind of IDL 'wchar'. It is up to the Erlang side * to take care of souch, if they should be used. - * + * * @param c * the character to use. */ public void write_char(final char c) { - this.write_long(c & 0xffffL, true); + this.write_long(c & 0xffffL, true); } /** * Write a double value to the stream. - * + * * @param d * the double to use. */ public void write_double(final double d) { - write1(OtpExternal.newFloatTag); - write8BE(Double.doubleToLongBits(d)); + write1(OtpExternal.newFloatTag); + write8BE(Double.doubleToLongBits(d)); } /** * Write a float value to the stream. - * + * * @param f * the float to use. */ public void write_float(final float f) { - write_double(f); + write_double(f); } public void write_big_integer(final BigInteger v) { - if (v.bitLength() < 64) { - this.write_long(v.longValue(), true); - return; - } - final int signum = v.signum(); - BigInteger val = v; - if (signum < 0) { - val = val.negate(); - } - final byte[] magnitude = val.toByteArray(); - final int n = magnitude.length; - // Reverse the array to make it little endian. - for (int i = 0, j = n; i < j--; i++) { - // Swap [i] with [j] - final byte b = magnitude[i]; - magnitude[i] = magnitude[j]; - magnitude[j] = b; - } - if ((n & 0xFF) == n) { - write1(OtpExternal.smallBigTag); - write1(n); // length - } else { - write1(OtpExternal.largeBigTag); - write4BE(n); // length - } - write1(signum < 0 ? 1 : 0); // sign - // Write the array - writeN(magnitude); + if (v.bitLength() < 64) { + this.write_long(v.longValue(), true); + return; + } + final int signum = v.signum(); + BigInteger val = v; + if (signum < 0) { + val = val.negate(); + } + final byte[] magnitude = val.toByteArray(); + final int n = magnitude.length; + // Reverse the array to make it little endian. + for (int i = 0, j = n; i < j--; i++) { + // Swap [i] with [j] + final byte b = magnitude[i]; + magnitude[i] = magnitude[j]; + magnitude[j] = b; + } + if ((n & 0xFF) == n) { + write1(OtpExternal.smallBigTag); + write1(n); // length + } else { + write1(OtpExternal.largeBigTag); + write4BE(n); // length + } + write1(signum < 0 ? 
1 : 0); // sign + // Write the array + writeN(magnitude); } void write_long(final long v, final boolean unsigned) { - /* - * If v<0 and unsigned==true the value - * java.lang.Long.MAX_VALUE-java.lang.Long.MIN_VALUE+1+v is written, i.e - * v is regarded as unsigned two's complement. - */ - if ((v & 0xffL) == v) { - // will fit in one byte - write1(OtpExternal.smallIntTag); - write1(v); - } else { - // note that v != 0L - if (v < 0 && unsigned || v < OtpExternal.erlMin - || v > OtpExternal.erlMax) { - // some kind of bignum - final long abs = unsigned ? v : v < 0 ? -v : v; - final int sign = unsigned ? 0 : v < 0 ? 1 : 0; - int n; - long mask; - for (mask = 0xFFFFffffL, n = 4; (abs & mask) != abs; n++, mask = mask << 8 | 0xffL) { - // count nonzero bytes - } - write1(OtpExternal.smallBigTag); - write1(n); // length - write1(sign); // sign - writeLE(abs, n); // value. obs! little endian - } else { - write1(OtpExternal.intTag); - write4BE(v); - } - } + /* + * If v<0 and unsigned==true the value + * java.lang.Long.MAX_VALUE-java.lang.Long.MIN_VALUE+1+v is written, i.e + * v is regarded as unsigned two's complement. + */ + if ((v & 0xffL) == v) { + // will fit in one byte + write1(OtpExternal.smallIntTag); + write1(v); + } else { + // note that v != 0L + if (v < 0 && unsigned || v < OtpExternal.erlMin + || v > OtpExternal.erlMax) { + // some kind of bignum + final long abs = unsigned ? v : v < 0 ? -v : v; + final int sign = unsigned ? 0 : v < 0 ? 1 : 0; + int n; + long mask; + for (mask = 0xFFFFffffL, n = 4; (abs & mask) != abs; n++, mask = mask << 8 | 0xffL) { + // count nonzero bytes + } + write1(OtpExternal.smallBigTag); + write1(n); // length + write1(sign); // sign + writeLE(abs, n); // value. obs! little endian + } else { + write1(OtpExternal.intTag); + write4BE(v); + } + } } /** * Write a long to the stream. - * + * * @param l * the long to use. */ public void write_long(final long l) { - this.write_long(l, false); + this.write_long(l, false); } /** * Write a positive long to the stream. The long is interpreted as a two's * complement unsigned long even if it is negative. - * + * * @param ul * the long to use. */ public void write_ulong(final long ul) { - this.write_long(ul, true); + this.write_long(ul, true); } /** * Write an integer to the stream. - * + * * @param i * the integer to use. */ public void write_int(final int i) { - this.write_long(i, false); + this.write_long(i, false); } /** * Write a positive integer to the stream. The integer is interpreted as a * two's complement unsigned integer even if it is negative. - * + * * @param ui * the integer to use. */ public void write_uint(final int ui) { - this.write_long(ui & 0xFFFFffffL, true); + this.write_long(ui & 0xFFFFffffL, true); } /** * Write a short to the stream. - * + * * @param s * the short to use. */ public void write_short(final short s) { - this.write_long(s, false); + this.write_long(s, false); } /** * Write a positive short to the stream. The short is interpreted as a two's * complement unsigned short even if it is negative. - * + * * @param us * the short to use. */ public void write_ushort(final short us) { - this.write_long(us & 0xffffL, true); + this.write_long(us & 0xffffL, true); } /** * Write an Erlang list header to the stream. After calling this method, you * must write 'arity' elements to the stream followed by nil, or it will not * be possible to decode it later. - * + * * @param arity * the number of elements in the list. 
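write_long above selects the smallest external representation: one-byte values get smallIntTag, values within the OtpExternal.erlMin..erlMax range get intTag, and everything else (including negative values written as unsigned) becomes a small bignum; write_big_integer falls back to write_long below 64 bits. A sketch showing which tag each magnitude produces:

    import java.math.BigInteger;

    import com.ericsson.otp.erlang.OtpOutputStream;

    public class IntegerTagDemo {
        public static void main(String[] args) {
            System.out.println(tagOf(200));                       // smallIntTag: fits in one byte
            System.out.println(tagOf(100000));                    // intTag: within erlMin..erlMax
            System.out.println(tagOf(Long.MAX_VALUE));            // smallBigTag: needs a bignum
            OtpOutputStream big = new OtpOutputStream();
            big.write_big_integer(BigInteger.ONE.shiftLeft(100)); // 2^100, also a small bignum
            System.out.println(big.toByteArray()[0]);
        }

        private static byte tagOf(long value) {
            OtpOutputStream out = new OtpOutputStream();
            out.write_long(value);
            return out.toByteArray()[0];              // first byte is the external-format tag
        }
    }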
*/ public void write_list_head(final int arity) { - if (arity == 0) { - write_nil(); - } else { - write1(OtpExternal.listTag); - write4BE(arity); - } + if (arity == 0) { + write_nil(); + } else { + write1(OtpExternal.listTag); + write4BE(arity); + } } /** * Write an empty Erlang list to the stream. */ public void write_nil() { - write1(OtpExternal.nilTag); + write1(OtpExternal.nilTag); } /** * Write an Erlang tuple header to the stream. After calling this method, * you must write 'arity' elements to the stream or it will not be possible * to decode it later. - * + * * @param arity * the number of elements in the tuple. */ public void write_tuple_head(final int arity) { - if (arity < 0xff) { - write1(OtpExternal.smallTupleTag); - write1(arity); - } else { - write1(OtpExternal.largeTupleTag); - write4BE(arity); - } + if (arity < 0xff) { + write1(OtpExternal.smallTupleTag); + write1(arity); + } else { + write1(OtpExternal.largeTupleTag); + write4BE(arity); + } } /** * Write an Erlang PID to the stream. - * + * * @param node * the nodename. - * + * * @param id * an arbitrary number. Only the low order 15 bits will be used. - * + * * @param serial * another arbitrary number. Only the low order 13 bits will be * used. - * + * * @param creation * yet another arbitrary number. Only the low order 2 bits will * be used. - * + * */ public void write_pid(final String node, final int id, final int serial, - final int creation) { - write1(OtpExternal.pidTag); - write_atom(node); - write4BE(id & 0x7fff); // 15 bits - write4BE(serial & 0x1fff); // 13 bits - write1(creation & 0x3); // 2 bits + final int creation) { + write1(OtpExternal.pidTag); + write_atom(node); + write4BE(id & 0x7fff); // 15 bits + write4BE(serial & 0x1fff); // 13 bits + write1(creation & 0x3); // 2 bits } /** * Write an Erlang port to the stream. - * + * * @param node * the nodename. - * + * * @param id * an arbitrary number. Only the low order 28 bits will be used. - * + * * @param creation * another arbitrary number. Only the low order 2 bits will be * used. - * + * */ public void write_port(final String node, final int id, final int creation) { - write1(OtpExternal.portTag); - write_atom(node); - write4BE(id & 0xfffffff); // 28 bits - write1(creation & 0x3); // 2 bits + write1(OtpExternal.portTag); + write_atom(node); + write4BE(id & 0xfffffff); // 28 bits + write1(creation & 0x3); // 2 bits } /** * Write an old style Erlang ref to the stream. - * + * * @param node * the nodename. - * + * * @param id * an arbitrary number. Only the low order 18 bits will be used. - * + * * @param creation * another arbitrary number. Only the low order 2 bits will be * used. - * + * */ public void write_ref(final String node, final int id, final int creation) { - write1(OtpExternal.refTag); - write_atom(node); - write4BE(id & 0x3ffff); // 18 bits - write1(creation & 0x3); // 2 bits + write1(OtpExternal.refTag); + write_atom(node); + write4BE(id & 0x3ffff); // 18 bits + write1(creation & 0x3); // 2 bits } /** * Write a new style (R6 and later) Erlang ref to the stream. - * + * * @param node * the nodename. - * + * * @param ids * an array of arbitrary numbers. Only the low order 18 bits of * the first number will be used. If the array contains only one * number, an old style ref will be written instead. At most * three numbers will be read from the array. - * + * * @param creation * another arbitrary number. Only the low order 2 bits will be * used. 
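As the javadoc above warns, write_tuple_head and write_list_head only emit the header: the caller must follow with exactly 'arity' elements, plus a trailing nil for lists. A sketch that hand-encodes {foo, 42} and [1, 2, 3] that way; the decode round-trip uses OtpInputStream.read_any, assumed from the companion class, purely as a check:

    import com.ericsson.otp.erlang.OtpInputStream;
    import com.ericsson.otp.erlang.OtpOutputStream;

    public class HeadDemo {
        public static void main(String[] args) throws Exception {
            OtpOutputStream out = new OtpOutputStream();

            out.write_tuple_head(2);     // {foo, 42}: header, then exactly two elements
            out.write_atom("foo");
            out.write_long(42);

            out.write_list_head(3);      // [1,2,3]: header, three elements, then nil
            out.write_long(1);
            out.write_long(2);
            out.write_long(3);
            out.write_nil();

            OtpInputStream in = new OtpInputStream(out.toByteArray());
            System.out.println(in.read_any());   // {foo,42}
            System.out.println(in.read_any());   // [1,2,3]
        }
    }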
- * + * */ public void write_ref(final String node, final int[] ids, final int creation) { - int arity = ids.length; - if (arity > 3) { - arity = 3; // max 3 words in ref - } + int arity = ids.length; + if (arity > 3) { + arity = 3; // max 3 words in ref + } - if (arity == 1) { - // use old method - this.write_ref(node, ids[0], creation); - } else { - // r6 ref - write1(OtpExternal.newRefTag); + if (arity == 1) { + // use old method + this.write_ref(node, ids[0], creation); + } else { + // r6 ref + write1(OtpExternal.newRefTag); - // how many id values - write2BE(arity); + // how many id values + write2BE(arity); - write_atom(node); + write_atom(node); - // note: creation BEFORE id in r6 ref - write1(creation & 0x3); // 2 bits + // note: creation BEFORE id in r6 ref + write1(creation & 0x3); // 2 bits - // first int gets truncated to 18 bits - write4BE(ids[0] & 0x3ffff); + // first int gets truncated to 18 bits + write4BE(ids[0] & 0x3ffff); - // remaining ones are left as is - for (int i = 1; i < arity; i++) { - write4BE(ids[i]); - } - } + // remaining ones are left as is + for (int i = 1; i < arity; i++) { + write4BE(ids[i]); + } + } } /** * Write a string to the stream. - * + * * @param s * the string to write. */ public void write_string(final String s) { - final int len = s.length(); - - switch (len) { - case 0: - write_nil(); - break; - default: - if (len <= 65535 && is8bitString(s)) { // 8-bit string - try { - final byte[] bytebuf = s.getBytes("ISO-8859-1"); - write1(OtpExternal.stringTag); - write2BE(len); - writeN(bytebuf); - } catch (final UnsupportedEncodingException e) { - write_nil(); // it should never ever get here... - } - } else { // unicode or longer, must code as list - final int[] codePoints = OtpErlangString.stringToCodePoints(s); - write_list_head(codePoints.length); - for (final int codePoint : codePoints) { - write_int(codePoint); - } - write_nil(); - } - } + final int len = s.length(); + + switch (len) { + case 0: + write_nil(); + break; + default: + if (len <= 65535 && is8bitString(s)) { // 8-bit string + try { + final byte[] bytebuf = s.getBytes("ISO-8859-1"); + write1(OtpExternal.stringTag); + write2BE(len); + writeN(bytebuf); + } catch (final UnsupportedEncodingException e) { + write_nil(); // it should never ever get here... + } + } else { // unicode or longer, must code as list + final int[] codePoints = OtpErlangString.stringToCodePoints(s); + write_list_head(codePoints.length); + for (final int codePoint : codePoints) { + write_int(codePoint); + } + write_nil(); + } + } } private boolean is8bitString(final String s) { - for (int i = 0; i < s.length(); ++i) { - final char c = s.charAt(i); - if (c < 0 || c > 255) { - return false; - } - } - return true; + for (int i = 0; i < s.length(); ++i) { + final char c = s.charAt(i); + if (c < 0 || c > 255) { + return false; + } + } + return true; } /** @@ -858,7 +864,7 @@ public class OtpOutputStream extends ByteArrayOutputStream { * the Erlang term to write. 
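write_string above uses the compact stringTag form only for strings of at most 65535 characters whose code points all fit in 8 bits; anything longer or wider is re-encoded as a list of integer code points terminated by nil, which costs noticeably more space. A short illustration:

    import com.ericsson.otp.erlang.OtpOutputStream;

    public class StringDemo {
        public static void main(String[] args) {
            OtpOutputStream plain = new OtpOutputStream();
            plain.write_string("hello");               // 8-bit and short -> stringTag + length + bytes
            OtpOutputStream wide = new OtpOutputStream();
            wide.write_string("h\u00e9llo\u4e16");     // a >255 code point forces the list encoding
            System.out.println(plain.getPos() + " vs " + wide.getPos() + " bytes");
        }
    }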
*/ public void write_compressed(final OtpErlangObject o) { - write_compressed(o, Deflater.DEFAULT_COMPRESSION); + write_compressed(o, Deflater.DEFAULT_COMPRESSION); } /** @@ -869,119 +875,119 @@ public class OtpOutputStream extends ByteArrayOutputStream { * @param level * the compression level (<tt>0..9</tt>) */ - public void write_compressed(final OtpErlangObject o, int level) { - @SuppressWarnings("resource") - final OtpOutputStream oos = new OtpOutputStream(o); - /* - * similar to erts_term_to_binary() in external.c: - * We don't want to compress if compression actually increases the size. - * Since compression uses 5 extra bytes (COMPRESSED tag + size), don't - * compress if the original term is smaller. - */ - if (oos.size() < 5) { - // fast path for small terms - try { - oos.writeTo(this); - // if the term is written as a compressed term, the output - // stream is closed, so we do this here, too - this.close(); - } catch (IOException e) { - throw new java.lang.IllegalArgumentException( - "Intermediate stream failed for Erlang object " + o); - } - } else { - int startCount = super.count; - // we need destCount bytes for an uncompressed term - // -> if compression uses more, use the uncompressed term! - int destCount = startCount + oos.size(); - this.fixedSize = destCount; - Deflater def = new Deflater(level); - final java.util.zip.DeflaterOutputStream dos = new java.util.zip.DeflaterOutputStream( - this, def); - try { - write1(OtpExternal.compressedTag); - write4BE(oos.size()); - oos.writeTo(dos); - dos.close(); // note: closes this, too! - } catch (final IllegalArgumentException e) { - // discard further un-compressed data - // -> if not called, there may be memory leaks! - def.end(); - // could not make the value smaller than originally - // -> reset to starting count, write uncompressed - super.count = startCount; - try { - oos.writeTo(this); - // if the term is written as a compressed term, the output - // stream is closed, so we do this here, too - this.close(); - } catch (IOException e2) { - throw new java.lang.IllegalArgumentException( - "Intermediate stream failed for Erlang object " + o); - } - } catch (final IOException e) { - throw new java.lang.IllegalArgumentException( - "Intermediate stream failed for Erlang object " + o); - } finally { - this.fixedSize = Integer.MAX_VALUE; - try { - dos.close(); - } catch (IOException e) { - // ignore + public void write_compressed(final OtpErlangObject o, final int level) { + @SuppressWarnings("resource") + final OtpOutputStream oos = new OtpOutputStream(o); + /* + * similar to erts_term_to_binary() in external.c: We don't want to + * compress if compression actually increases the size. Since + * compression uses 5 extra bytes (COMPRESSED tag + size), don't + * compress if the original term is smaller. + */ + if (oos.size() < 5) { + // fast path for small terms + try { + oos.writeTo(this); + // if the term is written as a compressed term, the output + // stream is closed, so we do this here, too + close(); + } catch (final IOException e) { + throw new java.lang.IllegalArgumentException( + "Intermediate stream failed for Erlang object " + o); + } + } else { + final int startCount = super.count; + // we need destCount bytes for an uncompressed term + // -> if compression uses more, use the uncompressed term! 
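The rewritten write_compressed keeps the erts_term_to_binary() policy spelled out in its comments: terms smaller than the 5-byte compression header are passed through, and if deflation does not actually shrink the term the uncompressed form is written instead. A rough comparison, assuming OtpErlangString for building a highly compressible term:

    import com.ericsson.otp.erlang.OtpErlangString;
    import com.ericsson.otp.erlang.OtpOutputStream;

    public class CompressDemo {
        public static void main(String[] args) {
            // a repetitive term, so deflation actually pays off
            OtpErlangString term = new OtpErlangString(
                    new String(new char[10000]).replace('\0', 'a'));

            OtpOutputStream plain = new OtpOutputStream(term);
            OtpOutputStream packed = new OtpOutputStream();
            packed.write_compressed(term);

            System.out.println("uncompressed: " + plain.size() + " bytes");
            System.out.println("compressed:   " + packed.size() + " bytes");
        }
    }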
+ final int destCount = startCount + oos.size(); + fixedSize = destCount; + final Deflater def = new Deflater(level); + final java.util.zip.DeflaterOutputStream dos = new java.util.zip.DeflaterOutputStream( + this, def); + try { + write1(OtpExternal.compressedTag); + write4BE(oos.size()); + oos.writeTo(dos); + dos.close(); // note: closes this, too! + } catch (final IllegalArgumentException e) { + // discard further un-compressed data + // -> if not called, there may be memory leaks! + def.end(); + // could not make the value smaller than originally + // -> reset to starting count, write uncompressed + super.count = startCount; + try { + oos.writeTo(this); + // if the term is written as a compressed term, the output + // stream is closed, so we do this here, too + close(); + } catch (final IOException e2) { + throw new java.lang.IllegalArgumentException( + "Intermediate stream failed for Erlang object " + o); + } + } catch (final IOException e) { + throw new java.lang.IllegalArgumentException( + "Intermediate stream failed for Erlang object " + o); + } finally { + fixedSize = Integer.MAX_VALUE; + try { + dos.close(); + } catch (final IOException e) { + // ignore + } + } } - } - } } /** * Write an arbitrary Erlang term to the stream. - * + * * @param o * the Erlang term to write. */ public void write_any(final OtpErlangObject o) { - // calls one of the above functions, depending on o - o.encode(this); + // calls one of the above functions, depending on o + o.encode(this); } public void write_fun(final OtpErlangPid pid, final String module, - final long old_index, final int arity, final byte[] md5, - final long index, final long uniq, final OtpErlangObject[] freeVars) { - if (arity == -1) { - write1(OtpExternal.funTag); - write4BE(freeVars.length); - pid.encode(this); - write_atom(module); - write_long(index); - write_long(uniq); - for (final OtpErlangObject fv : freeVars) { - fv.encode(this); - } - } else { - write1(OtpExternal.newFunTag); - final int saveSizePos = getPos(); - write4BE(0); // this is where we patch in the size - write1(arity); - writeN(md5); - write4BE(index); - write4BE(freeVars.length); - write_atom(module); - write_long(old_index); - write_long(uniq); - pid.encode(this); - for (final OtpErlangObject fv : freeVars) { - fv.encode(this); - } - poke4BE(saveSizePos, getPos() - saveSizePos); - } + final long old_index, final int arity, final byte[] md5, + final long index, final long uniq, final OtpErlangObject[] freeVars) { + if (arity == -1) { + write1(OtpExternal.funTag); + write4BE(freeVars.length); + pid.encode(this); + write_atom(module); + write_long(index); + write_long(uniq); + for (final OtpErlangObject fv : freeVars) { + fv.encode(this); + } + } else { + write1(OtpExternal.newFunTag); + final int saveSizePos = getPos(); + write4BE(0); // this is where we patch in the size + write1(arity); + writeN(md5); + write4BE(index); + write4BE(freeVars.length); + write_atom(module); + write_long(old_index); + write_long(uniq); + pid.encode(this); + for (final OtpErlangObject fv : freeVars) { + fv.encode(this); + } + poke4BE(saveSizePos, getPos() - saveSizePos); + } } public void write_external_fun(final String module, final String function, - final int arity) { - write1(OtpExternal.externalFunTag); - write_atom(module); - write_atom(function); - write_long(arity); + final int arity) { + write1(OtpExternal.externalFunTag); + write_atom(module); + write_atom(function); + write_long(arity); } public void write_map_head(final int arity) { diff --git 
a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpPeer.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpPeer.java index df5ce61820..2c79c04247 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpPeer.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpPeer.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -28,59 +28,59 @@ import java.net.UnknownHostException; */ public class OtpPeer extends AbstractNode { int distChoose = 0; /* - * this is set by OtpConnection and is the highest - * common protocol version we both support - */ + * this is set by OtpConnection and is the highest + * common protocol version we both support + */ OtpPeer() { - super(); + super(); } /** * Create a peer node. - * + * * @param node - * the name of the node. + * the name of the node. */ public OtpPeer(final String node) { - super(node); + super(node); } /** * Create a connection to a remote node. - * + * * @param self - * the local node from which you wish to connect. - * + * the local node from which you wish to connect. + * * @return a connection to the remote node. - * + * * @exception java.net.UnknownHostException - * if the remote host could not be found. - * + * if the remote host could not be found. + * * @exception java.io.IOException - * if it was not possible to connect to the remote node. - * + * if it was not possible to connect to the remote node. + * * @exception OtpAuthException - * if the connection was refused by the remote node. - * + * if the connection was refused by the remote node. + * * @deprecated Use the corresponding method in {@link OtpSelf} instead. */ @Deprecated public OtpConnection connect(final OtpSelf self) throws IOException, - UnknownHostException, OtpAuthException { - return new OtpConnection(self, this); + UnknownHostException, OtpAuthException { + return new OtpConnection(self, this); } // package /* * Get the port number used by the remote node. - * + * * @return the port number used by the remote node, or 0 if the node was not * registered with the port mapper. - * + * * @exception java.io.IOException if the port mapper could not be contacted. */ int port() throws IOException { - return OtpEpmd.lookupPort(this); + return OtpEpmd.lookupPort(this); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSelf.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSelf.java index 8e78cda894..166dac5701 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSelf.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSelf.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. 
You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -26,26 +26,26 @@ import java.net.UnknownHostException; /** * Represents an OTP node. It is used to connect to remote nodes or accept * incoming connections from remote nodes. - * + * * <p> * When the Java node will be connecting to a remote Erlang, Java or C node, it * must first identify itself as a node by creating an instance of this class, * after which it may connect to the remote node. - * + * * <p> * When you create an instance of this class, it will bind a socket to a port so * that incoming connections can be accepted. However the port number will not * be made available to other nodes wishing to connect until you explicitely * register with the port mapper daemon by calling {@link #publishPort()}. * </p> - * + * * <pre> * OtpSelf self = new OtpSelf("client", "authcookie"); // identify self * OtpPeer other = new OtpPeer("server"); // identify peer - * + * * OtpConnection conn = self.connect(other); // connect to peer * </pre> - * + * */ public class OtpSelf extends OtpLocalNode { private final ServerSocket sock; @@ -58,47 +58,47 @@ public class OtpSelf extends OtpLocalNode { * directory. The home directory is obtained from the System property * "user.home". * </p> - * + * * <p> * If the file does not exist, an empty string is used. This method makes no * attempt to create the file. * </p> - * + * * @param node - * the name of this node. - * + * the name of this node. + * */ public OtpSelf(final String node) throws IOException { - this(node, defaultCookie, 0); + this(node, defaultCookie, 0); } /** * Create a self node. - * + * * @param node - * the name of this node. - * + * the name of this node. + * * @param cookie - * the authorization cookie that will be used by this node - * when it communicates with other nodes. + * the authorization cookie that will be used by this node when + * it communicates with other nodes. */ public OtpSelf(final String node, final String cookie) throws IOException { - this(node, cookie, 0); + this(node, cookie, 0); } public OtpSelf(final String node, final String cookie, final int port) - throws IOException { - super(node, cookie); + throws IOException { + super(node, cookie); - sock = new ServerSocket(port); + sock = new ServerSocket(port); - if (port != 0) { - this.port = port; - } else { - this.port = sock.getLocalPort(); - } + if (port != 0) { + this.port = port; + } else { + this.port = sock.getLocalPort(); + } - pid = createPid(); + pid = createPid(); } /** @@ -106,12 +106,12 @@ public class OtpSelf extends OtpLocalNode { * messages sent by this node. Anonymous messages are those sent via send * methods in {@link OtpConnection OtpConnection} that do not specify a * sender. - * + * * @return the Erlang PID that will be used as the sender id in all * anonymous messages sent by this node. */ public OtpErlangPid pid() { - return pid; + return pid; } /** @@ -119,31 +119,31 @@ public class OtpSelf extends OtpLocalNode { * connect to this one. This method establishes a connection to the Erlang * port mapper (Epmd) and registers the server node's name and port so that * remote nodes are able to connect. 
- * + * * <p> * This method will fail if an Epmd process is not running on the localhost. * See the Erlang documentation for information about starting Epmd. - * + * * <p> * Note that once this method has been called, the node is expected to be * available to accept incoming connections. For that reason you should make * sure that you call {@link #accept()} shortly after calling * {@link #publishPort()}. When you no longer intend to accept connections * you should call {@link #unPublishPort()}. - * + * * @return true if the operation was successful, false if the node was * already registered. - * + * * @exception java.io.IOException - * if the port mapper could not be contacted. + * if the port mapper could not be contacted. */ public boolean publishPort() throws IOException { - if (getEpmd() != null) { - return false; // already published - } + if (getEpmd() != null) { + return false; // already published + } - OtpEpmd.publishPort(this); - return getEpmd() != null; + OtpEpmd.publishPort(this); + return getEpmd() != null; } /** @@ -151,71 +151,71 @@ public class OtpSelf extends OtpLocalNode { * mapper, thus preventing any new connections from remote nodes. */ public void unPublishPort() { - // unregister with epmd - OtpEpmd.unPublishPort(this); - - // close the local descriptor (if we have one) - try { - if (super.epmd != null) { - super.epmd.close(); - } - } catch (final IOException e) {/* ignore close errors */ - } - super.epmd = null; + // unregister with epmd + OtpEpmd.unPublishPort(this); + + // close the local descriptor (if we have one) + try { + if (super.epmd != null) { + super.epmd.close(); + } + } catch (final IOException e) {/* ignore close errors */ + } + super.epmd = null; } /** * Accept an incoming connection from a remote node. A call to this method * will block until an incoming connection is at least attempted. - * + * * @return a connection to a remote node. - * + * * @exception java.io.IOException - * if a remote node attempted to connect but no common - * protocol was found. - * + * if a remote node attempted to connect but no common + * protocol was found. + * * @exception OtpAuthException - * if a remote node attempted to connect, but was not - * authorized to connect. + * if a remote node attempted to connect, but was not + * authorized to connect. */ public OtpConnection accept() throws IOException, OtpAuthException { - Socket newsock = null; - - while (true) { - try { - newsock = sock.accept(); - return new OtpConnection(this, newsock); - } catch (final IOException e) { - try { - if (newsock != null) { - newsock.close(); - } - } catch (final IOException f) {/* ignore close errors */ - } - throw e; - } - } + Socket newsock = null; + + while (true) { + try { + newsock = sock.accept(); + return new OtpConnection(this, newsock); + } catch (final IOException e) { + try { + if (newsock != null) { + newsock.close(); + } + } catch (final IOException f) {/* ignore close errors */ + } + throw e; + } + } } /** * Open a connection to a remote node. - * + * * @param other - * the remote node to which you wish to connect. - * + * the remote node to which you wish to connect. + * * @return a connection to the remote node. - * + * * @exception java.net.UnknownHostException - * if the remote host could not be found. - * + * if the remote host could not be found. + * * @exception java.io.IOException - * if it was not possible to connect to the remote node. - * + * if it was not possible to connect to the remote node. 
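The OtpSelf javadoc above describes the connection-oriented API: the accepting side registers with epmd via publishPort() and blocks in accept(), while the connecting side builds an OtpPeer and calls connect(). A compact sketch of both ends; node names, the cookie, and the registered mailbox name are placeholders:

    import com.ericsson.otp.erlang.*;

    public class ConnectDemo {
        // accepting side
        static void server() throws Exception {
            OtpSelf self = new OtpSelf("server", "authcookie");
            self.publishPort();                      // register name and port with epmd
            OtpConnection conn = self.accept();      // blocks until a peer connects
            System.out.println("connected: " + conn.peer().node());
        }

        // connecting side
        static void client() throws Exception {
            OtpSelf self = new OtpSelf("client", "authcookie");
            OtpPeer other = new OtpPeer("server");   // resolved through epmd
            OtpConnection conn = self.connect(other);
            conn.send("some_registered_name", new OtpErlangAtom("hello"));
        }

        public static void main(String[] args) throws Exception {
            if (args.length > 0 && args[0].equals("server")) {
                server();
            } else {
                client();
            }
        }
    }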
+ * * @exception OtpAuthException - * if the connection was refused by the remote node. + * if the connection was refused by the remote node. */ public OtpConnection connect(final OtpPeer other) throws IOException, - UnknownHostException, OtpAuthException { - return new OtpConnection(this, other); + UnknownHostException, OtpAuthException { + return new OtpConnection(this, other); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpServer.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpServer.java index 0de399ac61..9a7d8bdd60 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpServer.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpServer.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2000-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -23,88 +23,89 @@ import java.io.IOException; /** * Represents a local OTP client or server node. It is used when you want other * nodes to be able to establish connections to this one. - * + * * When you create an instance of this class, it will bind a socket to a port so * that incoming connections can be accepted. However the port number will not * be made available to other nodes wishing to connect until you explicitely * register with the port mapper daemon by calling {@link #publishPort()}. - * + * * <p> * When the Java node will be connecting to a remote Erlang, Java or C node, it * must first identify itself as a node by creating an instance of this class, * after which it may connect to the remote node. - * + * * <p> * Setting up a connection may be done as follows: - * - * + * + * * <pre> * OtpServer self = new OtpServer("server", "cookie"); // identify self * self.publishPort(); // make port information available - * + * * OtpConnection conn = self.accept(); // get incoming connection * </pre> - * + * * @see OtpSelf - * - * @deprecated the functionality of this class has been moved to {@link OtpSelf}. + * + * @deprecated the functionality of this class has been moved to {@link OtpSelf} + * . */ @Deprecated public class OtpServer extends OtpSelf { /** * Create an {@link OtpServer} from an existing {@link OtpSelf}. - * + * * @param self - * an existing self node. - * + * an existing self node. + * * @exception java.io.IOException - * if a ServerSocket could not be created. - * + * if a ServerSocket could not be created. + * */ public OtpServer(final OtpSelf self) throws IOException { - super(self.node(), self.cookie()); + super(self.node(), self.cookie()); } /** * Create an OtpServer, using a vacant port chosen by the operating system. * To determine what port was chosen, call the object's {@link #port()} * method. - * + * * @param node - * the name of the node. - * + * the name of the node. + * * @param cookie - * the authorization cookie that will be used by this node - * when accepts connections from remote nodes. 
- * + * the authorization cookie that will be used by this node when + * accepts connections from remote nodes. + * * @exception java.io.IOException - * if a ServerSocket could not be created. - * + * if a ServerSocket could not be created. + * */ public OtpServer(final String node, final String cookie) throws IOException { - super(node, cookie); + super(node, cookie); } /** * Create an OtpServer, using the specified port number. - * + * * @param node - * a name for this node, as above. - * + * a name for this node, as above. + * * @param cookie - * the authorization cookie that will be used by this node - * when accepts connections from remote nodes. - * + * the authorization cookie that will be used by this node when + * accepts connections from remote nodes. + * * @param port - * the port number to bind the socket to. - * + * the port number to bind the socket to. + * * @exception java.io.IOException - * if a ServerSocket could not be created or if the - * chosen port number was not available. - * + * if a ServerSocket could not be created or if the chosen + * port number was not available. + * */ public OtpServer(final String node, final String cookie, final int port) - throws IOException { - super(node, cookie, port); + throws IOException { + super(node, cookie, port); } } diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSystem.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSystem.java index 969da39d70..8eb1f86764 100644 --- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSystem.java +++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSystem.java @@ -1,19 +1,19 @@ /* * %CopyrightBegin% - * + * * Copyright Ericsson AB 2004-2009. All Rights Reserved. - * + * * The contents of this file are subject to the Erlang Public License, * Version 1.1, (the "License"); you may not use this file except in * compliance with the License. You should have received a copy of the * Erlang Public License along with this software. If not, it can be * retrieved online at http://www.erlang.org/. - * + * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See * the License for the specific language governing rights and limitations * under the License. - * + * * %CopyrightEnd% */ package com.ericsson.otp.erlang; @@ -24,27 +24,27 @@ final class OtpSystem { static { - final String rel = System.getProperty("OtpCompatRel", "0"); - - try { - - switch (Integer.parseInt(rel)) { - case 1: - case 2: - case 3: - case 4: - case 5: - case 6: - case 7: - case 8: - case 9: - case 0: - default: - break; - } - } catch (final NumberFormatException e) { - /* Ignore ... */ - } + final String rel = System.getProperty("OtpCompatRel", "0"); + + try { + + switch (Integer.parseInt(rel)) { + case 1: + case 2: + case 3: + case 4: + case 5: + case 6: + case 7: + case 8: + case 9: + case 0: + default: + break; + } + } catch (final NumberFormatException e) { + /* Ignore ... 
*/ + } } diff --git a/lib/jinterface/test/.classpath b/lib/jinterface/test/.classpath new file mode 100644 index 0000000000..2e4a3e6776 --- /dev/null +++ b/lib/jinterface/test/.classpath @@ -0,0 +1,8 @@ +<?xml version="1.0" encoding="UTF-8"?> +<classpath> + <classpathentry kind="src" output="jinterface_SUITE_data" path="jinterface_SUITE_data"/> + <classpathentry kind="src" output="nc_SUITE_data" path="nc_SUITE_data"/> + <classpathentry combineaccessrules="false" kind="src" path="/jinterface"/> + <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/> + <classpathentry kind="output" path="bin"/> +</classpath> diff --git a/lib/jinterface/test/.project b/lib/jinterface/test/.project new file mode 100644 index 0000000000..4144c6ebea --- /dev/null +++ b/lib/jinterface/test/.project @@ -0,0 +1,17 @@ +<?xml version="1.0" encoding="UTF-8"?> +<projectDescription> + <name>jinterface_tests</name> + <comment></comment> + <projects> + </projects> + <buildSpec> + <buildCommand> + <name>org.eclipse.jdt.core.javabuilder</name> + <arguments> + </arguments> + </buildCommand> + </buildSpec> + <natures> + <nature>org.eclipse.jdt.core.javanature</nature> + </natures> +</projectDescription> diff --git a/lib/jinterface/test/.settings/org.eclipse.jdt.core.prefs b/lib/jinterface/test/.settings/org.eclipse.jdt.core.prefs new file mode 100644 index 0000000000..af0f20f97a --- /dev/null +++ b/lib/jinterface/test/.settings/org.eclipse.jdt.core.prefs @@ -0,0 +1,7 @@ +eclipse.preferences.version=1 +org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled +org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.5 +org.eclipse.jdt.core.compiler.compliance=1.5 +org.eclipse.jdt.core.compiler.problem.assertIdentifier=error +org.eclipse.jdt.core.compiler.problem.enumIdentifier=error +org.eclipse.jdt.core.compiler.source=1.5 diff --git a/lib/jinterface/test/jinterface_SUITE_data/.gitignore b/lib/jinterface/test/jinterface_SUITE_data/.gitignore new file mode 100644 index 0000000000..6b468b62a9 --- /dev/null +++ b/lib/jinterface/test/jinterface_SUITE_data/.gitignore @@ -0,0 +1 @@ +*.class diff --git a/lib/jinterface/test/jitu.erl b/lib/jinterface/test/jitu.erl index 46b8cb3ac2..b68dfd0351 100644 --- a/lib/jinterface/test/jitu.erl +++ b/lib/jinterface/test/jitu.erl @@ -117,10 +117,7 @@ classpath(Dir) -> end, es(Dir++PS++ filename:join([code:lib_dir(jinterface),"priv","OtpErlang.jar"])++PS++ - case os:getenv("CLASSPATH") of - false -> ""; - Classpath -> Classpath - end, + os:getenv("CLASSPATH", "") end, Quote, EscSpace). diff --git a/lib/jinterface/test/nc_SUITE_data/.gitignore b/lib/jinterface/test/nc_SUITE_data/.gitignore new file mode 100644 index 0000000000..6b468b62a9 --- /dev/null +++ b/lib/jinterface/test/nc_SUITE_data/.gitignore @@ -0,0 +1 @@ +*.class diff --git a/lib/kernel/doc/src/file.xml b/lib/kernel/doc/src/file.xml index dcb9640dcf..338d62e82b 100644 --- a/lib/kernel/doc/src/file.xml +++ b/lib/kernel/doc/src/file.xml @@ -1693,9 +1693,9 @@ <desc> <p>Makes sure that any buffers kept by the operating system (not by the Erlang runtime system) are written to disk. In - many ways it's resembles fsync but it not requires to update - some of file's metadata such as the access time. On - some platforms, this function might have no effect.</p> + many ways it resembles fsync but it does not update + some of the file's metadata such as the access time. 
On + some platforms this function has no effect.</p> <p>Applications that access databases or log files often write a tiny data fragment (e.g., one line in a log file) and then call fsync() immediately in order to ensure that the written @@ -1703,11 +1703,11 @@ will always initiate two write operations: one for the newly written data and another one in order to update the modification time stored in the inode. If the modification time is not a part - of the transaction concept fdatasync() can be used to avoid + of the transaction concept, fdatasync() can be used to avoid unnecessary inode disk write operations.</p> - <p>Available only in some POSIX systems. This call results in a - call to fsync(), or has no effect, in systems not implementing - the fdatasync syscall.</p> + <p>Available only in some POSIX systems, this call results in a + call to fsync(), or has no effect in systems not implementing + the fdatasync() syscall.</p> </desc> </func> <func> diff --git a/lib/kernel/doc/src/os.xml b/lib/kernel/doc/src/os.xml index 2b57e75023..8b85f24455 100644 --- a/lib/kernel/doc/src/os.xml +++ b/lib/kernel/doc/src/os.xml @@ -100,6 +100,19 @@ DirOut = os:cmd("dir"), % on Win32 platform</code> </desc> </func> <func> + <name name="getenv" arity="2"/> + <fsummary>Get the value of an environment variable</fsummary> + <desc> + <p>Returns the <c><anno>Value</anno></c> of the environment variable + <c><anno>VarName</anno></c>, or <c>DefaultValue</c> if the environment variable + is undefined.</p> + <p>If Unicode file name encoding is in effect (see the <seealso + marker="erts:erl#file_name_encoding">erl manual + page</seealso>), the strings (both <c><anno>VarName</anno></c> and + <c><anno>Value</anno></c>) may contain characters with codepoints > 255.</p> + </desc> + </func> + <func> <name name="getpid" arity="0"/> <fsummary>Return the process identifier of the emulator process</fsummary> <desc> diff --git a/lib/kernel/doc/src/pg2.xml b/lib/kernel/doc/src/pg2.xml index 5eb63c1ef6..35cf85470a 100644 --- a/lib/kernel/doc/src/pg2.xml +++ b/lib/kernel/doc/src/pg2.xml @@ -34,11 +34,8 @@ <module>pg2</module> <modulesummary>Distributed Named Process Groups</modulesummary> <description> - <p>This module implements process groups. The groups in this - module differ from the groups in the module <c>pg</c> in several - ways. In <c>pg</c>, each message is sent to all members in the - group. In this module, each message may be sent to one, some, or - all members. + <p>This module implements process groups. Each message may be sent + to one, some, or all members of the group. </p> <p>A group of processes can be accessed by a common name. 
For example, if there is a group named <c>foobar</c>, there can be a @@ -160,8 +157,7 @@ <section> <title>See Also</title> - <p><seealso marker="kernel_app">kernel(6)</seealso>, - <seealso marker="stdlib:pg">pg(3)</seealso></p> + <p><seealso marker="kernel_app">kernel(6)</seealso></p> </section> </erlref> diff --git a/lib/kernel/src/file.erl b/lib/kernel/src/file.erl index 3d6665a36a..b6b153ae56 100644 --- a/lib/kernel/src/file.erl +++ b/lib/kernel/src/file.erl @@ -445,21 +445,15 @@ open(Item, ModeList) when is_list(ModeList) -> case lists:member(raw, ModeList) of %% Raw file, use ?PRIM_FILE to handle this file true -> - %% check if raw file mode is disabled - case catch application:get_env(kernel, raw_files) of - {ok,false} -> - open(Item, lists:delete(raw, ModeList)); - _ -> % undefined | {ok,true} - Args = [file_name(Item) | ModeList], - case check_args(Args) of - ok -> - [FileName | _] = Args, - %% We rely on the returned Handle (in {ok, Handle}) - %% being a pid() or a #file_descriptor{} - ?PRIM_FILE:open(FileName, ModeList); - Error -> - Error - end + Args = [file_name(Item) | ModeList], + case check_args(Args) of + ok -> + [FileName | _] = Args, + %% We rely on the returned Handle (in {ok, Handle}) + %% being a pid() or a #file_descriptor{} + ?PRIM_FILE:open(FileName, ModeList); + Error -> + Error end; false -> case lists:member(ram, ModeList) of diff --git a/lib/kernel/src/file_io_server.erl b/lib/kernel/src/file_io_server.erl index 0e9ff5bc0f..7d30e7e1d8 100644 --- a/lib/kernel/src/file_io_server.erl +++ b/lib/kernel/src/file_io_server.erl @@ -307,18 +307,18 @@ io_request({get_chars,Enc,_Prompt,N}, #state{}=State) -> get_chars(N, Enc, State); -%% -%% This optimization gives almost nothing - needs more working... -%% Disabled for now. /PaN -%% -%% io_request({get_line,Enc,_Prompt}, -%% #state{unic=latin1}=State) -> -%% get_line(Enc,State); - -io_request({get_line,Enc,_Prompt}, - #state{}=State) -> - get_chars(io_lib, collect_line, [], Enc, State); - +io_request({get_line,OutEnc,_Prompt}, #state{buf=Buf, read_mode=Mode, unic=InEnc} = State0) -> + try + %% Minimize the encoding conversions + WorkEnc = case InEnc of + {_,_} -> OutEnc; %% utf16 or utf32 + _ -> InEnc %% Byte oriented utf8 or latin1 + end, + {Res, State} = get_line(start, convert_enc(Buf, InEnc, WorkEnc), WorkEnc, State0), + {reply, cast(Res, Mode, WorkEnc, OutEnc), State} + catch exit:ExError -> + {stop,ExError,{error,ExError},State0#state{buf= <<>>}} + end; io_request({setopts, Opts}, #state{}=State) when is_list(Opts) -> @@ -386,56 +386,40 @@ put_chars(Chars, InEncoding, #state{handle=Handle, unic=OutEncoding}=State) -> {stop,normal,{error,{no_translation, InEncoding, OutEncoding}},State} end. -%% -%% Process the I/O request get_line for latin1 encoding of file specially -%% Unfortunately this function gives almost nothing, it needs more work -%% I disable it for now /PaN -%% -%% srch(<<>>,_,_) -> -%% nomatch; -%% srch(<<X:8,_/binary>>,X,N) -> -%% {match,N}; -%% srch(<<_:8,T/binary>>,X,N) -> -%% srch(T,X,N+1). 
-%% get_line(OutEnc, #state{handle=Handle,buf = <<>>,unic=latin1}=State) -> -%% case ?PRIM_FILE:read(Handle,?READ_SIZE_BINARY) of -%% {ok, B} -> -%% get_line(OutEnc, State#state{buf = B}); -%% eof -> -%% {reply,eof,State}; -%% {error,Reason}=Error -> -%% {stop,Reason,Error,State} -%% end; -%% get_line(OutEnc, #state{handle=Handle,buf=Buf,read_mode=ReadMode,unic=latin1}=State) -> -%% case srch(Buf,$\n,0) of -%% nomatch -> -%% case ?PRIM_FILE:read(Handle,?READ_SIZE_BINARY) of -%% {ok, B} -> -%% get_line(OutEnc,State#state{buf = <<Buf/binary,B/binary>>}); -%% eof -> -%% std_reply(cast(Buf, ReadMode,latin1,OutEnc), State); -%% {error,Reason}=Error -> -%% {stop,Reason,Error,State#state{buf= <<>>}} -%% end; -%% {match,Pos} when Pos >= 1-> -%% PosP1 = Pos + 1, -%% <<Res0:PosP1/binary,NewBuf/binary>> = Buf, -%% PosM1 = Pos - 1, -%% Res = case Res0 of -%% <<Chomped:PosM1/binary,$\r:8,$\n:8>> -> -%% cat(Chomped, <<"\n">>, ReadMode,latin1,OutEnc); -%% _Other -> -%% cast(Res0, ReadMode,latin1,OutEnc) -%% end, -%% {reply,Res,State#state{buf=NewBuf}}; -%% {match,Pos} -> -%% PosP1 = Pos + 1, -%% <<Res:PosP1/binary,NewBuf/binary>> = Buf, -%% {reply,Res,State#state{buf=NewBuf}} -%% end; -%% get_line(_, #state{}=State) -> -%% {error,{error,get_line},State}. - +get_line(S, {<<>>, Cont}, OutEnc, + #state{handle=Handle, read_mode=Mode, unic=InEnc}=State) -> + case ?PRIM_FILE:read(Handle, read_size(Mode)) of + {ok,Bin} -> + get_line(S, convert_enc([Cont, Bin], InEnc, OutEnc), OutEnc, State); + eof -> + get_line(S, {eof, Cont}, OutEnc, State); + {error,Reason}=Error -> + {stop,Reason,Error,State} + end; +get_line(S0, {Buf, BCont}, OutEnc, #state{unic=InEnc}=State) -> + case io_lib:collect_line(S0, Buf, OutEnc, []) of + {stop, Result, Cont0} -> + %% Convert both buffers back to file InEnc encoding + {Cont, <<>>} = convert_enc(Cont0, OutEnc, InEnc), + {Result, State#state{buf=cast_binary([Cont, BCont])}}; + S -> + get_line(S, {<<>>, BCont}, OutEnc, State) + end. + +convert_enc(Bins, Enc, Enc) -> + {cast_binary(Bins), <<>>}; +convert_enc(eof, _, _) -> + {<<>>, <<>>}; +convert_enc(Bin, InEnc, OutEnc) -> + case unicode:characters_to_binary(Bin, InEnc, OutEnc) of + Res when is_binary(Res) -> + {Res, <<>>}; + {incomplete, Res, Cont} -> + {Res, Cont}; + {error, _, _} -> + exit({no_translation, InEnc, OutEnc}) + end. + %% %% Process the I/O request get_chars %% @@ -640,8 +624,6 @@ invalid_unicode_error(Mod, Func, XtraArg, S) -> %% Convert error code to make it look as before err_func(io_lib, get_until, {_,F,_}) -> - F; -err_func(_, F, _) -> F. @@ -713,6 +695,8 @@ cat(B1, B2, list, latin1,_) -> binary_to_list(B1)++binary_to_list(B2). 
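The rewritten get_line path above serves io:get_line/2 requests through io_lib:collect_line/4 with on-the-fly encoding conversion between the file encoding and the requested one. A minimal sketch of the calling side, under the assumption of a hypothetical UTF-8 encoded file named "utf8.txt" (function name is illustrative):

    read_first_line() ->
        {ok, Fd} = file:open("utf8.txt", [read, {encoding, utf8}]),
        Line = io:get_line(Fd, ''),          %% request handled by the file_io_server code above
        ok = file:close(Fd),
        Line.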
%% Cast binary to list or binary +cast(eof, _, _, _) -> + eof; cast(B, binary, latin1, latin1) -> B; cast(B, binary, InEncoding, OutEncoding) -> @@ -736,6 +720,8 @@ cast(B, list, InEncoding, OutEncoding) -> %% Convert buffer to binary cast_binary(Binary) when is_binary(Binary) -> Binary; +cast_binary([<<>>|List]) -> + cast_binary(List); cast_binary(List) when is_list(List) -> list_to_binary(List); cast_binary(_EOF) -> diff --git a/lib/kernel/src/group.erl b/lib/kernel/src/group.erl index b36dbf33dd..046885f885 100644 --- a/lib/kernel/src/group.erl +++ b/lib/kernel/src/group.erl @@ -111,8 +111,13 @@ start_shell1(Fun) -> server_loop(Drv, Shell, Buf0) -> receive {io_request,From,ReplyAs,Req} when is_pid(From) -> - Buf = io_request(Req, From, ReplyAs, Drv, Buf0), - server_loop(Drv, Shell, Buf); + %% This io_request may cause a transition to a couple of + %% selective receive loops elsewhere in this module. + Buf = io_request(Req, From, ReplyAs, Drv, Buf0), + server_loop(Drv, Shell, Buf); + {reply,{{From,ReplyAs},Reply}} -> + io_reply(From, ReplyAs, Reply), + server_loop(Drv, Shell, Buf0); {driver_id,ReplyTo} -> ReplyTo ! {self(),driver_id,Drv}, server_loop(Drv, Shell, Buf0); @@ -172,10 +177,13 @@ set_unicode_state(Drv,Bool) -> io_request(Req, From, ReplyAs, Drv, Buf0) -> - case io_request(Req, Drv, Buf0) of + case io_request(Req, Drv, {From,ReplyAs}, Buf0) of {ok,Reply,Buf} -> io_reply(From, ReplyAs, Reply), Buf; + {noreply,Buf} -> + %% We expect a {reply,_} message from the Drv when request is done + Buf; {error,Reply,Buf} -> io_reply(From, ReplyAs, Reply), Buf; @@ -196,78 +204,85 @@ io_request(Req, From, ReplyAs, Drv, Buf0) -> %% io_request({put_chars,unicode,Binary}, Drv, Buf) when is_binary(Binary) -> %% send_drv(Drv, {put_chars,Binary}), %% {ok,ok,Buf}; -io_request({put_chars,unicode,Chars}, Drv, Buf) -> +%% +%% These put requests have to be synchronous to the driver as otherwise +%% there is no guarantee that the data has actually been printed. 
+io_request({put_chars,unicode,Chars}, Drv, From, Buf) -> case catch unicode:characters_to_binary(Chars,utf8) of Binary when is_binary(Binary) -> - send_drv(Drv, {put_chars, unicode, Binary}), - {ok,ok,Buf}; + send_drv(Drv, {put_chars_sync, unicode, Binary, {From,ok}}), + {noreply,Buf}; _ -> {error,{error,{put_chars, unicode,Chars}},Buf} end; -io_request({put_chars,unicode,M,F,As}, Drv, Buf) -> +io_request({put_chars,unicode,M,F,As}, Drv, From, Buf) -> case catch apply(M, F, As) of Binary when is_binary(Binary) -> - send_drv(Drv, {put_chars, unicode,Binary}), - {ok,ok,Buf}; + send_drv(Drv, {put_chars_sync, unicode, Binary, {From,ok}}), + {noreply,Buf}; Chars -> case catch unicode:characters_to_binary(Chars,utf8) of B when is_binary(B) -> - send_drv(Drv, {put_chars, unicode,B}), - {ok,ok,Buf}; + send_drv(Drv, {put_chars_sync, unicode, B, {From,ok}}), + {noreply,Buf}; _ -> {error,{error,F},Buf} end end; -io_request({put_chars,latin1,Binary}, Drv, Buf) when is_binary(Binary) -> - send_drv(Drv, {put_chars, unicode,unicode:characters_to_binary(Binary,latin1)}), - {ok,ok,Buf}; -io_request({put_chars,latin1,Chars}, Drv, Buf) -> +io_request({put_chars,latin1,Binary}, Drv, From, Buf) when is_binary(Binary) -> + send_drv(Drv, {put_chars_sync, unicode, + unicode:characters_to_binary(Binary,latin1), + {From,ok}}), + {noreply,Buf}; +io_request({put_chars,latin1,Chars}, Drv, From, Buf) -> case catch unicode:characters_to_binary(Chars,latin1) of Binary when is_binary(Binary) -> - send_drv(Drv, {put_chars, unicode,Binary}), - {ok,ok,Buf}; + send_drv(Drv, {put_chars_sync, unicode, Binary, {From,ok}}), + {noreply,Buf}; _ -> {error,{error,{put_chars,latin1,Chars}},Buf} end; -io_request({put_chars,latin1,M,F,As}, Drv, Buf) -> +io_request({put_chars,latin1,M,F,As}, Drv, From, Buf) -> case catch apply(M, F, As) of Binary when is_binary(Binary) -> - send_drv(Drv, {put_chars, unicode,unicode:characters_to_binary(Binary,latin1)}), - {ok,ok,Buf}; + send_drv(Drv, {put_chars_sync, unicode, + unicode:characters_to_binary(Binary,latin1), + {From,ok}}), + {noreply,Buf}; Chars -> case catch unicode:characters_to_binary(Chars,latin1) of B when is_binary(B) -> - send_drv(Drv, {put_chars, unicode,B}), - {ok,ok,Buf}; + send_drv(Drv, {put_chars_sync, unicode, B, {From,ok}}), + {noreply,Buf}; _ -> {error,{error,F},Buf} end end; -io_request({get_chars,Encoding,Prompt,N}, Drv, Buf) -> +io_request({get_chars,Encoding,Prompt,N}, Drv, _From, Buf) -> get_chars(Prompt, io_lib, collect_chars, N, Drv, Buf, Encoding); -io_request({get_line,Encoding,Prompt}, Drv, Buf) -> +io_request({get_line,Encoding,Prompt}, Drv, _From, Buf) -> get_chars(Prompt, io_lib, collect_line, [], Drv, Buf, Encoding); -io_request({get_until,Encoding, Prompt,M,F,As}, Drv, Buf) -> +io_request({get_until,Encoding, Prompt,M,F,As}, Drv, _From, Buf) -> get_chars(Prompt, io_lib, get_until, {M,F,As}, Drv, Buf, Encoding); -io_request({get_password,_Encoding},Drv,Buf) -> +io_request({get_password,_Encoding},Drv,_From,Buf) -> get_password_chars(Drv, Buf); -io_request({setopts,Opts}, Drv, Buf) when is_list(Opts) -> +io_request({setopts,Opts}, Drv, _From, Buf) when is_list(Opts) -> setopts(Opts, Drv, Buf); -io_request(getopts, Drv, Buf) -> +io_request(getopts, Drv, _From, Buf) -> getopts(Drv, Buf); -io_request({requests,Reqs}, Drv, Buf) -> - io_requests(Reqs, {ok,ok,Buf}, Drv); +io_request({requests,Reqs}, Drv, From, Buf) -> + io_requests(Reqs, {ok,ok,Buf}, From, Drv); %% New in R12 -io_request({get_geometry,columns},Drv,Buf) -> 
+io_request({get_geometry,columns},Drv,_From,Buf) -> case get_tty_geometry(Drv) of {W,_H} -> {ok,W,Buf}; _ -> {error,{error,enotsup},Buf} end; -io_request({get_geometry,rows},Drv,Buf) -> +io_request({get_geometry,rows},Drv,_From,Buf) -> case get_tty_geometry(Drv) of {_W,H} -> {ok,H,Buf}; @@ -276,38 +291,49 @@ io_request({get_geometry,rows},Drv,Buf) -> end; %% BC with pre-R13 -io_request({put_chars,Chars}, Drv, Buf) -> - io_request({put_chars,latin1,Chars}, Drv, Buf); -io_request({put_chars,M,F,As}, Drv, Buf) -> - io_request({put_chars,latin1,M,F,As}, Drv, Buf); -io_request({get_chars,Prompt,N}, Drv, Buf) -> - io_request({get_chars,latin1,Prompt,N}, Drv, Buf); -io_request({get_line,Prompt}, Drv, Buf) -> - io_request({get_line,latin1,Prompt}, Drv, Buf); -io_request({get_until, Prompt,M,F,As}, Drv, Buf) -> - io_request({get_until,latin1, Prompt,M,F,As}, Drv, Buf); -io_request(get_password,Drv,Buf) -> - io_request({get_password,latin1},Drv,Buf); - - - -io_request(_, _Drv, Buf) -> +io_request({put_chars,Chars}, Drv, From, Buf) -> + io_request({put_chars,latin1,Chars}, Drv, From, Buf); +io_request({put_chars,M,F,As}, Drv, From, Buf) -> + io_request({put_chars,latin1,M,F,As}, Drv, From, Buf); +io_request({get_chars,Prompt,N}, Drv, From, Buf) -> + io_request({get_chars,latin1,Prompt,N}, Drv, From, Buf); +io_request({get_line,Prompt}, Drv, From, Buf) -> + io_request({get_line,latin1,Prompt}, Drv, From, Buf); +io_request({get_until, Prompt,M,F,As}, Drv, From, Buf) -> + io_request({get_until,latin1, Prompt,M,F,As}, Drv, From, Buf); +io_request(get_password,Drv,From,Buf) -> + io_request({get_password,latin1},Drv,From,Buf); + + + +io_request(_, _Drv, _From, Buf) -> {error,{error,request},Buf}. -%% Status = io_requests(RequestList, PrevStat, Drv) -%% Process a list of output requests as long as the previous status is 'ok'. - -io_requests([R|Rs], {ok,ok,Buf}, Drv) -> - io_requests(Rs, io_request(R, Drv, Buf), Drv); -io_requests([_|_], Error, _Drv) -> +%% Status = io_requests(RequestList, PrevStat, From, Drv) +%% Process a list of output requests as long as +%% the previous status is 'ok' or noreply. +%% +%% We use undefined as the From for all but the last request +%% in order to discards acknowledgements from those requests. +%% +io_requests([R|Rs], {noreply,Buf}, From, Drv) -> + ReqFrom = if Rs =:= [] -> From; true -> undefined end, + io_requests(Rs, io_request(R, Drv, ReqFrom, Buf), From, Drv); +io_requests([R|Rs], {ok,ok,Buf}, From, Drv) -> + ReqFrom = if Rs =:= [] -> From; true -> undefined end, + io_requests(Rs, io_request(R, Drv, ReqFrom, Buf), From, Drv); +io_requests([_|_], Error, _From, _Drv) -> Error; -io_requests([], Stat, _) -> +io_requests([], Stat, _From, _) -> Stat. %% io_reply(From, ReplyAs, Reply) %% The function for sending i/o command acknowledgement. %% The ACK contains the return value. +io_reply(undefined, _ReplyAs, _Reply) -> + %% Ignore these replies as they are generated from io_requests/4. + ok; io_reply(From, ReplyAs, Reply) -> From ! {io_reply,ReplyAs,Reply}, ok. @@ -619,6 +645,10 @@ more_data(What, Cont0, Drv, Ls, Encoding) -> io_request(Req, From, ReplyAs, Drv, []), %WRONG!!! 
send_drv_reqs(Drv, edlin:redraw_line(Cont)), get_line1({more_chars,Cont,[]}, Drv, Ls, Encoding); + {reply,{{From,ReplyAs},Reply}} -> + %% We take care of replies from puts here as well + io_reply(From, ReplyAs, Reply), + more_data(What, Cont0, Drv, Ls, Encoding); {'EXIT',Drv,interrupt} -> interrupted; {'EXIT',Drv,_} -> @@ -641,6 +671,10 @@ get_line_echo_off1({Chars,[]}, Drv) -> {io_request,From,ReplyAs,Req} when is_pid(From) -> io_request(Req, From, ReplyAs, Drv, []), get_line_echo_off1({Chars,[]}, Drv); + {reply,{{From,ReplyAs},Reply}} when From =/= undefined -> + %% We take care of replies from puts here as well + io_reply(From, ReplyAs, Reply), + get_line_echo_off1({Chars,[]},Drv); {'EXIT',Drv,interrupt} -> interrupted; {'EXIT',Drv,_} -> @@ -790,6 +824,10 @@ get_password1({Chars,[]}, Drv) -> %% set to []. But do we expect anything but plain output? get_password1({Chars, []}, Drv); + {reply,{{From,ReplyAs},Reply}} -> + %% We take care of replies from puts here as well + io_reply(From, ReplyAs, Reply), + get_password1({Chars, []},Drv); {'EXIT',Drv,interrupt} -> interrupted; {'EXIT',Drv,_} -> diff --git a/lib/kernel/src/hipe_unified_loader.erl b/lib/kernel/src/hipe_unified_loader.erl index e5928c7b63..2d124d95b7 100644 --- a/lib/kernel/src/hipe_unified_loader.erl +++ b/lib/kernel/src/hipe_unified_loader.erl @@ -827,7 +827,6 @@ patch_to_emu_step1(Mod) -> %% were added as the result of dynamic apply calls. We must %% purge them too, but we have no explicit record of them. %% Therefore invalidate all native addresses for the module. - %% emu_make_stubs/1 will repair the ones for compiled static calls. hipe_bifs:invalidate_funinfo_native_addresses(MFAs), %% Find all call sites that call these MFAs. As a side-effect, %% create native stubs for any MFAs that are referred. @@ -841,7 +840,6 @@ patch_to_emu_step1(Mod) -> %% Step 2 must occur after the new BEAM stub module is created. patch_to_emu_step2(ReferencesToPatch) -> - emu_make_stubs(ReferencesToPatch), redirect(ReferencesToPatch). -spec is_loaded(Module::atom()) -> boolean(). @@ -852,21 +850,6 @@ is_loaded(M) when is_atom(M) -> catch _:_ -> false end. --ifdef(notdef). -emu_make_stubs([{MFA,_Refs}|Rest]) -> - make_stub(MFA), - emu_make_stubs(Rest); -emu_make_stubs([]) -> - []. - -make_stub({_,_,A} = MFA) -> - EmuAddress = hipe_bifs:get_emu_address(MFA), - StubAddress = hipe_bifs:make_native_stub(EmuAddress, A), - hipe_bifs:set_funinfo_native_address(MFA, StubAddress). --else. -emu_make_stubs(_) -> []. --endif. - %%-------------------------------------------------------------------- %% Given a list of MFAs, tag them with their referred_from references. 
%% The resulting {MFA,Refs} list is later passed to redirect/1, once diff --git a/lib/kernel/src/inet_config.erl b/lib/kernel/src/inet_config.erl index fdc244f959..187bfbdab0 100644 --- a/lib/kernel/src/inet_config.erl +++ b/lib/kernel/src/inet_config.erl @@ -113,13 +113,7 @@ init() -> {unix,_} -> %% The Etc variable enables us to run tests with other %% configuration files than the normal ones - Etc = - case os:getenv("ERL_INET_ETC_DIR") of - false -> - ?DEFAULT_ETC; - _EtcDir -> - _EtcDir - end, + Etc = os:getenv("ERL_INET_ETC_DIR", ?DEFAULT_ETC), case inet_db:res_option(resolv_conf) of undefined -> inet_db:res_option( @@ -152,11 +146,7 @@ erl_dist_mode() -> do_load_resolv({unix,Type}, longnames) -> %% The Etc variable enables us to run tests with other %% configuration files than the normal ones - Etc = case os:getenv("ERL_INET_ETC_DIR") of - false -> ?DEFAULT_ETC; - _EtcDir -> - _EtcDir - end, + Etc = os:getenv("ERL_INET_ETC_DIR", ?DEFAULT_ETC), load_resolv(filename:join(Etc, ?DEFAULT_RESOLV), resolv), case Type of freebsd -> %% we may have to check version (2.2.2) @@ -307,10 +297,7 @@ load_hosts(File,Os) -> win32_load_from_registry(Type) -> %% The TcpReg variable enables us to run tests with other registry configurations than %% the normal ones - TcpReg = case os:getenv("ERL_INET_ETC_DIR") of - false -> []; - _TReg -> _TReg - end, + TcpReg = os:getenv("ERL_INET_ETC_DIR", ""), {ok, Reg} = win32reg:open([read]), {TcpIp,HFileKey} = case Type of diff --git a/lib/kernel/src/kernel.appup.src b/lib/kernel/src/kernel.appup.src index f8f4cc1ec2..1bae762bed 100644 --- a/lib/kernel/src/kernel.appup.src +++ b/lib/kernel/src/kernel.appup.src @@ -17,9 +17,7 @@ %% %CopyrightEnd% {"%VSN%", %% Up from - max one major revision back - [{<<"3\\.0(\\.[0-9]+)*">>,[restart_new_emulator]}, %% R17 - {<<"2\\.16(\\.[0-9]+)*">>,[restart_new_emulator]}],%% R16 + [{<<"3\\.0(\\.[0-9]+)*">>,[restart_new_emulator]}], % OTP-17 %% Down to - max one major revision back - [{<<"3\\.0(\\.[0-9]+)*">>,[restart_new_emulator]}, %% R17 - {<<"2\\.16(\\.[0-9]+)*">>,[restart_new_emulator]}] %% R16 + [{<<"3\\.0(\\.[0-9]+)*">>,[restart_new_emulator]}] % OTP-17 }. diff --git a/lib/kernel/src/os.erl b/lib/kernel/src/os.erl index 187fd0001b..7468a06f3c 100644 --- a/lib/kernel/src/os.erl +++ b/lib/kernel/src/os.erl @@ -26,7 +26,7 @@ %%% BIFs --export([getenv/0, getenv/1, getpid/0, putenv/2, timestamp/0, unsetenv/1]). +-export([getenv/0, getenv/1, getenv/2, getpid/0, putenv/2, timestamp/0, unsetenv/1]). -spec getenv() -> [string()]. @@ -39,6 +39,19 @@ getenv() -> erlang:nif_error(undef). getenv(_) -> erlang:nif_error(undef). +-spec getenv(VarName, DefaultValue) -> Value when + VarName :: string(), + DefaultValue :: string(), + Value :: string(). + +getenv(VarName, DefaultValue) -> + case os:getenv(VarName) of + false -> + DefaultValue; + Value -> + Value + end. + -spec getpid() -> Value when Value :: string(). @@ -85,10 +98,7 @@ version() -> Name :: string(), Filename :: string(). find_executable(Name) -> - case os:getenv("PATH") of - false -> find_executable(Name, []); - Path -> find_executable(Name, Path) - end. + find_executable(Name, os:getenv("PATH", "")). -spec find_executable(Name, Path) -> Filename | 'false' when Name :: string(), diff --git a/lib/kernel/src/user_drv.erl b/lib/kernel/src/user_drv.erl index a91c23539d..e6ce85c379 100644 --- a/lib/kernel/src/user_drv.erl +++ b/lib/kernel/src/user_drv.erl @@ -29,6 +29,7 @@ -define(OP_INSC,2). -define(OP_DELC,3). -define(OP_BEEP,4). +-define(OP_PUTC_SYNC,5). 
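The new os:getenv/2 above collapses the recurring getenv-with-fallback case expressions seen in inet_config.erl and the test suites. A minimal sketch (the helper name and the "/etc" default are only examples):

    etc_dir() ->
        %% returns the value of ERL_INET_ETC_DIR, or "/etc" when it is unset
        os:getenv("ERL_INET_ETC_DIR", "/etc").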
% Control op -define(CTRL_OP_GET_WINSIZE,100). -define(CTRL_OP_GET_UNICODE_STATE,101). @@ -133,7 +134,7 @@ server1(Iport, Oport, Shell) -> [erlang:system_info(system_version)]))}, Iport, Oport), %% Enter the server loop. - server_loop(Iport, Oport, Curr, User, Gr). + server_loop(Iport, Oport, Curr, User, Gr, queue:new()). rem_sh_opts(Node) -> [{expand_fun,fun(B)-> rpc:call(Node,edlin_expand,expand,[B]) end}]. @@ -158,42 +159,41 @@ start_user() -> User end. -server_loop(Iport, Oport, User, Gr) -> +server_loop(Iport, Oport, User, Gr, IOQueue) -> Curr = gr_cur_pid(Gr), put(current_group, Curr), - server_loop(Iport, Oport, Curr, User, Gr). + server_loop(Iport, Oport, Curr, User, Gr, IOQueue). -server_loop(Iport, Oport, Curr, User, Gr) -> +server_loop(Iport, Oport, Curr, User, Gr, IOQueue) -> receive {Iport,{data,Bs}} -> BsBin = list_to_binary(Bs), Unicode = unicode:characters_to_list(BsBin,utf8), - port_bytes(Unicode, Iport, Oport, Curr, User, Gr); + port_bytes(Unicode, Iport, Oport, Curr, User, Gr, IOQueue); {Iport,eof} -> Curr ! {self(),eof}, - server_loop(Iport, Oport, Curr, User, Gr); - {User,Req} -> % never block from user! - io_request(Req, Iport, Oport), - server_loop(Iport, Oport, Curr, User, Gr); - {Curr,tty_geometry} -> - Curr ! {self(),tty_geometry,get_tty_geometry(Iport)}, - server_loop(Iport, Oport, Curr, User, Gr); - {Curr,get_unicode_state} -> - Curr ! {self(),get_unicode_state,get_unicode_state(Iport)}, - server_loop(Iport, Oport, Curr, User, Gr); - {Curr,set_unicode_state, Bool} -> - Curr ! {self(),set_unicode_state,set_unicode_state(Iport,Bool)}, - server_loop(Iport, Oport, Curr, User, Gr); - {Curr,Req} -> - io_request(Req, Iport, Oport), - server_loop(Iport, Oport, Curr, User, Gr); + server_loop(Iport, Oport, Curr, User, Gr, IOQueue); + Req when element(1,Req) =:= User orelse element(1,Req) =:= Curr, + tuple_size(Req) =:= 2 orelse tuple_size(Req) =:= 3 -> + %% We match {User|Curr,_}|{User|Curr,_,_} + NewQ = handle_req(Req, Iport, Oport, IOQueue), + server_loop(Iport, Oport, Curr, User, Gr, NewQ); + {Oport,ok} -> + %% We get this ok from the port, in io_request we store + %% info about where to send reply at head of queue + {{value,{Origin,Reply}},ReplyQ} = queue:out(IOQueue), + Origin ! 
{reply,Reply}, + NewQ = handle_req(next, Iport, Oport, ReplyQ), + server_loop(Iport, Oport, Curr, User, Gr, NewQ); {'EXIT',Iport,_R} -> - server_loop(Iport, Oport, Curr, User, Gr); + server_loop(Iport, Oport, Curr, User, Gr, IOQueue); {'EXIT',Oport,_R} -> - server_loop(Iport, Oport, Curr, User, Gr); + server_loop(Iport, Oport, Curr, User, Gr, IOQueue); + {'EXIT',User,shutdown} -> % force data to port + server_loop(Iport, Oport, Curr, User, Gr, IOQueue); {'EXIT',User,_R} -> % keep 'user' alive NewU = start_user(), - server_loop(Iport, Oport, Curr, NewU, gr_set_num(Gr, 1, NewU, {})); + server_loop(Iport, Oport, Curr, NewU, gr_set_num(Gr, 1, NewU, {}), IOQueue); {'EXIT',Pid,R} -> % shell and group leader exit case gr_cur_pid(Gr) of Pid when R =/= die , @@ -213,18 +213,51 @@ server_loop(Iport, Oport, Curr, User, Gr) -> {ok,Gr2} = gr_set_cur(gr_set_num(Gr1, Ix, Pid1, {shell,start,Params}), Ix), put(current_group, Pid1), - server_loop(Iport, Oport, Pid1, User, Gr2); + server_loop(Iport, Oport, Pid1, User, Gr2, IOQueue); _ -> % remote shell io_requests([{put_chars,unicode,"(^G to start new job) ***\n"}], Iport, Oport), - server_loop(Iport, Oport, Curr, User, Gr1) + server_loop(Iport, Oport, Curr, User, Gr1, IOQueue) end; _ -> % not current, just remove it - server_loop(Iport, Oport, Curr, User, gr_del_pid(Gr, Pid)) + server_loop(Iport, Oport, Curr, User, gr_del_pid(Gr, Pid), IOQueue) end; _X -> %% Ignore unknown messages. - server_loop(Iport, Oport, Curr, User, Gr) + server_loop(Iport, Oport, Curr, User, Gr, IOQueue) + end. + +%% We always handle geometry and unicode requests +handle_req({Curr,tty_geometry},Iport,_Oport,IOQueue) -> + Curr ! {self(),tty_geometry,get_tty_geometry(Iport)}, + IOQueue; +handle_req({Curr,get_unicode_state},Iport,_Oport,IOQueue) -> + Curr ! {self(),get_unicode_state,get_unicode_state(Iport)}, + IOQueue; +handle_req({Curr,set_unicode_state, Bool},Iport,_Oport,IOQueue) -> + Curr ! {self(),set_unicode_state,set_unicode_state(Iport,Bool)}, + IOQueue; +handle_req(next,Iport,Oport,IOQueue) -> + case queue:out(IOQueue) of + {{value,Next},ExecQ} -> + NewQ = handle_req(Next,Iport,Oport,queue:new()), + queue:join(NewQ,ExecQ); + {empty,_} -> + IOQueue + end; +handle_req(Msg,Iport,Oport,IOQueue) -> + case queue:peek(IOQueue) of + empty -> + {Origin,Req} = Msg, + case io_request(Req, Iport, Oport) of + ok -> IOQueue; + Reply -> + %% Push reply info to front of queue + queue:in_r({Origin,Reply},IOQueue) + end; + _Else -> + %% All requests are queued when we have outstanding sync put_chars + queue:in(Msg,IOQueue) end. %% port_bytes(Bytes, InPort, OutPort, CurrentProcess, UserProcess, Group) @@ -232,34 +265,34 @@ server_loop(Iport, Oport, Curr, User, Gr) -> %% either escape to switch_loop or restart the shell. Otherwise send %% the bytes to Curr. -port_bytes([$\^G|_Bs], Iport, Oport, _Curr, User, Gr) -> - handle_escape(Iport, Oport, User, Gr); +port_bytes([$\^G|_Bs], Iport, Oport, _Curr, User, Gr, IOQueue) -> + handle_escape(Iport, Oport, User, Gr, IOQueue); -port_bytes([$\^C|_Bs], Iport, Oport, Curr, User, Gr) -> - interrupt_shell(Iport, Oport, Curr, User, Gr); +port_bytes([$\^C|_Bs], Iport, Oport, Curr, User, Gr, IOQueue) -> + interrupt_shell(Iport, Oport, Curr, User, Gr, IOQueue); -port_bytes([B], Iport, Oport, Curr, User, Gr) -> +port_bytes([B], Iport, Oport, Curr, User, Gr, IOQueue) -> Curr ! 
{self(),{data,[B]}}, - server_loop(Iport, Oport, Curr, User, Gr); -port_bytes(Bs, Iport, Oport, Curr, User, Gr) -> + server_loop(Iport, Oport, Curr, User, Gr, IOQueue); +port_bytes(Bs, Iport, Oport, Curr, User, Gr, IOQueue) -> case member($\^G, Bs) of true -> - handle_escape(Iport, Oport, User, Gr); + handle_escape(Iport, Oport, User, Gr, IOQueue); false -> Curr ! {self(),{data,Bs}}, - server_loop(Iport, Oport, Curr, User, Gr) + server_loop(Iport, Oport, Curr, User, Gr, IOQueue) end. -interrupt_shell(Iport, Oport, Curr, User, Gr) -> +interrupt_shell(Iport, Oport, Curr, User, Gr, IOQueue) -> case gr_get_info(Gr, Curr) of undefined -> ok; % unknown _ -> exit(Curr, interrupt) end, - server_loop(Iport, Oport, Curr, User, Gr). + server_loop(Iport, Oport, Curr, User, Gr, IOQueue). -handle_escape(Iport, Oport, User, Gr) -> +handle_escape(Iport, Oport, User, Gr, IOQueue) -> case application:get_env(stdlib, shell_esc) of {ok,abort} -> Pid = gr_cur_pid(Gr), @@ -278,11 +311,11 @@ handle_escape(Iport, Oport, User, Gr) -> Pid1 = group:start(self(), {shell,start,[]}), io_request({put_chars,unicode,"\n"}, Iport, Oport), server_loop(Iport, Oport, User, - gr_add_cur(Gr1, Pid1, {shell,start,[]})); + gr_add_cur(Gr1, Pid1, {shell,start,[]}), IOQueue); _ -> % {ok,jcl} | undefined io_request({put_chars,unicode,"\nUser switch command\n"}, Iport, Oport), - server_loop(Iport, Oport, User, switch_loop(Iport, Oport, Gr)) + server_loop(Iport, Oport, User, switch_loop(Iport, Oport, Gr), IOQueue) end. switch_loop(Iport, Oport, Gr) -> @@ -492,9 +525,12 @@ set_unicode_state(Iport, Bool) -> io_request(Request, Iport, Oport) -> try io_command(Request) of - Command -> + {command,_} = Command -> Oport ! {self(),Command}, - ok + ok; + {Command,Reply} -> + Oport ! {self(),Command}, + Reply catch {requests,Rs} -> io_requests(Rs, Iport, Oport); @@ -511,6 +547,13 @@ io_requests([], _Iport, _Oport) -> put_int16(N, Tail) -> [(N bsr 8)band 255,N band 255|Tail]. +%% When a put_chars_sync command is used, user_drv guarantees that +%% the bytes have been put in the buffer of the port before an acknowledgement +%% is sent back to the process sending the request. This command was added in +%% OTP 18 to make sure that data sent from io:format is actually printed +%% to the console before the vm stops when calling erlang:halt(integer()). +io_command({put_chars_sync, unicode,Cs,Reply}) -> + {{command,[?OP_PUTC_SYNC|unicode:characters_to_binary(Cs,utf8)]},Reply}; io_command({put_chars, unicode,Cs}) -> {command,[?OP_PUTC|unicode:characters_to_binary(Cs,utf8)]}; io_command({move_rel,N}) -> diff --git a/lib/kernel/test/file_SUITE.erl b/lib/kernel/test/file_SUITE.erl index 2ce2303ba3..1213d8e37e 100644 --- a/lib/kernel/test/file_SUITE.erl +++ b/lib/kernel/test/file_SUITE.erl @@ -93,6 +93,8 @@ -export([old_io_protocol/1]). +-export([unicode_mode/1]). + %% Debug exports -export([create_file_slow/2, create_file/2, create_bin/2]). -export([verify_file/2, verify_bin/3]). @@ -105,6 +107,7 @@ -include_lib("test_server/include/test_server.hrl"). -include_lib("kernel/include/file.hrl"). +-define(THROW_ERROR(RES), throw({fail, ?LINE, RES})). suite() -> [{ct_hooks,[ts_install_cth]}]. @@ -116,7 +119,9 @@ all() -> delayed_write, read_ahead, segment_read, segment_write, ipread, pid2name, interleaved_read_write, otp_5814, otp_10852, large_file, large_write, read_line_1, read_line_2, read_line_3, - read_line_4, standard_io, old_io_protocol]. + read_line_4, standard_io, old_io_protocol, + unicode_mode + ]. 
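The put_chars_sync handling added to group.erl and user_drv.erl above delays the io_reply until the port has accepted the bytes, so output written immediately before stopping the emulator is no longer lost. A minimal sketch of the situation it addresses (function name is illustrative):

    fail_fast(Reason) ->
        io:format("fatal: ~p~n", [Reason]),   %% now flushed to the console before the VM stops
        erlang:halt(1).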
groups() -> [{dirs, [], [make_del_dir, cur_dir_0, cur_dir_1, @@ -347,8 +352,153 @@ old_io_protocol(Config) when is_list(Config) -> [] = flush(), ok. +unicode_mode(suite) -> []; +unicode_mode(doc) -> [""]; +unicode_mode(Config) -> + Dir = {dir, ?config(priv_dir,Config)}, + OptVariants = [[Dir], + [Dir, {encoding, utf8}], + [Dir, binary], + [Dir, binary, {encoding, utf8}] + ], + ReadVariants = [{read, fun(Fd) -> um_read(Fd, fun(Fd1) -> file:read(Fd1, 1024) end) end}, + {read_line, fun(Fd) -> um_read(Fd, fun(Fd1) -> file:read_line(Fd1) end) end} + %%{pread, fun(Fd) -> file:pread(Fd, 0, 1024) end}, + %%{preadl, fun(Fd) -> file:pread(Fd, [{0, 1024}]) end}, + ], + + _ = [read_write_0("ASCII: list: Hello World", Read, Opt) || + Opt <- OptVariants, Read <- ReadVariants], + _ = [read_write_0("LATIN1: list: åäöÅÄÖ", Read, Opt) || + Opt <- OptVariants, Read <- ReadVariants], + _ = [read_write_0(<<"ASCII: bin: Hello World">>, Read, Opt) || + Opt <- OptVariants, Read <- ReadVariants], + _ = [read_write_0(<<"LATIN1: bin: åäöÅÄÖ">>, Read, Opt) || + Opt <- OptVariants, Read <- ReadVariants], + %% These will be double encoded if option is encoding utf-8 + _ = [read_write_0(<<"UTF8: bin: Ωß"/utf8>>, Read, Opt) || + Opt <- OptVariants, Read <- ReadVariants], + %% These should not work (with encoding set to utf-8) + %% according to file's documentation + _ = [read_write_0("UTF8: list: Ωß", Read, Opt) || + Opt <- OptVariants, Read <- ReadVariants], + ok. + +read_write_0(Str, {Func, ReadFun}, Options) -> + try + Res = read_write_1(Str, ReadFun, Options), + io:format("~p: ~ts ~p '~p'~n", [Func, Str, tl(Options), Res]), + ok + catch {fail, Line, ReadBytes = [_|_]} -> + io:format("~p:~p: ~p ERROR: ~w vs~n ~w~n - ~p~n", + [?MODULE, Line, Func, Str, ReadBytes, Options]), + exit({error, ?LINE}); + {fail, Line, ReadBytes} -> + io:format("~p:~p: ~p ERROR: ~ts vs~n ~w~n - ~p~n", + [?MODULE, Line, Func, Str, ReadBytes, Options]), + exit({error, ?LINE}); + error:What -> + io:format("~p:??: ~p ERROR: ~p from~n ~w~n ~p~n", + [?MODULE, Func, What, Str, Options]), + + io:format("\t~p~n", [erlang:get_stacktrace()]), + exit({error, ?LINE}) + end. + +read_write_1(Str0, ReadFun, [{dir,Dir}|Options]) -> + File = um_filename(Str0, Dir, Options), + Pre = "line 1\n", Post = "\nlast line\n", + Str = case is_list(Str0) andalso lists:max(Str0) > 255 of + false -> %% Normal case Use options + {ok, FdW} = file:open(File, [write|Options]), + IO = [Pre, Str0, Post], + ok = file:write(FdW, IO), + case is_binary(Str0) of + true -> iolist_to_binary(IO); + false -> lists:append(IO) + end; + true -> %% Test unicode lists + {ok, FdW} = file:open(File, [write]), + Utf8 = unicode:characters_to_binary([Pre, Str0, Post]), + file:write(FdW, Utf8), + {unicode, Utf8} + end, + file:close(FdW), + {ok, FdR} = file:open(File, [read|Options]), + ReadRes = ReadFun(FdR), + file:close(FdR), + Res = um_check(Str, ReadRes, Options), + file:delete(File), + Res. +um_read(Fd, Fun) -> + um_read(Fd, Fun, []). + +um_read(Fd, Fun, Acc) -> + case Fun(Fd) of + eof -> + case is_binary(hd(Acc)) of + true -> {ok, iolist_to_binary(lists:reverse(Acc))}; + false -> {ok, lists:append(lists:reverse(Acc))} + end; + {ok, Data} -> + um_read(Fd, Fun, [Data|Acc]); + Error -> + Error + end. 
+ + +um_check(Str, {ok, Str}, _) -> ok; +um_check(Bin, {ok, Res}, _Options) when is_binary(Bin), is_list(Res) -> + case list_to_binary(Res) of + Bin -> ok; + _ -> ?THROW_ERROR(Res) + end; +um_check(Str, {ok, Res}, _Options) when is_list(Str), is_binary(Res) -> + case iolist_to_binary(Str) of + Res -> ok; + _ -> ?THROW_ERROR(Res) + end; +um_check({unicode, Utf8Bin}, Res, Options) -> + um_check_unicode(Utf8Bin, Res, + proplists:get_value(binary, Options, false), + proplists:get_value(encoding, Options, none)); +um_check(_Str, Res, _Options) -> + ?THROW_ERROR(Res). + +um_check_unicode(Utf8Bin, {ok, Utf8Bin}, true, none) -> + ok; +um_check_unicode(Utf8Bin, {ok, List = [_|_]}, false, none) -> + case binary_to_list(Utf8Bin) == List of + true -> ok; + false -> ?THROW_ERROR(List) + end; +um_check_unicode(_Utf8Bin, {error, {no_translation, unicode, latin1}}, _, _) -> + no_translation; +um_check_unicode(_Utf8Bin, Error = {error, _}, _, _Unicode) -> + ?THROW_ERROR(Error); +um_check_unicode(_Utf8Bin, {ok, _ListOrBin}, _, _UTF8_) -> + %% List = if is_binary(ListOrBin) -> unicode:characters_to_list(ListOrBin); + %% true -> ListOrBin + %% end, + %% io:format("In: ~w~n", [binary_to_list(Utf8Bin)]), + %% io:format("Ut: ~w~n", [List]), + ?THROW_ERROR({shoud_be, no_translation}). + +um_filename(Bin, Dir, Options) when is_binary(Bin) -> + um_filename(binary_to_list(Bin), Dir, Options); +um_filename(Str = [_|_], Dir, Options) -> + Name = hd(string:tokens(Str, ":")), + Enc = atom_to_list(proplists:get_value(encoding, Options, latin1)), + File = case lists:member(binary, Options) of + true -> + "test_" ++ Name ++ "_bin_enc_" ++ Enc; + false -> + "test_" ++ Name ++ "_list_enc_" ++ Enc + end, + filename:join(Dir, File). + %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% read_write_file(suite) -> []; diff --git a/lib/kernel/test/interactive_shell_SUITE.erl b/lib/kernel/test/interactive_shell_SUITE.erl index 7f6024f642..3fb7c68886 100644 --- a/lib/kernel/test/interactive_shell_SUITE.erl +++ b/lib/kernel/test/interactive_shell_SUITE.erl @@ -48,12 +48,7 @@ groups() -> []. init_per_suite(Config) -> - Term = case os:getenv("TERM") of - List when is_list(List) -> - List; - _ -> - "dumb" - end, + Term = os:getenv("TERM", "dumb"), os:putenv("TERM","vt100"), DefShell = get_default_shell(), [{default_shell,DefShell},{term,Term}|Config]. diff --git a/lib/kernel/test/pdict_SUITE.erl b/lib/kernel/test/pdict_SUITE.erl index 98cff0222e..4b60beb9dc 100644 --- a/lib/kernel/test/pdict_SUITE.erl +++ b/lib/kernel/test/pdict_SUITE.erl @@ -31,7 +31,7 @@ -export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, init_per_group/2,end_per_group/2, - simple/1, complicated/1, heavy/1, info/1]). + simple/1, complicated/1, heavy/1, simple_all_keys/1, info/1]). -export([init_per_testcase/2, end_per_testcase/2]). -export([other_process/2]). @@ -46,7 +46,7 @@ end_per_testcase(_Case, Config) -> suite() -> [{ct_hooks,[ts_install_cth]}]. all() -> - [simple, complicated, heavy, info]. + [simple, complicated, heavy, simple_all_keys, info]. groups() -> []. 
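The simple_all_keys case and match_keys/1 helper added to pdict_SUITE (test body follows in the next hunks) exercise erlang:get_keys/0, which lists every key currently in the process dictionary. A minimal sketch, assuming an otherwise empty dictionary (function name is illustrative):

    get_keys_demo() ->
        put(a, 1),
        put(b, 2),
        %% erlang:get_keys/0 returns all keys in the calling process' dictionary
        lists:sort(erlang:get_keys()).   %% -> [a,b] here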
@@ -70,6 +70,7 @@ simple(suite) -> []; simple(Config) when is_list(Config) -> XX = get(), + ok = match_keys(XX), erase(), L = [a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p, q,r,s,t,u,v,x,y,z,'A','B','C','D'], @@ -105,6 +106,7 @@ simple(Config) when is_list(Config) -> complicated(Config) when is_list(Config) -> Previous = get(), + ok = match_keys(Previous), Previous = erase(), N = case ?t:is_debug() of false -> 500000; @@ -113,8 +115,10 @@ complicated(Config) when is_list(Config) -> comp_1(N), comp_2(N), N = comp_3(lists:sort(get()), 1), + ok = match_keys(get()), comp_4(get()), [] = get(), + [] = get_keys(), [put(Key, Value) || {Key,Value} <- Previous], ok. @@ -160,6 +164,26 @@ heavy(Config) when is_list(Config) -> [put(Key, Value) || {Key,Value} <- XX], ok. +simple_all_keys(Config) when is_list(Config) -> + erase(), + ok = simple_all_keys_add_loop(1000), + [] = get_keys(), + [] = get(), + ok. + +simple_all_keys_add_loop(0) -> + simple_all_keys_del_loop(erlang:get_keys()); +simple_all_keys_add_loop(N) -> + put(gen_key(N),value), + ok = match_keys(get()), + simple_all_keys_add_loop(N-1). + +simple_all_keys_del_loop([]) -> ok; +simple_all_keys_del_loop([K|Ks]) -> + value = erase(K), + ok = match_keys(get()), + simple_all_keys_del_loop(Ks). + info(doc) -> ["Tests process_info(Pid, dictionary)"]; info(suite) -> @@ -339,3 +363,8 @@ m(A,B,Module,Line) -> [A,B,Module,Line]), exit({no_match,{A,B},Module,Line}) end. + +match_keys(All) -> + Ks = lists:sort([K||{K,_}<-All]), + Ks = lists:sort(erlang:get_keys()), + ok. diff --git a/lib/kernel/test/zlib_SUITE.erl b/lib/kernel/test/zlib_SUITE.erl index 3be6f39d95..41c19fce51 100644 --- a/lib/kernel/test/zlib_SUITE.erl +++ b/lib/kernel/test/zlib_SUITE.erl @@ -82,7 +82,7 @@ groups() -> api_deflateSetDictionary, api_deflateReset, api_deflateParams, api_deflate, api_deflateEnd, api_inflateInit, api_inflateSetDictionary, - api_inflateSync, api_inflateReset, api_inflate, + api_inflateSync, api_inflateReset, api_inflate, api_inflateChunk, api_inflateEnd, api_setBufsz, api_getBufsz, api_crc32, api_adler32, api_getQSize, api_un_compress, api_un_zip, api_g_un_zip]}, @@ -146,8 +146,6 @@ api_deflateInit(Config) when is_list(Config) -> ?m(?BARG, zlib:deflateInit(Z1,default,deflated,-20,8,default)), ?m(?BARG, zlib:deflateInit(Z1,default,deflated,-7,8,default)), ?m(?BARG, zlib:deflateInit(Z1,default,deflated,7,8,default)), - ?m(?BARG, zlib:deflateInit(Z1,default,deflated,-8,8,default)), - ?m(?BARG, zlib:deflateInit(Z1,default,deflated,8,8,default)), ?m(?BARG, zlib:deflateInit(Z1,default,deflated,-15,0,default)), ?m(?BARG, zlib:deflateInit(Z1,default,deflated,-15,10,default)), @@ -169,7 +167,7 @@ api_deflateInit(Config) when is_list(Config) -> ?m(ok, zlib:deflateInit(Z12,default,deflated,-Wbits,8,default)), ?m(ok,zlib:close(Z11)), ?m(ok,zlib:close(Z12)) - end, lists:seq(9, 15)), + end, lists:seq(8, 15)), lists:foreach(fun(MemLevel) -> ?line Z = zlib:open(), @@ -277,7 +275,7 @@ api_inflateInit(Config) when is_list(Config) -> ?m(ok, zlib:inflateInit(Z12,-Wbits)), ?m(ok,zlib:close(Z11)), ?m(ok,zlib:close(Z12)) - end, lists:seq(9,15)), + end, lists:seq(8,15)), ?m(?BARG, zlib:inflateInit(gurka, -15)), ?m(?BARG, zlib:inflateInit(Z1, 7)), ?m(?BARG, zlib:inflateInit(Z1, -7)), @@ -357,6 +355,39 @@ api_inflate(Config) when is_list(Config) -> ?m({'EXIT',{data_error,_}}, zlib:inflate(Z1, <<2,1,2,1,2>>)), ?m(ok, zlib:close(Z1)). 
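The api_inflateChunk case added below drives zlib:inflateChunk/1,2, which returns {more, Decompressed} while the inflated output keeps filling the stream buffer and the plain remainder on the last call. A minimal sketch of that call pattern, assuming an open zlib stream Z (after zlib:inflateInit/1) and a compressed binary; the wrapper names are illustrative:

    inflate_chunked(Z, Compressed) ->
        collect_chunks(zlib:inflateChunk(Z, Compressed), Z, []).

    collect_chunks({more, Part}, Z, Acc) ->
        %% more output pending for this input; keep pulling chunks
        collect_chunks(zlib:inflateChunk(Z), Z, [Part | Acc]);
    collect_chunks(Last, _Z, Acc) ->
        iolist_to_binary(lists:reverse([Last | Acc])).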
+api_inflateChunk(doc) -> "Test inflateChunk"; +api_inflateChunk(suite) -> []; +api_inflateChunk(Config) when is_list(Config) -> + ChunkSize = 1024, + Data = << <<(I rem 150)>> || I <- lists:seq(1, 3 * ChunkSize) >>, + Part1 = binary:part(Data, 0, ChunkSize), + Part2 = binary:part(Data, ChunkSize, ChunkSize), + Part3 = binary:part(Data, ChunkSize * 2, ChunkSize), + ?line Compressed = zlib:compress(Data), + ?line Z1 = zlib:open(), + ?line zlib:setBufSize(Z1, ChunkSize), + ?m(ok, zlib:inflateInit(Z1)), + ?m([], zlib:inflateChunk(Z1, <<>>)), + ?m({more, Part1}, zlib:inflateChunk(Z1, Compressed)), + ?m({more, Part2}, zlib:inflateChunk(Z1)), + ?m(Part3, zlib:inflateChunk(Z1)), + ?m(ok, zlib:inflateEnd(Z1)), + + ?m(ok, zlib:inflateInit(Z1)), + ?m({more, Part1}, zlib:inflateChunk(Z1, Compressed)), + + ?m(ok, zlib:inflateReset(Z1)), + + ?line zlib:setBufSize(Z1, size(Data)), + ?m(Data, zlib:inflateChunk(Z1, Compressed)), + ?m(ok, zlib:inflateEnd(Z1)), + + ?m(ok, zlib:inflateInit(Z1)), + ?m(?BARG, zlib:inflateChunk(gurka, Compressed)), + ?m(?BARG, zlib:inflateChunk(Z1, 4384)), + ?m({'EXIT',{data_error,_}}, zlib:inflateEnd(Z1)), + ?m(ok, zlib:close(Z1)). + api_inflateEnd(doc) -> "Test inflateEnd"; api_inflateEnd(suite) -> []; api_inflateEnd(Config) when is_list(Config) -> diff --git a/lib/mnesia/doc/src/mnesia.xml b/lib/mnesia/doc/src/mnesia.xml index ed5b879f7f..856a7594a7 100644 --- a/lib/mnesia/doc/src/mnesia.xml +++ b/lib/mnesia/doc/src/mnesia.xml @@ -3019,6 +3019,12 @@ raise(Name, Amount) -> totally unpredictable.</p> </item> <item> + <p><c>-mnesia dump_disc_copies_at_startup true | false</c>. + If set to false, this disables the dumping of <c>disc_copies</c> + tables during startup while tables are being loaded. The default + is true.</p> + </item> + <item> <p><c>-mnesia dump_log_load_regulation true | false</c>. Controls if the log dumps should be performed as fast as possible or if the dumper should do its own load diff --git a/lib/mnesia/src/mnesia_controller.erl b/lib/mnesia/src/mnesia_controller.erl index 5a9bae54da..aa72de7594 100644 --- a/lib/mnesia/src/mnesia_controller.erl +++ b/lib/mnesia/src/mnesia_controller.erl @@ -51,6 +51,7 @@ force_load_table/1, async_dump_log/1, sync_dump_log/1, + snapshot_dcd/1, connect_nodes/1, connect_nodes/2, wait_for_schema_commit_lock/0, @@ -139,7 +140,8 @@ max_loaders() -> -record(block_controller, {owner}). -record(dump_log, {initiated_by, - opt_reply_to + opt_reply_to, + operation = dump_log }). -record(net_load, {table, @@ -201,6 +203,15 @@ async_dump_log(InitBy) -> ?SERVER_NAME ! {async_dump_log, InitBy}, ok. +snapshot_dcd(Tables) when is_list(Tables) -> + case [T || T <- Tables, + mnesia_lib:storage_type_at_node(node(), T) =/= disc_copies] of + [] -> + call({snapshot_dcd, Tables}); + BadTabs -> + {error, {not_disc_copies, BadTabs}} + end. 
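snapshot_dcd/1 above queues a dumper job that writes the in-memory contents of the listed disc_copies tables straight to their .DCD files, while the new "-mnesia dump_disc_copies_at_startup false" flag (documented in the mnesia.xml hunk above) disables the corresponding dump during table load. A minimal sketch, assuming a hypothetical disc_copies table named my_tab; note that snapshot_dcd/1 is exported from the internal mnesia_controller module, not from mnesia itself:

    snapshot_my_tab() ->
        %% Returns {error,{not_disc_copies,BadTabs}} without queueing anything
        %% if some listed table is not disc_copies on this node (see above).
        mnesia_controller:snapshot_dcd([my_tab]).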
+ %% Wait for tables to be active %% If needed, we will wait for Mnesia to start %% If Mnesia stops, we will wait for Mnesia to restart @@ -646,6 +657,15 @@ handle_call({sync_dump_log, InitBy}, From, State) -> State2 = add_worker(Worker, State), noreply(State2); +handle_call({snapshot_dcd, Tables}, From, State) -> + Worker = #dump_log{initiated_by = user, + opt_reply_to = From, + operation = fun() -> + mnesia_dumper:snapshot_dcd(Tables) + end}, + State2 = add_worker(Worker, State), + noreply(State2); + handle_call(wait_for_schema_commit_lock, From, State) -> Worker = #schema_commit_lock{owner = From}, State2 = add_worker(Worker, State), @@ -2089,7 +2109,12 @@ start_remote_sender(Node, Tab, Receiver, Storage) -> dump_and_reply(ReplyTo, Worker) -> %% No trap_exit, die intentionally instead - Res = mnesia_dumper:opt_dump_log(Worker#dump_log.initiated_by), + Res = case Worker#dump_log.operation of + dump_log -> + mnesia_dumper:opt_dump_log(Worker#dump_log.initiated_by); + F when is_function(F, 0) -> + F() + end, ReplyTo ! #dumper_done{worker_pid = self(), worker_res = Res}, unlink(ReplyTo), diff --git a/lib/mnesia/src/mnesia_dumper.erl b/lib/mnesia/src/mnesia_dumper.erl index 14665797a0..509b765dee 100644 --- a/lib/mnesia/src/mnesia_dumper.erl +++ b/lib/mnesia/src/mnesia_dumper.erl @@ -34,11 +34,13 @@ -export([ get_log_writes/0, incr_log_writes/0, + needs_dump_ets/1, raw_dump_table/2, raw_named_dump_table/2, start_regulator/0, opt_dump_log/1, - update/3 + update/3, + snapshot_dcd/1 ]). %% Internal stuff @@ -99,6 +101,19 @@ opt_dump_log(InitBy) -> end, perform_dump(InitBy, Reg). +snapshot_dcd(Tables) -> + lists:foreach( + fun(Tab) -> + case mnesia_lib:storage_type_at_node(node(), Tab) of + disc_copies -> + mnesia_log:ets2dcd(Tab); + _ -> + %% Storage type was checked before queueing the op, though + skip + end + end, Tables), + dumped. + %% Scan for decisions perform_dump(InitBy, Regulator) when InitBy == scan_decisions -> ?eval_debug_fun({?MODULE, perform_dump}, [InitBy]), @@ -981,28 +996,10 @@ open_files(_Tab, _Storage, _UpdateInPlace, _InitBy) -> false. open_disc_copies(Tab, InitBy) -> - DclF = mnesia_lib:tab2dcl(Tab), - DumpEts = - case file:read_file_info(DclF) of - {error, enoent} -> - false; - {ok, DclInfo} -> - DcdF = mnesia_lib:tab2dcd(Tab), - case file:read_file_info(DcdF) of - {error, Reason} -> - mnesia_lib:dbg_out("File ~p info_error ~p ~n", - [DcdF, Reason]), - true; - {ok, DcdInfo} -> - Mul = case ?catch_val(dc_dump_limit) of - {'EXIT', _} -> ?DumpToEtsMultiplier; - Val -> Val - end, - DcdInfo#file_info.size =< (DclInfo#file_info.size * Mul) - end - end, + DumpEts = needs_dump_ets(Tab), if DumpEts == false; InitBy == startup -> + DclF = mnesia_lib:tab2dcl(Tab), mnesia_log:open_log({?MODULE,Tab}, mnesia_log:dcl_log_header(), DclF, @@ -1017,6 +1014,27 @@ open_disc_copies(Tab, InitBy) -> false end. +needs_dump_ets(Tab) -> + DclF = mnesia_lib:tab2dcl(Tab), + case file:read_file_info(DclF) of + {error, enoent} -> + false; + {ok, DclInfo} -> + DcdF = mnesia_lib:tab2dcd(Tab), + case file:read_file_info(DcdF) of + {error, Reason} -> + mnesia_lib:dbg_out("File ~p info_error ~p ~n", + [DcdF, Reason]), + true; + {ok, DcdInfo} -> + Mul = case ?catch_val(dc_dump_limit) of + {'EXIT', _} -> ?DumpToEtsMultiplier; + Val -> Val + end, + DcdInfo#file_info.size =< (DclInfo#file_info.size * Mul) + end + end. + %% Always opens the dcl file for writing overriding already_dumped %% mechanismen, used for schema transactions. 
open_dcl(Tab) -> diff --git a/lib/mnesia/src/mnesia_loader.erl b/lib/mnesia/src/mnesia_loader.erl index 530317bcdd..cbb3d7e430 100644 --- a/lib/mnesia/src/mnesia_loader.erl +++ b/lib/mnesia/src/mnesia_loader.erl @@ -69,9 +69,10 @@ do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == disc_copies -> ignore; _ -> mnesia_monitor:mktab(Tab, Args), - Count = mnesia_log:dcd2ets(Tab, Repair), - case ets:info(Tab, size) of - X when X < Count * 4 -> + _Count = mnesia_log:dcd2ets(Tab, Repair), + case mnesia_monitor:get_env(dump_disc_copies_at_startup) + andalso mnesia_dumper:needs_dump_ets(Tab) of + true -> ok = mnesia_log:ets2dcd(Tab); _ -> ignore diff --git a/lib/mnesia/src/mnesia_monitor.erl b/lib/mnesia/src/mnesia_monitor.erl index 6fc1a394a6..a0e0e630ec 100644 --- a/lib/mnesia/src/mnesia_monitor.erl +++ b/lib/mnesia/src/mnesia_monitor.erl @@ -664,6 +664,7 @@ env() -> backup_module, debug, dir, + dump_disc_copies_at_startup, dump_log_load_regulation, dump_log_time_threshold, dump_log_update_in_place, @@ -692,6 +693,8 @@ default_env(debug) -> default_env(dir) -> Name = lists:concat(["Mnesia.", node()]), filename:absname(Name); +default_env(dump_disc_copies_at_startup) -> + true; default_env(dump_log_load_regulation) -> false; default_env(dump_log_time_threshold) -> @@ -741,6 +744,7 @@ do_check_type(debug, trace) -> trace; do_check_type(debug, true) -> debug; do_check_type(debug, verbose) -> verbose; do_check_type(dir, V) -> filename:absname(V); +do_check_type(dump_disc_copies_at_startup, B) -> bool(B); do_check_type(dump_log_load_regulation, B) -> bool(B); do_check_type(dump_log_time_threshold, I) when is_integer(I), I > 0 -> I; do_check_type(dump_log_update_in_place, B) -> bool(B); diff --git a/lib/observer/doc/src/observer_ug.xml b/lib/observer/doc/src/observer_ug.xml index 62f99c5210..fcb42f6c31 100644 --- a/lib/observer/doc/src/observer_ug.xml +++ b/lib/observer/doc/src/observer_ug.xml @@ -104,6 +104,29 @@ <note> <p><em>Reds</em> can be presented as accumulated values or as values since last update.</p> </note> + <p><c>Process info</c> open a detailed information window on the selected process. + <taglist> + <tag>Process Information</tag> + <item>Shows the process information.</item> + <tag>Messages</tag> + <item>Shows the process messages.</item> + <tag>Dictionary</tag> + <item>Shows the process dictionary.</item> + <tag>Stack Trace</tag> + <item>Shows the process current stack trace.</item> + <tag>State</tag> + <item>Show the process state.</item> + <tag>Log</tag> + <item>If enabled and available, show the process SASL log entries.</item> + </taglist> + <note> + <p><c>Log</c> needs SASL application to be started on the observed node, with log_mf_h as log handler. + The Observed node must be R16B02 or higher. + <c>rb</c> server must not be started on the observed node when clicking on menu 'Log/Toggle log view'. + <c>rb</c> server will be stopped on the observed node when exiting or changing observed node. + </p> + </note> + </p> <p><c>Trace Processes</c> will add the selected process identifiers to the <c>Trace Overview</c> view and the node the processes reside on will be added as well. <c>Trace Named Processes</c> will add the registered name of processes. 
This can be useful diff --git a/lib/observer/src/cdv_proc_cb.erl b/lib/observer/src/cdv_proc_cb.erl index dfc2df9c4c..d1549f79eb 100644 --- a/lib/observer/src/cdv_proc_cb.erl +++ b/lib/observer/src/cdv_proc_cb.erl @@ -129,6 +129,7 @@ info_fields() -> {"Started", start_time}, {"Parent", {click,parent}}, {"Message Queue Len",msg_q_len}, + {"Run queue", run_queue}, {"Reductions", reds}, {"Program counter", prog_count}, {"Continuation pointer",cp}, diff --git a/lib/observer/src/crashdump_viewer.erl b/lib/observer/src/crashdump_viewer.erl index 99329b94e2..ef14ba46e2 100644 --- a/lib/observer/src/crashdump_viewer.erl +++ b/lib/observer/src/crashdump_viewer.erl @@ -320,6 +320,8 @@ handle_call(general_info,_From,State=#state{file=File}) -> "Some information might be missing."]; false -> [] end, + ets:insert(cdv_reg_proc_table, + {cdv_dump_node_name,GenInfo#general_info.node_name}), {reply,{ok,GenInfo,TW},State#state{wordsize=WS, num_atoms=NumAtoms}}; handle_call({expand_binary,{Offset,Size,Pos}},_From,State=#state{file=File}) -> Fd = open(File), @@ -926,7 +928,7 @@ general_info(File) -> N; [] -> case lookup_index(?no_distribution) of - [_] -> "nonode@nohost"; + [_] -> "'nonode@nohost'"; [] -> "unknown" end end, @@ -1131,6 +1133,8 @@ all_procinfo(Fd,Fun,Proc,WS,LineHead) -> "arity = " ++ Arity -> %%! Temporary workaround get_procinfo(Fd,Fun,Proc#proc{arity=Arity--"\r\n"},WS); + "Run queue" -> + get_procinfo(Fd,Fun,Proc#proc{run_queue=val(Fd)},WS); "=" ++ _next_tag -> Proc; Other -> @@ -1165,6 +1169,19 @@ parse_pid(Str) -> {Pid,Rest} = parse_link(Str,[]), {{Pid,Pid},Rest}. +parse_monitor("{"++Str) -> + %% Named process + {Name,Node,Rest1} = parse_name_node(Str,[]), + Pid = get_pid_from_name(Name,Node), + case parse_link(string:strip(Rest1,left,$,),[]) of + {Ref,"}"++Rest2} -> + %% Bug in break.c - prints an extra "}" for remote + %% nodes... thus the strip + {{Pid,"{"++Name++","++Node++"} ("++Ref++")"}, + string:strip(Rest2,left,$})}; + {Ref,[]} -> + {{Pid,"{"++Name++","++Node++"} ("++Ref++")"},[]} + end; parse_monitor(Str) -> case parse_link(Str,[]) of {Pid,","++Rest1} -> @@ -1186,6 +1203,35 @@ parse_link([],Acc) -> %% truncated {lists:reverse(Acc),[]}. +parse_name_node(","++Rest,Name) -> + parse_name_node(Rest,Name,[]); +parse_name_node([H|T],Name) -> + parse_name_node(T,[H|Name]); +parse_name_node([],Name) -> + %% truncated + {lists:reverse(Name),[],[]}. + +parse_name_node("}"++Rest,Name,Node) -> + {lists:reverse(Name),lists:reverse(Node),Rest}; +parse_name_node([H|T],Name,Node) -> + parse_name_node(T,Name,[H|Node]); +parse_name_node([],Name,Node) -> + %% truncated + {lists:reverse(Name),lists:reverse(Node),[]}. + +get_pid_from_name(Name,Node) -> + case ets:lookup(cdv_reg_proc_table,cdv_dump_node_name) of + [{_,Node}] -> + case ets:lookup(cdv_reg_proc_table,Name) of + [{_,Pid}] when is_pid(Pid) -> + pid_to_list(Pid); + _ -> + "<unkonwn_pid>" + end; + _ -> + "<unknown_pid_other_node>" + end. + maybe_other_node(Id) -> Channel = case split($.,Id) of diff --git a/lib/observer/src/crashdump_viewer.hrl b/lib/observer/src/crashdump_viewer.hrl index 0e2eba6dee..47705d0da7 100644 --- a/lib/observer/src/crashdump_viewer.hrl +++ b/lib/observer/src/crashdump_viewer.hrl @@ -85,7 +85,9 @@ old_heap_top, old_heap_end, memory, - stack_dump}). + stack_dump, + run_queue=?unknown + }). 
-record(port, {id, diff --git a/lib/observer/src/observer_html_lib.erl b/lib/observer/src/observer_html_lib.erl index c279218707..53197078cf 100644 --- a/lib/observer/src/observer_html_lib.erl +++ b/lib/observer/src/observer_html_lib.erl @@ -60,7 +60,8 @@ expandable_term_body(Heading,[],_Tab) -> "StackDump" -> "No stack dump was found"; "Dictionary" -> "No dictionary was found"; "ProcState" -> "Information could not be retrieved," - " system messages may not be handled by this process." + " system messages may not be handled by this process."; + "SaslLog" -> "No log entry was found" end]; expandable_term_body(Heading,Expanded,Tab) -> Attr = "BORDER=0 CELLPADDING=0 CELLSPACING=1 WIDTH=100%", @@ -102,7 +103,10 @@ expandable_term_body(Heading,Expanded,Tab) -> element(1, lists:mapfoldl(fun(Entry, Even) -> {proc_state(Tab, Entry,Even), not Even} - end, true, Expanded))]); + end, true, Expanded))]); + "SaslLog" -> + table(Attr, + [tr("BGCOLOR=white",[td("ALIGN=left", pre(href_proc_port(Expanded)))])]) ; _ -> table(Attr, [tr( diff --git a/lib/observer/src/observer_pro_wx.erl b/lib/observer/src/observer_pro_wx.erl index 0be8c18893..026693ff56 100644 --- a/lib/observer/src/observer_pro_wx.erl +++ b/lib/observer/src/observer_pro_wx.erl @@ -578,7 +578,7 @@ get_row(From, Row, pid, Info) -> end, From ! {self(), Pid}; get_row(From, Row, Col, Info) -> - Data = case Row > array:size(Info) of + Data = case Row >= array:size(Info) of true -> ""; false -> diff --git a/lib/observer/src/observer_procinfo.erl b/lib/observer/src/observer_procinfo.erl index 8e8a37fc93..2a840dc49e 100644 --- a/lib/observer/src/observer_procinfo.erl +++ b/lib/observer/src/observer_procinfo.erl @@ -43,6 +43,8 @@ -record(worker, {panel, callback}). +-record(io, {rdata=""}). + start(Process, ParentFrame, Parent) -> wx_object:start_link(?MODULE, [Process, ParentFrame, Parent], []). @@ -69,6 +71,10 @@ init([Pid, ParentFrame, Parent]) -> DictPage = init_panel(Notebook, "Dictionary", [Pid,Table], fun init_dict_page/3), StackPage = init_panel(Notebook, "Stack Trace", [Pid], fun init_stack_page/2), StatePage = init_panel(Notebook, "State", [Pid,Table], fun init_state_page/3), + Ps = case gen_server:call(observer, log_status) of + true -> [init_panel(Notebook, "Log", [Pid,Table], fun init_log_page/3)]; + false -> [] + end, wxFrame:connect(Frame, close_window), wxMenu:connect(Frame, command_menu_selected), @@ -78,7 +84,7 @@ init([Pid, ParentFrame, Parent]) -> pid=Pid, frame=Frame, notebook=Notebook, - pages=[ProcessPage,MessagePage,DictPage,StackPage,StatePage], + pages=[ProcessPage,MessagePage,DictPage,StackPage,StatePage|Ps], expand_table=Table }} catch error:{badrpc, _} -> @@ -327,6 +333,26 @@ fetch_state_info2(Pid, M) -> {badrpc,{'EXIT',{timeout, _}}} -> [] end. 
+init_log_page(Parent, Pid, Table) -> + Win = observer_lib:html_window(Parent), + Update = fun() -> + Fd = spawn_link(fun() -> io_server() end), + rpc:call(node(Pid), rb, rescan, [[{start_log, Fd}]]), + rpc:call(node(Pid), rb, grep, [local_pid_str(Pid)]), + Logs = io_get_data(Fd), + %% Replace remote local pid notation to global notation + Pref = global_pid_node_pref(Pid), + ExpPid = re:replace(Logs,"<0\.","<" ++ Pref ++ ".",[global, {return, list}]), + %% Try to keep same look by removing blanks at right of rewritten PID + NbBlanks = length(Pref) - 1, + Re = "(<" ++ Pref ++ "\.[^>]{1,}>)[ ]{"++ integer_to_list(NbBlanks) ++ "}", + Look = re:replace(ExpPid, Re, "\\1", [global, {return, list}]), + Html = observer_html_lib:expandable_term("SaslLog", Look, Table), + wxHtmlWindow:setPage(Win, Html) + end, + Update(), + {Win, Update}. + create_menus(MenuBar) -> Menus = [{"File", [#create_menu{id=?wxID_CLOSE, text="Close"}]}, {"View", [#create_menu{id=?REFRESH, text="Refresh\tCtrl-R"}]}], @@ -409,3 +435,51 @@ filter_monitor_info() -> Ms = proplists:get_value(monitors, Data), [Pid || {process, Pid} <- Ms] end. + +local_pid_str(Pid) -> + %% observer can observe remote nodes + %% There is no function to get the local + %% pid from the remote pid ... + %% So grep will fail to find remote pid in remote local log. + %% i.e. <4589.42.1> will not be found, but <0.42.1> will + %% Let's replace first integer by zero + "<0" ++ re:replace(pid_to_list(Pid),"\<([0-9]{1,})","",[{return, list}]). + +global_pid_node_pref(Pid) -> + %% Global PID node prefix : X of <X.Y.Z> + string:strip(string:sub_word(pid_to_list(Pid),1,$.),left,$<). + + +io_get_data(Pid) -> + Pid ! {self(), get_data_and_close}, + receive + {Pid, data, Data} -> lists:flatten(Data) + end. + +io_server() -> + io_server(#io{}). + +io_server(State) -> + receive + {io_request, From, ReplyAs, Request} -> + {_, Reply, NewState} = io_request(Request,State), + From ! {io_reply, ReplyAs, Reply}, + io_server(NewState); + {Pid, get_data_and_close} -> + Pid ! {self(), data, lists:reverse(State#io.rdata)}, + normal; + _Unknown -> + io_server(State) + end. + +io_request({put_chars, _Encoding, Chars}, State = #io{rdata=Data}) -> + {ok, ok, State#io{rdata=[Chars|Data]}}; +io_request({put_chars, Encoding, Module, Function, Args}, State) -> + try + io_request({put_chars, Encoding, apply(Module, Function, Args)}, State) + catch _:_ -> + {error, {error, Function}, State} + end; +io_request(_Req, State) -> + %% io:format("~p: Unknown req: ~p ~n",[?LINE, _Req]), + {ok, {error, request}, State}. diff --git a/lib/observer/src/observer_wx.erl b/lib/observer/src/observer_wx.erl index c86f5ea916..54c4092a78 100644 --- a/lib/observer/src/observer_wx.erl +++ b/lib/observer/src/observer_wx.erl @@ -37,6 +37,7 @@ -define(ID_CONNECT, 2). -define(ID_NOTEBOOK, 3). -define(ID_CDV, 4). +-define(ID_LOGVIEW, 5). -define(FIRST_NODES_MENU_ID, 1000). -define(LAST_NODES_MENU_ID, 2000). @@ -60,7 +61,8 @@ active_tab, node, nodes, - prev_node="" + prev_node="", + log = false }). 
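The Log page rewrites the local pid notation that the remote node's report browser prints ("<0.X.Y>") into the node-prefixed notation observer shows for remote pids (the X in <X.Y.Z>). A small worked illustration of that substitution, assuming a hypothetical node prefix of 4589 and a made-up log line:

    Pref = "4589",                     %% as returned by global_pid_node_pref/1
    Logs = "** Generic server <0.42.0> terminating",
    re:replace(Logs, "<0\\.", "<" ++ Pref ++ ".", [global, {return, list}]).
    %% -> "** Generic server <4589.42.0> terminating"

The second re:replace in init_log_page then trims the padding blanks so the rewritten, wider pids do not disturb the column layout.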
start() -> @@ -215,14 +217,17 @@ handle_event(#wx{event=#wxNotebook{type=command_notebook_page_changing}}, {noreply, State#state{active_tab=Pid}} end; -handle_event(#wx{event = #wxClose{}}, State) -> - {stop, normal, State}; - handle_event(#wx{id = ?ID_CDV, event = #wxCommand{type = command_menu_selected}}, State) -> spawn(crashdump_viewer, start, []), {noreply, State}; -handle_event(#wx{id = ?wxID_EXIT, event = #wxCommand{type = command_menu_selected}}, State) -> +handle_event(#wx{event = #wxClose{}}, #state{log=LogOn} = State) -> + LogOn andalso rpc:block_call(State#state.node, rb, stop, []), + {stop, normal, State}; + +handle_event(#wx{id = ?wxID_EXIT, event = #wxCommand{type = command_menu_selected}}, + #state{log=LogOn} = State) -> + LogOn andalso rpc:block_call(State#state.node, rb, stop, []), {stop, normal, State}; handle_event(#wx{id = ?wxID_HELP, event = #wxCommand{type = command_menu_selected}}, State) -> @@ -300,12 +305,42 @@ handle_event(#wx{id = ?ID_PING, event = #wxCommand{type = command_menu_selected} end, {noreply, UpdState}; -handle_event(#wx{id = Id, event = #wxCommand{type = command_menu_selected}}, State) - when Id > ?FIRST_NODES_MENU_ID, Id < ?LAST_NODES_MENU_ID -> +handle_event(#wx{id = ?ID_LOGVIEW, event = #wxCommand{type = command_menu_selected}}, + #state{frame = Frame, log = PrevLog, node = Node} = State) -> + try + ok = ensure_sasl_started(Node), + ok = ensure_mf_h_handler_used(Node), + ok = ensure_rb_mode(Node, PrevLog), + case PrevLog of + false -> + rpc:block_call(Node, rb, start, []), + set_status("Observer - " ++ atom_to_list(Node) ++ " (rb_server started)"), + {noreply, State#state{log=true}}; + true -> + rpc:block_call(Node, rb, stop, []), + set_status("Observer - " ++ atom_to_list(Node) ++ " (rb_server stopped)"), + {noreply, State#state{log=false}} + end + catch + throw:Reason -> + create_txt_dialog(Frame, Reason, "Log view status", ?wxICON_ERROR), + {noreply, State} + end; - Node = lists:nth(Id - ?FIRST_NODES_MENU_ID, State#state.nodes), - UpdState = change_node_view(Node, State), - {noreply, UpdState}; +handle_event(#wx{id = Id, event = #wxCommand{type = command_menu_selected}}, + #state{nodes= Ns , node = PrevNode, log = PrevLog} = State) + when Id > ?FIRST_NODES_MENU_ID, Id < ?LAST_NODES_MENU_ID -> + Node = lists:nth(Id - ?FIRST_NODES_MENU_ID, Ns), + %% Close rb_server only if another node than current one selected + LState = case PrevLog of + true -> case Node == PrevNode of + false -> rpc:block_call(PrevNode, rb, stop, []), + State#state{log=false} ; + true -> State + end; + false -> State + end, + {noreply, change_node_view(Node, LState)}; handle_event(Event, State) -> Pid = get_active_pid(State), @@ -340,6 +375,9 @@ handle_call(stop, _, State = #state{frame = Frame}) -> wxFrame:destroy(Frame), {stop, normal, ok, State}; +handle_call(log_status, _From, State) -> + {reply, State#state.log, State}; + handle_call(_Msg, _From, State) -> {reply, ok, State}. @@ -422,8 +460,7 @@ return_to_localnode(Frame, Node) -> end. create_txt_dialog(Frame, Msg, Title, Style) -> - MD = wxMessageDialog:new(Frame, Msg, [{style, Style}]), - wxMessageDialog:setTitle(MD, Title), + MD = wxMessageDialog:new(Frame, Msg, [{style, Style}, {caption,Title}]), wxDialog:showModal(MD), wxDialog:destroy(MD). 
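As the note in the observer User's Guide above states, the Log view only works when the observed node runs SASL with the log_mf_h report handler. A minimal sys.config sketch for such a node (directory and size limits are examples only; the same parameters are set programmatically by the test suite further down):

    [{sasl, [{sasl_error_logger, tty},
             {error_logger_mf_dir, "/var/log/my_node"},
             {error_logger_mf_maxbytes, 1048576},   %% 1 MB per log file
             {error_logger_mf_maxfiles, 5}]}].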
@@ -569,17 +606,19 @@ default_menus(NodesMenuItems) -> false -> {"Nodes", NodesMenuItems ++ [#create_menu{id = ?ID_CONNECT, text = "Enable distribution"}]} end, + LogMenu = {"Log", [#create_menu{id = ?ID_LOGVIEW, text = "Toggle log view"}]}, case os:type() =:= {unix, darwin} of false -> FileMenu = {"File", [CDV, Quit]}, HelpMenu = {"Help", [About,Help]}, - [FileMenu, NodeMenu, HelpMenu]; + [FileMenu, NodeMenu, LogMenu, HelpMenu]; true -> %% On Mac quit and about will be moved to the "default' place %% automagicly, so just add them to a menu that always exist. %% But not to the help menu for some reason - {Tag, Menus} = FileMenu, - [{Tag, Menus ++ [About]}, NodeMenu, {"&Help", [Help]}] + + {Tag, Menus} = NodeMenu, + [{Tag, Menus ++ [Quit,About]}, LogMenu, {"&Help", [Help]}] end. clean_menus(Menus, MenuBar) -> @@ -658,3 +697,59 @@ update_node_list(State = #state{menubar=MenuBar}) -> end, observer_lib:create_menu_item(Dist, NodeMenu, Index), State#state{nodes = Nodes}. + +ensure_sasl_started(Node) -> + %% is sasl started ? + Apps = rpc:block_call(Node, application, which_applications, []), + case lists:keyfind(sasl, 1, Apps) of + false -> throw("Error: sasl application not started."), + error; + {sasl, _, _} -> ok + end. + +ensure_mf_h_handler_used(Node) -> + %% is log_mf_h used ? + Handlers = rpc:block_call(Node, gen_event, which_handlers, [error_logger]), + case lists:any(fun(L)-> L == log_mf_h end, Handlers) of + false -> throw("Error: log_mf_h handler not used in sasl."), + error; + true -> ok + end. + +ensure_rb_mode(Node, PrevLog) -> + ok = ensure_rb_module_loaded(Node), + ok = is_rb_compatible(Node), + ok = is_rb_server_running(Node, PrevLog), + ok. + + +ensure_rb_module_loaded(Node) -> + %% Need to ensure that module is loaded in order to detect exported + %% functions on interactive nodes + case rpc:block_call(Node, code, ensure_loaded, [rb]) of + {badrpc, Reason} -> + throw("Error: badrpc - " ++ io_lib:format("~tp",[Reason])); + {error, Reason} -> + throw("Error: rb module load error - " ++ io_lib:format("~tp",[Reason])); + {module,rb} -> + ok + end. + +is_rb_compatible(Node) -> + %% Simply test that rb:log_list/0 is exported + case rpc:block_call(Node, erlang, function_exported, [rb, log_list, 0]) of + false -> throw("Error: Node's Erlang release must be at least R16B02."); + true -> ok + end. + +is_rb_server_running(Node, LogState) -> + %% If already started, somebody else may use it. + %% We can not use it too, as far log file would be overriden. Not fair. + case rpc:block_call(Node, erlang, whereis, [rb_server]) of + Pid when is_pid(Pid), (LogState == false) -> + throw("Error: rb_server is already started and maybe used by someone."); + Pid when is_pid(Pid) -> + ok; + undefined -> + ok + end. diff --git a/lib/observer/test/crashdump_viewer_SUITE.erl b/lib/observer/test/crashdump_viewer_SUITE.erl index 03ab0c20e1..1266b1f9b9 100644 --- a/lib/observer/test/crashdump_viewer_SUITE.erl +++ b/lib/observer/test/crashdump_viewer_SUITE.erl @@ -101,7 +101,7 @@ end_per_group(_GroupName, Config) -> init_per_suite(Config) when is_list(Config) -> delete_saved(Config), DataDir = ?config(data_dir,Config), - Rels = [R || R <- [r15b,r16b], ?t:is_release_available(R)] ++ [current], + Rels = [R || R <- [r16b,'17'], ?t:is_release_available(R)] ++ [current], io:format("Creating crash dumps for the following releases: ~p", [Rels]), AllDumps = create_dumps(DataDir,Rels), [{dumps,AllDumps}|Config]. @@ -563,12 +563,6 @@ dump_with_strange_module_name(DataDir,Rel,DumpName) -> CD. 
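Taken together, the ensure_* helpers added to observer_wx above reduce to a handful of rpc calls against the observed node. A condensed sketch (error handling and the user-facing dialog messages omitted; the last match assumes nobody else has an rb_server running):

    Apps = rpc:block_call(Node, application, which_applications, []),
    true = lists:keymember(sasl, 1, Apps),
    Handlers = rpc:block_call(Node, gen_event, which_handlers, [error_logger]),
    true = lists:member(log_mf_h, Handlers),
    {module, rb} = rpc:block_call(Node, code, ensure_loaded, [rb]),
    true = rpc:block_call(Node, erlang, function_exported, [rb, log_list, 0]),
    undefined = rpc:block_call(Node, erlang, whereis, [rb_server]).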
dump(Node,DataDir,Rel,DumpName) -> - case Rel of - _ when Rel<r15b, Rel=/=current -> - rpc:call(Node,os,putenv,["ERL_CRASH_DUMP_SECONDS","600"]); - _ -> - ok - end, rpc:call(Node,erlang,halt,[DumpName]), Crashdump0 = filename:join(filename:dirname(code:which(?t)), "erl_crash_dump.n1"), @@ -623,42 +617,21 @@ dos_dump(DataDir,Rel,Dump) -> rel_opt(Rel) -> case Rel of - r9b -> [{erl,[{release,"r9b_patched"}]}]; - r9c -> [{erl,[{release,"r9c_patched"}]}]; - r10b -> [{erl,[{release,"r10b_patched"}]}]; - r11b -> [{erl,[{release,"r11b_patched"}]}]; - r12b -> [{erl,[{release,"r12b_patched"}]}]; - r13b -> [{erl,[{release,"r13b_patched"}]}]; - r14b -> [{erl,[{release,"r14b_latest"}]}]; %naming convention changed - r15b -> [{erl,[{release,"r15b_latest"}]}]; r16b -> [{erl,[{release,"r16b_latest"}]}]; + '17' -> [{erl,[{release,"17_latest"}]}]; current -> [] end. dump_prefix(Rel) -> case Rel of - r9b -> "r9b_dump."; - r9c -> "r9c_dump."; - r10b -> "r10b_dump."; - r11b -> "r11b_dump."; - r12b -> "r12b_dump."; - r13b -> "r13b_dump."; - r14b -> "r14b_dump."; - r15b -> "r15b_dump."; r16b -> "r16b_dump."; - current -> "r17b_dump." + '17' -> "r17_dump."; + current -> "r18_dump." end. compat_rel(Rel) -> case Rel of - r9b -> "+R9 "; - r9c -> "+R9 "; - r10b -> "+R10 "; - r11b -> "+R11 "; - r12b -> "+R12 "; - r13b -> "+R13 "; - r14b -> "+R14 "; - r15b -> "+R15 "; r16b -> "+R16 "; + '17' -> "+R17 "; current -> "" end. diff --git a/lib/observer/test/observer_SUITE.erl b/lib/observer/test/observer_SUITE.erl index 5cf719acb1..c69fdf4bdf 100644 --- a/lib/observer/test/observer_SUITE.erl +++ b/lib/observer/test/observer_SUITE.erl @@ -22,6 +22,8 @@ -include_lib("wx/include/wx.hrl"). -include_lib("observer/src/observer_tv.hrl"). +-define(ID_LOGVIEW, 5). + %% Test server specific exports -export([all/0, suite/0,groups/0]). -export([init_per_testcase/2, end_per_testcase/2, @@ -44,8 +46,9 @@ all() -> groups() -> [{gui, [], - [basic - , process_win, table_win + [basic, + process_win, + table_win ] }]. 
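With the pre-R15 special casing removed earlier in this diff, crashdump_viewer_SUITE's dump/4 produces a dump simply by halting the remote node with a slogan string; where the dump file ends up is governed by the ERL_CRASH_DUMP environment variable of that node. Roughly (node setup and file name are hypothetical):

    %% the node is started with e.g. ERL_CRASH_DUMP=/tmp/my_dump.n1 in its
    %% environment; the dump itself is then triggered with
    rpc:call(Node, erlang, halt, ["dump requested by test"]).
    %% erlang:halt/1 with a string writes a crash dump using the string
    %% as slogan and terminates the node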
@@ -107,7 +110,7 @@ appup_file(Config) when is_list(Config) -> basic(suite) -> []; basic(doc) -> [""]; basic(Config) when is_list(Config) -> - timer:send_after(100, "foobar"), %% Otherwise the timer sever gets added to procs + timer:send_after(100, "foobar"), %% Otherwise the timer server gets added to procs ProcsBefore = processes(), NumProcsBefore = length(ProcsBefore), @@ -126,7 +129,7 @@ basic(Config) when is_list(Config) -> timer:sleep(200), ok = wxNotebook:advanceSelection(Notebook) end, - %% Just verify that we can toogle trough all pages + %% Just verify that we can toggle through all pages [_|_] = [Check(N, false) || N <- lists:seq(1, Count)], %% Cause it to resize Frame = get_top_level_parent(Notebook), @@ -214,10 +217,27 @@ test_page(Title, Window) -> process_win(suite) -> []; process_win(doc) -> [""]; process_win(Config) when is_list(Config) -> + % Stop SASL if already started + SaslStart = case whereis(sasl_sup) of + undefined -> false; + _ -> application:stop(sasl), + true + end, + % Define custom sasl and log_mf_h app vars + Privdir=?config(priv_dir,Config), + application:set_env(sasl, sasl_error_logger, tty), + application:set_env(sasl, error_logger_mf_dir, Privdir), + application:set_env(sasl, error_logger_mf_maxbytes, 1000), + application:set_env(sasl, error_logger_mf_maxfiles, 5), + application:start(sasl), ok = observer:start(), ObserverNB = setup_whitebox_testing(), Parent = get_top_level_parent(ObserverNB), - Frame = observer_procinfo:start(self(), Parent, self()), + % Activate log view + whereis(observer) ! #wx{id = ?ID_LOGVIEW, event = #wxCommand{type = command_menu_selected}}, + timer:sleep(1000), + % Process window tests (use sasl_sup for a non empty Log tab) + Frame = observer_procinfo:start(whereis(sasl_sup), Parent, self()), PIPid = wx_object:get_pid(Frame), PIPid ! {get_debug_info, self()}, Notebook = receive {procinfo_debug, NB} -> NB end, @@ -229,6 +249,11 @@ process_win(Config) when is_list(Config) -> [_|_] = [Check(N) || N <- lists:seq(1, Count)], PIPid ! #wx{event=#wxClose{type=close_window}}, observer:stop(), + application:stop(sasl), + case SaslStart of + true -> application:start(sasl); + false -> ok + end, ok. table_win(suite) -> []; diff --git a/lib/parsetools/include/leexinc.hrl b/lib/parsetools/include/leexinc.hrl index 938aef58f9..2657fdcfaa 100644 --- a/lib/parsetools/include/leexinc.hrl +++ b/lib/parsetools/include/leexinc.hrl @@ -44,6 +44,8 @@ string(Ics0, L0, Tcs, Ts) -> %% Test for and remove the end token wrapper. Push back characters %% are prepended to RestChars. +-dialyzer({nowarn_function, string_cont/4}). + string_cont(Rest, Line, {token,T}, Ts) -> string(Rest, Line, Rest, [T|Ts]); string_cont(Rest, Line, {token,T,Push}, Ts) -> @@ -113,6 +115,8 @@ token(S0, Ics0, L0, Tcs, Tlen0, Tline, A0, Alen0) -> %% If we have a token or error then return done, else if we have a %% skip_token then continue. +-dialyzer({nowarn_function, token_cont/3}). + token_cont(Rest, Line, {token,T}) -> {done,{ok,T,Line},Rest}; token_cont(Rest, Line, {token,T,Push}) -> @@ -187,6 +191,8 @@ tokens(S0, Ics0, L0, Tcs, Tlen0, Tline, Ts, A0, Alen0) -> %% a token then save it and continue, else if we have a skip_token %% just continue. +-dialyzer({nowarn_function, tokens_cont/4}). 
+ tokens_cont(Rest, Line, {token,T}, Ts) -> tokens(yystate(), Rest, Line, Rest, 0, Line, [T|Ts], reject, 0); tokens_cont(Rest, Line, {token,T,Push}, Ts) -> @@ -238,6 +244,8 @@ skip_tokens(S0, Ics0, L0, Tcs, Tlen0, Tline, Error, A0, Alen0) -> %% Skip tokens until we have an end_token or error then return done %% with the original rror. +-dialyzer({nowarn_function, skip_cont/4}). + skip_cont(Rest, Line, {token,_T}, Error) -> skip_tokens(yystate(), Rest, Line, Rest, 0, Line, Error, reject, 0); skip_cont(Rest, Line, {token,_T,Push}, Error) -> diff --git a/lib/parsetools/src/yecc.erl b/lib/parsetools/src/yecc.erl index f4657663e6..3fcec73ce2 100644 --- a/lib/parsetools/src/yecc.erl +++ b/lib/parsetools/src/yecc.erl @@ -2064,11 +2064,13 @@ output_actions(St0, StateJumps, StateInfo) -> SelS = [{State,Called} || {{State,_JActions}, {State,Called}} <- lists:zip(StateJumps, lists:keysort(1, Sel))], + St05 = + fwrite(St0, <<"-dialyzer({nowarn_function, yeccpars2/7}).\n">>, []), St10 = foldl(fun({State, Called}, St_0) -> {State, #state_info{state_repr = IState}} = lookup_state(StateInfo, State), output_state_selection(St_0, State, IState, Called) - end, St0, SelS), + end, St05, SelS), St20 = fwrite(St10, <<"yeccpars2(Other, _, _, _, _, _, _) ->\n">>, []), St = fwrite(St20, ?YECC_BUG(<<"{missing_state_in_action_table, Other}">>, []), @@ -2089,7 +2091,8 @@ output_state_selection(St0, State, IState, Called) -> [Comment, IState]). output_state_actions(St, State, State, {Actions,jump_none}, SI) -> - output_state_actions1(St, State, Actions, true, normal, SI); + St1 = output_state_actions_begin(St, State, Actions), + output_state_actions1(St1, State, Actions, true, normal, SI); output_state_actions(St0, State, State, {Actions, Jump}, SI) -> {Tag, To, Common} = Jump, CS = case Tag of @@ -2099,13 +2102,22 @@ output_state_actions(St0, State, State, {Actions, Jump}, SI) -> St = output_state_actions1(St0, State, Actions, true, {to, CS}, SI), if To =:= State -> - output_state_actions1(St, CS, Common, true, normal, SI); + St1 = output_state_actions_begin(St, State, Actions), + output_state_actions1(St1, CS, Common, true, normal, SI); true -> St end; output_state_actions(St, State, JState, _XActions, _SI) -> fwrite(St, <<"%% yeccpars2_~w: see yeccpars2_~w\n\n">>, [State, JState]). +output_state_actions_begin(St, State, Actions) -> + case [yes || {_, #reduce{}} <- Actions] of + [] -> + fwrite(St, <<"-dialyzer({nowarn_function, yeccpars2_~w/7}).\n">>, + [State]); % Only when yeccerror(T) is output. + _ -> St + end. + output_state_actions1(St, State, [], IsFirst, normal, _SI) -> output_state_actions_fini(State, IsFirst, St); output_state_actions1(St0, State, [], IsFirst, {to, ToS}, _SI) -> diff --git a/lib/parsetools/test/yecc_SUITE.erl b/lib/parsetools/test/yecc_SUITE.erl index d308d21f82..c18dc15e37 100644 --- a/lib/parsetools/test/yecc_SUITE.erl +++ b/lib/parsetools/test/yecc_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2005-2013. All Rights Reserved. +%% Copyright Ericsson AB 2005-2014. All Rights Reserved. 
%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -340,8 +340,8 @@ syntax(Config) when is_list(Config) -> {_,[{L1,_,{undefined_function,{yeccpars2_2_,1}}}, {L2,_,{bad_inline,{yeccpars2_2_,1}}}]}], []} = compile:file(Parserfile1, [basic_validation,return]), - ?line L1 = 28 + SzYeccPre, - ?line L2 = 35 + SzYeccPre + ?line L1 = 31 + SzYeccPre, + ?line L2 = 38 + SzYeccPre end(), %% Bad macro in action. OTP-7224. @@ -358,8 +358,8 @@ syntax(Config) when is_list(Config) -> {_,[{L1,_,{undefined_function,{yeccpars2_2_,1}}}, {L2,_,{bad_inline,{yeccpars2_2_,1}}}]}], []} = compile:file(Parserfile1, [basic_validation,return]), - ?line L1 = 28 + SzYeccPre, - ?line L2 = 35 + SzYeccPre + ?line L1 = 31 + SzYeccPre, + ?line L2 = 38 + SzYeccPre end(), %% Check line numbers. OTP-7224. @@ -1619,8 +1619,8 @@ otp_7292(Config) when is_list(Config) -> {L2,_,{bad_inline,{yeccpars2_2_,1}}}]}], [{_,[{16,_,{unused_function,{foo,0}}}]}]} = compile:file(Parserfile1, [basic_validation, return]), - ?line L1 = 38 + SzYeccPre, - ?line L2 = 45 + SzYeccPre + L1 = 41 + SzYeccPre, + L2 = 48 + SzYeccPre end(), YeccPre = filename:join(Dir, "yeccpre.hrl"), @@ -1637,8 +1637,8 @@ otp_7292(Config) when is_list(Config) -> {L2,_,{bad_inline,{yeccpars2_2_,1}}}]}], [{_,[{16,_,{unused_function,{foo,0}}}]}]} = compile:file(Parserfile1, [basic_validation, return]), - ?line L1 = 37 + SzYeccPre, - ?line L2 = 44 + SzYeccPre + ?line L1 = 40 + SzYeccPre, + ?line L2 = 47 + SzYeccPre end(), file:delete(YeccPre), diff --git a/lib/public_key/test/erl_make_certs.erl b/lib/public_key/test/erl_make_certs.erl index 5926794ca8..b8e0494ce7 100644 --- a/lib/public_key/test/erl_make_certs.erl +++ b/lib/public_key/test/erl_make_certs.erl @@ -204,7 +204,7 @@ issuer_der(Issuer) -> Subject. subject(undefined, IsRootCA) -> - User = if IsRootCA -> "RootCA"; true -> user() end, + User = if IsRootCA -> "RootCA"; true -> os:getenv("USER", "test_user") end, Opts = [{email, User ++ "@erlang.org"}, {name, User}, {city, "Stockholm"}, @@ -215,14 +215,6 @@ subject(undefined, IsRootCA) -> subject(Opts, _) -> subject(Opts). -user() -> - case os:getenv("USER") of - false -> - "test_user"; - User -> - User - end. - subject(SubjectOpts) when is_list(SubjectOpts) -> Encode = fun(Opt) -> {Type,Value} = subject_enc(Opt), diff --git a/lib/reltool/src/reltool_utils.erl b/lib/reltool/src/reltool_utils.erl index 5a3f34506d..e6b1901316 100644 --- a/lib/reltool/src/reltool_utils.erl +++ b/lib/reltool/src/reltool_utils.erl @@ -54,12 +54,7 @@ root_dir() -> code:root_dir(). erl_libs() -> - case os:getenv("ERL_LIBS") of - false -> - []; - LibStr -> - string:tokens(LibStr, ":;") - end. + string:tokens(os:getenv("ERL_LIBS", ""), ":;"). 
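Several of the changes above replace the usual case-on-false boilerplate around os:getenv/1 with os:getenv/2, which returns the supplied default when the variable is unset. For the ERL_LIBS handling that collapses to a one-liner whose behaviour can be checked directly (a minimal sketch, run with ERL_LIBS unset):

    %% os:getenv/2 falls back to the given default when the variable is unset
    "" = os:getenv("ERL_LIBS", ""),
    %% ... so the tokenised library path is simply the empty list
    [] = string:tokens(os:getenv("ERL_LIBS", ""), ":;").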
lib_dirs(Dir) -> case erl_prim_loader:list_dir(Dir) of diff --git a/lib/reltool/test/reltool_server_SUITE.erl b/lib/reltool/test/reltool_server_SUITE.erl index 347e80ed7c..f140d6c55f 100644 --- a/lib/reltool/test/reltool_server_SUITE.erl +++ b/lib/reltool/test/reltool_server_SUITE.erl @@ -1205,14 +1205,9 @@ create_slim(Config) -> RootDir = code:root_dir(), Erl = filename:join([RootDir, "bin", "erl"]), - EscapedQuote = - case os:type() of - {win32,_} -> "\\\""; - _ -> "\"" - end, Args = ["-boot_var", "RELTOOL_EXT_LIB", TargetLibDir, "-boot", filename:join(TargetRelVsnDir,RelName), - "-sasl", "releases_dir", EscapedQuote++TargetRelDir++EscapedQuote], + "-sasl", "releases_dir", "\""++TargetRelDir++"\""], {ok, Node} = ?msym({ok, _}, start_node(?NODE_NAME, Erl, Args)), ?msym(RootDir, rpc:call(Node, code, root_dir, [])), wait_for_app(Node,sasl,50), @@ -2518,10 +2513,7 @@ undefined_regexp(_Config) -> %% Library functions erl_libs() -> - case os:getenv("ERL_LIBS") of - false -> []; - LibStr -> string:tokens(LibStr, ":;") - end. + string:tokens(os:getenv("ERL_LIBS", ""), ":;"). datadir(Config) -> %% Removes the trailing slash... diff --git a/lib/runtime_tools/src/system_information.erl b/lib/runtime_tools/src/system_information.erl index 04cc33e1ad..0796e96ffc 100644 --- a/lib/runtime_tools/src/system_information.erl +++ b/lib/runtime_tools/src/system_information.erl @@ -577,10 +577,7 @@ get_beam_name() -> false -> ""; true -> ".smp" end, - Beam = case os:getenv("EMU") of - false -> "beam"; - Value -> Value - end, + Beam = os:getenv("EMU", "beam"), Beam ++ Type ++ Flavor. %% Check runtime dependencies... diff --git a/lib/runtime_tools/test/erts_alloc_config_SUITE.erl b/lib/runtime_tools/test/erts_alloc_config_SUITE.erl index 8ea04e1767..9be1565a02 100644 --- a/lib/runtime_tools/test/erts_alloc_config_SUITE.erl +++ b/lib/runtime_tools/test/erts_alloc_config_SUITE.erl @@ -79,12 +79,7 @@ basic(Config) when is_list(Config) -> SbctMod = " +MBsbct 1024 +MHsbct 4096", %% Make sure we have enabled allocators - ZFlgs = case os:getenv("ERL_ZFLAGS") of - FlgString when is_list(FlgString) -> - FlgString; - _ -> - "" - end ++ " +Mea max +Mea config", + ZFlgs = os:getenv("ERL_ZFLAGS", "") ++ " +Mea max +Mea config", ?line os:putenv("ERL_ZFLAGS", ZFlgs ++ SbctMod), diff --git a/lib/sasl/doc/src/appup.xml b/lib/sasl/doc/src/appup.xml index 95f315d269..f0f41b0c7e 100644 --- a/lib/sasl/doc/src/appup.xml +++ b/lib/sasl/doc/src/appup.xml @@ -180,15 +180,28 @@ <c>Mod</c> when upgrading, and vice versa when downgrading.</p> <pre> {add_module, Mod} +{add_module, Mod, DepMods} Mod = atom() + DepMods = [Mod] </pre> <p>Loads a new module <c>Mod</c>.</p> + <p><c>DepMods</c> defaults to [] and defines which other modules + <c>Mod</c> is dependent on. In <c>relup</c>, instructions + related to these modules will come before the instruction for + loading <c>Mod</c> when upgrading, and vice versa when + downgrading.</p> <pre> {delete_module, Mod} +{delete_module, Mod, DepMods} Mod = atom() </pre> <p>Deletes a module <c>Mod</c> using the low-level instructions <c>remove</c> and <c>purge</c>.</p> + <p><c>DepMods</c> defaults to [] and defines which other modules + <c>Mod</c> is dependent on. 
In <c>relup</c>, instructions + related to these modules will come before the instruction for + removing <c>Mod</c> when upgrading, and vice versa when + downgrading.</p> <pre> {add_application, Application} {add_application, Application, Type} diff --git a/lib/sasl/src/sasl.appup.src b/lib/sasl/src/sasl.appup.src index e789853eea..af04d007ac 100644 --- a/lib/sasl/src/sasl.appup.src +++ b/lib/sasl/src/sasl.appup.src @@ -17,9 +17,7 @@ %% %CopyrightEnd% {"%VSN%", %% Up from - max one major revision back - [{<<"2\\.4(\\.[0-9]+)*">>,[restart_new_emulator]}, %% R17 - {<<"2\\.3(\\.[0-9]+)*">>,[restart_new_emulator]}], %% R16 + [{<<"2\\.4(\\.[0-9]+)*">>,[restart_new_emulator]}], % OTP-17 %% Down to - max one major revision back - [{<<"2\\.4(\\.[0-9]+)*">>,[restart_new_emulator]}, %% R17 - {<<"2\\.3(\\.[0-9]+)*">>,[restart_new_emulator]}] %% R16 + [{<<"2\\.4(\\.[0-9]+)*">>,[restart_new_emulator]}] % OTP-17 }. diff --git a/lib/sasl/src/systools_rc.erl b/lib/sasl/src/systools_rc.erl index 76f753c3d0..11e097996c 100644 --- a/lib/sasl/src/systools_rc.erl +++ b/lib/sasl/src/systools_rc.erl @@ -32,7 +32,6 @@ %% {load_module, Mod, PrePurge, PostPurge, [Mod]} %% {add_module, Mod} %% {add_module, Mod, [Mod]} -%% {remove_module, Mod, PrePurge, PostPurge, [Mod]} %% {restart_application, Appl} %% {add_application, Appl, Type} %% {remove_application, Appl} @@ -59,7 +58,7 @@ %% High-level instructions that contain dependencies %% --define(DEP_INSTRS, [update, load_module, add_module, remove_module]). +-define(DEP_INSTRS, [update, load_module, add_module, delete_module]). %%----------------------------------------------------------------- %% translate_scripts(Scripts, Appls, PreAppls) -> Res @@ -107,9 +106,6 @@ expand_script([I|Script]) -> {update, Mod, Change, Mods} when Change==soft, is_list(Mods) -> {update, Mod, Change, brutal_purge,brutal_purge, Mods}; - {delete_module, Mod} -> - [{remove, {Mod, brutal_purge, brutal_purge}}, - {purge, [Mod]}]; {add_application, Application} -> {add_application, Application, permanent}; _ -> @@ -301,6 +297,8 @@ normalize_instrs(Script) -> PostPurge, Mods}; ({add_module, Mod}) -> {add_module, Mod, []}; + ({delete_module, Mod}) -> + {delete_module, Mod, []}; (I) -> I end, Script). @@ -412,7 +410,7 @@ translate_add_module_instrs(Before, After) -> %%----------------------------------------------------------------- %%----------------------------------------------------------------- -%% Translates update, load_module and remove_module, and reorder the +%% Translates update, load_module and delete_module, and reorder the %% instructions according to dependencies. Leaves other instructions %% unchanged. %%----------------------------------------------------------------- @@ -538,7 +536,7 @@ get_dependent_instructions(G, WCs, Mod) -> %% Instructions are in order of dependency. 
%% Appls = [#application] %% -%% Instructions translated are: update, load_module, and remove_module +%% Instructions translated are: update, load_module, and delete_module %% %% Before = [{load_object_code, ...}] %% After = [{suspend, ...}] ++ CodeInstrs ++ [{resume, ...}] @@ -576,17 +574,19 @@ translate_dep_to_low(Mode, Instructions, Appls) -> end, RevUpdateMods)}] end, - LoadRemoveInstrs = + LoadRemoveInstrs0 = filtermap(fun({update, Mod, _, _, _, PreP, PostP, _}) -> {true, {load, {Mod, PreP, PostP}}}; ({load_module, Mod, PreP, PostP, _}) -> {true, {load, {Mod, PreP, PostP}}}; - ({remove_module, Mod, PreP, PostP, _}) -> - {true, {remove, {Mod, PreP, PostP}}}; + ({delete_module, Mod, _}) -> + {true,[{remove, {Mod, brutal_purge, brutal_purge}}, + {purge, [Mod]}]}; (_) -> false end, Instructions), - RevLoadRemoveInstrs = lists:reverse(LoadRemoveInstrs), + LoadRemoveInstrs = lists:flatten(LoadRemoveInstrs0), + RevLoadRemoveInstrs = lists:flatten(lists:reverse(LoadRemoveInstrs0)), %% The order of loading object code is unimportant. The order %% chosen is the order of dependency. @@ -781,10 +781,10 @@ check_op({add_module, Mod, Mods}) -> check_mod(Mod), check_list(Mods), lists:foreach(fun(M) -> check_mod(M) end, Mods); -check_op({remove_module, Mod, PrePurge, PostPurge, Mods}) -> +check_op({delete_module, Mod}) -> + check_mod(Mod); +check_op({delete_module, Mod, Mods}) -> check_mod(Mod), - check_purge(PrePurge), - check_purge(PostPurge), check_list(Mods), lists:foreach(fun(M) -> check_mod(M) end, Mods); check_op({remove_application, Appl}) -> diff --git a/lib/sasl/test/release_handler_SUITE_data/lib/README b/lib/sasl/test/release_handler_SUITE_data/lib/README index ffb8c5120b..5d17950b0b 100644 --- a/lib/sasl/test/release_handler_SUITE_data/lib/README +++ b/lib/sasl/test/release_handler_SUITE_data/lib/README @@ -21,7 +21,7 @@ start version, includes b_lib and b_server b-2.0: can be upgraded to from b-1.0. -Removes b_lib (soft_purge) and updates b_server (brutal_purge) +Removes b_lib (brutal_purge) and updates b_server (soft_purge) * The diff in purge method is important for test "check_and_purge", in order to check that the purge option to check_install_release works for both methods. diff --git a/lib/sasl/test/release_handler_SUITE_data/lib/b-2.0/ebin/b.appup b/lib/sasl/test/release_handler_SUITE_data/lib/b-2.0/ebin/b.appup index 001255a88c..9df590e63f 100644 --- a/lib/sasl/test/release_handler_SUITE_data/lib/b-2.0/ebin/b.appup +++ b/lib/sasl/test/release_handler_SUITE_data/lib/b-2.0/ebin/b.appup @@ -1,6 +1,6 @@ %% -*- erlang -*- {"2.0", - [{"1.0",[{remove_module,b_lib,soft_purge,soft_purge,[]}, - {update,b_server,{advanced,[]}}]}], + [{"1.0",[{delete_module,b_lib}, + {update,b_server,{advanced,[]},soft_purge,soft_purge,[]}]}], [{"1.0",[{add_module,b_lib}, - {update,b_server,{advanced,[]}}]}]}. + {update,b_server,{advanced,[]},soft_purge,soft_purge,[]}]}]}. diff --git a/lib/sasl/test/systools_rc_SUITE.erl b/lib/sasl/test/systools_rc_SUITE.erl index 5efab7c028..1afef986d2 100644 --- a/lib/sasl/test/systools_rc_SUITE.erl +++ b/lib/sasl/test/systools_rc_SUITE.erl @@ -22,14 +22,16 @@ -include_lib("sasl/src/systools.hrl"). -export([all/0,groups/0,init_per_group/2,end_per_group/2, syntax_check/1, translate/1, translate_app/1, - translate_emulator_restarts/1]). + translate_emulator_restarts/1, + translate_add_delete_module/1]). 
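The dependency-carrying forms documented in appup.xml above can be used directly in an .appup file; systools then orders the generated low-level load/remove instructions accordingly. A hedged example with made-up module names, mirroring the shapes exercised by the translate_add_delete_module test added below:

    {"2.0",
     %% upgrade from 1.0: remove old_mod before loading new_mod, which in
     %% turn must be loaded before foo is reloaded against it
     [{"1.0", [{delete_module, old_mod, [new_mod]},
               {add_module, new_mod, [foo]},
               {load_module, foo}]}],
     %% downgrade back to 1.0: reintroduce old_mod and drop new_mod
     [{"1.0", [{add_module, old_mod},
               {delete_module, new_mod},
               {load_module, foo}]}]}.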
%%----------------------------------------------------------------- %% erl -compile systools_rc_SUITE @i ../src/ @i ../../test_server/include/ %% c(systools_rc_SUITE, [{i, "../src"}, {i, "../../test_server/include"}]). %%----------------------------------------------------------------- all() -> - [syntax_check, translate, translate_app, translate_emulator_restarts]. + [syntax_check, translate, translate_app, translate_emulator_restarts, + translate_add_delete_module]. groups() -> []. @@ -707,3 +709,59 @@ translate_emulator_restarts(_Config) -> restart_emulator] = X6, ok. + +translate_add_delete_module(_Config) -> + PreApps = + [#application{name = test, + description = "TEST", + vsn = "0.1", + modules = [foo,bar,baz,old_mod], + regs = [], + mod = {sasl, []}}], + Apps = + [#application{name = test, + description = "TEST", + vsn = "1.0", + modules = [foo,bar,baz,new_mod], + regs = [], + mod = {sasl, []}}], + S1 = [ + {delete_module, old_mod}, + {add_module, new_mod}, + {load_module, foo} + ], + {ok, X1} = systools_rc:translate_scripts([S1], Apps, PreApps), + [{load_object_code,{test,"1.0",[new_mod,foo]}}, + point_of_no_return, + {remove,{old_mod,brutal_purge,brutal_purge}}, + {purge,[old_mod]}, + {load,{new_mod,brutal_purge,brutal_purge}}, + {load,{foo,brutal_purge,brutal_purge}}] = X1, + + S2 = [ + {delete_module, old_mod}, + {add_module, new_mod, [foo]}, + {load_module, foo} + ], + {ok, X2} = systools_rc:translate_scripts([S2], Apps, PreApps), + [{load_object_code,{test,"1.0",[new_mod,foo]}}, + point_of_no_return, + {remove,{old_mod,brutal_purge,brutal_purge}}, + {purge,[old_mod]}, + {load,{foo,brutal_purge,brutal_purge}}, + {load,{new_mod,brutal_purge,brutal_purge}}] = X2, + + S3 = [ + {delete_module, old_mod, [new_mod]}, + {add_module, new_mod, [foo]}, + {load_module, foo} + ], + {ok, X3} = systools_rc:translate_scripts([S3], Apps, PreApps), + [{load_object_code,{test,"1.0",[new_mod,foo]}}, + point_of_no_return, + {load,{foo,brutal_purge,brutal_purge}}, + {load,{new_mod,brutal_purge,brutal_purge}}, + {remove,{old_mod,brutal_purge,brutal_purge}}, + {purge,[old_mod]}] = X3, + + ok. diff --git a/lib/sasl/test/test_lib.hrl b/lib/sasl/test/test_lib.hrl index c8a4e92f24..b16c4ac34c 100644 --- a/lib/sasl/test/test_lib.hrl +++ b/lib/sasl/test/test_lib.hrl @@ -1,3 +1,3 @@ -define(ertsvsn,"4.4"). --define(kernelvsn,"2.16.4"). --define(stdlibvsn,"1.19.4"). +-define(kernelvsn,"3.0"). +-define(stdlibvsn,"2.0"). diff --git a/lib/ssh/src/ssh_cli.erl b/lib/ssh/src/ssh_cli.erl index 18841e3d2d..de6d246403 100644 --- a/lib/ssh/src/ssh_cli.erl +++ b/lib/ssh/src/ssh_cli.erl @@ -98,7 +98,7 @@ handle_ssh_msg({ssh_cm, ConnectionHandler, Pty = Pty0#ssh_pty{width = Width, height = Height, pixel_width = PixWidth, pixel_height = PixHeight}, - {Chars, NewBuf} = io_request({window_change, Pty0}, Buf, Pty), + {Chars, NewBuf} = io_request({window_change, Pty0}, Buf, Pty, undefined), write_chars(ConnectionHandler, ChannelId, Chars), {ok, State#state{pty = Pty, buf = NewBuf}}; @@ -188,7 +188,7 @@ handle_msg({Group, tty_geometry}, #state{group = Group, handle_msg({Group, Req}, #state{group = Group, buf = Buf, pty = Pty, cm = ConnectionHandler, channel = ChannelId} = State) -> - {Chars, NewBuf} = io_request(Req, Buf, Pty), + {Chars, NewBuf} = io_request(Req, Buf, Pty, Group), write_chars(ConnectionHandler, ChannelId, Chars), {ok, State#state{buf = NewBuf}}; @@ -263,40 +263,49 @@ eval(Error) -> %%% displaying device... %%% We are *not* really unicode aware yet, we just filter away characters %%% beyond the latin1 range. 
We however handle the unicode binaries... -io_request({window_change, OldTty}, Buf, Tty) -> +io_request({window_change, OldTty}, Buf, Tty, _Group) -> window_change(Tty, OldTty, Buf); -io_request({put_chars, Cs}, Buf, Tty) -> +io_request({put_chars, Cs}, Buf, Tty, _Group) -> put_chars(bin_to_list(Cs), Buf, Tty); -io_request({put_chars, unicode, Cs}, Buf, Tty) -> +io_request({put_chars, unicode, Cs}, Buf, Tty, _Group) -> put_chars(unicode:characters_to_list(Cs,unicode), Buf, Tty); -io_request({insert_chars, Cs}, Buf, Tty) -> +io_request({insert_chars, Cs}, Buf, Tty, _Group) -> insert_chars(bin_to_list(Cs), Buf, Tty); -io_request({insert_chars, unicode, Cs}, Buf, Tty) -> +io_request({insert_chars, unicode, Cs}, Buf, Tty, _Group) -> insert_chars(unicode:characters_to_list(Cs,unicode), Buf, Tty); -io_request({move_rel, N}, Buf, Tty) -> +io_request({move_rel, N}, Buf, Tty, _Group) -> move_rel(N, Buf, Tty); -io_request({delete_chars,N}, Buf, Tty) -> +io_request({delete_chars,N}, Buf, Tty, _Group) -> delete_chars(N, Buf, Tty); -io_request(beep, Buf, _Tty) -> +io_request(beep, Buf, _Tty, _Group) -> {[7], Buf}; %% New in R12 -io_request({get_geometry,columns},Buf,Tty) -> +io_request({get_geometry,columns},Buf,Tty, _Group) -> {ok, Tty#ssh_pty.width, Buf}; -io_request({get_geometry,rows},Buf,Tty) -> +io_request({get_geometry,rows},Buf,Tty, _Group) -> {ok, Tty#ssh_pty.height, Buf}; -io_request({requests,Rs}, Buf, Tty) -> - io_requests(Rs, Buf, Tty, []); -io_request(tty_geometry, Buf, Tty) -> - io_requests([{move_rel, 0}, {put_chars, unicode, [10]}], Buf, Tty, []); +io_request({requests,Rs}, Buf, Tty, Group) -> + io_requests(Rs, Buf, Tty, [], Group); +io_request(tty_geometry, Buf, Tty, Group) -> + io_requests([{move_rel, 0}, {put_chars, unicode, [10]}], + Buf, Tty, [], Group); %{[], Buf}; -io_request(_R, Buf, _Tty) -> + +%% New in 18 +io_request({put_chars_sync, Class, Cs, Reply}, Buf, Tty, Group) -> + %% We handle these asynchronous for now, if we need output guarantees + %% we have to handle these synchronously + Group ! {reply, Reply}, + io_request({put_chars, Class, Cs}, Buf, Tty, Group); + +io_request(_R, Buf, _Tty, _Group) -> {[], Buf}. -io_requests([R|Rs], Buf, Tty, Acc) -> - {Chars, NewBuf} = io_request(R, Buf, Tty), - io_requests(Rs, NewBuf, Tty, [Acc|Chars]); -io_requests([], Buf, _Tty, Acc) -> +io_requests([R|Rs], Buf, Tty, Acc, Group) -> + {Chars, NewBuf} = io_request(R, Buf, Tty, Group), + io_requests(Rs, NewBuf, Tty, [Acc|Chars], Group); +io_requests([], Buf, _Tty, Acc, _Group) -> {Acc, Buf}. %%% return commands for cursor navigation, assume everything is ansi diff --git a/lib/ssh/src/ssh_connection.erl b/lib/ssh/src/ssh_connection.erl index c66f810948..e97bf9ceeb 100644 --- a/lib/ssh/src/ssh_connection.erl +++ b/lib/ssh/src/ssh_connection.erl @@ -200,7 +200,7 @@ ptty_alloc(ConnectionHandler, Channel, Options, TimeOut) -> {Width, PixWidth} = pty_default_dimensions(width, Options), {Hight, PixHight} = pty_default_dimensions(hight, Options), pty_req(ConnectionHandler, Channel, - proplists:get_value(term, Options, default_term()), + proplists:get_value(term, Options, os:getenv("TERM", ?DEFAULT_TERMINAL)), proplists:get_value(width, Options, Width), proplists:get_value(hight, Options, Hight), proplists:get_value(pixel_widh, Options, PixWidth), @@ -1299,11 +1299,3 @@ decode_ip(Addr) when is_binary(Addr) -> {error,_} -> Addr; {ok,A} -> A end. - -default_term() -> - case os:getenv("TERM") of - false -> - ?DEFAULT_TERMINAL; - Str when is_list(Str)-> - Str - end. 
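With default_term/0 gone from ssh_connection, the terminal type for a pty request now defaults to the TERM environment variable, falling back to ?DEFAULT_TERMINAL. A rough usage sketch on an already established connection (dimensions arbitrary; options that are left out fall back to their defaults, and "vt100" here is only an assumed fallback value):

    {ok, ChannelId} = ssh_connection:session_channel(ConnectionRef, infinity),
    success = ssh_connection:ptty_alloc(ConnectionRef, ChannelId,
                                        [{term, os:getenv("TERM", "vt100")},
                                         {width, 80}]).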
diff --git a/lib/ssh/test/ssh_connection_SUITE.erl b/lib/ssh/test/ssh_connection_SUITE.erl index e3871b3feb..c9441a46b0 100644 --- a/lib/ssh/test/ssh_connection_SUITE.erl +++ b/lib/ssh/test/ssh_connection_SUITE.erl @@ -21,6 +21,7 @@ -module(ssh_connection_SUITE). -include_lib("common_test/include/ct.hrl"). +-include_lib("ssh/src/ssh_connect.hrl"). -compile(export_all). @@ -269,7 +270,7 @@ ptty_alloc(Config) when is_list(Config) -> {user_interaction, false}]), {ok, ChannelId} = ssh_connection:session_channel(ConnectionRef, infinity), success = ssh_connection:ptty_alloc(ConnectionRef, ChannelId, - [{term, default_term()}, {width, 70}, {high, 20}]), + [{term, os:getenv("TERM", ?DEFAULT_TERMINAL)}, {width, 70}, {high, 20}]), ssh:close(ConnectionRef). @@ -282,7 +283,7 @@ ptty_alloc_pixel(Config) when is_list(Config) -> {user_interaction, false}]), {ok, ChannelId} = ssh_connection:session_channel(ConnectionRef, infinity), success = ssh_connection:ptty_alloc(ConnectionRef, ChannelId, - [{term, default_term()}, {pixel_widh, 630}, {pixel_hight, 470}]), + [{term, os:getenv("TERM", ?DEFAULT_TERMINAL)}, {pixel_widh, 630}, {pixel_hight, 470}]), ssh:close(ConnectionRef). %%-------------------------------------------------------------------- @@ -647,11 +648,3 @@ ssh_exec(Cmd) -> spawn(fun() -> io:format(Cmd ++ "\n") end). - -default_term() -> - case os:getenv("TERM") of - false -> - "vt100"; - Str when is_list(Str)-> - Str - end. diff --git a/lib/ssl/doc/src/Makefile b/lib/ssl/doc/src/Makefile index fb12499ef7..cfbf98f6e3 100644 --- a/lib/ssl/doc/src/Makefile +++ b/lib/ssl/doc/src/Makefile @@ -1,7 +1,7 @@ # # %CopyrightBegin% # -# Copyright Ericsson AB 1999-2012. All Rights Reserved. +# Copyright Ericsson AB 1999-2015. All Rights Reserved. # # The contents of this file are subject to the Erlang Public License, # Version 1.1, (the "License"); you may not use this file except in @@ -37,7 +37,7 @@ RELSYSDIR = $(RELEASE_PATH)/lib/$(APPLICATION)-$(VSN) # Target Specs # ---------------------------------------------------- XML_APPLICATION_FILES = refman.xml -XML_REF3_FILES = ssl.xml ssl_session_cache_api.xml +XML_REF3_FILES = ssl.xml ssl_crl_cache.xml ssl_crl_cache.xml ssl_session_cache_api.xml XML_REF6_FILES = ssl_app.xml XML_PART_FILES = release_notes.xml usersguide.xml diff --git a/lib/ssl/doc/src/refman.xml b/lib/ssl/doc/src/refman.xml index ae11198edb..d5f2219af9 100644 --- a/lib/ssl/doc/src/refman.xml +++ b/lib/ssl/doc/src/refman.xml @@ -4,7 +4,7 @@ <application xmlns:xi="http://www.w3.org/2001/XInclude"> <header> <copyright> - <year>1999</year><year>2013</year> + <year>1999</year><year>2015</year> <holder>Ericsson AB. All Rights Reserved.</holder> </copyright> <legalnotice> @@ -28,23 +28,10 @@ <rev>B</rev> <file>refman.sgml</file> </header> - <description> - <p>The <em>SSL</em> application provides secure communication over - sockets. - </p> - <p>This product includes software developed by the OpenSSL Project for - use in the OpenSSL Toolkit (http://www.openssl.org/). - </p> - <p>This product includes cryptographic software written by Eric Young - ([email protected]). - </p> - <p>This product includes software written by Tim Hudson - ([email protected]). 
- </p> - <p>For full OpenSSL and SSLeay license texts, see <seealso marker="licenses#licenses">Licenses</seealso>.</p> - </description> <xi:include href="ssl_app.xml"/> <xi:include href="ssl.xml"/> + <xi:include href="ssl_crl_cache.xml"/> + <xi:include href="ssl_crl_cache_api.xml"/> <xi:include href="ssl_session_cache_api.xml"/> </application> diff --git a/lib/ssl/doc/src/ssl.xml b/lib/ssl/doc/src/ssl.xml index 0c042f8571..47100c0d81 100644 --- a/lib/ssl/doc/src/ssl.xml +++ b/lib/ssl/doc/src/ssl.xml @@ -38,7 +38,9 @@ <item>ssl requires the crypto and public_key applications.</item> <item>Supported SSL/TLS-versions are SSL-3.0, TLS-1.0, TLS-1.1 and TLS-1.2.</item> - <item>For security reasons sslv2 is not supported.</item> + <item>For security reasons SSL-2.0 is not supported.</item> + <item>For security reasons SSL-3.0 is no longer supported by default, + but may be configured.</item> <item>Ephemeral Diffie-Hellman cipher suites are supported but not Diffie Hellman Certificates cipher suites.</item> <item>Elliptic Curve cipher suites are supported if crypto @@ -49,9 +51,9 @@ <item>IDEA cipher suites are not supported as they have become deprecated by the latest TLS spec so there is not any real motivation to implement them.</item> - <item>CRL and policy certificate extensions are not supported - yet. However CRL verification is supported by public_key, only not integrated - in ssl yet. </item> + <item>CRL validation is supported.</item> + <item>Policy certificate extensions are not supported + yet. </item> <item>Support for 'Server Name Indication' extension client side (RFC 6066 section 3).</item> </list> @@ -92,7 +94,7 @@ {log_alert, boolean()} | {server_name_indication, hostname() | disable} </c></p> - <p><c>transportoption() = {cb_info, {CallbackModule::atom(), DataTag::atom(), ClosedTag::atom(), ErrTag:atom()}} + <p><c>transportoption() = {cb_info, {CallbackModule :: atom(), DataTag :: atom(), ClosedTag :: atom(), ErrTag:atom()}} - defaults to {gen_tcp, tcp, tcp_closed, tcp_error}. Can be used to customize the transport layer. The callback module must implement a reliable transport protocol and behave as gen_tcp and in addition have functions corresponding to @@ -136,7 +138,7 @@ </c></p> <p><c>cipher() = rc4_128 | des_cbc | '3des_ede_cbc' - | aes_128_cbc | aes_256_cbc </c></p> + | aes_128_cbc | aes_256_cbc | aes_128_gcm | aes_256_gcm </c></p> <p> <c>hash() = md5 | sha </c></p> @@ -299,10 +301,47 @@ fun(OtpCert :: #'OTPCertificate'{}, Event :: {bad_cert, Reason :: atom() | {revo <item> Possible such reasons see <seealso marker="public_key:public_key#pkix_path_validation-3"> public_key:pkix_path_validation/3 </seealso></item> </taglist> + </item> + + <tag>{crl_check, boolean() | peer | best_effort }</tag> + <item> + Perform CRL (Certificate Revocation List) verification + <seealso marker="public_key:public_key#pkix_crl_validate-3"> + (public_key:pkix_crls_validate/3)</seealso> on all the certificates during the path validation + <seealso + marker="public_key:public_key#pkix_path_validation-3">(public_key:pkix_path_validation/3) + </seealso> + of the certificate chain. Defaults to false. 
+ + <p><c>peer</c> - check is only performed on + the peer certificate.</p> + + <p><c>best_effort</c> - if certificate revocation status can not be determined + it will be accepted as valid.</p> + <p>The CA certificates specified for the connection will be used to + construct the certificate chain validating the CRLs.</p> + + <p>The CRLs will be fetched from a local or external cache + <seealso marker="ssl:ssl_crl_cache_api">ssl_crl_cache_api(3)</seealso>.</p> </item> + <tag>{crl_cache, {Module :: atom(), {DbHandle :: internal | term(), Args :: list()}}}</tag> + <item> + <p>Module defaults to ssl_crl_cache with <c> DbHandle </c> internal and an + empty argument list. The following arguments may be specified for the internal cache.</p> + <taglist> + <tag>{http, timeout()}</tag> + <item> + Enables fetching of CRLs specified as http URIs in<seealso + marker="public_key:cert_records"> X509 cerificate extensions.</seealso> + Requires the OTP inets application. + </item> + </taglist> + </item> + <tag>{partial_chain, fun(Chain::[DerCert]) -> {trusted_ca, DerCert} | unknown_ca </tag> + <item> Claim an intermediat CA in the chain as trusted. TLS will then perform the public_key:pkix_path_validation/3 with the selected CA as trusted anchor and the rest of the chain. @@ -311,7 +350,7 @@ fun(OtpCert :: #'OTPCertificate'{}, Event :: {bad_cert, Reason :: atom() | {revo <tag>{versions, [protocol()]}</tag> <item>TLS protocol versions that will be supported by started clients and servers. This option overrides the application environment option <c>protocol_version</c>. If the - environment option is not set it defaults to all versions supported by the SSL application. See also + environment option is not set it defaults to all versions, except SSL-3.0, supported by the SSL application. See also <seealso marker="ssl:ssl_app">ssl(6)</seealso> </item> @@ -945,19 +984,37 @@ fun(srp, Username :: string(), UserState :: term()) -> </func> <func> - <name>versions() -> - [{SslAppVer, SupportedSslVer, AvailableSslVsn}]</name> + <name>versions() -> [versions_info()]</name> <fsummary>Returns version information relevant for the ssl application.</fsummary> <type> - <v>SslAppVer = string()</v> - <v>SupportedSslVer = [protocol()]</v> - <v>AvailableSslVsn = [protocol()]</v> + <v>versions_info() = {app_vsn, string()} | {supported | available, [protocol()] </v> </type> <desc> <p> Returns version information relevant for the - ssl application.</p> + ssl application. + </p> + <taglist> + <tag>app_vsn</tag> + <item> The application version of the OTP ssl application.</item> + + <tag>supported</tag> + + <item>TLS/SSL versions supported by default. + Overridden by a versions option on + <seealso marker="#connect-2"> connect/[2,3,4]</seealso>, <seealso + marker="#listen-2"> listen/2</seealso> and <seealso + marker="#ssl_accept-2">ssl_accept/[1,2,3]</seealso>. For the + negotiated TLS/SSL version see <seealso + marker="#connection_info-1">ssl:connection_info/1 + </seealso></item> + + <tag>available</tag> + <item>All TLS/SSL versions that the Erlang ssl application + can support. Note that TLS 1.2 requires sufficient support + from the crypto application. 
</item> + </taglist> </desc> </func> <func> diff --git a/lib/ssl/doc/src/ssl_app.xml b/lib/ssl/doc/src/ssl_app.xml index f1377cabda..e3a3fc27f2 100644 --- a/lib/ssl/doc/src/ssl_app.xml +++ b/lib/ssl/doc/src/ssl_app.xml @@ -75,10 +75,10 @@ </p> </item> - <tag><c><![CDATA[session_cb_init_args = list() <optional>]]></c></tag> + <tag><c><![CDATA[session_cb_init_args = proplist:proplist() <optional>]]></c></tag> <item> <p> - List of arguments to the init function in session cache + List of additional user defined arguments to the init function in session cache callback module, defaults to []. </p> </item> diff --git a/lib/ssl/doc/src/ssl_crl_cache.xml b/lib/ssl/doc/src/ssl_crl_cache.xml new file mode 100644 index 0000000000..b291c7b633 --- /dev/null +++ b/lib/ssl/doc/src/ssl_crl_cache.xml @@ -0,0 +1,66 @@ +<?xml version="1.0" encoding="utf-8" ?> +<!DOCTYPE erlref SYSTEM "erlref.dtd"> + +<erlref> + <header> + <copyright> + <year>2015</year><year>2015</year> + <holder>Ericsson AB. All Rights Reserved.</holder> + </copyright> + <legalnotice> + The contents of this file are subject to the Erlang Public License, + Version 1.1, (the "License"); you may not use this file except in + compliance with the License. You should have received a copy of the + Erlang Public License along with this software. If not, it can be + retrieved online at http://www.erlang.org/. + + Software distributed under the License is distributed on an "AS IS" + basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See + the License for the specific language governing rights and limitations + under the License. + </legalnotice> + <title>ssl_crl_cache</title> + <file>ssl_crl_cache.xml</file> + </header> + + <module>ssl_crl_cache</module> + <modulesummary>CRL cache </modulesummary> + <description> + <p> + Implements an internal CRL (Certificate Revocation List) cache. + In addition to implementing the <seealso + marker="ssl_cache_crl_api"> ssl_cache_crl_api</seealso> behaviour + the following functions are available. + </p> + </description> + + <funcs> + <func> + <name>insert(CRLSrc) -> ok | {error, Reason}</name> + <name>insert(URI, CRLSrc) -> ok | {error, Reason}</name> + <fsummary> </fsummary> + <type> + <v> CRLSrc = {file, string()} | {der, [ <seealso + marker="public_key:public_key"> der_encoded() </seealso> ]}</v> + <v> URI = http_uri:uri()</v> + <v> Reason = term()</v> + </type> + <desc> + Insert CRLs into the ssl applications local cache. + </desc> + </func> + + <func> + <name>delete(Entries) -> ok | {error, Reason} </name> + <fsummary> </fsummary> + <type> + <v> Entries = http_uri:uri() | {file, string()} | {der, [<seealso + marker="public_key:public_key"> der_encoded() </seealso>]}</v> + <v> Reason = term()</v> + </type> + <desc> + Delete CRLs from the ssl applications local cache. + </desc> + </func> + </funcs> +</erlref>
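Putting the new CRL options together: CRLs can be pre-loaded into the default cache and revocation checking enabled per connection. A hedged sketch (host, port and file names are made up; the crl_cache tuple shows the documented default module with an http timeout for fetching distribution-point CRLs):

    %% optionally seed the local cache with a CRL from disk
    ok = ssl_crl_cache:insert({file, "crls/intermediate.crl"}),
    {ok, Socket} =
        ssl:connect("server.example.com", 443,
                    [{verify, verify_peer},
                     {cacertfile, "cacerts.pem"},
                     {crl_check, peer},
                     {crl_cache, {ssl_crl_cache, {internal, [{http, 5000}]}}}]).

Fetching CRLs over http from the certificate distribution points additionally requires the inets application to be started, as noted in the option documentation above.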
\ No newline at end of file diff --git a/lib/ssl/doc/src/ssl_crl_cache_api.xml b/lib/ssl/doc/src/ssl_crl_cache_api.xml new file mode 100644 index 0000000000..3f518496be --- /dev/null +++ b/lib/ssl/doc/src/ssl_crl_cache_api.xml @@ -0,0 +1,99 @@ +<?xml version="1.0" encoding="utf-8" ?> +<!DOCTYPE erlref SYSTEM "erlref.dtd"> + +<erlref> + <header> + <copyright> + <year>2015</year><year>2015</year> + <holder>Ericsson AB. All Rights Reserved.</holder> + </copyright> + <legalnotice> + The contents of this file are subject to the Erlang Public License, + Version 1.1, (the "License"); you may not use this file except in + compliance with the License. You should have received a copy of the + Erlang Public License along with this software. If not, it can be + retrieved online at http://www.erlang.org/. + + Software distributed under the License is distributed on an "AS IS" + basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See + the License for the specific language governing rights and limitations + under the License. + </legalnotice> + <title>ssl_crl_cache_api</title> + <file>ssl_crl_cache_api.xml</file> + </header> + + <module>ssl_crl_cache_api</module> + <modulesummary>API for a SSL/TLS CRL (Certificate Revocation List) cache.</modulesummary> + <description> + <p> + When SSL/TLS performs certificate path validation according to + <url href="http://www.ietf.org/rfc/rfc5280.txt">RFC 5280 </url> + it should also perform CRL validation checks. To enable the CRL + checks the application needs access to CRLs. A database of CRLs + can be set up in many different ways. This module provides the + behavior of the API needed to integrate an arbitrary CRL cache + with the erlang ssl application. It is also used by the + application itself to provide a simple default implementation of + a CRL cache. + </p> + </description> + + <section> + <title>Common Data Types</title> + + <p>The following data types are used in the functions below: + </p> + + <p><c>cache_ref() = opaque()</c></p> + <p> dist_point() = #'DistributionPoint'{} see <seealso + marker="public_key:cert_records"> X509 certificates records</seealso></p> + </section> + + <funcs> + <func> + <name>lookup(DistributionPoint, DbHandle) -> not_available | CRLs </name> + <fsummary> </fsummary> + <type> + <v> DistributionPoint = dist_point() </v> + <v> DbHandle = cache_ref() </v> + <v> CRLs = [<seealso + marker="public_key:public_key">public_key:der_encoded()</seealso>] </v> + </type> + <desc> <p>Lookup the CRLs belonging to the distribution point <c> Distributionpoint </c> </p>. + This function may choose to only look in the cache or to follow distribution point + links depending on how the cache is administrated. 
+ </desc> + </func> + + <func> + <name>select(Issuer, DbHandle) -> CRLs </name> + <fsummary>Select the CRLs in the cache that are issued by <c>Issuer</c></fsummary> + <type> + <v> Issuer = <seealso + marker="public_key:public_key">public_key:issuer_name()</seealso></v> + <v> DbHandle = cache_ref() </v> + </type> + <desc> + <p>Select the CRLs in the cache that are issued by <c>Issuer</c> </p> + </desc> + </func> + + <func> + <name>fresh_crl(DistributionPoint, CRL) -> FreshCRL</name> + <fsummary> <c>fun fresh_crl/2 </c> will be used as input option <c>update_crl</c> to + public_key:pkix_crls_validate/3 </fsummary> + <type> + <v> DistributionPoint = dist_point() </v> + <v> CRL = [<seealso + marker="public_key:public_key">public_key:der_encoded()</seealso>] </v> + <v> FreshCRL = [<seealso + marker="public_key:public_key">public_key:der_encoded()</seealso>] </v> + </type> + <desc> + <p> <c>fun fresh_crl/2 </c> will be used as input option <c>update_crl</c> to + <seealso marker="public_key#pkix_path_validation-3">public_key:pkix_crls_validate/3 </seealso> </p> + </desc> + </func> + </funcs> +</erlref>
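For a custom CRL cache the behaviour boils down to the three callbacks documented above. A minimal do-nothing skeleton (module name hypothetical, every distribution point reported as unavailable):

    -module(my_crl_cache).
    -behaviour(ssl_crl_cache_api).
    -export([lookup/2, select/2, fresh_crl/2]).

    %% no CRLs known for this distribution point
    lookup(_DistributionPoint, _DbHandle) -> not_available.

    %% no CRLs issued by this issuer in the cache
    select(_Issuer, _DbHandle) -> [].

    %% hand the CRLs back unchanged instead of refreshing them
    fresh_crl(_DistributionPoint, CRL) -> CRL.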
\ No newline at end of file diff --git a/lib/ssl/doc/src/ssl_session_cache_api.xml b/lib/ssl/doc/src/ssl_session_cache_api.xml index 82de1784ca..9f87d31e90 100644 --- a/lib/ssl/doc/src/ssl_session_cache_api.xml +++ b/lib/ssl/doc/src/ssl_session_cache_api.xml @@ -4,7 +4,7 @@ <erlref> <header> <copyright> - <year>1999</year><year>2013</year> + <year>1999</year><year>2015</year> <holder>Ericsson AB. All Rights Reserved.</holder> </copyright> <legalnotice> @@ -79,17 +79,25 @@ </func> <func> - <name>init() -> opaque() </name> + <name>init(Args) -> opaque() </name> <fsummary>Return cache reference</fsummary> <type> - <v></v> + <v>Args = proplists:proplist()</v> + <d>Will always include the property {role, client | server}. Currently this + is the only predefined property, there may also be user defined properties. + <seealso marker="ssl_app"> See also application environment variable + session_cb_init_args</seealso> + </d> </type> <desc> <p>Performs possible initializations of the cache and returns a reference to it that will be used as parameter to the other - api functions. Will be called by the cache handling processes - init function, hence putting the same requirements on it as - a normal process init function. + API functions. Will be called by the cache handling processes + init function, hence putting the same requirements on it as a + normal process init function. Note that this function will be + called twice when starting the ssl application, once with the + role client and once with the role server, as the ssl application + must be prepared to take on both roles. </p> </desc> </func> @@ -111,14 +119,14 @@ <func> <name>select_session(Cache, PartialKey) -> [session()]</name> - <fsummary>>Selects sessions that could be reused.</fsummary> + <fsummary>Selects a sessions that could be reused.</fsummary> <type> <v> Cache = cache_ref()</v> <v> PartialKey = partialkey()</v> <v> Session = session()</v> </type> <desc> - <p>Selects sessions that could be reused. Should be callable + <p>Selects a sessions that could be reused. Should be callable from any process. </p> </desc> diff --git a/lib/ssl/src/Makefile b/lib/ssl/src/Makefile index 0c00a650b9..d71d3fc445 100644 --- a/lib/ssl/src/Makefile +++ b/lib/ssl/src/Makefile @@ -1,7 +1,7 @@ # # %CopyrightBegin% # -# Copyright Ericsson AB 1999-2014. All Rights Reserved. +# Copyright Ericsson AB 1999-2015. All Rights Reserved. 
# # The contents of this file are subject to the Erlang Public License, # Version 1.1, (the "License"); you may not use this file except in @@ -38,7 +38,8 @@ RELSYSDIR = $(RELEASE_PATH)/lib/ssl-$(VSN) # ---------------------------------------------------- BEHAVIOUR_MODULES= \ - ssl_session_cache_api + ssl_session_cache_api \ + ssl_crl_cache_api MODULES= \ ssl \ @@ -65,6 +66,8 @@ MODULES= \ ssl_manager \ ssl_session \ ssl_session_cache \ + ssl_crl\ + ssl_crl_cache \ ssl_socket \ ssl_listen_tracker_sup \ tls_record \ @@ -164,5 +167,5 @@ $(EBIN)/ssl_session_cache.$(EMULATOR): ssl_internal.hrl ssl_handshake.hrl $(EBIN)/ssl_session_cache_api.$(EMULATOR): ssl_internal.hrl ssl_handshake.hrl $(EBIN)/ssl_ssl3.$(EMULATOR): ssl_internal.hrl ssl_record.hrl ssl_cipher.hrl $(EBIN)/ssl_tls1.$(EMULATOR): ssl_internal.hrl ssl_record.hrl ssl_cipher.hrl - +$(EBIN)/ssl_cache.$(EMULATOR): ssl_cache.erl ssl_internal.hrl ../../public_key/include/public_key.hrl diff --git a/lib/ssl/src/dtls_connection.erl b/lib/ssl/src/dtls_connection.erl index 508983ddac..f177a8610d 100644 --- a/lib/ssl/src/dtls_connection.erl +++ b/lib/ssl/src/dtls_connection.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2013-2014. All Rights Reserved. +%% Copyright Ericsson AB 2013-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -146,7 +146,7 @@ init([Role, Host, Port, Socket, {SSLOpts0, _} = Options, User, CbInfo]) -> Handshake = ssl_handshake:init_handshake_history(), TimeStamp = calendar:datetime_to_gregorian_seconds({date(), time()}), try ssl_config:init(SSLOpts0, Role) of - {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, OwnCert, Key, DHParams} -> + {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, CRLDbInfo, OwnCert, Key, DHParams} -> Session = State0#state.session, State = State0#state{ tls_handshake_history = Handshake, @@ -155,6 +155,7 @@ init([Role, Host, Port, Socket, {SSLOpts0, _} = Options, User, CbInfo]) -> file_ref_db = FileRefHandle, cert_db_ref = Ref, cert_db = CertDbHandle, + crl_db = CRLDbInfo, session_cache = CacheHandle, private_key = Key, diffie_hellman_params = DHParams}, diff --git a/lib/ssl/src/dtls_record.erl b/lib/ssl/src/dtls_record.erl index ae35dd7ea4..59b3ddec5c 100644 --- a/lib/ssl/src/dtls_record.erl +++ b/lib/ssl/src/dtls_record.erl @@ -120,6 +120,26 @@ get_dtls_records_aux(Data, Acc) -> end. 
encode_plain_text(Type, Version, Data, + #connection_states{current_write = + #connection_state{ + epoch = Epoch, + sequence_number = Seq, + compression_state=CompS0, + security_parameters= + #security_parameters{ + cipher_type = ?AEAD, + compression_algorithm=CompAlg} + }= WriteState0} = ConnectionStates) -> + {Comp, CompS1} = ssl_record:compress(CompAlg, Data, CompS0), + WriteState1 = WriteState0#connection_state{compression_state = CompS1}, + AAD = calc_aad(Type, Version, Epoch, Seq), + {CipherFragment, WriteState} = ssl_record:cipher_aead(dtls_v1:corresponding_tls_version(Version), + Comp, WriteState1, AAD), + CipherText = encode_tls_cipher_text(Type, Version, Epoch, Seq, CipherFragment), + {CipherText, ConnectionStates#connection_states{current_write = + WriteState#connection_state{sequence_number = Seq +1}}}; + +encode_plain_text(Type, Version, Data, #connection_states{current_write=#connection_state{ epoch = Epoch, sequence_number = Seq, @@ -141,16 +161,44 @@ decode_cipher_text(#ssl_tls{type = Type, version = Version, sequence_number = Seq, fragment = CipherFragment} = CipherText, #connection_states{current_read = - #connection_state{compression_state = CompressionS0, - security_parameters = SecParams} = ReadState0} - = ConnnectionStates0) -> - CompressAlg = SecParams#security_parameters.compression_algorithm, + #connection_state{ + compression_state = CompressionS0, + security_parameters= + #security_parameters{ + cipher_type = ?AEAD, + compression_algorithm=CompAlg} + } = ReadState0}= ConnnectionStates0) -> + AAD = calc_aad(Type, Version, Epoch, Seq), + case ssl_record:decipher_aead(dtls_v1:corresponding_tls_version(Version), + CipherFragment, ReadState0, AAD) of + {PlainFragment, ReadState1} -> + {Plain, CompressionS1} = ssl_record:uncompress(CompAlg, + PlainFragment, CompressionS0), + ConnnectionStates = ConnnectionStates0#connection_states{ + current_read = ReadState1#connection_state{ + compression_state = CompressionS1}}, + {CipherText#ssl_tls{fragment = Plain}, ConnnectionStates}; + #alert{} = Alert -> + Alert + end; + +decode_cipher_text(#ssl_tls{type = Type, version = Version, + epoch = Epoch, + sequence_number = Seq, + fragment = CipherFragment} = CipherText, + #connection_states{current_read = + #connection_state{ + compression_state = CompressionS0, + security_parameters= + #security_parameters{ + compression_algorithm=CompAlg} + } = ReadState0}= ConnnectionStates0) -> {PlainFragment, Mac, ReadState1} = ssl_record:decipher(dtls_v1:corresponding_tls_version(Version), CipherFragment, ReadState0, true), MacHash = calc_mac_hash(ReadState1, Type, Version, Epoch, Seq, PlainFragment), case ssl_record:is_correct_mac(Mac, MacHash) of true -> - {Plain, CompressionS1} = ssl_record:uncompress(CompressAlg, + {Plain, CompressionS1} = ssl_record:uncompress(CompAlg, PlainFragment, CompressionS0), ConnnectionStates = ConnnectionStates0#connection_states{ current_read = ReadState1#connection_state{ @@ -368,3 +416,7 @@ calc_mac_hash(#connection_state{mac_secret = MacSecret, mac_hash(Version, MacAlg, MacSecret, SeqNo, Type, Length, Fragment) -> dtls_v1:mac_hash(Version, MacAlg, MacSecret, SeqNo, Type, Length, Fragment). + +calc_aad(Type, {MajVer, MinVer}, Epoch, SeqNo) -> + NewSeq = (Epoch bsl 48) + SeqNo, + <<NewSeq:64/integer, ?BYTE(Type), ?BYTE(MajVer), ?BYTE(MinVer)>>. 
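A worked example of the associated data produced by the calc_aad/4 helper added above: the 16-bit epoch and 48-bit sequence number are packed into one 64-bit field, followed by the record type and the wire version bytes. The concrete values below are illustrative only.

-module(dtls_aad_example).
-export([aad/0]).

aad() ->
    Epoch = 1,
    Seq = 5,
    Type = 23,                      %% application_data record
    {MajVer, MinVer} = {254, 253},  %% DTLS 1.2 as encoded on the wire
    NewSeq = (Epoch bsl 48) + Seq,
    <<NewSeq:64/integer, Type, MajVer, MinVer>>.
    %% = <<0,1,0,0,0,0,0,5, 23,254,253>>, 11 bytes of associated data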
diff --git a/lib/ssl/src/ssl.app.src b/lib/ssl/src/ssl.app.src index 36681e2897..955875fa95 100644 --- a/lib/ssl/src/ssl.app.src +++ b/lib/ssl/src/ssl.app.src @@ -39,6 +39,10 @@ ssl_manager, ssl_pkix_db, ssl_certificate, + %% CRL handling + ssl_crl, + ssl_crl_cache, + ssl_crl_cache_api, %% App structure ssl_app, ssl_sup, diff --git a/lib/ssl/src/ssl.appup.src b/lib/ssl/src/ssl.appup.src index 7986722094..1476336039 100644 --- a/lib/ssl/src/ssl.appup.src +++ b/lib/ssl/src/ssl.appup.src @@ -1,14 +1,14 @@ %% -*- erlang -*- {"%VSN%", [ - {<<"5\\.3\\.[1-7]($|\\..*)">>, [{restart_application, ssl}]}, - {<<"5\\.[0-2]($|\\..*)">>, [{restart_application, ssl}]}, + {<<"6\\..*">>, [{restart_application, ssl}]}, + {<<"5\\..*">>, [{restart_application, ssl}]}, {<<"4\\..*">>, [{restart_application, ssl}]}, {<<"3\\..*">>, [{restart_application, ssl}]} ], [ - {<<"5\\.3\\.[1-7]($|\\..*)">>, [{restart_application, ssl}]}, - {<<"5\\.[0-2]($|\\..*)">>, [{restart_application, ssl}]}, + {<<"6\\..*">>, [{restart_application, ssl}]}, + {<<"5\\..*">>, [{restart_application, ssl}]}, {<<"4\\..*">>, [{restart_application, ssl}]}, {<<"3\\..*">>, [{restart_application, ssl}]} ] diff --git a/lib/ssl/src/ssl.erl b/lib/ssl/src/ssl.erl index 5f4ad7f013..623fa92121 100644 --- a/lib/ssl/src/ssl.erl +++ b/lib/ssl/src/ssl.erl @@ -353,12 +353,8 @@ cipher_suites(openssl) -> || S <- ssl_cipher:filter_suites(ssl_cipher:suites(Version))]; cipher_suites(all) -> Version = tls_record:highest_protocol_version([]), - Supported = ssl_cipher:all_suites(Version) - ++ ssl_cipher:anonymous_suites() - ++ ssl_cipher:psk_suites(Version) - ++ ssl_cipher:srp_suites(), - ssl_cipher:filter_suites([suite_definition(S) || S <- Supported]). - + ssl_cipher:filter_suites([suite_definition(S) + || S <-ssl_cipher:all_suites(Version)]). cipher_suites() -> cipher_suites(erlang). @@ -454,7 +450,7 @@ session_info(#sslsocket{pid = {Listen,_}}) when is_port(Listen) -> versions() -> Vsns = tls_record:supported_protocol_versions(), SupportedVsns = [tls_record:protocol_version(Vsn) || Vsn <- Vsns], - AvailableVsns = ?ALL_SUPPORTED_VERSIONS, + AvailableVsns = ?ALL_AVAILABLE_VERSIONS, %% TODO Add DTLS versions when supported [{ssl_app, ?VSN}, {supported, SupportedVsns}, {available, AvailableVsns}]. 
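With ssl_cipher:all_suites/1 now backing cipher_suites(all), the full suite list can be retrieved and compared with the default one. A hypothetical shell session (output elided; the comparison holds on a typical crypto build):

1> Default = ssl:cipher_suites().      %% default suites, erlang format
2> All = ssl:cipher_suites(all).       %% everything ssl_cipher:all_suites/1 knows
                                       %% about, including the anonymous, PSK,
                                       %% SRP and RC4 families
3> length(All) > length(Default).
true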
@@ -658,7 +654,9 @@ handle_options(Opts0) -> honor_cipher_order = handle_option(honor_cipher_order, Opts, false), protocol = proplists:get_value(protocol, Opts, tls), padding_check = proplists:get_value(padding_check, Opts, true), - fallback = proplists:get_value(fallback, Opts, false) + fallback = proplists:get_value(fallback, Opts, false), + crl_check = handle_option(crl_check, Opts, false), + crl_cache = handle_option(crl_cache, Opts, {ssl_crl_cache, {internal, []}}) }, CbInfo = proplists:get_value(cb_info, Opts, {gen_tcp, tcp, tcp_closed, tcp_error}), @@ -671,7 +669,7 @@ handle_options(Opts0) -> cb_info, renegotiate_at, secure_renegotiate, hibernate_after, erl_dist, next_protocols_advertised, client_preferred_next_protocols, log_alert, - server_name_indication, honor_cipher_order, padding_check, + server_name_indication, honor_cipher_order, padding_check, crl_check, crl_cache, fallback], SockOpts = lists:foldl(fun(Key, PropList) -> @@ -854,6 +852,12 @@ validate_option(padding_check, Value) when is_boolean(Value) -> Value; validate_option(fallback, Value) when is_boolean(Value) -> Value; +validate_option(crl_check, Value) when is_boolean(Value) -> + Value; +validate_option(crl_check, Value) when (Value == best_effort) or (Value == peer) -> + Value; +validate_option(crl_cache, {Cb, {_Handle, Options}} = Value) when is_atom(Cb) and is_list(Options) -> + Value; validate_option(Opt, Value) -> throw({error, {options, {Opt, Value}}}). @@ -959,10 +963,7 @@ binary_cipher_suites(Version, [{_,_,_}| _] = Ciphers0) -> binary_cipher_suites(Version, Ciphers); binary_cipher_suites(Version, [Cipher0 | _] = Ciphers0) when is_binary(Cipher0) -> - All = ssl_cipher:suites(Version) - ++ ssl_cipher:anonymous_suites() - ++ ssl_cipher:psk_suites(Version) - ++ ssl_cipher:srp_suites(), + All = ssl_cipher:all_suites(Version), case [Cipher || Cipher <- Ciphers0, lists:member(Cipher, All)] of [] -> %% Defaults to all supported suites that does @@ -1189,3 +1190,4 @@ handle_verify_options(Opts, CaCerts) -> Value -> throw({error, {options, {verify, Value}}}) end. + diff --git a/lib/ssl/src/ssl_certificate.erl b/lib/ssl/src/ssl_certificate.erl index 30d224fee2..34e4a8b447 100644 --- a/lib/ssl/src/ssl_certificate.erl +++ b/lib/ssl/src/ssl_certificate.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2007-2014 All Rights Reserved. +%% Copyright Ericsson AB 2007-2015 All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -33,7 +33,8 @@ -export([trusted_cert_and_path/4, certificate_chain/3, file_to_certificats/2, - validate_extension/3, + file_to_crls/2, + validate/3, is_valid_extkey_usage/2, is_valid_key_usage/2, select_extension/2, @@ -83,16 +84,19 @@ trusted_cert_and_path(CertChain, CertDbHandle, CertDbRef, PartialChainHandler) - end. %%-------------------------------------------------------------------- --spec certificate_chain(undefined | binary(), db_handle(), certdb_ref()) -> - {error, no_cert} | {ok, [der_cert()]}. +-spec certificate_chain(undefined | binary() | #'OTPCertificate'{} , db_handle(), certdb_ref()) -> + {error, no_cert} | {ok, #'OTPCertificate'{} | undefined, [der_cert()]}. %% %% Description: Return the certificate chain to send to peer. 
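The two options introduced above can be exercised directly from ssl:connect/3. A usage sketch with placeholder host, port and file names, pairing crl_check with the bundled cache configured to fetch missing CRLs over HTTP with a 5 second timeout:

{ok, Sock} =
    ssl:connect("server.example.com", 443,
                [{verify, verify_peer},
                 {cacertfile, "cacerts.pem"},
                 {crl_check, best_effort},
                 {crl_cache, {ssl_crl_cache, {internal, [{http, 5000}]}}}]).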
%%-------------------------------------------------------------------- certificate_chain(undefined, _, _) -> {error, no_cert}; -certificate_chain(OwnCert, CertDbHandle, CertsDbRef) -> +certificate_chain(OwnCert, CertDbHandle, CertsDbRef) when is_binary(OwnCert) -> ErlCert = public_key:pkix_decode_cert(OwnCert, otp), - certificate_chain(ErlCert, OwnCert, CertDbHandle, CertsDbRef, [OwnCert]). + certificate_chain(ErlCert, OwnCert, CertDbHandle, CertsDbRef, [OwnCert]); +certificate_chain(OwnCert, CertDbHandle, CertsDbRef) -> + DerCert = public_key:pkix_encode('OTPCertificate', OwnCert, otp), + certificate_chain(OwnCert, DerCert, CertDbHandle, CertsDbRef, [DerCert]). %%-------------------------------------------------------------------- -spec file_to_certificats(binary(), term()) -> [der_cert()]. %% @@ -101,29 +105,39 @@ certificate_chain(OwnCert, CertDbHandle, CertsDbRef) -> file_to_certificats(File, DbHandle) -> {ok, List} = ssl_manager:cache_pem_file(File, DbHandle), [Bin || {'Certificate', Bin, not_encrypted} <- List]. + %%-------------------------------------------------------------------- --spec validate_extension(term(), {extension, #'Extension'{}} | {bad_cert, atom()} | valid, - term()) -> {valid, term()} | - {fail, tuple()} | - {unknown, term()}. +-spec file_to_crls(binary(), term()) -> [der_cert()]. +%% +%% Description: Return list of DER encoded certificates. +%%-------------------------------------------------------------------- +file_to_crls(File, DbHandle) -> + {ok, List} = ssl_manager:cache_pem_file(File, DbHandle), + [Bin || {'CertificateList', Bin, not_encrypted} <- List]. + +%%-------------------------------------------------------------------- +-spec validate(term(), {extension, #'Extension'{}} | {bad_cert, atom()} | valid, + term()) -> {valid, term()} | + {fail, tuple()} | + {unknown, term()}. %% %% Description: Validates ssl/tls specific extensions %%-------------------------------------------------------------------- -validate_extension(_,{extension, #'Extension'{extnID = ?'id-ce-extKeyUsage', - extnValue = KeyUse}}, Role) -> +validate(_,{extension, #'Extension'{extnID = ?'id-ce-extKeyUsage', + extnValue = KeyUse}}, {Role, _,_, _, _}) -> case is_valid_extkey_usage(KeyUse, Role) of true -> {valid, Role}; false -> {fail, {bad_cert, invalid_ext_key_usage}} end; -validate_extension(_, {bad_cert, _} = Reason, _) -> - {fail, Reason}; -validate_extension(_, {extension, _}, Role) -> +validate(_, {extension, _}, Role) -> {unknown, Role}; -validate_extension(_, valid, Role) -> +validate(_, {bad_cert, _} = Reason, _) -> + {fail, Reason}; +validate(_, valid, Role) -> {valid, Role}; -validate_extension(_, valid_peer, Role) -> +validate(_, valid_peer, Role) -> {valid, Role}. %%-------------------------------------------------------------------- @@ -194,14 +208,14 @@ certificate_chain(OtpCert, _Cert, CertDbHandle, CertsDbRef, Chain) -> %% certificate. The verification of the %% cert chain will fail if guess is %% incorrect. - {ok, lists:reverse(Chain)} + {ok, undefined, lists:reverse(Chain)} end; {{ok, {SerialNr, Issuer}}, SelfSigned} -> certificate_chain(CertDbHandle, CertsDbRef, Chain, SerialNr, Issuer, SelfSigned) end. 
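The new file_to_crls/2 simply keeps the 'CertificateList' entries of a PEM file. The same selection can be reproduced standalone with public_key, bypassing the ssl_manager PEM cache; a small sketch (module name is an assumption):

-module(crl_pem_example).
-export([read_crls/1]).

%% Same filtering as file_to_crls/2 above, but reading the file directly
%% instead of going through the ssl_manager PEM cache.
read_crls(File) ->
    {ok, PemBin} = file:read_file(File),
    [Der || {'CertificateList', Der, not_encrypted}
                <- public_key:pem_decode(PemBin)].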
-certificate_chain(_,_, Chain, _SerialNr, _Issuer, true) -> - {ok, lists:reverse(Chain)}; +certificate_chain(_, _, [RootCert | _] = Chain, _, _, true) -> + {ok, RootCert, lists:reverse(Chain)}; certificate_chain(CertDbHandle, CertsDbRef, Chain, SerialNr, Issuer, _SelfSigned) -> case ssl_manager:lookup_trusted_cert(CertDbHandle, CertsDbRef, @@ -214,7 +228,7 @@ certificate_chain(CertDbHandle, CertsDbRef, Chain, SerialNr, Issuer, _SelfSigned %% The trusted cert may be obmitted from the chain as the %% counter part needs to have it anyway to be able to %% verify it. - {ok, lists:reverse(Chain)} + {ok, undefined, lists:reverse(Chain)} end. find_issuer(OtpCert, CertDbHandle) -> diff --git a/lib/ssl/src/ssl_cipher.erl b/lib/ssl/src/ssl_cipher.erl index bec0055353..8584e56d6c 100644 --- a/lib/ssl/src/ssl_cipher.erl +++ b/lib/ssl/src/ssl_cipher.erl @@ -33,9 +33,10 @@ -include_lib("public_key/include/public_key.hrl"). -export([security_parameters/2, security_parameters/3, suite_definition/1, - decipher/6, cipher/5, suite/1, suites/1, all_suites/1, - ec_keyed_suites/0, anonymous_suites/0, psk_suites/1, srp_suites/0, - openssl_suite/1, openssl_suite_name/1, filter/2, filter_suites/1, + cipher_init/3, decipher/6, cipher/5, decipher_aead/6, cipher_aead/6, + suite/1, suites/1, all_suites/1, + ec_keyed_suites/0, anonymous_suites/1, psk_suites/1, srp_suites/0, + rc4_suites/1, openssl_suite/1, openssl_suite_name/1, filter/2, filter_suites/1, hash_algorithm/1, sign_algorithm/1, is_acceptable_hash/2, is_fallback/1]). -export_type([cipher_suite/0, @@ -43,7 +44,7 @@ key_algo/0]). -type cipher() :: null |rc4_128 | idea_cbc | des40_cbc | des_cbc | '3des_ede_cbc' - | aes_128_cbc | aes_256_cbc. + | aes_128_cbc | aes_256_cbc | aes_128_gcm | aes_256_gcm | chacha20_poly1305. -type hash() :: null | sha | md5 | sha224 | sha256 | sha384 | sha512. -type key_algo() :: null | rsa | dhe_rsa | dhe_dss | ecdhe_ecdsa| ecdh_ecdsa | ecdh_rsa| srp_rsa| srp_dss | psk | dhe_psk | rsa_psk | dh_anon | ecdh_anon | srp_anon. -type erl_cipher_suite() :: {key_algo(), cipher(), hash()}. @@ -87,20 +88,32 @@ security_parameters(Version, CipherSuite, SecParams) -> hash_size = hash_size(Hash)}. %%-------------------------------------------------------------------- +-spec cipher_init(cipher_enum(), binary(), binary()) -> #cipher_state{}. +%% +%% Description: Initializes the #cipher_state according to BCA +%%------------------------------------------------------------------- +cipher_init(?RC4, IV, Key) -> + State = crypto:stream_init(rc4, Key), + #cipher_state{iv = IV, key = Key, state = State}; +cipher_init(?AES_GCM, IV, Key) -> + <<Nonce:64>> = ssl:random_bytes(8), + #cipher_state{iv = IV, key = Key, nonce = Nonce}; +cipher_init(_BCA, IV, Key) -> + #cipher_state{iv = IV, key = Key}. + +%%-------------------------------------------------------------------- -spec cipher(cipher_enum(), #cipher_state{}, binary(), iodata(), ssl_record:ssl_version()) -> {binary(), #cipher_state{}}. %% %% Description: Encrypts the data and the MAC using chipher described %% by cipher_enum() and updating the cipher state +%% Used for "MAC then Cipher" suites where first an HMAC of the +%% data is calculated and the data plus the HMAC is ecncrypted. 
%%------------------------------------------------------------------- cipher(?NULL, CipherState, <<>>, Fragment, _Version) -> GenStreamCipherList = [Fragment, <<>>], {GenStreamCipherList, CipherState}; -cipher(?RC4, CipherState, Mac, Fragment, _Version) -> - State0 = case CipherState#cipher_state.state of - undefined -> crypto:stream_init(rc4, CipherState#cipher_state.key); - S -> S - end, +cipher(?RC4, CipherState = #cipher_state{state = State0}, Mac, Fragment, _Version) -> GenStreamCipherList = [Fragment, Mac], {State1, T} = crypto:stream_encrypt(State0, GenStreamCipherList), {T, CipherState#cipher_state{state = State1}}; @@ -112,13 +125,40 @@ cipher(?'3DES', CipherState, Mac, Fragment, Version) -> block_cipher(fun(<<K1:8/binary, K2:8/binary, K3:8/binary>>, IV, T) -> crypto:block_encrypt(des3_cbc, [K1, K2, K3], IV, T) end, block_size(des_cbc), CipherState, Mac, Fragment, Version); -cipher(?AES, CipherState, Mac, Fragment, Version) -> +cipher(?AES_CBC, CipherState, Mac, Fragment, Version) -> block_cipher(fun(Key, IV, T) when byte_size(Key) =:= 16 -> crypto:block_encrypt(aes_cbc128, Key, IV, T); (Key, IV, T) when byte_size(Key) =:= 32 -> crypto:block_encrypt(aes_cbc256, Key, IV, T) end, block_size(aes_128_cbc), CipherState, Mac, Fragment, Version). +%%-------------------------------------------------------------------- +-spec cipher_aead(cipher_enum(), #cipher_state{}, integer(), binary(), iodata(), ssl_record:ssl_version()) -> + {binary(), #cipher_state{}}. +%% +%% Description: Encrypts the data and protects associated data (AAD) using chipher +%% described by cipher_enum() and updating the cipher state +%% Use for suites that use authenticated encryption with associated data (AEAD) +%%------------------------------------------------------------------- +cipher_aead(?AES_GCM, CipherState, SeqNo, AAD, Fragment, Version) -> + aead_cipher(aes_gcm, CipherState, SeqNo, AAD, Fragment, Version); +cipher_aead(?CHACHA20_POLY1305, CipherState, SeqNo, AAD, Fragment, Version) -> + aead_cipher(chacha20_poly1305, CipherState, SeqNo, AAD, Fragment, Version). + +aead_cipher(chacha20_poly1305, #cipher_state{key=Key} = CipherState, SeqNo, AAD0, Fragment, _Version) -> + CipherLen = erlang:iolist_size(Fragment), + AAD = <<AAD0/binary, ?UINT16(CipherLen)>>, + Nonce = <<SeqNo:64/integer>>, + {Content, CipherTag} = crypto:block_encrypt(chacha20_poly1305, Key, Nonce, {AAD, Fragment}), + {<<Content/binary, CipherTag/binary>>, CipherState}; +aead_cipher(Type, #cipher_state{key=Key, iv = IV0, nonce = Nonce} = CipherState, _SeqNo, AAD0, Fragment, _Version) -> + CipherLen = erlang:iolist_size(Fragment), + AAD = <<AAD0/binary, ?UINT16(CipherLen)>>, + <<Salt:4/bytes, _/binary>> = IV0, + IV = <<Salt/binary, Nonce:64/integer>>, + {Content, CipherTag} = crypto:block_encrypt(Type, Key, IV, {AAD, Fragment}), + {<<Nonce:64/integer, Content/binary, CipherTag/binary>>, CipherState#cipher_state{nonce = Nonce + 1}}. + build_cipher_block(BlockSz, Mac, Fragment) -> TotSz = byte_size(Mac) + erlang:iolist_size(Fragment) + 1, {PaddingLength, Padding} = get_padding(TotSz, BlockSz), @@ -148,14 +188,12 @@ block_cipher(Fun, BlockSz, #cipher_state{key=Key, iv=IV} = CS0, %% %% Description: Decrypts the data and the MAC using cipher described %% by cipher_enum() and updating the cipher state. 
+%% Used for "MAC then Cipher" suites where first the data is decrypted +%% and the an HMAC of the decrypted data is checked %%------------------------------------------------------------------- decipher(?NULL, _HashSz, CipherState, Fragment, _, _) -> {Fragment, <<>>, CipherState}; -decipher(?RC4, HashSz, CipherState, Fragment, _, _) -> - State0 = case CipherState#cipher_state.state of - undefined -> crypto:stream_init(rc4, CipherState#cipher_state.key); - S -> S - end, +decipher(?RC4, HashSz, CipherState = #cipher_state{state = State0}, Fragment, _, _) -> try crypto:stream_decrypt(State0, Fragment) of {State, Text} -> GSC = generic_stream_cipher_from_bin(Text, HashSz), @@ -179,13 +217,26 @@ decipher(?'3DES', HashSz, CipherState, Fragment, Version, PaddingCheck) -> block_decipher(fun(<<K1:8/binary, K2:8/binary, K3:8/binary>>, IV, T) -> crypto:block_decrypt(des3_cbc, [K1, K2, K3], IV, T) end, CipherState, HashSz, Fragment, Version, PaddingCheck); -decipher(?AES, HashSz, CipherState, Fragment, Version, PaddingCheck) -> +decipher(?AES_CBC, HashSz, CipherState, Fragment, Version, PaddingCheck) -> block_decipher(fun(Key, IV, T) when byte_size(Key) =:= 16 -> crypto:block_decrypt(aes_cbc128, Key, IV, T); (Key, IV, T) when byte_size(Key) =:= 32 -> crypto:block_decrypt(aes_cbc256, Key, IV, T) end, CipherState, HashSz, Fragment, Version, PaddingCheck). +%%-------------------------------------------------------------------- +-spec decipher_aead(cipher_enum(), #cipher_state{}, integer(), binary(), binary(), ssl_record:ssl_version()) -> + {binary(), binary(), #cipher_state{}} | #alert{}. +%% +%% Description: Decrypts the data and checks the associated data (AAD) MAC using +%% cipher described by cipher_enum() and updating the cipher state. +%% Use for suites that use authenticated encryption with associated data (AEAD) +%%------------------------------------------------------------------- +decipher_aead(?AES_GCM, CipherState, SeqNo, AAD, Fragment, Version) -> + aead_decipher(aes_gcm, CipherState, SeqNo, AAD, Fragment, Version); +decipher_aead(?CHACHA20_POLY1305, CipherState, SeqNo, AAD, Fragment, Version) -> + aead_decipher(chacha20_poly1305, CipherState, SeqNo, AAD, Fragment, Version). + block_decipher(Fun, #cipher_state{key=Key, iv=IV} = CipherState0, HashSz, Fragment, Version, PaddingCheck) -> try @@ -215,6 +266,35 @@ block_decipher(Fun, #cipher_state{key=Key, iv=IV} = CipherState0, %% bad_record_mac alert to hide the specific type of the error." ?ALERT_REC(?FATAL, ?BAD_RECORD_MAC) end. + +aead_ciphertext_to_state(chacha20_poly1305, SeqNo, _IV, AAD0, Fragment, _Version) -> + CipherLen = size(Fragment) - 16, + <<CipherText:CipherLen/bytes, CipherTag:16/bytes>> = Fragment, + AAD = <<AAD0/binary, ?UINT16(CipherLen)>>, + Nonce = <<SeqNo:64/integer>>, + {Nonce, AAD, CipherText, CipherTag}; +aead_ciphertext_to_state(_, _SeqNo, <<Salt:4/bytes, _/binary>>, AAD0, Fragment, _Version) -> + CipherLen = size(Fragment) - 24, + <<ExplicitNonce:8/bytes, CipherText:CipherLen/bytes, CipherTag:16/bytes>> = Fragment, + AAD = <<AAD0/binary, ?UINT16(CipherLen)>>, + Nonce = <<Salt/binary, ExplicitNonce/binary>>, + {Nonce, AAD, CipherText, CipherTag}. 
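The AEAD clauses above fix the GCM record layout: 8-byte explicit nonce, then ciphertext, then 16-byte tag, with the full IV built from the 4-byte implicit salt plus the explicit part. A self-contained sketch of that layout using the same crypto calls; the real code also appends the 16-bit plain-text length to the AAD before calling crypto, which this sketch leaves to the caller.

-module(aead_layout_example).
-export([seal/5, open/4]).

%% Salt is the 4-byte implicit IV taken from the key block (iv_size/1
%% returns 4 for the GCM suites); ExplicitNonce is the per-record counter.
seal(Key, Salt, ExplicitNonce, AAD, Plain) when byte_size(Salt) =:= 4 ->
    IV = <<Salt/binary, ExplicitNonce:64/integer>>,
    {Cipher, Tag} = crypto:block_encrypt(aes_gcm, Key, IV, {AAD, Plain}),
    <<ExplicitNonce:64/integer, Cipher/binary, Tag/binary>>.

open(Key, Salt, AAD, Fragment) when byte_size(Salt) =:= 4 ->
    CipherLen = byte_size(Fragment) - 24,    %% 8-byte nonce + 16-byte tag
    <<Explicit:8/bytes, Cipher:CipherLen/bytes, Tag:16/bytes>> = Fragment,
    IV = <<Salt/binary, Explicit/binary>>,
    %% returns the plain text, or the atom error on tag mismatch
    crypto:block_decrypt(aes_gcm, Key, IV, {AAD, Cipher, Tag}).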
+ +aead_decipher(Type, #cipher_state{key = Key, iv = IV} = CipherState, + SeqNo, AAD0, Fragment, Version) -> + try + {Nonce, AAD, CipherText, CipherTag} = aead_ciphertext_to_state(Type, SeqNo, IV, AAD0, Fragment, Version), + case crypto:block_decrypt(Type, Key, Nonce, {AAD, CipherText, CipherTag}) of + Content when is_binary(Content) -> + {Content, CipherState}; + _ -> + ?ALERT_REC(?FATAL, ?BAD_RECORD_MAC) + end + catch + _:_ -> + ?ALERT_REC(?FATAL, ?BAD_RECORD_MAC) + end. + %%-------------------------------------------------------------------- -spec suites(ssl_record:ssl_version()) -> [cipher_suite()]. %% @@ -227,16 +307,27 @@ suites({3, N}) -> all_suites(Version) -> suites(Version) - ++ ssl_cipher:anonymous_suites() - ++ ssl_cipher:psk_suites(Version) - ++ ssl_cipher:srp_suites(). + ++ anonymous_suites(Version) + ++ psk_suites(Version) + ++ srp_suites() + ++ rc4_suites(Version). %%-------------------------------------------------------------------- --spec anonymous_suites() -> [cipher_suite()]. +-spec anonymous_suites(ssl_record:ssl_version() | integer()) -> [cipher_suite()]. %% %% Description: Returns a list of the anonymous cipher suites, only supported %% if explicitly set by user. Intended only for testing. %%-------------------------------------------------------------------- -anonymous_suites() -> + +anonymous_suites({3, N}) -> + anonymous_suites(N); + +anonymous_suites(N) + when N >= 3 -> + [?TLS_DH_anon_WITH_AES_128_GCM_SHA256, + ?TLS_DH_anon_WITH_AES_256_GCM_SHA384 + ] ++ anonymous_suites(0); + +anonymous_suites(_) -> [?TLS_DH_anon_WITH_RC4_128_MD5, ?TLS_DH_anon_WITH_DES_CBC_SHA, ?TLS_DH_anon_WITH_3DES_EDE_CBC_SHA, @@ -260,13 +351,20 @@ psk_suites({3, N}) -> psk_suites(N) when N >= 3 -> - psk_suites(0) ++ - [?TLS_DHE_PSK_WITH_AES_256_CBC_SHA384, - ?TLS_RSA_PSK_WITH_AES_256_CBC_SHA384, - ?TLS_PSK_WITH_AES_256_CBC_SHA384, - ?TLS_DHE_PSK_WITH_AES_128_CBC_SHA256, - ?TLS_RSA_PSK_WITH_AES_128_CBC_SHA256, - ?TLS_PSK_WITH_AES_128_CBC_SHA256]; + [ + ?TLS_DHE_PSK_WITH_AES_256_GCM_SHA384, + ?TLS_RSA_PSK_WITH_AES_256_GCM_SHA384, + ?TLS_PSK_WITH_AES_256_GCM_SHA384, + ?TLS_DHE_PSK_WITH_AES_256_CBC_SHA384, + ?TLS_RSA_PSK_WITH_AES_256_CBC_SHA384, + ?TLS_PSK_WITH_AES_256_CBC_SHA384, + ?TLS_DHE_PSK_WITH_AES_128_GCM_SHA256, + ?TLS_RSA_PSK_WITH_AES_128_GCM_SHA256, + ?TLS_PSK_WITH_AES_128_GCM_SHA256, + ?TLS_DHE_PSK_WITH_AES_128_CBC_SHA256, + ?TLS_RSA_PSK_WITH_AES_128_CBC_SHA256, + ?TLS_PSK_WITH_AES_128_CBC_SHA256 + ] ++ psk_suites(0); psk_suites(_) -> [?TLS_DHE_PSK_WITH_AES_256_CBC_SHA, @@ -298,6 +396,24 @@ srp_suites() -> ?TLS_SRP_SHA_WITH_AES_256_CBC_SHA, ?TLS_SRP_SHA_RSA_WITH_AES_256_CBC_SHA, ?TLS_SRP_SHA_DSS_WITH_AES_256_CBC_SHA]. +%%-------------------------------------------------------------------- +-spec rc4_suites(Version::ssl_record:ssl_version()) -> [cipher_suite()]. +%% +%% Description: Returns a list of the RSA|(ECDH/RSA)| (ECDH/ECDSA) +%% with RC4 cipher suites, only supported if explicitly set by user. +%% Are not considered secure any more. Other RC4 suites already +%% belonged to the user configured only category. +%%-------------------------------------------------------------------- +rc4_suites({3, 0}) -> + [?TLS_RSA_WITH_RC4_128_SHA, + ?TLS_RSA_WITH_RC4_128_MD5]; +rc4_suites({3, N}) when N =< 3 -> + [?TLS_ECDHE_ECDSA_WITH_RC4_128_SHA, + ?TLS_ECDHE_RSA_WITH_RC4_128_SHA, + ?TLS_RSA_WITH_RC4_128_SHA, + ?TLS_RSA_WITH_RC4_128_MD5, + ?TLS_ECDH_ECDSA_WITH_RC4_128_SHA, + ?TLS_ECDH_RSA_WITH_RC4_128_SHA]. 
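Because the RC4 and anonymous suites are now offered only when explicitly configured, a client that still needs them has to name them in the ciphers option. A hypothetical shell session with a placeholder host; the erlang-format suite tuples are assumed to match the usual {KeyExchange, Cipher, Hash} notation:

1> Rc4 = [{rsa, rc4_128, sha}, {rsa, rc4_128, md5}].
2> {ok, Sock} = ssl:connect("legacy.example.com", 443, [{ciphers, Rc4}]).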
%%-------------------------------------------------------------------- -spec suite_definition(cipher_suite()) -> int_cipher_suite(). @@ -418,6 +534,19 @@ suite_definition(?TLS_RSA_PSK_WITH_AES_256_CBC_SHA) -> %%% TLS 1.2 PSK Cipher Suites RFC 5487 +suite_definition(?TLS_PSK_WITH_AES_128_GCM_SHA256) -> + {psk, aes_128_gcm, null, sha256}; +suite_definition(?TLS_PSK_WITH_AES_256_GCM_SHA384) -> + {psk, aes_256_gcm, null, sha384}; +suite_definition(?TLS_DHE_PSK_WITH_AES_128_GCM_SHA256) -> + {dhe_psk, aes_128_gcm, null, sha256}; +suite_definition(?TLS_DHE_PSK_WITH_AES_256_GCM_SHA384) -> + {dhe_psk, aes_256_gcm, null, sha384}; +suite_definition(?TLS_RSA_PSK_WITH_AES_128_GCM_SHA256) -> + {rsa_psk, aes_128_gcm, null, sha256}; +suite_definition(?TLS_RSA_PSK_WITH_AES_256_GCM_SHA384) -> + {rsa_psk, aes_256_gcm, null, sha384}; + suite_definition(?TLS_PSK_WITH_AES_128_CBC_SHA256) -> {psk, aes_128_cbc, sha256, default_prf}; suite_definition(?TLS_PSK_WITH_AES_256_CBC_SHA384) -> @@ -537,7 +666,59 @@ suite_definition(?TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384) -> suite_definition(?TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256) -> {ecdh_rsa, aes_128_cbc, sha256, sha256}; suite_definition(?TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384) -> - {ecdh_rsa, aes_256_cbc, sha384, sha384}. + {ecdh_rsa, aes_256_cbc, sha384, sha384}; + +%% RFC 5288 AES-GCM Cipher Suites +suite_definition(?TLS_RSA_WITH_AES_128_GCM_SHA256) -> + {rsa, aes_128_gcm, null, sha256}; +suite_definition(?TLS_RSA_WITH_AES_256_GCM_SHA384) -> + {rsa, aes_256_gcm, null, sha384}; +suite_definition(?TLS_DHE_RSA_WITH_AES_128_GCM_SHA256) -> + {dhe_rsa, aes_128_gcm, null, sha256}; +suite_definition(?TLS_DHE_RSA_WITH_AES_256_GCM_SHA384) -> + {dhe_rsa, aes_256_gcm, null, sha384}; +suite_definition(?TLS_DH_RSA_WITH_AES_128_GCM_SHA256) -> + {dh_rsa, aes_128_gcm, null, sha256}; +suite_definition(?TLS_DH_RSA_WITH_AES_256_GCM_SHA384) -> + {dh_rsa, aes_256_gcm, null, sha384}; +suite_definition(?TLS_DHE_DSS_WITH_AES_128_GCM_SHA256) -> + {dhe_dss, aes_128_gcm, null, sha256}; +suite_definition(?TLS_DHE_DSS_WITH_AES_256_GCM_SHA384) -> + {dhe_dss, aes_256_gcm, null, sha384}; +suite_definition(?TLS_DH_DSS_WITH_AES_128_GCM_SHA256) -> + {dh_dss, aes_128_gcm, null, sha256}; +suite_definition(?TLS_DH_DSS_WITH_AES_256_GCM_SHA384) -> + {dh_dss, aes_256_gcm, null, sha384}; +suite_definition(?TLS_DH_anon_WITH_AES_128_GCM_SHA256) -> + {dh_anon, aes_128_gcm, null, sha256}; +suite_definition(?TLS_DH_anon_WITH_AES_256_GCM_SHA384) -> + {dh_anon, aes_256_gcm, null, sha384}; + +%% RFC 5289 ECC AES-GCM Cipher Suites +suite_definition(?TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256) -> + {ecdhe_ecdsa, aes_128_gcm, null, sha256}; +suite_definition(?TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384) -> + {ecdhe_ecdsa, aes_256_gcm, null, sha384}; +suite_definition(?TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256) -> + {ecdh_ecdsa, aes_128_gcm, null, sha256}; +suite_definition(?TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384) -> + {ecdh_ecdsa, aes_256_gcm, null, sha384}; +suite_definition(?TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256) -> + {ecdhe_rsa, aes_128_gcm, null, sha256}; +suite_definition(?TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384) -> + {ecdhe_rsa, aes_256_gcm, null, sha384}; +suite_definition(?TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256) -> + {ecdh_rsa, aes_128_gcm, null, sha256}; +suite_definition(?TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384) -> + {ecdh_rsa, aes_256_gcm, null, sha384}; + +%% draft-agl-tls-chacha20poly1305-04 Chacha20/Poly1305 Suites +suite_definition(?TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256) -> + {ecdhe_rsa, chacha20_poly1305, null, 
sha256}; +suite_definition(?TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256) -> + {ecdhe_ecdsa, chacha20_poly1305, null, sha256}; +suite_definition(?TLS_DHE_RSA_WITH_CHACHA20_POLY1305_SHA256) -> + {dhe_rsa, chacha20_poly1305, null, sha256}. %%-------------------------------------------------------------------- -spec suite(erl_cipher_suite()) -> cipher_suite(). @@ -641,6 +822,19 @@ suite({rsa_psk, aes_256_cbc,sha}) -> %%% TLS 1.2 PSK Cipher Suites RFC 5487 +suite({psk, aes_128_gcm, null}) -> + ?TLS_PSK_WITH_AES_128_GCM_SHA256; +suite({psk, aes_256_gcm, null}) -> + ?TLS_PSK_WITH_AES_256_GCM_SHA384; +suite({dhe_psk, aes_128_gcm, null}) -> + ?TLS_DHE_PSK_WITH_AES_128_GCM_SHA256; +suite({dhe_psk, aes_256_gcm, null}) -> + ?TLS_DHE_PSK_WITH_AES_256_GCM_SHA384; +suite({rsa_psk, aes_128_gcm, null}) -> + ?TLS_RSA_PSK_WITH_AES_128_GCM_SHA256; +suite({rsa_psk, aes_256_gcm, null}) -> + ?TLS_RSA_PSK_WITH_AES_256_GCM_SHA384; + suite({psk, aes_128_cbc, sha256}) -> ?TLS_PSK_WITH_AES_128_CBC_SHA256; suite({psk, aes_256_cbc, sha384}) -> @@ -760,7 +954,60 @@ suite({ecdhe_rsa, aes_256_cbc, sha384}) -> suite({ecdh_rsa, aes_128_cbc, sha256}) -> ?TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256; suite({ecdh_rsa, aes_256_cbc, sha384}) -> - ?TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384. + ?TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384; + +%% RFC 5288 AES-GCM Cipher Suites +suite({rsa, aes_128_gcm, null}) -> + ?TLS_RSA_WITH_AES_128_GCM_SHA256; +suite({rsa, aes_256_gcm, null}) -> + ?TLS_RSA_WITH_AES_256_GCM_SHA384; +suite({dhe_rsa, aes_128_gcm, null}) -> + ?TLS_DHE_RSA_WITH_AES_128_GCM_SHA256; +suite({dhe_rsa, aes_256_gcm, null}) -> + ?TLS_DHE_RSA_WITH_AES_256_GCM_SHA384; +suite({dh_rsa, aes_128_gcm, null}) -> + ?TLS_DH_RSA_WITH_AES_128_GCM_SHA256; +suite({dh_rsa, aes_256_gcm, null}) -> + ?TLS_DH_RSA_WITH_AES_256_GCM_SHA384; +suite({dhe_dss, aes_128_gcm, null}) -> + ?TLS_DHE_DSS_WITH_AES_128_GCM_SHA256; +suite({dhe_dss, aes_256_gcm, null}) -> + ?TLS_DHE_DSS_WITH_AES_256_GCM_SHA384; +suite({dh_dss, aes_128_gcm, null}) -> + ?TLS_DH_DSS_WITH_AES_128_GCM_SHA256; +suite({dh_dss, aes_256_gcm, null}) -> + ?TLS_DH_DSS_WITH_AES_256_GCM_SHA384; +suite({dh_anon, aes_128_gcm, null}) -> + ?TLS_DH_anon_WITH_AES_128_GCM_SHA256; +suite({dh_anon, aes_256_gcm, null}) -> + ?TLS_DH_anon_WITH_AES_256_GCM_SHA384; + +%% RFC 5289 ECC AES-GCM Cipher Suites +suite({ecdhe_ecdsa, aes_128_gcm, null}) -> + ?TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256; +suite({ecdhe_ecdsa, aes_256_gcm, null}) -> + ?TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384; +suite({ecdh_ecdsa, aes_128_gcm, null}) -> + ?TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256; +suite({ecdh_ecdsa, aes_256_gcm, null}) -> + ?TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384; +suite({ecdhe_rsa, aes_128_gcm, null}) -> + ?TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256; +suite({ecdhe_rsa, aes_256_gcm, null}) -> + ?TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384; +suite({ecdh_rsa, aes_128_gcm, null}) -> + ?TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256; +suite({ecdh_rsa, aes_256_gcm, null}) -> + ?TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384; + + +%% draft-agl-tls-chacha20poly1305-04 Chacha20/Poly1305 Suites +suite({ecdhe_rsa, chacha20_poly1305, null}) -> + ?TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256; +suite({ecdhe_ecdsa, chacha20_poly1305, null}) -> + ?TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256; +suite({dhe_rsa, chacha20_poly1305, null}) -> + ?TLS_DHE_RSA_WITH_CHACHA20_POLY1305_SHA256. %%-------------------------------------------------------------------- -spec openssl_suite(openssl_cipher_suite()) -> cipher_suite(). 
@@ -875,7 +1122,47 @@ openssl_suite("ECDHE-RSA-AES256-SHA384") -> openssl_suite("ECDH-RSA-AES128-SHA256") -> ?TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256; openssl_suite("ECDH-RSA-AES256-SHA384") -> - ?TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384. + ?TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384; + +%% RFC 5288 AES-GCM Cipher Suites +openssl_suite("AES128-GCM-SHA256") -> + ?TLS_RSA_WITH_AES_128_GCM_SHA256; +openssl_suite("AES256-GCM-SHA384") -> + ?TLS_RSA_WITH_AES_256_GCM_SHA384; +openssl_suite("DHE-RSA-AES128-GCM-SHA256") -> + ?TLS_DHE_RSA_WITH_AES_128_GCM_SHA256; +openssl_suite("DHE-RSA-AES256-GCM-SHA384") -> + ?TLS_DHE_RSA_WITH_AES_256_GCM_SHA384; +openssl_suite("DH-RSA-AES128-GCM-SHA256") -> + ?TLS_DH_RSA_WITH_AES_128_GCM_SHA256; +openssl_suite("DH-RSA-AES256-GCM-SHA384") -> + ?TLS_DH_RSA_WITH_AES_256_GCM_SHA384; +openssl_suite("DHE-DSS-AES128-GCM-SHA256") -> + ?TLS_DHE_DSS_WITH_AES_128_GCM_SHA256; +openssl_suite("DHE-DSS-AES256-GCM-SHA384") -> + ?TLS_DHE_DSS_WITH_AES_256_GCM_SHA384; +openssl_suite("DH-DSS-AES128-GCM-SHA256") -> + ?TLS_DH_DSS_WITH_AES_128_GCM_SHA256; +openssl_suite("DH-DSS-AES256-GCM-SHA384") -> + ?TLS_DH_DSS_WITH_AES_256_GCM_SHA384; + +%% RFC 5289 ECC AES-GCM Cipher Suites +openssl_suite("ECDHE-ECDSA-AES128-GCM-SHA256") -> + ?TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256; +openssl_suite("ECDHE-ECDSA-AES256-GCM-SHA384") -> + ?TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384; +openssl_suite("ECDH-ECDSA-AES128-GCM-SHA256") -> + ?TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256; +openssl_suite("ECDH-ECDSA-AES256-GCM-SHA384") -> + ?TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384; +openssl_suite("ECDHE-RSA-AES128-GCM-SHA256") -> + ?TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256; +openssl_suite("ECDHE-RSA-AES256-GCM-SHA384") -> + ?TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384; +openssl_suite("ECDH-RSA-AES128-GCM-SHA256") -> + ?TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256; +openssl_suite("ECDH-RSA-AES256-GCM-SHA384") -> + ?TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384. %%-------------------------------------------------------------------- -spec openssl_suite_name(cipher_suite()) -> openssl_cipher_suite(). 
@@ -1012,6 +1299,46 @@ openssl_suite_name(?TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256) -> openssl_suite_name(?TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384) -> "ECDH-RSA-AES256-SHA384"; +%% RFC 5288 AES-GCM Cipher Suites +openssl_suite_name(?TLS_RSA_WITH_AES_128_GCM_SHA256) -> + "AES128-GCM-SHA256"; +openssl_suite_name(?TLS_RSA_WITH_AES_256_GCM_SHA384) -> + "AES256-GCM-SHA384"; +openssl_suite_name(?TLS_DHE_RSA_WITH_AES_128_GCM_SHA256) -> + "DHE-RSA-AES128-GCM-SHA256"; +openssl_suite_name(?TLS_DHE_RSA_WITH_AES_256_GCM_SHA384) -> + "DHE-RSA-AES256-GCM-SHA384"; +openssl_suite_name(?TLS_DH_RSA_WITH_AES_128_GCM_SHA256) -> + "DH-RSA-AES128-GCM-SHA256"; +openssl_suite_name(?TLS_DH_RSA_WITH_AES_256_GCM_SHA384) -> + "DH-RSA-AES256-GCM-SHA384"; +openssl_suite_name(?TLS_DHE_DSS_WITH_AES_128_GCM_SHA256) -> + "DHE-DSS-AES128-GCM-SHA256"; +openssl_suite_name(?TLS_DHE_DSS_WITH_AES_256_GCM_SHA384) -> + "DHE-DSS-AES256-GCM-SHA384"; +openssl_suite_name(?TLS_DH_DSS_WITH_AES_128_GCM_SHA256) -> + "DH-DSS-AES128-GCM-SHA256"; +openssl_suite_name(?TLS_DH_DSS_WITH_AES_256_GCM_SHA384) -> + "DH-DSS-AES256-GCM-SHA384"; + +%% RFC 5289 ECC AES-GCM Cipher Suites +openssl_suite_name(?TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256) -> + "ECDHE-ECDSA-AES128-GCM-SHA256"; +openssl_suite_name(?TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384) -> + "ECDHE-ECDSA-AES256-GCM-SHA384"; +openssl_suite_name(?TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256) -> + "ECDH-ECDSA-AES128-GCM-SHA256"; +openssl_suite_name(?TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384) -> + "ECDH-ECDSA-AES256-GCM-SHA384"; +openssl_suite_name(?TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256) -> + "ECDHE-RSA-AES128-GCM-SHA256"; +openssl_suite_name(?TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384) -> + "ECDHE-RSA-AES256-GCM-SHA384"; +openssl_suite_name(?TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256) -> + "ECDH-RSA-AES128-GCM-SHA256"; +openssl_suite_name(?TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384) -> + "ECDH-RSA-AES256-GCM-SHA384"; + %% No oppenssl name openssl_suite_name(Cipher) -> suite_definition(Cipher). @@ -1095,6 +1422,13 @@ is_acceptable_keyexchange(KeyExchange, Algos) is_acceptable_keyexchange(_, _) -> true. +is_acceptable_cipher(Cipher, Algos) + when Cipher == aes_128_gcm; + Cipher == aes_256_gcm -> + proplists:get_bool(aes_gcm, Algos); +is_acceptable_cipher(Cipher, Algos) + when Cipher == chacha20_poly1305 -> + proplists:get_bool(Cipher, Algos); is_acceptable_cipher(_, _) -> true. @@ -1125,7 +1459,12 @@ bulk_cipher_algorithm('3des_ede_cbc') -> ?'3DES'; bulk_cipher_algorithm(Cipher) when Cipher == aes_128_cbc; Cipher == aes_256_cbc -> - ?AES. + ?AES_CBC; +bulk_cipher_algorithm(Cipher) when Cipher == aes_128_gcm; + Cipher == aes_256_gcm -> + ?AES_GCM; +bulk_cipher_algorithm(chacha20_poly1305) -> + ?CHACHA20_POLY1305. type(Cipher) when Cipher == null; Cipher == rc4_128 -> @@ -1135,7 +1474,11 @@ type(Cipher) when Cipher == des_cbc; Cipher == '3des_ede_cbc'; Cipher == aes_128_cbc; Cipher == aes_256_cbc -> - ?BLOCK. + ?BLOCK; +type(Cipher) when Cipher == aes_128_gcm; + Cipher == aes_256_gcm; + Cipher == chacha20_poly1305 -> + ?AEAD. key_material(null) -> 0; @@ -1148,6 +1491,12 @@ key_material('3des_ede_cbc') -> key_material(aes_128_cbc) -> 16; key_material(aes_256_cbc) -> + 32; +key_material(aes_128_gcm) -> + 16; +key_material(aes_256_gcm) -> + 32; +key_material(chacha20_poly1305) -> 32. 
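The is_acceptable_cipher/2 clauses above gate the new suites on what the crypto library reports. The same check can be made from application code; a sketch assuming the cipher atoms used by crypto:supports/0 of this era:

-module(aead_support_example).
-export([have_aead/0]).

%% The GCM and Chacha20/Poly1305 suites are only usable when the
%% underlying crypto library reports the corresponding ciphers, which is
%% what is_acceptable_cipher/2 checks at suite filtering time.
have_aead() ->
    Ciphers = proplists:get_value(ciphers, crypto:supports()),
    {lists:member(aes_gcm, Ciphers), lists:member(chacha20_poly1305, Ciphers)}.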
expanded_key_material(null) -> @@ -1159,7 +1508,10 @@ expanded_key_material(Cipher) when Cipher == des_cbc -> expanded_key_material('3des_ede_cbc') -> 24; expanded_key_material(Cipher) when Cipher == aes_128_cbc; - Cipher == aes_256_cbc -> + Cipher == aes_256_cbc; + Cipher == aes_128_gcm; + Cipher == aes_256_gcm; + Cipher == chacha20_poly1305 -> unknown. @@ -1168,16 +1520,25 @@ effective_key_bits(null) -> effective_key_bits(des_cbc) -> 56; effective_key_bits(Cipher) when Cipher == rc4_128; - Cipher == aes_128_cbc -> + Cipher == aes_128_cbc; + Cipher == aes_128_gcm -> 128; effective_key_bits('3des_ede_cbc') -> 168; -effective_key_bits(aes_256_cbc) -> +effective_key_bits(Cipher) when Cipher == aes_256_cbc; + Cipher == aes_256_gcm; + Cipher == chacha20_poly1305 -> 256. iv_size(Cipher) when Cipher == null; - Cipher == rc4_128 -> + Cipher == rc4_128; + Cipher == chacha20_poly1305-> 0; + +iv_size(Cipher) when Cipher == aes_128_gcm; + Cipher == aes_256_gcm -> + 4; + iv_size(Cipher) -> block_size(Cipher). @@ -1186,7 +1547,10 @@ block_size(Cipher) when Cipher == des_cbc; 8; block_size(Cipher) when Cipher == aes_128_cbc; - Cipher == aes_256_cbc -> + Cipher == aes_256_cbc; + Cipher == aes_128_gcm; + Cipher == aes_256_gcm; + Cipher == chacha20_poly1305 -> 16. prf_algorithm(default_prf, {3, N}) when N >= 3 -> @@ -1342,10 +1706,15 @@ dhe_rsa_suites() -> ?TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA, ?TLS_DHE_RSA_WITH_AES_128_CBC_SHA256, ?TLS_DHE_RSA_WITH_AES_128_CBC_SHA, - ?TLS_DHE_RSA_WITH_DES_CBC_SHA]. + ?TLS_DHE_RSA_WITH_DES_CBC_SHA, + ?TLS_DHE_RSA_WITH_AES_128_GCM_SHA256, + ?TLS_DHE_RSA_WITH_AES_256_GCM_SHA384, + ?TLS_DHE_RSA_WITH_CHACHA20_POLY1305_SHA256]. psk_rsa_suites() -> - [?TLS_RSA_PSK_WITH_AES_256_CBC_SHA384, + [?TLS_RSA_PSK_WITH_AES_256_GCM_SHA384, + ?TLS_RSA_PSK_WITH_AES_128_GCM_SHA256, + ?TLS_RSA_PSK_WITH_AES_256_CBC_SHA384, ?TLS_RSA_PSK_WITH_AES_128_CBC_SHA256, ?TLS_RSA_PSK_WITH_AES_256_CBC_SHA, ?TLS_RSA_PSK_WITH_AES_128_CBC_SHA, @@ -1365,7 +1734,9 @@ rsa_suites() -> ?TLS_RSA_WITH_AES_128_CBC_SHA, ?TLS_RSA_WITH_RC4_128_SHA, ?TLS_RSA_WITH_RC4_128_MD5, - ?TLS_RSA_WITH_DES_CBC_SHA]. + ?TLS_RSA_WITH_DES_CBC_SHA, + ?TLS_RSA_WITH_AES_128_GCM_SHA256, + ?TLS_RSA_WITH_AES_256_GCM_SHA384]. ecdh_rsa_suites() -> [?TLS_ECDH_RSA_WITH_NULL_SHA, @@ -1374,7 +1745,9 @@ ecdh_rsa_suites() -> ?TLS_ECDH_RSA_WITH_AES_128_CBC_SHA, ?TLS_ECDH_RSA_WITH_AES_256_CBC_SHA, ?TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256, - ?TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384]. + ?TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384, + ?TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256, + ?TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384]. ecdhe_rsa_suites() -> [?TLS_ECDHE_RSA_WITH_NULL_SHA, @@ -1383,7 +1756,10 @@ ecdhe_rsa_suites() -> ?TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA, ?TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA, ?TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256, - ?TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384]. + ?TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384, + ?TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, + ?TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, + ?TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256]. dsa_signed_suites() -> dhe_dss_suites() ++ srp_dss_suites(). @@ -1394,7 +1770,9 @@ dhe_dss_suites() -> ?TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA, ?TLS_DHE_DSS_WITH_AES_128_CBC_SHA256, ?TLS_DHE_DSS_WITH_AES_128_CBC_SHA, - ?TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA]. + ?TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA, + ?TLS_DHE_DSS_WITH_AES_128_GCM_SHA256, + ?TLS_DHE_DSS_WITH_AES_256_GCM_SHA384]. 
srp_dss_suites() -> [?TLS_SRP_SHA_DSS_WITH_3DES_EDE_CBC_SHA, @@ -1418,7 +1796,9 @@ ecdh_ecdsa_suites() -> ?TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA, ?TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA, ?TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA256, - ?TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384]. + ?TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384, + ?TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256, + ?TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384]. ecdhe_ecdsa_suites() -> [?TLS_ECDHE_ECDSA_WITH_NULL_SHA, @@ -1427,7 +1807,10 @@ ecdhe_ecdsa_suites() -> ?TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA, ?TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA, ?TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256, - ?TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384]. + ?TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384, + ?TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, + ?TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, + ?TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256]. filter_keyuse(OtpCert, Ciphers, Suites, SignSuites) -> TBSCert = OtpCert#'OTPCertificate'.tbsCertificate, diff --git a/lib/ssl/src/ssl_cipher.hrl b/lib/ssl/src/ssl_cipher.hrl index 3e50563f0a..8689a3c68b 100644 --- a/lib/ssl/src/ssl_cipher.hrl +++ b/lib/ssl/src/ssl_cipher.hrl @@ -46,7 +46,8 @@ -record(cipher_state, { iv, key, - state + state, + nonce }). %%% TLS_NULL_WITH_NULL_NULL is specified and is the initial state of a @@ -399,6 +400,24 @@ %%% TLS 1.2 PSK Cipher Suites RFC 5487 +%% TLS_PSK_WITH_AES_128_GCM_SHA256 = {0x00,0xA8}; +-define(TLS_PSK_WITH_AES_128_GCM_SHA256, <<?BYTE(16#00), ?BYTE(16#A8)>>). + +%% TLS_PSK_WITH_AES_256_GCM_SHA384 = {0x00,0xA9}; +-define(TLS_PSK_WITH_AES_256_GCM_SHA384, <<?BYTE(16#00), ?BYTE(16#A9)>>). + +%% TLS_DHE_PSK_WITH_AES_128_GCM_SHA256 = {0x00,0xAA}; +-define(TLS_DHE_PSK_WITH_AES_128_GCM_SHA256, <<?BYTE(16#00), ?BYTE(16#AA)>>). + +%% TLS_DHE_PSK_WITH_AES_256_GCM_SHA384 = {0x00,0xAB}; +-define(TLS_DHE_PSK_WITH_AES_256_GCM_SHA384, <<?BYTE(16#00), ?BYTE(16#AB)>>). + +%% TLS_RSA_PSK_WITH_AES_128_GCM_SHA256 = {0x00,0xAC}; +-define(TLS_RSA_PSK_WITH_AES_128_GCM_SHA256, <<?BYTE(16#00), ?BYTE(16#AC)>>). + +%% TLS_RSA_PSK_WITH_AES_256_GCM_SHA384 = {0x00,0xAD}; +-define(TLS_RSA_PSK_WITH_AES_256_GCM_SHA384, <<?BYTE(16#00), ?BYTE(16#AD)>>). + %% TLS_PSK_WITH_AES_128_CBC_SHA256 = {0x00,0xAE}; -define(TLS_PSK_WITH_AES_128_CBC_SHA256, <<?BYTE(16#00), ?BYTE(16#AE)>>). @@ -464,4 +483,79 @@ %% TLS_SRP_SHA_DSS_WITH_AES_256_CBC_SHA = { 0xC0,0x22 }; -define(TLS_SRP_SHA_DSS_WITH_AES_256_CBC_SHA, <<?BYTE(16#C0), ?BYTE(16#22)>>). +%%% AES-GCM Cipher Suites RFC 5288 + +%% TLS_RSA_WITH_AES_128_GCM_SHA256 = {0x00,0x9C} +-define(TLS_RSA_WITH_AES_128_GCM_SHA256, <<?BYTE(16#00), ?BYTE(16#9C)>>). + +%% TLS_RSA_WITH_AES_256_GCM_SHA384 = {0x00,0x9D} +-define(TLS_RSA_WITH_AES_256_GCM_SHA384, <<?BYTE(16#00), ?BYTE(16#9D)>>). + +%% TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = {0x00,0x9E} +-define(TLS_DHE_RSA_WITH_AES_128_GCM_SHA256, <<?BYTE(16#00), ?BYTE(16#9E)>>). + +%% TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = {0x00,0x9F} +-define(TLS_DHE_RSA_WITH_AES_256_GCM_SHA384, <<?BYTE(16#00), ?BYTE(16#9F)>>). + +%% TLS_DH_RSA_WITH_AES_128_GCM_SHA256 = {0x00,0xA0} +-define(TLS_DH_RSA_WITH_AES_128_GCM_SHA256, <<?BYTE(16#00), ?BYTE(16#A0)>>). + +%% TLS_DH_RSA_WITH_AES_256_GCM_SHA384 = {0x00,0xA1} +-define(TLS_DH_RSA_WITH_AES_256_GCM_SHA384, <<?BYTE(16#00), ?BYTE(16#A1)>>). + +%% TLS_DHE_DSS_WITH_AES_128_GCM_SHA256 = {0x00,0xA2} +-define(TLS_DHE_DSS_WITH_AES_128_GCM_SHA256, <<?BYTE(16#00), ?BYTE(16#A2)>>). + +%% TLS_DHE_DSS_WITH_AES_256_GCM_SHA384 = {0x00,0xA3} +-define(TLS_DHE_DSS_WITH_AES_256_GCM_SHA384, <<?BYTE(16#00), ?BYTE(16#A3)>>). 
+ +%% TLS_DH_DSS_WITH_AES_128_GCM_SHA256 = {0x00,0xA4} +-define(TLS_DH_DSS_WITH_AES_128_GCM_SHA256, <<?BYTE(16#00), ?BYTE(16#A4)>>). + +%% TLS_DH_DSS_WITH_AES_256_GCM_SHA384 = {0x00,0xA5} +-define(TLS_DH_DSS_WITH_AES_256_GCM_SHA384, <<?BYTE(16#00), ?BYTE(16#A5)>>). + +%% TLS_DH_anon_WITH_AES_128_GCM_SHA256 = {0x00,0xA6} +-define(TLS_DH_anon_WITH_AES_128_GCM_SHA256, <<?BYTE(16#00), ?BYTE(16#A6)>>). + +%% TLS_DH_anon_WITH_AES_256_GCM_SHA384 = {0x00,0xA7} +-define(TLS_DH_anon_WITH_AES_256_GCM_SHA384, <<?BYTE(16#00), ?BYTE(16#A7)>>). + +%%% ECC AES-GCM Cipher Suites RFC 5289 + +%% TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = {0xC0,0x2B}; +-define(TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, <<?BYTE(16#C0), ?BYTE(16#2B)>>). + +%% TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = {0xC0,0x2C}; +-define(TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, <<?BYTE(16#C0), ?BYTE(16#2C)>>). + +%% TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256 = {0xC0,0x2D}; +-define(TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256, <<?BYTE(16#C0), ?BYTE(16#2D)>>). + +%% TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384 = {0xC0,0x2E}; +-define(TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384, <<?BYTE(16#C0), ?BYTE(16#2E)>>). + +%% TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = {0xC0,0x2F}; +-define(TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, <<?BYTE(16#C0), ?BYTE(16#2F)>>). + +%% TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = {0xC0,0x30}; +-define(TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, <<?BYTE(16#C0), ?BYTE(16#30)>>). + +%% TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256 = {0xC0,0x31}; +-define(TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256, <<?BYTE(16#C0), ?BYTE(16#31)>>). + +%% TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384 = {0xC0,0x32}; +-define(TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384, <<?BYTE(16#C0), ?BYTE(16#32)>>). + +%%% Chacha20/Poly1305 Suites draft-agl-tls-chacha20poly1305-04 + +%% TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = {0xcc, 0x13} +-define(TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256, <<?BYTE(16#CC), ?BYTE(16#13)>>). + +%% TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = {0xcc, 0x14} +-define(TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256, <<?BYTE(16#CC), ?BYTE(16#14)>>). + +%% TLS_DHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = {0xcc, 0x15} +-define(TLS_DHE_RSA_WITH_CHACHA20_POLY1305_SHA256, <<?BYTE(16#CC), ?BYTE(16#15)>>). + -endif. % -ifdef(ssl_cipher). diff --git a/lib/ssl/src/ssl_config.erl b/lib/ssl/src/ssl_config.erl index 545b8aa0f6..fc8b214a29 100644 --- a/lib/ssl/src/ssl_config.erl +++ b/lib/ssl/src/ssl_config.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2007-2013. All Rights Reserved. +%% Copyright Ericsson AB 2007-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -31,13 +31,13 @@ init(SslOpts, Role) -> init_manager_name(SslOpts#ssl_options.erl_dist), - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, OwnCert} + {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbHandle, OwnCert} = init_certificates(SslOpts, Role), PrivateKey = init_private_key(PemCacheHandle, SslOpts#ssl_options.key, SslOpts#ssl_options.keyfile, SslOpts#ssl_options.password, Role), DHParams = init_diffie_hellman(PemCacheHandle, SslOpts#ssl_options.dh, SslOpts#ssl_options.dhfile, Role), - {ok, CertDbRef, CertDbHandle, FileRefHandle, CacheHandle, OwnCert, PrivateKey, DHParams}. + {ok, CertDbRef, CertDbHandle, FileRefHandle, CacheHandle, CRLDbHandle, OwnCert, PrivateKey, DHParams}. 
init_manager_name(false) -> put(ssl_manager, ssl_manager:manager_name(normal)); @@ -46,9 +46,11 @@ init_manager_name(true) -> init_certificates(#ssl_options{cacerts = CaCerts, cacertfile = CACertFile, - certfile = CertFile, - cert = Cert}, Role) -> - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle} = + certfile = CertFile, + cert = Cert, + crl_cache = CRLCache + }, Role) -> + {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo} = try Certs = case CaCerts of undefined -> @@ -56,39 +58,40 @@ init_certificates(#ssl_options{cacerts = CaCerts, _ -> {der, CaCerts} end, - {ok, _, _, _, _, _} = ssl_manager:connection_init(Certs, Role) + {ok, _, _, _, _, _, _} = ssl_manager:connection_init(Certs, Role, CRLCache) catch _:Reason -> file_error(CACertFile, {cacertfile, Reason}) end, init_certificates(Cert, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, - CacheHandle, CertFile, Role). + CacheHandle, CRLDbInfo, CertFile, Role). -init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, <<>>, _) -> - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, undefined}; +init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, + CRLDbInfo, <<>>, _) -> + {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, undefined}; init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, - CacheHandle, CertFile, client) -> + CacheHandle, CRLDbInfo, CertFile, client) -> try %% Ignoring potential proxy-certificates see: %% http://dev.globus.org/wiki/Security/ProxyFileFormat [OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCacheHandle), - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, OwnCert} + {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, OwnCert} catch _Error:_Reason -> - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, undefined} + {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, undefined} end; init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, - PemCacheHandle, CacheRef, CertFile, server) -> + PemCacheHandle, CacheRef, CRLDbInfo, CertFile, server) -> try [OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCacheHandle), - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, OwnCert} + {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, OwnCert} catch _:Reason -> file_error(CertFile, {certfile, Reason}) end; -init_certificates(Cert, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, _, _) -> - {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, Cert}. +init_certificates(Cert, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, _, _) -> + {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, Cert}. init_private_key(_, undefined, <<>>, _Password, _Client) -> undefined; diff --git a/lib/ssl/src/ssl_connection.erl b/lib/ssl/src/ssl_connection.erl index b6059eac58..08d0145aa7 100644 --- a/lib/ssl/src/ssl_connection.erl +++ b/lib/ssl/src/ssl_connection.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2013-2014. All Rights Reserved. +%% Copyright Ericsson AB 2013-2015. All Rights Reserved. 
%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -411,11 +411,15 @@ certify(#certificate{} = Cert, role = Role, cert_db = CertDbHandle, cert_db_ref = CertDbRef, + crl_db = CRLDbInfo, ssl_options = Opts} = State, Connection) -> - case ssl_handshake:certify(Cert, CertDbHandle, CertDbRef, Opts#ssl_options.depth, + case ssl_handshake:certify(Cert, CertDbHandle, CertDbRef, + Opts#ssl_options.depth, Opts#ssl_options.verify, Opts#ssl_options.verify_fun, Opts#ssl_options.partial_chain, + Opts#ssl_options.crl_check, + CRLDbInfo, Role) of {PeerCert, PublicKeyInfo} -> handle_peer_cert(Role, PeerCert, PublicKeyInfo, @@ -964,7 +968,7 @@ format_status(terminate, [_, State]) -> %%% Internal functions %%-------------------------------------------------------------------- ssl_config(Opts, Role, State) -> - {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, OwnCert, Key, DHParams} = + {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, CRLDbInfo, OwnCert, Key, DHParams} = ssl_config:init(Opts, Role), Handshake = ssl_handshake:init_handshake_history(), TimeStamp = calendar:datetime_to_gregorian_seconds({date(), time()}), @@ -975,6 +979,7 @@ ssl_config(Opts, Role, State) -> file_ref_db = FileRefHandle, cert_db_ref = Ref, cert_db = CertDbHandle, + crl_db = CRLDbInfo, session_cache = CacheHandle, private_key = Key, diffie_hellman_params = DHParams, diff --git a/lib/ssl/src/ssl_connection.hrl b/lib/ssl/src/ssl_connection.hrl index b9a1ef3a84..ac3b26e4bf 100644 --- a/lib/ssl/src/ssl_connection.hrl +++ b/lib/ssl/src/ssl_connection.hrl @@ -1,8 +1,7 @@ - %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2013-2014. All Rights Reserved. +%% Copyright Ericsson AB 2013-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -53,6 +52,7 @@ session :: #session{} | secret_printout(), session_cache :: db_handle(), session_cache_cb :: atom(), + crl_db :: term(), negotiated_version :: ssl_record:ssl_version(), client_certificate_requested = false :: boolean(), key_algorithm :: ssl_cipher:key_algo(), diff --git a/lib/ssl/src/ssl_crl.erl b/lib/ssl/src/ssl_crl.erl new file mode 100644 index 0000000000..1a08d3c80a --- /dev/null +++ b/lib/ssl/src/ssl_crl.erl @@ -0,0 +1,80 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2015-2015. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% + +%---------------------------------------------------------------------- +%% Purpose: CRL handling +%%---------------------------------------------------------------------- + +-module(ssl_crl). + +-include("ssl_alert.hrl"). +-include("ssl_internal.hrl"). +-include_lib("public_key/include/public_key.hrl"). + +-export([trusted_cert_and_path/3]). 
+ +trusted_cert_and_path(CRL, {SerialNumber, Issuer},{Db, DbRef} = DbHandle) -> + case ssl_pkix_db:lookup_trusted_cert(Db, DbRef, SerialNumber, Issuer) of + undefined -> + trusted_cert_and_path(CRL, issuer_not_found, DbHandle); + {ok, {_, OtpCert}} -> + {ok, Root, Chain} = ssl_certificate:certificate_chain(OtpCert, Db, DbRef), + {ok, Root, lists:reverse(Chain)} + end; + +trusted_cert_and_path(CRL, issuer_not_found, {Db, DbRef} = DbHandle) -> + try find_issuer(CRL, DbHandle) of + OtpCert -> + {ok, Root, Chain} = ssl_certificate:certificate_chain(OtpCert, Db, DbRef), + {ok, Root, lists:reverse(Chain)} + catch + throw:_ -> + {error, issuer_not_found} + end. + +find_issuer(CRL, {Db,_}) -> + Issuer = public_key:pkix_normalize_name(public_key:pkix_crl_issuer(CRL)), + IsIssuerFun = + fun({_Key, {_Der,ErlCertCandidate}}, Acc) -> + verify_crl_issuer(CRL, ErlCertCandidate, Issuer, Acc); + (_, Acc) -> + Acc + end, + + try ssl_pkix_db:foldl(IsIssuerFun, issuer_not_found, Db) of + issuer_not_found -> + {error, issuer_not_found} + catch + {ok, IssuerCert} -> + IssuerCert + end. + + +verify_crl_issuer(CRL, ErlCertCandidate, Issuer, NotIssuer) -> + TBSCert = ErlCertCandidate#'OTPCertificate'.tbsCertificate, + case public_key:pkix_normalize_name(TBSCert#'OTPTBSCertificate'.subject) of + Issuer -> + case public_key:pkix_crl_verify(CRL, ErlCertCandidate) of + true -> + throw({ok, ErlCertCandidate}); + false -> + NotIssuer + end; + _ -> + NotIssuer + end. diff --git a/lib/ssl/src/ssl_crl_cache.erl b/lib/ssl/src/ssl_crl_cache.erl new file mode 100644 index 0000000000..b9d6a61c3b --- /dev/null +++ b/lib/ssl/src/ssl_crl_cache.erl @@ -0,0 +1,179 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2015-2015. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% + +%---------------------------------------------------------------------- +%% Purpose: Simple default CRL cache +%%---------------------------------------------------------------------- + +-module(ssl_crl_cache). + +-include("ssl_internal.hrl"). +-include_lib("public_key/include/public_key.hrl"). + +-behaviour(ssl_crl_cache_api). + +-export([lookup/2, select/2, fresh_crl/2]). +-export([insert/1, insert/2, delete/1]). + +%%==================================================================== +%% Cache callback API +%%==================================================================== + +lookup(#'DistributionPoint'{distributionPoint = {fullName, Names}}, + CRLDbInfo) -> + get_crls(Names, CRLDbInfo); +lookup(_,_) -> + not_available. + +select(Issuer, {{_Cache, Mapping},_}) -> + case ssl_pkix_db:lookup(Issuer, Mapping) of + undefined -> + []; + CRLs -> + CRLs + end. + +fresh_crl(#'DistributionPoint'{distributionPoint = {fullName, Names}}, CRL) -> + case get_crls(Names, undefined) of + not_available -> + CRL; + [NewCRL] -> + NewCRL + end. 
+ +%%==================================================================== +%% API +%%==================================================================== + +insert(CRLs) -> + insert(?NO_DIST_POINT, CRLs). + +insert(URI, {file, File}) when is_list(URI) -> + case file:read_file(File) of + {ok, PemBin} -> + PemEntries = public_key:pem_decode(PemBin), + CRLs = [ CRL || {'CertificateList', CRL, not_encrypted} + <- PemEntries], + do_insert(URI, CRLs); + Error -> + Error + end; +insert(URI, {der, CRLs}) -> + do_insert(URI, CRLs). + +delete({file, File}) -> + case file:read_file(File) of + {ok, PemBin} -> + PemEntries = public_key:pem_decode(PemBin), + CRLs = [ CRL || {'CertificateList', CRL, not_encrypted} + <- PemEntries], + ssl_manager:delete_crls({?NO_DIST_POINT, CRLs}); + Error -> + Error + end; +delete({der, CRLs}) -> + ssl_manager:delete_crls({?NO_DIST_POINT, CRLs}); + +delete(URI) -> + case http_uri:parse(URI) of + {ok, {http, _, _ , _, Path,_}} -> + ssl_manager:delete_crls(string:strip(Path, left, $/)); + _ -> + {error, {only_http_distribution_points_supported, URI}} + end. + +%%-------------------------------------------------------------------- +%%% Internal functions +%%-------------------------------------------------------------------- +do_insert(URI, CRLs) -> + case http_uri:parse(URI) of + {ok, {http, _, _ , _, Path,_}} -> + ssl_manager:insert_crls(string:strip(Path, left, $/), CRLs); + _ -> + {error, {only_http_distribution_points_supported, URI}} + end. + +get_crls([], _) -> + not_available; +get_crls([{uniformResourceIdentifier, "http"++_ = URL} | Rest], + CRLDbInfo) -> + case cache_lookup(URL, CRLDbInfo) of + [] -> + handle_http(URL, Rest, CRLDbInfo); + CRLs -> + CRLs + end; +get_crls([ _| Rest], CRLDbInfo) -> + %% unsupported CRL location + get_crls(Rest, CRLDbInfo). + +http_lookup(URL, Rest, CRLDbInfo, Timeout) -> + case application:ensure_started(inets) of + ok -> + http_get(URL, Rest, CRLDbInfo, Timeout); + _ -> + get_crls(Rest, CRLDbInfo) + end. + +http_get(URL, Rest, CRLDbInfo, Timeout) -> + case httpc:request(get, {URL, [{"connection", "close"}]}, + [{timeout, Timeout}], [{body_format, binary}]) of + {ok, {_Status, _Headers, Body}} -> + case Body of + <<"-----BEGIN", _/binary>> -> + Pem = public_key:pem_decode(Body), + lists:filtermap(fun({'CertificateList', + CRL, not_encrypted}) -> + {true, CRL}; + (_) -> + false + end, Pem); + _ -> + try public_key:der_decode('CertificateList', Body) of + _ -> + [Body] + catch + _:_ -> + get_crls(Rest, CRLDbInfo) + end + end; + {error, _Reason} -> + get_crls(Rest, CRLDbInfo) + end. + +cache_lookup(_, undefined) -> + []; +cache_lookup(URL, {{Cache, _}, _}) -> + {ok, {_, _, _ , _, Path,_}} = http_uri:parse(URL), + case ssl_pkix_db:lookup(string:strip(Path, left, $/), Cache) of + undefined -> + []; + CRLs -> + CRLs + end. + +handle_http(URI, Rest, {_, [{http, Timeout}]} = CRLDbInfo) -> + CRLs = http_lookup(URI, Rest, CRLDbInfo, Timeout), + %% Uncomment to improve performance, but need to + %% implement cache limit and or cleaning to prevent + %% DoS attack possibilities + %%insert(URI, {der, CRLs}), + CRLs; +handle_http(_, Rest, CRLDbInfo) -> + get_crls(Rest, CRLDbInfo). + diff --git a/lib/ssl/src/ssl_crl_cache_api.erl b/lib/ssl/src/ssl_crl_cache_api.erl new file mode 100644 index 0000000000..0915ba12e5 --- /dev/null +++ b/lib/ssl/src/ssl_crl_cache_api.erl @@ -0,0 +1,30 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2015-2015. All Rights Reserved. 
+%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/. +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% + +%% + +-module(ssl_crl_cache_api). + +-include_lib("public_key/include/public_key.hrl"). + +-type db_handle() :: term(). + +-callback lookup(#'DistributionPoint'{}, db_handle()) -> not_available | [public_key:der_encode()]. +-callback select(term(), db_handle()) -> [public_key:der_encode()]. +-callback fresh_crl(#'DistributionPoint'{}, public_key:der_encode()) -> public_key:der_encode(). diff --git a/lib/ssl/src/ssl_handshake.erl b/lib/ssl/src/ssl_handshake.erl index 88ccb94e0b..5c5f386c6f 100644 --- a/lib/ssl/src/ssl_handshake.erl +++ b/lib/ssl/src/ssl_handshake.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2013-2014. All Rights Reserved. +%% Copyright Ericsson AB 2013-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -49,7 +49,7 @@ finished/5, next_protocol/1]). %% Handle handshake messages --export([certify/8, client_certificate_verify/6, certificate_verify/6, verify_signature/5, +-export([certify/10, client_certificate_verify/6, certificate_verify/6, verify_signature/5, master_secret/5, server_key_exchange_hash/2, verify_connection/6, init_handshake_history/0, update_handshake_history/2, verify_server_key/5 ]). @@ -149,7 +149,7 @@ client_hello_extensions(Host, Version, CipherSuites, SslOpts, ConnectionStates, certificate(OwnCert, CertDbHandle, CertDbRef, client) -> Chain = case ssl_certificate:certificate_chain(OwnCert, CertDbHandle, CertDbRef) of - {ok, CertChain} -> + {ok, _, CertChain} -> CertChain; {error, _} -> %% If no suitable certificate is available, the client @@ -161,7 +161,7 @@ certificate(OwnCert, CertDbHandle, CertDbRef, client) -> certificate(OwnCert, CertDbHandle, CertDbRef, server) -> case ssl_certificate:certificate_chain(OwnCert, CertDbHandle, CertDbRef) of - {ok, Chain} -> + {ok, _, Chain} -> #certificate{asn1_certificates = Chain}; {error, _} -> ?ALERT_REC(?FATAL, ?INTERNAL_ERROR) @@ -383,49 +383,24 @@ verify_signature(_Version, Hash, {HashAlgo, ecdsa}, Signature, %%-------------------------------------------------------------------- -spec certify(#certificate{}, db_handle(), certdb_ref(), integer() | nolimit, - verify_peer | verify_none, {fun(), term}, fun(), + verify_peer | verify_none, {fun(), term}, fun(), term(), term(), client | server) -> {der_cert(), public_key_info()} | #alert{}. 
%% %% Description: Handles a certificate handshake message %%-------------------------------------------------------------------- certify(#certificate{asn1_certificates = ASN1Certs}, CertDbHandle, CertDbRef, - MaxPathLen, _Verify, VerifyFunAndState, PartialChain, Role) -> + MaxPathLen, _Verify, ValidationFunAndState0, PartialChain, CRLCheck, CRLDbHandle, Role) -> [PeerCert | _] = ASN1Certs, - - ValidationFunAndState = - case VerifyFunAndState of - undefined -> - {fun(OtpCert, ExtensionOrVerifyResult, SslState) -> - ssl_certificate:validate_extension(OtpCert, - ExtensionOrVerifyResult, SslState) - end, Role}; - {Fun, UserState0} -> - {fun(OtpCert, {extension, _} = Extension, {SslState, UserState}) -> - case ssl_certificate:validate_extension(OtpCert, - Extension, - SslState) of - {valid, NewSslState} -> - {valid, {NewSslState, UserState}}; - {fail, Reason} -> - apply_user_fun(Fun, OtpCert, Reason, UserState, - SslState); - {unknown, _} -> - apply_user_fun(Fun, OtpCert, - Extension, UserState, SslState) - end; - (OtpCert, VerifyResult, {SslState, UserState}) -> - apply_user_fun(Fun, OtpCert, VerifyResult, UserState, - SslState) - end, {Role, UserState0}} - end, + + ValidationFunAndState = validation_fun_and_state(ValidationFunAndState0, Role, + CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle), try - {TrustedErlCert, CertPath} = + {TrustedCert, CertPath} = ssl_certificate:trusted_cert_and_path(ASN1Certs, CertDbHandle, CertDbRef, PartialChain), - case public_key:pkix_path_validation(TrustedErlCert, - CertPath, - [{max_path_length, - MaxPathLen}, + case public_key:pkix_path_validation(TrustedCert, + CertPath, + [{max_path_length, MaxPathLen}, {verify_fun, ValidationFunAndState}]) of {ok, {PublicKeyInfo,_}} -> {PeerCert, PublicKeyInfo}; @@ -1374,15 +1349,66 @@ sni1(Hostname) -> %%-------------------------------------------------------------------- %%% Internal functions %%-------------------------------------------------------------------- +validation_fun_and_state({Fun, UserState0}, Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle) -> + {fun(OtpCert, {extension, _} = Extension, {SslState, UserState}) -> + case ssl_certificate:validate(OtpCert, + Extension, + SslState) of + {valid, NewSslState} -> + {valid, {NewSslState, UserState}}; + {fail, Reason} -> + apply_user_fun(Fun, OtpCert, Reason, UserState, + SslState); + {unknown, _} -> + apply_user_fun(Fun, OtpCert, + Extension, UserState, SslState) + end; + (OtpCert, VerifyResult, {SslState, UserState}) -> + apply_user_fun(Fun, OtpCert, VerifyResult, UserState, + SslState) + end, {{Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle}, UserState0}}; +validation_fun_and_state(undefined, Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle) -> + {fun(OtpCert, {extension, _} = Extension, SslState) -> + ssl_certificate:validate(OtpCert, + Extension, + SslState); + (OtpCert, VerifyResult, SslState) when (VerifyResult == valid) or (VerifyResult == valid_peer) -> + case crl_check(OtpCert, CRLCheck, CertDbHandle, CertDbRef, CRLDbHandle, VerifyResult) of + valid -> + {VerifyResult, SslState}; + Reason -> + {fail, Reason} + end; + (OtpCert, VerifyResult, SslState) -> + ssl_certificate:validate(OtpCert, + VerifyResult, + SslState) + end, {Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle}}. 
+ +apply_user_fun(Fun, OtpCert, VerifyResult, UserState0, + {_, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle} = SslState) when + (VerifyResult == valid) or (VerifyResult == valid_peer) -> + case Fun(OtpCert, VerifyResult, UserState0) of + {Valid, UserState} when (Valid == valid) or (Valid == valid_peer) -> + case crl_check(OtpCert, CRLCheck, CertDbHandle, CertDbRef, CRLDbHandle, VerifyResult) of + valid -> + {Valid, {SslState, UserState}}; + Result -> + apply_user_fun(Fun, OtpCert, Result, UserState, SslState) + end; + {fail, _} = Fail -> + Fail + end; apply_user_fun(Fun, OtpCert, ExtensionOrError, UserState0, SslState) -> case Fun(OtpCert, ExtensionOrError, UserState0) of - {valid, UserState} -> - {valid, {SslState, UserState}}; + {Valid, UserState} when (Valid == valid) or (Valid == valid_peer)-> + {Valid, {SslState, UserState}}; {fail, _} = Fail -> Fail; {unknown, UserState} -> {unknown, {SslState, UserState}} end. + path_validation_alert({bad_cert, cert_expired}) -> ?ALERT_REC(?FATAL, ?CERTIFICATE_EXPIRED); path_validation_alert({bad_cert, invalid_issuer}) -> @@ -1393,8 +1419,10 @@ path_validation_alert({bad_cert, name_not_permitted}) -> ?ALERT_REC(?FATAL, ?BAD_CERTIFICATE); path_validation_alert({bad_cert, unknown_critical_extension}) -> ?ALERT_REC(?FATAL, ?UNSUPPORTED_CERTIFICATE); -path_validation_alert({bad_cert, cert_revoked}) -> +path_validation_alert({bad_cert, {revoked, _}}) -> ?ALERT_REC(?FATAL, ?CERTIFICATE_REVOKED); +path_validation_alert({bad_cert, revocation_status_undetermined}) -> + ?ALERT_REC(?FATAL, ?BAD_CERTIFICATE); path_validation_alert({bad_cert, selfsigned_peer}) -> ?ALERT_REC(?FATAL, ?BAD_CERTIFICATE); path_validation_alert({bad_cert, unknown_ca}) -> @@ -1435,6 +1463,7 @@ calc_finished({3, N}, Role, PrfAlgo, MasterSecret, Handshake) -> master_secret(_RecordCB, Version, MasterSecret, #security_parameters{ + bulk_cipher_algorithm = BCA, client_random = ClientRandom, server_random = ServerRandom, hash_size = HashSize, @@ -1453,8 +1482,8 @@ master_secret(_RecordCB, Version, MasterSecret, ssl_record:set_mac_secret(ClientWriteMacSecret, ServerWriteMacSecret, Role, ConnStates1), - ClientCipherState = #cipher_state{iv = ClientIV, key = ClientWriteKey}, - ServerCipherState = #cipher_state{iv = ServerIV, key = ServerWriteKey}, + ClientCipherState = ssl_cipher:cipher_init(BCA, ClientIV, ClientWriteKey), + ServerCipherState = ssl_cipher:cipher_init(BCA, ServerIV, ServerWriteKey), {MasterSecret, ssl_record:set_pending_cipher_state(ConnStates2, ClientCipherState, ServerCipherState, Role)}. @@ -1953,3 +1982,70 @@ handle_psk_identity(_PSKIdentity, LookupFun) error; handle_psk_identity(PSKIdentity, {Fun, UserState}) -> Fun(psk, PSKIdentity, UserState). + +crl_check(_, false, _,_,_, _) -> + valid; +crl_check(_, peer, _, _,_, valid) -> %% Do not check CAs with this option. 
+ valid; +crl_check(OtpCert, Check, CertDbHandle, CertDbRef, {Callback, CRLDbHandle}, _) -> + Options = [{issuer_fun, {fun(_DP, CRL, Issuer, DBInfo) -> + ssl_crl:trusted_cert_and_path(CRL, Issuer, DBInfo) + end, {CertDbHandle, CertDbRef}}}, + {update_crl, fun(DP, CRL) -> Callback:fresh_crl(DP, CRL) end} + ], + case dps_and_crls(OtpCert, Callback, CRLDbHandle, ext) of + no_dps -> + case dps_and_crls(OtpCert, Callback, CRLDbHandle, same_issuer) of + [] -> + valid; %% No relevant CRL existed + DpsAndCRls -> + crl_check_same_issuer(OtpCert, Check, DpsAndCRls, Options) + end; + DpsAndCRLs -> %% This DP list may be empty if relevant CRLs existed + %% but could not be retrived, will result in {bad_cert, revocation_status_undetermined} + case public_key:pkix_crls_validate(OtpCert, DpsAndCRLs, Options) of + {bad_cert, revocation_status_undetermined} -> + crl_check_same_issuer(OtpCert, Check, dps_and_crls(OtpCert, Callback, + CRLDbHandle, same_issuer), Options); + Other -> + Other + end + end. + +crl_check_same_issuer(OtpCert, best_effort, Dps, Options) -> + case public_key:pkix_crls_validate(OtpCert, Dps, Options) of + {bad_cert, revocation_status_undetermined} -> + valid; + Other -> + Other + end; +crl_check_same_issuer(OtpCert, _, Dps, Options) -> + public_key:pkix_crls_validate(OtpCert, Dps, Options). + +dps_and_crls(OtpCert, Callback, CRLDbHandle, ext) -> + case public_key:pkix_dist_points(OtpCert) of + [] -> + no_dps; + DistPoints -> + distpoints_lookup(DistPoints, Callback, CRLDbHandle) + end; + +dps_and_crls(OtpCert, Callback, CRLDbHandle, same_issuer) -> + DP = #'DistributionPoint'{distributionPoint = {fullName, GenNames}} = + public_key:pkix_dist_point(OtpCert), + CRLs = lists:flatmap(fun({directoryName, Issuer}) -> + Callback:select(Issuer, CRLDbHandle); + (_) -> + [] + end, GenNames), + [{DP, {CRL, public_key:der_decode('CertificateList', CRL)}} || CRL <- CRLs]. + +distpoints_lookup([], _, _) -> + []; +distpoints_lookup([DistPoint | Rest], Callback, CRLDbHandle) -> + case Callback:lookup(DistPoint, CRLDbHandle) of + not_available -> + distpoints_lookup(Rest, Callback, CRLDbHandle); + CRLs -> + [{DistPoint, {CRL, public_key:der_decode('CertificateList', CRL)}} || CRL <- CRLs] + end. diff --git a/lib/ssl/src/ssl_internal.hrl b/lib/ssl/src/ssl_internal.hrl index 88105cac5a..8df79f9e8c 100644 --- a/lib/ssl/src/ssl_internal.hrl +++ b/lib/ssl/src/ssl_internal.hrl @@ -61,14 +61,19 @@ -define(CDR_HDR_SIZE, 12). -define(DEFAULT_TIMEOUT, 5000). +-define(NO_DIST_POINT, "http://dummy/no_distribution_point"). +-define(NO_DIST_POINT_PATH, "dummy/no_distribution_point"). %% Common enumerate values in for SSL-protocols -define(NULL, 0). -define(TRUE, 0). -define(FALSE, 1). --define(ALL_SUPPORTED_VERSIONS, ['tlsv1.2', 'tlsv1.1', tlsv1, sslv3]). --define(MIN_SUPPORTED_VERSIONS, ['tlsv1.1', tlsv1, sslv3]). +%% sslv3 is considered insecure due to lack of padding check (Poodle attack) +%% Keep as interop with legacy software but do not support as default +-define(ALL_AVAILABLE_VERSIONS, ['tlsv1.2', 'tlsv1.1', tlsv1, sslv3]). +-define(ALL_SUPPORTED_VERSIONS, ['tlsv1.2', 'tlsv1.1', tlsv1]). +-define(MIN_SUPPORTED_VERSIONS, ['tlsv1.1', tlsv1]). -define(ALL_DATAGRAM_SUPPORTED_VERSIONS, ['dtlsv1.2', dtlsv1]). -define(MIN_DATAGRAM_SUPPORTED_VERSIONS, ['dtlsv1.2', dtlsv1]). @@ -119,7 +124,9 @@ %% the client? honor_cipher_order = false, padding_check = true, - fallback = false + fallback = false, + crl_check, + crl_cache }). 
-record(socket_options, diff --git a/lib/ssl/src/ssl_manager.erl b/lib/ssl/src/ssl_manager.erl index c4f1f7f193..396013825e 100644 --- a/lib/ssl/src/ssl_manager.erl +++ b/lib/ssl/src/ssl_manager.erl @@ -26,10 +26,11 @@ %% Internal application API -export([start_link/1, start_link_dist/1, - connection_init/2, cache_pem_file/2, + connection_init/3, cache_pem_file/2, lookup_trusted_cert/4, new_session_id/1, clean_cert_db/2, register_session/2, register_session/3, invalidate_session/2, + insert_crls/2, insert_crls/3, delete_crls/1, delete_crls/2, invalidate_session/3, invalidate_pem/1, clear_pem_cache/0, manager_name/1]). % Spawn export @@ -44,7 +45,8 @@ -include_lib("kernel/include/file.hrl"). -record(state, { - session_cache, + session_cache_client, + session_cache_server, session_cache_cb, session_lifetime, certificate_db, @@ -99,19 +101,21 @@ start_link_dist(Opts) -> gen_server:start_link({local, DistMangerName}, ?MODULE, [DistMangerName, Opts], []). %%-------------------------------------------------------------------- --spec connection_init(binary()| {der, list()}, client | server) -> - {ok, certdb_ref(), db_handle(), db_handle(), db_handle(), db_handle()}. +-spec connection_init(binary()| {der, list()}, client | server, + {Cb :: atom(), Handle:: term()}) -> + {ok, certdb_ref(), db_handle(), db_handle(), + db_handle(), db_handle(), CRLInfo::term()}. %% %% Description: Do necessary initializations for a new connection. %%-------------------------------------------------------------------- -connection_init({der, _} = Trustedcerts, Role) -> - call({connection_init, Trustedcerts, Role}); +connection_init({der, _} = Trustedcerts, Role, CRLCache) -> + call({connection_init, Trustedcerts, Role, CRLCache}); -connection_init(<<>> = Trustedcerts, Role) -> - call({connection_init, Trustedcerts, Role}); +connection_init(<<>> = Trustedcerts, Role, CRLCache) -> + call({connection_init, Trustedcerts, Role, CRLCache}); -connection_init(Trustedcerts, Role) -> - call({connection_init, Trustedcerts, Role}). +connection_init(Trustedcerts, Role, CRLCache) -> + call({connection_init, Trustedcerts, Role, CRLCache}). %%-------------------------------------------------------------------- -spec cache_pem_file(binary(), term()) -> {ok, term()} | {error, reason()}. @@ -123,7 +127,7 @@ cache_pem_file(File, DbHandle) -> [{Content,_}] -> {ok, Content}; [Content] -> - {ok, Content}; + {ok, Content}; undefined -> call({cache_pem, File}) end. @@ -192,11 +196,28 @@ invalidate_session(Host, Port, Session) -> invalidate_session(Port, Session) -> cast({invalidate_session, Port, Session}). - -spec invalidate_pem(File::binary()) -> ok. invalidate_pem(File) -> cast({invalidate_pem, File}). +insert_crls(Path, CRLs)-> + insert_crls(Path, CRLs, normal). +insert_crls(?NO_DIST_POINT_PATH = Path, CRLs, ManagerType)-> + put(ssl_manager, manager_name(ManagerType)), + cast({insert_crls, Path, CRLs}); +insert_crls(Path, CRLs, ManagerType)-> + put(ssl_manager, manager_name(ManagerType)), + call({insert_crls, Path, CRLs}). + +delete_crls(Path)-> + delete_crls(Path, normal). +delete_crls(?NO_DIST_POINT_PATH = Path, ManagerType)-> + put(ssl_manager, manager_name(ManagerType)), + cast({delete_crls, Path}); +delete_crls(Path, ManagerType)-> + put(ssl_manager, manager_name(ManagerType)), + call({delete_crls, Path}). 
+ %%==================================================================== %% gen_server callbacks %%==================================================================== @@ -215,13 +236,17 @@ init([Name, Opts]) -> SessionLifeTime = proplists:get_value(session_lifetime, Opts, ?'24H_in_sec'), CertDb = ssl_pkix_db:create(), - SessionCache = CacheCb:init(proplists:get_value(session_cb_init_args, Opts, [])), + ClientSessionCache = CacheCb:init([{role, client} | + proplists:get_value(session_cb_init_args, Opts, [])]), + ServerSessionCache = CacheCb:init([{role, server} | + proplists:get_value(session_cb_init_args, Opts, [])]), Timer = erlang:send_after(SessionLifeTime * 1000 + 5000, self(), validate_sessions), Interval = pem_check_interval(), erlang:send_after(Interval, self(), clear_pem_cache), {ok, #state{certificate_db = CertDb, - session_cache = SessionCache, + session_cache_client = ClientSessionCache, + session_cache_server = ServerSessionCache, session_cache_cb = CacheCb, session_lifetime = SessionLifeTime, session_validation_timer = Timer, @@ -240,32 +265,38 @@ init([Name, Opts]) -> %% %% Description: Handling call messages %%-------------------------------------------------------------------- -handle_call({{connection_init, <<>>, _Role}, _Pid}, _From, - #state{certificate_db = [CertDb, FileRefDb, PemChace], - session_cache = Cache} = State) -> - Result = {ok, make_ref(),CertDb, FileRefDb, PemChace, Cache}, - {reply, Result, State}; - -handle_call({{connection_init, Trustedcerts, _Role}, Pid}, _From, - #state{certificate_db = [CertDb, FileRefDb, PemChace] = Db, - session_cache = Cache} = State) -> - Result = - try - {ok, Ref} = ssl_pkix_db:add_trusted_certs(Pid, Trustedcerts, Db), - {ok, Ref, CertDb, FileRefDb, PemChace, Cache} - catch - _:Reason -> - {error, Reason} - end, - {reply, Result, State}; - -handle_call({{new_session_id,Port}, _}, +handle_call({{connection_init, <<>>, Role, {CRLCb, UserCRLDb}}, _Pid}, _From, + #state{certificate_db = [CertDb, FileRefDb, PemChace | _] = Db} = State) -> + Ref = make_ref(), + Result = {ok, Ref, CertDb, FileRefDb, PemChace, session_cache(Role, State), {CRLCb, crl_db_info(Db, UserCRLDb)}}, + {reply, Result, State#state{certificate_db = Db}}; + +handle_call({{connection_init, Trustedcerts, Role, {CRLCb, UserCRLDb}}, Pid}, _From, + #state{certificate_db = [CertDb, FileRefDb, PemChace | _] = Db} = State) -> + case add_trusted_certs(Pid, Trustedcerts, Db) of + {ok, Ref} -> + {reply, {ok, Ref, CertDb, FileRefDb, PemChace, session_cache(Role, State), + {CRLCb, crl_db_info(Db, UserCRLDb)}}, State}; + {error, _} = Error -> + {reply, Error, State} + end; + +handle_call({{insert_crls, Path, CRLs}, _}, _From, + #state{certificate_db = Db} = State) -> + ssl_pkix_db:add_crls(Db, Path, CRLs), + {reply, ok, State}; + +handle_call({{delete_crls, CRLsOrPath}, _}, _From, + #state{certificate_db = Db} = State) -> + ssl_pkix_db:remove_crls(Db, CRLsOrPath), + {reply, ok, State}; + +handle_call({{new_session_id, Port}, _}, _, #state{session_cache_cb = CacheCb, - session_cache = Cache} = State) -> + session_cache_server = Cache} = State) -> Id = new_id(Port, ?GEN_UNIQUE_ID_MAX_TRIES, Cache, CacheCb), {reply, Id, State}; - handle_call({{cache_pem,File}, _Pid}, _, #state{certificate_db = Db} = State) -> try ssl_pkix_db:cache_pem_file(File, Db) of @@ -275,7 +306,7 @@ handle_call({{cache_pem,File}, _Pid}, _, _:Reason -> {reply, {error, Reason}, State} end; -handle_call({unconditionally_clear_pem_cache, _},_, #state{certificate_db = [_,_,PemChace]} = State) -> 
+handle_call({unconditionally_clear_pem_cache, _},_, #state{certificate_db = [_,_,PemChace | _]} = State) -> ssl_pkix_db:clear(PemChace), {reply, ok, State}. @@ -288,16 +319,22 @@ handle_call({unconditionally_clear_pem_cache, _},_, #state{certificate_db = [_,_ %% Description: Handling cast messages %%-------------------------------------------------------------------- handle_cast({register_session, Host, Port, Session}, - #state{session_cache = Cache, + #state{session_cache_client = Cache, session_cache_cb = CacheCb} = State) -> TimeStamp = calendar:datetime_to_gregorian_seconds({date(), time()}), NewSession = Session#session{time_stamp = TimeStamp}, - CacheCb:update(Cache, {{Host, Port}, - NewSession#session.session_id}, NewSession), + + case CacheCb:select_session(Cache, {Host, Port}) of + no_session -> + CacheCb:update(Cache, {{Host, Port}, + NewSession#session.session_id}, NewSession); + Sessions -> + register_unique_session(Sessions, NewSession, CacheCb, Cache, {Host, Port}) + end, {noreply, State}; handle_cast({register_session, Port, Session}, - #state{session_cache = Cache, + #state{session_cache_server = Cache, session_cache_cb = CacheCb} = State) -> TimeStamp = calendar:datetime_to_gregorian_seconds({date(), time()}), NewSession = Session#session{time_stamp = TimeStamp}, @@ -306,17 +343,28 @@ handle_cast({register_session, Port, Session}, handle_cast({invalidate_session, Host, Port, #session{session_id = ID} = Session}, - #state{session_cache = Cache, + #state{session_cache_client = Cache, session_cache_cb = CacheCb} = State) -> invalidate_session(Cache, CacheCb, {{Host, Port}, ID}, Session, State); handle_cast({invalidate_session, Port, #session{session_id = ID} = Session}, - #state{session_cache = Cache, + #state{session_cache_server = Cache, session_cache_cb = CacheCb} = State) -> invalidate_session(Cache, CacheCb, {Port, ID}, Session, State); + +handle_cast({insert_crls, Path, CRLs}, + #state{certificate_db = Db} = State) -> + ssl_pkix_db:add_crls(Db, Path, CRLs), + {noreply, State}; + +handle_cast({delete_crls, CRLsOrPath}, + #state{certificate_db = Db} = State) -> + ssl_pkix_db:remove_crls(Db, CRLsOrPath), + {noreply, State}; + handle_cast({invalidate_pem, File}, - #state{certificate_db = [_, _, PemCache]} = State) -> + #state{certificate_db = [_, _, PemCache | _]} = State) -> ssl_pkix_db:remove(File, PemCache), {noreply, State}. 
@@ -329,21 +377,23 @@ handle_cast({invalidate_pem, File}, %% Description: Handling all non call/cast messages %%------------------------------------------------------------------- handle_info(validate_sessions, #state{session_cache_cb = CacheCb, - session_cache = Cache, + session_cache_client = ClientCache, + session_cache_server = ServerCache, session_lifetime = LifeTime } = State) -> Timer = erlang:send_after(?SESSION_VALIDATION_INTERVAL, self(), validate_sessions), - start_session_validator(Cache, CacheCb, LifeTime), + start_session_validator(ClientCache, CacheCb, LifeTime), + start_session_validator(ServerCache, CacheCb, LifeTime), {noreply, State#state{session_validation_timer = Timer}}; -handle_info({delayed_clean_session, Key}, #state{session_cache = Cache, - session_cache_cb = CacheCb - } = State) -> + +handle_info({delayed_clean_session, Key, Cache}, #state{session_cache_cb = CacheCb + } = State) -> CacheCb:delete(Cache, Key), {noreply, State}; -handle_info(clear_pem_cache, #state{certificate_db = [_,_,PemChace], +handle_info(clear_pem_cache, #state{certificate_db = [_,_,PemChace | _], clear_pem_cache = Interval, last_pem_check = CheckPoint} = State) -> NewCheckPoint = os:timestamp(), @@ -351,9 +401,8 @@ handle_info(clear_pem_cache, #state{certificate_db = [_,_,PemChace], erlang:send_after(Interval, self(), clear_pem_cache), {noreply, State#state{last_pem_check = NewCheckPoint}}; - handle_info({clean_cert_db, Ref, File}, - #state{certificate_db = [CertDb,RefDb, PemCache]} = State) -> + #state{certificate_db = [CertDb,RefDb, PemCache | _]} = State) -> case ssl_pkix_db:lookup(Ref, RefDb) of undefined -> %% Alredy cleaned @@ -380,12 +429,14 @@ handle_info(_Info, State) -> %% The return value is ignored. %%-------------------------------------------------------------------- terminate(_Reason, #state{certificate_db = Db, - session_cache = SessionCache, + session_cache_client = ClientSessionCache, + session_cache_server = ServerSessionCache, session_cache_cb = CacheCb, session_validation_timer = Timer}) -> erlang:cancel_timer(Timer), ssl_pkix_db:remove(Db), - CacheCb:terminate(SessionCache), + catch CacheCb:terminate(ClientSessionCache), + catch CacheCb:terminate(ServerSessionCache), ok. %%-------------------------------------------------------------------- @@ -458,7 +509,7 @@ invalidate_session(Cache, CacheCb, Key, Session, #state{last_delay_timer = LastT %% up the session data but new connections should not get to use this session. CacheCb:update(Cache, Key, Session#session{is_resumable = false}), TRef = - erlang:send_after(delay_time(), self(), {delayed_clean_session, Key}), + erlang:send_after(delay_time(), self(), {delayed_clean_session, Key, Cache}), {noreply, State#state{last_delay_timer = last_delay_timer(Key, TRef, LastTimer)}} end. @@ -507,6 +558,37 @@ clean_cert_db(Ref, CertDb, RefDb, PemCache, File) -> ok end. +%% Do not let dumb clients create a gigantic session table +%% for itself creating big delays at connection time. +register_unique_session(Sessions, Session, CacheCb, Cache, PartialKey) -> + case exists_equivalent(Session , Sessions) of + true -> + ok; + false -> + CacheCb:update(Cache, {PartialKey, + Session#session.session_id}, Session) + end. 
+ +exists_equivalent(_, []) -> + false; +exists_equivalent(#session{ + peer_certificate = PeerCert, + own_certificate = OwnCert, + compression_method = Compress, + cipher_suite = CipherSuite, + srp_username = SRP, + ecc = ECC} , + [#session{ + peer_certificate = PeerCert, + own_certificate = OwnCert, + compression_method = Compress, + cipher_suite = CipherSuite, + srp_username = SRP, + ecc = ECC} | _]) -> + true; +exists_equivalent(Session, [ _ | Rest]) -> + exists_equivalent(Session, Rest). + start_pem_cache_validator(PemCache, CheckPoint) -> spawn_link(?MODULE, init_pem_cache_validator, [[get(ssl_manager), PemCache, CheckPoint]]). @@ -542,3 +624,21 @@ is_before_checkpoint(Time, CheckPoint) -> calendar:datetime_to_gregorian_seconds(calendar:now_to_datetime(CheckPoint)) - calendar:datetime_to_gregorian_seconds(Time) > 0. +add_trusted_certs(Pid, Trustedcerts, Db) -> + try + ssl_pkix_db:add_trusted_certs(Pid, Trustedcerts, Db) + catch + _:Reason -> + {error, Reason} + end. + +session_cache(client, #state{session_cache_client = Cache}) -> + Cache; +session_cache(server, #state{session_cache_server = Cache}) -> + Cache. + +crl_db_info([_,_,_,Local], {internal, Info}) -> + {Local, Info}; +crl_db_info(_, UserCRLDb) -> + UserCRLDb. + diff --git a/lib/ssl/src/ssl_pkix_db.erl b/lib/ssl/src/ssl_pkix_db.erl index 8531445ba4..d7b7e3eae3 100644 --- a/lib/ssl/src/ssl_pkix_db.erl +++ b/lib/ssl/src/ssl_pkix_db.erl @@ -27,9 +27,9 @@ -include_lib("public_key/include/public_key.hrl"). -include_lib("kernel/include/file.hrl"). --export([create/0, remove/1, add_trusted_certs/3, +-export([create/0, add_crls/3, remove_crls/2, remove/1, add_trusted_certs/3, remove_trusted_certs/2, insert/3, remove/2, clear/1, db_size/1, - ref_count/3, lookup_trusted_cert/4, foldl/3, + ref_count/3, lookup_trusted_cert/4, foldl/3, select_cert_by_issuer/2, lookup_cached_pem/2, cache_pem_file/2, cache_pem_file/3, lookup/2]). @@ -51,16 +51,24 @@ create() -> ets:new(ssl_otp_cacertificate_db, [set, public]), %% Let connection processes call ref_count/3 directly ets:new(ssl_otp_ca_file_ref, [set, public]), - ets:new(ssl_otp_pem_cache, [set, protected]) + ets:new(ssl_otp_pem_cache, [set, protected]), + %% Default cache + {ets:new(ssl_otp_crl_cache, [set, protected]), + ets:new(ssl_otp_crl_issuer_mapping, [bag, protected])} ]. %%-------------------------------------------------------------------- --spec remove([db_handle()]) -> ok. +-spec remove([db_handle()]) -> ok. %% %% Description: Removes database db %%-------------------------------------------------------------------- remove(Dbs) -> - lists:foreach(fun(Db) -> + lists:foreach(fun({Db0, Db1}) -> + true = ets:delete(Db0), + true = ets:delete(Db1); + (undefined) -> + ok; + (Db) -> true = ets:delete(Db) end, Dbs). @@ -81,7 +89,7 @@ lookup_trusted_cert(DbHandle, Ref, SerialNumber, Issuer) -> {ok, Certs} end. -lookup_cached_pem([_, _, PemChache], File) -> +lookup_cached_pem([_, _, PemChache | _], File) -> lookup_cached_pem(PemChache, File); lookup_cached_pem(PemChache, File) -> lookup(File, PemChache). @@ -94,12 +102,12 @@ lookup_cached_pem(PemChache, File) -> %% runtime database. Returns Ref that should be handed to lookup_trusted_cert %% together with the cert serialnumber and issuer. 
%%-------------------------------------------------------------------- -add_trusted_certs(_Pid, {der, DerList}, [CerDb, _,_]) -> +add_trusted_certs(_Pid, {der, DerList}, [CertDb, _,_ | _]) -> NewRef = make_ref(), - add_certs_from_der(DerList, NewRef, CerDb), + add_certs_from_der(DerList, NewRef, CertDb), {ok, NewRef}; -add_trusted_certs(_Pid, File, [CertsDb, RefDb, PemChache] = Db) -> +add_trusted_certs(_Pid, File, [CertsDb, RefDb, PemChache | _] = Db) -> case lookup_cached_pem(Db, File) of [{_Content, Ref}] -> ref_count(Ref, RefDb, 1), @@ -118,14 +126,15 @@ add_trusted_certs(_Pid, File, [CertsDb, RefDb, PemChache] = Db) -> %% Description: Cache file as binary in DB %%-------------------------------------------------------------------- -spec cache_pem_file(binary(), [db_handle()]) -> {ok, term()}. -cache_pem_file(File, [_CertsDb, _RefDb, PemChache]) -> +cache_pem_file(File, [_CertsDb, _RefDb, PemChache | _]) -> {ok, PemBin} = file:read_file(File), Content = public_key:pem_decode(PemBin), insert(File, Content, PemChache), {ok, Content}. + -spec cache_pem_file(reference(), binary(), [db_handle()]) -> {ok, term()}. -cache_pem_file(Ref, File, [_CertsDb, _RefDb, PemChache]) -> +cache_pem_file(Ref, File, [_CertsDb, _RefDb, PemChache| _]) -> {ok, PemBin} = file:read_file(File), Content = public_key:pem_decode(PemBin), insert(File, {Content, Ref}, PemChache), @@ -149,6 +158,15 @@ remove(Key, Db) -> ok. %%-------------------------------------------------------------------- +-spec remove(term(), term(), db_handle()) -> ok. +%% +%% Description: Removes an element in a <Db>. +%%-------------------------------------------------------------------- +remove(Key, Data, Db) -> + ets:delete_object(Db, {Key, Data}), + ok. + +%%-------------------------------------------------------------------- -spec lookup(term(), db_handle()) -> [term()] | undefined. %% %% Description: Looks up an element in a <Db>. @@ -175,6 +193,10 @@ lookup(Key, Db) -> foldl(Fun, Acc0, Cache) -> ets:foldl(Fun, Acc0, Cache). + +select_cert_by_issuer(Cache, Issuer) -> + ets:select(Cache, [{{{'_','_', Issuer},{'_', '$1'}},[],['$$']}]). + %%-------------------------------------------------------------------- -spec ref_count(term(), db_handle(), integer()) -> integer(). %% @@ -244,9 +266,39 @@ add_certs(Cert, Ref, CertsDb) -> error_logger:info_report(Report) end. -new_trusted_cert_entry(File, [CertsDb, RefDb, _] = Db) -> +new_trusted_cert_entry(File, [CertsDb, RefDb, _ | _] = Db) -> Ref = make_ref(), update_counter(Ref, 1, RefDb), {ok, Content} = cache_pem_file(Ref, File, Db), add_certs_from_pem(Content, Ref, CertsDb), {ok, Ref}. + +add_crls([_,_,_, {_, Mapping} | _], ?NO_DIST_POINT, CRLs) -> + [add_crls(CRL, Mapping) || CRL <- CRLs]; +add_crls([_,_,_, {Cache, Mapping} | _], Path, CRLs) -> + insert(Path, CRLs, Cache), + [add_crls(CRL, Mapping) || CRL <- CRLs]. + +add_crls(CRL, Mapping) -> + insert(crl_issuer(CRL), CRL, Mapping). + +remove_crls([_,_,_, {_, Mapping} | _], {?NO_DIST_POINT, CRLs}) -> + [rm_crls(CRL, Mapping) || CRL <- CRLs]; + +remove_crls([_,_,_, {Cache, Mapping} | _], Path) -> + case lookup(Path, Cache) of + undefined -> + ok; + CRLs -> + remove(Path, Cache), + [rm_crls(CRL, Mapping) || CRL <- CRLs] + end. + +rm_crls(CRL, Mapping) -> + remove(crl_issuer(CRL), CRL, Mapping). + +crl_issuer(DerCRL) -> + CRL = public_key:der_decode('CertificateList', DerCRL), + TBSCRL = CRL#'CertificateList'.tbsCertList, + TBSCRL#'TBSCertList'.issuer. 
+ diff --git a/lib/ssl/src/ssl_record.erl b/lib/ssl/src/ssl_record.erl index 025a46bf65..a02375a947 100644 --- a/lib/ssl/src/ssl_record.erl +++ b/lib/ssl/src/ssl_record.erl @@ -48,7 +48,8 @@ -export([compress/3, uncompress/3, compressions/0]). %% Payload encryption/decryption --export([cipher/4, decipher/4, is_correct_mac/2]). +-export([cipher/4, decipher/4, is_correct_mac/2, + cipher_aead/4, decipher_aead/4]). -export_type([ssl_version/0, ssl_atom_version/0]). @@ -376,6 +377,23 @@ cipher(Version, Fragment, {CipherFragment, CipherS1} = ssl_cipher:cipher(BulkCipherAlgo, CipherS0, MacHash, Fragment, Version), {CipherFragment, WriteState0#connection_state{cipher_state = CipherS1}}. +%%-------------------------------------------------------------------- +-spec cipher_aead(ssl_version(), iodata(), #connection_state{}, MacHash::binary()) -> + {CipherFragment::binary(), #connection_state{}}. +%% +%% Description: Payload encryption +%%-------------------------------------------------------------------- +cipher_aead(Version, Fragment, + #connection_state{cipher_state = CipherS0, + sequence_number = SeqNo, + security_parameters= + #security_parameters{bulk_cipher_algorithm = + BulkCipherAlgo} + } = WriteState0, AAD) -> + + {CipherFragment, CipherS1} = + ssl_cipher:cipher_aead(BulkCipherAlgo, CipherS0, SeqNo, AAD, Fragment, Version), + {CipherFragment, WriteState0#connection_state{cipher_state = CipherS1}}. %%-------------------------------------------------------------------- -spec decipher(ssl_version(), binary(), #connection_state{}, boolean()) -> {binary(), binary(), #connection_state{}} | #alert{}. @@ -397,6 +415,25 @@ decipher(Version, CipherFragment, Alert end. %%-------------------------------------------------------------------- +-spec decipher_aead(ssl_version(), binary(), #connection_state{}, binary()) -> {binary(), binary(), #connection_state{}} | #alert{}. +%% +%% Description: Payload decryption +%%-------------------------------------------------------------------- +decipher_aead(Version, CipherFragment, + #connection_state{sequence_number = SeqNo, + security_parameters = + #security_parameters{bulk_cipher_algorithm = + BulkCipherAlgo}, + cipher_state = CipherS0 + } = ReadState, AAD) -> + case ssl_cipher:decipher_aead(BulkCipherAlgo, CipherS0, SeqNo, AAD, CipherFragment, Version) of + {PlainFragment, CipherS1} -> + CS1 = ReadState#connection_state{cipher_state = CipherS1}, + {PlainFragment, CS1}; + #alert{} = Alert -> + Alert + end. +%%-------------------------------------------------------------------- %%% Internal functions %%-------------------------------------------------------------------- empty_connection_state(ConnectionEnd) -> diff --git a/lib/ssl/src/ssl_record.hrl b/lib/ssl/src/ssl_record.hrl index 6aab35d6da..53b5f2399b 100644 --- a/lib/ssl/src/ssl_record.hrl +++ b/lib/ssl/src/ssl_record.hrl @@ -90,11 +90,14 @@ -define('3DES', 4). -define(DES40, 5). -define(IDEA, 6). --define(AES, 7). +-define(AES_CBC, 7). +-define(AES_GCM, 8). +-define(CHACHA20_POLY1305, 9). %% CipherType -define(STREAM, 0). -define(BLOCK, 1). +-define(AEAD, 2). %% IsExportable %-define(TRUE, 0). %% Already defined by ssl_internal.hrl diff --git a/lib/ssl/src/ssl_session_cache.erl b/lib/ssl/src/ssl_session_cache.erl index 5c6ee3c54c..b011732f2c 100644 --- a/lib/ssl/src/ssl_session_cache.erl +++ b/lib/ssl/src/ssl_session_cache.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2008-2012. All Rights Reserved. +%% Copyright Ericsson AB 2008-2014. All Rights Reserved. 
%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -31,8 +31,8 @@ %%-------------------------------------------------------------------- %% Description: Return table reference. Called by ssl_manager process. %%-------------------------------------------------------------------- -init(_) -> - ets:new(cache_name(), [ordered_set, protected]). +init(Options) -> + ets:new(cache_name(proplists:get_value(role, Options)), [ordered_set, protected]). %%-------------------------------------------------------------------- %% Description: Handles cache table at termination of ssl manager. @@ -87,5 +87,5 @@ select_session(Cache, PartialKey) -> %%-------------------------------------------------------------------- %%% Internal functions %%-------------------------------------------------------------------- -cache_name() -> - ssl_otp_session_cache. +cache_name(Name) -> + list_to_atom(atom_to_list(Name) ++ "_ssl_otp_session_cache"). diff --git a/lib/ssl/src/ssl_v3.erl b/lib/ssl/src/ssl_v3.erl index 68f7f5dee2..169b39be32 100644 --- a/lib/ssl/src/ssl_v3.erl +++ b/lib/ssl/src/ssl_v3.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2007-2014. All Rights Reserved. +%% Copyright Ericsson AB 2007-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -143,9 +143,6 @@ suites() -> ?TLS_DHE_RSA_WITH_AES_128_CBC_SHA, ?TLS_DHE_DSS_WITH_AES_128_CBC_SHA, ?TLS_RSA_WITH_AES_128_CBC_SHA, - %%?TLS_RSA_WITH_IDEA_CBC_SHA, - ?TLS_RSA_WITH_RC4_128_SHA, - ?TLS_RSA_WITH_RC4_128_MD5, ?TLS_RSA_WITH_DES_CBC_SHA ]. diff --git a/lib/ssl/src/tls_handshake.erl b/lib/ssl/src/tls_handshake.erl index b0b6d5a8e3..548ec4aebe 100644 --- a/lib/ssl/src/tls_handshake.erl +++ b/lib/ssl/src/tls_handshake.erl @@ -82,8 +82,7 @@ client_hello(Host, Port, ConnectionStates, boolean()) -> {tls_record:tls_version(), session_id(), #connection_states{}, binary() | undefined}| {tls_record:tls_version(), {resumed | new, #session{}}, #connection_states{}, - [binary()] | undefined, - [ssl_handshake:oid()] | undefined, [ssl_handshake:oid()] | undefined} | + #hello_extensions{}} | #alert{}. 
%% %% Description: Handles a recieved hello message diff --git a/lib/ssl/src/tls_record.erl b/lib/ssl/src/tls_record.erl index 168b2c8fd3..14a49ac7da 100644 --- a/lib/ssl/src/tls_record.erl +++ b/lib/ssl/src/tls_record.erl @@ -132,6 +132,23 @@ encode_plain_text(Type, Version, Data, sequence_number = Seq, compression_state=CompS0, security_parameters= + #security_parameters{ + cipher_type = ?AEAD, + compression_algorithm=CompAlg} + }= WriteState0} = ConnectionStates) -> + {Comp, CompS1} = ssl_record:compress(CompAlg, Data, CompS0), + WriteState1 = WriteState0#connection_state{compression_state = CompS1}, + AAD = calc_aad(Type, Version, WriteState1), + {CipherFragment, WriteState} = ssl_record:cipher_aead(Version, Comp, WriteState1, AAD), + CipherText = encode_tls_cipher_text(Type, Version, CipherFragment), + {CipherText, ConnectionStates#connection_states{current_write = WriteState#connection_state{sequence_number = Seq +1}}}; + +encode_plain_text(Type, Version, Data, + #connection_states{current_write = + #connection_state{ + sequence_number = Seq, + compression_state=CompS0, + security_parameters= #security_parameters{compression_algorithm=CompAlg} }= WriteState0} = ConnectionStates) -> {Comp, CompS1} = ssl_record:compress(CompAlg, Data, CompS0), @@ -154,14 +171,39 @@ decode_cipher_text(#ssl_tls{type = Type, version = Version, compression_state = CompressionS0, sequence_number = Seq, security_parameters= - #security_parameters{compression_algorithm = CompressAlg} + #security_parameters{ + cipher_type = ?AEAD, + compression_algorithm=CompAlg} + } = ReadState0} = ConnnectionStates0, _) -> + AAD = calc_aad(Type, Version, ReadState0), + case ssl_record:decipher_aead(Version, CipherFragment, ReadState0, AAD) of + {PlainFragment, ReadState1} -> + {Plain, CompressionS1} = ssl_record:uncompress(CompAlg, + PlainFragment, CompressionS0), + ConnnectionStates = ConnnectionStates0#connection_states{ + current_read = ReadState1#connection_state{ + sequence_number = Seq + 1, + compression_state = CompressionS1}}, + {CipherText#ssl_tls{fragment = Plain}, ConnnectionStates}; + #alert{} = Alert -> + Alert + end; + +decode_cipher_text(#ssl_tls{type = Type, version = Version, + fragment = CipherFragment} = CipherText, + #connection_states{current_read = + #connection_state{ + compression_state = CompressionS0, + sequence_number = Seq, + security_parameters= + #security_parameters{compression_algorithm=CompAlg} } = ReadState0} = ConnnectionStates0, PaddingCheck) -> case ssl_record:decipher(Version, CipherFragment, ReadState0, PaddingCheck) of {PlainFragment, Mac, ReadState1} -> MacHash = calc_mac_hash(Type, Version, PlainFragment, ReadState1), case ssl_record:is_correct_mac(Mac, MacHash) of true -> - {Plain, CompressionS1} = ssl_record:uncompress(CompressAlg, + {Plain, CompressionS1} = ssl_record:uncompress(CompAlg, PlainFragment, CompressionS0), ConnnectionStates = ConnnectionStates0#connection_states{ current_read = ReadState1#connection_state{ @@ -276,8 +318,17 @@ supported_protocol_versions([]) -> Vsns; supported_protocol_versions([_|_] = Vsns) -> - Vsns. - + case sufficient_tlsv1_2_crypto_support() of + true -> + Vsns; + false -> + case Vsns -- ['tlsv1.2'] of + [] -> + ?MIN_SUPPORTED_VERSIONS; + NewVsns -> + NewVsns + end + end. %%-------------------------------------------------------------------- %% %% Description: ssl version 2 is not acceptable security risks are too big. 
@@ -331,3 +382,7 @@ calc_mac_hash(Type, Version, mac_hash(Version, SecPars#security_parameters.mac_algorithm, MacSecret, SeqNo, Type, Length, PlainFragment). + +calc_aad(Type, {MajVer, MinVer}, + #connection_state{sequence_number = SeqNo}) -> + <<SeqNo:64/integer, ?BYTE(Type), ?BYTE(MajVer), ?BYTE(MinVer)>>. diff --git a/lib/ssl/src/tls_v1.erl b/lib/ssl/src/tls_v1.erl index 7a5f9c1b38..559fc1d6a8 100644 --- a/lib/ssl/src/tls_v1.erl +++ b/lib/ssl/src/tls_v1.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2007-2014. All Rights Reserved. +%% Copyright Ericsson AB 2007-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -208,38 +208,55 @@ suites(Minor) when Minor == 1; Minor == 2 -> ?TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA, ?TLS_ECDH_RSA_WITH_AES_128_CBC_SHA, ?TLS_RSA_WITH_AES_128_CBC_SHA, - - ?TLS_ECDHE_ECDSA_WITH_RC4_128_SHA, - ?TLS_ECDHE_RSA_WITH_RC4_128_SHA, - ?TLS_RSA_WITH_RC4_128_SHA, - ?TLS_RSA_WITH_RC4_128_MD5, ?TLS_DHE_RSA_WITH_DES_CBC_SHA, - ?TLS_ECDH_ECDSA_WITH_RC4_128_SHA, - ?TLS_ECDH_RSA_WITH_RC4_128_SHA, - ?TLS_RSA_WITH_DES_CBC_SHA ]; suites(3) -> [ + ?TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256, + ?TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256, + + ?TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, + ?TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, ?TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384, ?TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384, + ?TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384, + ?TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384, ?TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384, ?TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384, + ?TLS_DHE_RSA_WITH_CHACHA20_POLY1305_SHA256, + ?TLS_DHE_RSA_WITH_AES_256_GCM_SHA384, + ?TLS_DHE_DSS_WITH_AES_256_GCM_SHA384, ?TLS_DHE_RSA_WITH_AES_256_CBC_SHA256, ?TLS_DHE_DSS_WITH_AES_256_CBC_SHA256, + ?TLS_RSA_WITH_AES_256_GCM_SHA384, ?TLS_RSA_WITH_AES_256_CBC_SHA256, + ?TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, + ?TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, ?TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256, ?TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256, + ?TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256, + ?TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256, ?TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA256, ?TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256, + ?TLS_DHE_RSA_WITH_AES_128_GCM_SHA256, + ?TLS_DHE_DSS_WITH_AES_128_GCM_SHA256, ?TLS_DHE_RSA_WITH_AES_128_CBC_SHA256, ?TLS_DHE_DSS_WITH_AES_128_CBC_SHA256, + ?TLS_RSA_WITH_AES_128_GCM_SHA256, ?TLS_RSA_WITH_AES_128_CBC_SHA256 + + %% not supported + %% ?TLS_DH_RSA_WITH_AES_256_GCM_SHA384, + %% ?TLS_DH_DSS_WITH_AES_256_GCM_SHA384, + %% ?TLS_DH_RSA_WITH_AES_128_GCM_SHA256, + %% ?TLS_DH_DSS_WITH_AES_128_GCM_SHA256 ] ++ suites(2). 
+ %%-------------------------------------------------------------------- %%% Internal functions %%-------------------------------------------------------------------- diff --git a/lib/ssl/test/Makefile b/lib/ssl/test/Makefile index 0d241707d9..09cc5981e7 100644 --- a/lib/ssl/test/Makefile +++ b/lib/ssl/test/Makefile @@ -37,6 +37,7 @@ VSN=$(GS_VSN) MODULES = \ ssl_test_lib \ ssl_basic_SUITE \ + ssl_bench_SUITE \ ssl_cipher_SUITE \ ssl_certificate_verify_SUITE\ ssl_crl_SUITE\ @@ -50,6 +51,7 @@ MODULES = \ ssl_session_cache_SUITE \ ssl_to_openssl_SUITE \ ssl_ECC_SUITE \ + ssl_upgrade_SUITE\ make_certs\ erl_make_certs @@ -131,7 +133,7 @@ release_spec: opt release_tests_spec: opt $(INSTALL_DIR) "$(RELSYSDIR)" $(INSTALL_DATA) $(ERL_FILES) $(HRL_FILES) $(HRL_FILES_NEEDED_IN_TEST) $(COVER_FILE) "$(RELSYSDIR)" - $(INSTALL_DATA) ssl.spec ssl.cover "$(RELSYSDIR)" + $(INSTALL_DATA) ssl.spec ssl_bench.spec ssl.cover "$(RELSYSDIR)" chmod -R u+w "$(RELSYSDIR)" @tar cf - *_SUITE_data | (cd "$(RELSYSDIR)"; tar xf -) diff --git a/lib/ssl/test/erl_make_certs.erl b/lib/ssl/test/erl_make_certs.erl index daf4466f11..b534c0130e 100644 --- a/lib/ssl/test/erl_make_certs.erl +++ b/lib/ssl/test/erl_make_certs.erl @@ -204,7 +204,7 @@ issuer_der(Issuer) -> Subject. subject(undefined, IsRootCA) -> - User = if IsRootCA -> "RootCA"; true -> user() end, + User = if IsRootCA -> "RootCA"; true -> os:getenv("USER", "test_user") end, Opts = [{email, User ++ "@erlang.org"}, {name, User}, {city, "Stockholm"}, @@ -215,14 +215,6 @@ subject(undefined, IsRootCA) -> subject(Opts, _) -> subject(Opts). -user() -> - case os:getenv("USER") of - false -> - "test_user"; - User -> - User - end. - subject(SubjectOpts) when is_list(SubjectOpts) -> Encode = fun(Opt) -> {Type,Value} = subject_enc(Opt), diff --git a/lib/ssl/test/make_certs.erl b/lib/ssl/test/make_certs.erl index 15a7e118ff..77631f62d3 100644 --- a/lib/ssl/test/make_certs.erl +++ b/lib/ssl/test/make_certs.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2007-2012. All Rights Reserved. +%% Copyright Ericsson AB 2007-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -324,8 +324,9 @@ eval_cmd(Port, Cmd) -> ok end, receive - {Port, {exit_status, Status}} when Status /= 0 -> - %% io:fwrite("exit status: ~w~n", [Status]), + {Port, {exit_status, 0}} -> + ok; + {Port, {exit_status, Status}} -> exit({eval_cmd, Cmd, Status}) after 0 -> ok @@ -369,7 +370,7 @@ req_cnf(C) -> "subjectKeyIdentifier = hash\n" "subjectAltName = email:copy\n"]. -ca_cnf(C) -> +ca_cnf(C = #config{issuing_distribution_point = true}) -> ["# Purpose: Configuration for CAs.\n" "\n" "ROOTDIR = $ENV::ROOTDIR\n" @@ -446,5 +447,83 @@ ca_cnf(C) -> "subjectAltName = email:copy\n" "issuerAltName = issuer:copy\n" "crlDistributionPoints=@crl_section\n" - ]. 
+ ]; +ca_cnf(C = #config{issuing_distribution_point = false}) -> + ["# Purpose: Configuration for CAs.\n" + "\n" + "ROOTDIR = $ENV::ROOTDIR\n" + "default_ca = ca\n" + "\n" + + "[ca]\n" + "dir = $ROOTDIR/", C#config.commonName, "\n" + "certs = $dir/certs\n" + "crl_dir = $dir/crl\n" + "database = $dir/index.txt\n" + "new_certs_dir = $dir/newcerts\n" + "certificate = $dir/cert.pem\n" + "serial = $dir/serial\n" + "crl = $dir/crl.pem\n", + ["crlnumber = $dir/crlnumber\n" || C#config.v2_crls], + "private_key = $dir/private/key.pem\n" + "RANDFILE = $dir/private/RAND\n" + "\n" + "x509_extensions = user_cert\n", + ["crl_extensions = crl_ext\n" || C#config.v2_crls], + "unique_subject = no\n" + "default_days = 3600\n" + "default_md = md5\n" + "preserve = no\n" + "policy = policy_match\n" + "\n" + + "[policy_match]\n" + "commonName = supplied\n" + "organizationalUnitName = optional\n" + "organizationName = match\n" + "countryName = match\n" + "localityName = match\n" + "emailAddress = supplied\n" + "\n" + + "[crl_ext]\n" + "authorityKeyIdentifier=keyid:always,issuer:always\n", + %["issuingDistributionPoint=critical, @idpsec\n" || C#config.issuing_distribution_point], + + %"[idpsec]\n" + %"fullname=URI:http://localhost:8000/",C#config.commonName,"/crl.pem\n" + + "[user_cert]\n" + "basicConstraints = CA:false\n" + "keyUsage = nonRepudiation, digitalSignature, keyEncipherment\n" + "subjectKeyIdentifier = hash\n" + "authorityKeyIdentifier = keyid,issuer:always\n" + "subjectAltName = email:copy\n" + "issuerAltName = issuer:copy\n" + %"crlDistributionPoints=@crl_section\n" + + %%"[crl_section]\n" + %% intentionally invalid + %%"URI.1=http://localhost/",C#config.commonName,"/crl.pem\n" + %%"URI.2=http://localhost:",integer_to_list(C#config.crl_port),"/",C#config.commonName,"/crl.pem\n" + %%"\n" + + "[user_cert_digital_signature_only]\n" + "basicConstraints = CA:false\n" + "keyUsage = digitalSignature\n" + "subjectKeyIdentifier = hash\n" + "authorityKeyIdentifier = keyid,issuer:always\n" + "subjectAltName = email:copy\n" + "issuerAltName = issuer:copy\n" + "\n" + + "[ca_cert]\n" + "basicConstraints = critical,CA:true\n" + "keyUsage = cRLSign, keyCertSign\n" + "subjectKeyIdentifier = hash\n" + "authorityKeyIdentifier = keyid:always,issuer:always\n" + "subjectAltName = email:copy\n" + "issuerAltName = issuer:copy\n" + %"crlDistributionPoints=@crl_section\n" + ]. diff --git a/lib/ssl/test/ssl.spec b/lib/ssl/test/ssl.spec index fc7c1bbb82..86e14c033e 100644 --- a/lib/ssl/test/ssl.spec +++ b/lib/ssl/test/ssl.spec @@ -1 +1,4 @@ {suites,"../ssl_test",all}. +{skip_cases, "../ssl_test", + ssl_bench_SUITE, [setup_sequential, setup_concurrent, payload_simple], + "Benchmarks run separately"}. diff --git a/lib/ssl/test/ssl_basic_SUITE.erl b/lib/ssl/test/ssl_basic_SUITE.erl index df9432a43b..50d5fb411f 100644 --- a/lib/ssl/test/ssl_basic_SUITE.erl +++ b/lib/ssl/test/ssl_basic_SUITE.erl @@ -65,7 +65,7 @@ groups() -> {'tlsv1.2', [], all_versions_groups()}, {'tlsv1.1', [], all_versions_groups()}, {'tlsv1', [], all_versions_groups() ++ rizzo_tests()}, - {'sslv3', [], all_versions_groups() ++ rizzo_tests()}, + {'sslv3', [], all_versions_groups() ++ rizzo_tests() ++ [ciphersuite_vs_version]}, {api,[], api_tests()}, {session, [], session_tests()}, {renegotiate, [], renegotiate_tests()}, @@ -91,6 +91,7 @@ basic_tests() -> connect_twice, connect_dist, clear_pem_cache, + defaults, fallback ]. 
@@ -117,7 +118,6 @@ options_tests() -> tcp_reuseaddr, honor_server_cipher_order, honor_client_cipher_order, - ciphersuite_vs_version, unordered_protocol_versions_server, unordered_protocol_versions_client ]. @@ -178,6 +178,9 @@ cipher_tests() -> srp_cipher_suites, srp_anon_cipher_suites, srp_dsa_cipher_suites, + rc4_rsa_cipher_suites, + rc4_ecdh_rsa_cipher_suites, + rc4_ecdsa_cipher_suites, default_reject_anonymous]. cipher_tests_ec() -> @@ -347,7 +350,7 @@ alerts(Config) when is_list(Config) -> end, Alerts). %%-------------------------------------------------------------------- new_options_in_accept() -> - [{doc,"Test that you can set ssl options in ssl_accept/3 and not tcp upgrade"}]. + [{doc,"Test that you can set ssl options in ssl_accept/3 and not only in tcp upgrade"}]. new_options_in_accept(Config) when is_list(Config) -> ClientOpts = ?config(client_opts, Config), ServerOpts0 = ?config(server_dsa_opts, Config), @@ -365,7 +368,9 @@ new_options_in_accept(Config) when is_list(Config) -> {host, Hostname}, {from, self()}, {mfa, {?MODULE, connection_info_result, []}}, - {options, [{versions, [sslv3]} | ClientOpts]}]), + {options, [{versions, [sslv3]}, + {ciphers,[{rsa,rc4_128,sha} + ]} | ClientOpts]}]), ct:log("Testcase ~p, Client ~p Server ~p ~n", [self(), Client, Server]), @@ -395,7 +400,7 @@ connection_info(Config) when is_list(Config) -> {from, self()}, {mfa, {?MODULE, connection_info_result, []}}, {options, - [{ciphers,[{rsa,rc4_128,sha,no_export}]} | + [{ciphers,[{rsa,des_cbc,sha,no_export}]} | ClientOpts]}]), ct:log("Testcase ~p, Client ~p Server ~p ~n", @@ -404,7 +409,7 @@ connection_info(Config) when is_list(Config) -> Version = tls_record:protocol_version(tls_record:highest_protocol_version([])), - ServerMsg = ClientMsg = {ok, {Version, {rsa,rc4_128,sha}}}, + ServerMsg = ClientMsg = {ok, {Version, {rsa, des_cbc, sha}}}, ssl_test_lib:check_result(Server, ServerMsg, Client, ClientMsg), @@ -633,7 +638,7 @@ clear_pem_cache(Config) when is_list(Config) -> {status, _, _, StatusInfo} = sys:get_status(whereis(ssl_manager)), [_, _,_, _, Prop] = StatusInfo, State = ssl_test_lib:state(Prop), - [_,FilRefDb, _] = element(5, State), + [_,FilRefDb |_] = element(6, State), {Server, Client} = basic_verify_test_no_close(Config), 2 = ets:info(FilRefDb, size), ssl:clear_pem_cache(), @@ -1811,6 +1816,32 @@ srp_dsa_cipher_suites(Config) when is_list(Config) -> Version = tls_record:protocol_version(tls_record:highest_protocol_version([])), Ciphers = ssl_test_lib:srp_dss_suites(), run_suites(Ciphers, Version, Config, srp_dsa). +%%------------------------------------------------------------------- +rc4_rsa_cipher_suites()-> + [{doc, "Test the RC4 ciphersuites"}]. +rc4_rsa_cipher_suites(Config) when is_list(Config) -> + NVersion = tls_record:highest_protocol_version([]), + Version = tls_record:protocol_version(NVersion), + Ciphers = ssl_test_lib:rc4_suites(NVersion), + run_suites(Ciphers, Version, Config, rc4_rsa). +%------------------------------------------------------------------- +rc4_ecdh_rsa_cipher_suites()-> + [{doc, "Test the RC4 ciphersuites"}]. +rc4_ecdh_rsa_cipher_suites(Config) when is_list(Config) -> + NVersion = tls_record:highest_protocol_version([]), + Version = tls_record:protocol_version(NVersion), + Ciphers = ssl_test_lib:rc4_suites(NVersion), + run_suites(Ciphers, Version, Config, rc4_ecdh_rsa). + +%%------------------------------------------------------------------- +rc4_ecdsa_cipher_suites()-> + [{doc, "Test the RC4 ciphersuites"}]. 
+rc4_ecdsa_cipher_suites(Config) when is_list(Config) -> + NVersion = tls_record:highest_protocol_version([]), + Version = tls_record:protocol_version(NVersion), + Ciphers = ssl_test_lib:rc4_suites(NVersion), + run_suites(Ciphers, Version, Config, rc4_ecdsa). + %%-------------------------------------------------------------------- default_reject_anonymous()-> [{doc,"Test that by default anonymous cipher suites are rejected "}]. @@ -2371,7 +2402,7 @@ der_input(Config) when is_list(Config) -> {status, _, _, StatusInfo} = sys:get_status(whereis(ssl_manager)), [_, _,_, _, Prop] = StatusInfo, State = ssl_test_lib:state(Prop), - [CADb | _] = element(5, State), + [CADb | _] = element(6, State), [] = ets:tab2list(CADb). %%-------------------------------------------------------------------- @@ -2539,6 +2570,16 @@ no_reuses_session_server_restart_new_cert_file(Config) when is_list(Config) -> ssl_test_lib:close(Client1). %%-------------------------------------------------------------------- +defaults(Config) when is_list(Config)-> + [_, + {supported, Supported}, + {available, Available}] + = ssl:versions(), + true = lists:member(sslv3, Available), + false = lists:member(sslv3, Supported), + false = lists:member({rsa,rc4_128,sha}, ssl:cipher_suites()), + true = lists:member({rsa,rc4_128,sha}, ssl:cipher_suites(all)). +%%-------------------------------------------------------------------- reuseaddr() -> [{doc,"Test reuseaddr option"}]. @@ -2663,6 +2704,8 @@ honor_cipher_order(Config, Honor, ServerCiphers, ClientCiphers, Expected) -> ssl_test_lib:close(Client). %%-------------------------------------------------------------------- +ciphersuite_vs_version() -> + [{doc,"Test a SSLv3 client can not negotiate a TLSv* cipher suite."}]. ciphersuite_vs_version(Config) when is_list(Config) -> {_ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config), @@ -3726,8 +3769,20 @@ run_suites(Ciphers, Version, Config, Type) -> ?config(server_ecdsa_opts, Config)}; ecdh_rsa -> {?config(client_opts, Config), - ?config(server_ecdh_rsa_opts, Config)} - end, + ?config(server_ecdh_rsa_opts, Config)}; + rc4_rsa -> + {?config(client_opts, Config), + [{ciphers, Ciphers} | + ?config(server_opts, Config)]}; + rc4_ecdh_rsa -> + {?config(client_opts, Config), + [{ciphers, Ciphers} | + ?config(server_ecdh_rsa_opts, Config)]}; + rc4_ecdsa -> + {?config(client_opts, Config), + [{ciphers, Ciphers} | + ?config(server_ecdsa_opts, Config)]} + end, Result = lists:map(fun(Cipher) -> cipher(Cipher, Version, Config, ClientOpts, ServerOpts) end, @@ -3748,6 +3803,7 @@ erlang_cipher_suite(Suite) -> cipher(CipherSuite, Version, Config, ClientOpts, ServerOpts) -> %% process_flag(trap_exit, true), ct:log("Testing CipherSuite ~p~n", [CipherSuite]), + ct:log("Server Opts ~p~n", [ServerOpts]), {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config), ErlangCipherSuite = erlang_cipher_suite(CipherSuite), diff --git a/lib/ssl/test/ssl_bench.spec b/lib/ssl/test/ssl_bench.spec new file mode 100644 index 0000000000..d2f75b4203 --- /dev/null +++ b/lib/ssl/test/ssl_bench.spec @@ -0,0 +1 @@ +{suites,"../ssl_test",[ssl_bench_SUITE]}. diff --git a/lib/ssl/test/ssl_bench_SUITE.erl b/lib/ssl/test/ssl_bench_SUITE.erl new file mode 100644 index 0000000000..b6b3769922 --- /dev/null +++ b/lib/ssl/test/ssl_bench_SUITE.erl @@ -0,0 +1,366 @@ +%%%------------------------------------------------------------------- +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2014. All Rights Reserved. 
+%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/.2 +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% +-module(ssl_bench_SUITE). +-compile(export_all). +-include_lib("common_test/include/ct_event.hrl"). + +-define(remote_host, "NETMARKS_REMOTE_HOST"). + +suite() -> [{ct_hooks,[{ts_install_cth,[{nodenames,2}]}]}]. + +all() -> [{group, setup}, {group, payload}]. + +groups() -> + [{setup, [{repeat, 3}], [setup_sequential, setup_concurrent]}, + {payload, [{repeat, 3}], [payload_simple]} + ]. + +init_per_group(_GroupName, Config) -> + Config. + +end_per_group(_GroupName, _Config) -> + ok. + +init_per_suite(Config) -> + try + Server = setup(ssl, node()), + [{server_node, Server}|Config] + catch _:_ -> + {skipped, "Benchmark machines only"} + end. + +end_per_suite(_Config) -> + ok. + +init_per_testcase(_Func, Conf) -> + Conf. + +end_per_testcase(_Func, _Conf) -> + ok. + + +-define(COUNT, 400). +-define(TC(Cmd), tc(fun() -> Cmd end, ?MODULE, ?LINE)). + +-define(FPROF_CLIENT, false). +-define(FPROF_SERVER, false). +-define(EPROF_CLIENT, false). +-define(EPROF_SERVER, false). +-define(PERCEPT_SERVER, false). + +%% Current numbers gives roughly a testcase per minute on todays hardware.. + +setup_sequential(Config) -> + Server = proplists:get_value(server_node, Config), + Server =/= undefined orelse error(no_server), + {ok, Result} = do_test(ssl, setup_connection, ?COUNT * 20, 1, Server), + ct_event:notify(#event{name = benchmark_data, + data=[{value, Result}, + {suite, "ssl"}, {name, "Sequential setup"}]}), + ok. + +setup_concurrent(Config) -> + Server = proplists:get_value(server_node, Config), + Server =/= undefined orelse error(no_server), + {ok, Result} = do_test(ssl, setup_connection, ?COUNT, 100, Server), + ct_event:notify(#event{name = benchmark_data, + data=[{value, Result}, + {suite, "ssl"}, {name, "Concurrent setup"}]}), + ok. + +payload_simple(Config) -> + Server = proplists:get_value(server_node, Config), + Server =/= undefined orelse error(no_server), + {ok, Result} = do_test(ssl, payload, ?COUNT*300, 10, Server), + ct_event:notify(#event{name = benchmark_data, + data=[{value, Result}, + {suite, "ssl"}, {name, "Payload simple"}]}), + ok. + + +ssl() -> + test(ssl, ?COUNT, node()). + +test(Type, Count, Host) -> + Server = setup(Type, Host), + (do_test(Type, setup_connection, Count * 20, 1, Server)), + (do_test(Type, setup_connection, Count, 100, Server)), + (do_test(Type, payload, Count*300, 10, Server)), + ok. 
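The benchmark cases are skipped in the default ssl.spec ("Benchmarks run separately"), so they are driven either through the ssl/0 entry point above or via the dedicated spec file; a hedged sketch of the latter from an Erlang shell (the spec path is relative to the test directory and may differ):

    %% Run only the benchmark suite, using its own test specification.
    ct:run_test([{spec, "ssl_bench.spec"}]).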
+ +do_test(Type, TC, Loop, ParallellConnections, Server) -> + _ = ssl:stop(), + {ok, _} = ensure_all_started(ssl, []), + + {ok, {SPid, Host, Port}} = rpc:call(Server, ?MODULE, setup_server_init, + [Type, TC, Loop, ParallellConnections]), + link(SPid), + Me = self(), + Test = fun(Id) -> + CData = client_init(Me, Type, TC, Host, Port), + receive + go -> + ?FPROF_CLIENT andalso Id =:= 1 andalso + start_profile(fprof, [self(),new]), + ?EPROF_CLIENT andalso Id =:= 1 andalso + start_profile(eprof, [ssl_connection_sup, ssl_manager]), + ok = ?MODULE:TC(Loop, Type, CData), + ?FPROF_CLIENT andalso Id =:= 1 andalso + stop_profile(fprof, "test_connection_client_res.fprof"), + ?EPROF_CLIENT andalso Id =:= 1 andalso + stop_profile(eprof, "test_connection_client_res.eprof"), + Me ! self() + end + end, + Spawn = fun(Id) -> + Pid = spawn(fun() -> Test(Id) end), + receive {Pid, init} -> Pid end + end, + Pids = [Spawn(Id) || Id <- lists:seq(ParallellConnections, 1, -1)], + Run = fun() -> + [Pid ! go || Pid <- Pids], + [receive Pid -> ok end || Pid <- Pids] + end, + {TimeInMicro, _} = timer:tc(Run), + TotalTests = ParallellConnections * Loop, + TestPerSecond = 1000000 * TotalTests div TimeInMicro, + io:format("TC ~p ~p ~p ~p 1/s~n", [TC, Type, ParallellConnections, TestPerSecond]), + unlink(SPid), + SPid ! quit, + {ok, TestPerSecond}. + +server_init(ssl, setup_connection, _, _, Server) -> + {ok, Socket} = ssl:listen(0, ssl_opts(listen)), + {ok, {_Host, Port}} = ssl:sockname(Socket), + {ok, Host} = inet:gethostname(), + ?FPROF_SERVER andalso start_profile(fprof, [whereis(ssl_manager), new]), + %%?EPROF_SERVER andalso start_profile(eprof, [ssl_connection_sup, ssl_manager]), + ?EPROF_SERVER andalso start_profile(eprof, [ssl_manager]), + ?PERCEPT_SERVER andalso percept:profile("/tmp/ssl_server.percept"), + Server ! {self(), {init, Host, Port}}, + Test = fun(TSocket) -> + ok = ssl:ssl_accept(TSocket), + ssl:close(TSocket) + end, + setup_server_connection(Socket, Test); +server_init(ssl, payload, Loop, _, Server) -> + {ok, Socket} = ssl:listen(0, ssl_opts(listen)), + {ok, {_Host, Port}} = ssl:sockname(Socket), + {ok, Host} = inet:gethostname(), + Server ! {self(), {init, Host, Port}}, + Test = fun(TSocket) -> + ok = ssl:ssl_accept(TSocket), + Size = byte_size(msg()), + server_echo(TSocket, Size, Loop), + ssl:close(TSocket) + end, + setup_server_connection(Socket, Test); + +server_init(Type, Tc, _, _, Server) -> + io:format("No server init code for ~p ~p~n",[Type, Tc]), + Server ! {self(), no_init}. + +client_init(Master, ssl, setup_connection, Host, Port) -> + Master ! {self(), init}, + {Host, Port, ssl_opts(connect)}; +client_init(Master, ssl, payload, Host, Port) -> + {ok, Sock} = ssl:connect(Host, Port, ssl_opts(connect)), + Master ! {self(), init}, + Size = byte_size(msg()), + {Sock, Size}; +client_init(_Me, Type, Tc, Host, Port) -> + io:format("No client init code for ~p ~p~n",[Type, Tc]), + {Host, Port}. + +setup_server_connection(LSocket, Test) -> + receive quit -> + ?FPROF_SERVER andalso stop_profile(fprof, "test_server_res.fprof"), + ?EPROF_SERVER andalso stop_profile(eprof, "test_server_res.eprof"), + ?PERCEPT_SERVER andalso stop_profile(percept, "/tmp/ssl_server.percept"), + ok + after 0 -> + case ssl:transport_accept(LSocket, 2000) of + {ok, TSocket} -> spawn_link(fun() -> Test(TSocket) end); + {error, timeout} -> ok + end, + setup_server_connection(LSocket, Test) + end. 
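A self-contained sketch of the listen/accept/echo flow that server_init/5 and the payload case build on, outside the benchmark harness (certificate paths and the endpoint are placeholders):

    ssl:start(),
    %% Listen on an ephemeral port and read it back with sockname/1.
    {ok, L} = ssl:listen(0, [binary, {active, false}, {reuseaddr, true},
                             {certfile, "cert.pem"}, {keyfile, "key.pem"}]),
    {ok, {_, Port}} = ssl:sockname(L),
    spawn_link(fun() ->
                       %% Accept one connection and echo what arrives.
                       {ok, A} = ssl:transport_accept(L),
                       ok = ssl:ssl_accept(A),
                       {ok, Data} = ssl:recv(A, 0),
                       ok = ssl:send(A, Data),
                       ssl:close(A)
               end),
    {ok, C} = ssl:connect("localhost", Port, [binary, {active, false}]),
    ok = ssl:send(C, <<"ping">>),
    {ok, <<"ping">>} = ssl:recv(C, 4),
    ssl:close(C).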
+ +server_echo(Socket, Size, Loop) when Loop > 0 -> + {ok, Msg} = ssl:recv(Socket, Size), + ok = ssl:send(Socket, Msg), + server_echo(Socket, Size, Loop-1); +server_echo(_, _, _) -> ok. + +setup_connection(N, ssl, Env = {Host, Port, Opts}) when N > 0 -> + case ssl:connect(Host, Port, Opts) of + {ok, Sock} -> + ssl:close(Sock), + setup_connection(N-1, ssl, Env); + {error, Error} -> + io:format("Error: ~p (~p)~n",[Error, length(erlang:ports())]), + setup_connection(N, ssl, Env) + end; +setup_connection(_, _, _) -> + ok. + +payload(Loop, ssl, D = {Socket, Size}) when Loop > 0 -> + ok = ssl:send(Socket, msg()), + {ok, _} = ssl:recv(Socket, Size), + payload(Loop-1, ssl, D); +payload(_, _, {Socket, _}) -> + ssl:close(Socket). + +msg() -> + <<"Hello", + 0:(512*8), + "asdlkjsafsdfoierwlejsdlkfjsdf", + 1:(512*8), + "asdlkjsafsdfoierwlejsdlkfjsdf">>. + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +setup(_Type, nonode@nohost) -> + exit(dist_not_enabled); +setup(Type, _This) -> + Host = case os:getenv(?remote_host) of + false -> + {ok, This} = inet:gethostname(), + This; + RemHost -> + RemHost + end, + Node = list_to_atom("perf_server@" ++ Host), + SlaveArgs = case init:get_argument(pa) of + {ok, PaPaths} -> + lists:append([" -pa " ++ P || [P] <- PaPaths]); + _ -> [] + end, + %% io:format("Slave args: ~p~n",[SlaveArgs]), + Prog = + case os:find_executable("erl") of + false -> "erl"; + P -> P + end, + io:format("Prog = ~p~n", [Prog]), + + case net_adm:ping(Node) of + pong -> ok; + pang -> + {ok, Node} = slave:start(Host, perf_server, SlaveArgs, no_link, Prog) + end, + Path = code:get_path(), + true = rpc:call(Node, code, set_path, [Path]), + ok = rpc:call(Node, ?MODULE, setup_server, [Type, node()]), + io:format("Client (~p) using ~s~n",[node(), code:which(ssl)]), + (Node =:= node()) andalso restrict_schedulers(client), + Node. + +setup_server(_Type, ClientNode) -> + (ClientNode =:= node()) andalso restrict_schedulers(server), + io:format("Server (~p) using ~s~n",[node(), code:which(ssl)]), + ok. + + +ensure_all_started(App, Ack) -> + case application:start(App) of + ok -> {ok, [App|Ack]}; + {error, {not_started, Dep}} -> + {ok, Ack1} = ensure_all_started(Dep, Ack), + ensure_all_started(App, Ack1); + {error, {already_started, _}} -> + {ok, Ack} + end. + +setup_server_init(Type, Tc, Loop, PC) -> + _ = ssl:stop(), + {ok, _} = ensure_all_started(ssl, []), + Me = self(), + Pid = spawn_link(fun() -> server_init(Type, Tc, Loop, PC, Me) end), + Res = receive + {Pid, {init, Host, Port}} -> {ok, {Pid, Host, Port}}; + {Pid, Error} -> {error, Error} + end, + unlink(Pid), + Res. + +restrict_schedulers(Type) -> + %% We expect this to run on 8 core machine + Extra0 = 1, + Extra = if (Type =:= server) -> -Extra0; true -> Extra0 end, + Scheds = erlang:system_info(schedulers), + erlang:system_flag(schedulers_online, (Scheds div 2) + Extra). + +tc(Fun, Mod, Line) -> + case timer:tc(Fun) of + {_,{'EXIT',Reason}} -> + io:format("Process EXITED ~p:~p \n", [Mod, Line]), + exit(Reason); + {_T,R={error,_}} -> + io:format("Process Error ~p:~p \n", [Mod, Line]), + R; + {T,R} -> + io:format("~p:~p: Time: ~p\n", [Mod, Line, T]), + R + end. + +start_profile(eprof, Procs) -> + profiling = eprof:start_profiling(Procs), + io:format("(E)Profiling ...",[]); +start_profile(fprof, Procs) -> + fprof:trace([start, {procs, Procs}]), + io:format("(F)Profiling ...",[]). 
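The ?FPROF_*/?EPROF_* compile-time switches above gate profiling of the whole run; the same eprof calls can also be used ad hoc around a single operation, roughly like this (the endpoint is a placeholder):

    profiling = eprof:start_profiling([ssl_manager]),
    {ok, S} = ssl:connect("localhost", 4433, []),
    ssl:close(S),
    profiling_stopped = eprof:stop_profiling(),
    eprof:analyze(total).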
+ +stop_profile(percept, File) -> + percept:stop_profile(), + percept:analyze(File), + {started, _Host, Port} = percept:start_webserver(), + wx:new(), + wx_misc:launchDefaultBrowser("http://" ++ net_adm:localhost() ++ ":" ++ integer_to_list(Port)), + ok; +stop_profile(eprof, File) -> + profiling_stopped = eprof:stop_profiling(), + eprof:log(File), + io:format(".analysed => ~s ~n",[File]), + eprof:analyze(total), + eprof:stop(); +stop_profile(fprof, File) -> + fprof:trace(stop), + io:format("..collect..",[]), + fprof:profile(), + fprof:analyse([{dest, File},{totals, true}]), + io:format(".analysed => ~s ~n",[File]), + fprof:stop(), + ok. + +ssl_opts(listen) -> + [{backlog, 500} | ssl_opts("server")]; +ssl_opts(connect) -> + [{verify, verify_peer} + | ssl_opts("client")]; +ssl_opts(Role) -> + Dir = filename:join([code:lib_dir(ssl), "examples", "certs", "etc"]), + [{active, false}, + {depth, 2}, + {reuseaddr, true}, + {mode,binary}, + {nodelay, true}, + {ciphers, [{dhe_rsa,aes_256_cbc,sha}]}, + {cacertfile, filename:join([Dir, Role, "cacerts.pem"])}, + {certfile, filename:join([Dir, Role, "cert.pem"])}, + {keyfile, filename:join([Dir, Role, "key.pem"])}]. diff --git a/lib/ssl/test/ssl_cipher_SUITE.erl b/lib/ssl/test/ssl_cipher_SUITE.erl index 0e48b674e0..3433f9a445 100644 --- a/lib/ssl/test/ssl_cipher_SUITE.erl +++ b/lib/ssl/test/ssl_cipher_SUITE.erl @@ -84,13 +84,11 @@ aes_decipher_good(Config) when is_list(Config) -> decipher_check_good(HashSz, CipherState, {3,3}). %%-------------------------------------------------------------------- - aes_decipher_fail() -> [{doc,"Decipher a known cryptotext using a incorrect key"}]. aes_decipher_fail(Config) when is_list(Config) -> HashSz = 32, - CipherState = incorrect_cipher_state(), decipher_check_fail(HashSz, CipherState, {3,0}), decipher_check_fail(HashSz, CipherState, {3,1}), @@ -111,36 +109,36 @@ padding_test(Config) when is_list(Config) -> %%-------------------------------------------------------------------- decipher_check_good(HashSz, CipherState, Version) -> {Content, NextIV, Mac} = content_nextiv_mac(Version), - {Content, Mac, #cipher_state{iv = NextIV}} = - ssl_cipher:decipher(?AES, HashSz, CipherState, aes_fragment(Version), Version, true). + {Content, Mac, _} = + ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, aes_fragment(Version), Version, true). decipher_check_fail(HashSz, CipherState, Version) -> {Content, NextIV, Mac} = content_nextiv_mac(Version), true = {Content, Mac, #cipher_state{iv = NextIV}} =/= - ssl_cipher:decipher(?AES, HashSz, CipherState, aes_fragment(Version), Version, true). + ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, aes_fragment(Version), Version, true). 
pad_test(HashSz, CipherState, {3,0} = Version) -> %% 3.0 does not have padding test {Content, NextIV, Mac} = badpad_content_nextiv_mac(Version), {Content, Mac, #cipher_state{iv = NextIV}} = - ssl_cipher:decipher(?AES, HashSz, CipherState, badpad_aes_fragment({3,0}), {3,0}, true), + ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, badpad_aes_fragment({3,0}), {3,0}, true), {Content, Mac, #cipher_state{iv = NextIV}} = - ssl_cipher:decipher(?AES, HashSz, CipherState, badpad_aes_fragment({3,0}), {3,0}, false); + ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, badpad_aes_fragment({3,0}), {3,0}, false); pad_test(HashSz, CipherState, {3,1} = Version) -> %% 3.1 should have padding test, but may be disabled {Content, NextIV, Mac} = badpad_content_nextiv_mac(Version), BadCont = badpad_content(Content), {Content, Mac, #cipher_state{iv = NextIV}} = - ssl_cipher:decipher(?AES, HashSz, CipherState, badpad_aes_fragment({3,1}) , {3,1}, false), + ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, badpad_aes_fragment({3,1}) , {3,1}, false), {BadCont, Mac, #cipher_state{iv = NextIV}} = - ssl_cipher:decipher(?AES, HashSz, CipherState, badpad_aes_fragment({3,1}), {3,1}, true); + ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, badpad_aes_fragment({3,1}), {3,1}, true); pad_test(HashSz, CipherState, Version) -> %% 3.2 and 3.3 must have padding test {Content, NextIV, Mac} = badpad_content_nextiv_mac(Version), BadCont = badpad_content(Content), - {BadCont, Mac, #cipher_state{iv = NextIV}} = ssl_cipher:decipher(?AES, HashSz, CipherState, + {BadCont, Mac, #cipher_state{iv = NextIV}} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, badpad_aes_fragment(Version), Version, false), - {BadCont, Mac, #cipher_state{iv = NextIV}} = ssl_cipher:decipher(?AES, HashSz, CipherState, + {BadCont, Mac, #cipher_state{iv = NextIV}} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, badpad_aes_fragment(Version), Version, true). aes_fragment({3,N}) when N == 0; N == 1-> @@ -164,7 +162,7 @@ badpad_aes_fragment(_) -> content_nextiv_mac({3,N}) when N == 0; N == 1 -> {<<"HELLO\n">>, - <<33,0, 177,251, 91,44, 247,53, 183,198, 165,63, 20,194, 159,107>>, + <<72,196,247,97,62,213,222,109,210,204,217,186,172,184, 197,148>>, <<71,136,212,107,223,200,70,232,127,116,148,205,232,35,158,113,237,174,15,217,192,168,35,8,6,107,107,233,25,174,90,111>>}; content_nextiv_mac(_) -> {<<"HELLO\n">>, @@ -193,3 +191,4 @@ correct_cipher_state() -> incorrect_cipher_state() -> #cipher_state{iv = <<59,201,85,117,188,206,224,136,5,109,46,70,104,79,4,9>>, key = <<72,196,247,97,62,213,222,109,210,204,217,186,172,184,197,254>>}. + diff --git a/lib/ssl/test/ssl_crl_SUITE.erl b/lib/ssl/test/ssl_crl_SUITE.erl index bad0949ec4..c6bf8898ad 100644 --- a/lib/ssl/test/ssl_crl_SUITE.erl +++ b/lib/ssl/test/ssl_crl_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2008-2013. All Rights Reserved. +%% Copyright Ericsson AB 2008-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -26,43 +26,40 @@ -include_lib("common_test/include/ct.hrl"). -include_lib("public_key/include/public_key.hrl"). --define(TIMEOUT, 120000). -define(LONG_TIMEOUT, 600000). --define(SLEEP, 1000). --define(OPENSSL_RENEGOTIATE, "R\n"). --define(OPENSSL_QUIT, "Q\n"). --define(OPENSSL_GARBAGE, "P\n"). --define(EXPIRE, 10). 
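The reworked CRL suite that follows drives the new crl_check/crl_cache client options and the ssl_crl_cache pre-loading API; a minimal sketch of the same options on an ordinary client connect (host and file paths are placeholders):

    ssl:start(),
    %% Pre-load the CRLs by hand, as the non-IDP cases below do ...
    ssl_crl_cache:insert({file, "erlangCA/crl.pem"}),
    ssl_crl_cache:insert({file, "otpCA/crl.pem"}),
    %% ... and turn on checking of the peer certificate against them.
    {ok, S} = ssl:connect("server.example.com", 443,
                          [{verify, verify_peer},
                           {cacertfile, "cacerts.pem"},
                           {crl_check, peer}]),
    ssl:close(S).
    %% When the certificates carry an HTTP distribution point, the cache can
    %% fetch CRLs itself instead, via
    %%   {crl_cache, {ssl_crl_cache, {internal, [{http, 5000}]}}}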
%%-------------------------------------------------------------------- %% Common Test interface functions ----------------------------------- %%-------------------------------------------------------------------- -suite() -> [{ct_hooks,[ts_install_cth]}]. +suite() -> + [{ct_hooks,[ts_install_cth]}]. all() -> [ - {group, basic}, - {group, v1_crl}, - {group, idp_crl} + {group, check_true}, + {group, check_peer}, + {group, check_best_effort} ]. groups() -> - [{basic, [], basic_tests()}, - {v1_crl, [], v1_crl_tests()}, - {idp_crl, [], idp_crl_tests()}]. + [ + {check_true, [], [{group, v2_crl}, + {group, v1_crl}, + {group, idp_crl}]}, + {check_peer, [], [{group, v2_crl}, + {group, v1_crl}, + {group, idp_crl}]}, + {check_best_effort, [], [{group, v2_crl}, + {group, v1_crl}, + {group, idp_crl}]}, + {v2_crl, [], basic_tests()}, + {v1_crl, [], basic_tests()}, + {idp_crl, [], basic_tests()}]. basic_tests() -> [crl_verify_valid, crl_verify_revoked]. -v1_crl_tests() -> - [crl_verify_valid, crl_verify_revoked]. - -idp_crl_tests() -> - [crl_verify_valid, crl_verify_revoked]. - -%%%================================================================ -%%% Suite init/end init_per_suite(Config0) -> Dog = ct:timetrap(?LONG_TIMEOUT *2), @@ -70,10 +67,7 @@ init_per_suite(Config0) -> false -> {skip, "Openssl not found"}; _ -> - TLSVersion = ?config(tls_version, Config0), OpenSSL_version = (catch os:cmd("openssl version")), - ct:log("TLS version: ~p~nOpenSSL version: ~p~n~n~p:module_info(): ~p~n~nssl:module_info(): ~p~n", - [TLSVersion, OpenSSL_version, ?MODULE, ?MODULE:module_info(), ssl:module_info()]), case ssl_test_lib:enough_openssl_crl_support(OpenSSL_version) of false -> {skip, io_lib:format("Bad openssl version: ~p",[OpenSSL_version])}; @@ -81,7 +75,6 @@ init_per_suite(Config0) -> catch crypto:stop(), try crypto:start() of ok -> - ssl:start(), {ok, Hostname0} = inet:gethostname(), IPfamily = case lists:member(list_to_atom(Hostname0), ct:get_config(ipv6_hosts,[])) of @@ -89,8 +82,7 @@ init_per_suite(Config0) -> false -> inet end, [{ipfamily,IPfamily}, {watchdog, Dog}, {openssl_version,OpenSSL_version} | Config0] - catch _C:_E -> - ct:log("crypto:start() caught ~p:~p",[_C,_E]), + catch _:_ -> {skip, "Crypto did not start"} end end @@ -100,443 +92,175 @@ end_per_suite(_Config) -> ssl:stop(), application:stop(crypto). -%%%================================================================ -%%% Group init/end - -init_per_group(Group, Config) -> - ssl:start(), - inets:start(), - CertDir = filename:join(?config(priv_dir, Config), Group), - DataDir = ?config(data_dir, Config), - ServerRoot = make_dir_path([?config(priv_dir,Config), Group, tmp]), - %% start a HTTP server to serve the CRLs - {ok, Httpd} = inets:start(httpd, [{ipfamily, ?config(ipfamily,Config)}, - {server_name, "localhost"}, {port, 0}, - {server_root, ServerRoot}, - {document_root, CertDir}, - {modules, [mod_get]} - ]), - [{port,Port}] = httpd:info(Httpd, [port]), - ct:log("~p:~p~nHTTPD IP family=~p, port=~p~n", [?MODULE, ?LINE, ?config(ipfamily,Config), Port]), - CertOpts = [{crl_port,Port}|cert_opts(Group)], - Result = make_certs:all(DataDir, CertDir, CertOpts), - ct:log("~p:~p~nmake_certs:all(~n DataDir=~p,~n CertDir=~p,~n ServerRoot=~p~n Opts=~p~n) returned ~p~n", [?MODULE,?LINE,DataDir, CertDir, ServerRoot, CertOpts, Result]), - [{make_cert_result, Result}, {cert_dir, CertDir}, {httpd, Httpd} | Config]. - -cert_opts(v1_crl) -> [{v2_crls, false}]; -cert_opts(idp_crl) -> [{issuing_distribution_point, true}]; -cert_opts(_) -> []. 
- -make_dir_path(PathComponents) -> - lists:foldl(fun(F,P0) -> file:make_dir(P=filename:join(P0,F)), P end, - "", - PathComponents). - +init_per_group(check_true, Config) -> + [{crl_check, true} | Config]; +init_per_group(check_peer, Config) -> + [{crl_check, peer} | Config]; +init_per_group(check_best_effort, Config) -> + [{crl_check, best_effort} | Config]; +init_per_group(Group, Config0) -> + case is_idp(Group) of + true -> + [{idp_crl, true} | Config0]; + false -> + DataDir = ?config(data_dir, Config0), + CertDir = filename:join(?config(priv_dir, Config0), Group), + {CertOpts, Config} = init_certs(CertDir, Group, Config0), + Result = make_certs:all(DataDir, CertDir, CertOpts), + [{make_cert_result, Result}, {cert_dir, CertDir}, {idp_crl, false} | Config] + end. end_per_group(_GroupName, Config) -> - case ?config(httpd, Config) of - undefined -> ok; - Pid -> - ct:log("Stop httpd ~p",[Pid]), - ok = inets:stop(httpd, Pid) - ,ct:log("Stopped",[]) - end, - inets:stop(), + Config. +init_per_testcase(Case, Config0) -> + case ?config(idp_crl, Config0) of + true -> + end_per_testcase(Case, Config0), + inets:start(), + ssl:start(), + ServerRoot = make_dir_path([?config(priv_dir, Config0), idp_crl, tmp]), + %% start a HTTP server to serve the CRLs + {ok, Httpd} = inets:start(httpd, [{ipfamily, ?config(ipfamily, Config0)}, + {server_name, "localhost"}, {port, 0}, + {server_root, ServerRoot}, + {document_root, + filename:join(?config(priv_dir, Config0), idp_crl)} + ]), + [{port,Port}] = httpd:info(Httpd, [port]), + Config = [{httpd_port, Port} | Config0], + DataDir = ?config(data_dir, Config), + CertDir = filename:join(?config(priv_dir, Config0), idp_crl), + {CertOpts, Config} = init_certs(CertDir, idp_crl, Config), + Result = make_certs:all(DataDir, CertDir, CertOpts), + [{make_cert_result, Result}, {cert_dir, CertDir} | Config]; + false -> + end_per_testcase(Case, Config0), + ssl:start(), + Config0 + end. + +end_per_testcase(_, Config) -> + case ?config(idp_crl, Config) of + true -> + ssl:stop(), + inets:stop(); + false -> + ssl:stop() + end. + %%%================================================================ %%% Test cases +%%%================================================================ crl_verify_valid() -> [{doc,"Verify a simple valid CRL chain"}]. 
crl_verify_valid(Config) when is_list(Config) -> - process_flag(trap_exit, true), PrivDir = ?config(cert_dir, Config), - ServerOpts = [{keyfile, filename:join([PrivDir, "server", "key.pem"])}, - {certfile, filename:join([PrivDir, "server", "cert.pem"])}, - {cacertfile, filename:join([PrivDir, "server", "cacerts.pem"])}], - + Check = ?config(crl_check, Config), + ServerOpts = [{keyfile, filename:join([PrivDir, "server", "key.pem"])}, + {certfile, filename:join([PrivDir, "server", "cert.pem"])}, + {cacertfile, filename:join([PrivDir, "server", "cacerts.pem"])}], + ClientOpts = case ?config(idp_crl, Config) of + true -> + [{cacertfile, filename:join([PrivDir, "server", "cacerts.pem"])}, + {crl_check, Check}, + {crl_cache, {ssl_crl_cache, {internal, [{http, 5000}]}}}, + {verify, verify_peer}]; + false -> + [{cacertfile, filename:join([PrivDir, "server", "cacerts.pem"])}, + {crl_check, Check}, + {verify, verify_peer}] + end, {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config), - Data = "From openssl to erlang", - Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0}, - {from, self()}, - {mfa, {?MODULE, erlang_ssl_receive, [Data]}}, - %{mfa, {ssl_test_lib, no_result, []}}, - {options, ServerOpts}]), - ct:log("~p:~p~nreturn from ssl_test_lib:start_server:~n~p",[?MODULE,?LINE,Server]), - Port = ssl_test_lib:inet_port(Server), - - CACerts = load_cert(filename:join([PrivDir, "erlangCA", "cacerts.pem"])), - - ClientOpts = [{cacerts, CACerts}, - {verify, verify_peer}, - {verify_fun, {fun validate_function/3, {CACerts, []}}}], - - - ct:log("~p:~p~ncalling ssl_test_lib:start_client",[?MODULE,?LINE]), - Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, - {host, Hostname}, - {from, self()}, - {mfa, {?MODULE, - erlang_ssl_send, [Data]}}, - %{mfa, {ssl_test_lib, no_result, []}}, - {options, ClientOpts}]), - ct:log("~p:~p~nreturn from ssl_test_lib:start_client:~n~p",[?MODULE,?LINE,Client]), - - ssl_test_lib:check_result(Client, ok, Server, ok), - - %% Clean close down! Server needs to be closed first !! - ssl_test_lib:close(Server), - ssl_test_lib:close(Client), - process_flag(trap_exit, false). + ssl_crl_cache:insert({file, filename:join([PrivDir, "erlangCA", "crl.pem"])}), + ssl_crl_cache:insert({file, filename:join([PrivDir, "otpCA", "crl.pem"])}), + + crl_verify_valid(Hostname, ServerNode, ServerOpts, ClientNode, ClientOpts). crl_verify_revoked() -> - [{doc,"Verify a simple valid CRL chain"}]. -crl_verify_revoked(Config) when is_list(Config) -> - process_flag(trap_exit, true), + [{doc,"Verify a simple CRL chain when peer cert is reveoked"}]. 
+crl_verify_revoked(Config) when is_list(Config) -> PrivDir = ?config(cert_dir, Config), + Check = ?config(crl_check, Config), ServerOpts = [{keyfile, filename:join([PrivDir, "revoked", "key.pem"])}, - {certfile, filename:join([PrivDir, "revoked", "cert.pem"])}, - {cacertfile, filename:join([PrivDir, "revoked", "cacerts.pem"])}], - ct:log("~p:~p~nserver opts ~p~n", [?MODULE,?LINE, ServerOpts]), + {certfile, filename:join([PrivDir, "revoked", "cert.pem"])}, + {cacertfile, filename:join([PrivDir, "revoked", "cacerts.pem"])}], {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config), - Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0}, - {from, self()}, - %{mfa, {?MODULE, erlang_ssl_receive, [Data]}}, - {mfa, {ssl_test_lib, no_result, []}}, - {options, ServerOpts}]), + Server = ssl_test_lib:start_server_error([{node, ServerNode}, {port, 0}, + {from, self()}, + {options, ServerOpts}]), Port = ssl_test_lib:inet_port(Server), + + ssl_crl_cache:insert({file, filename:join([PrivDir, "erlangCA", "crl.pem"])}), + ssl_crl_cache:insert({file, filename:join([PrivDir, "otpCA", "crl.pem"])}), + + ClientOpts = case ?config(idp_crl, Config) of + true -> + [{cacertfile, filename:join([PrivDir, "revoked", "cacerts.pem"])}, + {crl_cache, {ssl_crl_cache, {internal, [{http, 5000}]}}}, + {crl_check, Check}, + {verify, verify_peer}]; + false -> + [{cacertfile, filename:join([PrivDir, "revoked", "cacerts.pem"])}, + {crl_check, Check}, + {verify, verify_peer}] + end, + + Client = ssl_test_lib:start_client_error([{node, ClientNode}, {port, Port}, + {host, Hostname}, + {from, self()}, + {options, ClientOpts}]), + receive + {Server, AlertOrColse} -> + ct:pal("Server Alert or Close ~p", [AlertOrColse]) + end, + ssl_test_lib:check_result(Client, {error, {tls_alert, "certificate revoked"}}). - CACerts = load_cert(filename:join([PrivDir, "erlangCA", "cacerts.pem"])), - ClientOpts = [{cacerts, CACerts}, - {verify, verify_peer}, - {verify_fun, {fun validate_function/3, {CACerts, []}}}], - {connect_failed, _} = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, +crl_verify_valid(Hostname, ServerNode, ServerOpts, ClientNode, ClientOpts) -> + Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0}, + {from, self()}, + {mfa, {ssl_test_lib, + send_recv_result_active, []}}, + {options, ServerOpts}]), + Port = ssl_test_lib:inet_port(Server), + Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, {host, Hostname}, {from, self()}, - %{mfa, {?MODULE, - %erlang_ssl_receive, [Data]}}, - {mfa, {ssl_test_lib, no_result, []}}, + {mfa, {ssl_test_lib, + send_recv_result_active, []}}, {options, ClientOpts}]), + + ssl_test_lib:check_result(Client, ok, Server, ok), - %% Clean close down! Server needs to be closed first !! ssl_test_lib:close(Server), - process_flag(trap_exit, false). 
- -%%%================================================================ -%%% Lib - -erlang_ssl_receive(Socket, Data) -> - ct:log("~p:~p~nConnection info: ~p~n", - [?MODULE,?LINE, ssl:connection_info(Socket)]), - receive - {ssl, Socket, Data} -> - ct:log("~p:~p~nReceived ~p~n",[?MODULE,?LINE, Data]), - %% open_ssl server sometimes hangs waiting in blocking read - ssl:send(Socket, "Got it"), - ok; - {ssl, Socket, Byte} when length(Byte) == 1 -> - erlang_ssl_receive(Socket, tl(Data)); - {Port, {data,Debug}} when is_port(Port) -> - ct:log("~p:~p~nopenssl ~s~n",[?MODULE,?LINE, Debug]), - erlang_ssl_receive(Socket,Data); - Other -> - ct:fail({unexpected_message, Other}) - after 4000 -> - ct:fail({did_not_get, Data}) - end. - - -erlang_ssl_send(Socket, Data) -> - ct:log("~p:~p~nConnection info: ~p~n", - [?MODULE,?LINE, ssl:connection_info(Socket)]), - ssl:send(Socket, Data), - ok. - -load_certs(undefined) -> - undefined; -load_certs(CertDir) -> - case file:list_dir(CertDir) of - {ok, Certs} -> - load_certs(lists:map(fun(Cert) -> filename:join(CertDir, Cert) - end, Certs), []); - {error, _} -> - undefined - end. - -load_certs([], Acc) -> - ct:log("~p:~p~nSuccessfully loaded ~p CA certificates~n", [?MODULE,?LINE, length(Acc)]), - Acc; -load_certs([Cert|Certs], Acc) -> - case filelib:is_dir(Cert) of - true -> - load_certs(Certs, Acc); - _ -> - %ct:log("~p:~p~nLoading certificate ~p~n", [?MODULE,?LINE, Cert]), - load_certs(Certs, load_cert(Cert) ++ Acc) - end. - -load_cert(Cert) -> - {ok, Bin} = file:read_file(Cert), - case filename:extension(Cert) of - ".der" -> - %% no decoding necessary - [Bin]; - _ -> - %% assume PEM otherwise - Contents = public_key:pem_decode(Bin), - [DER || {Type, DER, Cipher} <- Contents, Type == 'Certificate', Cipher == 'not_encrypted'] - end. - -%% @doc Validator function for SSL negotiation. -%% -validate_function(Cert, valid_peer, State) -> - ct:log("~p:~p~nvaliding peer ~p with ~p intermediate certs~n", - [?MODULE,?LINE, get_common_name(Cert), - length(element(2, State))]), - %% peer certificate validated, now check the CRL - Res = (catch check_crl(Cert, State)), - ct:log("~p:~p~nCRL validate result for ~p: ~p~n", - [?MODULE,?LINE, get_common_name(Cert), Res]), - {Res, State}; -validate_function(Cert, valid, {TrustedCAs, IntermediateCerts}=State) -> - case public_key:pkix_is_self_signed(Cert) of - true -> - ct:log("~p:~p~nroot certificate~n",[?MODULE,?LINE]), - %% this is a root cert, no CRL - {valid, {TrustedCAs, [Cert|IntermediateCerts]}}; - false -> - %% check is valid CA certificate, add to the list of - %% intermediates - Res = (catch check_crl(Cert, State)), - ct:log("~p:~p~nCRL intermediate CA validate result for ~p: ~p~n", - [?MODULE,?LINE, get_common_name(Cert), Res]), - {Res, {TrustedCAs, [Cert|IntermediateCerts]}} - end; -validate_function(_Cert, _Event, State) -> - %ct:log("~p:~p~nignoring event ~p~n", [?MODULE,?LINE, _Event]), - {valid, State}. + ssl_test_lib:close(Client). -%% @doc Given a certificate, find CRL distribution points for the given -%% certificate, fetch, and attempt to validate each CRL through -%% issuer_function/4. 
-%% -check_crl(Cert, State) -> - %% pull the CRL distribution point(s) out of the certificate, if any - ct:log("~p:~p~ncheck_crl(~n Cert=~p,~nState=~p~n)",[?MODULE,?LINE,Cert,State]), - case pubkey_cert:select_extension( - ?'id-ce-cRLDistributionPoints', - pubkey_cert:extensions_list(Cert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.extensions)) of - undefined -> - ct:log("~p:~p~nno CRL distribution points for ~p~n", - [?MODULE,?LINE, get_common_name(Cert)]), - %% fail; we can't validate if there's no CRL - no_crl; - CRLExtension -> - ct:log("~p:~p~nCRLExtension=~p)",[?MODULE,?LINE,CRLExtension]), - CRLDistPoints = CRLExtension#'Extension'.extnValue, - DPointsAndCRLs = lists:foldl(fun(Point, Acc) -> - %% try to read the CRL over http or from a - %% local file - case fetch_point(Point) of - not_available -> - ct:log("~p:~p~nfetch_point returned~n~p~n)",[?MODULE,?LINE,not_available]), - Acc; - Res -> - ct:log("~p:~p~nfetch_point returned~n~p~n)",[?MODULE,?LINE,Res]), - [{Point, Res} | Acc] - end - end, [], CRLDistPoints), - public_key:pkix_crls_validate(Cert, - DPointsAndCRLs, - [{issuer_fun, - {fun issuer_function/4, State}}]) - end. - -%% @doc Given a list of distribution points for CRLs, certificates and -%% both trusted and intermediary certificates, attempt to build and -%% authority chain back via build_chain to verify that it is valid. -%% -issuer_function(_DP, CRL, _Issuer, {TrustedCAs, IntermediateCerts}) -> - %% XXX the 'Issuer' we get passed here is the AuthorityKeyIdentifier, - %% which we are not currently smart enough to understand - %% Read the CA certs out of the file - ct:log("~p:~p~nissuer_function(~nCRL=~p,~nLast param=~p)",[?MODULE,?LINE,CRL, {TrustedCAs, IntermediateCerts}]), - Certs = [public_key:pkix_decode_cert(DER, otp) || DER <- TrustedCAs], - %% get the real issuer out of the CRL - Issuer = public_key:pkix_normalize_name( - pubkey_cert_records:transform( - CRL#'CertificateList'.tbsCertList#'TBSCertList'.issuer, decode)), - %% assume certificates are ordered from root to tip - case find_issuer(Issuer, IntermediateCerts ++ Certs) of - undefined -> - ct:log("~p:~p~nunable to find certificate matching CRL issuer ~p~n", - [?MODULE,?LINE, Issuer]), - error; - IssuerCert -> - ct:log("~p:~p~nIssuerCert=~p~n)",[?MODULE,?LINE,IssuerCert]), - case build_chain({public_key:pkix_encode('OTPCertificate', - IssuerCert, - otp), - IssuerCert}, IntermediateCerts, Certs, []) of - undefined -> - error; - {OTPCert, Path} -> - {ok, OTPCert, Path} - end - end. - -%% @doc Attempt to build authority chain back using intermediary -%% certificates, falling back on trusted certificates if the -%% intermediary chain of certificates does not fully extend to the -%% root. 
-%% -%% Returns: {RootCA :: #OTPCertificate{}, Chain :: [der_encoded()]} -%% -build_chain({DER, Cert}, IntCerts, TrustedCerts, Acc) -> - %% check if this cert is self-signed, if it is, we've reached the - %% root of the chain - Issuer = public_key:pkix_normalize_name( - Cert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.issuer), - Subject = public_key:pkix_normalize_name( - Cert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.subject), - case Issuer == Subject of - true -> - case find_issuer(Issuer, TrustedCerts) of - undefined -> - ct:log("~p:~p~nself-signed certificate is NOT trusted~n",[?MODULE,?LINE]), - undefined; - TrustedCert -> - %% return the cert from the trusted list, to prevent - %% issuer spoofing - {TrustedCert, - [public_key:pkix_encode( - 'OTPCertificate', TrustedCert, otp)|Acc]} - end; - false -> - Match = lists:foldl( - fun(C, undefined) -> - S = public_key:pkix_normalize_name(C#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.subject), - %% compare the subject to the current issuer - case Issuer == S of - true -> - %% we've found our man - {public_key:pkix_encode('OTPCertificate', C, otp), C}; - false -> - undefined - end; - (_E, A) -> - %% already matched - A - end, undefined, IntCerts), - case Match of - undefined when IntCerts /= TrustedCerts -> - %% continue the chain by using the trusted CAs - ct:log("~p:~p~nRan out of intermediate certs, switching to trusted certs~n",[?MODULE,?LINE]), - build_chain({DER, Cert}, TrustedCerts, TrustedCerts, Acc); - undefined -> - ct:log("Can't construct chain of trust beyond ~p~n", - [?MODULE,?LINE, get_common_name(Cert)]), - %% can't find the current cert's issuer - undefined; - Match -> - build_chain(Match, IntCerts, TrustedCerts, [DER|Acc]) - end - end. - -%% @doc Given a certificate and a list of trusted or intermediary -%% certificates, attempt to find a match in the list or bail with -%% undefined. -find_issuer(Issuer, Certs) -> - lists:foldl( - fun(OTPCert, undefined) -> - %% check if this certificate matches the issuer - Normal = public_key:pkix_normalize_name( - OTPCert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.subject), - case Normal == Issuer of - true -> - OTPCert; - false -> - undefined - end; - (_E, Acc) -> - %% already found a match - Acc - end, undefined, Certs). - -%% @doc Find distribution points for a given CRL and then attempt to -%% fetch the CRL from the first available. -fetch_point(#'DistributionPoint'{distributionPoint={fullName, Names}}) -> - Decoded = [{NameType, - pubkey_cert_records:transform(Name, decode)} - || {NameType, Name} <- Names], - ct:log("~p:~p~ncall fetch(~nDecoded=~p~n)",[?MODULE,?LINE,Decoded]), - fetch(Decoded). - -%% @doc Given a list of locations to retrieve a CRL from, attempt to -%% retrieve either from a file or http resource and bail as soon as -%% it can be found. -%% -%% Currently, only hand a armored PEM or DER encoded file, with -%% defaulting to DER. 
-%% -fetch([]) -> - not_available; -fetch([{uniformResourceIdentifier, "http"++_=URL}|Rest]) -> - ct:log("~p:~p~ngetting CRL from ~p~n", [?MODULE,?LINE, URL]), - case httpc:request(get, {URL, []}, [], [{body_format, binary}]) of - {ok, {_Status, _Headers, Body}} -> - case Body of - <<"-----BEGIN", _/binary>> -> - ct:log("~p:~p~npublic_key:pem_decode,~nBody=~p~n)",[?MODULE,?LINE,Body]), - [{'CertificateList', - DER, _}=CertList] = public_key:pem_decode(Body), - ct:log("~p:~p~npublic_key:pem_entry_decode,~nCertList=~p~n)",[?MODULE,?LINE,CertList]), - {DER, public_key:pem_entry_decode(CertList)}; - _ -> - ct:log("~p:~p~npublic_key:pem_entry_decode,~nBody=~p~n)",[?MODULE,?LINE,{'CertificateList', Body, not_encrypted}]), - %% assume DER encoded - try - public_key:pem_entry_decode({'CertificateList', Body, not_encrypted}) - of - CertList -> {Body, CertList} - catch - _C:_E -> - ct:log("~p:~p~nfailed DER assumption~nRest=~p", [?MODULE,?LINE,Rest]), - fetch(Rest) - end - end; - {error, _Reason} -> - ct:log("~p:~p~nfailed to get CRL ~p~n", [?MODULE,?LINE, _Reason]), - fetch(Rest); - Other -> - ct:log("~p:~p~nreally failed to get CRL ~p~n", [?MODULE,?LINE, Other]), - fetch(Rest) - end; -fetch([Loc|Rest]) -> - %% unsupported CRL location - ct:log("~p:~p~nunable to fetch CRL from unsupported location ~p~n", - [?MODULE,?LINE, Loc]), - fetch(Rest). +%%-------------------------------------------------------------------- +%% Internal functions ------------------------------------------------ +%%-------------------------------------------------------------------- +is_idp(idp_crl) -> + true; +is_idp(_) -> + false. + +init_certs(_,v1_crl, Config) -> + {[{v2_crls, false}], Config}; +init_certs(_, idp_crl, Config) -> + Port = ?config(httpd_port, Config), + {[{crl_port,Port}, + {issuing_distribution_point, true}], Config + }; +init_certs(_,_,Config) -> + {[], Config}. -%% get the common name attribute out of an OTPCertificate record -get_common_name(OTPCert) -> - %% You'd think there'd be an easier way than this giant mess, but I - %% couldn't find one. - {rdnSequence, Subject} = OTPCert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.subject, - case [Attribute#'AttributeTypeAndValue'.value || [Attribute] <- Subject, - Attribute#'AttributeTypeAndValue'.type == ?'id-at-commonName'] of - [Att] -> - case Att of - {teletexString, Str} -> Str; - {printableString, Str} -> Str; - {utf8String, Bin} -> binary_to_list(Bin) - end; - _ -> - unknown - end. +make_dir_path(PathComponents) -> + lists:foldl(fun(F,P0) -> file:make_dir(P=filename:join(P0,F)), P end, + "", + PathComponents). diff --git a/lib/ssl/test/ssl_pem_cache_SUITE.erl b/lib/ssl/test/ssl_pem_cache_SUITE.erl index 843079e2fe..23584dfcdf 100644 --- a/lib/ssl/test/ssl_pem_cache_SUITE.erl +++ b/lib/ssl/test/ssl_pem_cache_SUITE.erl @@ -113,9 +113,9 @@ get_pem_cache() -> {status, _, _, StatusInfo} = sys:get_status(whereis(ssl_manager)), [_, _,_, _, Prop] = StatusInfo, State = ssl_test_lib:state(Prop), - case element(5, State) of - [_CertDb, _FileRefDb, PemChace] -> - PemChace; + case element(6, State) of + [_CertDb, _FileRefDb, PemCache| _] -> + PemCache; _ -> undefined end. diff --git a/lib/ssl/test/ssl_session_cache_SUITE.erl b/lib/ssl/test/ssl_session_cache_SUITE.erl index c31f6c2d7d..36d086338e 100644 --- a/lib/ssl/test/ssl_session_cache_SUITE.erl +++ b/lib/ssl/test/ssl_session_cache_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2010-2013. All Rights Reserved. +%% Copyright Ericsson AB 2010-2015. All Rights Reserved. 
%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -108,8 +108,12 @@ init_customized_session_cache(Type, Config0) -> ssl:stop(), application:load(ssl), application:set_env(ssl, session_cb, ?MODULE), - application:set_env(ssl, session_cb_init_args, [Type]), + application:set_env(ssl, session_cb_init_args, [{type, Type}]), ssl:start(), + catch (end_per_testcase(list_to_atom("session_cache_process" ++ atom_to_list(Type)), + Config)), + ets:new(ssl_test, [named_table, public, set]), + ets:insert(ssl_test, {type, Type}), [{watchdog, Dog} | Config]. end_per_testcase(session_cache_process_list, Config) -> @@ -126,7 +130,11 @@ end_per_testcase(session_cleanup, Config) -> application:unset_env(ssl, session_delay_cleanup_time), application:unset_env(ssl, session_lifetime), end_per_testcase(default_action, Config); -end_per_testcase(_TestCase, Config) -> +end_per_testcase(Case, Config) when Case == session_cache_process_list; + Case == session_cache_process_mnesia -> + ets:delete(ssl_test), + Config; +end_per_testcase(_, Config) -> Config. %%-------------------------------------------------------------------- @@ -164,12 +172,13 @@ session_cleanup(Config)when is_list(Config) -> {status, _, _, StatusInfo} = sys:get_status(whereis(ssl_manager)), [_, _,_, _, Prop] = StatusInfo, State = ssl_test_lib:state(Prop), - Cache = element(2, State), - SessionTimer = element(6, State), + ClientCache = element(2, State), + ServerCache = element(3, State), + SessionTimer = element(7, State), Id = proplists:get_value(session_id, SessionInfo), - CSession = ssl_session_cache:lookup(Cache, {{Hostname, Port}, Id}), - SSession = ssl_session_cache:lookup(Cache, {Port, Id}), + CSession = ssl_session_cache:lookup(ClientCache, {{Hostname, Port}, Id}), + SSession = ssl_session_cache:lookup(ServerCache, {Port, Id}), true = CSession =/= undefined, true = SSession =/= undefined, @@ -185,8 +194,8 @@ session_cleanup(Config)when is_list(Config) -> ct:sleep(?SLEEP), %% Make sure clean has had time to run - undefined = ssl_session_cache:lookup(Cache, {{Hostname, Port}, Id}), - undefined = ssl_session_cache:lookup(Cache, {Port, Id}), + undefined = ssl_session_cache:lookup(ClientCache, {{Hostname, Port}, Id}), + undefined = ssl_session_cache:lookup(ServerCache, {Port, Id}), process_flag(trap_exit, false), ssl_test_lib:close(Server), @@ -208,7 +217,7 @@ get_delay_timers() -> {status, _, _, StatusInfo} = sys:get_status(whereis(ssl_manager)), [_, _,_, _, Prop] = StatusInfo, State = ssl_test_lib:state(Prop), - case element(7, State) of + case element(8, State) of {undefined, undefined} -> ct:sleep(?SLEEP), get_delay_timers(); @@ -236,16 +245,16 @@ session_cache_process_mnesia(Config) when is_list(Config) -> %%% Session cache API callbacks %%-------------------------------------------------------------------- -init([Type]) -> - ets:new(ssl_test, [named_table, public, set]), - ets:insert(ssl_test, {type, Type}), - case Type of +init(Opts) -> + case proplists:get_value(type, Opts) of list -> spawn(fun() -> session_loop([]) end); mnesia -> mnesia:start(), - {atomic,ok} = mnesia:create_table(sess_cache, []), - sess_cache + Name = atom_to_list(proplists:get_value(role, Opts)), + TabName = list_to_atom(Name ++ "sess_cache"), + {atomic,ok} = mnesia:create_table(TabName, []), + TabName end. session_cb() -> @@ -258,7 +267,7 @@ terminate(Cache) -> Cache ! 
terminate; mnesia -> catch {atomic,ok} = - mnesia:delete_table(sess_cache) + mnesia:delete_table(Cache) end. lookup(Cache, Key) -> @@ -268,10 +277,10 @@ lookup(Cache, Key) -> receive {Cache, Res} -> Res end; mnesia -> case mnesia:transaction(fun() -> - mnesia:read(sess_cache, + mnesia:read(Cache, Key, read) end) of - {atomic, [{sess_cache, Key, Value}]} -> + {atomic, [{Cache, Key, Value}]} -> Value; _ -> undefined @@ -285,8 +294,8 @@ update(Cache, Key, Value) -> mnesia -> {atomic, ok} = mnesia:transaction(fun() -> - mnesia:write(sess_cache, - {sess_cache, Key, Value}, write) + mnesia:write(Cache, + {Cache, Key, Value}, write) end) end. @@ -297,7 +306,7 @@ delete(Cache, Key) -> mnesia -> {atomic, ok} = mnesia:transaction(fun() -> - mnesia:delete(sess_cache, Key) + mnesia:delete(Cache, Key) end) end. @@ -308,7 +317,7 @@ foldl(Fun, Acc, Cache) -> receive {Cache, Res} -> Res end; mnesia -> Foldl = fun() -> - mnesia:foldl(Fun, Acc, sess_cache) + mnesia:foldl(Fun, Acc, Cache) end, {atomic, Res} = mnesia:transaction(Foldl), Res @@ -325,7 +334,7 @@ select_session(Cache, PartialKey) -> mnesia -> Sel = fun() -> mnesia:select(Cache, - [{{sess_cache,{PartialKey,'$1'}, '$2'}, + [{{Cache,{PartialKey,'$1'}, '$2'}, [],['$$']}]) end, {atomic, Res} = mnesia:transaction(Sel), diff --git a/lib/ssl/test/ssl_test_lib.erl b/lib/ssl/test/ssl_test_lib.erl index 74d71263de..7d0546210c 100644 --- a/lib/ssl/test/ssl_test_lib.erl +++ b/lib/ssl/test/ssl_test_lib.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2008-2014. All Rights Reserved. +%% Copyright Ericsson AB 2008-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -187,6 +187,7 @@ run_client(Opts) -> Transport = proplists:get_value(transport, Opts, ssl), Options = proplists:get_value(options, Opts), ct:log("~p:~p~n~p:connect(~p, ~p)@~p~n", [?MODULE,?LINE, Transport, Host, Port, Node]), + ct:log("SSLOpts: ~p", [Options]), case rpc:call(Node, Transport, connect, [Host, Port, Options]) of {ok, Socket} -> Pid ! {connected, Socket}, @@ -253,7 +254,6 @@ check_result(Server, ServerMsg, Client, ClientMsg) -> {Port, {data,Debug}} when is_port(Port) -> ct:log("~p:~p~nopenssl ~s~n",[?MODULE,?LINE, Debug]), check_result(Server, ServerMsg, Client, ClientMsg); - Unexpected -> Reason = {{expected, {Client, ClientMsg}}, {expected, {Server, ServerMsg}}, {got, Unexpected}}, @@ -267,6 +267,9 @@ check_result(Pid, Msg) -> {Port, {data,Debug}} when is_port(Port) -> ct:log("~p:~p~nopenssl ~s~n",[?MODULE,?LINE, Debug]), check_result(Pid,Msg); + %% {Port, {exit_status, Status}} when is_port(Port) -> + %% ct:log("~p:~p Exit status: ~p~n",[?MODULE,?LINE, Status]), + %% check_result(Pid, Msg); Unexpected -> Reason = {{expected, {Pid, Msg}}, {got, Unexpected}}, @@ -811,48 +814,34 @@ openssl_rsa_suites(CounterPart) -> false -> "DSS | ECDHE | ECDH" end, - lists:filter(fun(Str) -> - case re:run(Str, Names,[]) of - nomatch -> - false; - _ -> - true - end - end, Ciphers). + lists:filter(fun(Str) -> string_regex_filter(Str, Names) + end, Ciphers). openssl_dsa_suites() -> Ciphers = ssl:cipher_suites(openssl), - lists:filter(fun(Str) -> - case re:run(Str,"DSS",[]) of - nomatch -> - false; - _ -> - true - end + lists:filter(fun(Str) -> string_regex_filter(Str, "DSS") end, Ciphers). 
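The session-cache callback changes above (separate client and server tables, init arguments passed as a property list that the manager extends with the role) translate into application configuration along these lines; the callback module name is hypothetical:

    application:load(ssl),
    application:set_env(ssl, session_cb, my_session_cache),
    application:set_env(ssl, session_cb_init_args, [{type, list}]),
    ssl:start().
    %% my_session_cache:init/1 is then called for each role with a proplist
    %% such as [{role, client}, {type, list}] respectively [{role, server}, ...].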
openssl_ecdsa_suites() -> Ciphers = ssl:cipher_suites(openssl), - lists:filter(fun(Str) -> - case re:run(Str,"ECDHE-ECDSA",[]) of - nomatch -> - false; - _ -> - true - end + lists:filter(fun(Str) -> string_regex_filter(Str, "ECDHE-ECDSA") end, Ciphers). openssl_ecdh_rsa_suites() -> Ciphers = ssl:cipher_suites(openssl), - lists:filter(fun(Str) -> - case re:run(Str,"ECDH-RSA",[]) of - nomatch -> - false; - _ -> - true - end + lists:filter(fun(Str) -> string_regex_filter(Str, "ECDH-RSA") end, Ciphers). +string_regex_filter(Str, Search) when is_list(Str) -> + case re:run(Str, Search, []) of + nomatch -> + false; + _ -> + true + end; +string_regex_filter(_Str, _Search) -> + false. + anonymous_suites() -> Suites = [{dh_anon, rc4_128, md5}, @@ -860,6 +849,8 @@ anonymous_suites() -> {dh_anon, '3des_ede_cbc', sha}, {dh_anon, aes_128_cbc, sha}, {dh_anon, aes_256_cbc, sha}, + {dh_anon, aes_128_gcm, null}, + {dh_anon, aes_256_gcm, null}, {ecdh_anon,rc4_128,sha}, {ecdh_anon,'3des_ede_cbc',sha}, {ecdh_anon,aes_128_cbc,sha}, @@ -885,8 +876,13 @@ psk_suites() -> {rsa_psk, aes_128_cbc, sha}, {rsa_psk, aes_256_cbc, sha}, {rsa_psk, aes_128_cbc, sha256}, - {rsa_psk, aes_256_cbc, sha384} -], + {rsa_psk, aes_256_cbc, sha384}, + {psk, aes_128_gcm, null}, + {psk, aes_256_gcm, null}, + {dhe_psk, aes_128_gcm, null}, + {dhe_psk, aes_256_gcm, null}, + {rsa_psk, aes_128_gcm, null}, + {rsa_psk, aes_256_gcm, null}], ssl_cipher:filter_suites(Suites). psk_anon_suites() -> @@ -925,6 +921,10 @@ srp_dss_suites() -> {srp_dss, aes_256_cbc, sha}], ssl_cipher:filter_suites(Suites). +rc4_suites(Version) -> + Suites = ssl_cipher:rc4_suites(Version), + ssl_cipher:filter_suites(Suites). + pem_to_der(File) -> {ok, PemBin} = file:read_file(File), public_key:pem_decode(PemBin). @@ -1130,9 +1130,10 @@ version_flag(sslv3) -> filter_suites(Ciphers0) -> Version = tls_record:highest_protocol_version([]), Supported0 = ssl_cipher:suites(Version) - ++ ssl_cipher:anonymous_suites() + ++ ssl_cipher:anonymous_suites(Version) ++ ssl_cipher:psk_suites(Version) - ++ ssl_cipher:srp_suites(), + ++ ssl_cipher:srp_suites() + ++ ssl_cipher:rc4_suites(Version), Supported1 = ssl_cipher:filter_suites(Supported0), Supported2 = [ssl:suite_definition(S) || S <- Supported1], [Cipher || Cipher <- Ciphers0, lists:member(Cipher, Supported2)]. diff --git a/lib/ssl/test/ssl_upgrade_SUITE.erl b/lib/ssl/test/ssl_upgrade_SUITE.erl new file mode 100644 index 0000000000..c83fb367dc --- /dev/null +++ b/lib/ssl/test/ssl_upgrade_SUITE.erl @@ -0,0 +1,164 @@ +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2014-2015. All Rights Reserved. +%% +%% The contents of this file are subject to the Erlang Public License, +%% Version 1.1, (the "License"); you may not use this file except in +%% compliance with the License. You should have received a copy of the +%% Erlang Public License along with this software. If not, it can be +%% retrieved online at http://www.erlang.org/.2 +%% +%% Software distributed under the License is distributed on an "AS IS" +%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See +%% the License for the specific language governing rights and limitations +%% under the License. +%% +%% %CopyrightEnd% +%% +-module(ssl_upgrade_SUITE). + +%% Note: This directive should only be used in test suites. +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). + +-record(state, { + config, + server, + client, + soft + }). + +all() -> + [ + minor_upgrade, + major_upgrade + ]. 
+ +init_per_suite(Config0) -> + catch crypto:stop(), + try {crypto:start(), erlang:system_info({wordsize, internal}) == erlang:system_info({wordsize, external})} of + {ok, true} -> + case ct_release_test:init(Config0) of + {skip, Reason} -> + {skip, Reason}; + Config -> + Result = + (catch make_certs:all(?config(data_dir, Config), + ?config(priv_dir, Config))), + ct:log("Make certs ~p~n", [Result]), + ssl_test_lib:cert_options(Config) + end; + {ok, false} -> + {skip, "Test server will not handle halfwordemulator correctly. Skip as halfwordemulator is deprecated"} + catch _:_ -> + {skip, "Crypto did not start"} + end. + +end_per_suite(Config) -> + ct_release_test:cleanup(Config), + crypto:stop(). + +init_per_testcase(_TestCase, Config) -> + Config. +end_per_testcase(_TestCase, Config) -> + Config. + +major_upgrade(Config) when is_list(Config) -> + ct_release_test:upgrade(ssl, major,{?MODULE, #state{config = Config}}, Config). + +minor_upgrade(Config) when is_list(Config) -> + ct_release_test:upgrade(ssl, minor,{?MODULE, #state{config = Config}}, Config). + +upgrade_init(CTData, #state{config = Config} = State) -> + {ok, {_, _, Up, _Down}} = ct_release_test:get_appup(CTData, ssl), + ct:pal("Up: ~p", [Up]), + Soft = is_soft(Up), %% It is symmetrical, if upgrade is soft so is downgrade + case Soft of + true -> + {Server, Client} = soft_start_connection(Config), + State#state{server = Server, client = Client, + soft = Soft}; + false -> + State#state{soft = Soft} + end. + +upgrade_upgraded(_, #state{soft = false, config = Config} = State) -> + {Server, Client} = restart_start_connection(Config), + ssl_test_lib:check_result(Server, ok, Client, ok), + ssl_test_lib:close(Server), + ssl_test_lib:close(Client), + State; + +upgrade_upgraded(_, #state{server = Server0, client = Client0, + config = Config, soft = true} = State) -> + Server0 ! changed_version, + Client0 ! changed_version, + ssl_test_lib:check_result(Server0, ok, Client0, ok), + ssl_test_lib:close(Server0), + ssl_test_lib:close(Client0), + {Server, Client} = soft_start_connection(Config), + State#state{server = Server, client = Client}. + +upgrade_downgraded(_, #state{soft = false, config = Config} = State) -> + {Server, Client} = restart_start_connection(Config), + ssl_test_lib:check_result(Server, ok, Client, ok), + ssl_test_lib:close(Server), + ssl_test_lib:close(Client), + State; + +upgrade_downgraded(_, #state{server = Server, client = Client, soft = true} = State) -> + Server ! changed_version, + Client ! changed_version, + ssl_test_lib:check_result(Server, ok, Client, ok), + ssl_test_lib:close(Server), + ssl_test_lib:close(Client), + State. + +use_connection(Socket) -> + ssl_test_lib:send_recv_result_active(Socket), + receive + changed_version -> + ssl_test_lib:send_recv_result_active(Socket) + end. + +soft_start_connection(Config) -> + ClientOpts = ?config(client_verification_opts, Config), + ServerOpts = ?config(server_verification_opts, Config), + {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config), + Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0}, + {from, self()}, + {mfa, {?MODULE, use_connection, []}}, + {options, ServerOpts}]), + + Port = ssl_test_lib:inet_port(Server), + Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, + {host, Hostname}, + {from, self()}, + {mfa, {?MODULE, use_connection, []}}, + {options, ClientOpts}]), + {Server, Client}. 
+ +restart_start_connection(Config) -> + ClientOpts = ?config(client_verification_opts, Config), + ServerOpts = ?config(server_verification_opts, Config), + {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config), + Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0}, + {from, self()}, + {mfa, {ssl_test_lib, send_recv_result_active, []}}, + {options, ServerOpts}]), + + Port = ssl_test_lib:inet_port(Server), + Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port}, + {host, Hostname}, + {from, self()}, + {mfa, {ssl_test_lib, send_recv_result_active, []}}, + {options, ClientOpts}]), + {Server, Client}. + +is_soft([{restart_application, ssl}]) -> + false; +is_soft(_) -> + true. + diff --git a/lib/ssl/vsn.mk b/lib/ssl/vsn.mk index bda974da0e..171147adf2 100644 --- a/lib/ssl/vsn.mk +++ b/lib/ssl/vsn.mk @@ -1 +1 @@ -SSL_VSN = 5.3.8 +SSL_VSN = 7.0 diff --git a/lib/stdlib/doc/src/Makefile b/lib/stdlib/doc/src/Makefile index ff77c3eea0..f5d8b2072a 100644 --- a/lib/stdlib/doc/src/Makefile +++ b/lib/stdlib/doc/src/Makefile @@ -76,7 +76,6 @@ XML_REF3_FILES = \ ms_transform.xml \ orddict.xml \ ordsets.xml \ - pg.xml \ pool.xml \ proc_lib.xml \ proplists.xml \ diff --git a/lib/stdlib/doc/src/binary.xml b/lib/stdlib/doc/src/binary.xml index 2410f1f9b8..6c0968d242 100644 --- a/lib/stdlib/doc/src/binary.xml +++ b/lib/stdlib/doc/src/binary.xml @@ -5,7 +5,7 @@ <header> <copyright> <year>2009</year> - <year>2013</year> + <year>2014</year> <holder>Ericsson AB, All Rights Reserved</holder> </copyright> <legalnotice> @@ -450,7 +450,7 @@ store(Binary, GBSet) -> </code> <p>In this example, we chose to copy the binary content before - inserting it in the <c>gb_set()</c> if it references a binary more than + inserting it in the <c>gb_sets:set()</c> if it references a binary more than twice the size of the data we're going to keep. 
Of course different rules for when copying will apply to different programs.</p> @@ -578,6 +578,10 @@ store(Binary, GBSet) -> <item><p>Removes trailing empty parts of the result (as does trim in <c>re:split/3</c>)</p></item> + <tag>trim_all</tag> + + <item><p>Removes all empty parts of the result.</p></item> + <tag>global</tag> <item><p>Repeats the split until the <c><anno>Subject</anno></c> is diff --git a/lib/stdlib/doc/src/ets.xml b/lib/stdlib/doc/src/ets.xml index 3df24bf688..6b9524ef63 100644 --- a/lib/stdlib/doc/src/ets.xml +++ b/lib/stdlib/doc/src/ets.xml @@ -456,6 +456,12 @@ Error: fun containing local Erlang function calls <item><c>{type, <seealso marker="#type-type">type()</seealso>}</c> <br></br> The table type.</item> + <item><c>{read_concurrency, boolean()}</c> <br></br> + + Indicates whether the table uses read_concurrency or not.</item> + <item><c>{write_concurrency, boolean()}</c> <br></br> + + Indicates whether the table uses write_concurrency or not.</item> </list> </desc> </func> @@ -1587,6 +1593,21 @@ true</pre> </desc> </func> <func> + <name name="take" arity="2"/> + <fsummary>Return and remove all objects with a given key from an ETS + table.</fsummary> + <desc> + <p>Returns a list of all objects with the key <c><anno>Key</anno></c> in + the table <c><anno>Tab</anno></c> and removes.</p> + <p>The given <c><anno>Key</anno></c> is used to identify the object by + either <em>comparing equal</em> the key of an object in an + <c>ordered_set</c> table, or <em>matching</em> in other types of + tables (see <seealso marker="#lookup/2">lookup/2</seealso> and + <seealso marker="#new/2">new/2</seealso> for details on the + difference).</p> + </desc> + </func> + <func> <name name="to_dets" arity="2"/> <fsummary>Fill a Dets table with objects from an ETS table.</fsummary> <desc> @@ -1597,14 +1618,18 @@ true</pre> </func> <func> <name name="update_counter" arity="3" clause_i="1"/> + <name name="update_counter" arity="4" clause_i="1"/> <name name="update_counter" arity="3" clause_i="2"/> + <name name="update_counter" arity="4" clause_i="2"/> <name name="update_counter" arity="3" clause_i="3"/> + <name name="update_counter" arity="4" clause_i="3"/> <type variable="Tab"/> <type variable="Key"/> <type variable="UpdateOp" name_i="1"/> <type variable="Pos" name_i="1"/> <type variable="Threshold" name_i="1"/> <type variable="SetValue" name_i="1"/> + <type variable="Default"/> <fsummary>Update a counter object in an ETS table.</fsummary> <desc> <p>This function provides an efficient way to update one or more @@ -1646,12 +1671,22 @@ true</pre> <seealso marker="#lookup/2">lookup/2</seealso> and <seealso marker="#new/2">new/2</seealso> for details on the difference).</p> + <p>If a default object <c><anno>Default</anno></c> is given, it is used + as the object to be updated if the key is missing from the table. The + value in place of the key is ignored and replaced by the proper key + value. 
The return value is as if the default object had not been used, + that is a single updated element or a list of them.</p> <p>The function will fail with reason <c>badarg</c> if:</p> <list type="bulleted"> <item>the table is not of type <c>set</c> or <c>ordered_set</c>,</item> - <item>no object with the right key exists,</item> + <item>no object with the right key exists and no default object were + supplied,</item> <item>the object has the wrong arity,</item> + <item>the default object arity is smaller than + <c><![CDATA[<keypos>]]></c></item> + <item>any field from the default object being updated is not an + integer</item> <item>the element to update is not an integer,</item> <item>the element to update is also the key, or,</item> <item>any of <c><anno>Pos</anno></c>, <c><anno>Incr</anno></c>, <c><anno>Threshold</anno></c> or diff --git a/lib/stdlib/doc/src/gen_event.xml b/lib/stdlib/doc/src/gen_event.xml index b9dfff833e..5c96d6e576 100644 --- a/lib/stdlib/doc/src/gen_event.xml +++ b/lib/stdlib/doc/src/gen_event.xml @@ -4,7 +4,7 @@ <erlref> <header> <copyright> - <year>1996</year><year>2013</year> + <year>1996</year><year>2014</year> <holder>Ericsson AB. All Rights Reserved.</holder> </copyright> <legalnotice> @@ -44,6 +44,7 @@ <pre> gen_event module Callback module ---------------- --------------- +gen_event:start gen_event:start_link -----> - gen_event:add_handler @@ -177,7 +178,7 @@ gen_event:stop -----> Module:terminate/2 <name>add_handler(EventMgrRef, Handler, Args) -> Result</name> <fsummary>Add an event handler to a generic event manager.</fsummary> <type> - <v>EventMgr = Name | {Name,Node} | {global,GlobalName} + <v>EventMgrRef = Name | {Name,Node} | {global,GlobalName} | {via,Module,ViaName} | pid()</v> <v> Name = Node = atom()</v> <v> GlobalName = ViaName = term()</v> @@ -223,7 +224,7 @@ gen_event:stop -----> Module:terminate/2 <name>add_sup_handler(EventMgrRef, Handler, Args) -> Result</name> <fsummary>Add a supervised event handler to a generic event manager.</fsummary> <type> - <v>EventMgr = Name | {Name,Node} | {global,GlobalName} + <v>EventMgrRef = Name | {Name,Node} | {global,GlobalName} | {via,Module,ViaName} | pid()</v> <v> Name = Node = atom()</v> <v> GlobalName = ViaName = term()</v> @@ -456,19 +457,37 @@ gen_event:stop -----> Module:terminate/2 </func> <func> <name>stop(EventMgrRef) -> ok</name> + <name>stop(EventMgrRef, Reason, Timeout) -> ok</name> <fsummary>Terminate a generic event manager.</fsummary> <type> <v>EventMgrRef = Name | {Name,Node} | {global,GlobalName} | {via,Module,ViaName} | pid()</v> <v>Name = Node = atom()</v> <v>GlobalName = ViaName = term()</v> + <v>Reason = term()</v> + <v>Timeout = int()>0 | infinity</v> </type> <desc> - <p>Terminates the event manager <c>EventMgrRef</c>. Before - terminating, the event manager will call - <c>Module:terminate(stop,...)</c> for each installed event - handler.</p> - <p>See <c>add_handler/3</c> for a description of the argument.</p> + <p>Orders the event manager <c>EventMgrRef</c> to exit with + the given <c>Reason</c> and waits for it to + terminate. Before terminating, the gen_event will call + <seealso marker="#Module:terminate/2">Module:terminate(stop,...)</seealso> + for each installed event handler.</p> + <p>The function returns <c>ok</c> if the event manager terminates + with the expected reason. Any other reason than <c>normal</c>, + <c>shutdown</c>, or <c>{shutdown,Term}</c> will cause an + error report to be issued using + <seealso marker="kernel:error_logger#format/2">error_logger:format/2</seealso>. 
+ The default <c>Reason</c> is <c>normal</c>.</p> + <p><c>Timeout</c> is an integer greater than zero which + specifies how many milliseconds to wait for the event manager to + terminate, or the atom <c>infinity</c> to wait + indefinitely. The default value is <c>infinity</c>. If the + event manager has not terminated within the specified time, a + <c>timeout</c> exception is raised.</p> + <p>If the process does not exist, a <c>noproc</c> exception + is raised.</p> + <p>See <c>add_handler/3</c> for a description of <c>EventMgrRef</c>.</p> </desc> </func> </funcs> diff --git a/lib/stdlib/doc/src/gen_fsm.xml b/lib/stdlib/doc/src/gen_fsm.xml index 848d57f3e6..b1bba3eff0 100644 --- a/lib/stdlib/doc/src/gen_fsm.xml +++ b/lib/stdlib/doc/src/gen_fsm.xml @@ -4,7 +4,7 @@ <erlref> <header> <copyright> - <year>1996</year><year>2013</year> + <year>1996</year><year>2014</year> <holder>Ericsson AB. All Rights Reserved.</holder> </copyright> <legalnotice> @@ -43,8 +43,11 @@ <pre> gen_fsm module Callback module -------------- --------------- +gen_fsm:start gen_fsm:start_link -----> Module:init/1 +gen_fsm:stop -----> Module:terminate/3 + gen_fsm:send_event -----> Module:StateName/2 gen_fsm:send_all_state_event -----> Module:handle_event/3 @@ -187,6 +190,39 @@ gen_fsm:sync_send_all_state_event -----> Module:handle_sync_event/4 </desc> </func> <func> + <name>stop(FsmRef) -> ok</name> + <name>stop(FsmRef, Reason, Timeout) -> ok</name> + <fsummary>Synchronously stop a generic FSM.</fsummary> + <type> + <v>FsmRef = Name | {Name,Node} | {global,GlobalName} + | {via,Module,ViaName} | pid()</v> + <v> Node = atom()</v> + <v> GlobalName = ViaName = term()</v> + <v>Reason = term()</v> + <v>Timeout = int()>0 | infinity</v> + </type> + <desc> + <p>Orders a generic FSM to exit with the given <c>Reason</c> + and waits for it to terminate. The gen_fsm will call + <seealso marker="#Module:terminate/3">Module:terminate/3</seealso> + before exiting.</p> + <p>The function returns <c>ok</c> if the generic FSM terminates + with the expected reason. Any other reason than <c>normal</c>, + <c>shutdown</c>, or <c>{shutdown,Term}</c> will cause an + error report to be issued using + <seealso marker="kernel:error_logger#format/2">error_logger:format/2</seealso>. + The default <c>Reason</c> is <c>normal</c>.</p> + <p><c>Timeout</c> is an integer greater than zero which + specifies how many milliseconds to wait for the generic FSM + to terminate, or the atom <c>infinity</c> to wait + indefinitely. The default value is <c>infinity</c>. If the + generic FSM has not terminated within the specified time, a + <c>timeout</c> exception is raised.</p> + <p>If the process does not exist, a <c>noproc</c> exception + is raised.</p> + </desc> + </func> + <func> <name>send_event(FsmRef, Event) -> ok</name> <fsummary>Send an event asynchronously to a generic FSM.</fsummary> <type> @@ -528,7 +564,8 @@ gen_fsm:sync_send_all_state_event -----> Module:handle_sync_event/4 <c>Module:init/1</c> for a description of <c>Timeout</c> and <c>hibernate</c>.</p> <p>If the function returns <c>{stop,Reason,NewStateData}</c>, the gen_fsm will call - <c>Module:terminate(Reason,NewStateData)</c> and terminate.</p> + <c>Module:terminate(Reason,StateName,NewStateData)</c> and + terminate.</p> </desc> </func> <func> @@ -614,7 +651,8 @@ gen_fsm:sync_send_all_state_event -----> Module:handle_sync_event/4 <c>{stop,Reason,NewStateData}</c>, any reply to <c>From</c> must be given explicitly using <c>gen_fsm:reply/2</c>. 
The gen_fsm will then call - <c>Module:terminate(Reason,NewStateData)</c> and terminate.</p> + <c>Module:terminate(Reason,StateName,NewStateData)</c> and + terminate.</p> </desc> </func> <func> diff --git a/lib/stdlib/doc/src/gen_server.xml b/lib/stdlib/doc/src/gen_server.xml index 62c0394479..a915e567a5 100644 --- a/lib/stdlib/doc/src/gen_server.xml +++ b/lib/stdlib/doc/src/gen_server.xml @@ -4,7 +4,7 @@ <erlref> <header> <copyright> - <year>1996</year><year>2013</year> + <year>1996</year><year>2014</year> <holder>Ericsson AB. All Rights Reserved.</holder> </copyright> <legalnotice> @@ -43,8 +43,11 @@ <pre> gen_server module Callback module ----------------- --------------- +gen_server:start gen_server:start_link -----> Module:init/1 +gen_server:stop -----> Module:terminate/2 + gen_server:call gen_server:multi_call -----> Module:handle_call/3 @@ -184,6 +187,40 @@ gen_server:abcast -----> Module:handle_cast/2 </desc> </func> <func> + <name>stop(ServerRef) -> ok</name> + <name>stop(ServerRef, Reason, Timeout) -> ok</name> + <fsummary>Synchronously stop a generic server.</fsummary> + <type> + <v>ServerRef = Name | {Name,Node} | {global,GlobalName} + | {via,Module,ViaName} | pid()</v> + <v> Node = atom()</v> + <v> GlobalName = ViaName = term()</v> + <v>Reason = term()</v> + <v>Timeout = int()>0 | infinity</v> + </type> + <desc> + <p>Orders a generic server to exit with the + given <c>Reason</c> and waits for it to terminate. The + gen_server will call + <seealso marker="#Module:terminate/2">Module:terminate/2</seealso> + before exiting.</p> + <p>The function returns <c>ok</c> if the server terminates + with the expected reason. Any other reason than <c>normal</c>, + <c>shutdown</c>, or <c>{shutdown,Term}</c> will cause an + error report to be issued using + <seealso marker="kernel:error_logger#format/2">error_logger:format/2</seealso>. + The default <c>Reason</c> is <c>normal</c>.</p> + <p><c>Timeout</c> is an integer greater than zero which + specifies how many milliseconds to wait for the server to + terminate, or the atom <c>infinity</c> to wait + indefinitely. The default value is <c>infinity</c>. If the + server has not terminated within the specified time, a + <c>timeout</c> exception is raised.</p> + <p>If the process does not exist, a <c>noproc</c> exception + is raised.</p> + </desc> + </func> + <func> <name>call(ServerRef, Request) -> Reply</name> <name>call(ServerRef, Request, Timeout) -> Reply</name> <fsummary>Make a synchronous call to a generic server.</fsummary> diff --git a/lib/stdlib/doc/src/io.xml b/lib/stdlib/doc/src/io.xml index a28180b42a..8ebfdb2e7f 100644 --- a/lib/stdlib/doc/src/io.xml +++ b/lib/stdlib/doc/src/io.xml @@ -505,7 +505,8 @@ ok <p>Writes the data with standard syntax in the same way as <c>~w</c>, but breaks terms whose printed representation is longer than one line into many lines and indents each - line sensibly. It also tries to detect lists of + line sensibly. Left justification is not supported. + It also tries to detect lists of printable characters and to output these as strings. The Unicode translation modifier is used for determining what characters are printable. For example:</p> diff --git a/lib/stdlib/doc/src/io_lib.xml b/lib/stdlib/doc/src/io_lib.xml index 3312b08064..2117d66381 100644 --- a/lib/stdlib/doc/src/io_lib.xml +++ b/lib/stdlib/doc/src/io_lib.xml @@ -4,7 +4,7 @@ <erlref> <header> <copyright> - <year>1996</year><year>2013</year> + <year>1996</year><year>2014</year> <holder>Ericsson AB. 
All Rights Reserved.</holder> </copyright> <legalnotice> @@ -59,6 +59,35 @@ <datatype> <name name="latin1_string"/> </datatype> + <datatype> + <name name="format_spec"/> + <desc><p>Description:</p> + <list type="bulleted"> + <item><p><c>control_char</c> is the type of control + sequence: <c>$P</c>, <c>$w</c>, and so on;</p> + </item> + <item><p><c>args</c> is a list of the arguments used by the + control sequence, or an empty list if the control sequence + does not take any arguments;</p> + </item> + <item><p><c>width</c> is the field width;</p> + </item> + <item><p><c>adjust</c> is the adjustment;</p> + </item> + <item><p><c>precision</c> is the precision of the printed + argument;</p> + </item> + <item><p><c>pad_char</c> is the padding character;</p> + </item> + <item><p><c>encoding</c> is set to <c>true</c> if the translation + modifier <c>t</c> is present;</p> + </item> + <item><p><c>strings</c> is set to <c>false</c> if the modifier + <c>l</c> is present.</p> + </item> + </list> + </desc> + </datatype> </datatypes> <funcs> <func> @@ -260,6 +289,45 @@ </desc> </func> <func> + <name name="scan_format" arity="2"/> + <fsummary>Parse all control sequences in the format string</fsummary> + <desc> + <p>Returns a list corresponding to the given format string, + where control sequences have been replaced with + corresponding tuples. This list can be passed to <seealso + marker="#build_text/1">io_lib:build_text/1</seealso> to have + the same effect as <c>io_lib:format(Format, Args)</c>, or to + <seealso + marker="#unscan_format/1">io_lib:unscan_format/1</seealso> + in order to get the corresponding pair of <c>Format</c> and + <c>Args</c> (with every <c>*</c> and corresponding argument + expanded to numeric values).</p> + <p>A typical use of this function is to replace unbounded-size + control sequences like <c>~w</c> and <c>~p</c> with the + depth-limited variants <c>~W</c> and <c>~P</c> before + formatting to text, e.g. 
in a logger.</p> + </desc> + </func> + <func> + <name name="unscan_format" arity="1"/> + <fsummary>Revert a pre-parsed format list to a plain character list + and a list of arguments</fsummary> + <desc> + <p>See <seealso + marker="#scan_format/2">io_lib:scan_format/2</seealso> for + details.</p> + </desc> + </func> + <func> + <name name="build_text" arity="1"/> + <fsummary>Build the output text for a pre-parsed format list</fsummary> + <desc> + <p>See <seealso + marker="#scan_format/2">io_lib:scan_format/2</seealso> for + details.</p> + </desc> + </func> + <func> <name name="indentation" arity="2"/> <fsummary>Indentation after printing string</fsummary> <desc> diff --git a/lib/stdlib/doc/src/math.xml b/lib/stdlib/doc/src/math.xml index 43cd20e726..7cfc8a1175 100644 --- a/lib/stdlib/doc/src/math.xml +++ b/lib/stdlib/doc/src/math.xml @@ -67,6 +67,7 @@ <name name="atanh" arity="1"/> <name name="exp" arity="1"/> <name name="log" arity="1"/> + <name name="log2" arity="1"/> <name name="log10" arity="1"/> <name name="pow" arity="2"/> <name name="sqrt" arity="1"/> diff --git a/lib/stdlib/doc/src/pg.xml b/lib/stdlib/doc/src/pg.xml deleted file mode 100644 index a3b69884b6..0000000000 --- a/lib/stdlib/doc/src/pg.xml +++ /dev/null @@ -1,114 +0,0 @@ -<?xml version="1.0" encoding="utf-8" ?> -<!DOCTYPE erlref SYSTEM "erlref.dtd"> - -<erlref> - <header> - <copyright> - <year>1996</year> - <year>2014</year> - <holder>Ericsson AB, All Rights Reserved</holder> - </copyright> - <legalnotice> - The contents of this file are subject to the Erlang Public License, - Version 1.1, (the "License"); you may not use this file except in - compliance with the License. You should have received a copy of the - Erlang Public License along with this software. If not, it can be - retrieved online at http://www.erlang.org/. - - Software distributed under the License is distributed on an "AS IS" - basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See - the License for the specific language governing rights and limitations - under the License. - - The Initial Developer of the Original Code is Ericsson AB. - </legalnotice> - - <title>pg</title> - <prepared></prepared> - <docno></docno> - <date></date> - <rev></rev> - </header> - <module>pg</module> - <modulesummary>Distributed, Named Process Groups</modulesummary> - <description> - <warning> - <p>This module is deprecated and will be removed in Erlang/OTP 18.</p> - </warning> - <p>This (experimental) module implements process groups. A process - group is a group of processes that can be accessed by a common - name. For example, a group named <c>foobar</c> can include a set - of processes as members of this group and they can be located on - different nodes.</p> - <p>When messages are sent to the named group, all members of - the group receive the message. The messages are serialized. If - the process <c>P1</c> sends the message <c>M1</c> to the group, - and process <c>P2</c> simultaneously sends message <c>M2</c>, then - all members of the group receive the two messages in the same - order. If members of a group terminate, they are automatically - removed from the group.</p> - <p>This module is not complete. The module is inspired by the ISIS - system and the causal order protocol of the ISIS system should - also be implemented. 
At the moment, all messages are serialized - by sending them through a group master process.</p> - </description> - <funcs> - <func> - <name name="create" arity="1"/> - <fsummary>Create an empty group</fsummary> - <desc> - <p>Creates an empty group named <c><anno>PgName</anno></c> on the current - node.</p> - </desc> - </func> - <func> - <name name="create" arity="2"/> - <fsummary>Create an empty group on another node</fsummary> - <desc> - <p>Creates an empty group named <c><anno>PgName</anno></c> on the node - <c><anno>Node</anno></c>.</p> - </desc> - </func> - <func> - <name name="join" arity="2"/> - <fsummary>Join a pid to a process group</fsummary> - <desc> - <p>Joins the pid <c><anno>Pid</anno></c> to the process group - <c><anno>PgName</anno></c>. - Returns a list of all old members of the group.</p> - </desc> - </func> - <func> - <name name="send" arity="2"/> - <fsummary>Send a message to all members of a process group</fsummary> - <desc> - <p>Sends the tuple <c>{pg_message, From, PgName, Msg}</c> to - all members of the process group <c><anno>PgName</anno></c>.</p> - <p>Failure: <c>{badarg, {<anno>PgName</anno>, <anno>Msg</anno>}}</c> - if <c><anno>PgName</anno></c> is - not a process group (a globally registered name).</p> - </desc> - </func> - <func> - <name name="esend" arity="2"/> - <fsummary>Send a message to all members of a process group, except ourselves</fsummary> - <desc> - <p>Sends the tuple <c>{pg_message, From, PgName, Msg}</c> to - all members of the process group <c><anno>PgName</anno></c>, except - ourselves.</p> - <p>Failure: <c>{badarg, {<anno>PgName</anno>, <anno>Msg</anno>}}</c> - if <c><anno>PgName</anno></c> is - not a process group (a globally registered name).</p> - </desc> - </func> - <func> - <name name="members" arity="1"/> - <fsummary>Return a list of all members of a process group</fsummary> - <desc> - <p>Returns a list of all members of the process group - <c>PgName</c>.</p> - </desc> - </func> - </funcs> -</erlref> - diff --git a/lib/stdlib/doc/src/proc_lib.xml b/lib/stdlib/doc/src/proc_lib.xml index 5bf5744622..f27a974242 100644 --- a/lib/stdlib/doc/src/proc_lib.xml +++ b/lib/stdlib/doc/src/proc_lib.xml @@ -4,7 +4,7 @@ <erlref> <header> <copyright> - <year>1996</year><year>2013</year> + <year>1996</year><year>2014</year> <holder>Ericsson AB. All Rights Reserved.</holder> </copyright> <legalnotice> @@ -298,6 +298,40 @@ init(Parent) -> <c>proc_lib</c> functions.</p> </desc> </func> + <func> + <name name="stop" arity="1"/> + <fsummary>Terminate a process synchronously.</fsummary> + <type variable="Process"/> + <desc> + <p>Equivalent to <seealso marker="#stop/3">stop(Process, + normal, infinity)</seealso>.</p> + </desc> + </func> + <func> + <name name="stop" arity="3"/> + <fsummary>Terminate a process synchronously.</fsummary> + <type variable="Process"/> + <type variable="Reason"/> + <type variable="Timeout"/> + <desc> + <p>Orders the process to exit with the given <c>Reason</c> and + waits for it to terminate.</p> + <p>The function returns <c>ok</c> if the process exits with + the given <c>Reason</c> within <c>Timeout</c> + milliseconds.</p> + <p>If the call times out, a <c>timeout</c> exception is + raised.</p> + <p>If the process does not exist, a <c>noproc</c> + exception is raised.</p> + <p>The implementation of this function is based on the + <c>terminate</c> system message, and requires that the + process handles system messages correctly. 
+ See <seealso marker="sys">sys(3)</seealso> + and <seealso marker="doc/design_principles:spec_proc">OTP + Design Principles</seealso> for information about system + messages.</p> + </desc> + </func> </funcs> <section> diff --git a/lib/stdlib/doc/src/ref_man.xml b/lib/stdlib/doc/src/ref_man.xml index 6c35578bdf..ea4009dc3e 100644 --- a/lib/stdlib/doc/src/ref_man.xml +++ b/lib/stdlib/doc/src/ref_man.xml @@ -73,7 +73,6 @@ <xi:include href="ms_transform.xml"/> <xi:include href="orddict.xml"/> <xi:include href="ordsets.xml"/> - <xi:include href="pg.xml"/> <xi:include href="pool.xml"/> <xi:include href="proc_lib.xml"/> <xi:include href="proplists.xml"/> diff --git a/lib/stdlib/doc/src/specs.xml b/lib/stdlib/doc/src/specs.xml index 60a04ed5e7..fd77b52da6 100644 --- a/lib/stdlib/doc/src/specs.xml +++ b/lib/stdlib/doc/src/specs.xml @@ -39,7 +39,6 @@ <xi:include href="../specs/specs_ms_transform.xml"/> <xi:include href="../specs/specs_orddict.xml"/> <xi:include href="../specs/specs_ordsets.xml"/> - <xi:include href="../specs/specs_pg.xml"/> <xi:include href="../specs/specs_pool.xml"/> <xi:include href="../specs/specs_proc_lib.xml"/> <xi:include href="../specs/specs_proplists.xml"/> diff --git a/lib/stdlib/doc/src/supervisor.xml b/lib/stdlib/doc/src/supervisor.xml index 3a5027d595..ffac1c0bd7 100644 --- a/lib/stdlib/doc/src/supervisor.xml +++ b/lib/stdlib/doc/src/supervisor.xml @@ -37,12 +37,12 @@ the <c>gen_event</c>, <c>gen_fsm</c>, or <c>gen_server</c> behaviours. A supervisor implemented using this module will have a standard set of interface functions and include functionality - for tracing and error reporting. Supervisors are used to build an + for tracing and error reporting. Supervisors are used to build a hierarchical process structure called a supervision tree, a nice way to structure a fault tolerant application. Refer to <em>OTP Design Principles</em> for more information.</p> - <p>A supervisor assumes the definition of which child processes to - supervise to be located in a callback module exporting a + <p>A supervisor expects the definition of which child processes to + supervise to be specified in a callback module exporting a pre-defined set of functions.</p> <p>Unless otherwise stated, all functions in this module will fail if the specified supervisor does not exist or if bad arguments @@ -53,18 +53,30 @@ <title>Supervision Principles</title> <p>The supervisor is responsible for starting, stopping and monitoring its child processes. The basic idea of a supervisor is - that it should keep its child processes alive by restarting them + that it shall keep its child processes alive by restarting them when necessary.</p> - <p>The children of a supervisor is defined as a list of + <p>The children of a supervisor are defined as a list of <em>child specifications</em>. When the supervisor is started, the child processes are started in order from left to right according to this list. When the supervisor terminates, it first terminates its child processes in reversed start order, from right to left.</p> - <p>A supervisor can have one of the following <em>restart strategies</em>:</p> + <marker id="sup_flags"/> + <p>The properties of a supervisor are defined by the supervisor + flags. 
This is the type definition for the supervisor flags: + </p> + <pre>sup_flags() = #{strategy => strategy(), % optional + intensity => non_neg_integer(), % optional + period => pos_integer()} % optional + </pre> + <p>A supervisor can have one of the following <em>restart + strategies</em>, specified with the <c>strategy</c> key in the + above map: + </p> <list type="bulleted"> <item> <p><c>one_for_one</c> - if one child process terminates and - should be restarted, only that child process is affected.</p> + should be restarted, only that child process is + affected. This is the default restart strategy.</p> </item> <item> <p><c>one_for_all</c> - if one child process terminates and @@ -94,43 +106,53 @@ instead the child specification identifier is used, <c>terminate_child/2</c> will return <c>{error,simple_one_for_one}</c>.</p> - <p>Because a <c>simple_one_for_one</c> supervisor could have many - children, it shuts them all down at same time. So, order in which they - are stopped is not defined. For the same reason, it could have an - overhead with regards to the <c>Shutdown</c> strategy.</p> + <p>Because a <c>simple_one_for_one</c> supervisor could have + many children, it shuts them all down asynchronously. This + means that the children will do their cleanup in parallel, + and therefore the order in which they are stopped is not + defined.</p> </item> </list> <p>To prevent a supervisor from getting into an infinite loop of - child process terminations and restarts, a <em>maximum restart frequency</em> - is defined using two integer values <c>MaxR</c> - and <c>MaxT</c>. If more than <c>MaxR</c> restarts occur within - <c>MaxT</c> seconds, the supervisor terminates all child - processes and then itself. + child process terminations and restarts, a <em>maximum restart + intensity</em> is defined using two integer values specified + with the <c>intensity</c> and <c>period</c> keys in the above + map. Assuming the values <c>MaxR</c> for <c>intensity</c> + and <c>MaxT</c> for <c>period</c>, then if more than <c>MaxR</c> + restarts occur within <c>MaxT</c> seconds, the supervisor will + terminate all child processes and then itself. The default value + for <c>intensity</c> is <c>1</c>, and the default value + for <c>period</c> is <c>5</c>. </p> <marker id="child_spec"/> <p>This is the type definition of a child specification:</p> - <pre> -child_spec() = {Id,StartFunc,Restart,Shutdown,Type,Modules} - Id = term() - StartFunc = {M,F,A} - M = F = atom() - A = [term()] - Restart = permanent | transient | temporary - Shutdown = brutal_kill | int()>0 | infinity - Type = worker | supervisor - Modules = [Module] | dynamic - Module = atom()</pre> + <pre>child_spec() = #{id => child_id(), % mandatory + start => mfargs(), % mandatory + restart => restart(), % optional + shutdown => shutdown(), % optional + type => worker(), % optional + modules => modules()} % optional</pre> + <p>The old tuple format is kept for backwards compatibility, + see <seealso marker="#type-child_spec">child_spec()</seealso>, + but the map is preferred. + </p> <list type="bulleted"> <item> - <p><c>Id</c> is a name that is used to identify the child + <p><c>id</c> is used to identify the child specification internally by the supervisor.</p> + <p>The <c>id</c> key is mandatory.</p> + <p>Note that this identifier on occations has been called + "name". 
As far as possible, the terms "identifier" or "id" + are now used but in order to keep backwards compatibility, + some occurences of "name" can still be found, for example + in error messages.</p> </item> <item> - <p><c>StartFunc</c> defines the function call used to start - the child process. It should be a module-function-arguments + <p><c>start</c> defines the function call used to start the + child process. It must be a module-function-arguments tuple <c>{M,F,A}</c> used as <c>apply(M,F,A)</c>.</p> <p>The start function <em>must create and link to</em> the child - process, and should return <c>{ok,Child}</c> or + process, and must return <c>{ok,Child}</c> or <c>{ok,Child,Info}</c> where <c>Child</c> is the pid of the child process and <c>Info</c> an arbitrary term which is ignored by the supervisor.</p> @@ -143,20 +165,23 @@ child_spec() = {Id,StartFunc,Restart,Shutdown,Type,Modules} error tuple <c>{error,Error}</c>.</p> <p>Note that the <c>start_link</c> functions of the different behaviour modules fulfill the above requirements.</p> + <p>The <c>start</c> key is mandatory.</p> </item> <item> - <p><c>Restart</c> defines when a terminated child process - should be restarted. A <c>permanent</c> child process should - always be restarted, a <c>temporary</c> child process should + <p><c>restart</c> defines when a terminated child process + shall be restarted. A <c>permanent</c> child process will + always be restarted, a <c>temporary</c> child process will never be restarted (even when the supervisor's restart strategy is <c>rest_for_one</c> or <c>one_for_all</c> and a sibling's death causes the temporary process to be terminated) and a - <c>transient</c> child process should be restarted only if + <c>transient</c> child process will be restarted only if it terminates abnormally, i.e. with another exit reason than <c>normal</c>, <c>shutdown</c> or <c>{shutdown,Term}</c>.</p> + <p>The <c>restart</c> key is optional. If it is not given, the + default value <c>permanent</c> will be used.</p> </item> <item> - <p><c>Shutdown</c> defines how a child process should be + <p><c>shutdown</c> defines how a child process shall be terminated. <c>brutal_kill</c> means the child process will be unconditionally terminated using <c>exit(Child,kill)</c>. An integer timeout value means that the supervisor will tell @@ -166,35 +191,45 @@ child_spec() = {Id,StartFunc,Restart,Shutdown,Type,Modules} no exit signal is received within the specified number of milliseconds, the child process is unconditionally terminated using <c>exit(Child,kill)</c>.</p> - <p>If the child process is another supervisor, <c>Shutdown</c> + <p>If the child process is another supervisor, the shutdown time should be set to <c>infinity</c> to give the subtree ample - time to shutdown. It is also allowed to set it to <c>infinity</c>, + time to shut down. It is also allowed to set it to <c>infinity</c>, if the child process is a worker.</p> <warning> - <p>Be careful by setting the <c>Shutdown</c> strategy to + <p>Be careful when setting the shutdown time to <c>infinity</c> when the child process is a worker. Because, in this situation, the termination of the supervision tree depends on the child process, it must be implemented in a safe way and its cleanup procedure must always return.</p> </warning> <p>Note that all child processes implemented using the standard - OTP behavior modules automatically adhere to the shutdown + OTP behaviour modules automatically adhere to the shutdown protocol.</p> + <p>The <c>shutdown</c> key is optional. 
If it is not given, + the default value <c>5000</c> will be used if the child is + of type <c>worker</c>; and <c>infinity</c> will be used if + the child is of type <c>supervisor</c>.</p> </item> <item> - <p><c>Type</c> specifies if the child process is a supervisor or + <p><c>type</c> specifies if the child process is a supervisor or a worker.</p> + <p>The <c>type</c> key is optional. If it is not given, the + default value <c>worker</c> will be used.</p> </item> <item> - <p><c>Modules</c> is used by the release handler during code + <p><c>modules</c> is used by the release handler during code replacement to determine which processes are using a certain - module. As a rule of thumb <c>Modules</c> should be a list - with one element <c>[Module]</c>, where <c>Module</c> is - the callback module, if the child process is a supervisor, - gen_server or gen_fsm. If the child process is an event - manager (gen_event) with a dynamic set of callback modules, - <c>Modules</c> should be <c>dynamic</c>. See <em>OTP Design Principles</em> - for more information about release handling.</p> + module. As a rule of thumb, if the child process is a + <c>supervisor</c>, <c>gen_server</c>, or <c>gen_fsm</c>, + this should be a list with one element <c>[Module]</c>, + where <c>Module</c> is the callback module. If the child + process is an event manager (<c>gen_event</c>) with a + dynamic set of callback modules, the value <c>dynamic</c> + shall be used. See <em>OTP Design Principles</em> for more + information about release handling.</p> + <p>The <c>modules</c> key is optional. If it is not given, it + defaults to <c>[M]</c>, where <c>M</c> comes from the + child's start <c>{M,F,A}</c></p> </item> <item> <p>Internally, the supervisor also keeps track of the pid @@ -213,11 +248,20 @@ child_spec() = {Id,StartFunc,Restart,Shutdown,Type,Modules} </datatype> <datatype> <name name="child_spec"/> + <desc><p>The tuple format is kept for backwards compatibility + only. A map is preferred; see more details + <seealso marker="#child_spec">above</seealso>.</p></desc> </datatype> <datatype> <name name="mfargs"/> - <desc><p><c>A</c> (the argument list) has the value - <c>undefined</c> if <c>Restart</c> is <c>temporary</c>.</p> + <desc> + <p>The value <c>undefined</c> for <c><anno>A</anno></c> (the + argument list) is only to be used internally + in <c>supervisor</c>. If the restart type of the child + is <c>temporary</c>, then the process is never to be + restarted and therefore there is no need to store the real + argument list. The value <c>undefined</c> will then be + stored instead.</p> </desc> </datatype> <datatype> @@ -233,6 +277,12 @@ child_spec() = {Id,StartFunc,Restart,Shutdown,Type,Modules} <name name="strategy"/> </datatype> <datatype> + <name name="sup_flags"/> + <desc><p>The tuple format is kept for backwards compatibility + only. A map is preferred; see more details + <seealso marker="#sup_flags">above</seealso>.</p></desc> + </datatype> + <datatype> <name name="sup_ref"/> </datatype> <datatype> @@ -253,20 +303,20 @@ child_spec() = {Id,StartFunc,Restart,Shutdown,Type,Modules} the supervisor is linked to the calling process (its supervisor).</p> <p>The created supervisor process calls <c><anno>Module</anno>:init/1</c> to - find out about restart strategy, maximum restart frequency + find out about restart strategy, maximum restart intensity and child processes. 
To ensure a synchronized start-up procedure, <c>start_link/2,3</c> does not return until <c><anno>Module</anno>:init/1</c> has returned and all child processes have been started.</p> - <p>If <c><anno>SupName</anno>={local,Name}</c> the supervisor is registered + <p>If <c><anno>SupName</anno>={local,Name}</c>, the supervisor is registered locally as <c>Name</c> using <c>register/2</c>. If <c><anno>SupName</anno>={global,Name}</c> the supervisor is registered globally as <c>Name</c> using <c>global:register_name/2</c>. If <c><anno>SupName</anno>={via,<anno>Module</anno>,<anno>Name</anno>}</c> the supervisor is registered as <c>Name</c> using the registry represented by - <c>Module</c>. The <c>Module</c> callback should export the functions + <c>Module</c>. The <c>Module</c> callback must export the functions <c>register_name/2</c>, <c>unregister_name/1</c> and <c>send/2</c>, - which should behave like the corresponding functions in <c>global</c>. + which shall behave like the corresponding functions in <c>global</c>. Thus, <c>{via,global,<anno>Name</anno>}</c> is a valid reference.</p> <p>If no name is provided, the supervisor is not registered.</p> <p><c><anno>Module</anno></c> is the name of the callback module.</p> @@ -274,14 +324,14 @@ child_spec() = {Id,StartFunc,Restart,Shutdown,Type,Modules} the argument to <c><anno>Module</anno>:init/1</c>.</p> <p>If the supervisor and its child processes are successfully created (i.e. if all child process start functions return - <c>{ok,Child}</c>, <c>{ok,Child,Info}</c>, or <c>ignore</c>) + <c>{ok,Child}</c>, <c>{ok,Child,Info}</c>, or <c>ignore</c>), the function returns <c>{ok,Pid}</c>, where <c>Pid</c> is the pid of the supervisor. If there already exists a process - with the specified <c><anno>SupName</anno></c> the function returns + with the specified <c><anno>SupName</anno></c>, the function returns <c>{error,{already_started,Pid}}</c>, where <c>Pid</c> is the pid of that process.</p> <p>If <c><anno>Module</anno>:init/1</c> returns <c>ignore</c>, this function - returns <c>ignore</c> as well and the supervisor terminates + returns <c>ignore</c> as well, and the supervisor terminates with reason <c>normal</c>. If <c><anno>Module</anno>:init/1</c> fails or returns an incorrect value, this function returns <c>{error,Term}</c> where <c>Term</c> @@ -297,7 +347,6 @@ child_spec() = {Id,StartFunc,Restart,Shutdown,Type,Modules} <func> <name name="start_child" arity="2"/> <fsummary>Dynamically add a child process to a supervisor.</fsummary> - <type name="child_spec"/> <type name="startchild_ret"/> <type name="startchild_err"/> <desc> @@ -314,35 +363,35 @@ child_spec() = {Id,StartFunc,Restart,Shutdown,Type,Modules} <item><c>{via,Module,Name}</c>, if the supervisor is registered through an alternative process registry.</item> </list> - <p><c><anno>ChildSpec</anno></c> should be a valid child specification + <p><c><anno>ChildSpec</anno></c> must be a valid child specification (unless the supervisor is a <c>simple_one_for_one</c> - supervisor, see below). The child process will be started by + supervisor; see below). 
The child process will be started by using the start function as defined in the child specification.</p> - <p>If the case of a <c>simple_one_for_one</c> supervisor, + <p>In the case of a <c>simple_one_for_one</c> supervisor, the child specification defined in <c>Module:init/1</c> will - be used and <c><anno>ChildSpec</anno></c> should instead be an arbitrary + be used, and <c><anno>ChildSpec</anno></c> shall instead be an arbitrary list of terms <c><anno>List</anno></c>. The child process will then be started by appending <c><anno>List</anno></c> to the existing start function arguments, i.e. by calling <c>apply(M, F, A++<anno>List</anno>)</c> where <c>{M,F,A}</c> is the start function defined in the child specification.</p> <p>If there already exists a child specification with - the specified <c><anno>Id</anno></c>, <c><anno>ChildSpec</anno></c> is discarded and + the specified identifier, <c><anno>ChildSpec</anno></c> is discarded, and the function returns <c>{error,already_present}</c> or <c>{error,{already_started,<anno>Child</anno>}}</c>, depending on if the corresponding child process is running or not.</p> <p>If the child process start function returns <c>{ok,<anno>Child</anno>}</c> - or <c>{ok,<anno>Child</anno>,<anno>Info</anno>}</c>, the child specification and pid is + or <c>{ok,<anno>Child</anno>,<anno>Info</anno>}</c>, the child specification and pid are added to the supervisor and the function returns the same value.</p> <p>If the child process start function returns <c>ignore</c>, the child specification is added to the supervisor, the pid - is set to <c>undefined</c> and the function returns + is set to <c>undefined</c>, and the function returns <c>{ok,undefined}</c>.</p> <p>If the child process start function returns an error tuple or an erroneous value, or if it fails, the child specification is - discarded and the function returns <c>{error,Error}</c> where + discarded, and the function returns <c>{error,Error}</c> where <c>Error</c> is a term containing information about the error and child specification.</p> </desc> @@ -366,7 +415,7 @@ child_spec() = {Id,StartFunc,Restart,Shutdown,Type,Modules} <p>If the child is temporary, the child specification is deleted as soon as the process terminates. This means - that <c>delete_child/2</c> has no meaning + that <c>delete_child/2</c> has no meaning, and <c>restart_child/2</c> can not be used for these children.</p> @@ -375,13 +424,13 @@ child_spec() = {Id,StartFunc,Restart,Shutdown,Type,Modules} process is alive, but is not a child of the given supervisor, the function will return <c>{error,not_found}</c>. If the child specification - identifier is given instead instead of a <c>pid()</c>, the + identifier is given instead of a <c>pid()</c>, the function will return <c>{error,simple_one_for_one}</c>.</p> <p>If successful, the function returns <c>ok</c>. If there is no child specification with the specified <c><anno>Id</anno></c>, the function returns <c>{error,not_found}</c>.</p> - <p>See <c>start_child/2</c> for a description of - <c><anno>SupRef</anno></c>.</p> + <p>See <seealso marker="#SupRef"><c>start_child/2</c></seealso> + for a description of <c><anno>SupRef</anno></c>.</p> </desc> </func> <func> @@ -390,15 +439,15 @@ child_spec() = {Id,StartFunc,Restart,Shutdown,Type,Modules} <desc> <p>Tells the supervisor <c><anno>SupRef</anno></c> to delete the child specification identified by <c><anno>Id</anno></c>. The corresponding child - process must not be running, use <c>terminate_child/2</c> to + process must not be running. 
Use <c>terminate_child/2</c> to terminate it.</p> - <p>See <seealso marker="#SupRef"><c>start_child/2</c></seealso> for a description of - <c>SupRef</c>.</p> + <p>See <seealso marker="#SupRef"><c>start_child/2</c></seealso> + for a description of <c><anno>SupRef</anno></c>.</p> <p>If successful, the function returns <c>ok</c>. If the child specification identified by <c><anno>Id</anno></c> exists but the corresponding child process is running or about to be restarted, the function returns <c>{error,running}</c> or - <c>{error,restarting}</c> respectively. If the child specification + <c>{error,restarting}</c>, respectively. If the child specification identified by <c><anno>Id</anno></c> does not exist, the function returns <c>{error,not_found}</c>.</p> </desc> @@ -410,10 +459,10 @@ child_spec() = {Id,StartFunc,Restart,Shutdown,Type,Modules} <p>Tells the supervisor <c><anno>SupRef</anno></c> to restart a child process corresponding to the child specification identified by <c><anno>Id</anno></c>. The child - specification must exist and the corresponding child process + specification must exist, and the corresponding child process must not be running.</p> <p>Note that for temporary children, the child specification - is automatically deleted when the child terminates, and thus + is automatically deleted when the child terminates; thus it is not possible to restart such children.</p> <p>See <seealso marker="#SupRef"><c>start_child/2</c></seealso> for a description of <c>SupRef</c>.</p> @@ -429,7 +478,7 @@ child_spec() = {Id,StartFunc,Restart,Shutdown,Type,Modules} is added to the supervisor and the function returns the same value.</p> <p>If the child process start function returns <c>ignore</c>, - the pid remains set to <c>undefined</c> and the function + the pid remains set to <c>undefined</c>, and the function returns <c>{ok,undefined}</c>.</p> <p>If the child process start function returns an error tuple or an erroneous value, or if it fails, the function returns @@ -462,7 +511,7 @@ child_spec() = {Id,StartFunc,Restart,Shutdown,Type,Modules} <item> <p><c><anno>Child</anno></c> - the pid of the corresponding child process, the atom <c>restarting</c> if the process is about to be - restarted or <c>undefined</c> if there is no such process.</p> + restarted, or <c>undefined</c> if there is no such process.</p> </item> <item> <p><c><anno>Type</anno></c> - as defined in the child specification.</p> @@ -475,8 +524,8 @@ child_spec() = {Id,StartFunc,Restart,Shutdown,Type,Modules} </func> <func> <name name="count_children" arity="1"/> - <fsummary>Return counts for the number of childspecs, active children, - supervisors and workers.</fsummary> + <fsummary>Return counts for the number of child specifications, + active children, supervisors, and workers.</fsummary> <desc> <p>Returns a property list (see <c>proplists</c>) containing the counts for each of the following elements of the supervisor's @@ -500,6 +549,8 @@ child_spec() = {Id,StartFunc,Restart,Shutdown,Type,Modules} process is still alive.</p> </item> </list> + <p>See <seealso marker="#SupRef"><c>start_child/2</c></seealso> + for a description of <c><anno>SupRef</anno></c>.</p> </desc> </func> <func> @@ -511,11 +562,23 @@ child_spec() = {Id,StartFunc,Restart,Shutdown,Type,Modules} correct, or <c>{error,<anno>Error</anno>}</c> otherwise.</p> </desc> </func> + <func> + <name name="get_childspec" arity="2"/> + <fsummary>Return the child specification map for the given + child.</fsummary> + <desc> + <p>Returns the child specification map for the 
child identified + by <c>Id</c> under supervisor <c>SupRef</c>. The returned + map contains all keys, both mandatory and optional.</p> + <p>See <seealso marker="#SupRef"><c>start_child/2</c></seealso> + for a description of <c><anno>SupRef</anno></c>.</p> + </desc> + </func> </funcs> <section> <title>CALLBACK FUNCTIONS</title> - <p>The following functions should be exported from a + <p>The following functions must be exported from a <c>supervisor</c> callback module.</p> </section> <funcs> @@ -524,33 +587,37 @@ child_spec() = {Id,StartFunc,Restart,Shutdown,Type,Modules} <fsummary>Return a supervisor specification.</fsummary> <type> <v>Args = term()</v> - <v>Result = {ok,{{RestartStrategy,MaxR,MaxT},[ChildSpec]}} | ignore</v> - <v> RestartStrategy = <seealso marker="#type-strategy">strategy()</seealso></v> - <v> MaxR = integer()>=0</v> - <v> MaxT = integer()>0</v> + <v>Result = {ok,{SupFlags,[ChildSpec]}} | ignore</v> + <v> SupFlags = <seealso marker="#type-sup_flags">sup_flags()</seealso></v> <v> ChildSpec = <seealso marker="#type-child_spec">child_spec()</seealso></v> </type> <desc> <p>Whenever a supervisor is started using <c>supervisor:start_link/2,3</c>, this function is called by the new process to find out about restart strategy, maximum - restart frequency and child specifications.</p> + restart intensity, and child specifications.</p> <p><c>Args</c> is the <c>Args</c> argument provided to the start function.</p> - <p><c>RestartStrategy</c> is the restart strategy and - <c>MaxR</c> and <c>MaxT</c> defines the maximum restart - frequency of the supervisor. <c>[ChildSpec]</c> is a list of - valid child specifications defining which child processes - the supervisor should start and monitor. See the discussion - about Supervision Principles above.</p> + <p><c>SupFlags</c> is the supervisor flags defining the + restart strategy and max restart intensity for the + supervisor. <c>[ChildSpec]</c> is a list of valid child + specifications defining which child processes the supervisor + shall start and monitor. See the discussion about + Supervision Principles above.</p> <p>Note that when the restart strategy is <c>simple_one_for_one</c>, the list of child specifications must be a list with one child specification only. - (The <c>Id</c> is ignored). No child process is then started + (The child specification identifier is ignored.) No child process is then started during the initialization phase, but all children are assumed to be started dynamically using <c>supervisor:start_child/2</c>.</p> <p>The function may also return <c>ignore</c>.</p> + <p>Note that this function might also be called as a part of a + code upgrade procedure. For this reason, the function should + not have any side effects. See + <seealso marker="doc/design_principles:appup_cookbook#sup">Design + Principles</seealso> for more information about code upgrade + of supervisors.</p> </desc> </func> </funcs> diff --git a/lib/stdlib/doc/src/sys.xml b/lib/stdlib/doc/src/sys.xml index 19605f325b..cf7df54d1d 100644 --- a/lib/stdlib/doc/src/sys.xml +++ b/lib/stdlib/doc/src/sys.xml @@ -359,6 +359,17 @@ installed.</p> </desc> </func> + <func> + <name name="terminate" arity="2"/> + <name name="terminate" arity="3"/> + <fsummary>Terminate the process</fsummary> + <desc> + <p>This function orders the process to terminate with the + given <c><anno>Reason</anno></c>. 
The termination is done + asynchronously, so there is no guarantee that the process is + actually terminated when the function returns.</p> + </desc> + </func> </funcs> <section> diff --git a/lib/stdlib/doc/src/zip.xml b/lib/stdlib/doc/src/zip.xml index 48b376743d..d201e81a79 100644 --- a/lib/stdlib/doc/src/zip.xml +++ b/lib/stdlib/doc/src/zip.xml @@ -135,6 +135,12 @@ <p>These options are described in <seealso marker="#zip_options">create/3</seealso>.</p> </desc> </datatype> + <datatype> + <name name="handle"/> + <desc> + <p>As returned by <seealso marker="#zip_open/2">zip_open/2</seealso>.</p> + </desc> + </datatype> </datatypes> <funcs> <func> @@ -430,6 +436,8 @@ means that subsequently reading files from the archive will be faster than unzipping files one at a time with <c>unzip</c>.</p> <p>The archive must be closed with <c>zip_close/1</c>.</p> + <p>The <c><anno>ZipHandle</anno></c> will be closed if the + process which originally opened the archive dies.</p> </desc> </func> <func> diff --git a/lib/stdlib/src/Makefile b/lib/stdlib/src/Makefile index 9ab2cd4134..1b3744b6fb 100644 --- a/lib/stdlib/src/Makefile +++ b/lib/stdlib/src/Makefile @@ -97,7 +97,6 @@ MODULES= \ otp_internal \ orddict \ ordsets \ - pg \ re \ pool \ proc_lib \ diff --git a/lib/stdlib/src/binary.erl b/lib/stdlib/src/binary.erl index 8d07a356dd..de26784ead 100644 --- a/lib/stdlib/src/binary.erl +++ b/lib/stdlib/src/binary.erl @@ -215,12 +215,13 @@ split(H,N) -> Subject :: binary(), Pattern :: binary() | [binary()] | cp(), Options :: [Option], - Option :: {scope, part()} | trim | global, + Option :: {scope, part()} | trim | global | trim_all, Parts :: [binary()]. split(Haystack,Needles,Options) -> try - {Part,Global,Trim} = get_opts_split(Options,{no,false,false}), + {Part,Global,Trim,TrimAll} = + get_opts_split(Options,{no,false,false,false}), Moptlist = case Part of no -> []; @@ -236,20 +237,24 @@ split(Haystack,Needles,Options) -> Match -> [Match] end end, - do_split(Haystack,MList,0,Trim) + do_split(Haystack,MList,0,Trim,TrimAll) catch _:_ -> erlang:error(badarg) end. -do_split(H,[],N,true) when N >= byte_size(H) -> +do_split(H,[],N,true,_) when N >= byte_size(H) -> []; -do_split(H,[],N,_) -> +do_split(H,[],N,_,true) when N >= byte_size(H) -> + []; +do_split(H,[],N,_,_) -> [binary:part(H,{N,byte_size(H)-N})]; -do_split(H,[{A,B}|T],N,Trim) -> +do_split(H,[{A,B}|T],N,Trim,TrimAll) -> case binary:part(H,{N,A-N}) of + <<>> when TrimAll == true -> + do_split(H,T,A+B,Trim,TrimAll); <<>> -> - Rest = do_split(H,T,A+B,Trim), + Rest = do_split(H,T,A+B,Trim,TrimAll), case {Trim, Rest} of {true,[]} -> []; @@ -257,7 +262,7 @@ do_split(H,[{A,B}|T],N,Trim) -> [<<>> | Rest] end; Oth -> - [Oth | do_split(H,T,A+B,Trim)] + [Oth | do_split(H,T,A+B,Trim,TrimAll)] end. 
@@ -346,14 +351,16 @@ splitat(H,N,[I|T]) -> %% Simple helper functions %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -get_opts_split([],{Part,Global,Trim}) -> - {Part,Global,Trim}; -get_opts_split([{scope,{A,B}} | T],{_Part,Global,Trim}) -> - get_opts_split(T,{{A,B},Global,Trim}); -get_opts_split([global | T],{Part,_Global,Trim}) -> - get_opts_split(T,{Part,true,Trim}); -get_opts_split([trim | T],{Part,Global,_Trim}) -> - get_opts_split(T,{Part,Global,true}); +get_opts_split([],{Part,Global,Trim,TrimAll}) -> + {Part,Global,Trim,TrimAll}; +get_opts_split([{scope,{A,B}} | T],{_Part,Global,Trim,TrimAll}) -> + get_opts_split(T,{{A,B},Global,Trim,TrimAll}); +get_opts_split([global | T],{Part,_Global,Trim,TrimAll}) -> + get_opts_split(T,{Part,true,Trim,TrimAll}); +get_opts_split([trim | T],{Part,Global,_Trim,TrimAll}) -> + get_opts_split(T,{Part,Global,true,TrimAll}); +get_opts_split([trim_all | T],{Part,Global,Trim,_TrimAll}) -> + get_opts_split(T,{Part,Global,Trim,true}); get_opts_split(_,_) -> throw(badopt). diff --git a/lib/stdlib/src/dict.erl b/lib/stdlib/src/dict.erl index cf8fb3114a..5a9f63c5e2 100644 --- a/lib/stdlib/src/dict.erl +++ b/lib/stdlib/src/dict.erl @@ -417,6 +417,8 @@ on_bucket(F, T, Slot) -> %% could have implemented map and filter using fold but these are %% faster. We hope! +fold_dict(F, Acc, #dict{size=0}) when is_function(F, 3) -> + Acc; fold_dict(F, Acc, D) -> Segs = D#dict.segs, fold_segs(F, Acc, Segs, tuple_size(Segs)). @@ -434,6 +436,8 @@ fold_bucket(F, Acc, [?kv(Key,Val)|Bkt]) -> fold_bucket(F, F(Key, Val, Acc), Bkt); fold_bucket(F, Acc, []) when is_function(F, 3) -> Acc. +map_dict(F, #dict{size=0} = Dict) when is_function(F, 2) -> + Dict; map_dict(F, D) -> Segs0 = tuple_to_list(D#dict.segs), Segs1 = map_seg_list(F, Segs0), @@ -453,6 +457,8 @@ map_bucket(F, [?kv(Key,Val)|Bkt]) -> [?kv(Key,F(Key, Val))|map_bucket(F, Bkt)]; map_bucket(F, []) when is_function(F, 2) -> []. +filter_dict(F, #dict{size=0} = Dict) when is_function(F, 2) -> + Dict; filter_dict(F, D) -> Segs0 = tuple_to_list(D#dict.segs), {Segs1,Fc} = filter_seg_list(F, Segs0, [], 0), diff --git a/lib/stdlib/src/erl_eval.erl b/lib/stdlib/src/erl_eval.erl index 639ddfc214..371573dc23 100644 --- a/lib/stdlib/src/erl_eval.erl +++ b/lib/stdlib/src/erl_eval.erl @@ -1172,7 +1172,7 @@ match_tuple([], _, _, Bs, _BBs) -> match_map([{map_field_exact, _, K, V}|Fs], Map, Bs0, BBs) -> Vm = try - {value, Ke, _} = expr(K, new_bindings()), + {value, Ke, _} = expr(K, Bs0), maps:get(Ke,Map) catch error:_ -> throw(nomatch) diff --git a/lib/stdlib/src/erl_internal.erl b/lib/stdlib/src/erl_internal.erl index edfb097de0..2bf8b86c23 100644 --- a/lib/stdlib/src/erl_internal.erl +++ b/lib/stdlib/src/erl_internal.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1998-2013. All Rights Reserved. +%% Copyright Ericsson AB 1998-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -51,6 +51,8 @@ type_test/2,new_type_test/2,old_type_test/2,old_bif/2]). -export([arith_op/2,bool_op/2,comp_op/2,list_op/2,send_op/2,op_type/2]). +-export([is_type/2]). + %%--------------------------------------------------------------------------- %% Erlang builtin functions allowed in guards. 
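%% Editor's sketch (not part of the patch): the new size=0 clauses added
%% to dict.erl above are pure fast paths for empty dictionaries; the
%% observable results of fold/map/filter are unchanged.
empty_dict_demo() ->
    D0 = dict:new(),
    0  = dict:fold(fun(_K, _V, N) -> N + 1 end, 0, D0),
    D1 = dict:map(fun(_K, V) -> V end, D0),
    D2 = dict:filter(fun(_K, _V) -> false end, D0),
    {0, 0} = {dict:size(D1), dict:size(D2)},
    ok.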
@@ -293,6 +295,7 @@ bif(garbage_collect, 1) -> true; bif(garbage_collect, 2) -> true; bif(get, 0) -> true; bif(get, 1) -> true; +bif(get_keys, 0) -> true; bif(get_keys, 1) -> true; bif(group_leader, 0) -> true; bif(group_leader, 2) -> true; @@ -530,3 +533,53 @@ old_bif(unlink, 1) -> true; old_bif(unregister, 1) -> true; old_bif(whereis, 1) -> true; old_bif(Name, A) when is_atom(Name), is_integer(A) -> false. + +-spec is_type(Name, NumberOfTypeVariables) -> boolean() when + Name :: atom(), + NumberOfTypeVariables :: non_neg_integer(). +%% Returns true if Name/NumberOfTypeVariables is a predefined type. + +is_type(any, 0) -> true; +is_type(arity, 0) -> true; +is_type(atom, 0) -> true; +is_type(binary, 0) -> true; +is_type(bitstring, 0) -> true; +is_type(bool, 0) -> true; +is_type(boolean, 0) -> true; +is_type(byte, 0) -> true; +is_type(char, 0) -> true; +is_type(float, 0) -> true; +is_type(function, 0) -> true; +is_type(identifier, 0) -> true; +is_type(integer, 0) -> true; +is_type(iodata, 0) -> true; +is_type(iolist, 0) -> true; +is_type(list, 0) -> true; +is_type(list, 1) -> true; +is_type(map, 0) -> true; +is_type(maybe_improper_list, 0) -> true; +is_type(maybe_improper_list, 2) -> true; +is_type(mfa, 0) -> true; +is_type(module, 0) -> true; +is_type(neg_integer, 0) -> true; +is_type(nil, 0) -> true; +is_type(no_return, 0) -> true; +is_type(node, 0) -> true; +is_type(non_neg_integer, 0) -> true; +is_type(none, 0) -> true; +is_type(nonempty_improper_list, 2) -> true; +is_type(nonempty_list, 0) -> true; +is_type(nonempty_list, 1) -> true; +is_type(nonempty_maybe_improper_list, 0) -> true; +is_type(nonempty_maybe_improper_list, 2) -> true; +is_type(nonempty_string, 0) -> true; +is_type(number, 0) -> true; +is_type(pid, 0) -> true; +is_type(port, 0) -> true; +is_type(pos_integer, 0) -> true; +is_type(reference, 0) -> true; +is_type(string, 0) -> true; +is_type(term, 0) -> true; +is_type(timeout, 0) -> true; +is_type(tuple, 0) -> true; +is_type(_, _) -> false. 
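The new erl_internal:is_type/2 predicate introduced above can be queried directly; a few illustrative calls (queue/0 is no longer a predefined type, so it returns false):

    1> erl_internal:is_type(integer, 0).
    true
    2> erl_internal:is_type(nonempty_list, 1).
    true
    3> erl_internal:is_type(queue, 0).
    false
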
diff --git a/lib/stdlib/src/erl_lint.erl b/lib/stdlib/src/erl_lint.erl index 39cc03cf7a..cbe6eeec3c 100644 --- a/lib/stdlib/src/erl_lint.erl +++ b/lib/stdlib/src/erl_lint.erl @@ -130,6 +130,8 @@ value_option(Flag, Default, On, OnVal, Off, OffVal, Opts) -> :: dict:dict(mfa(), line()), callbacks = dict:new() %Callback types :: dict:dict(mfa(), line()), + optional_callbacks = dict:new() %Optional callbacks + :: dict:dict(mfa(), line()), types = dict:new() %Type definitions :: dict:dict(ta(), #typeinfo{}), exp_types=gb_sets:empty() %Exported types @@ -237,10 +239,7 @@ format_error({too_many_arguments,Arity}) -> "maximum allowed is ~w", [Arity,?MAX_ARGUMENTS]); %% --- patterns and guards --- format_error(illegal_pattern) -> "illegal pattern"; -format_error(illegal_map_key) -> - "illegal map key"; -format_error({illegal_map_key_variable,K}) -> - io_lib:format("illegal use of variable ~w in map",[K]); +format_error(illegal_map_key) -> "illegal map key in pattern"; format_error(illegal_bin_pattern) -> "binary patterns cannot be matched in parallel using '='"; format_error(illegal_expr) -> "illegal expression"; @@ -313,13 +312,20 @@ format_error({undefined_behaviour,Behaviour}) -> io_lib:format("behaviour ~w undefined", [Behaviour]); format_error({undefined_behaviour_callbacks,Behaviour}) -> io_lib:format("behaviour ~w callback functions are undefined", - [Behaviour]); + [Behaviour]); format_error({ill_defined_behaviour_callbacks,Behaviour}) -> io_lib:format("behaviour ~w callback functions erroneously defined", [Behaviour]); +format_error({ill_defined_optional_callbacks,Behaviour}) -> + io_lib:format("behaviour ~w optional callback functions erroneously defined", + [Behaviour]); format_error({behaviour_info, {_M,F,A}}) -> io_lib:format("cannot define callback attibute for ~w/~w when " "behaviour_info is defined",[F,A]); +format_error({redefine_optional_callback, {F, A}}) -> + io_lib:format("optional callback ~w/~w duplicated", [F, A]); +format_error({undefined_callback, {_M, F, A}}) -> + io_lib:format("callback ~w/~w is undefined", [F, A]); %% --- types and specs --- format_error({singleton_typevar, Name}) -> io_lib:format("type variable ~w is only used once (is unbound)", [Name]); @@ -331,14 +337,10 @@ format_error({undefined_type, {TypeName, Arity}}) -> io_lib:format("type ~w~s undefined", [TypeName, gen_type_paren(Arity)]); format_error({unused_type, {TypeName, Arity}}) -> io_lib:format("type ~w~s is unused", [TypeName, gen_type_paren(Arity)]); -%% format_error({new_builtin_type, {TypeName, Arity}}) -> -%% io_lib:format("type ~w~s is a new builtin type; " -%% "its (re)definition is allowed only until the next release", -%% [TypeName, gen_type_paren(Arity)]); -format_error({new_var_arity_type, TypeName}) -> - io_lib:format("type ~w is a new builtin type; " +format_error({new_builtin_type, {TypeName, Arity}}) -> + io_lib:format("type ~w~s is a new builtin type; " "its (re)definition is allowed only until the next release", - [TypeName]); + [TypeName, gen_type_paren(Arity)]); format_error({builtin_type, {TypeName, Arity}}) -> io_lib:format("type ~w~s is a builtin type; it cannot be redefined", [TypeName, gen_type_paren(Arity)]); @@ -352,10 +354,14 @@ format_error({type_syntax, Constr}) -> io_lib:format("bad ~w type", [Constr]); format_error({redefine_spec, {M, F, A}}) -> io_lib:format("spec for ~w:~w/~w already defined", [M, F, A]); -format_error({redefine_callback, {M, F, A}}) -> - io_lib:format("callback ~w:~w/~w already defined", [M, F, A]); -format_error({spec_fun_undefined, {M, F, A}}) -> - 
io_lib:format("spec for undefined function ~w:~w/~w", [M, F, A]); +format_error({redefine_spec, {F, A}}) -> + io_lib:format("spec for ~w/~w already defined", [F, A]); +format_error({redefine_callback, {F, A}}) -> + io_lib:format("callback ~w/~w already defined", [F, A]); +format_error({bad_callback, {M, F, A}}) -> + io_lib:format("explicit module not allowed for callback ~w:~w/~w ", [M, F, A]); +format_error({spec_fun_undefined, {F, A}}) -> + io_lib:format("spec for undefined function ~w/~w", [F, A]); format_error({missing_spec, {F,A}}) -> io_lib:format("missing specification for function ~w/~w", [F, A]); format_error(spec_wrong_arity) -> @@ -383,9 +389,7 @@ format_error({underspecified_opaque, {TypeName, Arity}}) -> [TypeName, gen_type_paren(Arity)]); %% --- obsolete? unused? --- format_error({format_error, {Fmt, Args}}) -> - io_lib:format(Fmt, Args); -format_error({mnemosyne, What}) -> - "mnemosyne " ++ What ++ ", missing transformation". + io_lib:format(Fmt, Args). gen_type_paren(Arity) when is_integer(Arity), Arity >= 0 -> gen_type_paren_1(Arity, ")"). @@ -727,6 +731,8 @@ attribute_state({attribute,L,spec,{Fun,Types}}, St) -> spec_decl(L, Fun, Types, St); attribute_state({attribute,L,callback,{Fun,Types}}, St) -> callback_decl(L, Fun, Types, St); +attribute_state({attribute,L,optional_callbacks,Es}, St) -> + optional_callbacks(L, Es, St); attribute_state({attribute,L,on_load,Val}, St) -> on_load(L, Val, St); attribute_state({attribute,_L,_Other,_Val}, St) -> % Ignore others @@ -738,6 +744,8 @@ attribute_state(Form, St) -> %% State' %% Allow for record, type and opaque type definitions and spec %% declarations to be intersperced within function definitions. +%% Dialyzer attributes are also allowed everywhere, but are not +%% checked at all. function_state({attribute,L,record,{Name,Fields}}, St) -> record_def(L, Name, Fields, St); @@ -747,12 +755,12 @@ function_state({attribute,L,opaque,{TypeName,TypeDef,Args}}, St) -> type_def(opaque, L, TypeName, TypeDef, Args, St); function_state({attribute,L,spec,{Fun,Types}}, St) -> spec_decl(L, Fun, Types, St); +function_state({attribute,_L,dialyzer,_Val}, St) -> + St; function_state({attribute,La,Attr,_Val}, St) -> add_error(La, {attribute,Attr}, St); function_state({function,L,N,A,Cs}, St) -> function(L, N, A, Cs, St); -function_state({rule,L,_N,_A,_Cs}, St) -> - add_error(L, {mnemosyne,"rule"}, St); function_state({eof,L}, St) -> eof(L, St). %% eof(LastLine, State) -> @@ -834,57 +842,73 @@ check_behaviour(St0) -> %% Check behaviours for existence and defined functions. behaviour_check(Bs, St0) -> - {AllBfs,St1} = all_behaviour_callbacks(Bs, [], St0), - St = behaviour_missing_callbacks(AllBfs, St1), + {AllBfs0, St1} = all_behaviour_callbacks(Bs, [], St0), + St = behaviour_missing_callbacks(AllBfs0, St1), + Exports = exports(St0), + F = fun(Bfs, OBfs) -> + [B || B <- Bfs, + not lists:member(B, OBfs) + orelse gb_sets:is_member(B, Exports)] + end, + %% After fixing missing callbacks new warnings may be emitted. + AllBfs = [{Item,F(Bfs0, OBfs0)} || {Item,Bfs0,OBfs0} <- AllBfs0], behaviour_conflicting(AllBfs, St). all_behaviour_callbacks([{Line,B}|Bs], Acc, St0) -> - {Bfs0,St} = behaviour_callbacks(Line, B, St0), - all_behaviour_callbacks(Bs, [{{Line,B},Bfs0}|Acc], St); + {Bfs0,OBfs0,St} = behaviour_callbacks(Line, B, St0), + all_behaviour_callbacks(Bs, [{{Line,B},Bfs0,OBfs0}|Acc], St); all_behaviour_callbacks([], Acc, St) -> {reverse(Acc),St}. 
behaviour_callbacks(Line, B, St0) -> try B:behaviour_info(callbacks) of - Funcs when is_list(Funcs) -> - All = all(fun({FuncName, Arity}) -> - is_atom(FuncName) andalso is_integer(Arity); - ({FuncName, Arity, Spec}) -> - is_atom(FuncName) andalso is_integer(Arity) - andalso is_list(Spec); - (_Other) -> - false - end, - Funcs), - MaybeRemoveSpec = fun({_F,_A}=FA) -> FA; - ({F,A,_S}) -> {F,A}; - (Other) -> Other - end, - if - All =:= true -> - {[MaybeRemoveSpec(F) || F <- Funcs], St0}; + undefined -> + St1 = add_warning(Line, {undefined_behaviour_callbacks, B}, St0), + {[], [], St1}; + Funcs -> + case is_fa_list(Funcs) of true -> + try B:behaviour_info(optional_callbacks) of + undefined -> + {Funcs, [], St0}; + OptFuncs -> + %% OptFuncs should always be OK thanks to + %% sys_pre_expand. + case is_fa_list(OptFuncs) of + true -> + {Funcs, OptFuncs, St0}; + false -> + W = {ill_defined_optional_callbacks, B}, + St1 = add_warning(Line, W, St0), + {Funcs, [], St1} + end + catch + _:_ -> + {Funcs, [], St0} + end; + false -> St1 = add_warning(Line, - {ill_defined_behaviour_callbacks,B}, + {ill_defined_behaviour_callbacks, B}, St0), - {[], St1} - end; - undefined -> - St1 = add_warning(Line, {undefined_behaviour_callbacks,B}, St0), - {[], St1}; - _Other -> - St1 = add_warning(Line, {ill_defined_behaviour_callbacks,B}, St0), - {[], St1} + {[], [], St1} + end catch _:_ -> - St1 = add_warning(Line, {undefined_behaviour,B}, St0), - {[], St1} + St1 = add_warning(Line, {undefined_behaviour, B}, St0), + {[], [], St1} end. -behaviour_missing_callbacks([{{Line,B},Bfs}|T], St0) -> +behaviour_missing_callbacks([{{Line,B},Bfs0,OBfs}|T], St0) -> + Bfs = ordsets:subtract(ordsets:from_list(Bfs0), ordsets:from_list(OBfs)), Exports = gb_sets:to_list(exports(St0)), - Missing = ordsets:subtract(ordsets:from_list(Bfs), Exports), + Missing = ordsets:subtract(Bfs, Exports), St = foldl(fun (F, S0) -> - add_warning(Line, {undefined_behaviour_func,F,B}, S0) + case is_fa(F) of + true -> + M = {undefined_behaviour_func,F,B}, + add_warning(Line, M, S0); + false -> + S0 % ill_defined_behaviour_callbacks + end end, St0, Missing), behaviour_missing_callbacks(T, St); behaviour_missing_callbacks([], St) -> St. @@ -1046,10 +1070,9 @@ check_undefined_types(#lint{usage=Usage,types=Def}=St0) -> Used = Usage#usage.used_types, UTAs = dict:fetch_keys(Used), Undef = [{TA,dict:fetch(TA, Used)} || - {T,_}=TA <- UTAs, + TA <- UTAs, not dict:is_key(TA, Def), - not is_default_type(TA), - not is_newly_introduced_var_arity_type(T)], + not is_default_type(TA)], foldl(fun ({TA,L}, St) -> add_error(L, {undefined_type,TA}, St) end, St0, Undef). @@ -1127,19 +1150,29 @@ check_unused_records(Forms, St0) -> end. check_callback_information(#lint{callbacks = Callbacks, - defined = Defined} = State) -> - case gb_sets:is_member({behaviour_info,1}, Defined) of - false -> State; + optional_callbacks = OptionalCbs, + defined = Defined} = St0) -> + OptFun = fun({MFA, Line}, St) -> + case dict:is_key(MFA, Callbacks) of + true -> + St; + false -> + add_error(Line, {undefined_callback, MFA}, St) + end + end, + St1 = lists:foldl(OptFun, St0, dict:to_list(OptionalCbs)), + case gb_sets:is_member({behaviour_info, 1}, Defined) of + false -> St1; true -> case dict:size(Callbacks) of - 0 -> State; + 0 -> St1; _ -> CallbacksList = dict:to_list(Callbacks), FoldL = - fun({Fa,Line},St) -> + fun({Fa, Line}, St) -> add_error(Line, {behaviour_info, Fa}, St) end, - lists:foldl(FoldL, State, CallbacksList) + lists:foldl(FoldL, St1, CallbacksList) end end. 
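A minimal sketch of the behaviour-side declaration that the new optional-callback handling above checks; the module and callback names are made up, but the attribute shape is the same one the gen_server/gen_fsm/gen_event diffs further down use for format_status/2:

    -module(my_behaviour).

    -callback init(Args :: term()) -> {ok, State :: term()}.
    -callback format_status(normal | terminate, list()) -> term().

    -optional_callbacks([format_status/2]).
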
@@ -1404,20 +1437,7 @@ pattern({cons,_Line,H,T}, Vt, Old, Bvt, St0) -> pattern({tuple,_Line,Ps}, Vt, Old, Bvt, St) -> pattern_list(Ps, Vt, Old, Bvt, St); pattern({map,_Line,Ps}, Vt, Old, Bvt, St) -> - foldl(fun - ({map_field_assoc,L,_,_}, {Psvt,Bvt0,St0}) -> - {Psvt,Bvt0,add_error(L, illegal_pattern, St0)}; - ({map_field_exact,L,KP,VP}, {Psvt,Bvt0,St0}) -> - case is_valid_map_key(KP, pattern, St0) of - true -> - {Pvt,Bvt1,St1} = pattern(VP, Vt, Old, Bvt, St0), - {vtmerge_pat(Pvt, Psvt),vtmerge_pat(Bvt0, Bvt1), St1}; - false -> - {Psvt,Bvt0,add_error(L, illegal_map_key, St0)}; - {false,variable,Var} -> - {Psvt,Bvt0,add_error(L, {illegal_map_key_variable,Var}, St0)} - end - end, {[],[],St}, Ps); + pattern_map(Ps, Vt, Old, Bvt, St); %%pattern({struct,_Line,_Tag,Ps}, Vt, Old, Bvt, St) -> %% pattern_list(Ps, Vt, Old, Bvt, St); pattern({record_index,Line,Name,Field}, _Vt, _Old, _Bvt, St) -> @@ -1571,6 +1591,21 @@ is_pattern_expr_1({op,_Line,Op,A1,A2}) -> erl_internal:arith_op(Op, 2) andalso all(fun is_pattern_expr/1, [A1,A2]); is_pattern_expr_1(_Other) -> false. +pattern_map(Ps, Vt, Old, Bvt, St) -> + foldl(fun + ({map_field_assoc,L,_,_}, {Psvt,Bvt0,St0}) -> + {Psvt,Bvt0,add_error(L, illegal_pattern, St0)}; + ({map_field_exact,L,K,V}, {Psvt,Bvt0,St0}) -> + case is_valid_map_key(K) of + true -> + {Kvt,St1} = expr(K, Vt, St0), + {Vvt,Bvt2,St2} = pattern(V, Vt, Old, Bvt, St1), + {vtmerge_pat(vtmerge_pat(Kvt, Vvt), Psvt), vtmerge_pat(Bvt0, Bvt2), St2}; + false -> + {Psvt,Bvt0,add_error(L, illegal_map_key, St0)} + end + end, {[],[],St}, Ps). + %% pattern_bin([Element], VarTable, Old, BinVarTable, State) -> %% {UpdVarTable,UpdBinVarTable,State}. %% Check a pattern group. BinVarTable are used binsize variables. @@ -2085,8 +2120,8 @@ expr({'receive',Line,Cs,To,ToEs}, Vt, St0) -> {Cvt,St3} = icrt_clauses(Cs, Vt, St2), %% Csvts = [vtnew(Tevt, Vt)|Cvt], %This is just NEW variables! Csvts = [Tevt|Cvt], - {Rvt,St4} = icrt_export(Csvts, Vt, {'receive',Line}, St3), - {vtmerge([Tvt,Tevt,Rvt]),St4}; + Rvt = icrt_export(Csvts, Vt, {'receive',Line}), + {vtmerge([Tvt,Tevt,Rvt]),St3}; expr({'fun',Line,Body}, Vt, St) -> %%No one can think funs export! case Body of @@ -2197,21 +2232,20 @@ expr({'try',Line,Es,Scs,Ccs,As}, Vt, St0) -> %% passes cannot handle exports in combination with 'after'. {Evt0,St1} = exprs(Es, Vt, St0), TryLine = {'try',Line}, - Uvt = vtunsafe(vtnames(vtnew(Evt0, Vt)), TryLine, []), - Evt1 = vtupdate(Uvt, vtsubtract(Evt0, Uvt)), + Uvt = vtunsafe(TryLine, Evt0, Vt), + Evt1 = vtupdate(Uvt, Evt0), {Sccs,St2} = icrt_clauses(Scs++Ccs, TryLine, vtupdate(Evt1, Vt), St1), Rvt0 = Sccs, - Rvt1 = vtupdate(vtunsafe(vtnames(vtnew(Rvt0, Vt)), TryLine, []), Rvt0), + Rvt1 = vtupdate(vtunsafe(TryLine, Rvt0, Vt), Rvt0), Evt2 = vtmerge(Evt1, Rvt1), {Avt0,St} = exprs(As, vtupdate(Evt2, Vt), St2), - Avt1 = vtupdate(vtunsafe(vtnames(vtnew(Avt0, Vt)), TryLine, []), Avt0), + Avt1 = vtupdate(vtunsafe(TryLine, Avt0, Vt), Avt0), Avt = vtmerge(Evt2, Avt1), {Avt,St}; expr({'catch',Line,E}, Vt, St0) -> %% No new variables added, flag new variables as unsafe. 
- {Evt,St1} = expr(E, Vt, St0), - Uvt = vtunsafe(vtnames(vtnew(Evt, Vt)), {'catch',Line}, []), - {vtupdate(Uvt,vtupdate(Evt, Vt)),St1}; + {Evt,St} = expr(E, Vt, St0), + {vtupdate(vtunsafe({'catch',Line}, Evt, Vt), Evt),St}; expr({match,_Line,P,E}, Vt, St0) -> {Evt,St1} = expr(E, Vt, St0), {Pvt,Bvt,St2} = pattern(P, vtupdate(Evt, Vt), St1), @@ -2224,9 +2258,8 @@ expr({op,Line,Op,L,R}, Vt, St0) when Op =:= 'orelse'; Op =:= 'andalso' -> {Evt1,St1} = expr(L, Vt, St0), Vt1 = vtupdate(Evt1, Vt), {Evt2,St2} = expr(R, Vt1, St1), - Vt2 = vtmerge(Evt2, Vt1), - {Vt3,St3} = icrt_export([Vt1,Vt2], Vt1, {Op,Line}, St2), - {vtmerge(Evt1, Vt3),St3}; + Evt3 = vtupdate(vtunsafe({Op,Line}, Evt2, Vt1), Evt2), + {vtmerge(Evt1, Evt3),St2}; expr({op,_Line,_Op,L,R}, Vt, St) -> expr_list([L,R], Vt, St); %They see the same variables %% The following are not allowed to occur anywhere! @@ -2237,11 +2270,10 @@ expr({remote,Line,_M,_F}, _Vt, St) -> %% {UsedVarTable,State} expr_list(Es, Vt, St) -> - {Vt1,St1} = foldl(fun (E, {Esvt,St0}) -> - {Evt,St1} = expr(E, Vt, St0), - {vtmerge_pat(Evt, Esvt),St1} - end, {[],St}, Es), - {vtmerge(vtnew(Vt1, Vt), vtold(Vt1, Vt)),St1}. + foldl(fun (E, {Esvt,St0}) -> + {Evt,St1} = expr(E, Vt, St0), + {vtmerge_pat(Evt, Esvt),St1} + end, {[],St}, Es). record_expr(Line, Rec, Vt, St0) -> St1 = warn_invalid_record(Line, Rec, St0), @@ -2254,18 +2286,13 @@ check_assoc_fields([{map_field_assoc,_,_,_}|Fs], St) -> check_assoc_fields([], St) -> St. -map_fields([{Tag,Line,K,V}|Fs], Vt, St, F) when Tag =:= map_field_assoc; - Tag =:= map_field_exact -> - St1 = case is_valid_map_key(K, St) of - true -> St; - false -> add_error(Line, illegal_map_key, St); - {false,variable,Var} -> add_error(Line, {illegal_map_key_variable,Var}, St) - end, - {Pvt,St2} = F([K,V], Vt, St1), +map_fields([{Tag,_,K,V}|Fs], Vt, St, F) when Tag =:= map_field_assoc; + Tag =:= map_field_exact -> + {Pvt,St2} = F([K,V], Vt, St), {Vts,St3} = map_fields(Fs, Vt, St2, F), {vtupdate(Pvt, Vts),St3}; -map_fields([], Vt, St, _) -> - {Vt,St}. +map_fields([], _, St, _) -> + {[],St}. %% warn_invalid_record(Line, Record, State0) -> State %% Adds warning if the record is invalid. @@ -2319,21 +2346,14 @@ is_valid_call(Call) -> _ -> true end. -%% is_valid_map_key(K,St) -> true | false | {false, Var::atom()} -%% check for value expression without variables - -is_valid_map_key(K,St) -> - is_valid_map_key(K,expr,St). -is_valid_map_key(K,Ctx,St) -> - case expr(K,[],St) of - {[],_} -> - is_valid_map_key_value(K,Ctx); - {[Var|_],_} -> - {false,variable,element(1,Var)} - end. +%% is_valid_map_key(K,St) -> true | false +%% variables are allowed for patterns only at the top of the tree -is_valid_map_key_value(K,Ctx) -> +is_valid_map_key({var,_,_}) -> true; +is_valid_map_key(K) -> is_valid_map_key_value(K). 
+is_valid_map_key_value(K) -> case K of + {var,_,_} -> false; {char,_,_} -> true; {integer,_,_} -> true; {float,_,_} -> true; @@ -2341,36 +2361,36 @@ is_valid_map_key_value(K,Ctx) -> {nil,_} -> true; {atom,_,_} -> true; {cons,_,H,T} -> - is_valid_map_key_value(H,Ctx) andalso - is_valid_map_key_value(T,Ctx); + is_valid_map_key_value(H) andalso + is_valid_map_key_value(T); {tuple,_,Es} -> foldl(fun(E,B) -> - B andalso is_valid_map_key_value(E,Ctx) + B andalso is_valid_map_key_value(E) end,true,Es); {map,_,Arg,Ps} -> % only check for value expressions to be valid % invalid map expressions are later checked in % core and kernel - is_valid_map_key_value(Arg,Ctx) andalso foldl(fun + is_valid_map_key_value(Arg) andalso foldl(fun ({Tag,_,Ke,Ve},B) when Tag =:= map_field_assoc; - Tag =:= map_field_exact, Ctx =:= expr -> - B andalso is_valid_map_key_value(Ke,Ctx) - andalso is_valid_map_key_value(Ve,Ctx); + Tag =:= map_field_exact -> + B andalso is_valid_map_key_value(Ke) + andalso is_valid_map_key_value(Ve); (_,_) -> false end,true,Ps); {map,_,Ps} -> foldl(fun ({Tag,_,Ke,Ve},B) when Tag =:= map_field_assoc; - Tag =:= map_field_exact, Ctx =:= expr -> - B andalso is_valid_map_key_value(Ke,Ctx) - andalso is_valid_map_key_value(Ve,Ctx); + Tag =:= map_field_exact -> + B andalso is_valid_map_key_value(Ke) + andalso is_valid_map_key_value(Ve); (_,_) -> false end, true, Ps); {record,_,_,Fs} -> foldl(fun ({record_field,_,Ke,Ve},B) -> - B andalso is_valid_map_key_value(Ke,Ctx) - andalso is_valid_map_key_value(Ve,Ctx) + B andalso is_valid_map_key_value(Ke) + andalso is_valid_map_key_value(Ve) end,true,Fs); {bin,_,Es} -> % only check for value expressions to be valid @@ -2378,9 +2398,9 @@ is_valid_map_key_value(K,Ctx) -> % core and kernel foldl(fun ({bin_element,_,E,_,_},B) -> - B andalso is_valid_map_key_value(E,Ctx) + B andalso is_valid_map_key_value(E) end,true,Es); - _ -> false + Val -> is_pattern_expr(Val) end. %% record_def(Line, RecordName, [RecField], State) -> State. 
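The relaxed is_valid_map_key/1 above accepts a variable as a map key at the top level of a pattern, leaving deeper validation to later compiler passes as the comments note. An illustrative shape, assuming the key variable is already bound when the match is reached:

    value_of(Key, Map) ->
        #{Key := Value} = Map,   %% Key is bound here, so the lint pass accepts it
        Value.
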
@@ -2615,30 +2635,21 @@ type_def(Attr, Line, TypeName, ProtoType, Args, St0) -> true -> case is_obsolete_builtin_type(TypePair) of true -> StoreType(St0); - false -> add_error(Line, {builtin_type, TypePair}, St0) -%% case is_newly_introduced_builtin_type(TypePair) of -%% %% allow some types just for bootstrapping -%% true -> -%% Warn = {new_builtin_type, TypePair}, -%% St1 = add_warning(Line, Warn, St0), -%% StoreType(St1); -%% false -> -%% add_error(Line, {builtin_type, TypePair}, St0) -%% end + false -> + case is_newly_introduced_builtin_type(TypePair) of + %% allow some types just for bootstrapping + true -> + Warn = {new_builtin_type, TypePair}, + St1 = add_warning(Line, Warn, St0), + StoreType(St1); + false -> + add_error(Line, {builtin_type, TypePair}, St0) + end end; false -> - case - dict:is_key(TypePair, TypeDefs) orelse - is_var_arity_type(TypeName) - of + case dict:is_key(TypePair, TypeDefs) of true -> - case is_newly_introduced_var_arity_type(TypeName) of - true -> - Warn = {new_var_arity_type, TypeName}, - add_warning(Line, Warn, St0); - false -> - add_error(Line, {redefine_type, TypePair}, St0) - end; + add_error(Line, {redefine_type, TypePair}, St0); false -> St1 = case Attr =:= opaque andalso @@ -2675,7 +2686,7 @@ check_type({paren_type, _L, [Type]}, SeenVars, St) -> check_type({remote_type, L, [{atom, _, Mod}, {atom, _, Name}, Args]}, SeenVars, #lint{module=CurrentMod} = St) -> case Mod =:= CurrentMod of - true -> check_type({type, L, Name, Args}, SeenVars, St); + true -> check_type({user_type, L, Name, Args}, SeenVars, St); false -> lists:foldl(fun(T, {AccSeenVars, AccSt}) -> check_type(T, AccSeenVars, AccSt) @@ -2709,12 +2720,15 @@ check_type({type, L, range, [From, To]}, SeenVars, St) -> _ -> add_error(L, {type_syntax, range}, St) end, {SeenVars, St1}; -check_type({type, _L, map, any}, SeenVars, St) -> {SeenVars, St}; +check_type({type, L, map, any}, SeenVars, St) -> + %% To get usage right while map/0 is a newly_introduced_builtin_type. 
+ St1 = used_type({map, 0}, L, St), + {SeenVars, St1}; check_type({type, _L, map, Pairs}, SeenVars, St) -> lists:foldl(fun(Pair, {AccSeenVars, AccSt}) -> check_type(Pair, AccSeenVars, AccSt) end, {SeenVars, St}, Pairs); -check_type({type, _L, map_field_assoc, Dom, Range}, SeenVars, St) -> +check_type({type, _L, map_field_assoc, [Dom, Range]}, SeenVars, St) -> check_type({type, -1, product, [Dom, Range]}, SeenVars, St); check_type({type, _L, tuple, any}, SeenVars, St) -> {SeenVars, St}; check_type({type, _L, any}, SeenVars, St) -> {SeenVars, St}; @@ -2733,41 +2747,39 @@ check_type({type, L, record, [Name|Fields]}, SeenVars, St) -> check_record_types(L, Atom, Fields, SeenVars, St1); _ -> {SeenVars, add_error(L, {type_syntax, record}, St)} end; -check_type({type, _L, product, Args}, SeenVars, St) -> +check_type({type, _L, Tag, Args}, SeenVars, St) when Tag =:= product; + Tag =:= union; + Tag =:= tuple -> lists:foldl(fun(T, {AccSeenVars, AccSt}) -> check_type(T, AccSeenVars, AccSt) end, {SeenVars, St}, Args); check_type({type, La, TypeName, Args}, SeenVars, St) -> - #lint{usage=Usage, module = Module, types=Types} = St, + #lint{module = Module, types=Types} = St, Arity = length(Args), TypePair = {TypeName, Arity}, - St1 = case is_var_arity_type(TypeName) of - true -> St; - false -> - Obsolete = (is_warn_enabled(deprecated_type, St) - andalso obsolete_builtin_type(TypePair)), - IsObsolete = - case Obsolete of - {deprecated, Repl, _} when element(1, Repl) =/= Module -> - case dict:find(TypePair, Types) of - {ok, _} -> false; - error -> true - end; - _ -> false - end, - case IsObsolete of - true -> + Obsolete = (is_warn_enabled(deprecated_type, St) + andalso obsolete_builtin_type(TypePair)), + St1 = case Obsolete of + {deprecated, Repl, _} when element(1, Repl) =/= Module -> + case dict:find(TypePair, Types) of + {ok, _} -> + used_type(TypePair, La, St); + error -> {deprecated, Replacement, Rel} = Obsolete, Tag = deprecated_builtin_type, W = {Tag, TypePair, Replacement, Rel}, - add_warning(La, W, St); - false -> - OldUsed = Usage#usage.used_types, - UsedTypes = dict:store(TypePair, La, OldUsed), - St#lint{usage=Usage#usage{used_types=UsedTypes}} - end - end, + add_warning(La, W, St) + end; + _ -> St + end, check_type({type, -1, product, Args}, SeenVars, St1); +check_type({user_type, L, TypeName, Args}, SeenVars, St) -> + Arity = length(Args), + TypePair = {TypeName, Arity}, + St1 = used_type(TypePair, L, St), + lists:foldl(fun(T, {AccSeenVars, AccSt}) -> + check_type(T, AccSeenVars, AccSt) + end, {SeenVars, St1}, Args); check_type(I, SeenVars, St) -> case erl_eval:partial_eval(I) of {integer,_ILn,_Integer} -> {SeenVars, St}; @@ -2809,95 +2821,24 @@ check_record_types([{type, _, field_type, [{atom, AL, FName}, Type]}|Left], check_record_types([], _Name, _DefFields, SeenVars, St, _SeenFields) -> {SeenVars, St}. -is_var_arity_type(tuple) -> true; -is_var_arity_type(map) -> true; -is_var_arity_type(product) -> true; -is_var_arity_type(union) -> true; -is_var_arity_type(record) -> true; -is_var_arity_type(_) -> false. 
- -is_default_type({any, 0}) -> true; -is_default_type({arity, 0}) -> true; -is_default_type({array, 0}) -> true; -is_default_type({atom, 0}) -> true; -is_default_type({atom, 1}) -> true; -is_default_type({binary, 0}) -> true; -is_default_type({binary, 2}) -> true; -is_default_type({bitstring, 0}) -> true; -is_default_type({bool, 0}) -> true; -is_default_type({boolean, 0}) -> true; -is_default_type({byte, 0}) -> true; -is_default_type({char, 0}) -> true; -is_default_type({dict, 0}) -> true; -is_default_type({digraph, 0}) -> true; -is_default_type({float, 0}) -> true; -is_default_type({'fun', 0}) -> true; -is_default_type({'fun', 2}) -> true; -is_default_type({function, 0}) -> true; -is_default_type({gb_set, 0}) -> true; -is_default_type({gb_tree, 0}) -> true; -is_default_type({identifier, 0}) -> true; -is_default_type({integer, 0}) -> true; -is_default_type({integer, 1}) -> true; -is_default_type({iodata, 0}) -> true; -is_default_type({iolist, 0}) -> true; -is_default_type({list, 0}) -> true; -is_default_type({list, 1}) -> true; -is_default_type({maybe_improper_list, 0}) -> true; -is_default_type({maybe_improper_list, 2}) -> true; -is_default_type({mfa, 0}) -> true; -is_default_type({module, 0}) -> true; -is_default_type({neg_integer, 0}) -> true; -is_default_type({nil, 0}) -> true; -is_default_type({no_return, 0}) -> true; -is_default_type({node, 0}) -> true; -is_default_type({non_neg_integer, 0}) -> true; -is_default_type({none, 0}) -> true; -is_default_type({nonempty_list, 0}) -> true; -is_default_type({nonempty_list, 1}) -> true; -is_default_type({nonempty_improper_list, 2}) -> true; -is_default_type({nonempty_maybe_improper_list, 0}) -> true; -is_default_type({nonempty_maybe_improper_list, 2}) -> true; -is_default_type({nonempty_string, 0}) -> true; -is_default_type({number, 0}) -> true; -is_default_type({pid, 0}) -> true; -is_default_type({port, 0}) -> true; -is_default_type({pos_integer, 0}) -> true; -is_default_type({queue, 0}) -> true; -is_default_type({range, 2}) -> true; -is_default_type({reference, 0}) -> true; -is_default_type({set, 0}) -> true; -is_default_type({string, 0}) -> true; -is_default_type({term, 0}) -> true; -is_default_type({timeout, 0}) -> true; -is_default_type({var, 1}) -> true; -is_default_type(_) -> false. - -is_newly_introduced_var_arity_type(map) -> true; -is_newly_introduced_var_arity_type(_) -> false. - -%% is_newly_introduced_builtin_type({Name, _}) when is_atom(Name) -> false. +used_type(TypePair, L, St) -> + Usage = St#lint.usage, + OldUsed = Usage#usage.used_types, + UsedTypes = dict:store(TypePair, L, OldUsed), + St#lint{usage=Usage#usage{used_types=UsedTypes}}. + +is_default_type({Name, NumberOfTypeVariables}) -> + erl_internal:is_type(Name, NumberOfTypeVariables). + +is_newly_introduced_builtin_type({map, 0}) -> true; +is_newly_introduced_builtin_type({Name, _}) when is_atom(Name) -> false. is_obsolete_builtin_type(TypePair) -> obsolete_builtin_type(TypePair) =/= no. -%% Obsolete in OTP 17.0. 
-obsolete_builtin_type({array, 0}) -> - {deprecated, {array, array, 1}, "OTP 18.0"}; -obsolete_builtin_type({dict, 0}) -> - {deprecated, {dict, dict, 2}, "OTP 18.0"}; -obsolete_builtin_type({digraph, 0}) -> - {deprecated, {digraph, graph}, "OTP 18.0"}; -obsolete_builtin_type({gb_set, 0}) -> - {deprecated, {gb_sets, set, 1}, "OTP 18.0"}; -obsolete_builtin_type({gb_tree, 0}) -> - {deprecated, {gb_trees, tree, 2}, "OTP 18.0"}; -obsolete_builtin_type({queue, 0}) -> - {deprecated, {queue, queue, 1}, "OTP 18.0"}; -obsolete_builtin_type({set, 0}) -> - {deprecated, {sets, set, 1}, "OTP 18.0"}; -obsolete_builtin_type({tid, 0}) -> - {deprecated, {ets, tid}, "OTP 18.0"}; +%% To keep Dialyzer silent... +obsolete_builtin_type({1, 255}) -> + {deprecated, {2, 255}, ""}; obsolete_builtin_type({Name, A}) when is_atom(Name), is_integer(A) -> no. %% spec_decl(Line, Fun, Types, State) -> State. @@ -2909,7 +2850,7 @@ spec_decl(Line, MFA0, TypeSpecs, St0 = #lint{specs = Specs, module = Mod}) -> end, St1 = St0#lint{specs = dict:store(MFA, Line, Specs)}, case dict:is_key(MFA, Specs) of - true -> add_error(Line, {redefine_spec, MFA}, St1); + true -> add_error(Line, {redefine_spec, MFA0}, St1); false -> check_specs(TypeSpecs, Arity, St1) end. @@ -2917,16 +2858,50 @@ spec_decl(Line, MFA0, TypeSpecs, St0 = #lint{specs = Specs, module = Mod}) -> callback_decl(Line, MFA0, TypeSpecs, St0 = #lint{callbacks = Callbacks, module = Mod}) -> - MFA = case MFA0 of - {F, Arity} -> {Mod, F, Arity}; - {_M, _F, Arity} -> MFA0 - end, - St1 = St0#lint{callbacks = dict:store(MFA, Line, Callbacks)}, - case dict:is_key(MFA, Callbacks) of - true -> add_error(Line, {redefine_callback, MFA}, St1); - false -> check_specs(TypeSpecs, Arity, St1) + case MFA0 of + {_M, _F, _A} -> add_error(Line, {bad_callback, MFA0}, St0); + {F, Arity} -> + MFA = {Mod, F, Arity}, + St1 = St0#lint{callbacks = dict:store(MFA, Line, Callbacks)}, + case dict:is_key(MFA, Callbacks) of + true -> add_error(Line, {redefine_callback, MFA0}, St1); + false -> check_specs(TypeSpecs, Arity, St1) + end end. +%% optional_callbacks(Line, FAs, State) -> State. + +optional_callbacks(Line, Term, St0) -> + try true = is_fa_list(Term), Term of + FAs -> + optional_cbs(Line, FAs, St0) + catch + _:_ -> + St0 % ignore others + end. + +optional_cbs(_Line, [], St) -> + St; +optional_cbs(Line, [{F,A}|FAs], St0) -> + #lint{optional_callbacks = OptionalCbs, module = Mod} = St0, + MFA = {Mod, F, A}, + St1 = St0#lint{optional_callbacks = dict:store(MFA, Line, OptionalCbs)}, + St2 = case dict:is_key(MFA, OptionalCbs) of + true -> + add_error(Line, {redefine_optional_callback, {F,A}}, St1); + false -> + St1 + end, + optional_cbs(Line, FAs, St2). + +is_fa_list([E|L]) -> is_fa(E) andalso is_fa_list(L); +is_fa_list([]) -> true; +is_fa_list(_) -> false. + +is_fa({FuncName, Arity}) + when is_atom(FuncName), is_integer(Arity), Arity >= 0 -> true; +is_fa(_) -> false. + check_specs([FunType|Left], Arity, St0) -> {FunType1, CTypes} = case FunType of @@ -2950,10 +2925,11 @@ check_specs([], _Arity, St) -> St. 
check_specs_without_function(#lint{module=Mod,defined=Funcs,specs=Specs}=St) -> - Fun = fun({M, F, A} = MFA, Line, AccSt) when M =:= Mod -> - case gb_sets:is_element({F, A}, Funcs) of + Fun = fun({M, F, A}, Line, AccSt) when M =:= Mod -> + FA = {F, A}, + case gb_sets:is_element(FA, Funcs) of true -> AccSt; - false -> add_error(Line, {spec_fun_undefined, MFA}, AccSt) + false -> add_error(Line, {spec_fun_undefined, FA}, AccSt) end; ({_M, _F, _A}, _Line, AccSt) -> AccSt end, @@ -3032,11 +3008,12 @@ check_local_opaque_types(St) -> dict:fold(FoldFun, St, Ts). %% icrt_clauses(Clauses, In, ImportVarTable, State) -> -%% {NewVts,State}. +%% {UpdVt,State}. icrt_clauses(Cs, In, Vt, St0) -> {Csvt,St1} = icrt_clauses(Cs, Vt, St0), - icrt_export(Csvt, Vt, In, St1). + UpdVt = icrt_export(Csvt, Vt, In), + {UpdVt,St1}. %% icrt_clauses(Clauses, ImportVarTable, State) -> %% {NewVts,State}. @@ -3046,26 +3023,73 @@ icrt_clauses(Cs, Vt, St) -> icrt_clause({clause,_Line,H,G,B}, Vt0, St0) -> {Hvt,Binvt,St1} = head(H, Vt0, St0), - Vt1 = vtupdate(Hvt, vtupdate(Binvt, Vt0)), - {Gvt,St2} = guard(G, Vt1, St1), + Vt1 = vtupdate(Hvt, Binvt), + {Gvt,St2} = guard(G, vtupdate(Vt1, Vt0), St1), Vt2 = vtupdate(Gvt, Vt1), - {Bvt,St3} = exprs(B, Vt2, St2), + {Bvt,St3} = exprs(B, vtupdate(Vt2, Vt0), St2), {vtupdate(Bvt, Vt2),St3}. -icrt_export(Csvt, Vt, In, St) -> - Vt1 = vtmerge(Csvt), - All = ordsets:subtract(vintersection(Csvt), vtnames(Vt)), - Some = ordsets:subtract(vtnames(Vt1), vtnames(Vt)), - Xvt = vtexport(All, In, []), - Evt = vtunsafe(ordsets:subtract(Some, All), In, Xvt), - Unused = vtmerge([unused_vars(Vt0, Vt, St) || Vt0 <- Csvt]), - %% Exported and unsafe variables may be unused: - Uvt = vtmerge(Evt, Unused), - %% Make exported and unsafe unused variables unused in subsequent code: - Vt2 = vtmerge(Uvt, vtsubtract(Vt1, Uvt)), - %% Forget about old variables which were not used: - Vt3 = vtmerge(vtnew(Vt2, Vt), vt_no_unused(vtold(Vt2, Vt))), - {Vt3,St}. +icrt_export(Vts, Vt, {Tag,Attrs}) -> + {_File,Loc} = loc(Attrs), + icrt_export(lists:merge(Vts), Vt, {Tag,Loc}, length(Vts), []). + +icrt_export([{V,{{export,_},_,_}}|Vs0], [{V,{{export,_}=S0,_,Ls}}|Vt], + In, I, Acc) -> + %% V was an exported variable and has been used in an expression in at least + %% one clause. Its state needs to be merged from all clauses to silence any + %% exported var warning already emitted. + {VVs,Vs} = lists:partition(fun ({K,_}) -> K =:= V end, Vs0), + S = foldl(fun ({_,{S1,_,_}}, AccS) -> merge_state(AccS, S1) end, S0, VVs), + icrt_export(Vs, Vt, In, I, [{V,{S,used,Ls}}|Acc]); +icrt_export([{V,_}|Vs0], [{V,{_,_,Ls}}|Vt], In, I, Acc) -> + %% V was either unsafe or bound and has now been reused. It may also have + %% been an export but as it was not matched by the previous clause, it means + %% it has been changed to 'bound' in at least one clause because it was used + %% in a pattern. + Vs = lists:dropwhile(fun ({K,_}) -> K =:= V end, Vs0), + icrt_export(Vs, Vt, In, I, [{V,{bound,used,Ls}}|Acc]); +icrt_export([{V1,_}|_]=Vs, [{V2,_}|Vt], In, I, Acc) when V1 > V2 -> + %% V2 was already in scope and has not been reused in any clause. + icrt_export(Vs, Vt, In, I, Acc); +icrt_export([{V,_}|_]=Vs0, Vt, In, I, Acc) -> + %% V is a new variable. + {VVs,Vs} = lists:partition(fun ({K,_}) -> K =:= V end, Vs0), + F = fun ({_,{S,U,Ls}}, {AccI,AccS0,AccLs0}) -> + AccS = case {S,AccS0} of + {{unsafe,_},{unsafe,_}} -> + %% V was found unsafe in a previous clause, mark + %% it as unsafe for the whole parent expression. 
+ {unsafe,In}; + {{unsafe,_},_} -> + %% V was unsafe in a clause, keep that state and + %% generalize it to the whole expression if it + %% is found unsafe in another one. + S; + _ -> + %% V is either bound or exported, keep original + %% state. + AccS0 + end, + AccLs = case U of + used -> AccLs0; + unused -> merge_lines(AccLs0, Ls) + end, + {AccI + 1,AccS,AccLs} + end, + %% Initial state is exported from the current expression. + {Count,S1,Ls} = foldl(F, {0,{export,In},[]}, VVs), + S = case Count of + I -> + %% V was found in all clauses, keep computed state. + S1; + _ -> + %% V was not bound in some clauses, mark as unsafe. + {unsafe,In} + end, + U = case Ls of [] -> used; _ -> unused end, + icrt_export(Vs, Vt, In, I, [{V,{S,U,Ls}}|Acc]); +icrt_export([], _, _, _, Acc) -> + reverse(Acc). handle_comprehension(E, Qs, Vt0, St0) -> {Vt1, Uvt, St1} = lc_quals(Qs, Vt0, St0), @@ -3163,7 +3187,8 @@ fun_clauses(Cs, Vt, St) -> {Cvt,St1} = fun_clause(C, Vt, St0), {vtmerge(Cvt, Bvt0),St1} end, {[],St#lint{recdef_top = false}}, Cs), - {vt_no_unused(vtold(Bvt, Vt)),St2#lint{recdef_top = OldRecDef}}. + Uvt = vt_no_unsafe(vt_no_unused(vtold(Bvt, Vt))), + {Uvt,St2#lint{recdef_top = OldRecDef}}. fun_clause({clause,_Line,H,G,B}, Vt0, St0) -> {Hvt,Binvt,St1} = head(H, Vt0, [], St0), % No imported pattern variables @@ -3277,19 +3302,24 @@ pat_binsize_var(V, Line, Vt, Bvt, St) -> %% exported vars are probably safe, warn only if warn_export_vars is %% set. -expr_var(V, Line, Vt, St0) -> +expr_var(V, Line, Vt, St) -> case orddict:find(V, Vt) of {ok,{bound,_Usage,Ls}} -> - {[{V,{bound,used,Ls}}],St0}; + {[{V,{bound,used,Ls}}],St}; {ok,{{unsafe,In},_Usage,Ls}} -> {[{V,{bound,used,Ls}}], - add_error(Line, {unsafe_var,V,In}, St0)}; + add_error(Line, {unsafe_var,V,In}, St)}; {ok,{{export,From},_Usage,Ls}} -> - {[{V,{bound,used,Ls}}], - exported_var(Line, V, From, St0)}; + case is_warn_enabled(export_vars, St) of + true -> + {[{V,{bound,used,Ls}}], + add_warning(Line, {exported_var,V,From}, St)}; + false -> + {[{V,{{export,From},used,Ls}}],St} + end; error -> {[{V,{bound,used,[Line]}}], - add_error(Line, {unbound_var,V}, St0)} + add_error(Line, {unbound_var,V}, St)} end. exported_var(Line, V, From, St) -> @@ -3353,17 +3383,12 @@ vtupdate(Uvt, Vt0) -> {S, merge_used(U1, U2), merge_lines(L1, L2)} end, Uvt, Vt0). -%% vtexport([Variable], From, VarTable) -> VarTable. -%% vtunsafe([Variable], From, VarTable) -> VarTable. -%% Add the variables to VarTable either as exported from From or as unsafe. - -vtexport(Vs, {InTag,FileLine}, Vt0) -> - {_File,Line} = loc(FileLine), - vtupdate([{V,{{export,{InTag,Line}},unused,[]}} || V <- Vs], Vt0). +%% vtunsafe(From, UpdVarTable, VarTable) -> UnsafeVarTable. +%% Return all new variables in UpdVarTable as unsafe. -vtunsafe(Vs, {InTag,FileLine}, Vt0) -> +vtunsafe({Tag,FileLine}, Uvt, Vt) -> {_File,Line} = loc(FileLine), - vtupdate([{V,{{unsafe,{InTag,Line}},unused,[]}} || V <- Vs], Vt0). + [{V,{{unsafe,{Tag,Line}},U,Ls}} || {V,{_,U,Ls}} <- vtnew(Uvt, Vt)]. %% vtmerge(VarTable, VarTable) -> VarTable. %% Merge two variables tables generating a new vartable. Give priority to @@ -3416,8 +3441,6 @@ vtsubtract(New, Old) -> vtold(New, Old) -> orddict:filter(fun (V, _How) -> orddict:is_key(V, Old) end, New). -vtnames(Vt) -> [ V || {V,_How} <- Vt ]. - vt_no_unsafe(Vt) -> [V || {_,{S,_U,_L}}=V <- Vt, case S of {unsafe,_} -> false; @@ -3426,29 +3449,6 @@ vt_no_unsafe(Vt) -> [V || {_,{S,_U,_L}}=V <- Vt, vt_no_unused(Vt) -> [V || {_,{_,U,_L}}=V <- Vt, U =/= unused]. 
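The reworked icrt_export/5 above merges per-clause variable states for case/if/receive results; a small example of the exported-variable situation it tracks (warned about only when warn_export_vars is enabled, per the expr_var change above):

    f(X) ->
        case X of
            true  -> Y = 1;
            false -> Y = 2
        end,
        Y.   %% Y is exported from the case expression
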
-%% vunion(VarTable1, VarTable2) -> [VarName]. -%% vunion([VarTable]) -> [VarName]. -%% vintersection(VarTable1, VarTable2) -> [VarName]. -%% vintersection([VarTable]) -> [VarName]. -%% Union/intersection of names of vars in VarTable. - --ifdef(NOTUSED). -vunion(Vs1, Vs2) -> ordsets:union(vtnames(Vs1), vtnames(Vs2)). - -vunion(Vss) -> foldl(fun (Vs, Uvs) -> - ordsets:union(vtnames(Vs), Uvs) - end, [], Vss). - -vintersection(Vs1, Vs2) -> ordsets:intersection(vtnames(Vs1), vtnames(Vs2)). --endif. - -vintersection([Vs]) -> - vtnames(Vs); %Boundary conditions!!! -vintersection([Vs|Vss]) -> - ordsets:intersection(vtnames(Vs), vintersection(Vss)); -vintersection([]) -> - []. - %% copy_expr(Expr, Line) -> Expr. %% Make a copy of Expr converting all line numbers to Line. diff --git a/lib/stdlib/src/erl_parse.yrl b/lib/stdlib/src/erl_parse.yrl index 1d4a2a1fef..3502a50eaa 100644 --- a/lib/stdlib/src/erl_parse.yrl +++ b/lib/stdlib/src/erl_parse.yrl @@ -42,7 +42,6 @@ function_call argument_list exprs guard atomic strings prefix_op mult_op add_op list_op comp_op -rule rule_clauses rule_clause rule_body binary bin_elements bin_element bit_expr opt_bit_size_expr bit_size_expr opt_bit_type_list bit_type_list bit_type top_type top_type_100 top_types type typed_expr typed_attr_val @@ -54,7 +53,7 @@ bin_base_type bin_unit_type type_200 type_300 type_400 type_500. Terminals char integer float atom string var -'(' ')' ',' '->' ':-' '{' '}' '[' ']' '|' '||' '<-' ';' ':' '#' '.' +'(' ')' ',' '->' '{' '}' '[' ']' '|' '||' '<-' ';' ':' '#' '.' 'after' 'begin' 'case' 'try' 'catch' 'end' 'fun' 'if' 'of' 'receive' 'when' 'andalso' 'orelse' 'bnot' 'not' @@ -73,7 +72,6 @@ Rootsymbol form. form -> attribute dot : '$1'. form -> function dot : '$1'. -form -> rule dot : '$1'. attribute -> '-' atom attr_val : build_attribute('$2', '$3'). attribute -> '-' atom typed_attr_val : build_typed_attribute('$2','$3'). @@ -146,8 +144,7 @@ type -> '(' top_type ')' : {paren_type, ?line('$2'), ['$2']}. type -> var : '$1'. type -> atom : '$1'. type -> atom '(' ')' : build_gen_type('$1'). -type -> atom '(' top_types ')' : {type, ?line('$1'), - normalise('$1'), '$3'}. +type -> atom '(' top_types ')' : build_type('$1', '$3'). type -> atom ':' atom '(' ')' : {remote_type, ?line('$1'), ['$1', '$3', []]}. type -> atom ':' atom '(' top_types ')' : {remote_type, ?line('$1'), @@ -181,7 +178,7 @@ fun_type -> '(' top_types ')' '->' top_type map_pair_types -> map_pair_type : ['$1']. map_pair_types -> map_pair_type ',' map_pair_types : ['$1'|'$3']. -map_pair_type -> top_type '=>' top_type : {type, ?line('$2'), map_field_assoc,'$1','$3'}. +map_pair_type -> top_type '=>' top_type : {type, ?line('$2'), map_field_assoc,['$1','$3']}. field_types -> field_type : ['$1']. field_types -> field_type ',' field_types : ['$1'|'$3']. @@ -521,17 +518,6 @@ comp_op -> '>' : '$1'. comp_op -> '=:=' : '$1'. comp_op -> '=/=' : '$1'. -rule -> rule_clauses : build_rule('$1'). - -rule_clauses -> rule_clause : ['$1']. -rule_clauses -> rule_clause ';' rule_clauses : ['$1'|'$3']. - -rule_clause -> atom clause_args clause_guard rule_body : - {clause,?line('$1'),element(3, '$1'),'$2','$3','$4'}. - -rule_body -> ':-' lc_exprs: '$2'. - - Erlang code. -export([parse_form/1,parse_exprs/1,parse_term/1]). @@ -665,6 +651,8 @@ find_arity_from_specs([Spec|_]) -> {type, _, 'fun', [{type, _, product, Args},_]} = Fun, length(Args). 
+build_def({var, L, '_'}, _Types) -> + ret_err(L, "bad type variable"); build_def(LHS, Types) -> IsSubType = {atom, ?line(LHS), is_subtype}, {type, ?line(LHS), constraint, [IsSubType, [LHS, Types]]}. @@ -684,7 +672,8 @@ build_gen_type({atom, La, tuple}) -> build_gen_type({atom, La, map}) -> {type, La, map, any}; build_gen_type({atom, La, Name}) -> - {type, La, Name, []}. + Tag = type_tag(Name, 0), + {Tag, La, Name, []}. build_bin_type([{var, _, '_'}|Left], Int) -> build_bin_type(Left, Int); @@ -693,6 +682,16 @@ build_bin_type([], Int) -> build_bin_type([{var, La, _}|_], _) -> ret_err(La, "Bad binary type"). +build_type({atom, L, Name}, Types) -> + Tag = type_tag(Name, length(Types)), + {Tag, L, Name, Types}. + +type_tag(TypeName, NumberOfTypeVariables) -> + case erl_internal:is_type(TypeName, NumberOfTypeVariables) of + true -> type; + false -> user_type + end. + %% build_attribute(AttrName, AttrValue) -> %% {attribute,Line,module,Module} %% {attribute,Line,export,Exports} @@ -834,13 +833,6 @@ build_function(Cs) -> Arity = length(element(4, hd(Cs))), {function,?line(hd(Cs)),Name,Arity,check_clauses(Cs, Name, Arity)}. -%% build_rule([Clause]) -> {rule,Line,Name,Arity,[Clause]'} - -build_rule(Cs) -> - Name = element(3, hd(Cs)), - Arity = length(element(4, hd(Cs))), - {rule,?line(hd(Cs)),Name,Arity,check_clauses(Cs, Name, Arity)}. - %% build_fun(Line, [Clause]) -> {'fun',Line,{clauses,[Clause]}}. build_fun(Line, Cs) -> diff --git a/lib/stdlib/src/erl_pp.erl b/lib/stdlib/src/erl_pp.erl index 1fd6d2a8df..469ce544c7 100644 --- a/lib/stdlib/src/erl_pp.erl +++ b/lib/stdlib/src/erl_pp.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2013. All Rights Reserved. +%% Copyright Ericsson AB 1996-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -22,7 +22,7 @@ %%% the parser. It does not always produce pretty code. -export([form/1,form/2, - attribute/1,attribute/2,function/1,function/2,rule/1,rule/2, + attribute/1,attribute/2,function/1,function/2, guard/1,guard/2,exprs/1,exprs/2,exprs/3,expr/1,expr/2,expr/3,expr/4]). -import(lists, [append/1,foldr/3,mapfoldl/3,reverse/1,reverse/2]). @@ -91,12 +91,6 @@ function(F) -> function(F, Options) -> frmt(lfunction(F, options(Options)), state(Options)). -rule(R) -> - rule(R, none). - -rule(R, Options) -> - frmt(lrule(R, options(Options)), state(Options)). - -spec(guard(Guard) -> io_lib:chars() when Guard :: [erl_parse:abstract_expr()]). @@ -199,8 +193,6 @@ lform({attribute,Line,Name,Arg}, Opts, State) -> lattribute({attribute,Line,Name,Arg}, Opts, State); lform({function,Line,Name,Arity,Clauses}, Opts, _State) -> lfunction({function,Line,Name,Arity,Clauses}, Opts); -lform({rule,Line,Name,Arity,Clauses}, Opts, _State) -> - lrule({rule,Line,Name,Arity,Clauses}, Opts); %% These are specials to make it easier for the compiler. 
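With the build_gen_type/build_type/type_tag changes above, the parser tags non-predefined types as user_type nodes while built-ins keep the type tag; a sketch (exact line numbers depend on how the form is scanned):

    {ok, Ts, _} = erl_scan:string("-type t() :: foo()."),
    {ok, {attribute, _, type, {t, Type, []}}} = erl_parse:parse_form(Ts),
    %% Type is now {user_type,1,foo,[]}, whereas "integer()" would still
    %% parse to {type,1,integer,[]}.
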
lform({error,E}, _Opts, _State) -> leaf(format("~p\n", [{error,E}])); @@ -232,13 +224,21 @@ lattribute(import, Name, _Opts, _State) when is_list(Name) -> attr("import", [{var,0,pname(Name)}]); lattribute(import, {From,Falist}, _Opts, _State) -> attr("import",[{var,0,pname(From)},falist(Falist)]); +lattribute(optional_callbacks, Falist, Opts, _State) -> + ArgL = try falist(Falist) + catch _:_ -> abstract(Falist, Opts) + end, + call({var,0,"-optional_callbacks"}, [ArgL], 0, options(none)); lattribute(file, {Name,Line}, _Opts, State) -> attr("file", [{var,0,(State#pp.string_fun)(Name)},{integer,0,Line}]); lattribute(record, {Name,Is}, Opts, _State) -> Nl = leaf(format("-record(~w,", [Name])), [{first,Nl,record_fields(Is, Opts)},$)]; -lattribute(Name, Arg, #options{encoding = Encoding}, _State) -> - attr(write(Name), [erl_parse:abstract(Arg, [{encoding,Encoding}])]). +lattribute(Name, Arg, Options, _State) -> + attr(write(Name), [abstract(Arg, Options)]). + +abstract(Arg, #options{encoding = Encoding}) -> + erl_parse:abstract(Arg, [{encoding,Encoding}]). typeattr(Tag, {TypeName,Type,Args}, _Opts) -> {first,leaf("-"++atom_to_list(Tag)++" "), @@ -277,6 +277,9 @@ ltype({type,_,'fun',[{type,_,any},_]}=FunType) -> ltype({type,_Line,'fun',[{type,_,product,_},_]}=FunType) -> [fun_type(['fun',$(], FunType),$)]; ltype({type,Line,T,Ts}) -> + %% Compatibility. Before 18.0. + simple_type({atom,Line,T}, Ts); +ltype({user_type,Line,T,Ts}) -> simple_type({atom,Line,T}, Ts); ltype({remote_type,Line,[M,F,Ts]}) -> simple_type({remote,Line,M,F}, Ts); @@ -299,7 +302,7 @@ map_type(Fs) -> map_pair_types(Fs) -> tuple_type(Fs, fun map_pair_type/1). -map_pair_type({type,_Line,map_field_assoc,Ktype,Vtype}) -> +map_pair_type({type,_Line,map_field_assoc,[Ktype,Vtype]}) -> map_assoc_typed(ltype(Ktype), Vtype). map_assoc_typed(B, {type,_,union,Ts}) -> @@ -407,19 +410,6 @@ func_clause(Name, {clause,Line,Head,Guard,Body}, Opts) -> Bl = body(Body, Opts), {step,Gl,Bl}. -lrule({rule,_Line,Name,_Arity,Cs}, Opts) -> - Cll = nl_clauses(fun (C, H) -> rule_clause(Name, C, H) end, $;, Opts, Cs), - [Cll,leaf(".\n")]. - -rule_clause(Name, {clause,Line,Head,Guard,Body}, Opts) -> - Hl = call({atom,Line,Name}, Head, 0, Opts), - Gl = guard_when(Hl, Guard, Opts, leaf(" :-")), - Bl = rule_body(Body, Opts), - {step,Gl,Bl}. - -rule_body(Es, Opts) -> - lc_quals(Es, Opts). - guard_when(Before, Guard, Opts) -> guard_when(Before, Guard, Opts, ' ->'). diff --git a/lib/stdlib/src/erl_scan.erl b/lib/stdlib/src/erl_scan.erl index 6fd6bb888b..4960a86760 100644 --- a/lib/stdlib/src/erl_scan.erl +++ b/lib/stdlib/src/erl_scan.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2013. All Rights Reserved. +%% Copyright Ericsson AB 1996-2014. All Rights Reserved. 
%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -599,9 +599,6 @@ scan1("|"=Cs, _St, Line, Col, Toks) -> %% := scan1(":="++Cs, St, Line, Col, Toks) -> tok2(Cs, St, Line, Col, Toks, ":=", ':=', 2); -%% :- -scan1(":-"++Cs, St, Line, Col, Toks) -> - tok2(Cs, St, Line, Col, Toks, ":-", ':-', 2); %% :: for typed records scan1("::"++Cs, St, Line, Col, Toks) -> tok2(Cs, St, Line, Col, Toks, "::", '::', 2); diff --git a/lib/stdlib/src/ets.erl b/lib/stdlib/src/ets.erl index 93c4f59896..1df069755d 100644 --- a/lib/stdlib/src/ets.erl +++ b/lib/stdlib/src/ets.erl @@ -71,7 +71,8 @@ rename/2, safe_fixtable/2, select/1, select/2, select/3, select_count/2, select_delete/2, select_reverse/1, select_reverse/2, select_reverse/3, setopts/2, slot/2, - update_counter/3, update_element/3]). + take/2, + update_counter/3, update_counter/4, update_element/3]). -spec all() -> [Tab] when Tab :: tab(). @@ -133,7 +134,9 @@ give_away(_, _, _) -> | {owner, pid()} | {protection, access()} | {size, non_neg_integer()} - | {type, type()}. + | {type, type()} + | {write_concurrency, boolean()} + | {read_concurrency, boolean()}. info(_) -> erlang:nif_error(undef). @@ -142,7 +145,8 @@ info(_) -> Tab :: tab(), Item :: compressed | fixed | heir | keypos | memory | name | named_table | node | owner | protection - | safe_fixed | size | stats | type, + | safe_fixed | size | stats | type + | write_concurrency | read_concurrency, Value :: term(). info(_, _) -> @@ -400,6 +404,14 @@ setopts(_, _) -> slot(_, _) -> erlang:nif_error(undef). +-spec take(Tab, Key) -> [Object] when + Tab :: tab(), + Key :: term(), + Object :: tuple(). + +take(_, _) -> + erlang:nif_error(undef). + -spec update_counter(Tab, Key, UpdateOp) -> Result when Tab :: tab(), Key :: term(), @@ -427,6 +439,38 @@ slot(_, _) -> update_counter(_, _, _) -> erlang:nif_error(undef). +-spec update_counter(Tab, Key, UpdateOp, Default) -> Result when + Tab :: tab(), + Key :: term(), + UpdateOp :: {Pos, Incr} + | {Pos, Incr, Threshold, SetValue}, + Pos :: integer(), + Incr :: integer(), + Threshold :: integer(), + SetValue :: integer(), + Result :: integer(), + Default :: tuple(); + (Tab, Key, [UpdateOp], Default) -> [Result] when + Tab :: tab(), + Key :: term(), + UpdateOp :: {Pos, Incr} + | {Pos, Incr, Threshold, SetValue}, + Pos :: integer(), + Incr :: integer(), + Threshold :: integer(), + SetValue :: integer(), + Result :: integer(), + Default :: tuple(); + (Tab, Key, Incr, Default) -> Result when + Tab :: tab(), + Key :: term(), + Incr :: integer(), + Result :: integer(), + Default :: tuple(). + +update_counter(_, _, _, _) -> + erlang:nif_error(undef). + -spec update_element(Tab, Key, ElementSpec :: {Pos, Value}) -> boolean() when Tab :: tab(), Key :: term(), diff --git a/lib/stdlib/src/filename.erl b/lib/stdlib/src/filename.erl index e6bde5673c..632af17e2a 100644 --- a/lib/stdlib/src/filename.erl +++ b/lib/stdlib/src/filename.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1997-2013. All Rights Reserved. +%% Copyright Ericsson AB 1997-2014. All Rights Reserved. 
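Usage sketch for the two ets additions above, ets:take/2 and ets:update_counter/4 (table name and keys are made up):

    T = ets:new(counters, [set]),
    3 = ets:update_counter(T, hits, {2, 3}, {hits, 0}),  %% default row inserted, then incremented
    [{hits, 3}] = ets:take(T, hits),                     %% returns the matching objects and deletes them
    [] = ets:lookup(T, hits).
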
%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -444,6 +444,8 @@ join1([], RelativeName, [$:|Rest], win32) -> join1(RelativeName, [], [$:|Rest], win32); join1([], RelativeName, [$/|Result], OsType) -> join1(RelativeName, [], [$/|Result], OsType); +join1([], RelativeName, [$., $/|Result], OsType) -> + join1(RelativeName, [], [$/|Result], OsType); join1([], RelativeName, Result, OsType) -> join1(RelativeName, [], [$/|Result], OsType); join1([[_|_]=List|Rest], RelativeName, Result, OsType) -> @@ -470,6 +472,8 @@ join1b(<<>>, RelativeName, [$:|Rest], win32) -> join1b(RelativeName, <<>>, [$:|Rest], win32); join1b(<<>>, RelativeName, [$/|Result], OsType) -> join1b(RelativeName, <<>>, [$/|Result], OsType); +join1b(<<>>, RelativeName, [$., $/|Result], OsType) -> + join1b(RelativeName, <<>>, [$/|Result], OsType); join1b(<<>>, RelativeName, Result, OsType) -> join1b(RelativeName, <<>>, [$/|Result], OsType); join1b(<<Char,Rest/binary>>, RelativeName, Result, OsType) when is_integer(Char) -> diff --git a/lib/stdlib/src/gen.erl b/lib/stdlib/src/gen.erl index 63116fa16e..6d7ca3d75c 100644 --- a/lib/stdlib/src/gen.erl +++ b/lib/stdlib/src/gen.erl @@ -26,7 +26,7 @@ %%% The standard behaviour should export init_it/6. %%%----------------------------------------------------------------- -export([start/5, start/6, debug_options/1, - call/3, call/4, reply/2]). + call/3, call/4, reply/2, stop/1, stop/3]). -export([init_it/6, init_it/7]). @@ -145,56 +145,10 @@ init_it2(GenMod, Starter, Parent, Name, Mod, Args, Options) -> call(Process, Label, Request) -> call(Process, Label, Request, ?default_timeout). -%% Local or remote by pid -call(Pid, Label, Request, Timeout) - when is_pid(Pid), Timeout =:= infinity; - is_pid(Pid), is_integer(Timeout), Timeout >= 0 -> - do_call(Pid, Label, Request, Timeout); -%% Local by name -call(Name, Label, Request, Timeout) - when is_atom(Name), Timeout =:= infinity; - is_atom(Name), is_integer(Timeout), Timeout >= 0 -> - case whereis(Name) of - Pid when is_pid(Pid) -> - do_call(Pid, Label, Request, Timeout); - undefined -> - exit(noproc) - end; -%% Global by name call(Process, Label, Request, Timeout) - when ((tuple_size(Process) == 2 andalso element(1, Process) == global) - orelse - (tuple_size(Process) == 3 andalso element(1, Process) == via)) - andalso - (Timeout =:= infinity orelse (is_integer(Timeout) andalso Timeout >= 0)) -> - case where(Process) of - Pid when is_pid(Pid) -> - Node = node(Pid), - try do_call(Pid, Label, Request, Timeout) - catch - exit:{nodedown, Node} -> - %% A nodedown not yet detected by global, - %% pretend that it was. - exit(noproc) - end; - undefined -> - exit(noproc) - end; -%% Local by name in disguise -call({Name, Node}, Label, Request, Timeout) - when Node =:= node(), Timeout =:= infinity; - Node =:= node(), is_integer(Timeout), Timeout >= 0 -> - call(Name, Label, Request, Timeout); -%% Remote by name -call({_Name, Node}=Process, Label, Request, Timeout) - when is_atom(Node), Timeout =:= infinity; - is_atom(Node), is_integer(Timeout), Timeout >= 0 -> - if - node() =:= nonode@nohost -> - exit({nodedown, Node}); - true -> - do_call(Process, Label, Request, Timeout) - end. + when Timeout =:= infinity; is_integer(Timeout), Timeout >= 0 -> + Fun = fun(Pid) -> do_call(Pid, Label, Request, Timeout) end, + do_for_proc(Process, Fun). 
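The added join1/join1b clauses above collapse a trailing "/." in the first component when joining; an illustrative call (the "previously" result is inferred from the unmodified fallback clause, so treat it as approximate):

    1> filename:join("usr/local/.", "bin").
    "usr/local/bin"        %% previously "usr/local/./bin"
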
do_call(Process, Label, Request, Timeout) -> try erlang:monitor(process, Process) of @@ -276,6 +230,65 @@ reply({To, Tag}, Reply) -> Msg = {Tag, Reply}, try To ! Msg catch _:_ -> Msg end. +%%----------------------------------------------------------------- +%% Syncronously stop a generic process +%%----------------------------------------------------------------- +stop(Process) -> + stop(Process, normal, infinity). + +stop(Process, Reason, Timeout) + when Timeout =:= infinity; is_integer(Timeout), Timeout >= 0 -> + Fun = fun(Pid) -> proc_lib:stop(Pid, Reason, Timeout) end, + do_for_proc(Process, Fun). + +%%----------------------------------------------------------------- +%% Map different specifications of a process to either Pid or +%% {Name,Node}. Execute the given Fun with the process as only +%% argument. +%% ----------------------------------------------------------------- + +%% Local or remote by pid +do_for_proc(Pid, Fun) when is_pid(Pid) -> + Fun(Pid); +%% Local by name +do_for_proc(Name, Fun) when is_atom(Name) -> + case whereis(Name) of + Pid when is_pid(Pid) -> + Fun(Pid); + undefined -> + exit(noproc) + end; +%% Global by name +do_for_proc(Process, Fun) + when ((tuple_size(Process) == 2 andalso element(1, Process) == global) + orelse + (tuple_size(Process) == 3 andalso element(1, Process) == via)) -> + case where(Process) of + Pid when is_pid(Pid) -> + Node = node(Pid), + try Fun(Pid) + catch + exit:{nodedown, Node} -> + %% A nodedown not yet detected by global, + %% pretend that it was. + exit(noproc) + end; + undefined -> + exit(noproc) + end; +%% Local by name in disguise +do_for_proc({Name, Node}, Fun) when Node =:= node() -> + do_for_proc(Name, Fun); +%% Remote by name +do_for_proc({_Name, Node} = Process, Fun) when is_atom(Node) -> + if + node() =:= nonode@nohost -> + exit({nodedown, Node}); + true -> + Fun(Process) + end. + + %%%----------------------------------------------------------------- %%% Misc. functions. %%%----------------------------------------------------------------- diff --git a/lib/stdlib/src/gen_event.erl b/lib/stdlib/src/gen_event.erl index 469acdc37c..5a1fff3a9c 100644 --- a/lib/stdlib/src/gen_event.erl +++ b/lib/stdlib/src/gen_event.erl @@ -31,8 +31,8 @@ %%% Modified by Martin - uses proc_lib, sys and gen! --export([start/0, start/1, start_link/0, start_link/1, stop/1, notify/2, - sync_notify/2, +-export([start/0, start/1, start_link/0, start_link/1, stop/1, stop/3, + notify/2, sync_notify/2, add_handler/3, add_sup_handler/3, delete_handler/3, swap_handler/3, swap_sup_handler/3, which_handlers/1, call/3, call/4, wake_hib/4]). @@ -99,6 +99,14 @@ -callback code_change(OldVsn :: (term() | {down, term()}), State :: term(), Extra :: term()) -> {ok, NewState :: term()}. +-callback format_status(Opt, StatusData) -> Status when + Opt :: 'normal' | 'terminate', + StatusData :: [PDict | State], + PDict :: [{Key :: term(), Value :: term()}], + State :: term(), + Status :: term(). + +-optional_callbacks([format_status/2]). %%--------------------------------------------------------------------------- @@ -183,7 +191,11 @@ swap_sup_handler(M, {H1, A1}, {H2, A2}) -> which_handlers(M) -> rpc(M, which_handlers). -spec stop(emgr_ref()) -> 'ok'. -stop(M) -> rpc(M, stop). +stop(M) -> + gen:stop(M). + +stop(M, Reason, Timeout) -> + gen:stop(M, Reason, Timeout). 
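The behaviours now delegate to gen:stop/1,3 shown above; a usage sketch with gen_event, where the manager is stopped synchronously (pids and reasons illustrative):

    {ok, Pid} = gen_event:start_link(),
    ok = gen_event:stop(Pid),                    %% reason 'normal', waits until the manager is down
    {ok, Pid2} = gen_event:start_link(),
    ok = gen_event:stop(Pid2, shutdown, 5000).   %% explicit reason and timeout
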
rpc(M, Cmd) -> {ok, Reply} = gen:call(M, self(), Cmd, infinity), diff --git a/lib/stdlib/src/gen_fsm.erl b/lib/stdlib/src/gen_fsm.erl index 5afe3e8b09..89825a6a57 100644 --- a/lib/stdlib/src/gen_fsm.erl +++ b/lib/stdlib/src/gen_fsm.erl @@ -106,6 +106,7 @@ -export([start/3, start/4, start_link/3, start_link/4, + stop/1, stop/3, send_event/2, sync_send_event/2, sync_send_event/3, send_all_state_event/2, sync_send_all_state_event/2, sync_send_all_state_event/3, @@ -160,6 +161,14 @@ -callback code_change(OldVsn :: term() | {down, term()}, StateName :: atom(), StateData :: term(), Extra :: term()) -> {ok, NextStateName :: atom(), NewStateData :: term()}. +-callback format_status(Opt, StatusData) -> Status when + Opt :: 'normal' | 'terminate', + StatusData :: [PDict | State], + PDict :: [{Key :: term(), Value :: term()}], + State :: term(), + Status :: term(). + +-optional_callbacks([format_status/2]). %%% --------------------------------------------------- %%% Starts a generic state machine. @@ -189,6 +198,11 @@ start_link(Mod, Args, Options) -> start_link(Name, Mod, Args, Options) -> gen:start(?MODULE, link, Name, Mod, Args, Options). +stop(Name) -> + gen:stop(Name). + +stop(Name, Reason, Timeout) -> + gen:stop(Name, Reason, Timeout). send_event({global, Name}, Event) -> catch global:send(Name, {'$gen_event', Event}), diff --git a/lib/stdlib/src/gen_server.erl b/lib/stdlib/src/gen_server.erl index 18ef4a2507..b29e40e5f7 100644 --- a/lib/stdlib/src/gen_server.erl +++ b/lib/stdlib/src/gen_server.erl @@ -88,6 +88,7 @@ %% API -export([start/3, start/4, start_link/3, start_link/4, + stop/1, stop/3, call/2, call/3, cast/2, reply/2, abcast/2, abcast/3, @@ -137,6 +138,15 @@ -callback code_change(OldVsn :: (term() | {down, term()}), State :: term(), Extra :: term()) -> {ok, NewState :: term()} | {error, Reason :: term()}. +-callback format_status(Opt, StatusData) -> Status when + Opt :: 'normal' | 'terminate', + StatusData :: [PDict | State], + PDict :: [{Key :: term(), Value :: term()}], + State :: term(), + Status :: term(). + +-optional_callbacks([format_status/2]). + %%% ----------------------------------------------------------------- %%% Starts a generic server. @@ -168,6 +178,17 @@ start_link(Name, Mod, Args, Options) -> %% ----------------------------------------------------------------- +%% Stop a generic server and wait for it to terminate. +%% If the server is located at another node, that node will +%% be monitored. +%% ----------------------------------------------------------------- +stop(Name) -> + gen:stop(Name). + +stop(Name, Reason, Timeout) -> + gen:stop(Name, Reason, Timeout). + +%% ----------------------------------------------------------------- %% Make a call to a generic server. %% If the server is located at another node, that node will %% be monitored. @@ -849,22 +870,10 @@ opt(_, []) -> debug_options(Name, Opts) -> case opt(debug, Opts) of - {ok, Options} -> dbg_options(Name, Options); - _ -> dbg_options(Name, []) + {ok, Options} -> dbg_opts(Name, Options); + _ -> [] end. -dbg_options(Name, []) -> - Opts = - case init:get_argument(generic_debug) of - error -> - []; - _ -> - [log, statistics] - end, - dbg_opts(Name, Opts); -dbg_options(Name, Opts) -> - dbg_opts(Name, Opts). 
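[editor's sketch] The same stop/1 and stop/3 pair is added to gen_fsm and gen_server above, both delegating to gen:stop. A minimal sketch (the callback module name is made up):

    {ok, Pid} = gen_server:start_link(my_server, [], []),   %% 'my_server' is hypothetical
    ok = gen_server:stop(Pid),                               %% terminate with 'normal', wait forever
    %% or bound the wait and pass a reason that terminate/2 will see:
    %% ok = gen_server:stop(Pid, shutdown, 1000).
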
- dbg_opts(Name, Opts) -> case catch sys:debug_options(Opts) of {'EXIT',_} -> diff --git a/lib/stdlib/src/io_lib.erl b/lib/stdlib/src/io_lib.erl index adc9a0cf5f..e90cda0533 100644 --- a/lib/stdlib/src/io_lib.erl +++ b/lib/stdlib/src/io_lib.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2013. All Rights Reserved. +%% Copyright Ericsson AB 1996-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -60,6 +60,7 @@ -module(io_lib). -export([fwrite/2,fread/2,fread/3,format/2]). +-export([scan_format/2,unscan_format/1,build_text/1]). -export([print/1,print/4,indentation/2]). -export([write/1,write/2,write/3,nl/0,format_prompt/1,format_prompt/2]). @@ -83,7 +84,7 @@ deep_unicode_char_list/1]). -export_type([chars/0, latin1_string/0, continuation/0, - fread_error/0, fread_item/0]). + fread_error/0, fread_item/0, format_spec/0]). %%---------------------------------------------------------------------- @@ -108,6 +109,18 @@ -type fread_item() :: string() | atom() | integer() | float(). +-type format_spec() :: + #{ + control_char => char(), + args => [any()], + width => 'none' | integer(), + adjust => 'left' | 'right', + precision => 'none' | integer(), + pad_char => char(), + encoding => 'unicode' | 'latin1', + strings => boolean() + }. + %%---------------------------------------------------------------------- %% Interface calls to sub-modules. @@ -156,6 +169,31 @@ format(Format, Args) -> Other end. +-spec scan_format(Format, Data) -> FormatList when + Format :: io:format(), + Data :: [term()], + FormatList :: [char() | format_spec()]. + +scan_format(Format, Args) -> + try io_lib_format:scan(Format, Args) + catch + _:_ -> erlang:error(badarg, [Format, Args]) + end. + +-spec unscan_format(FormatList) -> {Format, Data} when + FormatList :: [char() | format_spec()], + Format :: io:format(), + Data :: [term()]. + +unscan_format(FormatList) -> + io_lib_format:unscan(FormatList). + +-spec build_text(FormatList) -> chars() when + FormatList :: [char() | format_spec()]. + +build_text(FormatList) -> + io_lib_format:build(FormatList). + -spec print(Term) -> chars() when Term :: term(). diff --git a/lib/stdlib/src/io_lib_format.erl b/lib/stdlib/src/io_lib_format.erl index 89ae6fb187..015afb317a 100644 --- a/lib/stdlib/src/io_lib_format.erl +++ b/lib/stdlib/src/io_lib_format.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2013. All Rights Reserved. +%% Copyright Ericsson AB 1996-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -20,10 +20,9 @@ %% Formatting functions of io library. --export([fwrite/2,fwrite_g/1,indentation/2]). +-export([fwrite/2,fwrite_g/1,indentation/2,scan/2,unscan/1,build/1]). -%% fwrite(Format, ArgList) -> string(). -%% Format the arguments in ArgList after string Format. Just generate +%% Format the arguments in Args after string Format. Just generate %% an error if there is an error in the arguments. %% %% To do the printing command correctly we need to calculate the @@ -37,15 +36,84 @@ %% and it also splits the handling of the control characters into two %% parts. 
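[editor's sketch] The new io_lib exports above (scan_format/2, unscan_format/1, build_text/1) let a caller parse a format string once, inspect or rewrite the resulting format_spec() maps, and then either build the text or recover a plain {Format, Args} pair. A round-trip sketch:

    FormatList = io_lib:scan_format("~w items~n", [3]),
    Text = io_lib:build_text(FormatList),              %% same chars as io_lib:format/2
    {Fmt, Args} = io_lib:unscan_format(FormatList),    %% back to a plain format string
    true = lists:flatten(Text) =:= lists:flatten(io_lib:format(Fmt, Args)).
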
-fwrite(Format, Args) when is_atom(Format) -> - fwrite(atom_to_list(Format), Args); -fwrite(Format, Args) when is_binary(Format) -> - fwrite(binary_to_list(Format), Args); +-spec fwrite(Format, Data) -> FormatList when + Format :: io:format(), + Data :: [term()], + FormatList :: [char() | io_lib:format_spec()]. + fwrite(Format, Args) -> - Cs = collect(Format, Args), + build(scan(Format, Args)). + +%% Build the output text for a pre-parsed format list. + +-spec build(FormatList) -> io_lib:chars() when + FormatList :: [char() | io_lib:format_spec()]. + +build(Cs) -> Pc = pcount(Cs), build(Cs, Pc, 0). +%% Parse all control sequences in the format string. + +-spec scan(Format, Data) -> FormatList when + Format :: io:format(), + Data :: [term()], + FormatList :: [char() | io_lib:format_spec()]. + +scan(Format, Args) when is_atom(Format) -> + scan(atom_to_list(Format), Args); +scan(Format, Args) when is_binary(Format) -> + scan(binary_to_list(Format), Args); +scan(Format, Args) -> + collect(Format, Args). + +%% Revert a pre-parsed format list to a plain character list and a +%% list of arguments. + +-spec unscan(FormatList) -> {Format, Data} when + FormatList :: [char() | io_lib:format_spec()], + Format :: io:format(), + Data :: [term()]. + +unscan(Cs) -> + {print(Cs), args(Cs)}. + +args([#{args := As} | Cs]) -> + As ++ args(Cs); +args([_C | Cs]) -> + args(Cs); +args([]) -> + []. + +print([#{control_char := C, width := F, adjust := Ad, precision := P, + pad_char := Pad, encoding := Encoding, strings := Strings} | Cs]) -> + print(C, F, Ad, P, Pad, Encoding, Strings) ++ print(Cs); +print([C | Cs]) -> + [C | print(Cs)]; +print([]) -> + []. + +print(C, F, Ad, P, Pad, Encoding, Strings) -> + [$~] ++ print_field_width(F, Ad) ++ print_precision(P) ++ + print_pad_char(Pad) ++ print_encoding(Encoding) ++ + print_strings(Strings) ++ [C]. + +print_field_width(none, _Ad) -> ""; +print_field_width(F, left) -> integer_to_list(-F); +print_field_width(F, right) -> integer_to_list(F). + +print_precision(none) -> ""; +print_precision(P) -> [$. | integer_to_list(P)]. + +print_pad_char($\s) -> ""; % default, no need to make explicit +print_pad_char(Pad) -> [$., Pad]. + +print_encoding(unicode) -> "t"; +print_encoding(latin1) -> "". + +print_strings(false) -> "l"; +print_strings(true) -> "". + collect([$~|Fmt0], Args0) -> {C,Fmt1,Args1} = collect_cseq(Fmt0, Args0), [C|collect(Fmt1, Args1)]; @@ -60,7 +128,10 @@ collect_cseq(Fmt0, Args0) -> {Encoding,Fmt4,Args4} = encoding(Fmt3, Args3), {Strings,Fmt5,Args5} = strings(Fmt4, Args4), {C,As,Fmt6,Args6} = collect_cc(Fmt5, Args5), - {{C,As,F,Ad,P,Pad,Encoding,Strings},Fmt6,Args6}. + FormatSpec = #{control_char => C, args => As, width => F, adjust => Ad, + precision => P, pad_char => Pad, encoding => Encoding, + strings => Strings}, + {FormatSpec,Fmt6,Args6}. encoding([$t|Fmt],Args) -> true = hd(Fmt) =/= $l, @@ -136,17 +207,19 @@ collect_cc([$i|Fmt], [A|Args]) -> {$i,[A],Fmt,Args}. pcount(Cs) -> pcount(Cs, 0). -pcount([{$p,_As,_F,_Ad,_P,_Pad,_Enc,_Str}|Cs], Acc) -> pcount(Cs, Acc+1); -pcount([{$P,_As,_F,_Ad,_P,_Pad,_Enc,_Str}|Cs], Acc) -> pcount(Cs, Acc+1); +pcount([#{control_char := $p}|Cs], Acc) -> pcount(Cs, Acc+1); +pcount([#{control_char := $P}|Cs], Acc) -> pcount(Cs, Acc+1); pcount([_|Cs], Acc) -> pcount(Cs, Acc); pcount([], Acc) -> Acc. -%% build([Control], Pc, Indentation) -> string(). +%% build([Control], Pc, Indentation) -> io_lib:chars(). %% Interpret the control structures. Count the number of print %% remaining and only calculate indentation when necessary. 
Must also %% be smart when calculating indentation for characters in format. -build([{C,As,F,Ad,P,Pad,Enc,Str}|Cs], Pc0, I) -> +build([#{control_char := C, args := As, width := F, adjust := Ad, + precision := P, pad_char := Pad, encoding := Enc, + strings := Str} | Cs], Pc0, I) -> S = control(C, As, F, Ad, P, Pad, Enc, Str, I), Pc1 = decr_pc(C, Pc0), if @@ -162,10 +235,14 @@ decr_pc($p, Pc) -> Pc - 1; decr_pc($P, Pc) -> Pc - 1; decr_pc(_, Pc) -> Pc. -%% indentation(String, Indentation) -> Indentation. + %% Calculate the indentation of the end of a string given its start %% indentation. We assume tabs at 8 cols. +-spec indentation(String, StartIndent) -> integer() when + String :: io_lib:chars(), + StartIndent :: integer(). + indentation([$\n|Cs], _I) -> indentation(Cs, 0); indentation([$\t|Cs], I) -> indentation(Cs, ((I + 8) div 8) * 8); indentation([C|Cs], I) when is_integer(C) -> @@ -366,7 +443,6 @@ float_data([D|Cs], Ds) when D >= $0, D =< $9 -> float_data([_|Cs], Ds) -> float_data(Cs, Ds). -%% fwrite_g(Float) %% Writes the shortest, correctly rounded string that converts %% to Float when read back with list_to_float/1. %% @@ -374,6 +450,8 @@ float_data([_|Cs], Ds) -> %% in Proceedings of the SIGPLAN '96 Conference on Programming %% Language Design and Implementation. +-spec fwrite_g(float()) -> string(). + fwrite_g(0.0) -> "0.0"; fwrite_g(Float) when is_float(Float) -> @@ -642,7 +720,7 @@ prefixed_integer(Int, F, Adj, Base, Pad, Prefix, Lowercase) term([Prefix|S], F, Adj, none, Pad) end. -%% char(Char, Field, Adjust, Precision, PadChar) -> string(). +%% char(Char, Field, Adjust, Precision, PadChar) -> chars(). char(C, none, _Adj, none, _Pad) -> [C]; char(C, F, _Adj, none, _Pad) -> chars(C, F); diff --git a/lib/stdlib/src/math.erl b/lib/stdlib/src/math.erl index 98a70b1644..43f736e54c 100644 --- a/lib/stdlib/src/math.erl +++ b/lib/stdlib/src/math.erl @@ -24,7 +24,7 @@ -export([sin/1, cos/1, tan/1, asin/1, acos/1, atan/1, atan2/2, sinh/1, cosh/1, tanh/1, asinh/1, acosh/1, atanh/1, exp/1, log/1, - log10/1, pow/2, sqrt/1, erf/1, erfc/1]). + log2/1, log10/1, pow/2, sqrt/1, erf/1, erfc/1]). -spec acos(X) -> float() when X :: number(). @@ -92,6 +92,11 @@ exp(_) -> log(_) -> erlang:nif_error(undef). +-spec log2(X) -> float() when + X :: number(). +log2(_) -> + erlang:nif_error(undef). + -spec log10(X) -> float() when X :: number(). log10(_) -> diff --git a/lib/stdlib/src/ms_transform.erl b/lib/stdlib/src/ms_transform.erl index 97564e2e44..7b6f4e5b50 100644 --- a/lib/stdlib/src/ms_transform.erl +++ b/lib/stdlib/src/ms_transform.erl @@ -1079,6 +1079,12 @@ normalise({cons,_,Head,Tail}) -> [normalise(Head)|normalise(Tail)]; normalise({tuple,_,Args}) -> list_to_tuple(normalise_list(Args)); +normalise({map,_,Pairs0}) -> + Pairs1 = lists:map(fun ({map_field_exact,_,K,V}) -> + {normalise(K),normalise(V)} + end, + Pairs0), + maps:from_list(Pairs1); %% Special case for unary +/-. normalise({op,_,'+',{char,_,I}}) -> I; normalise({op,_,'+',{integer,_,I}}) -> I; diff --git a/lib/stdlib/src/otp_internal.erl b/lib/stdlib/src/otp_internal.erl index 6c25beabe9..4a338798d0 100644 --- a/lib/stdlib/src/otp_internal.erl +++ b/lib/stdlib/src/otp_internal.erl @@ -577,8 +577,19 @@ obsolete_1(asn1rt, utf8_binary_to_list, 1) -> {deprecated,{unicode,characters_to_list,1}}; obsolete_1(asn1rt, utf8_list_to_binary, 1) -> {deprecated,{unicode,characters_to_binary,1}}; -obsolete_1(pg, _, _) -> - {deprecated,"deprecated; will be removed in OTP 18"}; + +%% Added in OTP 18. 
+obsolete_1(core_lib, get_anno, 1) -> + {deprecated,{cerl,get_ann,1}}; +obsolete_1(core_lib, set_anno, 2) -> + {deprecated,{cerl,set_ann,2}}; +obsolete_1(core_lib, is_literal, 1) -> + {deprecated,{cerl,is_literal,1}}; +obsolete_1(core_lib, is_literal_list, 1) -> + {deprecated,"deprecated; use lists:all(fun cerl:is_literal/1, L)" + " instead"}; +obsolete_1(core_lib, literal_value, 1) -> + {deprecated,{core_lib,concrete,1}}; obsolete_1(_, _, _) -> no. diff --git a/lib/stdlib/src/pg.erl b/lib/stdlib/src/pg.erl deleted file mode 100644 index a41fd329c2..0000000000 --- a/lib/stdlib/src/pg.erl +++ /dev/null @@ -1,187 +0,0 @@ -%% -%% %CopyrightBegin% -%% -%% Copyright Ericsson AB 1996-2014. All Rights Reserved. -%% -%% The contents of this file are subject to the Erlang Public License, -%% Version 1.1, (the "License"); you may not use this file except in -%% compliance with the License. You should have received a copy of the -%% Erlang Public License along with this software. If not, it can be -%% retrieved online at http://www.erlang.org/. -%% -%% Software distributed under the License is distributed on an "AS IS" -%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See -%% the License for the specific language governing rights and limitations -%% under the License. -%% -%% %CopyrightEnd% -%% --module(pg). --deprecated(module). - -%% pg provides a process group facility. Messages -%% can be multicasted to all members in the group - --export([create/1, - create/2, - standby/2, - join/2, - send/2, - esend/2, - members/1, - name_to_pid/1, - master/1]). - - -%% Create a brand new empty process group with the master residing -%% at the local node - --spec create(PgName) -> 'ok' | {'error', Reason} when - PgName :: term(), - Reason :: 'already_created' | term(). - -create(PgName) -> - catch begin check(PgName), - Pid = spawn(pg,master,[PgName]), - global:register_name(PgName,Pid), - ok end. - -%% Create a brand new empty process group with the master -%% residing at Node - --spec create(PgName, Node) -> 'ok' | {'error', Reason} when - PgName :: term(), - Node :: node(), - Reason :: 'already_created' | term(). - -create(PgName, Node) -> - catch begin check(PgName), - Pid = spawn(Node,pg,master,[PgName]), - global:register_name(PgName,Pid), - ok end. - -%% Have a process on Node that will act as a standby for the process -%% group manager. So if the node where the manager runs fails, the -%% process group will continue to function. - --spec standby(term(), node()) -> 'ok'. - -standby(_PgName, _Node) -> - ok. - -%% Tell process group PgName that Pid is a new member of the group -%% synchronously return a list of all old members in the group - --spec join(PgName, Pid) -> Members when - PgName :: term(), - Pid :: pid(), - Members :: [pid()]. - -join(PgName, Pid) when is_atom(PgName) -> - global:send(PgName, {join,self(),Pid}), - receive - {_P,{members,Members}} -> - Members - end. - -%% Multi cast Mess to all members in the group - --spec send(PgName, Msg) -> 'ok' when - PgName :: term(), - Msg :: term(). - -send(PgName, Mess) when is_atom(PgName) -> - global:send(PgName, {send, self(), Mess}), - ok; -send(Pg, Mess) when is_pid(Pg) -> - Pg ! {send,self(),Mess}, - ok. - -%% multi cast a message to all members in the group but ourselves -%% If we are a member - --spec esend(PgName, Msg) -> 'ok' when - PgName :: term(), - Msg :: term(). - -esend(PgName, Mess) when is_atom(PgName) -> - global:send(PgName, {esend,self(),Mess}), - ok; -esend(Pg, Mess) when is_pid(Pg) -> - Pg ! 
{esend,self(),Mess}, - ok. - -%% Return the members of the group - --spec members(PgName) -> Members when - PgName :: term(), - Members :: [pid()]. - -members(PgName) when is_atom(PgName) -> - global:send(PgName, {self() ,members}), - receive - {_P,{members,Members}} -> - Members - end; -members(Pg) when is_pid(Pg) -> - Pg ! {self,members}, - receive - {_P,{members,Members}} -> - Members - end. - --spec name_to_pid(atom()) -> pid() | 'undefined'. - -name_to_pid(PgName) when is_atom(PgName) -> - global:whereis_name(PgName). - --spec master(term()) -> no_return(). - -master(PgName) -> - process_flag(trap_exit, true), - master_loop(PgName, []). - -master_loop(PgName,Members) -> - receive - {send,From,Message} -> - send_all(Members,{pg_message,From,PgName,Message}), - master_loop(PgName,Members); - {esend,From,Message} -> - send_all(lists:delete(From,Members), - {pg_message,From,PgName,Message}), - master_loop(PgName,Members); - {join,From,Pid} -> - link(Pid), - send_all(Members,{new_member,PgName,Pid}), - From ! {self(),{members,Members}}, - master_loop(PgName,[Pid|Members]); - {From,members} -> - From ! {self(),{members,Members}}, - master_loop(PgName,Members); - {'EXIT',From,_} -> - L = - case lists:member(From,Members) of - true -> - NewMembers = lists:delete(From,Members), - send_all(NewMembers, {crashed_member,PgName,From}), - NewMembers; - false -> - Members - end, - master_loop(PgName,L) - end. - -send_all([], _) -> ok; -send_all([P|Ps], M) -> - P ! M, - send_all(Ps, M). - -%% Check if the process group already exists - -check(PgName) -> - case global:whereis_name(PgName) of - Pid when is_pid(Pid) -> - throw({error,already_created}); - undefined -> - ok - end. diff --git a/lib/stdlib/src/proc_lib.erl b/lib/stdlib/src/proc_lib.erl index bf2a4e7ac5..8792ff44d3 100644 --- a/lib/stdlib/src/proc_lib.erl +++ b/lib/stdlib/src/proc_lib.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2013. All Rights Reserved. +%% Copyright Ericsson AB 1996-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -30,7 +30,8 @@ hibernate/3, init_ack/1, init_ack/2, init_p/3,init_p/5,format/1,format/2,initial_call/1, - translate_initial_call/1]). + translate_initial_call/1, + stop/1, stop/3]). %% Internal exports. -export([wake_up/3]). @@ -748,3 +749,50 @@ format_tag(Tag, Data) -> modifier(latin1) -> ""; modifier(_) -> "t". + + +%%% ----------------------------------------------------------- +%%% Stop a process and wait for it to terminate +%%% ----------------------------------------------------------- +-spec stop(Process) -> 'ok' when + Process :: pid() | RegName | {RegName,node()}, + RegName :: atom(). +stop(Process) -> + stop(Process, normal, infinity). + +-spec stop(Process, Reason, Timeout) -> 'ok' when + Process :: pid() | RegName | {RegName,node()}, + RegName :: atom(), + Reason :: term(), + Timeout :: timeout(). +stop(Process, Reason, Timeout) -> + {Pid, Mref} = erlang:spawn_monitor(do_stop(Process, Reason)), + receive + {'DOWN', Mref, _, _, Reason} -> + ok; + {'DOWN', Mref, _, _, {noproc,{sys,terminate,_}}} -> + exit(noproc); + {'DOWN', Mref, _, _, CrashReason} -> + exit(CrashReason) + after Timeout -> + exit(Pid, kill), + receive + {'DOWN', Mref, _, _, _} -> + exit(timeout) + end + end. + +-spec do_stop(Process, Reason) -> Fun when + Process :: pid() | RegName | {RegName,node()}, + RegName :: atom(), + Reason :: term(), + Fun :: fun(() -> no_return()). 
+do_stop(Process, Reason) -> + fun() -> + Mref = erlang:monitor(process, Process), + ok = sys:terminate(Process, Reason, infinity), + receive + {'DOWN', Mref, _, _, ExitReason} -> + exit(ExitReason) + end + end. diff --git a/lib/stdlib/src/stdlib.app.src b/lib/stdlib/src/stdlib.app.src index aa9899da3b..f134c75869 100644 --- a/lib/stdlib/src/stdlib.app.src +++ b/lib/stdlib/src/stdlib.app.src @@ -77,7 +77,6 @@ orddict, ordsets, otp_internal, - pg, pool, proc_lib, proplists, diff --git a/lib/stdlib/src/stdlib.appup.src b/lib/stdlib/src/stdlib.appup.src index 7802ea884f..5900fd3ff3 100644 --- a/lib/stdlib/src/stdlib.appup.src +++ b/lib/stdlib/src/stdlib.appup.src @@ -18,10 +18,8 @@ {"%VSN%", %% Up from - max one major revision back [{<<"2\\.[1-2](\\.[0-9]+)*">>,[restart_new_emulator]}, %% 17.1-17.3 - {<<"2\\.0(\\.[0-9]+)*">>,[restart_new_emulator]}, %% 17.0 - {<<"1\\.19(\\.[0-9]+)*">>,[restart_new_emulator]}],%% R16 + {<<"2\\.0(\\.[0-9]+)*">>,[restart_new_emulator]}], %% 17.0 %% Down to - max one major revision back [{<<"2\\.[1-2](\\.[0-9]+)*">>,[restart_new_emulator]}, %% 17.1-17.3 - {<<"2\\.0(\\.[0-9]+)*">>,[restart_new_emulator]}, %% 17.0 - {<<"1\\.19(\\.[0-9]+)*">>,[restart_new_emulator]}] %% R16 + {<<"2\\.0(\\.[0-9]+)*">>,[restart_new_emulator]}] %% 17.0 }. diff --git a/lib/stdlib/src/string.erl b/lib/stdlib/src/string.erl index f9b083a56d..f6903d1c3d 100644 --- a/lib/stdlib/src/string.erl +++ b/lib/stdlib/src/string.erl @@ -221,23 +221,47 @@ substr2([_|String], S) -> substr2(String, S-1). Tokens :: [Token :: nonempty_string()]. tokens(S, Seps) -> - tokens1(S, Seps, []). + case Seps of + [] -> + case S of + [] -> []; + [_|_] -> [S] + end; + [C] -> + tokens_single_1(reverse(S), C, []); + [_|_] -> + tokens_multiple_1(reverse(S), Seps, []) + end. -tokens1([C|S], Seps, Toks) -> +tokens_single_1([Sep|S], Sep, Toks) -> + tokens_single_1(S, Sep, Toks); +tokens_single_1([C|S], Sep, Toks) -> + tokens_single_2(S, Sep, Toks, [C]); +tokens_single_1([], _, Toks) -> + Toks. + +tokens_single_2([Sep|S], Sep, Toks, Tok) -> + tokens_single_1(S, Sep, [Tok|Toks]); +tokens_single_2([C|S], Sep, Toks, Tok) -> + tokens_single_2(S, Sep, Toks, [C|Tok]); +tokens_single_2([], _Sep, Toks, Tok) -> + [Tok|Toks]. + +tokens_multiple_1([C|S], Seps, Toks) -> case member(C, Seps) of - true -> tokens1(S, Seps, Toks); - false -> tokens2(S, Seps, Toks, [C]) + true -> tokens_multiple_1(S, Seps, Toks); + false -> tokens_multiple_2(S, Seps, Toks, [C]) end; -tokens1([], _Seps, Toks) -> - reverse(Toks). +tokens_multiple_1([], _Seps, Toks) -> + Toks. -tokens2([C|S], Seps, Toks, Cs) -> +tokens_multiple_2([C|S], Seps, Toks, Tok) -> case member(C, Seps) of - true -> tokens1(S, Seps, [reverse(Cs)|Toks]); - false -> tokens2(S, Seps, Toks, [C|Cs]) + true -> tokens_multiple_1(S, Seps, [Tok|Toks]); + false -> tokens_multiple_2(S, Seps, Toks, [C|Tok]) end; -tokens2([], _Seps, Toks, Cs) -> - reverse([reverse(Cs)|Toks]). +tokens_multiple_2([], _Seps, Toks, Tok) -> + [Tok|Toks]. -spec chars(Character, Number) -> String when Character :: char(), diff --git a/lib/stdlib/src/supervisor.erl b/lib/stdlib/src/supervisor.erl index ede2742875..658c00dc77 100644 --- a/lib/stdlib/src/supervisor.erl +++ b/lib/stdlib/src/supervisor.erl @@ -25,7 +25,7 @@ start_child/2, restart_child/2, delete_child/2, terminate_child/2, which_children/1, count_children/1, - check_childspecs/1]). + check_childspecs/1, get_childspec/2]). 
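[editor's sketch] The string:tokens/2 rewrite above (a single-separator fast path, building tokens from the reversed string) is an optimization only; the documented behaviour should be unchanged, in particular runs of separators still yield no empty tokens:

    ["a","b","c"] = string:tokens("a..b...c", "."),
    ["foo","bar"] = string:tokens(";;foo;bar;;", ";").
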
%% Internal exports -export([init/1, handle_call/3, handle_cast/2, handle_info/2, @@ -34,7 +34,7 @@ %%-------------------------------------------------------------------------- --export_type([child_spec/0, startchild_ret/0, strategy/0]). +-export_type([sup_flags/0, child_spec/0, startchild_ret/0, strategy/0]). %%-------------------------------------------------------------------------- @@ -53,7 +53,13 @@ | {'global', Name :: atom()} | {'via', Module :: module(), Name :: any()} | pid(). --type child_spec() :: {Id :: child_id(), +-type child_spec() :: #{id => child_id(), % mandatory + start => mfargs(), % mandatory + restart => restart(), % optional + shutdown => shutdown(), % optional + type => worker(), % optional + modules => modules()} % optional + | {Id :: child_id(), StartFunc :: mfargs(), Restart :: restart(), Shutdown :: shutdown(), @@ -63,6 +69,23 @@ -type strategy() :: 'one_for_all' | 'one_for_one' | 'rest_for_one' | 'simple_one_for_one'. +-type sup_flags() :: #{strategy => strategy(), % optional + intensity => non_neg_integer(), % optional + period => pos_integer()} % optional + | {RestartStrategy :: strategy(), + Intensity :: non_neg_integer(), + Period :: pos_integer()}. + +%%-------------------------------------------------------------------------- +%% Defaults +-define(default_flags, #{strategy => one_for_one, + intensity => 1, + period => 5}). +-define(default_child_spec, #{restart => permanent, + type => worker}). +%% Default 'shutdown' is 5000 for workers and infinity for supervisors. +%% Default 'modules' is [M], where M comes from the child's start {M,F,A}. + %%-------------------------------------------------------------------------- -record(child, {% pid is undefined when child is not running @@ -96,10 +119,7 @@ -define(is_simple(State), State#state.strategy =:= simple_one_for_one). -callback init(Args :: term()) -> - {ok, {{RestartStrategy :: strategy(), - MaxR :: non_neg_integer(), - MaxT :: non_neg_integer()}, - [ChildSpec :: child_spec()]}} + {ok, {SupFlags :: sup_flags(), [ChildSpec :: child_spec()]}} | ignore. -define(restarting(_Pid_), {restarting,_Pid_}). @@ -178,6 +198,14 @@ delete_child(Supervisor, Name) -> terminate_child(Supervisor, Name) -> call(Supervisor, {terminate_child, Name}). +-spec get_childspec(SupRef, Id) -> Result when + SupRef :: sup_ref(), + Id :: pid() | child_id(), + Result :: {'ok', child_spec()} | {'error', Error}, + Error :: 'not_found'. +get_childspec(Supervisor, Name) -> + call(Supervisor, {get_childspec, Name}). 
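[editor's sketch] supervisor:get_childspec/2, added above, returns the child specification in the new map form (built by child_to_spec/1 further down in this patch). A sketch with made-up supervisor and child ids:

    {ok, Spec} = supervisor:get_childspec(my_sup, my_worker),
    #{id := my_worker, start := {_M, _F, _A},
      restart := _, shutdown := _, type := _, modules := _} = Spec.
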
+ -spec which_children(SupRef) -> [{Id,Child,Type,Modules}] when SupRef :: sup_ref(), Id :: child_id() | undefined, @@ -431,6 +459,14 @@ handle_call({delete_child, Name}, _From, State) -> {reply, {error, not_found}, State} end; +handle_call({get_childspec, Name}, _From, State) -> + case get_child(Name, State, ?is_simple(State)) of + {value, Child} -> + {reply, {ok, child_to_spec(Child)}, State}; + false -> + {reply, {error, not_found}, State} + end; + handle_call(which_children, _From, #state{children = [#child{restart_type = temporary, child_type = CT, modules = Mods}]} = @@ -610,13 +646,11 @@ terminate(_Reason, State) -> code_change(_, State, _) -> case (State#state.module):init(State#state.args) of {ok, {SupFlags, StartSpec}} -> - case catch check_flags(SupFlags) of - ok -> - {Strategy, MaxIntensity, Period} = SupFlags, - update_childspec(State#state{strategy = Strategy, - intensity = MaxIntensity, - period = Period}, - StartSpec); + case set_flags(SupFlags, State) of + {ok, State1} -> + update_childspec(State1, StartSpec); + {invalid_type, SupFlags} -> + {error, {bad_flags, SupFlags}}; % backwards compatibility Error -> {error, Error} end; @@ -626,14 +660,6 @@ code_change(_, State, _) -> Error end. -check_flags({Strategy, MaxIntensity, Period}) -> - validStrategy(Strategy), - validIntensity(MaxIntensity), - validPeriod(Period), - ok; -check_flags(What) -> - {bad_flags, What}. - update_childspec(State, StartSpec) when ?is_simple(State) -> case check_startspec(StartSpec) of {ok, [Child]} -> @@ -1188,25 +1214,36 @@ remove_child(Child, State) -> %% Returns: {ok, state()} | Error %%----------------------------------------------------------------- init_state(SupName, Type, Mod, Args) -> - case catch init_state1(SupName, Type, Mod, Args) of - {ok, State} -> - {ok, State}; - Error -> - Error + set_flags(Type, #state{name = supname(SupName,Mod), + module = Mod, + args = Args}). + +set_flags(Flags, State) -> + try check_flags(Flags) of + #{strategy := Strategy, intensity := MaxIntensity, period := Period} -> + {ok, State#state{strategy = Strategy, + intensity = MaxIntensity, + period = Period}} + catch + Thrown -> Thrown end. -init_state1(SupName, {Strategy, MaxIntensity, Period}, Mod, Args) -> +check_flags(SupFlags) when is_map(SupFlags) -> + do_check_flags(maps:merge(?default_flags,SupFlags)); +check_flags({Strategy, MaxIntensity, Period}) -> + check_flags(#{strategy => Strategy, + intensity => MaxIntensity, + period => Period}); +check_flags(What) -> + throw({invalid_type, What}). + +do_check_flags(#{strategy := Strategy, + intensity := MaxIntensity, + period := Period} = Flags) -> validStrategy(Strategy), validIntensity(MaxIntensity), validPeriod(Period), - {ok, #state{name = supname(SupName,Mod), - strategy = Strategy, - intensity = MaxIntensity, - period = Period, - module = Mod, - args = Args}}; -init_state1(_SupName, Type, _, _) -> - {invalid_type, Type}. + Flags. validStrategy(simple_one_for_one) -> true; validStrategy(one_for_one) -> true; @@ -1227,14 +1264,7 @@ supname(N, _) -> N. %%% ------------------------------------------------------ %%% Check that the children start specification is valid. 
-%%% Shall be a six (6) tuple -%%% {Name, Func, RestartType, Shutdown, ChildType, Modules} -%%% where Name is an atom -%%% Func is {Mod, Fun, Args} == {atom(), atom(), list()} -%%% RestartType is permanent | temporary | transient -%%% Shutdown = integer() > 0 | infinity | brutal_kill -%%% ChildType = supervisor | worker -%%% Modules = [atom()] | dynamic +%%% Input: [child_spec()] %%% Returns: {ok, [child_rec()]} | Error %%% ------------------------------------------------------ @@ -1244,6 +1274,9 @@ check_startspec([ChildSpec|T], Res) -> case check_childspec(ChildSpec) of {ok, Child} -> case lists:keymember(Child#child.name, #child.name, Res) of + %% The error message duplicate_child_name is kept for + %% backwards compatibility, although + %% duplicate_child_id would be more correct. true -> {duplicate_child_name, Child#child.name}; false -> check_startspec(T, [Child | Res]) end; @@ -1252,16 +1285,41 @@ check_startspec([ChildSpec|T], Res) -> check_startspec([], Res) -> {ok, lists:reverse(Res)}. +check_childspec(ChildSpec) when is_map(ChildSpec) -> + catch do_check_childspec(maps:merge(?default_child_spec,ChildSpec)); check_childspec({Name, Func, RestartType, Shutdown, ChildType, Mods}) -> - catch check_childspec(Name, Func, RestartType, Shutdown, ChildType, Mods); + check_childspec(#{id => Name, + start => Func, + restart => RestartType, + shutdown => Shutdown, + type => ChildType, + modules => Mods}); check_childspec(X) -> {invalid_child_spec, X}. -check_childspec(Name, Func, RestartType, Shutdown, ChildType, Mods) -> +do_check_childspec(#{restart := RestartType, + type := ChildType} = ChildSpec)-> + Name = case ChildSpec of + #{id := N} -> N; + _ -> throw(missing_id) + end, + Func = case ChildSpec of + #{start := F} -> F; + _ -> throw(missing_start) + end, validName(Name), validFunc(Func), validRestartType(RestartType), validChildType(ChildType), - validShutdown(Shutdown, ChildType), + Shutdown = case ChildSpec of + #{shutdown := S} -> S; + #{type := worker} -> 5000; + #{type := supervisor} -> infinity + end, + validShutdown(Shutdown), + Mods = case ChildSpec of + #{modules := Ms} -> Ms; + _ -> {M,_,_} = Func, [M] + end, validMods(Mods), {ok, #child{name = Name, mfargs = Func, restart_type = RestartType, shutdown = Shutdown, child_type = ChildType, modules = Mods}}. @@ -1282,11 +1340,11 @@ validRestartType(temporary) -> true; validRestartType(transient) -> true; validRestartType(RestartType) -> throw({invalid_restart_type, RestartType}). -validShutdown(Shutdown, _) +validShutdown(Shutdown) when is_integer(Shutdown), Shutdown > 0 -> true; -validShutdown(infinity, _) -> true; -validShutdown(brutal_kill, _) -> true; -validShutdown(Shutdown, _) -> throw({invalid_shutdown, Shutdown}). +validShutdown(infinity) -> true; +validShutdown(brutal_kill) -> true; +validShutdown(Shutdown) -> throw({invalid_shutdown, Shutdown}). validMods(dynamic) -> true; validMods(Mods) when is_list(Mods) -> @@ -1299,6 +1357,19 @@ validMods(Mods) when is_list(Mods) -> Mods); validMods(Mods) -> throw({invalid_modules, Mods}). +child_to_spec(#child{name = Name, + mfargs = Func, + restart_type = RestartType, + shutdown = Shutdown, + child_type = ChildType, + modules = Mods}) -> + #{id => Name, + start => Func, + restart => RestartType, + shutdown => Shutdown, + type => ChildType, + modules => Mods}. 
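[editor's sketch] Putting the new map-based sup_flags() and child_spec() together, a minimal init/1 callback (the worker module is made up); keys left out fall back to the defaults defined above (restart => permanent, type => worker, shutdown 5000 for workers, modules [M]):

    init([]) ->
        SupFlags = #{strategy => one_for_one,
                     intensity => 1,
                     period => 5},
        Child = #{id => my_worker,                        %% mandatory
                  start => {my_worker, start_link, []}},  %% mandatory; the rest is defaulted
        {ok, {SupFlags, [Child]}}.
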
+ %%% ------------------------------------------------------ %%% Add a new restart and calculate if the max restart %%% intensity has been reached (in that case the supervisor @@ -1367,14 +1438,14 @@ report_error(Error, Reason, Child, SupName) -> extract_child(Child) when is_list(Child#child.pid) -> [{nb_children, length(Child#child.pid)}, - {name, Child#child.name}, + {id, Child#child.name}, {mfargs, Child#child.mfargs}, {restart_type, Child#child.restart_type}, {shutdown, Child#child.shutdown}, {child_type, Child#child.child_type}]; extract_child(Child) -> [{pid, Child#child.pid}, - {name, Child#child.name}, + {id, Child#child.name}, {mfargs, Child#child.mfargs}, {restart_type, Child#child.restart_type}, {shutdown, Child#child.shutdown}, diff --git a/lib/stdlib/src/sys.erl b/lib/stdlib/src/sys.erl index d3ba09ce82..7e4bfa1fdd 100644 --- a/lib/stdlib/src/sys.erl +++ b/lib/stdlib/src/sys.erl @@ -24,6 +24,7 @@ get_state/1, get_state/2, replace_state/2, replace_state/3, change_code/4, change_code/5, + terminate/2, terminate/3, log/2, log/3, trace/2, trace/3, statistics/2, statistics/3, log_to_file/2, log_to_file/3, no_debug/1, no_debug/2, install/2, install/3, remove/2, remove/3]). @@ -163,6 +164,19 @@ change_code(Name, Mod, Vsn, Extra) -> change_code(Name, Mod, Vsn, Extra, Timeout) -> send_system_msg(Name, {change_code, Mod, Vsn, Extra}, Timeout). +-spec terminate(Name, Reason) -> 'ok' when + Name :: name(), + Reason :: term(). +terminate(Name, Reason) -> + send_system_msg(Name, {terminate, Reason}). + +-spec terminate(Name, Reason, Timeout) -> 'ok' when + Name :: name(), + Reason :: term(), + Timeout :: timeout(). +terminate(Name, Reason, Timeout) -> + send_system_msg(Name, {terminate, Reason}, Timeout). + %%----------------------------------------------------------------- %% Debug commands %%----------------------------------------------------------------- @@ -298,6 +312,8 @@ mfa(Name, {debug, {Func, Arg2}}) -> {sys, Func, [Name, Arg2]}; mfa(Name, {change_code, Mod, Vsn, Extra}) -> {sys, change_code, [Name, Mod, Vsn, Extra]}; +mfa(Name, {terminate, Reason}) -> + {sys, terminate, [Name, Reason]}; mfa(Name, Atom) -> {sys, Atom, [Name]}. @@ -313,7 +329,7 @@ mfa(Name, Req, Timeout) -> %% Returns: This function *never* returns! It calls the function %% Module:system_continue(Parent, NDebug, Misc) %% there the process continues the execution or -%% Module:system_terminate(Raeson, Parent, Debug, Misc) if +%% Module:system_terminate(Reason, Parent, Debug, Misc) if %% the process should terminate. %% The Module must export system_continue/3, system_terminate/4 %% and format_status/2 for status information. @@ -339,7 +355,10 @@ handle_system_msg(SysState, Msg, From, Parent, Mod, Debug, Misc, Hib) -> suspend_loop(suspended, Parent, Mod, NDebug, NMisc, Hib); {running, Reply, NDebug, NMisc} -> _ = gen:reply(From, Reply), - Mod:system_continue(Parent, NDebug, NMisc) + Mod:system_continue(Parent, NDebug, NMisc); + {{terminating, Reason}, Reply, NDebug, NMisc} -> + _ = gen:reply(From, Reply), + Mod:system_terminate(Reason, Parent, NDebug, NMisc) end. 
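[editor's sketch] sys:terminate/2,3 above is the primitive that proc_lib:stop/3 builds on: it asks any sys-aware process to run Module:system_terminate/4 with the given reason and returns ok once the system message has been handled. A sketch:

    ok = sys:terminate(Pid, normal),          %% Pid is any process using sys/proc_lib
    %% or with an upper bound on the system-message round trip:
    %% ok = sys:terminate(Pid, shutdown, 5000).
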
%%----------------------------------------------------------------- @@ -419,6 +438,8 @@ do_cmd(SysState, get_status, Parent, Mod, Debug, Misc) -> do_cmd(SysState, {debug, What}, _Parent, _Mod, Debug, Misc) -> {Res, NDebug} = debug_cmd(What, Debug), {SysState, Res, NDebug, Misc}; +do_cmd(_, {terminate, Reason}, _Parent, _Mod, Debug, Misc) -> + {{terminating, Reason}, ok, Debug, Misc}; do_cmd(suspended, {change_code, Module, Vsn, Extra}, _Parent, Mod, Debug, Misc) -> {Res, NMisc} = do_change_code(Mod, Module, Vsn, Extra, Misc), diff --git a/lib/stdlib/src/win32reg.erl b/lib/stdlib/src/win32reg.erl index 48a7e262be..38c41a5f6e 100644 --- a/lib/stdlib/src/win32reg.erl +++ b/lib/stdlib/src/win32reg.erl @@ -218,12 +218,7 @@ expand([C|Rest], [], Result) -> expand(Rest, [], [C|Result]); expand([$%|Rest], Env0, Result) -> Env = lists:reverse(Env0), - case os:getenv(Env) of - false -> - expand(Rest, [], Result); - Value -> - expand(Rest, [], lists:reverse(Value)++Result) - end; + expand(Rest, [], lists:reverse(os:getenv(Env, ""))++Result); expand([C|Rest], Env, Result) -> expand(Rest, [C|Env], Result); expand([], [], Result) -> diff --git a/lib/stdlib/src/zip.erl b/lib/stdlib/src/zip.erl index b768c6d0b9..44e75ff15b 100644 --- a/lib/stdlib/src/zip.erl +++ b/lib/stdlib/src/zip.erl @@ -214,7 +214,9 @@ -type zip_comment() :: #zip_comment{}. -type zip_file() :: #zip_file{}. --export_type([create_option/0, filename/0]). +-opaque handle() :: pid(). + +-export_type([create_option/0, filename/0, handle/0]). %% Open a zip archive with options %% @@ -500,7 +502,7 @@ do_list_dir(F, Options) -> -spec(t(Archive) -> ok when Archive :: file:name() | binary() | ZipHandle, - ZipHandle :: pid()). + ZipHandle :: handle()). t(F) when is_pid(F) -> zip_t(F); t(F) when is_record(F, openzip) -> openzip_t(F); @@ -524,7 +526,7 @@ do_t(F, RawPrint) -> -spec(tt(Archive) -> ok when Archive :: file:name() | binary() | ZipHandle, - ZipHandle :: pid()). + ZipHandle :: handle()). tt(F) when is_pid(F) -> zip_tt(F); tt(F) when is_record(F, openzip) -> openzip_tt(F); @@ -1114,15 +1116,19 @@ local_file_header_from_info_method_name(#file_info{mtime = MTime}, file_name_length = length(Name), extra_field_length = 0}. +server_init(Parent) -> + %% we want to know if our parent dies + process_flag(trap_exit, true), + server_loop(Parent, not_open). %% small, simple, stupid zip-archive server -server_loop(OpenZip) -> +server_loop(Parent, OpenZip) -> receive {From, {open, Archive, Options}} -> case openzip_open(Archive, Options) of {ok, NewOpenZip} -> From ! {self(), {ok, self()}}, - server_loop(NewOpenZip); + server_loop(Parent, NewOpenZip); Error -> From ! {self(), Error} end; @@ -1130,43 +1136,47 @@ server_loop(OpenZip) -> From ! {self(), openzip_close(OpenZip)}; {From, get} -> From ! {self(), openzip_get(OpenZip)}, - server_loop(OpenZip); + server_loop(Parent, OpenZip); {From, {get, FileName}} -> From ! {self(), openzip_get(FileName, OpenZip)}, - server_loop(OpenZip); + server_loop(Parent, OpenZip); {From, list_dir} -> From ! {self(), openzip_list_dir(OpenZip)}, - server_loop(OpenZip); + server_loop(Parent, OpenZip); {From, {list_dir, Opts}} -> From ! {self(), openzip_list_dir(OpenZip, Opts)}, - server_loop(OpenZip); + server_loop(Parent, OpenZip); {From, get_state} -> From ! {self(), OpenZip}, - server_loop(OpenZip); + server_loop(Parent, OpenZip); + {'EXIT', Parent, Reason} -> + openzip_close(OpenZip), + exit({parent_died, Reason}); _ -> {error, bad_msg} end. 
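[editor's sketch] With the zip handle now opaque and the archive server linked to the process that opened it, the handle is cleaned up if the opener dies. A hedged sketch of the handle API (the archive path is made up, and the exact shape of the extracted entries is assumed from the 'memory' option):

    {ok, Handle} = zip:zip_open("priv/data.zip", [memory]),   %% hypothetical path
    {ok, _Entries} = zip:zip_list_dir(Handle),
    {ok, _Extracted} = zip:zip_get(Handle),    %% with 'memory', entries come back as binaries
    ok = zip:zip_close(Handle).
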
-spec(zip_open(Archive) -> {ok, ZipHandle} | {error, Reason} when Archive :: file:name() | binary(), - ZipHandle :: pid(), + ZipHandle :: handle(), Reason :: term()). zip_open(Archive) -> zip_open(Archive, []). -spec(zip_open(Archive, Options) -> {ok, ZipHandle} | {error, Reason} when Archive :: file:name() | binary(), - ZipHandle :: pid(), + ZipHandle :: handle(), Options :: [Option], Option :: cooked | memory | {cwd, CWD :: file:filename()}, Reason :: term()). zip_open(Archive, Options) -> - Pid = spawn(fun() -> server_loop(not_open) end), - request(self(), Pid, {open, Archive, Options}). + Self = self(), + Pid = spawn_link(fun() -> server_init(Self) end), + request(Self, Pid, {open, Archive, Options}). -spec(zip_get(ZipHandle) -> {ok, [Result]} | {error, Reason} when - ZipHandle :: pid(), + ZipHandle :: handle(), Result :: file:name() | {file:name(), binary()}, Reason :: term()). @@ -1174,14 +1184,14 @@ zip_get(Pid) when is_pid(Pid) -> request(self(), Pid, get). -spec(zip_close(ZipHandle) -> ok | {error, einval} when - ZipHandle :: pid()). + ZipHandle :: handle()). zip_close(Pid) when is_pid(Pid) -> request(self(), Pid, close). -spec(zip_get(FileName, ZipHandle) -> {ok, Result} | {error, Reason} when FileName :: file:name(), - ZipHandle :: pid(), + ZipHandle :: handle(), Result :: file:name() | {file:name(), binary()}, Reason :: term()). @@ -1190,7 +1200,7 @@ zip_get(FileName, Pid) when is_pid(Pid) -> -spec(zip_list_dir(ZipHandle) -> {ok, Result} | {error, Reason} when Result :: [zip_comment() | zip_file()], - ZipHandle :: pid(), + ZipHandle :: handle(), Reason :: term()). zip_list_dir(Pid) when is_pid(Pid) -> diff --git a/lib/stdlib/test/binary_module_SUITE.erl b/lib/stdlib/test/binary_module_SUITE.erl index 32cec0db6f..f828c70b63 100644 --- a/lib/stdlib/test/binary_module_SUITE.erl +++ b/lib/stdlib/test/binary_module_SUITE.erl @@ -506,12 +506,35 @@ do_interesting(Module) -> ?line [<<1,2,3>>,<<6>>] = Module:split(<<1,2,3,4,5,6,7,8>>, [<<4,5>>,<<7>>,<<8>>], [global,trim]), + ?line [<<1,2,3>>,<<6>>] = Module:split(<<1,2,3,4,5,6,7,8>>, + [<<4,5>>,<<7>>,<<8>>], + [global,trim_all]), ?line [<<1,2,3,4,5,6,7,8>>] = Module:split(<<1,2,3,4,5,6,7,8>>, [<<4,5>>,<<7>>,<<8>>], [global,trim,{scope,{0,4}}]), ?line [<<1,2,3>>,<<6,7,8>>] = Module:split(<<1,2,3,4,5,6,7,8>>, [<<4,5>>,<<7>>,<<8>>], [global,trim,{scope,{0,5}}]), + + ?line [<<>>,<<>>,<<3>>,<<6,7,8>>] = Module:split(<<1,2,3,4,5,6,7,8>>, + [<<1>>,<<2>>,<<4,5>>], + [global,trim]), + ?line [<<3>>,<<6,7,8>>] = Module:split(<<1,2,3,4,5,6,7,8>>, + [<<1>>,<<2>>,<<4,5>>], + [global,trim_all]), + + ?line [<<1,2,3>>,<<>>,<<7,8>>] = Module:split(<<1,2,3,4,5,6,7,8>>, + [<<4,5>>,<<6>>], + [global,trim]), + ?line [<<1,2,3>>,<<7,8>>] = Module:split(<<1,2,3,4,5,6,7,8>>, + [<<4,5>>,<<6>>], + [global,trim_all]), + ?line [<<>>,<<>>,<<3>>,<<>>,<<6>>] = Module:split(<<1,2,3,4,5,6,7,8>>, + [<<1>>,<<2>>,<<4>>,<<5>>,<<7>>,<<8>>], + [global,trim]), + ?line [<<3>>,<<6>>] = Module:split(<<1,2,3,4,5,6,7,8>>, + [<<1>>,<<2>>,<<4>>,<<5>>,<<7>>,<<8>>], + [global,trim_all]), ?line badarg = ?MASK_ERROR( Module:replace(<<1,2,3,4,5,6,7,8>>, [<<4,5>>,<<7>>,<<8>>],<<99>>, @@ -1247,6 +1270,8 @@ do_random_split_comp(N,NeedleRange,HaystackRange) -> true = do_split_comp(Needle,Haystack,[]), true = do_split_comp(Needle,Haystack,[global]), true = do_split_comp(Needle,Haystack,[global,trim]), + true = do_split_comp(Needle,Haystack,[global,trim_all]), + true = do_split_comp(Needle,Haystack,[global,trim,trim_all]), do_random_split_comp(N-1,NeedleRange,HaystackRange). 
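[editor's sketch] The trim_all option exercised by the tests above differs from trim in that it removes every empty part of the split result, not only the trailing ones:

    [<<>>,<<"a">>,<<"b">>] = binary:split(<<"..a..b..">>, <<"..">>, [global, trim]),
    [<<"a">>,<<"b">>]      = binary:split(<<"..a..b..">>, <<"..">>, [global, trim_all]).
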
do_random_split_comp2(0,_,_) -> ok; @@ -1257,6 +1282,9 @@ do_random_split_comp2(N,NeedleRange,HaystackRange) -> _ <- lists:duplicate(NumNeedles,a)], true = do_split_comp(Needles,Haystack,[]), true = do_split_comp(Needles,Haystack,[global]), + true = do_split_comp(Needles,Haystack,[global,trim]), + true = do_split_comp(Needles,Haystack,[global,trim_all]), + true = do_split_comp(Needles,Haystack,[global,trim,trim_all]), do_random_split_comp2(N-1,NeedleRange,HaystackRange). do_split_comp(N,H,Opts) -> diff --git a/lib/stdlib/test/binref.erl b/lib/stdlib/test/binref.erl index 6d96736ef3..a52ea98e5a 100644 --- a/lib/stdlib/test/binref.erl +++ b/lib/stdlib/test/binref.erl @@ -155,7 +155,8 @@ split(Haystack,Needles0,Options) -> true -> exit(badtype) end, - {Part,Global,Trim} = get_opts_split(Options,{nomatch,false,false}), + {Part,Global,Trim,TrimAll} = + get_opts_split(Options,{nomatch,false,false,false}), {Start,End,NewStack} = case Part of nomatch -> @@ -180,20 +181,24 @@ split(Haystack,Needles0,Options) -> [X] end end, - do_split(Haystack,MList,0,Trim) + do_split(Haystack,MList,0,Trim,TrimAll) catch _:_ -> erlang:error(badarg) end. -do_split(H,[],N,true) when N >= byte_size(H) -> +do_split(H,[],N,true,_) when N >= byte_size(H) -> []; -do_split(H,[],N,_) -> +do_split(H,[],N,_,true) when N >= byte_size(H) -> + []; +do_split(H,[],N,_,_) -> [part(H,{N,byte_size(H)-N})]; -do_split(H,[{A,B}|T],N,Trim) -> +do_split(H,[{A,B}|T],N,Trim,TrimAll) -> case part(H,{N,A-N}) of + <<>> when TrimAll == true -> + do_split(H,T,A+B,Trim,TrimAll); <<>> -> - Rest = do_split(H,T,A+B,Trim), + Rest = do_split(H,T,A+B,Trim,TrimAll), case {Trim, Rest} of {true,[]} -> []; @@ -201,7 +206,7 @@ do_split(H,[{A,B}|T],N,Trim) -> [<<>> | Rest] end; Oth -> - [Oth | do_split(H,T,A+B,Trim)] + [Oth | do_split(H,T,A+B,Trim,TrimAll)] end. @@ -565,14 +570,16 @@ get_opts_match([{scope,{A,B}} | T],_Part) -> get_opts_match(_,_) -> throw(badopt). -get_opts_split([],{Part,Global,Trim}) -> - {Part,Global,Trim}; -get_opts_split([{scope,{A,B}} | T],{_Part,Global,Trim}) -> - get_opts_split(T,{{A,B},Global,Trim}); -get_opts_split([global | T],{Part,_Global,Trim}) -> - get_opts_split(T,{Part,true,Trim}); -get_opts_split([trim | T],{Part,Global,_Trim}) -> - get_opts_split(T,{Part,Global,true}); +get_opts_split([],{Part,Global,Trim,TrimAll}) -> + {Part,Global,Trim,TrimAll}; +get_opts_split([{scope,{A,B}} | T],{_Part,Global,Trim,TrimAll}) -> + get_opts_split(T,{{A,B},Global,Trim,TrimAll}); +get_opts_split([global | T],{Part,_Global,Trim,TrimAll}) -> + get_opts_split(T,{Part,true,Trim,TrimAll}); +get_opts_split([trim | T],{Part,Global,_Trim,TrimAll}) -> + get_opts_split(T,{Part,Global,true,TrimAll}); +get_opts_split([trim_all | T],{Part,Global,Trim,_TrimAll}) -> + get_opts_split(T,{Part,Global,Trim,true}); get_opts_split(_,_) -> throw(badopt). 
diff --git a/lib/stdlib/test/erl_eval_SUITE.erl b/lib/stdlib/test/erl_eval_SUITE.erl index b55324161b..3427f431c5 100644 --- a/lib/stdlib/test/erl_eval_SUITE.erl +++ b/lib/stdlib/test/erl_eval_SUITE.erl @@ -1458,6 +1458,30 @@ eep43(Config) when is_list(Config) -> "lists:map(fun (X) -> X#{price := 0} end, [#{hello => 0, price => nil}]).", [#{hello => 0, price => 0}]), + check(fun () -> + Map = #{ <<33:333>> => "wat" }, + #{ <<33:333>> := "wat" } = Map + end, + "begin " + " Map = #{ <<33:333>> => \"wat\" }, " + " #{ <<33:333>> := \"wat\" } = Map " + "end.", + #{ <<33:333>> => "wat" }), + check(fun () -> + K1 = 1, + K2 = <<42:301>>, + K3 = {3,K2}, + Map = #{ K1 => 1, K2 => 2, K3 => 3, {2,2} => 4}, + #{ K1 := 1, K2 := 2, K3 := 3, {2,2} := 4} = Map + end, + "begin " + " K1 = 1, " + " K2 = <<42:301>>, " + " K3 = {3,K2}, " + " Map = #{ K1 => 1, K2 => 2, K3 => 3, {2,2} => 4}, " + " #{ K1 := 1, K2 := 2, K3 := 3, {2,2} := 4} = Map " + "end.", + #{ 1 => 1, <<42:301>> => 2, {3,<<42:301>>} => 3, {2,2} => 4}), error_check("[camembert]#{}.", {badarg,[camembert]}), error_check("#{} = 1.", {badmatch,1}), ok. diff --git a/lib/stdlib/test/erl_internal_SUITE.erl b/lib/stdlib/test/erl_internal_SUITE.erl index b6b3c004ea..197a7a33eb 100644 --- a/lib/stdlib/test/erl_internal_SUITE.erl +++ b/lib/stdlib/test/erl_internal_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1999-2011. All Rights Reserved. +%% Copyright Ericsson AB 1999-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -51,7 +51,7 @@ end_per_group(_GroupName, Config) -> -define(default_timeout, ?t:minutes(2)). init_per_testcase(_Case, Config) -> - ?line Dog = test_server:timetrap(?default_timeout), + Dog = test_server:timetrap(?default_timeout), [{watchdog, Dog}|Config]. end_per_testcase(_Case, Config) -> @@ -63,27 +63,50 @@ behav(suite) -> []; behav(doc) -> ["Check that the behaviour callbacks are correctly defined"]; behav(_) -> - ?line check_behav_list([{start,2}, {stop,1}], - application:behaviour_info(callbacks)), - ?line check_behav_list([{init,1}, {handle_call,3}, {handle_cast,2}, - {handle_info,2}, {terminate,2}, {code_change,3}], - gen_server:behaviour_info(callbacks)), - ?line check_behav_list([{init,1}, {handle_event,3}, {handle_sync_event,4}, - {handle_info,3}, {terminate,3}, {code_change,4}], - gen_fsm:behaviour_info(callbacks)), - ?line check_behav_list([{init,1}, {handle_event,2}, {handle_call,2}, - {handle_info,2}, {terminate,2}, {code_change,3}], - gen_event:behaviour_info(callbacks)), - ?line check_behav_list( [{init,1}, {terminate,2}], - supervisor_bridge:behaviour_info(callbacks)), - ?line check_behav_list([{init,1}], - supervisor:behaviour_info(callbacks)), - ok. + Modules = [application, gen_server, gen_fsm, gen_event, + supervisor_bridge, supervisor], + lists:foreach(fun check_behav/1, Modules). + +check_behav(Module) -> + Callbacks = callbacks(Module), + Optional = optional_callbacks(Module), + check_behav_list(Callbacks, Module:behaviour_info(callbacks)), + check_behav_list(Optional, Module:behaviour_info(optional_callbacks)). check_behav_list([], []) -> ok; check_behav_list([L | L1], L2) -> - ?line true = lists:member(L, L2), - ?line L3 = lists:delete(L, L2), + true = lists:member(L, L2), + L3 = lists:delete(L, L2), check_behav_list(L1, L3). 
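[editor's sketch] The erl_eval_SUITE additions above exercise bound variables and arbitrary ground terms as map keys, in both construction and matching; the same now works in ordinary code and in the evaluator:

    K = <<42:301>>,
    M = #{K => 2, {3, K} => 3},
    #{K := 2, {3, K} := 3} = M.
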
- +callbacks(application) -> + [{start,2}, {stop,1}]; +callbacks(gen_server) -> + [{init,1}, {handle_call,3}, {handle_cast,2}, + {handle_info,2}, {terminate,2}, {code_change,3}, + {format_status,2}]; +callbacks(gen_fsm) -> + [{init,1}, {handle_event,3}, {handle_sync_event,4}, + {handle_info,3}, {terminate,3}, {code_change,4}, + {format_status,2}]; +callbacks(gen_event) -> + [{init,1}, {handle_event,2}, {handle_call,2}, + {handle_info,2}, {terminate,2}, {code_change,3}, + {format_status,2}]; +callbacks(supervisor_bridge) -> + [{init,1}, {terminate,2}]; +callbacks(supervisor) -> + [{init,1}]. + +optional_callbacks(application) -> + []; +optional_callbacks(gen_server) -> + [{format_status,2}]; +optional_callbacks(gen_fsm) -> + [{format_status,2}]; +optional_callbacks(gen_event) -> + [{format_status,2}]; +optional_callbacks(supervisor_bridge) -> + []; +optional_callbacks(supervisor) -> + []. diff --git a/lib/stdlib/test/erl_lint_SUITE.erl b/lib/stdlib/test/erl_lint_SUITE.erl index ea61b2082b..a7c3fd3c2e 100644 --- a/lib/stdlib/test/erl_lint_SUITE.erl +++ b/lib/stdlib/test/erl_lint_SUITE.erl @@ -42,6 +42,7 @@ unused_vars_warn_rec/1, unused_vars_warn_fun/1, unused_vars_OTP_4858/1, + unused_unsafe_vars_warn/1, export_vars_warn/1, shadow_vars/1, unused_import/1, @@ -55,7 +56,7 @@ otp_11772/1, otp_11771/1, otp_11872/1, export_all/1, bif_clash/1, - behaviour_basic/1, behaviour_multiple/1, + behaviour_basic/1, behaviour_multiple/1, otp_11861/1, otp_7550/1, otp_8051/1, format_warn/1, @@ -63,7 +64,7 @@ too_many_arguments/1, basic_errors/1,bin_syntax_errors/1, predef/1, - maps/1,maps_type/1 + maps/1,maps_type/1,otp_11851/1 ]). % Default timetrap timeout (set in init_per_testcase). @@ -89,16 +90,16 @@ all() -> otp_5362, otp_5371, otp_7227, otp_5494, otp_5644, otp_5878, otp_5917, otp_6585, otp_6885, otp_10436, otp_11254, otp_11772, otp_11771, otp_11872, export_all, - bif_clash, behaviour_basic, behaviour_multiple, + bif_clash, behaviour_basic, behaviour_multiple, otp_11861, otp_7550, otp_8051, format_warn, {group, on_load}, too_many_arguments, basic_errors, bin_syntax_errors, predef, - maps, maps_type]. + maps, maps_type, otp_11851]. groups() -> [{unused_vars_warn, [], [unused_vars_warn_basic, unused_vars_warn_lc, unused_vars_warn_rec, unused_vars_warn_fun, - unused_vars_OTP_4858]}, + unused_vars_OTP_4858, unused_unsafe_vars_warn]}, {on_load, [], [on_load_successful, on_load_failing]}]. init_per_suite(Config) -> @@ -730,6 +731,48 @@ unused_vars_OTP_4858(Config) when is_list(Config) -> ?line [] = run(Config, Ts), ok. +unused_unsafe_vars_warn(Config) when is_list(Config) -> + Ts = [{unused_unsafe1, + <<"t1() -> + UnusedVar1 = unused1, + try + UnusedVar2 = unused2 + catch + _:_ -> + ok + end, + ok. + ">>, + [warn_unused_vars], + {warnings,[{2,erl_lint,{unused_var,'UnusedVar1'}}, + {4,erl_lint,{unused_var,'UnusedVar2'}}]}}, + {unused_unsafe2, + <<"t2() -> + try + X = 1 + catch + _:_ -> ok + end. + ">>, + [warn_unused_vars], + {warnings,[{3,erl_lint,{unused_var,'X'}}]}}, + {unused_unsafe2, + <<"t3(X, Y) -> + X andalso Y. + ">>, + [warn_unused_vars], + []}, + {unused_unsafe4, + <<"t4() -> + _ = (catch X = X = 1), + _ = case ok of _ -> fun() -> ok end end, + fun (X) -> X end. + ">>, + [warn_unused_vars], + []}], + run(Config, Ts), + ok. 
+ export_vars_warn(doc) -> "Warnings for exported variables"; export_vars_warn(suite) -> []; @@ -808,7 +851,19 @@ export_vars_warn(Config) when is_list(Config) -> [], {error,[{9,erl_lint,{unbound_var,'B'}}], [{9,erl_lint,{exported_var,'Y',{'receive',2}}}, - {10,erl_lint,{shadowed_var,'B',generate}}]}} + {10,erl_lint,{shadowed_var,'B',generate}}]}}, + + {exp4, + <<"t(X) -> + if true -> Z = X end, + case X of + 1 -> Z; + 2 -> X + end, + Z = X. + ">>, + [], + {warnings,[{7,erl_lint,{exported_var,'Z',{'if',2}}}]}} ], ?line [] = run(Config, Ts), ok. @@ -832,8 +887,15 @@ shadow_vars(Config) when is_list(Config) -> ">>, [nowarn_shadow_vars], {error,[{9,erl_lint,{unbound_var,'B'}}], - [{9,erl_lint,{exported_var,'Y',{'receive',2}}}]}}], - + [{9,erl_lint,{exported_var,'Y',{'receive',2}}}]}}, + {shadow2, + <<"t() -> + _ = (catch MS = MS = 1), % MS used unsafe + _ = case ok of _ -> fun() -> ok end end, + fun (MS) -> MS end. % MS not shadowed here + ">>, + [], + []}], ?line [] = run(Config, Ts), ok. @@ -958,6 +1020,45 @@ unsafe_vars(Config) when is_list(Config) -> [warn_unused_vars], {errors,[{3,erl_lint,{unsafe_var,'X',{'if',2}}}, {4,erl_lint,{unsafe_var,'X',{'if',2}}}], + []}}, + {unsafe8, + <<"t8(X) -> + case X of _ -> catch _Y = 1 end, + _Y." + >>, + [], + {errors,[{3,erl_lint,{unsafe_var,'_Y',{'catch',2}}}], + []}}, + {unsafe9, + <<"t9(X) -> + case X of + 1 -> + catch A = 1, % unsafe only here + B = 1, + C = 1, + D = 1; + 2 -> + A = 2, + % B not bound here + C = 2, + catch D = 2; % unsafe in two clauses + 3 -> + A = 3, + B = 3, + C = 3, + catch D = 3; % unsafe in two clauses + 4 -> + A = 4, + B = 4, + C = 4, + D = 4 + end, + {A,B,C,D}." + >>, + [], + {errors,[{24,erl_lint,{unsafe_var,'A',{'catch',4}}}, + {24,erl_lint,{unsafe_var,'B',{'case',2}}}, + {24,erl_lint,{unsafe_var,'D',{'case',2}}}], []}} ], ?line [] = run(Config, Ts), @@ -2648,8 +2749,9 @@ otp_11872(Config) when is_list(Config) -> t() -> 1. ">>, - {error,[{6,erl_lint,{undefined_type,{product,0}}}], - [{8,erl_lint,{new_var_arity_type,map}}]} = + {error,[{6,erl_lint,{undefined_type,{product,0}}}, + {8,erl_lint,{undefined_type,{dict,0}}}], + [{8,erl_lint,{new_builtin_type,{map,0}}}]} = run_test2(Config, Ts, []), ok. @@ -3080,6 +3182,193 @@ behaviour_multiple(Config) when is_list(Config) -> ?line [] = run(Config, Ts), ok. +otp_11861(doc) -> + "OTP-11861. behaviour_info() and -callback."; +otp_11861(suite) -> []; +otp_11861(Conf) when is_list(Conf) -> + CallbackFiles = [callback1, callback2, callback3, + bad_behaviour1, bad_behaviour2], + lists:foreach(fun(M) -> + F = filename:join(?datadir, M), + Opts = [{outdir,?privdir}, return], + {ok, M, []} = compile:file(F, Opts) + end, CallbackFiles), + CodePath = code:get_path(), + true = code:add_path(?privdir), + Ts = [{otp_11861_1, + <<" + -export([b1/1]). + -behaviour(callback1). + -behaviour(callback2). + + -spec b1(atom()) -> integer(). + b1(A) when is_atom(A)-> + 3. + ">>, + [], + %% b2/1 is optional in both modules + {warnings,[{4,erl_lint, + {conflicting_behaviours,{b1,1},callback2,3,callback1}}]}}, + {otp_11861_2, + <<" + -export([b2/1]). + -behaviour(callback1). + -behaviour(callback2). + + -spec b2(integer()) -> atom(). + b2(I) when is_integer(I)-> + a. + ">>, + [], + %% b2/1 is optional in callback2, but not in callback1 + {warnings,[{3,erl_lint,{undefined_behaviour_func,{b1,1},callback1}}, + {4,erl_lint, + {conflicting_behaviours,{b2,1},callback2,3,callback1}}]}}, + {otp_11861_3, + <<" + -callback b(_) -> atom(). + -optional_callbacks({b1,1}). 
% non-existing and ignored + ">>, + [], + []}, + {otp_11861_4, + <<" + -callback b(_) -> atom(). + -optional_callbacks([{b1,1}]). % non-existing + ">>, + [], + %% No behaviour-info(), but callback. + {errors,[{3,erl_lint,{undefined_callback,{lint_test,b1,1}}}],[]}}, + {otp_11861_5, + <<" + -optional_callbacks([{b1,1}]). % non-existing + ">>, + [], + %% No behaviour-info() and no callback: warning anyway + {errors,[{2,erl_lint,{undefined_callback,{lint_test,b1,1}}}],[]}}, + {otp_11861_6, + <<" + -optional_callbacks([b1/1]). % non-existing + behaviour_info(callbacks) -> [{b1,1}]. + ">>, + [], + %% behaviour-info() and no callback: warning anyway + {errors,[{2,erl_lint,{undefined_callback,{lint_test,b1,1}}}],[]}}, + {otp_11861_7, + <<" + -optional_callbacks([b1/1]). % non-existing + -callback b(_) -> atom(). + behaviour_info(callbacks) -> [{b1,1}]. + ">>, + [], + %% behaviour-info() callback: warning + {errors,[{2,erl_lint,{undefined_callback,{lint_test,b1,1}}}, + {3,erl_lint,{behaviour_info,{lint_test,b,1}}}], + []}}, + {otp_11861_8, + <<" + -callback b(_) -> atom(). + -optional_callbacks([b/1, {b, 1}]). + ">>, + [], + {errors,[{3,erl_lint,{redefine_optional_callback,{b,1}}}],[]}}, + {otp_11861_9, + <<" + -behaviour(gen_server). + -export([handle_call/3,handle_cast/2,handle_info/2, + code_change/3, init/1, terminate/2]). + handle_call(_, _, _) -> ok. + handle_cast(_, _) -> ok. + handle_info(_, _) -> ok. + code_change(_, _, _) -> ok. + init(_) -> ok. + terminate(_, _) -> ok. + ">>, + [], + []}, + {otp_11861_9, + <<" + -behaviour(gen_server). + -export([handle_call/3,handle_cast/2,handle_info/2, + code_change/3, init/1, terminate/2, format_status/2]). + handle_call(_, _, _) -> ok. + handle_cast(_, _) -> ok. + handle_info(_, _) -> ok. + code_change(_, _, _) -> ok. + init(_) -> ok. + terminate(_, _) -> ok. + format_status(_, _) -> ok. % optional callback + ">>, + [], + %% Nothing... + []}, + {otp_11861_10, + <<" + -optional_callbacks([{b1,1,bad}]). % badly formed and ignored + behaviour_info(callbacks) -> [{b1,1}]. + ">>, + [], + []}, + {otp_11861_11, + <<" + -behaviour(bad_behaviour1). + ">>, + [], + {warnings,[{2,erl_lint, + {ill_defined_behaviour_callbacks,bad_behaviour1}}]}}, + {otp_11861_12, + <<" + -behaviour(non_existing_behaviour). + ">>, + [], + {warnings,[{2,erl_lint, + {undefined_behaviour,non_existing_behaviour}}]}}, + {otp_11861_13, + <<" + -behaviour(bad_behaviour_none). + ">>, + [], + {warnings,[{2,erl_lint,{undefined_behaviour,bad_behaviour_none}}]}}, + {otp_11861_14, + <<" + -callback b(_) -> atom(). + ">>, + [], + []}, + {otp_11861_15, + <<" + -optional_callbacks([{b1,1,bad}]). % badly formed + -callback b(_) -> atom(). + ">>, + [], + []}, + {otp_11861_16, + <<" + -callback b(_) -> atom(). + -callback b(_) -> atom(). + ">>, + [], + {errors,[{3,erl_lint,{redefine_callback,{b,1}}}],[]}}, + {otp_11861_17, + <<" + -behaviour(bad_behaviour2). + ">>, + [], + {warnings,[{2,erl_lint,{undefined_behaviour_callbacks, + bad_behaviour2}}]}}, + {otp_11861_18, + <<" + -export([f1/1]). + -behaviour(callback3). + f1(_) -> ok. + ">>, + [], + []} + ], + ?line [] = run(Conf, Ts), + true = code:set_path(CodePath), + ok. + otp_7550(doc) -> "Test that the new utf8/utf16/utf32 types do not allow size or unit specifiers."; otp_7550(Config) when is_list(Config) -> @@ -3145,8 +3434,8 @@ format_warn(Config) when is_list(Config) -> ok. 
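[editor's sketch] The otp_11861 cases above revolve around the new -optional_callbacks attribute; a minimal behaviour-module sketch (the module name is made up), mirroring what gen_server, gen_fsm and gen_event now do for format_status/2:

    -module(my_behaviour).

    -callback init(Args :: term()) -> {ok, State :: term()}.
    -callback format_status(Opt :: 'normal' | 'terminate',
                            StatusData :: [term()]) -> term().

    %% Callback modules may, but need not, export format_status/2:
    -optional_callbacks([format_status/2]).
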
format_level(Level, Count, Config) -> - ?line W = get_compilation_warnings(Config, "format", - [{warn_format, Level}]), + ?line W = get_compilation_result(Config, "format", + [{warn_format, Level}]), %% Pick out the 'format' warnings. ?line FW = lists:filter(fun({_Line, erl_lint, {format_error, _}}) -> true; (_) -> false @@ -3330,42 +3619,22 @@ bin_syntax_errors(Config) -> ok. predef(doc) -> - "OTP-10342: Predefined types: array(), digraph(), and so on"; + "OTP-10342: No longer predefined types: array(), digraph(), and so on"; predef(suite) -> []; predef(Config) when is_list(Config) -> - W = get_compilation_warnings(Config, "predef", []), + W = get_compilation_result(Config, "predef", []), [] = W, - W2 = get_compilation_warnings(Config, "predef2", []), - Tag = deprecated_builtin_type, - [{7,erl_lint,{Tag,{array,0},{array,array,1},"OTP 18.0"}}, - {12,erl_lint,{Tag,{dict,0},{dict,dict,2},"OTP 18.0"}}, - {17,erl_lint,{Tag,{digraph,0},{digraph,graph},"OTP 18.0"}}, - {27,erl_lint,{Tag,{gb_set,0},{gb_sets,set,1},"OTP 18.0"}}, - {32,erl_lint,{Tag,{gb_tree,0},{gb_trees,tree,2},"OTP 18.0"}}, - {37,erl_lint,{Tag,{queue,0},{queue,queue,1},"OTP 18.0"}}, - {42,erl_lint,{Tag,{set,0},{sets,set,1},"OTP 18.0"}}, - {47,erl_lint,{Tag,{tid,0},{ets,tid},"OTP 18.0"}}] = W2, - Ts = [{otp_10342_1, - <<"-compile(nowarn_deprecated_type). - - -spec t(dict()) -> non_neg_integer(). - - t(D) -> - erlang:phash2(D, 3000). - ">>, - {[nowarn_unused_function]}, - []}, - {otp_10342_2, - <<"-spec t(dict()) -> non_neg_integer(). - - t(D) -> - erlang:phash2(D, 3000). - ">>, - {[nowarn_unused_function]}, - {warnings,[{1,erl_lint, - {deprecated_builtin_type,{dict,0},{dict,dict,2}, - "OTP 18.0"}}]}}], - [] = run(Config, Ts), + %% dict(), digraph() and so on were removed in Erlang/OTP 18.0. + E2 = get_compilation_result(Config, "predef2", []), + Tag = undefined_type, + {[{7,erl_lint,{Tag,{array,0}}}, + {12,erl_lint,{Tag,{dict,0}}}, + {17,erl_lint,{Tag,{digraph,0}}}, + {27,erl_lint,{Tag,{gb_set,0}}}, + {32,erl_lint,{Tag,{gb_tree,0}}}, + {37,erl_lint,{Tag,{queue,0}}}, + {42,erl_lint,{Tag,{set,0}}}, + {47,erl_lint,{Tag,{tid,0}}}],[]} = E2, ok. maps(Config) -> @@ -3398,7 +3667,8 @@ maps(Config) -> g := 1 + 1, h := _, i := (_X = _Y), - j := (x ! y) }) -> + j := (x ! y), + <<0:300>> := 33}) -> {A,F}. ">>, [], @@ -3411,9 +3681,10 @@ maps(Config) -> {errors,[{1,erl_lint,illegal_map_construction}, {1,erl_lint,{unbound_var,'X'}}], []}}, - {errors_in_map_keys, + {legal_map_construction, <<"t(V) -> #{ a => 1, #{a=>V} => 2, + #{{a,V}=>V} => 2, #{ \"hi\" => wazzup, hi => ho } => yep, [try a catch _:_ -> b end] => nope, ok => 1.0, @@ -3425,11 +3696,7 @@ maps(Config) -> }. ">>, [], - {errors,[{2,erl_lint,{illegal_map_key_variable,'V'}}, - {4,erl_lint,illegal_map_key}, - {6,erl_lint,illegal_map_key}, - {8,erl_lint,illegal_map_key}, - {10,erl_lint,illegal_map_key}],[]}}, + []}, {errors_in_map_keys_pattern, <<"t(#{ a := 2, #{} := A, @@ -3440,8 +3707,14 @@ maps(Config) -> A. ">>, [], - {errors,[{4,erl_lint,illegal_map_key}, - {6,erl_lint,{illegal_map_key_variable,'V'}}],[]}}], + {errors,[{4,erl_lint,illegal_map_construction}, + {6,erl_lint,illegal_map_key}],[]}}, + {unused_vars_with_empty_maps, + <<"t(Foo, Bar, Baz) -> {#{},#{}}.">>, + [warn_unused_variables], + {warnings,[{1,erl_lint,{unused_var,'Bar'}}, + {1,erl_lint,{unused_var,'Baz'}}, + {1,erl_lint,{unused_var,'Foo'}}]}}], [] = run(Config, Ts), ok. @@ -3470,7 +3743,94 @@ maps_type(Config) when is_list(Config) -> t(M) -> M. 
">>, [], - {warnings,[{3,erl_lint,{new_var_arity_type,map}}]}}], + {warnings,[{3,erl_lint,{new_builtin_type,{map,0}}}]}}], + [] = run(Config, Ts), + ok. + +otp_11851(doc) -> + "OTP-11851: More atoms can be used as type names + bug fixes."; +otp_11851(Config) when is_list(Config) -> + Ts = [ + {otp_11851_1, + <<"-export([t/0]). + -type range(A, B) :: A | B. + + -type union(A) :: A. + + -type product() :: integer(). + + -type tuple(A) :: A. + + -type map(A) :: A. + + -type record() :: a | b. + + -type integer(A) :: A. + + -type atom(A) :: A. + + -type binary(A, B) :: A | B. + + -type 'fun'() :: integer(). + + -type 'fun'(X) :: X. + + -type 'fun'(X, Y) :: X | Y. + + -type all() :: range(atom(), integer()) | union(pid()) | product() + | tuple(reference()) | map(function()) | record() + | integer(atom()) | atom(integer()) + | binary(pid(), tuple()) | 'fun'(port()) + | 'fun'() | 'fun'(<<>>, 'none'). + + -spec t() -> all(). + + t() -> + a. + ">>, + [], + []}, + {otp_11851_2, + <<"-export([a/1, b/1, t/0]). + + -callback b(_) -> integer(). + + -callback ?MODULE:a(_) -> integer(). + + a(_) -> 3. + + b(_) -> a. + + t()-> a. + ">>, + [], + {errors,[{5,erl_lint,{bad_callback,{lint_test,a,1}}}],[]}}, + {otp_11851_3, + <<"-export([a/1]). + + -spec a(_A) -> boolean() when + _ :: atom(), + _A :: integer(). + + a(_) -> true. + ">>, + [], + {errors,[{4,erl_parse,"bad type variable"}],[]}}, + {otp_11851_4, + <<" + -spec a(_) -> ok. + -spec a(_) -> ok. + + -spec ?MODULE:a(_) -> ok. + -spec ?MODULE:a(_) -> ok. + ">>, + [], + {errors,[{3,erl_lint,{redefine_spec,{a,1}}}, + {5,erl_lint,{redefine_spec,{lint_test,a,1}}}, + {6,erl_lint,{redefine_spec,{lint_test,a,1}}}, + {6,erl_lint,{spec_fun_undefined,{a,1}}}], + []}} + ], [] = run(Config, Ts), ok. @@ -3487,9 +3847,9 @@ run(Config, Tests) -> end, lists:foldl(F, [], Tests). -%% Compiles a test file and returns the list of warnings. +%% Compiles a test file and returns the list of warnings/errors. -get_compilation_warnings(Conf, Filename, Warnings) -> +get_compilation_result(Conf, Filename, Warnings) -> ?line DataDir = ?datadir, ?line File = filename:join(DataDir, Filename), {ok,Bin} = file:read_file(File++".erl"), @@ -3498,6 +3858,7 @@ get_compilation_warnings(Conf, Filename, Warnings) -> Test = lists:nthtail(Start+Length, FileS), case run_test(Conf, Test, Warnings) of {warnings, Ws} -> Ws; + {errors,Es,Ws} -> {Es,Ws}; [] -> [] end. diff --git a/lib/stdlib/test/erl_lint_SUITE_data/bad_behaviour1.erl b/lib/stdlib/test/erl_lint_SUITE_data/bad_behaviour1.erl new file mode 100644 index 0000000000..230f4b4519 --- /dev/null +++ b/lib/stdlib/test/erl_lint_SUITE_data/bad_behaviour1.erl @@ -0,0 +1,6 @@ +-module(bad_behaviour1). + +-export([behaviour_info/1]). + +behaviour_info(callbacks) -> + [{a,1,bad}]. diff --git a/lib/stdlib/test/erl_lint_SUITE_data/bad_behaviour2.erl b/lib/stdlib/test/erl_lint_SUITE_data/bad_behaviour2.erl new file mode 100644 index 0000000000..bb755ce18b --- /dev/null +++ b/lib/stdlib/test/erl_lint_SUITE_data/bad_behaviour2.erl @@ -0,0 +1,6 @@ +-module(bad_behaviour2). + +-export([behaviour_info/1]). + +behaviour_info(callbacks) -> + undefined. diff --git a/lib/stdlib/test/erl_lint_SUITE_data/callback1.erl b/lib/stdlib/test/erl_lint_SUITE_data/callback1.erl new file mode 100644 index 0000000000..3cc5b51879 --- /dev/null +++ b/lib/stdlib/test/erl_lint_SUITE_data/callback1.erl @@ -0,0 +1,6 @@ +-module(callback1). + +-callback b1(I :: integer()) -> atom(). +-callback b2(A :: atom()) -> integer(). + +-optional_callbacks([{b2,1}]). 
diff --git a/lib/stdlib/test/erl_lint_SUITE_data/callback2.erl b/lib/stdlib/test/erl_lint_SUITE_data/callback2.erl new file mode 100644 index 0000000000..211cf9f115 --- /dev/null +++ b/lib/stdlib/test/erl_lint_SUITE_data/callback2.erl @@ -0,0 +1,6 @@ +-module(callback2). + +-callback b1(I :: integer()) -> atom(). +-callback b2(A :: atom()) -> integer(). + +-optional_callbacks([b1/1, b2/1]). diff --git a/lib/stdlib/test/erl_lint_SUITE_data/callback3.erl b/lib/stdlib/test/erl_lint_SUITE_data/callback3.erl new file mode 100644 index 0000000000..97b3ecb860 --- /dev/null +++ b/lib/stdlib/test/erl_lint_SUITE_data/callback3.erl @@ -0,0 +1,8 @@ +-module(callback3). + +-export([behaviour_info/1]). + +behaviour_info(callbacks) -> + [{f1, 1}]; +behaviour_info(_) -> + undefined. diff --git a/lib/stdlib/test/erl_lint_SUITE_data/predef.erl b/lib/stdlib/test/erl_lint_SUITE_data/predef.erl index ee9073aa67..3cb7bf40f1 100644 --- a/lib/stdlib/test/erl_lint_SUITE_data/predef.erl +++ b/lib/stdlib/test/erl_lint_SUITE_data/predef.erl @@ -5,8 +5,8 @@ -export_type([array/0, digraph/0, gb_set/0]). -%% Before Erlang/OTP 17.0 local re-definitions of pre-defined opaque -%% types were ignored but did not generate any warning. +%% Since Erlang/OTP 18.0 array() and so on are no longer pre-defined, +%% so there is nothing special about them at all. -opaque array() :: atom(). -opaque digraph() :: atom(). -opaque gb_set() :: atom(). diff --git a/lib/stdlib/test/erl_pp_SUITE.erl b/lib/stdlib/test/erl_pp_SUITE.erl index 927fe0b595..f71446dd64 100644 --- a/lib/stdlib/test/erl_pp_SUITE.erl +++ b/lib/stdlib/test/erl_pp_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2006-2013. All Rights Reserved. +%% Copyright Ericsson AB 2006-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -42,7 +42,6 @@ -export([ func/1, call/1, recs/1, try_catch/1, if_then/1, receive_after/1, bits/1, head_tail/1, cond1/1, block/1, case1/1, ops/1, messages/1, - old_mnemosyne_syntax/1, import_export/1, misc_attrs/1, dialyzer_attrs/1, hook/1, neg_indent/1, @@ -50,7 +49,7 @@ otp_6321/1, otp_6911/1, otp_6914/1, otp_8150/1, otp_8238/1, otp_8473/1, otp_8522/1, otp_8567/1, otp_8664/1, otp_9147/1, - otp_10302/1, otp_10820/1, otp_11100/1]). + otp_10302/1, otp_10820/1, otp_11100/1, otp_11861/1]). %% Internal export. -export([ehook/6]). @@ -77,13 +76,13 @@ groups() -> [{expr, [], [func, call, recs, try_catch, if_then, receive_after, bits, head_tail, cond1, block, case1, ops, - messages, old_mnemosyne_syntax, maps_syntax + messages, maps_syntax ]}, {attributes, [], [misc_attrs, import_export, dialyzer_attrs]}, {tickets, [], [otp_6321, otp_6911, otp_6914, otp_8150, otp_8238, otp_8473, otp_8522, otp_8567, otp_8664, otp_9147, - otp_10302, otp_10820, otp_11100]}]. + otp_10302, otp_10820, otp_11100, otp_11861]}]. init_per_suite(Config) -> Config. @@ -561,27 +560,6 @@ messages(Config) when is_list(Config) -> ?line true = "\n" =:= lists:flatten(erl_pp:form({eof,0})), ok. -old_mnemosyne_syntax(Config) when is_list(Config) -> - %% Since we have kept the ':-' token, - %% better test that we can pretty print it. 
- R = {rule,12,sales,2, - [{clause,12, - [{var,12,'E'},{atom,12,employee}], - [], - [{generate,13, - {var,13,'E'}, - {call,13,{atom,13,table},[{atom,13,employee}]}}, - {match,14, - {record_field,14,{var,14,'E'},{atom,14,salary}}, - {atom,14,sales}}]}]}, - ?line "sales(E, employee) :-\n" - " E <- table(employee),\n" - " E.salary = sales.\n" = - lists:flatten(erl_pp:form(R)), - ok. - - - import_export(suite) -> []; import_export(Config) when is_list(Config) -> @@ -664,26 +642,6 @@ do_hook(HookFun) -> AFormChars = erl_pp:form(A, H), ?line true = AChars =:= lists:flatten(AFormChars), - R = {rule,0,sales,0, - [{clause,0,[{var,0,'E'},{atom,0,employee}],[], - [{generate,2,{var,2,'E'}, - {call,2,{atom,2,table},[{atom,2,employee}]}}, - {match,3, - {record_field,3,{var,3,'E'},{atom,3,salary}}, - {foo,Expr}}]}]}, - RChars = lists:flatten(erl_pp:rule(R, H)), - R2 = {rule,0,sales,0, - [{clause,0,[{var,0,'E'},{atom,0,employee}],[], - [{generate,2,{var,2,'E'}, - {call,2,{atom,2,table},[{atom,2,employee}]}}, - {match,3, - {record_field,3,{var,3,'E'},{atom,3,salary}}, - {call,0,{atom,0,foo},[Expr2]}}]}]}, - RChars2 = erl_pp:rule(R2), - ?line true = RChars =:= lists:flatten(RChars2), - ARChars = erl_pp:form(R, H), - ?line true = RChars =:= lists:flatten(ARChars), - ?line "INVALID-FORM:{foo,bar}:" = lists:flatten(erl_pp:expr({foo,bar})), %% A list (as before R6), not a list of lists. @@ -874,6 +832,7 @@ type_examples() -> {ex3,<<"-type paren() :: (ann2()). ">>}, {ex4,<<"-type t1() :: atom(). ">>}, {ex5,<<"-type t2() :: [t1()]. ">>}, + {ex56,<<"-type integer(A) :: A. ">>}, {ex6,<<"-type t3(Atom) :: integer(Atom). ">>}, {ex7,<<"-type '\\'t::4'() :: t3('\\'foobar'). ">>}, {ex8,<<"-type t5() :: {t1(), t3(foo)}. ">>}, @@ -1204,8 +1163,18 @@ otp_11100(Config) when is_list(Config) -> []}}), ok. +otp_11861(doc) -> + "OTP-11861. behaviour_info() and -callback."; +otp_11861(suite) -> []; +otp_11861(Config) when is_list(Config) -> + "-optional_callbacks([bar/0]).\n" = + pf({attribute,3,optional_callbacks,[{bar,0}]}), + "-optional_callbacks([{bar,1,bad}]).\n" = + pf({attribute,4,optional_callbacks,[{bar,1,bad}]}), + ok. + pf(Form) -> - lists:flatten(erl_pp:form(Form,none)). + lists:flatten(erl_pp:form(Form, none)). %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% diff --git a/lib/stdlib/test/erl_scan_SUITE.erl b/lib/stdlib/test/erl_scan_SUITE.erl index 9be9f641c8..6ef947f0e3 100644 --- a/lib/stdlib/test/erl_scan_SUITE.erl +++ b/lib/stdlib/test/erl_scan_SUITE.erl @@ -226,7 +226,7 @@ atoms() -> punctuations() -> L = ["<<", "<-", "<=", "<", ">>", ">=", ">", "->", "--", "-", "++", "+", "=:=", "=/=", "=<", "=>", "==", "=", "/=", - "/", "||", "|", ":=", ":-", "::", ":"], + "/", "||", "|", ":=", "::", ":"], %% One token at a time: [begin W = list_to_atom(S), diff --git a/lib/stdlib/test/ets_SUITE.erl b/lib/stdlib/test/ets_SUITE.erl index 8dc8b2c291..9f552b5a6b 100644 --- a/lib/stdlib/test/ets_SUITE.erl +++ b/lib/stdlib/test/ets_SUITE.erl @@ -47,6 +47,7 @@ -export([ordered/1, ordered_match/1, interface_equality/1, fixtable_next/1, fixtable_insert/1, rename/1, rename_unnamed/1, evil_rename/1, update_element/1, update_counter/1, evil_update_counter/1, partly_bound/1, match_heavy/1]). +-export([update_counter_with_default/1]). -export([member/1]). -export([memory/1]). -export([select_fail/1]). @@ -77,6 +78,7 @@ -export([otp_10182/1]). -export([ets_all/1]). -export([memory_check_summary/1]). +-export([take/1]). -export([init_per_testcase/2, end_per_testcase/2]). 
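The erl_pp otp_11861 case above fixes the expected rendering of -optional_callbacks: a well-formed entry prints as Name/Arity, while a malformed one falls back to the raw term instead of crashing the pretty printer. The same two calls, shown standalone; the module name is made up:

    -module(pp_demo).
    -export([show/0]).

    show() ->
        "-optional_callbacks([bar/0]).\n" =
            lists:flatten(erl_pp:form({attribute,3,optional_callbacks,[{bar,0}]}, none)),
        "-optional_callbacks([{bar,1,bad}]).\n" =
            lists:flatten(erl_pp:form({attribute,4,optional_callbacks,[{bar,1,bad}]}, none)),
        ok.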
%% Convenience for manual testing @@ -98,7 +100,7 @@ misc1_do/1, safe_fixtable_do/1, info_do/1, dups_do/1, heavy_lookup_do/1, heavy_lookup_element_do/1, member_do/1, otp_5340_do/1, otp_7665_do/1, meta_wb_do/1, do_heavy_concurrent/1, tab2file2_do/2, exit_large_table_owner_do/2, - types_do/1, sleeper/0, memory_do/1, + types_do/1, sleeper/0, memory_do/1, update_counter_with_default_do/1, ms_tracee_dummy/1, ms_tracee_dummy/2, ms_tracee_dummy/3, ms_tracee_dummy/4 ]). @@ -135,7 +137,8 @@ all() -> {group, heavy}, ordered, ordered_match, interface_equality, fixtable_next, fixtable_insert, rename, rename_unnamed, evil_rename, update_element, - update_counter, evil_update_counter, partly_bound, + update_counter, evil_update_counter, + update_counter_with_default, partly_bound, match_heavy, {group, fold}, member, t_delete_object, t_init_table, t_whitebox, t_delete_all_objects, t_insert_list, t_test_ms, t_select_delete, t_ets_dets, @@ -153,6 +156,7 @@ all() -> otp_9932, otp_9423, ets_all, + take, memory_check_summary]. % MUST BE LAST @@ -1759,6 +1763,14 @@ update_counter_do(Opts) -> OrdSet = ets_new(ordered_set,[ordered_set | Opts]), update_counter_for(Set), update_counter_for(OrdSet), + ets:delete_all_objects(Set), + ets:delete_all_objects(OrdSet), + ets:safe_fixtable(Set, true), + ets:safe_fixtable(OrdSet, true), + update_counter_for(Set), + update_counter_for(OrdSet), + ets:safe_fixtable(Set, false), + ets:safe_fixtable(OrdSet, false), ets:delete(Set), ets:delete(OrdSet), update_counter_neg(Opts). @@ -1778,10 +1790,14 @@ update_counter_for(T) -> ?line {NewObj, Ret} = uc_mimic(Obj,Arg3), ArgHash = erlang:phash2({T,a,Arg3}), %%io:format("update_counter(~p, ~p, ~p) expecting ~p\n",[T,a,Arg3,Ret]), + [DefaultObj] = ets:lookup(T, a), ?line Ret = ets:update_counter(T,a,Arg3), + Ret = ets:update_counter(T, b, Arg3, DefaultObj), % Use other key ?line ArgHash = erlang:phash2({T,a,Arg3}), %%io:format("NewObj=~p~n ",[NewObj]), ?line [NewObj] = ets:lookup(T,a), + true = ets:lookup(T, b) =:= [setelement(1, NewObj, b)], + ets:delete(T, b), Myself(NewObj,Times-1,Arg3,Myself) end, @@ -2006,6 +2022,44 @@ evil_counter_1(Iter, T) -> ets:update_counter(T, dracula, 1), evil_counter_1(Iter-1, T). +update_counter_with_default(Config) when is_list(Config) -> + repeat_for_opts(update_counter_with_default_do). + +update_counter_with_default_do(Opts) -> + T1 = ets_new(a, [set | Opts]), + %% Insert default object. + 3 = ets:update_counter(T1, foo, 2, {beaufort,1}), + %% Increment. + 5 = ets:update_counter(T1, foo, 2, {cabecou,1}), + %% Increment with list. + [9] = ets:update_counter(T1, foo, [{2,4}], {camembert,1}), + %% Same with non-immediate key. + 3 = ets:update_counter(T1, {foo,bar}, 2, {{chaource,chevrotin},1}), + 5 = ets:update_counter(T1, {foo,bar}, 2, {{cantal,comté},1}), + [9] = ets:update_counter(T1, {foo,bar}, [{2,4}], {{emmental,de,savoie},1}), + %% Same with ordered set. + T2 = ets_new(b, [ordered_set | Opts]), + 3 = ets:update_counter(T2, foo, 2, {maroilles,1}), + 5 = ets:update_counter(T2, foo, 2, {mimolette,1}), + [9] = ets:update_counter(T2, foo, [{2,4}], {morbier,1}), + 3 = ets:update_counter(T2, {foo,bar}, 2, {{laguiole},1}), + 5 = ets:update_counter(T2, {foo,bar}, 2, {{saint,nectaire},1}), + [9] = ets:update_counter(T2, {foo,bar}, [{2,4}], {{rocamadour},1}), + %% Arithmetically-equal keys. + 3 = ets:update_counter(T2, 1.0, 2, {1,1}), + 5 = ets:update_counter(T2, 1, 2, {1,1}), + 7 = ets:update_counter(T2, 1, 2, {1.0,1}), + %% Same with reversed type difference. 
+ 3 = ets:update_counter(T2, 2, 2, {2.0,1}), + 5 = ets:update_counter(T2, 2.0, 2, {2.0,1}), + 7 = ets:update_counter(T2, 2.0, 2, {2,1}), + %% bar is not an integer. + {'EXIT',{badarg,_}} = (catch ets:update_counter(T1, qux, 3, {saint,félicien})), + %% No third element in default value. + {'EXIT',{badarg,_}} = (catch ets:update_counter(T1, qux, [{3,1}], {roquefort,1})), + + ok. + fixtable_next(doc) -> ["Check that a first-next sequence always works on a fixed table"]; fixtable_next(suite) -> @@ -3777,6 +3831,7 @@ match_object_do(Opts) -> ?line ets:insert(Tab,{{one,5},5}), ?line ets:insert(Tab,{{two,4},4}), ?line ets:insert(Tab,{{two,5},6}), + ?line ets:insert(Tab, {#{camembert=>cabécou},7}), ?line case ets:match_object(Tab, {{one, '_'}, '$0'}) of [{{one,5},5},{{one,4},4}] -> ok; [{{one,4},4},{{one,5},5}] -> ok; @@ -3797,6 +3852,10 @@ match_object_do(Opts) -> [{{two,4},4},{{two,5},6}] -> ok; _ -> ?t:fail("ets:match_object() returned something funny.") end, + % Check that maps are inspected for variables. + [{#{camembert:=cabécou},7}] = + ets:match_object(Tab, {#{camembert=>'_'},7}), + {'EXIT',{badarg,_}} = (catch ets:match_object(Tab, {#{'$1'=>'_'},7})), % Check that unsucessful match returns an empty list. ?line [] = ets:match_object(Tab, {{three,'$0'}, '$92'}), % Check that '$0' equals '_'. @@ -5582,6 +5641,43 @@ ets_all_run() -> ets_all_run(). +take(Config) when is_list(Config) -> + %% Simple test for set tables. + T1 = ets_new(a, [set]), + [] = ets:take(T1, foo), + ets:insert(T1, {foo,bar}), + [] = ets:take(T1, bar), + [{foo,bar}] = ets:take(T1, foo), + [] = ets:tab2list(T1), + %% Non-immediate key. + ets:insert(T1, {{'not',<<"immediate">>},ok}), + [{{'not',<<"immediate">>},ok}] = ets:take(T1, {'not',<<"immediate">>}), + %% Same with ordered tables. + T2 = ets_new(b, [ordered_set]), + [] = ets:take(T2, foo), + ets:insert(T2, {foo,bar}), + [] = ets:take(T2, bar), + [{foo,bar}] = ets:take(T2, foo), + [] = ets:tab2list(T2), + ets:insert(T2, {{'not',<<"immediate">>},ok}), + [{{'not',<<"immediate">>},ok}] = ets:take(T2, {'not',<<"immediate">>}), + %% Arithmetically-equal keys. + ets:insert(T2, [{1.0,float},{2,integer}]), + [{1.0,float}] = ets:take(T2, 1), + [{2,integer}] = ets:take(T2, 2.0), + [] = ets:tab2list(T2), + %% Same with bag. + T3 = ets_new(c, [bag]), + ets:insert(T3, [{1,1},{1,2},{3,3}]), + [{1,1},{1,2}] = ets:take(T3, 1), + [{3,3}] = ets:take(T3, 3), + [] = ets:tab2list(T3), + ets:delete(T1), + ets:delete(T2), + ets:delete(T3), + ok. + + % % Utility functions: % diff --git a/lib/stdlib/test/filelib_SUITE.erl b/lib/stdlib/test/filelib_SUITE.erl index bd313390b3..146d810189 100644 --- a/lib/stdlib/test/filelib_SUITE.erl +++ b/lib/stdlib/test/filelib_SUITE.erl @@ -77,7 +77,8 @@ wildcard_one(Config) when is_list(Config) -> L = filelib:wildcard(Wc), L = filelib:wildcard(Wc, erl_prim_loader), L = filelib:wildcard(Wc, "."), - L = filelib:wildcard(Wc, Dir) + L = filelib:wildcard(Wc, Dir), + L = filelib:wildcard(Wc, Dir++"/.") end), ?line file:set_cwd(OldCwd), ?line ok = file:del_dir(Dir), diff --git a/lib/stdlib/test/filename_SUITE.erl b/lib/stdlib/test/filename_SUITE.erl index ecd9cff9f9..6f1d1a891d 100644 --- a/lib/stdlib/test/filename_SUITE.erl +++ b/lib/stdlib/test/filename_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1997-2012. All Rights Reserved. +%% Copyright Ericsson AB 1997-2014. All Rights Reserved. 
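The ets hunks above cover two API additions: ets:update_counter/4 takes a default object to insert when the key is missing, and ets:take/2 returns the matching objects and deletes them in one operation. A minimal usage sketch; the table and key names are made up:

    -module(ets_demo).
    -export([run/0]).

    run() ->
        T = ets:new(demo, [set]),

        %% update_counter/4: when the key is absent the default object is
        %% inserted first, then the update is applied to it.
        1 = ets:update_counter(T, hits, 1, {hits, 0}),
        2 = ets:update_counter(T, hits, 1, {hits, 0}),   %% default now ignored

        %% take/2: returns the objects stored under the key and removes them.
        [{hits, 2}] = ets:take(T, hits),
        [] = ets:lookup(T, hits),
        [] = ets:take(T, hits),                          %% missing key -> []

        ets:delete(T),
        ok.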
%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -287,38 +287,66 @@ extension(Config) when is_list(Config) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% join(Config) when is_list(Config) -> + %% Whenever joining two elements, test the equivalence between + %% join/1 and join/2 (OTP-12158) by using help function + %% filename_join/2. ?line "/" = filename:join(["/"]), ?line "/" = filename:join(["//"]), - ?line "usr/foo.erl" = filename:join("usr","foo.erl"), - ?line "/src/foo.erl" = filename:join(usr, "/src/foo.erl"), - ?line "/src/foo.erl" = filename:join(["/src/",'foo.erl']), - ?line "/src/foo.erl" = filename:join(usr, ["/sr", 'c/foo.erl']), - ?line "/src/foo.erl" = filename:join("usr", "/src/foo.erl"), + "usr/foo.erl" = filename_join("usr","foo.erl"), + "/src/foo.erl" = filename_join(usr, "/src/foo.erl"), + "/src/foo.erl" = filename_join("/src/",'foo.erl'), + "/src/foo.erl" = filename_join(usr, ["/sr", 'c/foo.erl']), + "/src/foo.erl" = filename_join("usr", "/src/foo.erl"), %% Make sure that redundant slashes work too. ?line "a/b/c/d/e/f/g" = filename:join(["a//b/c/////d//e/f/g"]), - ?line "a/b/c/d/e/f/g" = filename:join(["a//b/c/", "d//e/f/g"]), - ?line "a/b/c/d/e/f/g" = filename:join(["a//b/c", "d//e/f/g"]), - ?line "/d/e/f/g" = filename:join(["a//b/c", "/d//e/f/g"]), - ?line "/d/e/f/g" = filename:join(["a//b/c", "//d//e/f/g"]), - - ?line "foo/bar" = filename:join([$f,$o,$o,$/,[]], "bar"), + "a/b/c/d/e/f/g" = filename_join("a//b/c/", "d//e/f/g"), + "a/b/c/d/e/f/g" = filename_join("a//b/c", "d//e/f/g"), + "/d/e/f/g" = filename_join("a//b/c", "/d//e/f/g"), + "/d/e/f/g" = filename:join("a//b/c", "//d//e/f/g"), + + "foo/bar" = filename_join([$f,$o,$o,$/,[]], "bar"), + + %% Single dots - should be removed if in the middle of the path, + %% but not at the end of the path. + "/." = filename:join(["/."]), + "/" = filename:join(["/./"]), + "/." = filename:join(["/./."]), + "./." = filename:join(["./."]), + + "/a/b" = filename_join("/a/.","b"), + "/a/b/." = filename_join("/a/.","b/."), + "/a/." = filename_join("/a/.","."), + "/a/." = filename_join("/a","."), + "/a/." = filename_join("/a/.",""), + "./." = filename_join("./.","."), + "./." = filename_join("./","."), + "./." = filename_join("./.",""), + "." = filename_join(".",""), + "./." 
= filename_join(".","."), + + %% Trailing slash shall be removed - except the root + "/" = filename:join(["/"]), + "/" = filename:join(["/./"]), + "/a" = filename:join(["/a/"]), + "/b" = filename_join("/a/","/b/"), + "/a/b" = filename_join("/a/","b/"), ?line case os:type() of {win32, _} -> ?line "d:/" = filename:join(["D:/"]), ?line "d:/" = filename:join(["D:\\"]), - ?line "d:/abc" = filename:join(["D:/", "abc"]), - ?line "d:abc" = filename:join(["D:", "abc"]), + "d:/abc" = filename_join("D:/", "abc"), + "d:abc" = filename_join("D:", "abc"), ?line "a/b/c/d/e/f/g" = filename:join(["a//b\\c//\\/\\d/\\e/f\\g"]), ?line "a:usr/foo.erl" = filename:join(["A:","usr","foo.erl"]), ?line "/usr/foo.erl" = filename:join(["A:","/usr","foo.erl"]), - ?line "c:usr" = filename:join("A:","C:usr"), - ?line "a:usr" = filename:join("A:","usr"), - ?line "c:/usr" = filename:join("A:", "C:/usr"), + "c:usr" = filename_join("A:","C:usr"), + "a:usr" = filename_join("A:","usr"), + "c:/usr" = filename_join("A:", "C:/usr"), ?line "c:/usr/foo.erl" = filename:join(["A:","C:/usr","foo.erl"]), ?line "c:usr/foo.erl" = @@ -329,6 +357,11 @@ join(Config) when is_list(Config) -> ok end. +%% Make sure join([A,B]) is equivalent to join(A,B) (OTP-12158) +filename_join(A,B) -> + Res = filename:join(A,B), + Res = filename:join([A,B]). + pathtype(Config) when is_list(Config) -> ?line relative = filename:pathtype(".."), ?line relative = filename:pathtype("foo"), @@ -633,6 +666,53 @@ join_bin(Config) when is_list(Config) -> ?line <<"foo/bar">> = filename:join([$f,$o,$o,$/,[]], <<"bar">>), + %% Single dots - should be removed if in the middle of the path, + %% but not at the end of the path. + %% Also test equivalence between join/1 and join/2 (OTP-12158) + <<"/.">> = filename:join([<<"/.">>]), + <<"/">> = filename:join([<<"/./">>]), + <<"/.">> = filename:join([<<"/./.">>]), + <<"./.">> = filename:join([<<"./.">>]), + + <<"/a/b">> = filename:join([<<"/a/.">>,<<"b">>]), + <<"/a/b">> = filename:join(<<"/a/.">>,<<"b">>), + + <<"/a/b/.">> = filename:join([<<"/a/.">>,<<"b/.">>]), + <<"/a/b/.">> = filename:join(<<"/a/.">>,<<"b/.">>), + + <<"/a/.">> = filename:join([<<"/a/.">>,<<".">>]), + <<"/a/.">> = filename:join(<<"/a/.">>,<<".">>), + + <<"/a/.">> = filename:join([<<"/a">>,<<".">>]), + <<"/a/.">> = filename:join(<<"/a">>,<<".">>), + + <<"/a/.">> = filename:join([<<"/a/.">>,<<"">>]), + <<"/a/.">> = filename:join(<<"/a/.">>,<<"">>), + + <<"./.">> = filename:join([<<"./.">>,<<".">>]), + <<"./.">> = filename:join(<<"./.">>,<<".">>), + + <<"./.">> = filename:join([<<"./">>,<<".">>]), + <<"./.">> = filename:join(<<"./">>,<<".">>), + + <<"./.">> = filename:join([<<"./.">>,<<"">>]), + <<"./.">> = filename:join(<<"./.">>,<<"">>), + + <<".">> = filename:join([<<".">>,<<"">>]), + <<".">> = filename:join(<<".">>,<<"">>), + + <<"./.">> = filename:join([<<".">>,<<".">>]), + <<"./.">> = filename:join(<<".">>,<<".">>), + + %% Trailing slash shall be removed - except the root + <<"/">> = filename:join([<<"/">>]), + <<"/">> = filename:join([<<"/./">>]), + <<"/a">> = filename:join([<<"/a/">>]), + <<"/b">> = filename:join([<<"/a/">>,<<"/b/">>]), + <<"/b">> = filename:join(<<"/a/">>,<<"/b/">>), + <<"/a/b">> = filename:join([<<"/a/">>,<<"b/">>]), + <<"/a/b">> = filename:join(<<"/a/">>,<<"b/">>), + ?line case os:type() of {win32, _} -> ?line <<"d:/">> = filename:join([<<"D:/">>]), diff --git a/lib/stdlib/test/gen_event_SUITE.erl b/lib/stdlib/test/gen_event_SUITE.erl index 60a1ba8c60..576a5adfce 100644 --- a/lib/stdlib/test/gen_event_SUITE.erl +++ 
b/lib/stdlib/test/gen_event_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2013. All Rights Reserved. +%% Copyright Ericsson AB 1996-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -106,7 +106,7 @@ start(Config) when is_list(Config) -> ?line {error, {already_started, _}} = gen_event:start({global, my_dummy_name}), - exit(Pid6, shutdown), + ok = gen_event:stop({global, my_dummy_name}, shutdown, 10000), receive {'EXIT', Pid6, shutdown} -> ok after 10000 -> diff --git a/lib/stdlib/test/gen_fsm_SUITE.erl b/lib/stdlib/test/gen_fsm_SUITE.erl index 39f0442824..dabc10aec4 100644 --- a/lib/stdlib/test/gen_fsm_SUITE.erl +++ b/lib/stdlib/test/gen_fsm_SUITE.erl @@ -27,6 +27,9 @@ -export([start1/1, start2/1, start3/1, start4/1, start5/1, start6/1, start7/1, start8/1, start9/1, start10/1, start11/1, start12/1]). +-export([stop1/1, stop2/1, stop3/1, stop4/1, stop5/1, stop6/1, stop7/1, + stop8/1, stop9/1, stop10/1]). + -export([ abnormal1/1, abnormal2/1]). -export([shutdown/1]). @@ -66,6 +69,8 @@ groups() -> [{start, [], [start1, start2, start3, start4, start5, start6, start7, start8, start9, start10, start11, start12]}, + {stop, [], + [stop1, stop2, stop3, stop4, stop5, stop6, stop7, stop8, stop9, stop10]}, {abnormal, [], [abnormal1, abnormal2]}, {sys, [], [sys1, call_format_status, error_format_status, terminate_crash_format, @@ -281,6 +286,105 @@ start12(Config) when is_list(Config) -> ok. +%% Anonymous, reason 'normal' +stop1(_Config) -> + {ok, Pid} = gen_fsm:start(?MODULE, [], []), + ok = gen_fsm:stop(Pid), + false = erlang:is_process_alive(Pid), + {'EXIT',noproc} = (catch gen_fsm:stop(Pid)), + ok. + +%% Anonymous, other reason +stop2(_Config) -> + {ok,Pid} = gen_fsm:start(?MODULE, [], []), + ok = gen_fsm:stop(Pid, other_reason, infinity), + false = erlang:is_process_alive(Pid), + ok. + +%% Anonymous, invalid timeout +stop3(_Config) -> + {ok,Pid} = gen_fsm:start(?MODULE, [], []), + {'EXIT',_} = (catch gen_fsm:stop(Pid, other_reason, invalid_timeout)), + true = erlang:is_process_alive(Pid), + ok = gen_fsm:stop(Pid), + false = erlang:is_process_alive(Pid), + ok. + +%% Registered name +stop4(_Config) -> + {ok,Pid} = gen_fsm:start({local,to_stop},?MODULE, [], []), + ok = gen_fsm:stop(to_stop), + false = erlang:is_process_alive(Pid), + {'EXIT',noproc} = (catch gen_fsm:stop(to_stop)), + ok. + +%% Registered name and local node +stop5(_Config) -> + {ok,Pid} = gen_fsm:start({local,to_stop},?MODULE, [], []), + ok = gen_fsm:stop({to_stop,node()}), + false = erlang:is_process_alive(Pid), + {'EXIT',noproc} = (catch gen_fsm:stop({to_stop,node()})), + ok. + +%% Globally registered name +stop6(_Config) -> + {ok, Pid} = gen_fsm:start({global, to_stop}, ?MODULE, [], []), + ok = gen_fsm:stop({global,to_stop}), + false = erlang:is_process_alive(Pid), + {'EXIT',noproc} = (catch gen_fsm:stop({global,to_stop})), + ok. + +%% 'via' registered name +stop7(_Config) -> + dummy_via:reset(), + {ok, Pid} = gen_fsm:start({via, dummy_via, to_stop}, + ?MODULE, [], []), + ok = gen_fsm:stop({via, dummy_via, to_stop}), + false = erlang:is_process_alive(Pid), + {'EXIT',noproc} = (catch gen_fsm:stop({via, dummy_via, to_stop})), + ok. 
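Returning to the filename_SUITE join changes further up: single "." components are dropped inside a path but kept at its end, trailing slashes are stripped everywhere except for the root, and join/2 must agree with join/1 on the equivalent two-element list (OTP-12158). A compact restatement of those assertions; the module name is made up:

    -module(join_demo).
    -export([run/0]).

    run() ->
        "/a/b" = filename:join("/a/.", "b"),    %% "." dropped inside the path
        "/a/." = filename:join("/a", "."),      %% but kept at the end
        "/a"   = filename:join(["/a/"]),        %% trailing slash stripped...
        "/"    = filename:join(["/./"]),        %% ...except for the root
        "/b"   = filename:join("/a/", "/b/"),   %% absolute second component wins
        %% join/2 and join/1 agree on the equivalent two-element list.
        true = filename:join("/a/.", "b") =:= filename:join(["/a/.", "b"]),
        ok.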
+ +%% Anonymous on remote node +stop8(_Config) -> + {ok,Node} = test_server:start_node(gen_fsm_SUITE_stop8,slave,[]), + Dir = filename:dirname(code:which(?MODULE)), + rpc:call(Node,code,add_path,[Dir]), + {ok, Pid} = rpc:call(Node,gen_fsm,start,[?MODULE,[],[]]), + ok = gen_fsm:stop(Pid), + false = rpc:call(Node,erlang,is_process_alive,[Pid]), + {'EXIT',noproc} = (catch gen_fsm:stop(Pid)), + true = test_server:stop_node(Node), + {'EXIT',{{nodedown,Node},_}} = (catch gen_fsm:stop(Pid)), + ok. + +%% Registered name on remote node +stop9(_Config) -> + {ok,Node} = test_server:start_node(gen_fsm_SUITE_stop9,slave,[]), + Dir = filename:dirname(code:which(?MODULE)), + rpc:call(Node,code,add_path,[Dir]), + {ok, Pid} = rpc:call(Node,gen_fsm,start,[{local,to_stop},?MODULE,[],[]]), + ok = gen_fsm:stop({to_stop,Node}), + undefined = rpc:call(Node,erlang,whereis,[to_stop]), + false = rpc:call(Node,erlang,is_process_alive,[Pid]), + {'EXIT',noproc} = (catch gen_fsm:stop({to_stop,Node})), + true = test_server:stop_node(Node), + {'EXIT',{{nodedown,Node},_}} = (catch gen_fsm:stop({to_stop,Node})), + ok. + +%% Globally registered name on remote node +stop10(_Config) -> + {ok,Node} = test_server:start_node(gen_fsm_SUITE_stop10,slave,[]), + Dir = filename:dirname(code:which(?MODULE)), + rpc:call(Node,code,add_path,[Dir]), + {ok, Pid} = rpc:call(Node,gen_fsm,start,[{global,to_stop},?MODULE,[],[]]), + global:sync(), + ok = gen_fsm:stop({global,to_stop}), + false = rpc:call(Node,erlang,is_process_alive,[Pid]), + {'EXIT',noproc} = (catch gen_fsm:stop({global,to_stop})), + true = test_server:stop_node(Node), + {'EXIT',noproc} = (catch gen_fsm:stop({global,to_stop})), + ok. + %% Check that time outs in calls work abnormal1(suite) -> []; abnormal1(Config) when is_list(Config) -> diff --git a/lib/stdlib/test/gen_server_SUITE.erl b/lib/stdlib/test/gen_server_SUITE.erl index 0f03fda30a..30dabf63c5 100644 --- a/lib/stdlib/test/gen_server_SUITE.erl +++ b/lib/stdlib/test/gen_server_SUITE.erl @@ -36,6 +36,9 @@ get_state/1, replace_state/1, call_with_huge_message_queue/1 ]). +-export([stop1/1, stop2/1, stop3/1, stop4/1, stop5/1, stop6/1, stop7/1, + stop8/1, stop9/1, stop10/1]). + % spawn export -export([spec_init_local/2, spec_init_global/2, spec_init_via/2, spec_init_default_timeout/2, spec_init_global_default_timeout/2, @@ -51,7 +54,7 @@ suite() -> [{ct_hooks,[ts_install_cth]}]. all() -> - [start, crash, call, cast, cast_fast, info, abcast, + [start, {group,stop}, crash, call, cast, cast_fast, info, abcast, multicall, multicall_down, call_remote1, call_remote2, call_remote3, call_remote_n1, call_remote_n2, call_remote_n3, spec_init, @@ -63,7 +66,8 @@ all() -> call_with_huge_message_queue]. groups() -> - []. + [{stop, [], + [stop1, stop2, stop3, stop4, stop5, stop6, stop7, stop8, stop9, stop10]}]. init_per_suite(Config) -> Config. @@ -237,6 +241,105 @@ start(Config) when is_list(Config) -> process_flag(trap_exit, OldFl), ok. +%% Anonymous, reason 'normal' +stop1(_Config) -> + {ok, Pid} = gen_server:start(?MODULE, [], []), + ok = gen_server:stop(Pid), + false = erlang:is_process_alive(Pid), + {'EXIT',noproc} = (catch gen_server:stop(Pid)), + ok. + +%% Anonymous, other reason +stop2(_Config) -> + {ok,Pid} = gen_server:start(?MODULE, [], []), + ok = gen_server:stop(Pid, other_reason, infinity), + false = erlang:is_process_alive(Pid), + ok. 
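gen_fsm and gen_server both gain a synchronous stop/1 and stop/3, exercised above for anonymous, locally and globally registered, 'via'-registered and remote processes. A minimal gen_server sketch stopping a locally registered server with an explicit reason and timeout; the module and registered names are made up:

    -module(stop_demo).
    -behaviour(gen_server).
    -export([run/0]).
    -export([init/1, handle_call/3, handle_cast/2, handle_info/2,
             terminate/2, code_change/3]).

    run() ->
        {ok, Pid} = gen_server:start({local, stop_demo}, ?MODULE, [], []),
        %% Returns after terminate/2 has run; 'normal', 'shutdown' and
        %% {shutdown,_} produce no error report, other reasons do.
        ok = gen_server:stop(stop_demo, shutdown, 5000),
        false = erlang:is_process_alive(Pid),
        {'EXIT', noproc} = (catch gen_server:stop(Pid)),   %% already gone
        ok.

    %% Minimal callbacks, just enough to start and stop the server.
    init([]) -> {ok, #{}}.
    handle_call(_Req, _From, State) -> {reply, ok, State}.
    handle_cast(_Msg, State) -> {noreply, State}.
    handle_info(_Info, State) -> {noreply, State}.
    terminate(_Reason, _State) -> ok.
    code_change(_OldVsn, State, _Extra) -> {ok, State}.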
+ +%% Anonymous, invalid timeout +stop3(_Config) -> + {ok,Pid} = gen_server:start(?MODULE, [], []), + {'EXIT',_} = (catch gen_server:stop(Pid, other_reason, invalid_timeout)), + true = erlang:is_process_alive(Pid), + ok = gen_server:stop(Pid), + false = erlang:is_process_alive(Pid), + ok. + +%% Registered name +stop4(_Config) -> + {ok,Pid} = gen_server:start({local,to_stop},?MODULE, [], []), + ok = gen_server:stop(to_stop), + false = erlang:is_process_alive(Pid), + {'EXIT',noproc} = (catch gen_server:stop(to_stop)), + ok. + +%% Registered name and local node +stop5(_Config) -> + {ok,Pid} = gen_server:start({local,to_stop},?MODULE, [], []), + ok = gen_server:stop({to_stop,node()}), + false = erlang:is_process_alive(Pid), + {'EXIT',noproc} = (catch gen_server:stop({to_stop,node()})), + ok. + +%% Globally registered name +stop6(_Config) -> + {ok, Pid} = gen_server:start({global, to_stop}, ?MODULE, [], []), + ok = gen_server:stop({global,to_stop}), + false = erlang:is_process_alive(Pid), + {'EXIT',noproc} = (catch gen_server:stop({global,to_stop})), + ok. + +%% 'via' registered name +stop7(_Config) -> + dummy_via:reset(), + {ok, Pid} = gen_server:start({via, dummy_via, to_stop}, + ?MODULE, [], []), + ok = gen_server:stop({via, dummy_via, to_stop}), + false = erlang:is_process_alive(Pid), + {'EXIT',noproc} = (catch gen_server:stop({via, dummy_via, to_stop})), + ok. + +%% Anonymous on remote node +stop8(_Config) -> + {ok,Node} = test_server:start_node(gen_server_SUITE_stop8,slave,[]), + Dir = filename:dirname(code:which(?MODULE)), + rpc:call(Node,code,add_path,[Dir]), + {ok, Pid} = rpc:call(Node,gen_server,start,[?MODULE,[],[]]), + ok = gen_server:stop(Pid), + false = rpc:call(Node,erlang,is_process_alive,[Pid]), + {'EXIT',noproc} = (catch gen_server:stop(Pid)), + true = test_server:stop_node(Node), + {'EXIT',{{nodedown,Node},_}} = (catch gen_server:stop(Pid)), + ok. + +%% Registered name on remote node +stop9(_Config) -> + {ok,Node} = test_server:start_node(gen_server_SUITE_stop9,slave,[]), + Dir = filename:dirname(code:which(?MODULE)), + rpc:call(Node,code,add_path,[Dir]), + {ok, Pid} = rpc:call(Node,gen_server,start,[{local,to_stop},?MODULE,[],[]]), + ok = gen_server:stop({to_stop,Node}), + undefined = rpc:call(Node,erlang,whereis,[to_stop]), + false = rpc:call(Node,erlang,is_process_alive,[Pid]), + {'EXIT',noproc} = (catch gen_server:stop({to_stop,Node})), + true = test_server:stop_node(Node), + {'EXIT',{{nodedown,Node},_}} = (catch gen_server:stop({to_stop,Node})), + ok. + +%% Globally registered name on remote node +stop10(_Config) -> + {ok,Node} = test_server:start_node(gen_server_SUITE_stop10,slave,[]), + Dir = filename:dirname(code:which(?MODULE)), + rpc:call(Node,code,add_path,[Dir]), + {ok, Pid} = rpc:call(Node,gen_server,start,[{global,to_stop},?MODULE,[],[]]), + global:sync(), + ok = gen_server:stop({global,to_stop}), + false = rpc:call(Node,erlang,is_process_alive,[Pid]), + {'EXIT',noproc} = (catch gen_server:stop({global,to_stop})), + true = test_server:stop_node(Node), + {'EXIT',noproc} = (catch gen_server:stop({global,to_stop})), + ok. + crash(Config) when is_list(Config) -> ?line error_logger_forwarder:register(), diff --git a/lib/stdlib/test/io_SUITE.erl b/lib/stdlib/test/io_SUITE.erl index 2203dd8f51..8d53949c40 100644 --- a/lib/stdlib/test/io_SUITE.erl +++ b/lib/stdlib/test/io_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1999-2013. All Rights Reserved. +%% Copyright Ericsson AB 1999-2014. All Rights Reserved. 
%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -31,7 +31,7 @@ printable_range/1, io_lib_print_binary_depth_one/1, otp_10302/1, otp_10755/1, otp_10836/1, io_lib_width_too_small/1, - io_with_huge_message_queue/1]). + io_with_huge_message_queue/1, format_string/1]). -export([pretty/2]). @@ -71,7 +71,8 @@ all() -> io_fread_newlines, otp_8989, io_lib_fread_literal, printable_range, io_lib_print_binary_depth_one, otp_10302, otp_10755, otp_10836, - io_lib_width_too_small, io_with_huge_message_queue]. + io_lib_width_too_small, io_with_huge_message_queue, + format_string]. groups() -> []. @@ -1035,7 +1036,14 @@ rp(Term, Col, Ll, D, M, RF) -> lists:flatten(io_lib:format("~s", [R])). fmt(Fmt, Args) -> - lists:flatten(io_lib:format(Fmt, Args)). + FormatList = io_lib:scan_format(Fmt, Args), + {Fmt2, Args2} = io_lib:unscan_format(FormatList), + Chars1 = lists:flatten(io_lib:build_text(FormatList)), + Chars2 = lists:flatten(io_lib:format(Fmt2, Args2)), + Chars3 = lists:flatten(io_lib:format(Fmt, Args)), + Chars1 = Chars2, + Chars2 = Chars3, + Chars3. rfd(a, 0) -> []; @@ -2261,3 +2269,9 @@ writes(0, _) -> ok; writes(N, F1) -> file:write(F1, "hello\n"), writes(N - 1, F1). + +format_string(Config) -> + %% All but padding is tested by fmt/2. + "xxxxxxsssx" = fmt("~10.4.xs", ["sss"]), + "xxxxxxsssx" = fmt("~10.4.*s", [$x, "sss"]), + ok. diff --git a/lib/stdlib/test/io_proto_SUITE.erl b/lib/stdlib/test/io_proto_SUITE.erl index 76a8109a8d..c55836ff87 100644 --- a/lib/stdlib/test/io_proto_SUITE.erl +++ b/lib/stdlib/test/io_proto_SUITE.erl @@ -69,12 +69,7 @@ init_per_testcase(_Case, Config) -> ?line Dog = ?t:timetrap(?default_timeout), - Term = case os:getenv("TERM") of - List when is_list(List) -> - List; - _ -> - "dumb" - end, + Term = os:getenv("TERM", "dumb"), os:putenv("TERM","vt100"), [{watchdog, Dog}, {term, Term} | Config]. end_per_testcase(_Case, Config) -> diff --git a/lib/stdlib/test/proc_lib_SUITE.erl b/lib/stdlib/test/proc_lib_SUITE.erl index 8dca69bac4..b6f1973a05 100644 --- a/lib/stdlib/test/proc_lib_SUITE.erl +++ b/lib/stdlib/test/proc_lib_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2012. All Rights Reserved. +%% Copyright Ericsson AB 1996-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -27,7 +27,7 @@ init_per_group/2,end_per_group/2, crash/1, sync_start_nolink/1, sync_start_link/1, spawn_opt/1, sp1/0, sp2/0, sp3/1, sp4/2, sp5/1, - hibernate/1]). + hibernate/1, stop/1]). -export([ otp_6345/1, init_dont_hang/1]). -export([hib_loop/1, awaken/1]). @@ -38,6 +38,7 @@ -export([otp_6345_init/1, init_dont_hang_init/1]). +-export([system_terminate/4]). -ifdef(STANDALONE). -define(line, noop, ). @@ -49,7 +50,7 @@ suite() -> [{ct_hooks,[ts_install_cth]}]. all() -> [crash, {group, sync_start}, spawn_opt, hibernate, - {group, tickets}]. + {group, tickets}, stop]. groups() -> [{tickets, [], [otp_6345, init_dont_hang]}, @@ -361,10 +362,94 @@ init_dont_hang(Config) when is_list(Config) -> exit(Error) end. -init_dont_hang_init(Parent) -> +init_dont_hang_init(_Parent) -> 1 = 2. 
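The new io_SUITE fmt/2 helper above round-trips every format call through io_lib:scan_format/2, io_lib:unscan_format/1 and io_lib:build_text/1, and the io_proto change switches to os:getenv/2 with a default value. Both condensed into one sketch; the module name is made up:

    -module(fmt_demo).
    -export([fmt/2, term_env/0]).

    %% All three renderings must produce the same characters, e.g.
    %% fmt("~10.4.xs", ["sss"]) yields "xxxxxxsssx".
    fmt(Fmt, Args) ->
        FormatList = io_lib:scan_format(Fmt, Args),
        {Fmt2, Args2} = io_lib:unscan_format(FormatList),
        Chars = lists:flatten(io_lib:build_text(FormatList)),
        Chars = lists:flatten(io_lib:format(Fmt2, Args2)),
        Chars = lists:flatten(io_lib:format(Fmt, Args)),
        Chars.

    %% os:getenv/2 returns the default instead of 'false' when unset.
    term_env() ->
        os:getenv("TERM", "dumb").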
+%% Test proc_lib:stop/1,3 +stop(_Config) -> + Parent = self(), + SysMsgProc = + fun() -> + receive + {system,From,Request} -> + sys:handle_system_msg(Request,From,Parent,?MODULE,[],[]) + end + end, + + %% Normal case: + %% Process handles system message and terminated with given reason + Pid1 = proc_lib:spawn(SysMsgProc), + ok = proc_lib:stop(Pid1), + false = erlang:is_process_alive(Pid1), + + %% Process does not exit + {'EXIT',noproc} = (catch proc_lib:stop(Pid1)), + + %% Badly handled system message + DieProc = + fun() -> + receive + {system,_From,_Request} -> + exit(die) + end + end, + Pid2 = proc_lib:spawn(DieProc), + {'EXIT',{die,_}} = (catch proc_lib:stop(Pid2)), + + %% Hanging process => timeout + HangProc = + fun() -> + receive + {system,_From,_Request} -> + timer:sleep(5000) + end + end, + Pid3 = proc_lib:spawn(HangProc), + {'EXIT',timeout} = (catch proc_lib:stop(Pid3,normal,1000)), + + %% Success case with other reason than 'normal' + Pid4 = proc_lib:spawn(SysMsgProc), + ok = proc_lib:stop(Pid4,other_reason,infinity), + false = erlang:is_process_alive(Pid4), + + %% System message is handled, but process dies with other reason + %% than the given (in system_terminate/4 below) + Pid5 = proc_lib:spawn(SysMsgProc), + {'EXIT',{badmatch,2}} = (catch proc_lib:stop(Pid5,crash,infinity)), + false = erlang:is_process_alive(Pid5), + + %% Local registered name + Pid6 = proc_lib:spawn(SysMsgProc), + register(to_stop,Pid6), + ok = proc_lib:stop(to_stop), + undefined = whereis(to_stop), + false = erlang:is_process_alive(Pid6), + + %% Remote registered name + {ok,Node} = test_server:start_node(proc_lib_SUITE_stop,slave,[]), + Dir = filename:dirname(code:which(?MODULE)), + rpc:call(Node,code,add_path,[Dir]), + Pid7 = spawn(Node,SysMsgProc), + true = rpc:call(Node,erlang,register,[to_stop,Pid7]), + Pid7 = rpc:call(Node,erlang,whereis,[to_stop]), + ok = proc_lib:stop({to_stop,Node}), + undefined = rpc:call(Node,erlang,whereis,[to_stop]), + false = rpc:call(Node,erlang,is_process_alive,[Pid7]), + + %% Local and remote registered name, but non-existing + {'EXIT',noproc} = (catch proc_lib:stop(to_stop)), + {'EXIT',noproc} = (catch proc_lib:stop({to_stop,Node})), + + true = test_server:stop_node(Node), + + %% Remote registered name, but non-existing node + {'EXIT',{{nodedown,Node},_}} = (catch proc_lib:stop({to_stop,Node})), + ok. +system_terminate(crash,_Parent,_Deb,_State) -> + 1 = 2; +system_terminate(Reason,_Parent,_Deb,_State) -> + exit(Reason). %%----------------------------------------------------------------- %% The error_logger handler used. diff --git a/lib/stdlib/test/qlc_SUITE.erl b/lib/stdlib/test/qlc_SUITE.erl index 37fbb5267b..4173a40d14 100644 --- a/lib/stdlib/test/qlc_SUITE.erl +++ b/lib/stdlib/test/qlc_SUITE.erl @@ -7891,7 +7891,7 @@ run_test(Config, Extra, {cres, Body, Opts, ExpectedCompileReturn}) -> {module, _} = code:load_abs(AbsFile, Mod), Ms0 = erlang:process_info(self(),messages), - Before = {get(), pps(), ets:all(), Ms0}, + Before = {{get(), ets:all(), Ms0}, pps()}, %% Prepare the check that the qlc module does not call qlc_pt. _ = [unload_pt() || {file, Name} <- [code:is_loaded(qlc_pt)], @@ -7921,12 +7921,29 @@ run_test(Config, Extra, {cres, Body, Opts, ExpectedCompileReturn}) -> run_test(Config, Extra, Body) -> run_test(Config, Extra, {cres,Body,[]}). 
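proc_lib:stop/1,3 (tested above) works on any proc_lib-spawned process that hands system messages over to sys, since the stop request arrives as a system message. A minimal stoppable process modelled on the SysMsgProc fun in the test; the module name is made up:

    -module(stoppable).
    -export([start/0, demo/0]).
    -export([system_continue/3, system_terminate/4]).

    start() ->
        Parent = self(),
        proc_lib:spawn(fun() -> loop(Parent) end).

    loop(Parent) ->
        receive
            {system, From, Request} ->
                sys:handle_system_msg(Request, From, Parent, ?MODULE, [], [])
        end.

    system_continue(Parent, _Deb, _Misc) -> loop(Parent).
    system_terminate(Reason, _Parent, _Deb, _Misc) -> exit(Reason).

    demo() ->
        Pid = start(),
        ok = proc_lib:stop(Pid),                %% reason 'normal'
        false = erlang:is_process_alive(Pid),
        ok.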
-wait_for_expected(R, Before, SourceFile, Wait) -> +wait_for_expected(R, {Strict0,PPS0}=Before, SourceFile, Wait) -> Ms = erlang:process_info(self(),messages), - After = {get(), pps(), ets:all(), Ms}, + After = {_,PPS1} = {{get(), ets:all(), Ms}, pps()}, case {R, After} of {ok, Before} -> ok; + {ok, {Strict0,_}} -> + {Ports0,Procs0} = PPS0, + {Ports1,Procs1} = PPS1, + case {Ports1 -- Ports0, Procs1 -- Procs0} of + {[], []} -> ok; + _ when Wait -> + timer:sleep(1000), + wait_for_expected(R, Before, SourceFile, false); + {PortsDiff,ProcsDiff} -> + io:format("failure, got ~p~n, expected ~p\n", + [PPS1, PPS0]), + show("Old port", Ports0 -- Ports1), + show("New port", PortsDiff), + show("Old proc", Procs0 -- Procs1), + show("New proc", ProcsDiff), + fail(SourceFile) + end; _ when Wait -> timer:sleep(1000), wait_for_expected(R, Before, SourceFile, false); @@ -7993,7 +8010,7 @@ compile_file(Config, Test0, Opts0) -> case compile:file(File, Opts) of {ok, _M, Ws} -> warnings(File, Ws); {error, [{File,Es}], []} -> {errors, Es, []}; - {error, [{File,Es}], [{File,Ws}]} -> {error, Es, Ws} + {error, [{File,Es}], [{File,Ws}]} -> {errors, Es, Ws} end. comp_compare(T, T) -> @@ -8058,6 +8075,17 @@ filename(Name, Config) when is_atom(Name) -> filename(Name, Config) -> filename:join(?privdir, Name). +show(_S, []) -> + ok; +show(S, [{Pid, Name, InitCall}|Pids]) when is_pid(Pid) -> + io:format("~s: ~w (~w), ~w: ~p~n", + [S, Pid, proc_reg_name(Name), InitCall, + erlang:process_info(Pid)]), + show(S, Pids); +show(S, [{Port, _}|Ports]) when is_port(Port)-> + io:format("~s: ~w: ~p~n", [S, Port, erlang:port_info(Port)]), + show(S, Ports). + pps() -> {port_list(), process_list()}. @@ -8070,6 +8098,9 @@ process_list() -> safe_second_element(process_info(P, initial_call))} || P <- processes(), is_process_alive(P)]. +proc_reg_name({registered_name, Name}) -> Name; +proc_reg_name([]) -> no_reg_name. + safe_second_element({_,Info}) -> Info; safe_second_element(Other) -> Other. diff --git a/lib/stdlib/test/stdlib_SUITE.erl b/lib/stdlib/test/stdlib_SUITE.erl index 3d09bd27ff..206eb4fd74 100644 --- a/lib/stdlib/test/stdlib_SUITE.erl +++ b/lib/stdlib/test/stdlib_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1997-2014. All Rights Reserved. +%% Copyright Ericsson AB 1997-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -22,14 +22,7 @@ -module(stdlib_SUITE). -include_lib("test_server/include/test_server.hrl"). - -% Test server specific exports --export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1, - init_per_group/2,end_per_group/2]). --export([init_per_testcase/2, end_per_testcase/2]). - -% Test cases must be exported. --export([app_test/1, appup_test/1]). +-compile(export_all). %% %% all/1 @@ -37,10 +30,10 @@ suite() -> [{ct_hooks,[ts_install_cth]}]. all() -> - [app_test, appup_test]. + [app_test, appup_test, {group,upgrade}]. groups() -> - []. + [{upgrade,[minor_upgrade,major_upgrade]}]. init_per_suite(Config) -> Config. @@ -48,9 +41,13 @@ init_per_suite(Config) -> end_per_suite(_Config) -> ok. +init_per_group(upgrade, Config) -> + ct_release_test:init(Config); init_per_group(_GroupName, Config) -> Config. +end_per_group(upgrade, Config) -> + ct_release_test:cleanup(Config); end_per_group(_GroupName, Config) -> Config. @@ -165,3 +162,26 @@ check_appup([Vsn|Vsns],Instrs,Expected) -> end; check_appup([],_,_) -> ok. 
+ + +minor_upgrade(Config) -> + ct_release_test:upgrade(stdlib,minor,{?MODULE,[]},Config). + +major_upgrade(Config) -> + ct_release_test:upgrade(stdlib,major,{?MODULE,[]},Config). + +%% Version numbers are checked by ct_release_test, so there is nothing +%% more to check here... +upgrade_init(CtData,State) -> + {ok,{FromVsn,ToVsn}} = ct_release_test:get_app_vsns(CtData,stdlib), + case ct_release_test:get_appup(CtData,stdlib) of + {ok,{FromVsn,ToVsn,[restart_new_emulator],[restart_new_emulator]}} -> + io:format("Upgrade/downgrade ~p <--> ~p",[FromVsn,ToVsn]); + {error,{vsn_not_found,_}} when FromVsn==ToVsn -> + io:format("No upgrade test for stdlib, same version") + end, + State. +upgrade_upgraded(_CtData,State) -> + State. +upgrade_downgraded(_CtData,State) -> + State. diff --git a/lib/stdlib/test/string_SUITE.erl b/lib/stdlib/test/string_SUITE.erl index fccd1bef95..a55c710d50 100644 --- a/lib/stdlib/test/string_SUITE.erl +++ b/lib/stdlib/test/string_SUITE.erl @@ -217,21 +217,39 @@ substr(Config) when is_list(Config) -> ?line {'EXIT',_} = (catch string:substr("1234", "1")), ok. -tokens(suite) -> - []; -tokens(doc) -> - []; tokens(Config) when is_list(Config) -> - ?line [] = string:tokens("",""), - ?line [] = string:tokens("abc","abc"), - ?line ["abc"] = string:tokens("abc", ""), - ?line ["1","2 34","4","5"] = string:tokens("1,2 34,4;5", ";,"), - %% invalid arg type - ?line {'EXIT',_} = (catch string:tokens('x,y', ",")), + [] = string:tokens("",""), + [] = string:tokens("abc","abc"), + ["abc"] = string:tokens("abc", ""), + ["1","2 34","45","5","6","7"] = do_tokens("1,2 34,45;5,;6;,7", ";,"), + %% invalid arg type - ?line {'EXIT',_} = (catch string:tokens("x,y", ',')), + {'EXIT',_} = (catch string:tokens('x,y', ",")), + {'EXIT',_} = (catch string:tokens("x,y", ',')), ok. +do_tokens(S0, Sep0) -> + [H|T] = Sep0, + S = [replace_sep(C, T, H) || C <- S0], + Sep = [H], + io:format("~p ~p\n", [S0,Sep0]), + io:format("~p ~p\n", [S,Sep]), + + Res = string:tokens(S0, Sep0), + Res = string:tokens(Sep0++S0, Sep0), + Res = string:tokens(S0++Sep0, Sep0), + + Res = string:tokens(S, Sep), + Res = string:tokens(Sep++S, Sep), + Res = string:tokens(S++Sep, Sep), + + Res. + +replace_sep(C, Seps, New) -> + case lists:member(C, Seps) of + true -> New; + false -> C + end. chars(suite) -> []; diff --git a/lib/stdlib/test/supervisor_SUITE.erl b/lib/stdlib/test/supervisor_SUITE.erl index 836ea7c030..c98654aef7 100644 --- a/lib/stdlib/test/supervisor_SUITE.erl +++ b/lib/stdlib/test/supervisor_SUITE.erl @@ -37,9 +37,11 @@ sup_start_ignore_child/1, sup_start_ignore_temporary_child/1, sup_start_ignore_temporary_child_start_child/1, sup_start_ignore_temporary_child_start_child_simple/1, - sup_start_error_return/1, sup_start_fail/1, sup_stop_infinity/1, - sup_stop_timeout/1, sup_stop_brutal_kill/1, child_adm/1, - child_adm_simple/1, child_specs/1, extra_return/1]). + sup_start_error_return/1, sup_start_fail/1, + sup_start_map/1, sup_start_map_faulty_specs/1, + sup_stop_infinity/1, sup_stop_timeout/1, sup_stop_brutal_kill/1, + child_adm/1, child_adm_simple/1, child_specs/1, extra_return/1, + sup_flags/1]). %% Tests concept permanent, transient and temporary -export([ permanent_normal/1, transient_normal/1, @@ -65,7 +67,8 @@ do_not_save_child_specs_for_temporary_children/1, simple_one_for_one_scale_many_temporary_children/1, simple_global_supervisor/1, hanging_restart_loop/1, - hanging_restart_loop_simple/1]). 
+ hanging_restart_loop_simple/1, code_change/1, code_change_map/1, + code_change_simple/1, code_change_simple_map/1]). %%------------------------------------------------------------------------- @@ -73,8 +76,8 @@ suite() -> [{ct_hooks,[ts_install_cth]}]. all() -> - [{group, sup_start}, {group, sup_stop}, child_adm, - child_adm_simple, extra_return, child_specs, + [{group, sup_start}, {group, sup_start_map}, {group, sup_stop}, child_adm, + child_adm_simple, extra_return, child_specs, sup_flags, {group, restart_one_for_one}, {group, restart_one_for_all}, {group, restart_simple_one_for_one}, @@ -85,7 +88,8 @@ all() -> count_children, do_not_save_start_parameters_for_temporary_children, do_not_save_child_specs_for_temporary_children, simple_one_for_one_scale_many_temporary_children, temporary_bystander, - simple_global_supervisor, hanging_restart_loop, hanging_restart_loop_simple]. + simple_global_supervisor, hanging_restart_loop, hanging_restart_loop_simple, + code_change, code_change_map, code_change_simple, code_change_simple_map]. groups() -> [{sup_start, [], @@ -94,6 +98,8 @@ groups() -> sup_start_ignore_temporary_child_start_child, sup_start_ignore_temporary_child_start_child_simple, sup_start_error_return, sup_start_fail]}, + {sup_start_map, [], + [sup_start_map, sup_start_map_faulty_specs]}, {sup_stop, [], [sup_stop_infinity, sup_stop_timeout, sup_stop_brutal_kill]}, @@ -256,6 +262,60 @@ sup_start_fail(Config) when is_list(Config) -> check_exit_reason(Term). %%------------------------------------------------------------------------- +%% Tests that the supervisor process starts correctly with map +%% startspec, and that the full childspec can be read. +sup_start_map(Config) when is_list(Config) -> + process_flag(trap_exit, true), + Child1 = #{id=>child1, start=>{supervisor_1, start_child, []}}, + Child2 = #{id=>child2, + start=>{supervisor_1, start_child, []}, + shutdown=>brutal_kill}, + Child3 = #{id=>child3, + start=>{supervisor_1, start_child, []}, + type=>supervisor}, + {ok, Pid} = start_link({ok, {#{}, [Child1,Child2,Child3]}}), + + %% Check default values + {ok,#{id:=child1, + start:={supervisor_1,start_child,[]}, + restart:=permanent, + shutdown:=5000, + type:=worker, + modules:=[supervisor_1]}} = supervisor:get_childspec(Pid, child1), + {ok,#{id:=child2, + start:={supervisor_1,start_child,[]}, + restart:=permanent, + shutdown:=brutal_kill, + type:=worker, + modules:=[supervisor_1]}} = supervisor:get_childspec(Pid, child2), + {ok,#{id:=child3, + start:={supervisor_1,start_child,[]}, + restart:=permanent, + shutdown:=infinity, + type:=supervisor, + modules:=[supervisor_1]}} = supervisor:get_childspec(Pid, child3), + {error,not_found} = supervisor:get_childspec(Pid, child4), + terminate(Pid, shutdown). + +%%------------------------------------------------------------------------- +%% Tests that the supervisor produces good error messages when start- +%% and child specs are faulty. +sup_start_map_faulty_specs(Config) when is_list(Config) -> + process_flag(trap_exit, true), + Child1 = #{start=>{supervisor_1, start_child, []}}, + Child2 = #{id=>child2}, + Child3 = #{id=>child3, + start=>{supervisor_1, start_child, []}, + silly_flag=>true}, + Child4 = child4, + {error,{start_spec,missing_id}} = start_link({ok, {#{}, [Child1]}}), + {error,{start_spec,missing_start}} = start_link({ok, {#{}, [Child2]}}), + {ok,Pid} = start_link({ok, {#{}, [Child3]}}), + terminate(Pid,shutdown), + {error,{start_spec,{invalid_child_spec,child4}}} = + start_link({ok, {#{}, [Child4]}}). 
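sup_start_map above introduces map-based supervisor flags and child specs: only id and start are mandatory, and supervisor:get_childspec/2 reports the spec back with the defaults filled in (restart permanent, shutdown 5000 for workers, type worker, modules taken from the start MFA). A minimal callback sketch; my_worker is a hypothetical worker module:

    -module(map_sup).
    -behaviour(supervisor).
    -export([start_link/0, init/1]).

    start_link() ->
        supervisor:start_link({local, ?MODULE}, ?MODULE, []).

    init([]) ->
        SupFlags = #{strategy => one_for_one, intensity => 1, period => 5},
        %% Only id and start are required; the rest defaults as above.
        Child = #{id => worker1,
                  start => {my_worker, start_link, []}},
        {ok, {SupFlags, [Child]}}.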
+ +%%------------------------------------------------------------------------- %% See sup_stop/1 when Shutdown = infinity, this walue is allowed for %% children of type supervisor _AND_ worker. sup_stop_infinity(Config) when is_list(Config) -> @@ -479,7 +539,7 @@ child_adm_simple(Config) when is_list(Config) -> %% Tests child specs, invalid formats should be rejected. child_specs(Config) when is_list(Config) -> process_flag(trap_exit, true), - {ok, _Pid} = start_link({ok, {{one_for_one, 2, 3600}, []}}), + {ok, Pid} = start_link({ok, {{one_for_one, 2, 3600}, []}}), {error, _} = supervisor:start_child(sup_test, hej), %% Bad child specs @@ -509,6 +569,7 @@ child_specs(Config) when is_list(Config) -> {error, {invalid_modules,dy}} = supervisor:start_child(sup_test, B5), + {error, {badarg, _}} = supervisor:check_childspecs(B1), % should be list {error, {invalid_mfa,mfa}} = supervisor:check_childspecs([B1]), {error, {invalid_restart_type,prmanent}} = supervisor:check_childspecs([B2]), @@ -524,6 +585,54 @@ child_specs(Config) when is_list(Config) -> ok = supervisor:check_childspecs([C3]), ok = supervisor:check_childspecs([C4]), ok = supervisor:check_childspecs([C5]), + + {error,{duplicate_child_name,child}} = supervisor:check_childspecs([C1,C2]), + + terminate(Pid, shutdown), + + %% Faulty child specs in supervisor start + {error, {start_spec, {invalid_mfa, mfa}}} = + start_link({ok, {{one_for_one, 2, 3600}, [B1]}}), + {error, {start_spec, {invalid_restart_type, prmanent}}} = + start_link({ok, {{simple_one_for_one, 2, 3600}, [B2]}}), + + %% simple_one_for_one needs exactly one child + {error,{bad_start_spec,[]}} = + start_link({ok, {{simple_one_for_one, 2, 3600}, []}}), + {error,{bad_start_spec,[C1,C2]}} = + start_link({ok, {{simple_one_for_one, 2, 3600}, [C1,C2]}}), + + ok. + +%%------------------------------------------------------------------------- +%% Test error handling of supervisor flags +sup_flags(_Config) -> + process_flag(trap_exit,true), + {error,{supervisor_data,{invalid_strategy,_}}} = + start_link({ok, {{none_for_one, 2, 3600}, []}}), + {error,{supervisor_data,{invalid_strategy,_}}} = + start_link({ok, {#{strategy=>none_for_one}, []}}), + {error,{supervisor_data,{invalid_intensity,_}}} = + start_link({ok, {{one_for_one, infinity, 3600}, []}}), + {error,{supervisor_data,{invalid_intensity,_}}} = + start_link({ok, {#{intensity=>infinity}, []}}), + {error,{supervisor_data,{invalid_period,_}}} = + start_link({ok, {{one_for_one, 2, 0}, []}}), + {error,{supervisor_data,{invalid_period,_}}} = + start_link({ok, {#{period=>0}, []}}), + {error,{supervisor_data,{invalid_period,_}}} = + start_link({ok, {{one_for_one, 2, infinity}, []}}), + {error,{supervisor_data,{invalid_period,_}}} = + start_link({ok, {#{period=>infinity}, []}}), + + %% SupFlags other than a map or a 3-tuple + {error,{supervisor_data,{invalid_type,_}}} = + start_link({ok, {{one_for_one, 2}, []}}), + + %% Unexpected flags are ignored + {ok,Pid} = start_link({ok,{#{silly_flag=>true},[]}}), + terminate(Pid,shutdown), + ok. %%------------------------------------------------------------------------- @@ -1647,6 +1756,186 @@ hanging_restart_loop_simple(Config) when is_list(Config) -> ok. 
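The child_specs and sup_flags additions above tighten the validation paths: supervisor:check_childspecs/1 rejects a bare spec that is not wrapped in a list and now also catches duplicate child ids, while bad strategy, intensity and period values are reported as supervisor_data errors at start. A sketch of the check_childspecs behaviour; my_worker is a hypothetical module:

    -module(spec_check_demo).
    -export([run/0]).

    run() ->
        Spec = {child, {my_worker, start_link, []},
                permanent, 1000, worker, [my_worker]},
        ok = supervisor:check_childspecs([Spec]),
        %% The argument must be a list of child specs...
        {error, {badarg, _}} = supervisor:check_childspecs(Spec),
        %% ...and duplicate ids are rejected up front.
        {error, {duplicate_child_name, child}} =
            supervisor:check_childspecs([Spec, Spec]),
        ok.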
%%------------------------------------------------------------------------- +%% Test the code_change function +code_change(_Config) -> + process_flag(trap_exit, true), + + SupFlags = {one_for_one, 0, 1}, + {ok, Pid} = start_link({ok, {SupFlags, []}}), + [] = supervisor:which_children(Pid), + + %% Change supervisor flags + S1 = sys:get_state(Pid), + ok = fake_upgrade(Pid,{ok, {{one_for_one, 1, 3}, []}}), + S2 = sys:get_state(Pid), + true = (S1 /= S2), + + %% Faulty childspec + FaultyChild = {child1, permanent, brutal_kill, worker, []}, % missing start + {error,{error,{invalid_child_spec,FaultyChild}}} = + fake_upgrade(Pid,{ok,{SupFlags,[FaultyChild]}}), + + %% Add child1 and child2 + Child1 = {child1, {supervisor_1, start_child, []}, + permanent, 2000, worker, []}, + Child2 = {child2, {supervisor_1, start_child, []}, + permanent, brutal_kill, worker, []}, + ok = fake_upgrade(Pid,{ok,{SupFlags,[Child1,Child2]}}), + %% Children are not automatically started + {ok,_} = supervisor:restart_child(Pid,child1), + {ok,_} = supervisor:restart_child(Pid,child2), + [{child2,_,_,_},{child1,_,_,_}] = supervisor:which_children(Pid), + + %% Change child1, remove child2 and add child3 + Child11 = {child1, {supervisor_1, start_child, []}, + permanent, 1000, worker, []}, + Child3 = {child3, {supervisor_1, start_child, []}, + permanent, brutal_kill, worker, []}, + ok = fake_upgrade(Pid,{ok, {SupFlags, [Child11,Child3]}}), + %% Children are not deleted on upgrade, so it is ok that child2 is + %% still here + [{child2,_,_,_},{child3,_,_,_},{child1,_,_,_}] = + supervisor:which_children(Pid), + + %% Ignore during upgrade + ok = fake_upgrade(Pid,ignore), + + %% Error during upgrade + {error, faulty_return} = fake_upgrade(Pid,faulty_return), + + %% Faulty flags + {error,{error, {invalid_intensity,faulty_intensity}}} = + fake_upgrade(Pid,{ok, {{one_for_one,faulty_intensity,1}, []}}), + {error,{error,{bad_flags, faulty_flags}}} = + fake_upgrade(Pid,{ok, {faulty_flags, []}}), + + terminate(Pid,shutdown). 
+ +code_change_map(_Config) -> + process_flag(trap_exit, true), + + {ok, Pid} = start_link({ok, {#{}, []}}), + [] = supervisor:which_children(Pid), + + %% Change supervisor flags + S1 = sys:get_state(Pid), + ok = fake_upgrade(Pid,{ok, {#{intensity=>1, period=>3}, []}}), + S2 = sys:get_state(Pid), + true = (S1 /= S2), + + %% Faulty childspec + FaultyChild = #{id=>faulty_child}, + {error,{error,missing_start}} = + fake_upgrade(Pid,{ok,{#{},[FaultyChild]}}), + + %% Add child1 and child2 + Child1 = #{id=>child1, + start=>{supervisor_1, start_child, []}, + shutdown=>2000}, + Child2 = #{id=>child2, + start=>{supervisor_1, start_child, []}}, + ok = fake_upgrade(Pid,{ok,{#{},[Child1,Child2]}}), + %% Children are not automatically started + {ok,_} = supervisor:restart_child(Pid,child1), + {ok,_} = supervisor:restart_child(Pid,child2), + [{child2,_,_,_},{child1,_,_,_}] = supervisor:which_children(Pid), + {ok,#{shutdown:=2000}} = supervisor:get_childspec(Pid,child1), + + %% Change child1, remove child2 and add child3 + Child11 = #{id=>child1, + start=>{supervisor_1, start_child, []}, + shutdown=>1000}, + Child3 = #{id=>child3, + start=>{supervisor_1, start_child, []}}, + ok = fake_upgrade(Pid,{ok, {#{}, [Child11,Child3]}}), + %% Children are not deleted on upgrade, so it is ok that child2 is + %% still here + [{child2,_,_,_},{child3,_,_,_},{child1,_,_,_}] = + supervisor:which_children(Pid), + {ok,#{shutdown:=1000}} = supervisor:get_childspec(Pid,child1), + + %% Ignore during upgrade + ok = fake_upgrade(Pid,ignore), + + %% Error during upgrade + {error, faulty_return} = fake_upgrade(Pid,faulty_return), + + %% Faulty flags + {error,{error, {invalid_intensity,faulty_intensity}}} = + fake_upgrade(Pid,{ok, {#{intensity=>faulty_intensity}, []}}), + + terminate(Pid,shutdown). + +code_change_simple(_Config) -> + process_flag(trap_exit, true), + + SimpleChild1 = {child1,{supervisor_1, start_child, []}, permanent, + brutal_kill, worker, []}, + SimpleFlags = {simple_one_for_one, 0, 1}, + {ok, SimplePid} = start_link({ok, {SimpleFlags,[SimpleChild1]}}), + %% Change childspec + SimpleChild11 = {child1,{supervisor_1, start_child, []}, permanent, + 1000, worker, []}, + ok = fake_upgrade(SimplePid,{ok,{SimpleFlags,[SimpleChild11]}}), + + %% Attempt to add child + SimpleChild2 = {child2,{supervisor_1, start_child, []}, permanent, + brutal_kill, worker, []}, + + {error, {error, {ok,[_,_]}}} = + fake_upgrade(SimplePid,{ok,{SimpleFlags,[SimpleChild1,SimpleChild2]}}), + + %% Attempt to remove child + {error, {error, {ok,[]}}} = fake_upgrade(SimplePid,{ok,{SimpleFlags,[]}}), + + terminate(SimplePid,shutdown), + ok. + +code_change_simple_map(_Config) -> + process_flag(trap_exit, true), + + SimpleChild1 = #{id=>child1, + start=>{supervisor_1, start_child, []}}, + SimpleFlags = #{strategy=>simple_one_for_one}, + {ok, SimplePid} = start_link({ok, {SimpleFlags,[SimpleChild1]}}), + %% Change childspec + SimpleChild11 = #{id=>child1, + start=>{supervisor_1, start_child, []}, + shutdown=>1000}, + ok = fake_upgrade(SimplePid,{ok,{SimpleFlags,[SimpleChild11]}}), + + %% Attempt to add child + SimpleChild2 = #{id=>child2, + start=>{supervisor_1, start_child, []}}, + {error, {error, {ok, [_,_]}}} = + fake_upgrade(SimplePid,{ok,{SimpleFlags,[SimpleChild1,SimpleChild2]}}), + + %% Attempt to remove child + {error, {error, {ok, []}}} = + fake_upgrade(SimplePid,{ok,{SimpleFlags,[]}}), + + terminate(SimplePid,shutdown), + ok. 
+ +fake_upgrade(Pid,NewInitReturn) -> + ok = sys:suspend(Pid), + + %% Update state to fake code change + %% The #state record in supervisor.erl holds the arguments given + %% to the callback init function. By replacing these arguments the + %% init function will return something new and by that fake a code + %% change (see init function above in this module). + Fun = fun(State) -> + Size = size(State), % 'args' is the last field in #state. + setelement(Size,State,NewInitReturn) + end, + sys:replace_state(Pid,Fun), + + R = sys:change_code(Pid,gen_server,dummy_vsn,[]), + ok = sys:resume(Pid), + R. + +%%------------------------------------------------------------------------- terminate(Pid, Reason) when Reason =/= supervisor -> terminate(dummy, Pid, dummy, Reason). diff --git a/lib/stdlib/test/sys_SUITE.erl b/lib/stdlib/test/sys_SUITE.erl index f38bc87ae5..047ee9f1fa 100644 --- a/lib/stdlib/test/sys_SUITE.erl +++ b/lib/stdlib/test/sys_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2013. All Rights Reserved. +%% Copyright Ericsson AB 1996-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -202,14 +202,7 @@ spec_proc(Mod) -> {Mod,system_get_state},{throw,fail}},_}} -> ok end, - Mod:stop(), - WaitForUnregister = fun W() -> - case whereis(Mod) of - undefined -> ok; - _ -> timer:sleep(10), W() - end - end, - WaitForUnregister(), + ok = sys:terminate(Mod, normal), {ok,_} = Mod:start_link(4), ok = case catch sys:replace_state(Mod, fun(_) -> {} end) of {} -> @@ -218,8 +211,7 @@ spec_proc(Mod) -> {Mod,system_replace_state},{throw,fail}},_}} -> ok end, - Mod:stop(), - WaitForUnregister(), + ok = sys:terminate(Mod, normal), {ok,_} = Mod:start_link(4), StateFun = fun(_) -> error(fail) end, ok = case catch sys:replace_state(Mod, StateFun) of @@ -231,7 +223,7 @@ spec_proc(Mod) -> {'EXIT',{{callback_failed,StateFun,{error,fail}},_}} -> ok end, - Mod:stop(). + ok = sys:terminate(Mod, normal). %%%%%%%%%%%%%%%%%%%% %% Dummy server diff --git a/lib/stdlib/test/sys_sp1.erl b/lib/stdlib/test/sys_sp1.erl index e84ffcfa12..0fb288991f 100644 --- a/lib/stdlib/test/sys_sp1.erl +++ b/lib/stdlib/test/sys_sp1.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2013. All Rights Reserved. +%% Copyright Ericsson AB 1996-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -17,7 +17,7 @@ %% %CopyrightEnd% %% -module(sys_sp1). --export([start_link/1, stop/0]). +-export([start_link/1]). -export([alloc/0, free/1]). -export([init/1]). -export([system_continue/3, system_terminate/4, @@ -31,10 +31,6 @@ start_link(NumCh) -> proc_lib:start_link(?MODULE, init, [[self(),NumCh]]). -stop() -> - ?MODULE ! stop, - ok. - alloc() -> ?MODULE ! {self(), alloc}, receive @@ -70,11 +66,7 @@ loop(Chs, Parent, Deb) -> loop(Chs2, Parent, Deb2); {system, From, Request} -> sys:handle_system_msg(Request, From, Parent, - ?MODULE, Deb, Chs); - stop -> - sys:handle_debug(Deb, fun write_debug/3, - ?MODULE, {in, stop}), - ok + ?MODULE, Deb, Chs) end. system_continue(Parent, Deb, Chs) -> diff --git a/lib/stdlib/test/sys_sp2.erl b/lib/stdlib/test/sys_sp2.erl index 56a5e4d071..a0847b5838 100644 --- a/lib/stdlib/test/sys_sp2.erl +++ b/lib/stdlib/test/sys_sp2.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2013. All Rights Reserved. 
+%% Copyright Ericsson AB 1996-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -17,7 +17,7 @@ %% %CopyrightEnd% %% -module(sys_sp2). --export([start_link/1, stop/0]). +-export([start_link/1]). -export([alloc/0, free/1]). -export([init/1]). -export([system_continue/3, system_terminate/4, @@ -30,10 +30,6 @@ start_link(NumCh) -> proc_lib:start_link(?MODULE, init, [[self(),NumCh]]). -stop() -> - ?MODULE ! stop, - ok. - alloc() -> ?MODULE ! {self(), alloc}, receive @@ -45,11 +41,6 @@ free(Ch) -> ?MODULE ! {free, Ch}, ok. -%% can't use 2-tuple for state here as we do in sys_sp1, since the 2-tuple -%% is not compatible with the backward compatibility handling for -%% sys:get_state in sys.erl --record(state, {alloc,free}). - init([Parent,NumCh]) -> register(?MODULE, self()), Chs = channels(NumCh), @@ -74,11 +65,7 @@ loop(Chs, Parent, Deb) -> loop(Chs2, Parent, Deb2); {system, From, Request} -> sys:handle_system_msg(Request, From, Parent, - ?MODULE, Deb, Chs); - stop -> - sys:handle_debug(Deb, fun write_debug/3, - ?MODULE, {in, stop}), - ok + ?MODULE, Deb, Chs) end. system_continue(Parent, Deb, Chs) -> @@ -91,17 +78,17 @@ write_debug(Dev, Event, Name) -> io:format(Dev, "~p event = ~p~n", [Name, Event]). channels(NumCh) -> - #state{alloc=[], free=lists:seq(1,NumCh)}. + {_Allocated=[], _Free=lists:seq(1,NumCh)}. -alloc(#state{free=[]}=Channels) -> - {{error, "no channels available"}, Channels}; -alloc(#state{alloc=Allocated, free=[H|T]}) -> - {H, #state{alloc=[H|Allocated], free=T}}. +alloc({_, []}) -> + {error, "no channels available"}; +alloc({Allocated, [H|T]}) -> + {H, {[H|Allocated], T}}. -free(Ch, #state{alloc=Alloc, free=Free}=Channels) -> +free(Ch, {Alloc, Free}=Channels) -> case lists:member(Ch, Alloc) of true -> - #state{alloc=lists:delete(Ch, Alloc), free=[Ch|Free]}; + {lists:delete(Ch, Alloc), [Ch|Free]}; false -> Channels end. diff --git a/lib/stdlib/test/zip_SUITE.erl b/lib/stdlib/test/zip_SUITE.erl index a57641ef62..d168a9d9bc 100644 --- a/lib/stdlib/test/zip_SUITE.erl +++ b/lib/stdlib/test/zip_SUITE.erl @@ -23,7 +23,7 @@ bad_zip/1, unzip_from_binary/1, unzip_to_binary/1, zip_to_binary/1, unzip_options/1, zip_options/1, list_dir_options/1, aliases/1, - openzip_api/1, zip_api/1, unzip_jar/1, + openzip_api/1, zip_api/1, open_leak/1, unzip_jar/1, compress_control/1, foldl/1]). @@ -38,7 +38,7 @@ all() -> [borderline, atomic, bad_zip, unzip_from_binary, unzip_to_binary, zip_to_binary, unzip_options, zip_options, list_dir_options, aliases, openzip_api, - zip_api, unzip_jar, compress_control, foldl]. + zip_api, open_leak, unzip_jar, compress_control, foldl]. groups() -> []. @@ -318,8 +318,46 @@ zip_api(Config) when is_list(Config) -> %% Clean up. delete_files([Names]), + ok. + +open_leak(doc) -> + ["Test that zip doesn't leak processes and ports where the " + "controlling process dies without closing an zip opened with " + "zip:zip_open/1."]; +open_leak(suite) -> []; +open_leak(Config) when is_list(Config) -> + %% Create a zip archive + Zip = "zip.zip", + {ok, Zip} = zip:zip(Zip, [], []), + + %% Open archive in a another process that dies immediately. + ZipSrv = spawn_zip(Zip, [memory]), + + %% Expect the ZipSrv process to die soon after. + true = spawned_zip_dead(ZipSrv), + + %% Clean up. + delete_files([Zip]), + ok. +spawn_zip(Zip, Options) -> + Self = self(), + spawn(fun() -> Self ! zip:zip_open(Zip, Options) end), + receive + {ok, ZipSrv} -> + ZipSrv + end. 
+ +spawned_zip_dead(ZipSrv) -> + Ref = monitor(process, ZipSrv), + receive + {'DOWN', Ref, _, ZipSrv, _} -> + true + after 1000 -> + false + end. + unzip_options(doc) -> ["Test options for unzip, only cwd and file_list currently"]; unzip_options(suite) -> diff --git a/lib/syntax_tools/src/erl_prettypr.erl b/lib/syntax_tools/src/erl_prettypr.erl index 877675772f..81272e62de 100644 --- a/lib/syntax_tools/src/erl_prettypr.erl +++ b/lib/syntax_tools/src/erl_prettypr.erl @@ -50,8 +50,7 @@ | fun((erl_syntax:syntaxTree(), _, _) -> prettypr:document()). -type clause_t() :: 'case_expr' | 'cond_expr' | 'fun_expr' | 'if_expr' | 'receive_expr' | 'try_expr' - | {'function', prettypr:document()} - | {'rule', prettypr:document()}. + | {'function', prettypr:document()}. -record(ctxt, {prec = 0 :: integer(), sub_indent = 2 :: non_neg_integer(), @@ -587,8 +586,6 @@ lay_2(Node, Ctxt) -> make_case_clause(D1, D2, D3, Ctxt); try_expr -> make_case_clause(D1, D2, D3, Ctxt); - {rule, N} -> - make_rule_clause(N, D1, D2, D3, Ctxt); undefined -> %% If a clause is formatted out of context, we %% use a "fun-expression" clause style. @@ -851,14 +848,10 @@ lay_2(Node, Ctxt) -> floating(text(".")), lay(erl_syntax:record_access_field(Node), set_prec(Ctxt, PrecR))), - D3 = case erl_syntax:record_access_type(Node) of - none -> - D2; - T -> - beside(beside(floating(text("#")), - lay(T, reset_prec(Ctxt))), - D2) - end, + T = erl_syntax:record_access_type(Node), + D3 = beside(beside(floating(text("#")), + lay(T, reset_prec(Ctxt))), + D2), maybe_parentheses(beside(D1, D3), Prec, Ctxt); record_expr -> @@ -926,15 +919,6 @@ lay_2(Node, Ctxt) -> D2 = lay(erl_syntax:map_field_exact_value(Node), Ctxt1), par([D1, floating(text(":=")), D2], Ctxt1#ctxt.break_indent); - rule -> - %% Comments on the name will be repeated; cf. - %% `function'. - Ctxt1 = reset_prec(Ctxt), - D1 = lay(erl_syntax:rule_name(Node), Ctxt1), - D2 = lay_clauses(erl_syntax:rule_clauses(Node), - {rule, D1}, Ctxt1), - beside(D2, floating(text("."))); - size_qualifier -> Ctxt1 = set_prec(Ctxt, max_prec()), D1 = lay(erl_syntax:size_qualifier_body(Node), Ctxt1), @@ -1073,10 +1057,6 @@ make_fun_clause_head(N, P, Ctxt) -> beside(N, D) end. -make_rule_clause(N, P, G, B, Ctxt) -> - D = make_fun_clause_head(N, P, Ctxt), - append_rule_body(B, append_guard(G, D, Ctxt), Ctxt). - make_case_clause(P, G, B, Ctxt) -> append_clause_body(B, append_guard(G, P, Ctxt), Ctxt). @@ -1092,9 +1072,6 @@ make_if_clause(_P, G, B, Ctxt) -> append_clause_body(B, D, Ctxt) -> append_clause_body(B, D, floating(text(" ->")), Ctxt). -append_rule_body(B, D, Ctxt) -> - append_clause_body(B, D, floating(text(" :-")), Ctxt). - append_clause_body(B, D, S, Ctxt) -> sep([beside(D, S), nest(Ctxt#ctxt.break_indent, B)]). 
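Editor's note -- a minimal sketch, not part of the patch: the erl_prettypr change above (together with the erl_syntax changes that follow) removes the Mnemosyne-era record access without a record type, so record_access/3 with an explicit type is now the only form. The record and field names (person, name) below are made up for illustration; only documented syntax_tools calls are used.

    -module(record_access_demo).
    -export([demo/0]).

    %% Build the syntax tree for Rec#person.name and pretty-print it.
    demo() ->
        Access = erl_syntax:record_access(erl_syntax:variable('Rec'),
                                          erl_syntax:atom(person),
                                          erl_syntax:atom(name)),
        erl_prettypr:format(Access).   %% yields the string "Rec#person.name"
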
diff --git a/lib/syntax_tools/src/erl_syntax.erl b/lib/syntax_tools/src/erl_syntax.erl index 40372a2106..3f2a3e05dd 100644 --- a/lib/syntax_tools/src/erl_syntax.erl +++ b/lib/syntax_tools/src/erl_syntax.erl @@ -254,7 +254,6 @@ receive_expr_action/1, receive_expr_clauses/1, receive_expr_timeout/1, - record_access/2, record_access/3, record_access_argument/1, record_access_field/1, @@ -271,10 +270,6 @@ record_index_expr/2, record_index_expr_field/1, record_index_expr_type/1, - rule/2, - rule_arity/1, - rule_clauses/1, - rule_name/1, size_qualifier/2, size_qualifier_argument/1, size_qualifier_body/1, @@ -472,19 +467,16 @@ %% <td>record_field</td> %% </tr><tr> %% <td>record_index_expr</td> -%% <td>rule</td> %% <td>size_qualifier</td> %% <td>string</td> -%% </tr><tr> %% <td>text</td> +%% </tr><tr> %% <td>try_expr</td> %% <td>tuple</td> %% <td>underscore</td> -%% </tr><tr> %% <td>variable</td> +%% </tr><tr> %% <td>warning_marker</td> -%% <td></td> -%% <td></td> %% </tr> %% </table></center> %% @@ -540,7 +532,6 @@ %% @see record_expr/2 %% @see record_field/2 %% @see record_index_expr/2 -%% @see rule/2 %% @see size_qualifier/2 %% @see string/1 %% @see text/1 @@ -607,10 +598,8 @@ type(Node) -> {record, _, _, _, _} -> record_expr; {record, _, _, _} -> record_expr; {record_field, _, _, _, _} -> record_access; - {record_field, _, _, _} -> record_access; {record_index, _, _, _} -> record_index_expr; {remote, _, _, _} -> module_qualifier; - {rule, _, _, _, _} -> rule; {'try', _, _, _, _, _} -> try_expr; {tuple, _, _} -> tuple; _ -> @@ -693,10 +682,9 @@ is_leaf(Node) -> %% <td>`comment'</td> %% <td>`error_marker'</td> %% <td>`eof_marker'</td> -%% <td>`form_list'</td> %% </tr><tr> +%% <td>`form_list'</td> %% <td>`function'</td> -%% <td>`rule'</td> %% <td>`warning_marker'</td> %% <td>`text'</td> %% </tr> @@ -709,7 +697,6 @@ is_leaf(Node) -> %% @see error_marker/1 %% @see form_list/1 %% @see function/2 -%% @see rule/2 %% @see warning_marker/1 -spec is_form(syntaxTree()) -> boolean(). @@ -722,7 +709,6 @@ is_form(Node) -> eof_marker -> true; error_marker -> true; form_list -> true; - rule -> true; warning_marker -> true; text -> true; _ -> false @@ -3323,6 +3309,11 @@ attribute_arguments(Node) -> [set_pos( list(unfold_function_names(Data, Pos)), Pos)]; + optional_callbacks -> + D = try list(unfold_function_names(Data, Pos)) + catch _:_ -> abstract(Data) + end, + [set_pos(D, Pos)]; import -> {Module, Imports} = Data, [set_pos(atom(Module), Pos), @@ -3475,7 +3466,6 @@ module_qualifier_body(Node) -> %% @see function_clauses/1 %% @see function_arity/1 %% @see is_form/1 -%% @see rule/2 %% Don't use the name 'function' for this record, to avoid confusion with %% the tuples on the form {function,Name,Arity} used by erl_parse. @@ -4305,49 +4295,32 @@ record_index_expr_field(Node) -> %% ===================================================================== -%% @equiv record_access(Argument, none, Field) - --spec record_access(syntaxTree(), syntaxTree()) -> syntaxTree(). - -record_access(Argument, Field) -> - record_access(Argument, none, Field). - - -%% ===================================================================== -%% @doc Creates an abstract record field access expression. If -%% `Type' is not `none', the result represents -%% "<code><em>Argument</em>#<em>Type</em>.<em>Field</em></code>". -%% -%% If `Type' is `none', the result represents -%% "<code><em>Argument</em>.<em>Field</em></code>". This is a special -%% form only allowed within Mnemosyne queries. 
+%% @doc Creates an abstract record field access expression. The result +%% represents "<code><em>Argument</em>#<em>Type</em>.<em>Field</em></code>". %% -%% @see record_access/2 %% @see record_access_argument/1 %% @see record_access_type/1 %% @see record_access_field/1 %% @see record_expr/3 -record(record_access, {argument :: syntaxTree(), - type :: 'none' | syntaxTree(), + type :: syntaxTree(), field :: syntaxTree()}). %% type(Node) = record_access %% data(Node) = #record_access{argument :: Argument, type :: Type, %% field :: Field} %% -%% Argument = Field = syntaxTree() -%% Type = none | syntaxTree() +%% Argument = Type = Field = syntaxTree() %% %% `erl_parse' representation: %% %% {record_field, Pos, Argument, Type, Field} -%% {record_field, Pos, Argument, Field} %% %% Argument = Field = erl_parse() %% Type = atom() --spec record_access(syntaxTree(), 'none' | syntaxTree(), syntaxTree()) -> +-spec record_access(syntaxTree(), syntaxTree(), syntaxTree()) -> syntaxTree(). record_access(Argument, Type, Field) -> @@ -4360,16 +4333,11 @@ revert_record_access(Node) -> Argument = record_access_argument(Node), Type = record_access_type(Node), Field = record_access_field(Node), - if Type =:= none -> - {record_field, Pos, Argument, Field}; - true -> - case type(Type) of - atom -> - {record_field, Pos, - Argument, concrete(Type), Field}; - _ -> - Node - end + case type(Type) of + atom -> + {record_field, Pos, Argument, concrete(Type), Field}; + _ -> + Node end. @@ -4382,8 +4350,6 @@ revert_record_access(Node) -> record_access_argument(Node) -> case unwrap(Node) of - {record_field, _, Argument, _} -> - Argument; {record_field, _, Argument, _, _} -> Argument; Node1 -> @@ -4392,21 +4358,14 @@ record_access_argument(Node) -> %% ===================================================================== -%% @doc Returns the type subtree of a `record_access' node, -%% if any. If `Node' represents -%% "<code><em>Argument</em>.<em>Field</em></code>", `none' -%% is returned, otherwise if `Node' represents -%% "<code><em>Argument</em>#<em>Type</em>.<em>Field</em></code>", -%% `Type' is returned. +%% @doc Returns the type subtree of a `record_access' node. %% %% @see record_access/3 --spec record_access_type(syntaxTree()) -> 'none' | syntaxTree(). +-spec record_access_type(syntaxTree()) -> syntaxTree(). record_access_type(Node) -> case unwrap(Node) of - {record_field, _, _, _} -> - none; {record_field, Pos, _, Type, _} -> set_pos(atom(Type), Pos); Node1 -> @@ -4423,8 +4382,6 @@ record_access_type(Node) -> record_access_field(Node) -> case unwrap(Node) of - {record_field, _, _, Field} -> - Field; {record_field, _, _, _, Field} -> Field; Node1 -> @@ -4803,117 +4760,6 @@ binary_comp_body(Node) -> %% ===================================================================== -%% @doc Creates an abstract Mnemosyne rule. If `Clauses' is -%% `[C1, ..., Cn]', the results represents -%% "<code><em>Name</em> <em>C1</em>; ...; <em>Name</em> -%% <em>Cn</em>.</code>". More exactly, if each `Ci' -%% represents "<code>(<em>Pi1</em>, ..., <em>Pim</em>) <em>Gi</em> -> -%% <em>Bi</em></code>", then the result represents -%% "<code><em>Name</em>(<em>P11</em>, ..., <em>P1m</em>) <em>G1</em> :- -%% <em>B1</em>; ...; <em>Name</em>(<em>Pn1</em>, ..., <em>Pnm</em>) -%% <em>Gn</em> :- <em>Bn</em>.</code>". Rules are source code forms. -%% -%% @see rule_name/1 -%% @see rule_clauses/1 -%% @see rule_arity/1 -%% @see is_form/1 -%% @see function/2 - --record(rule, {name :: syntaxTree(), clauses :: [syntaxTree()]}). 
- -%% type(Node) = rule -%% data(Node) = #rule{name :: Name, clauses :: Clauses} -%% -%% Name = syntaxTree() -%% Clauses = [syntaxTree()] -%% -%% (See `function' for notes on why the arity is not stored.) -%% -%% `erl_parse' representation: -%% -%% {rule, Pos, Name, Arity, Clauses} -%% -%% Name = atom() -%% Arity = integer() -%% Clauses = [Clause] \ [] -%% Clause = {clause, ...} -%% -%% where the number of patterns in each clause should be equal to -%% the integer `Arity'; see `clause' for documentation on -%% `erl_parse' clauses. - --spec rule(syntaxTree(), [syntaxTree()]) -> syntaxTree(). - -rule(Name, Clauses) -> - tree(rule, #rule{name = Name, clauses = Clauses}). - -revert_rule(Node) -> - Name = rule_name(Node), - Clauses = [revert_clause(C) || C <- rule_clauses(Node)], - Pos = get_pos(Node), - case type(Name) of - atom -> - A = rule_arity(Node), - {rule, Pos, concrete(Name), A, Clauses}; - _ -> - Node - end. - - -%% ===================================================================== -%% @doc Returns the name subtree of a `rule' node. -%% -%% @see rule/2 - --spec rule_name(syntaxTree()) -> syntaxTree(). - -rule_name(Node) -> - case unwrap(Node) of - {rule, Pos, Name, _, _} -> - set_pos(atom(Name), Pos); - Node1 -> - (data(Node1))#rule.name - end. - -%% ===================================================================== -%% @doc Returns the list of clause subtrees of a `rule' node. -%% -%% @see rule/2 - --spec rule_clauses(syntaxTree()) -> [syntaxTree()]. - -rule_clauses(Node) -> - case unwrap(Node) of - {rule, _, _, _, Clauses} -> - Clauses; - Node1 -> - (data(Node1))#rule.clauses - end. - -%% ===================================================================== -%% @doc Returns the arity of a `rule' node. The result is the -%% number of parameter patterns in the first clause of the rule; -%% subsequent clauses are ignored. -%% -%% An exception is thrown if `rule_clauses(Node)' returns -%% an empty list, or if the first element of that list is not a syntax -%% tree `C' of type `clause' such that -%% `clause_patterns(C)' is a nonempty list. -%% -%% @see rule/2 -%% @see rule_clauses/1 -%% @see clause/3 -%% @see clause_patterns/1 - --spec rule_arity(syntaxTree()) -> arity(). - -rule_arity(Node) -> - %% Note that this never accesses the arity field of - %% `erl_parse' rule nodes. - length(clause_patterns(hd(rule_clauses(Node)))). - - -%% ===================================================================== %% @doc Creates an abstract generator. The result represents %% "<code><em>Pattern</em> <- <em>Body</em></code>". %% @@ -6135,6 +5981,13 @@ abstract_tail(H, T) -> %% {@link char/1} function to explicitly create an abstract %% character.) %% +%% Note: `arity_qualifier' nodes are recognized. This is to follow The +%% Erlang Parser when it comes to wild attributes: both {F, A} and F/A +%% are recognized, which makes it possible to turn wild attributes +%% into recognized attributes without at the same time making it +%% impossible to compile files using the new syntax with the old +%% version of the Erlang Compiler. +%% %% @see abstract/1 %% @see is_literal/1 %% @see char/1 @@ -6184,6 +6037,20 @@ concrete(Node) -> {value, concrete(F), []} end, [], true), B; + arity_qualifier -> + A = erl_syntax:arity_qualifier_argument(Node), + case erl_syntax:type(A) of + integer -> + F = erl_syntax:arity_qualifier_body(Node), + case erl_syntax:type(F) of + atom -> + {F, A}; + _ -> + erlang:error({badarg, Node}) + end; + _ -> + erlang:error({badarg, Node}) + end; _ -> erlang:error({badarg, Node}) end. 
@@ -6377,8 +6244,6 @@ revert_root(Node) -> revert_record_expr(Node); record_index_expr -> revert_record_index_expr(Node); - rule -> - revert_rule(Node); string -> revert_string(Node); try_expr -> @@ -6635,15 +6500,9 @@ subtrees(T) -> receive_expr_action(T)] end; record_access -> - case record_access_type(T) of - none -> - [[record_access_argument(T)], - [record_access_field(T)]]; - R -> - [[record_access_argument(T)], - [R], - [record_access_field(T)]] - end; + [[record_access_argument(T)], + [record_access_type(T)], + [record_access_field(T)]]; record_expr -> case record_expr_argument(T) of none -> @@ -6664,8 +6523,6 @@ subtrees(T) -> record_index_expr -> [[record_index_expr_type(T)], [record_index_expr_field(T)]]; - rule -> - [[rule_name(T)], rule_clauses(T)]; size_qualifier -> [[size_qualifier_body(T)], [size_qualifier_argument(T)]]; @@ -6760,8 +6617,6 @@ make_tree(parentheses, [[E]]) -> parentheses(E); make_tree(prefix_expr, [[F], [A]]) -> prefix_expr(F, A); make_tree(receive_expr, [C]) -> receive_expr(C); make_tree(receive_expr, [C, [E], A]) -> receive_expr(C, E, A); -make_tree(record_access, [[E], [F]]) -> - record_access(E, F); make_tree(record_access, [[E], [T], [F]]) -> record_access(E, T, F); make_tree(record_expr, [[T], F]) -> record_expr(T, F); @@ -6770,7 +6625,6 @@ make_tree(record_field, [[N]]) -> record_field(N); make_tree(record_field, [[N], [E]]) -> record_field(N, E); make_tree(record_index_expr, [[T], [F]]) -> record_index_expr(T, F); -make_tree(rule, [[N], C]) -> rule(N, C); make_tree(size_qualifier, [[N], [A]]) -> size_qualifier(N, A); make_tree(try_expr, [B, C, H, A]) -> try_expr(B, C, H, A); make_tree(tuple, [E]) -> tuple(E). diff --git a/lib/syntax_tools/src/erl_syntax_lib.erl b/lib/syntax_tools/src/erl_syntax_lib.erl index 2f0488abec..5b5b18d15b 100644 --- a/lib/syntax_tools/src/erl_syntax_lib.erl +++ b/lib/syntax_tools/src/erl_syntax_lib.erl @@ -35,8 +35,7 @@ analyze_function_name/1, analyze_implicit_fun/1, analyze_import_attribute/1, analyze_module_attribute/1, analyze_record_attribute/1, analyze_record_expr/1, - analyze_record_field/1, analyze_rule/1, - analyze_wild_attribute/1, annotate_bindings/1, + analyze_record_field/1, analyze_wild_attribute/1, annotate_bindings/1, annotate_bindings/2, fold/3, fold_subtrees/3, foldl_listlist/3, function_name_expansions/1, is_fail_expr/1, limit/2, limit/3, map/2, map_subtrees/2, mapfold/3, mapfold_subtrees/3, @@ -527,8 +526,6 @@ vann(Tree, Env) -> vann_try_expr(Tree, Env); function -> vann_function(Tree, Env); - rule -> - vann_rule(Tree, Env); fun_expr -> vann_fun_expr(Tree, Env); list_comp -> @@ -569,15 +566,6 @@ vann_function(Tree, Env) -> Bound = [], {ann_bindings(Tree1, Env, Bound, Free), Bound, Free}. -vann_rule(Tree, Env) -> - Cs = erl_syntax:rule_clauses(Tree), - {Cs1, {_, Free}} = vann_clauses(Cs, Env), - N = erl_syntax:rule_name(Tree), - {N1, _, _} = vann(N, Env), - Tree1 = rewrite(Tree, erl_syntax:rule(N1, Cs1)), - Bound = [], - {ann_bindings(Tree1, Env, Bound, Free), Bound, Free}. - vann_fun_expr(Tree, Env) -> Cs = erl_syntax:fun_expr_clauses(Tree), {Cs1, {_, Free}} = vann_clauses(Cs, Env), @@ -946,7 +934,7 @@ is_fail_expr(E) -> %% %% Forms = syntaxTree() | [syntaxTree()] %% Key = attributes | errors | exports | functions | imports -%% | module | records | rules | warnings +%% | module | records | warnings %% %% @doc Analyzes a sequence of "program forms". The given %% `Forms' may be a single syntax tree of type @@ -1047,16 +1035,6 @@ is_fail_expr(E) -> %% that each record name occurs at most once in the list. 
The %% order of listing is not defined.</dd> %% -%% <dt>`{rules, Rules}'</dt> -%% <dd><ul> -%% <li>`Rules = [{atom(), integer()}]'</li> -%% </ul> -%% `Rules' is a list of the names of the rules that are -%% defined in `Forms' (cf. -%% `analyze_rule/1'). We do not guarantee that each -%% name occurs at most once in the list. The order of listing is -%% not defined.</dd> -%% %% <dt>`{warnings, Warnings}'</dt> %% <dd><ul> %% <li>`Warnings = [term()]'</li> @@ -1074,12 +1052,11 @@ is_fail_expr(E) -> %% @see analyze_import_attribute/1 %% @see analyze_record_attribute/1 %% @see analyze_function/1 -%% @see analyze_rule/1 %% @see erl_syntax:error_marker_info/1 %% @see erl_syntax:warning_marker_info/1 -type key() :: 'attributes' | 'errors' | 'exports' | 'functions' | 'imports' - | 'module' | 'records' | 'rules' | 'warnings'. + | 'module' | 'records' | 'warnings'. -type info_pair() :: {key(), term()}. -spec analyze_forms(erl_syntax:forms()) -> [info_pair()]. @@ -1099,8 +1076,6 @@ collect_form(Node, Info) -> Info; {function, Name} -> finfo_add_function(Name, Info); - {rule, Name} -> - finfo_add_rule(Name, Info); {error_marker, Data} -> finfo_add_error(Data, Info); {warning_marker, Data} -> @@ -1136,8 +1111,7 @@ collect_attribute(_, {N, V}, Info) -> records = [] :: [{atom(), [{atom(), field_default()}]}], errors = [] :: [term()], warnings = [] :: [term()], - functions = [] :: [{atom(), arity()}], - rules = [] :: [{atom(), arity()}]}). + functions = [] :: [{atom(), arity()}]}). -type field_default() :: 'none' | erl_syntax:syntaxTree(). @@ -1183,9 +1157,6 @@ finfo_add_warning(R, Info) -> finfo_add_function(F, Info) -> Info#forms{functions = [F | Info#forms.functions]}. -finfo_add_rule(F, Info) -> - Info#forms{rules = [F | Info#forms.rules]}. - finfo_to_list(Info) -> [{Key, Value} || {Key, {value, Value}} <- @@ -1197,8 +1168,7 @@ finfo_to_list(Info) -> {records, list_value(Info#forms.records)}, {errors, list_value(Info#forms.errors)}, {warnings, list_value(Info#forms.warnings)}, - {functions, list_value(Info#forms.functions)}, - {rules, list_value(Info#forms.rules)} + {functions, list_value(Info#forms.functions)} ]]. list_value([]) -> @@ -1229,10 +1199,6 @@ list_value(List) -> %% %% <dd>where `Info = analyze_function(Node)'.</dd> %% -%% <dt>`{rule, Info}'</dt> -%% -%% <dd>where `Info = analyze_rule(Node)'.</dd> -%% %% <dt>`{warning_marker, Info}'</dt> %% %% <dd>where `Info = @@ -1245,7 +1211,6 @@ list_value(List) -> %% %% @see analyze_attribute/1 %% @see analyze_function/1 -%% @see analyze_rule/1 %% @see erl_syntax:is_form/1 %% @see erl_syntax:error_marker_info/1 %% @see erl_syntax:warning_marker_info/1 @@ -1258,8 +1223,6 @@ analyze_form(Node) -> {attribute, analyze_attribute(Node)}; function -> {function, analyze_function(Node)}; - rule -> - {rule, analyze_rule(Node)}; error_marker -> {error_marker, erl_syntax:error_marker_info(Node)}; warning_marker -> @@ -1669,7 +1632,7 @@ analyze_record_attribute_tuple(Node) -> %% <dt>`record_expr':</dt> %% <dd>`{atom(), [{atom(), Value}]}'</dd> %% <dt>`record_access':</dt> -%% <dd>`{atom(), atom()} | atom()'</dd> +%% <dd>`{atom(), atom()}'</dd> %% <dt>`record_index_expr':</dt> %% <dd>`{atom(), atom()}'</dd> %% </dl> @@ -1679,9 +1642,7 @@ analyze_record_attribute_tuple(Node) -> %% listed in the order they appear. (See %% `analyze_record_field/1' for details on the field %% descriptors). 
For a `record_access' node, -%% `Info' represents the record name and the field name (or -%% if the record name is not included, only the field name; this is -%% allowed only in Mnemosyne-query syntax). For a +%% `Info' represents the record name and the field name. For a %% `record_index_expr' node, `Info' represents the %% record name and the name field name. %% @@ -1713,18 +1674,14 @@ analyze_record_expr(Node) -> F = erl_syntax:record_access_field(Node), case erl_syntax:type(F) of atom -> - case erl_syntax:record_access_type(Node) of - none -> - {record_access, erl_syntax:atom_value(F)}; - A -> - case erl_syntax:type(A) of - atom -> - {record_access, - {erl_syntax:atom_value(A), - erl_syntax:atom_value(F)}}; - _ -> - throw(syntax_error) - end + A = erl_syntax:record_access_type(Node), + case erl_syntax:type(A) of + atom -> + {record_access, + {erl_syntax:atom_value(A), + erl_syntax:atom_value(F)}}; + _ -> + throw(syntax_error) end; _ -> throw(syntax_error) @@ -1835,8 +1792,6 @@ analyze_file_attribute(Node) -> %% The evaluation throws `syntax_error' if %% `Node' does not represent a well-formed function %% definition. -%% -%% @see analyze_rule/1 -spec analyze_function(erl_syntax:syntaxTree()) -> {atom(), arity()}. @@ -1857,37 +1812,6 @@ analyze_function(Node) -> %% ===================================================================== -%% @spec analyze_rule(Node::syntaxTree()) -> {atom(), integer()} -%% -%% @doc Returns the name and arity of a Mnemosyne rule. The result is a -%% pair `{Name, A}' if `Node' represents a rule -%% "`Name(<em>P_1</em>, ..., <em>P_A</em>) :- ...'". -%% -%% The evaluation throws `syntax_error' if -%% `Node' does not represent a well-formed Mnemosyne -%% rule. -%% -%% @see analyze_function/1 - --spec analyze_rule(erl_syntax:syntaxTree()) -> {atom(), arity()}. - -analyze_rule(Node) -> - case erl_syntax:type(Node) of - rule -> - N = erl_syntax:rule_name(Node), - case erl_syntax:type(N) of - atom -> - {erl_syntax:atom_value(N), - erl_syntax:rule_arity(Node)}; - _ -> - throw(syntax_error) - end; - _ -> - throw(syntax_error) - end. - - -%% ===================================================================== %% @spec analyze_implicit_fun(Node::syntaxTree()) -> FunctionName %% %% FunctionName = atom() | {atom(), integer()} diff --git a/lib/syntax_tools/src/erl_tidy.erl b/lib/syntax_tools/src/erl_tidy.erl index 38e0c2099b..db7f0939a3 100644 --- a/lib/syntax_tools/src/erl_tidy.erl +++ b/lib/syntax_tools/src/erl_tidy.erl @@ -792,16 +792,11 @@ keep_form(Form, Used, Opts) -> N = erl_syntax_lib:analyze_function(Form), case sets:is_element(N, Used) of false -> - report_removed_def("function", N, Form, Opts), - false; - true -> - true - end; - rule -> - N = erl_syntax_lib:analyze_rule(Form), - case sets:is_element(N, Used) of - false -> - report_removed_def("rule", N, Form, Opts), + {F, A} = N, + File = proplists:get_value(file, Opts, ""), + report({File, erl_syntax:get_pos(Form), + "removing unused function `~w/~w'."}, + [F, A], Opts), false; true -> true @@ -823,12 +818,6 @@ keep_form(Form, Used, Opts) -> true end. -report_removed_def(Type, {N, A}, Form, Opts) -> - File = proplists:get_value(file, Opts, ""), - report({File, erl_syntax:get_pos(Form), - "removing unused ~s `~w/~w'."}, - [Type, N, A], Opts). 
- collect_functions(Forms) -> lists:foldl( fun (F, {Names, Defs}) -> @@ -837,10 +826,6 @@ collect_functions(Forms) -> N = erl_syntax_lib:analyze_function(F), {sets:add_element(N, Names), dict:store(N, {F, []}, Defs)}; - rule -> - N = erl_syntax_lib:analyze_rule(F), - {sets:add_element(N, Names), - dict:store(N, {F, []}, Defs)}; _ -> {Names, Defs} end @@ -855,11 +840,6 @@ update_forms([F | Fs], Defs, Imports, Opts) -> {F1, Fs1} = dict:fetch(N, Defs), [F1 | lists:reverse(Fs1)] ++ update_forms(Fs, Defs, Imports, Opts); - rule -> - N = erl_syntax_lib:analyze_rule(F), - {F1, Fs1} = dict:fetch(N, Defs), - [F1 | lists:reverse(Fs1)] ++ update_forms(Fs, Defs, Imports, - Opts); attribute -> [update_attribute(F, Imports, Opts) | update_forms(Fs, Defs, Imports, Opts)]; diff --git a/lib/syntax_tools/src/igor.erl b/lib/syntax_tools/src/igor.erl index 0420508f2a..eac5af5540 100644 --- a/lib/syntax_tools/src/igor.erl +++ b/lib/syntax_tools/src/igor.erl @@ -1713,8 +1713,6 @@ transform(Tree, Env, St) -> transform_function(Tree, Env, St); implicit_fun -> transform_implicit_fun(Tree, Env, St); - rule -> - transform_rule(Tree, Env, St); record_expr -> transform_record(Tree, Env, St); record_index_expr -> @@ -1778,27 +1776,6 @@ renaming_note(Name) -> rename_atom(Node, Atom) -> rewrite(Node, erl_syntax:atom(Atom)). -%% Renaming Mnemosyne rules (just like function definitions) - -transform_rule(T, Env, St) -> - {T1, St1} = default_transform(T, Env, St), - F = erl_syntax_lib:analyze_rule(T1), - {V, Text} = case (Env#code.map)(F) of - F -> - %% Not renamed - {none, []}; - {Atom, _Arity} -> - %% Renamed - Cs = erl_syntax:rule_clauses(T1), - N = rename_atom( - erl_syntax:rule_name(T1), - Atom), - T2 = rewrite(T1, - erl_syntax:rule(N, Cs)), - {{value, T2}, renaming_note(Atom)} - end, - {maybe_modified(V, T1, 2, Text, Env), St1}. - %% Renaming "implicit fun" expressions (done quietly). transform_implicit_fun(T, Env, St) -> diff --git a/lib/test_server/src/test_server.erl b/lib/test_server/src/test_server.erl index 1011791899..c881108c37 100644 --- a/lib/test_server/src/test_server.erl +++ b/lib/test_server/src/test_server.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 1996-2014. All Rights Reserved. +%% Copyright Ericsson AB 1996-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -178,68 +178,35 @@ module_names(Beams) -> do_cover_compile(Modules) -> cover:start(), - pmap1(fun(M) -> do_cover_compile1(M) end,lists:usort(Modules)), + Sticky = prepare_cover_compile(Modules,[]), + R = cover:compile_beam(Modules), + [warn_compile(Error) || Error <- R,element(1,Error)=/=ok], + [code:stick_mod(M) || M <- Sticky], ok. -do_cover_compile1(M) -> +warn_compile({error,{Reason,Module}}) -> + io:fwrite("\nWARNING: Could not cover compile ~ts: ~p\n", + [Module,{error,Reason}]). 
+ +%% Make sure all modules are loaded and unstick if sticky +prepare_cover_compile([M|Ms],Sticky) -> case {code:is_sticky(M),code:is_loaded(M)} of {true,_} -> code:unstick_mod(M), - case cover:compile_beam(M) of - {ok,_} -> - ok; - Error -> - io:fwrite("\nWARNING: Could not cover compile ~w: ~p\n", - [M,Error]) - end, - code:stick_mod(M); + prepare_cover_compile(Ms,[M|Sticky]); {false,false} -> case code:load_file(M) of {module,_} -> - do_cover_compile1(M); + prepare_cover_compile([M|Ms],Sticky); Error -> - io:fwrite("\nWARNING: Could not load ~w: ~p\n",[M,Error]) + io:fwrite("\nWARNING: Could not load ~w: ~p\n",[M,Error]), + prepare_cover_compile(Ms,Sticky) end; {false,_} -> - case cover:compile_beam(M) of - {ok,_} -> - ok; - Error -> - io:fwrite("\nWARNING: Could not cover compile ~w: ~p\n", - [M,Error]) - end - end. - -pmap1(Fun,List) -> - NTot = length(List), - NProcs = erlang:system_info(schedulers) * 2, - NPerProc = (NTot div NProcs) + 1, - - {[],Pids} = - lists:foldr( - fun(_,{L,Ps}) -> - {L1,L2} = if length(L)>=NPerProc -> lists:split(NPerProc,L); - true -> {L,[]} % last chunk - end, - {P,_Ref} = - spawn_monitor(fun() -> - exit(lists:map(Fun,L1)) - end), - {L2,[P|Ps]} - end, - {List,[]}, - lists:seq(1,NProcs)), - collect(Pids,[]). - -collect([],Acc) -> - lists:append(Acc); -collect([Pid|Pids],Acc) -> - receive - {'DOWN', _Ref, process, Pid, Result} -> - %% collect(lists:delete(Pid,Pids),[Result|Acc]) - collect(Pids,[Result|Acc]) - end. - + prepare_cover_compile(Ms,Sticky) + end; +prepare_cover_compile([],Sticky) -> + Sticky. %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% cover_analyse(Dir,#cover{level=Analyse,mods=Modules,stop=Stop) -> @@ -269,45 +236,40 @@ collect([Pid|Pids],Acc) -> %% after the test is completed. cover_analyse(Dir,#cover{level=Analyse,mods=Modules,stop=Stop}) -> io:fwrite(user, "Cover analysing... ", []), - DetailsFun = + {ATFOk,ATFFail} = case Analyse of details -> case cover:export(filename:join(Dir,"all.coverdata")) of ok -> - fun(M) -> - OutFile = filename:join(Dir, - atom_to_list(M) ++ - ".COVER.html"), - case cover:analyse_to_file(M,OutFile,[html]) of - {ok,_} -> - {file,OutFile}; - Error -> - Error - end - end; + {result,Ok1,Fail1} = + cover:analyse_to_file(Modules,[{outdir,Dir},html]), + {lists:map(fun(OutFile) -> + M = list_to_atom( + filename:basename( + filename:rootname(OutFile, + ".COVER.html") + ) + ), + {M,{file,OutFile}} + end, Ok1), + lists:map(fun({Reason,M}) -> + {M,{error,Reason}} + end, Fail1)}; Error -> - fun(_) -> Error end + {[],lists:map(fun(M) -> {M,Error} end, Modules)} end; overview -> case cover:export(filename:join(Dir,"all.coverdata")) of ok -> - fun(_) -> undefined end; + {[],lists:map(fun(M) -> {M,undefined} end, Modules)}; Error -> - fun(_) -> Error end + {[],lists:map(fun(M) -> {M,Error} end, Modules)} end end, - R = pmap2( - fun(M) -> - case cover:analyse(M,module) of - {ok,{M,{Cov,NotCov}}} -> - {M,{Cov,NotCov,DetailsFun(M)}}; - Err -> - io:fwrite(user, - "\nWARNING: Analysis failed for ~w. Reason: ~p\n", - [M,Err]), - {M,Err} - end - end, Modules), + {result,AOk,AFail} = cover:analyse(Modules,module), + R0 = merge_analysis_results(AOk,ATFOk++ATFFail,[]) ++ + [{M,{error,Reason}} || {Reason,M} <- AFail], + R = lists:sort(R0), io:fwrite(user, "done\n\n", []), case Stop of @@ -320,19 +282,15 @@ cover_analyse(Dir,#cover{level=Analyse,mods=Modules,stop=Stop}) -> end, R. -pmap2(Fun,List) -> - Collector = self(), - Pids = lists:map(fun(E) -> - spawn(fun() -> - Collector ! 
{res,self(),Fun(E)} - end) - end, List), - lists:map(fun(Pid) -> - receive - {res,Pid,Res} -> - Res - end - end, Pids). +merge_analysis_results([{M,{Cov,NotCov}}|T],ATF,Acc) -> + case lists:keytake(M,1,ATF) of + {value,{_,R},ATF1} -> + merge_analysis_results(T,ATF1,[{M,{Cov,NotCov,R}}|Acc]); + false -> + merge_analysis_results(T,ATF,Acc) + end; +merge_analysis_results([],_,Acc) -> + Acc. do_cover_for_node(Node,CoverFunc) -> do_cover_for_node(Node,CoverFunc,true). diff --git a/lib/tools/doc/src/cover.xml b/lib/tools/doc/src/cover.xml index 07ffa65e3d..914baa7977 100644 --- a/lib/tools/doc/src/cover.xml +++ b/lib/tools/doc/src/cover.xml @@ -5,7 +5,7 @@ <header> <copyright> <year>2001</year> - <year>2013</year> + <year>2015</year> <holder>Ericsson AB, All Rights Reserved</holder> </copyright> <legalnotice> @@ -138,17 +138,18 @@ </desc> </func> <func> - <name>compile(ModFile) -> Result</name> - <name>compile(ModFile, Options) -> Result</name> - <name>compile_module(ModFile) -> Result</name> - <name>compile_module(ModFile, Options) -> Result</name> - <fsummary>Compile a module for Cover analysis.</fsummary> + <name>compile(ModFiles) -> Result | [Result]</name> + <name>compile(ModFiles, Options) -> Result | [Result]</name> + <name>compile_module(ModFiles) -> Result | [Result]</name> + <name>compile_module(ModFiles, Options) -> Result | [Result]</name> + <fsummary>Compile one or more modules for Cover analysis.</fsummary> <type> + <v>ModFiles = ModFile | [ModFile]</v> <v>ModFile = Module | File</v> <v> Module = atom()</v> <v> File = string()</v> <v>Options = [Option]</v> - <v> Option = {i,Dir} | {d,Macro} | {d,Macro,Value}</v> + <v> Option = {i,Dir} | {d,Macro} | {d,Macro,Value} | export_all</v> <d>See <c>compile:file/2.</c></d> <v>Result = {ok,Module} | {error,File} | {error,not_main_node}</v> </type> @@ -165,6 +166,9 @@ returns <c>{ok,Module}</c>. Otherwise the function returns <c>{error,File}</c>. Errors and warnings are printed as they occur.</p> + <p>If a list of <c>ModFiles</c> is given as input, a list + of <c>Result</c> will be returned. The order of the returned + list is undefined.</p> <p>Note that the internal database is (re-)initiated during the compilation, meaning any previously collected coverage data for the module will be lost.</p> @@ -194,9 +198,10 @@ </desc> </func> <func> - <name>compile_beam(ModFile) -> Result</name> - <fsummary>Compile a module for Cover analysis, using an existing beam.</fsummary> + <name>compile_beam(ModFiles) -> Result | [Result]</name> + <fsummary>Compile one or more modules for Cover analysis, using existing beam(s).</fsummary> <type> + <v>ModFiles = ModFile | [ModFile]</v> <v>ModFile = Module | BeamFile</v> <v> Module = atom()</v> <v> BeamFile = string()</v> @@ -229,6 +234,9 @@ returned.</p> <p><c>{error,BeamFile}</c> is returned if the compiled code can not be loaded on the node.</p> + <p>If a list of <c>ModFiles</c> is given as input, a list + of <c>Result</c> will be returned. 
The order of the returned + list is undefined.</p> </desc> </func> <func> @@ -251,16 +259,21 @@ </desc> </func> <func> - <name>analyse(Module) -> {ok,Answer} | {error,Error}</name> - <name>analyse(Module, Analysis) -> {ok,Answer} | {error,Error}</name> - <name>analyse(Module, Level) -> {ok,Answer} | {error,Error}</name> - <name>analyse(Module, Analysis, Level) -> {ok,Answer} | {error,Error}</name> - <fsummary>Analyse a Cover compiled module.</fsummary> + <name>analyse() -> {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Modules) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Analysis) -> {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Level) -> {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Modules, Analysis) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Modules, Level) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Analysis, Level) -> {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse(Modules, Analysis, Level) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name> + <fsummary>Analyse one or more Cover compiled modules.</fsummary> <type> - <v>Module = atom()</v> + <v>Modules = Module | [Module]</v> + <v>Module = atom() </v> <v>Analysis = coverage | calls</v> <v>Level = line | clause | function | module</v> - <v>Answer = {Module,Value} | [{Item,Value}]</v> + <v>OneResult = {ok,{Module,Value}} | {ok,[{Item,Value}]} | {error, Error}</v> <v> Item = Line | Clause | Function</v> <v> Line = {M,N}</v> <v> Clause = {M,F,A,C}</v> @@ -269,49 +282,67 @@ <v> N = A = C = integer()</v> <v> Value = {Cov,NotCov} | Calls</v> <v> Cov = NotCov = Calls = integer()</v> - <v>Error = {not_cover_compiled,Module} | not_main_node</v> + <v> Error = {not_cover_compiled,Module}</v> + <v>Ok = [{Module,Value}] | [{Item,Value}]</v> + <v>Fail = [Error]</v> </type> <desc> - <p>Performs analysis of a Cover compiled module <c>Module</c>, as + <p>Performs analysis of one or more Cover compiled modules, as specified by <c>Analysis</c> and <c>Level</c> (see above), by examining the contents of the internal database.</p> <p><c>Analysis</c> defaults to <c>coverage</c> and <c>Level</c> defaults to <c>function</c>.</p> - <p>If <c>Module</c> is not Cover compiled, the function returns - <c>{error,{not_cover_compiled,Module}}</c>.</p> - <p>HINT: It is possible to issue multiple analyse_to_file commands at - the same time. </p> + <p>If <c>Modules</c> is an atom (one module), the return will + be <c>OneResult</c>, else the return will be + <c>{result,Ok,Fail}</c>.</p> + <p>If <c>Modules</c> is not given, all modules that have data + in the cover data table, are analysed. 
Note that this + includes both cover compiled modules and imported + modules.</p> + <p>If a given module is not Cover compiled, this is indicated + by the error reason <c>{not_cover_compiled,Module}</c>.</p> </desc> </func> <func> - <name>analyse_to_file(Module) -> </name> - <name>analyse_to_file(Module,Options) -> </name> - <name>analyse_to_file(Module, OutFile) -> </name> - <name>analyse_to_file(Module, OutFile, Options) -> {ok,OutFile} | {error,Error}</name> - <fsummary>Detailed coverage analysis of a Cover compiled module.</fsummary> + <name>analyse_to_file() -> {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse_to_file(Modules) -> Answer | {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse_to_file(Options) -> {result,Ok,Fail} | {error,not_main_node}</name> + <name>analyse_to_file(Modules,Options) -> Answer | {result,Ok,Fail} | {error,not_main_node}</name> + <fsummary>Detailed coverage analysis of one or more Cover compiled modules.</fsummary> <type> + <v>Modules = Module | [Module]</v> <v>Module = atom()</v> - <v>OutFile = string()</v> + <v>OutFile = OutDir = string()</v> <v>Options = [Option]</v> - <v>Option = html</v> - <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | no_source_code_found | not_main_node</v> + <v>Option = html | {outfile,OutFile} | {outdir,OutDir}</v> + <v>Answer = {ok,OutFile} | {error,Error}</v> + <v>Ok = [OutFile]</v> + <v>Fail = [Error]</v> + <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | {no_source_code_found,Module}</v> <v> File = string()</v> <v> Reason = term()</v> </type> <desc> - <p>Makes a copy <c>OutFile</c> of the source file for a module - <c>Module</c>, where it for each executable line is specified + <p>Makes copies of the source file for the given modules, + where it for each executable line is specified how many times it has been executed.</p> <p>The output file <c>OutFile</c> defaults to <c>Module.COVER.out</c>, or <c>Module.COVER.html</c> if the option <c>html</c> was used.</p> - <p>If <c>Module</c> is not Cover compiled, the function returns - <c>{error,{not_cover_compiled,Module}}</c>.</p> + <p>If <c>Modules</c> is an atom (one module), the return will + be <c>Answer</c>, else the return will be a + list, <c>{result,Ok,Fail}</c>.</p> + <p>If <c>Modules</c> is not given, all modules that have data + in the cover data table, are analysed. Note that this + includes both cover compiled modules and imported + modules.</p> + <p>If a module is not Cover compiled, this is indicated by the + error reason <c>{not_cover_compiled,Module}</c>.</p> <p>If the source file and/or the output file cannot be opened using <c>file:open/2</c>, the function returns <c>{error,{file,File,Reason}}</c> where <c>File</c> is the file name and <c>Reason</c> is the error reason.</p> - <p>If the module was cover compiled from the <c>.beam</c> + <p>If a module was cover compiled from the <c>.beam</c> file, i.e. using <c>compile_beam/1</c> or <c>compile_beam_directory/0,1</c>, it is assumed that the source code can be found in the same directory as the @@ -322,10 +353,8 @@ joining <c>../src</c> and the tail of the compiled path below a trailing <c>src</c> component, then the compiled path itself. - If no source code is found, - <c>{error,no_source_code_found}</c> is returned.</p> - <p>HINT: It is possible to issue multiple analyse_to_file commands at - the same time. 
</p> + If no source code is found, this is indicated by the error reason + <c>{no_source_code_found,Module}</c>.</p> </desc> </func> <func> @@ -339,7 +368,7 @@ <v>OutFile = string()</v> <v>Options = [Option]</v> <v>Option = html</v> - <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | no_source_code_found | not_main_node</v> + <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | {no_source_code_found,Module} | not_main_node</v> <v> File = string()</v> <v> Reason = term()</v> </type> diff --git a/lib/tools/emacs/erlang-skels.el b/lib/tools/emacs/erlang-skels.el index 78929ac510..8d2c02e455 100644 --- a/lib/tools/emacs/erlang-skels.el +++ b/lib/tools/emacs/erlang-skels.el @@ -1,7 +1,7 @@ ;; ;; %CopyrightBegin% ;; -;; Copyright Ericsson AB 2010. All Rights Reserved. +;; Copyright Ericsson AB 2010-2014. All Rights Reserved. ;; ;; The contents of this file are subject to the Erlang Public License, ;; Version 1.1, (the "License"); you may not use this file except in @@ -352,26 +352,25 @@ Please see the function `tempo-define-template'.") "%% @doc" n "%% Whenever a supervisor is started using supervisor:start_link/[2,3]," n "%% this function is called by the new process to find out about" n - "%% restart strategy, maximum restart frequency and child" n + "%% restart strategy, maximum restart intensity, and child" n "%% specifications." n "%%" n "%% @spec init(Args) -> {ok, {SupFlags, [ChildSpec]}} |" n "%% ignore |" n "%% {error, Reason}" n (erlang-skel-separator-end 2) - "init([]) ->" n> - "RestartStrategy = one_for_one," n> - "MaxRestarts = 1000," n> - "MaxSecondsBetweenRestarts = 3600," n - "" n> - "SupFlags = {RestartStrategy, MaxRestarts, MaxSecondsBetweenRestarts}," n + "init([]) ->" n "" n> - "Restart = permanent," n> - "Shutdown = 2000," n> - "Type = worker," n + "SupFlags = #{strategy => one_for_one," n> + "intensity => 1," n> + "period => 5}," n "" n> - "AChild = {'AName', {'AModule', start_link, []}," n> - "Restart, Shutdown, Type, ['AModule']}," n + "AChild = #{id => 'AName'," n> + "start => {'AModule', start_link, []}," n> + "restart => permanent," n> + "shutdown => 5000," n> + "type => worker," n> + "modules => ['AModule']}," n "" n> "{ok, {SupFlags, [AChild]}}." n n @@ -379,7 +378,7 @@ Please see the function `tempo-define-template'.") "%%% Internal functions" n (erlang-skel-double-separator-end 3) ) - "*The template of an supervisor behaviour. + "*The template of a supervisor behaviour. Please see the function `tempo-define-template'.") (defvar erlang-skel-supervisor-bridge @@ -449,7 +448,7 @@ Please see the function `tempo-define-template'.") "%%% Internal functions" n (erlang-skel-double-separator-end 3) ) - "*The template of an supervisor_bridge behaviour. + "*The template of a supervisor_bridge behaviour. Please see the function `tempo-define-template'.") (defvar erlang-skel-generic-server diff --git a/lib/tools/src/cover.erl b/lib/tools/src/cover.erl index 31754015f7..6c32c47069 100644 --- a/lib/tools/src/cover.erl +++ b/lib/tools/src/cover.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2001-2013. All Rights Reserved. +%% Copyright Ericsson AB 2001-2015. All Rights Reserved. 
%% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -77,8 +77,11 @@ compile/1, compile/2, compile_module/1, compile_module/2, compile_directory/0, compile_directory/1, compile_directory/2, compile_beam/1, compile_beam_directory/0, compile_beam_directory/1, - analyse/1, analyse/2, analyse/3, analyze/1, analyze/2, analyze/3, + analyse/0, analyse/1, analyse/2, analyse/3, + analyze/0, analyze/1, analyze/2, analyze/3, + analyse_to_file/0, analyse_to_file/1, analyse_to_file/2, analyse_to_file/3, + analyze_to_file/0, analyze_to_file/1, analyze_to_file/2, analyze_to_file/3, async_analyse_to_file/1,async_analyse_to_file/2, async_analyse_to_file/3, async_analyze_to_file/1, @@ -109,6 +112,7 @@ line = '_' % integer() }). -define(BUMP_REC_NAME,bump). +-define(CHUNK_SIZE, 20000). -record(vars, {module, % atom() Module name @@ -181,10 +185,11 @@ start(Node) when is_atom(Node) -> start(Nodes) -> call({start_nodes,remove_myself(Nodes,[])}). -%% compile(ModFile) -> -%% compile(ModFile, Options) -> -%% compile_module(ModFile) -> Result -%% compile_module(ModFile, Options) -> Result +%% compile(ModFiles) -> +%% compile(ModFiles, Options) -> +%% compile_module(ModFiles) -> Result +%% compile_module(ModFiles, Options) -> Result +%% ModFiles = ModFile | [ModFile] %% ModFile = Module | File %% Module = atom() %% File = string() @@ -198,18 +203,27 @@ compile(ModFile, Options) -> compile_module(ModFile) when is_atom(ModFile); is_list(ModFile) -> compile_module(ModFile, []). -compile_module(Module, Options) when is_atom(Module), is_list(Options) -> - compile_module(atom_to_list(Module), Options); -compile_module(File, Options) when is_list(File), is_list(Options) -> - WithExt = case filename:extension(File) of - ".erl" -> - File; - _ -> - File++".erl" - end, - AbsFile = filename:absname(WithExt), - [R] = compile_modules([AbsFile], Options), - R. +compile_module(ModFile, Options) when is_atom(ModFile); + is_list(ModFile), is_integer(hd(ModFile)) -> + [R] = compile_module([ModFile], Options), + R; +compile_module(ModFiles, Options) when is_list(Options) -> + AbsFiles = + [begin + File = + case ModFile of + _ when is_atom(ModFile) -> atom_to_list(ModFile); + _ when is_list(ModFile) -> ModFile + end, + WithExt = case filename:extension(File) of + ".erl" -> + File; + _ -> + File++".erl" + end, + filename:absname(WithExt) + end || ModFile <- ModFiles], + compile_modules(AbsFiles, Options). %% compile_directory() -> %% compile_directory(Dir) -> @@ -240,13 +254,14 @@ compile_directory(Dir, Options) when is_list(Dir), is_list(Options) -> compile_modules(Files,Options) -> Options2 = filter_options(Options), - compile_modules(Files,Options2,[]). + %% compile_modules(Files,Options2,[]). + call({compile, Files, Options2}). -compile_modules([File|Files], Options, Result) -> - R = call({compile, File, Options}), - compile_modules(Files,Options,[R|Result]); -compile_modules([],_Opts,Result) -> - lists:reverse(Result). +%% compile_modules([File|Files], Options, Result) -> +%% R = call({compile, File, Options}), +%% compile_modules(Files,Options,[R|Result]); +%% compile_modules([],_Opts,Result) -> +%% lists:reverse(Result). 
filter_options(Options) -> lists:filter(fun(Option) -> @@ -264,30 +279,17 @@ filter_options(Options) -> %% ModFile - see compile/1 %% Result - see compile/1 %% Reason = non_existing | already_cover_compiled -compile_beam(Module) when is_atom(Module) -> - case code:which(Module) of - non_existing -> +compile_beam(ModFile0) when is_atom(ModFile0); + is_list(ModFile0), is_integer(hd(ModFile0)) -> + case compile_beams([ModFile0]) of + [{error,{non_existing,_}}] -> + %% Backwards compatibility {error,non_existing}; - ?TAG -> - compile_beam(Module,?TAG); - File -> - compile_beam(Module,File) + [Result] -> + Result end; -compile_beam(File) when is_list(File) -> - {WithExt,WithoutExt} - = case filename:rootname(File,".beam") of - File -> - {File++".beam",File}; - Rootname -> - {File,Rootname} - end, - AbsFile = filename:absname(WithExt), - Module = list_to_atom(filename:basename(WithoutExt)), - compile_beam(Module,AbsFile). - -compile_beam(Module,File) -> - call({compile_beam,Module,File}). - +compile_beam(ModFiles) when is_list(ModFiles) -> + compile_beams(ModFiles). %% compile_beam_directory(Dir) -> [Result] | {error,Reason} @@ -312,19 +314,52 @@ compile_beam_directory(Dir) when is_list(Dir) -> Error end. -compile_beams(Files) -> - compile_beams(Files,[]). -compile_beams([File|Files],Result) -> - R = compile_beam(File), - compile_beams(Files,[R|Result]); -compile_beams([],Result) -> - lists:reverse(Result). +compile_beams(ModFiles0) -> + ModFiles = get_mods_and_beams(ModFiles0,[]), + call({compile_beams,ModFiles}). - -%% analyse(Module) -> -%% analyse(Module, Analysis) -> -%% analyse(Module, Level) -> -%% analyse(Module, Analysis, Level) -> {ok,Answer} | {error,Error} +get_mods_and_beams([Module|ModFiles],Acc) when is_atom(Module) -> + case code:which(Module) of + non_existing -> + get_mods_and_beams(ModFiles,[{error,{non_existing,Module}}|Acc]); + File -> + get_mods_and_beams([{Module,File}|ModFiles],Acc) + end; +get_mods_and_beams([File|ModFiles],Acc) when is_list(File) -> + {WithExt,WithoutExt} + = case filename:rootname(File,".beam") of + File -> + {File++".beam",File}; + Rootname -> + {File,Rootname} + end, + AbsFile = filename:absname(WithExt), + Module = list_to_atom(filename:basename(WithoutExt)), + get_mods_and_beams([{Module,AbsFile}|ModFiles],Acc); +get_mods_and_beams([{Module,File}|ModFiles],Acc) -> + %% Check for duplicates + case lists:keyfind(Module,2,Acc) of + {ok,Module,File} -> + %% Duplicate, but same file so ignore + get_mods_and_beams(ModFiles,Acc); + {ok,Module,_OtherFile} -> + %% Duplicate and differnet file - error + get_mods_and_beams(ModFiles,[{error,{duplicate,Module}}|Acc]); + _ -> + get_mods_and_beams(ModFiles,[{ok,Module,File}|Acc]) + end; +get_mods_and_beams([],Acc) -> + lists:reverse(Acc). + + +%% analyse(Modules) -> +%% analyse(Analysis) -> +%% analyse(Level) -> +%% analyse(Modules, Analysis) -> +%% analyse(Modules, Level) -> +%% analyse(Analysis, Level) +%% analyse(Modules, Analysis, Level) -> {ok,Answer} | {error,Error} +%% Modules = Module | [Module] %% Module = atom() %% Analysis = coverage | calls %% Level = line | clause | function | module @@ -337,48 +372,74 @@ compile_beams([],Result) -> %% N = A = C = integer() %% Value = {Cov,NotCov} | Calls %% Cov = NotCov = Calls = integer() -%% Error = {not_cover_compiled,Module} +%% Error = {not_cover_compiled,Module} | not_main_node +-define(is_analysis(__A__), + (__A__=:=coverage orelse __A__=:=calls)). 
+-define(is_level(__L__), + (__L__=:=line orelse __L__=:=clause orelse + __L__=:=function orelse __L__=:=module)). +analyse() -> + analyse('_'). + +analyse(Analysis) when ?is_analysis(Analysis) -> + analyse('_', Analysis); +analyse(Level) when ?is_level(Level) -> + analyse('_', Level); analyse(Module) -> analyse(Module, coverage). -analyse(Module, Analysis) when Analysis=:=coverage; Analysis=:=calls -> + +analyse(Analysis, Level) when ?is_analysis(Analysis) andalso + ?is_level(Level) -> + analyse('_', Analysis, Level); +analyse(Module, Analysis) when ?is_analysis(Analysis) -> analyse(Module, Analysis, function); -analyse(Module, Level) when Level=:=line; Level=:=clause; Level=:=function; - Level=:=module -> +analyse(Module, Level) when ?is_level(Level) -> analyse(Module, coverage, Level). -analyse(Module, Analysis, Level) when is_atom(Module), - Analysis=:=coverage; Analysis=:=calls, - Level=:=line; Level=:=clause; - Level=:=function; Level=:=module -> + +analyse(Module, Analysis, Level) when ?is_analysis(Analysis), + ?is_level(Level) -> call({{analyse, Analysis, Level}, Module}). +analyze() -> analyse( ). analyze(Module) -> analyse(Module). analyze(Module, Analysis) -> analyse(Module, Analysis). analyze(Module, Analysis, Level) -> analyse(Module, Analysis, Level). -%% analyse_to_file(Module) -> -%% analyse_to_file(Module, Options) -> -%% analyse_to_file(Module, OutFile) -> -%% analyse_to_file(Module, OutFile, Options) -> {ok,OutFile} | {error,Error} +%% analyse_to_file() -> +%% analyse_to_file(Modules) -> +%% analyse_to_file(Modules, Options) -> +%% Modules = Module | [Module] %% Module = atom() %% OutFile = string() %% Options = [Option] -%% Option = html +%% Option = html | {outfile,filename()} | {outdir,dirname()} %% Error = {not_cover_compiled,Module} | no_source_code_found | %% {file,File,Reason} %% File = string() %% Reason = term() -analyse_to_file(Module) when is_atom(Module) -> - analyse_to_file(Module, outfilename(Module,[]), []). -analyse_to_file(Module, []) when is_atom(Module) -> - analyse_to_file(Module, outfilename(Module,[]), []); -analyse_to_file(Module, Options) when is_atom(Module), - is_list(Options), is_atom(hd(Options)) -> - analyse_to_file(Module, outfilename(Module,Options), Options); -analyse_to_file(Module, OutFile) when is_atom(Module), is_list(OutFile) -> - analyse_to_file(Module, OutFile, []). -analyse_to_file(Module, OutFile, Options) when is_atom(Module), is_list(OutFile) -> - call({{analyse_to_file, OutFile, Options}, Module}). - +%% +%% Kept for backwards compatibility: +%% analyse_to_file(Modules, OutFile) -> +%% analyse_to_file(Modules, OutFile, Options) -> {ok,OutFile} | {error,Error} +analyse_to_file() -> + analyse_to_file('_'). +analyse_to_file(Arg) -> + case is_options(Arg) of + true -> + analyse_to_file('_',Arg); + false -> + analyse_to_file(Arg,[]) + end. +analyse_to_file(Module, OutFile) when is_list(OutFile), is_integer(hd(OutFile)) -> + %% Kept for backwards compatibility + analyse_to_file(Module, [{outfile,OutFile}]); +analyse_to_file(Module, Options) when is_list(Options) -> + call({{analyse_to_file, Options}, Module}). +analyse_to_file(Module, OutFile, Options) when is_list(OutFile) -> + %% Kept for backwards compatibility + analyse_to_file(Module,[{outfile,OutFile}|Options]). + +analyze_to_file() -> analyse_to_file(). analyze_to_file(Module) -> analyse_to_file(Module). analyze_to_file(Module, OptOrOut) -> analyse_to_file(Module, OptOrOut). 
analyze_to_file(Module, OutFile, Options) -> @@ -391,6 +452,15 @@ async_analyse_to_file(Module, OutFileOrOpts) -> async_analyse_to_file(Module, OutFile, Options) -> do_spawn(?MODULE, analyse_to_file, [Module, OutFile, Options]). +is_options([html]) -> + true; % this is not 100% safe - could be a module named html... +is_options([html|Opts]) -> + is_options(Opts); +is_options([{Opt,_}|_]) when Opt==outfile; Opt==outdir -> + true; +is_options(_) -> + false. + do_spawn(M,F,A) -> spawn_link(fun() -> case apply(M,F,A) of @@ -408,13 +478,16 @@ async_analyze_to_file(Module, OutFileOrOpts) -> async_analyze_to_file(Module, OutFile, Options) -> async_analyse_to_file(Module, OutFile, Options). -outfilename(Module,Opts) -> - case lists:member(html,Opts) of - true -> - atom_to_list(Module)++".COVER.html"; - false -> - atom_to_list(Module)++".COVER.out" - end. +outfilename(undefined, Module, HTML) -> + outfilename(Module, HTML); +outfilename(OutDir, Module, HTML) -> + filename:join(OutDir, outfilename(Module, HTML)). + +outfilename(Module, true) -> + atom_to_list(Module)++".COVER.html"; +outfilename(Module, false) -> + atom_to_list(Module)++".COVER.out". + %% export(File) %% export(File,Module) -> ok | {error,Reason} @@ -559,7 +632,7 @@ init_main(Starter) -> ,{write_concurrency, true} ]), ets:new(?COVER_CLAUSE_TABLE, [set, public, named_table]), - ets:new(?BINARY_TABLE, [set, named_table]), + ets:new(?BINARY_TABLE, [set, public, named_table]), ets:new(?COLLECTION_TABLE, [set, public, named_table]), ets:new(?COLLECTION_CLAUSE_TABLE, [set, public, named_table]), net_kernel:monitor_nodes(true), @@ -573,55 +646,19 @@ main_process_loop(State) -> reply(From, {ok,StartedNodes}), main_process_loop(State1); - {From, {compile, File, Options}} -> - case do_compile(File, Options) of - {ok, Module} -> - remote_load_compiled(State#main_state.nodes,[{Module,File}]), - reply(From, {ok, Module}), - Compiled = add_compiled(Module, File, - State#main_state.compiled), - Imported = remove_imported(Module,State#main_state.imported), - NewState = State#main_state{compiled = Compiled, - imported = Imported}, - %% This module (cover) could have been reloaded. Make - %% sure we run the new code. - ?MODULE:main_process_loop(NewState); - error -> - reply(From, {error, File}), - main_process_loop(State) - end; + {From, {compile, Files, Options}} -> + {R,S} = do_compile(Files, Options, State), + reply(From,R), + %% This module (cover) could have been reloaded. Make + %% sure we run the new code. + ?MODULE:main_process_loop(S); - {From, {compile_beam, Module, BeamFile0}} -> - Compiled0 = State#main_state.compiled, - case get_beam_file(Module,BeamFile0,Compiled0) of - {ok,BeamFile} -> - UserOptions = get_compile_options(Module,BeamFile), - {Reply,Compiled} = - case do_compile_beam(Module,BeamFile,UserOptions) of - {ok, Module} -> - remote_load_compiled(State#main_state.nodes, - [{Module,BeamFile}]), - C = add_compiled(Module,BeamFile,Compiled0), - {{ok,Module},C}; - error -> - {{error, BeamFile}, Compiled0}; - {error,Reason} -> % no abstract code - {{error, {Reason, BeamFile}}, Compiled0} - end, - reply(From,Reply), - Imported = remove_imported(Module,State#main_state.imported), - NewState = State#main_state{compiled = Compiled, - imported = Imported}, - %% This module (cover) could have been reloaded. Make - %% sure we run the new code. 
- ?MODULE:main_process_loop(NewState); - {error,no_beam} -> - %% The module has first been compiled from .erl, and now - %% someone tries to compile it from .beam - reply(From, - {error,{already_cover_compiled,no_beam_found,Module}}), - main_process_loop(State) - end; + {From, {compile_beams, ModsAndFiles}} -> + {R,S} = do_compile_beams(ModsAndFiles,State), + reply(From,R), + %% This module (cover) could have been reloaded. Make + %% sure we run the new code. + ?MODULE:main_process_loop(S); {From, {export,OutFile,Module}} -> spawn(fun() -> @@ -706,6 +743,16 @@ main_process_loop(State) -> unregister(?SERVER), reply(From, ok); + {From, {{analyse, Analysis, Level}, '_'}} -> + R = analyse_all(Analysis, Level, State), + reply(From, R), + main_process_loop(State); + + {From, {{analyse, Analysis, Level}, Modules}} when is_list(Modules) -> + R = analyse_list(Modules, Analysis, Level, State), + reply(From, R), + main_process_loop(State); + {From, {{analyse, Analysis, Level}, Module}} -> S = try Loaded = is_loaded(Module, State), @@ -722,15 +769,23 @@ main_process_loop(State) -> end, main_process_loop(S); - {From, {{analyse_to_file, OutFile, Opts},Module}} -> + {From, {{analyse_to_file, Opts},'_'}} -> + R = analyse_all_to_file(Opts, State), + reply(From,R), + main_process_loop(State); + + {From, {{analyse_to_file, Opts},Modules}} when is_list(Modules) -> + R = analyse_list_to_file(Modules, Opts, State), + reply(From,R), + main_process_loop(State); + + {From, {{analyse_to_file, Opts},Module}} -> S = try Loaded = is_loaded(Module, State), spawn(fun() -> - ?SPAWN_DBG(analyse_to_file, - {Module,OutFile, Opts}), + ?SPAWN_DBG(analyse_to_file,{Module,Opts}), do_parallel_analysis_to_file( - Module, OutFile, Opts, - Loaded, From, State) + Module, Opts, Loaded, From, State) end), State catch throw:Reason -> @@ -848,11 +903,15 @@ remote_process_loop(State) -> {remote,collect,Module,CollectorPid} -> self() ! {remote,collect,Module,CollectorPid, ?SERVER}; - {remote,collect,Module,CollectorPid,From} -> + {remote,collect,Modules0,CollectorPid,From} -> + Modules = case Modules0 of + '_' -> [M || {M,_} <- State#remote_state.compiled]; + _ -> Modules0 + end, spawn(fun() -> ?SPAWN_DBG(remote_collect, - {Module, CollectorPid, From}), - do_collect(Module, CollectorPid, From) + {Modules, CollectorPid, From}), + do_collect(Modules, CollectorPid, From) end), remote_process_loop(State); @@ -893,39 +952,51 @@ remote_process_loop(State) -> end. -do_collect(Module, CollectorPid, From) -> - AllMods = - case Module of - '_' -> ets:tab2list(?COVER_CLAUSE_TABLE); - _ -> ets:lookup(?COVER_CLAUSE_TABLE, Module) - end, - - %% Sending clause by clause in order to avoid large lists +do_collect(Modules, CollectorPid, From) -> pmap( - fun({_Mod,Clauses}) -> - lists:map(fun(Clause) -> - send_collected_data(Clause, CollectorPid) - end,Clauses) - end,AllMods), + fun(Module) -> + Pattern = {#bump{module=Module, _='_'}, '$1'}, + MatchSpec = [{Pattern,[{'=/=','$1',0}],['$_']}], + Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE), + send_chunks(Match, CollectorPid, []) + end,Modules), CollectorPid ! done, remote_reply(From, ok). -send_collected_data({M,F,A,C,_L}, CollectorPid) -> - Pattern = - {#bump{module=M, function=F, arity=A, clause=C}, '_'}, - Bumps = ets:match_object(?COVER_TABLE, Pattern), - %% Reset - lists:foreach(fun({Bump,_N}) -> - ets:insert(?COVER_TABLE, {Bump,0}) - end, - Bumps), - CollectorPid ! {chunk,Bumps}. 
+send_chunks('$end_of_table', _CollectorPid, Mons) -> + get_downs(Mons); +send_chunks({Chunk,Continuation}, CollectorPid, Mons) -> + Mon = spawn_monitor( + fun() -> + lists:foreach(fun({Bump,_N}) -> + ets:insert(?COVER_TABLE, {Bump,0}) + end, + Chunk) end), + send_chunk(CollectorPid,Chunk), + send_chunks(ets:select(Continuation), CollectorPid, [Mon|Mons]). + +send_chunk(CollectorPid,Chunk) -> + CollectorPid ! {chunk,Chunk,self()}, + receive continue -> ok end. + +get_downs([]) -> + ok; +get_downs(Mons) -> + receive + {'DOWN', Ref, _Type, Pid, _Reason} = Down -> + case lists:member({Pid,Ref},Mons) of + true -> + get_downs(lists:delete({Pid,Ref},Mons)); + false -> + %% This should be handled somewhere else + self() ! Down, + get_downs(Mons) + end + end. -reload_originals([{Module,_File}|Compiled]) -> - do_reload_original(Module), - reload_originals(Compiled); -reload_originals([]) -> - ok. +reload_originals(Compiled) -> + Modules = [M || {M,_} <- Compiled], + pmap(fun do_reload_original/1, Modules). do_reload_original(Module) -> case code:which(Module) of @@ -1068,15 +1139,40 @@ remote_load_compiled(_Nodes, [], [], _ModNum) -> ok; remote_load_compiled(Nodes, Compiled, Acc, ModNum) when Compiled == []; ModNum == ?MAX_MODS -> + RemoteLoadData = get_downs_r(Acc), lists:foreach( fun(Node) -> - remote_call(Node,{remote,load_compiled,Acc}) + remote_call(Node,{remote,load_compiled,RemoteLoadData}) end, Nodes), remote_load_compiled(Nodes, Compiled, [], 0); remote_load_compiled(Nodes, [MF | Rest], Acc, ModNum) -> remote_load_compiled( - Nodes, Rest, [get_data_for_remote_loading(MF) | Acc], ModNum + 1). + Nodes, Rest, + [spawn_job_r(fun() -> get_data_for_remote_loading(MF) end) | Acc], + ModNum + 1). + +spawn_job_r(Fun) -> + spawn_monitor(fun() -> exit(Fun()) end). + +get_downs_r([]) -> + []; +get_downs_r(Mons) -> + receive + {'DOWN', Ref, _Type, Pid, R={_,_,_,_}} -> + [R|get_downs_r(lists:delete({Pid,Ref},Mons))]; + {'DOWN', Ref, _Type, Pid, Reason} = Down -> + case lists:member({Pid,Ref},Mons) of + true -> + %% Something went really wrong - don't hang! + exit(Reason); + false -> + %% This should be handled somewhere else + self() ! Down, + get_downs_r(Mons) + end + end. + %% Read all data needed for loading a cover compiled module on a remote node %% Binary is the beam code for the module and InitialTable is the initial @@ -1113,11 +1209,11 @@ remote_reset(Module,Nodes) -> Nodes). %% Collect data from remote nodes - used for analyse or stop(Node) -remote_collect(Module,Nodes,Stop) -> +remote_collect(Modules,Nodes,Stop) -> pmap(fun(Node) -> ?SPAWN_DBG(remote_collect, - {Module, Nodes, Stop}), - do_collection(Node, Module, Stop) + {Modules, Nodes, Stop}), + do_collection(Node, Modules, Stop) end, Nodes). @@ -1138,8 +1234,9 @@ do_collection(Node, Module, Stop) -> collector_proc() -> ?SPAWN_DBG(collector_proc, []), receive - {chunk,Chunk} -> + {chunk,Chunk,From} -> insert_in_collection_table(Chunk), + From ! continue, collector_proc(); done -> ok @@ -1259,6 +1356,19 @@ add_compiled(Module, File, [H|Compiled]) -> add_compiled(Module, File, []) -> [{Module,File}]. 
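The chunked collection above (send_chunks/send_chunk and the {chunk,...}/continue handshake) reduces to the following generic sketch; the table, chunk size, and function names are illustrative and not cover's actual code.

%% Sketch: stream an ETS table in fixed-size chunks and wait for an
%% acknowledgement before sending the next chunk (simple backpressure).
stream_table(Tab, ConsumerPid) ->
    stream_chunks(ets:select(Tab, [{'_', [], ['$_']}], 1000), ConsumerPid).

stream_chunks('$end_of_table', ConsumerPid) ->
    ConsumerPid ! done,
    ok;
stream_chunks({Chunk, Cont}, ConsumerPid) ->
    ConsumerPid ! {chunk, Chunk, self()},
    receive continue -> ok end,                  %% wait for the ack
    stream_chunks(ets:select(Cont), ConsumerPid).

consume(HandleFun) ->
    receive
        {chunk, Chunk, From} ->
            HandleFun(Chunk),
            From ! continue,
            consume(HandleFun);
        done ->
            ok
    end.
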
+are_loaded([Module|Modules], State, Loaded, Imported, Error) -> + try is_loaded(Module,State) of + {loaded,File} -> + are_loaded(Modules, State, [{Module,File}|Loaded], Imported, Error); + {imported,File,_} -> + are_loaded(Modules, State, Loaded, [{Module,File}|Imported], Error) + catch throw:_ -> + are_loaded(Modules, State, Loaded, Imported, + [{not_cover_compiled,Module}|Error]) + end; +are_loaded([], _State, Loaded, Imported, Error) -> + {Loaded, Imported, Error}. + is_loaded(Module, State) -> case get_file(Module, State#main_state.compiled) of {ok, File} -> @@ -1333,18 +1443,75 @@ get_compiled_still_loaded(Nodes,Compiled0) -> %%%--Compilation--------------------------------------------------------- -%% do_compile(File, Options) -> {ok,Module} | {error,Error} -do_compile(File, UserOptions) -> +do_compile_beams(ModsAndFiles, State) -> + Result0 = pmap(fun({ok,Module,File}) -> + do_compile_beam(Module,File,State); + (Error) -> + Error + end, + ModsAndFiles), + Compiled = [{M,F} || {ok,M,F} <- Result0], + remote_load_compiled(State#main_state.nodes,Compiled), + fix_state_and_result(Result0,State,[]). + +do_compile_beam(Module,BeamFile0,State) -> + case get_beam_file(Module,BeamFile0,State#main_state.compiled) of + {ok,BeamFile} -> + UserOptions = get_compile_options(Module,BeamFile), + case do_compile_beam1(Module,BeamFile,UserOptions) of + {ok, Module} -> + {ok,Module,BeamFile}; + error -> + {error, BeamFile}; + {error,Reason} -> % no abstract code + {error, {Reason, BeamFile}} + end; + {error,no_beam} -> + %% The module has first been compiled from .erl, and now + %% someone tries to compile it from .beam + {error,{already_cover_compiled,no_beam_found,Module}} + end. + +fix_state_and_result([{ok,Module,BeamFile}|Rest],State,Acc) -> + Compiled = add_compiled(Module,BeamFile,State#main_state.compiled), + Imported = remove_imported(Module,State#main_state.imported), + NewState = State#main_state{compiled=Compiled,imported=Imported}, + fix_state_and_result(Rest,NewState,[{ok,Module}|Acc]); +fix_state_and_result([Error|Rest],State,Acc) -> + fix_state_and_result(Rest,State,[Error|Acc]); +fix_state_and_result([],State,Acc) -> + {lists:reverse(Acc),State}. + + +do_compile(Files, Options, State) -> + Result0 = pmap(fun(File) -> + do_compile(File, Options) + end, + Files), + Compiled = [{M,F} || {ok,M,F} <- Result0], + remote_load_compiled(State#main_state.nodes,Compiled), + fix_state_and_result(Result0,State,[]). + +do_compile(File, Options) -> + case do_compile1(File, Options) of + {ok, Module} -> + {ok,Module,File}; + error -> + {error,File} + end. + +%% do_compile1(File, Options) -> {ok,Module} | error +do_compile1(File, UserOptions) -> Options = [debug_info,binary,report_errors,report_warnings] ++ UserOptions, case compile:file(File, Options) of {ok, Module, Binary} -> - do_compile_beam(Module,Binary,UserOptions); + do_compile_beam1(Module,Binary,UserOptions); error -> error end. %% Beam is a binary or a .beam file name -do_compile_beam(Module,Beam,UserOptions) -> +do_compile_beam1(Module,Beam,UserOptions) -> %% Clear database do_clear(Module), @@ -1915,10 +2082,21 @@ common_elems(L1, L2) -> collect(Nodes) -> %% local node AllClauses = ets:tab2list(?COVER_CLAUSE_TABLE), - pmap(fun move_modules/1,AllClauses), - + Mon1 = spawn_monitor(fun() -> pmap(fun move_modules/1,AllClauses) end), + + %% remote nodes + Mon2 = spawn_monitor(fun() -> remote_collect('_',Nodes,false) end), + get_downs([Mon1,Mon2]). 
+ +%% Collect data for a list of modules +collect(Modules,Nodes) -> + MS = [{{'$1','_'},[{'==','$1',M}],['$_']} || M <- Modules], + Clauses = ets:select(?COVER_CLAUSE_TABLE,MS), + Mon1 = spawn_monitor(fun() -> pmap(fun move_modules/1,Clauses) end), + %% remote nodes - remote_collect('_',Nodes,false). + Mon2 = spawn_monitor(fun() -> remote_collect('_',Nodes,false) end), + get_downs([Mon1,Mon2]). %% Collect data for one module collect(Module,Clauses,Nodes) -> @@ -1926,25 +2104,26 @@ collect(Module,Clauses,Nodes) -> move_modules({Module,Clauses}), %% remote nodes - remote_collect(Module,Nodes,false). + remote_collect([Module],Nodes,false). %% When analysing, the data from the local ?COVER_TABLE is moved to the %% ?COLLECTION_TABLE. Resetting data in ?COVER_TABLE move_modules({Module,Clauses}) -> ets:insert(?COLLECTION_CLAUSE_TABLE,{Module,Clauses}), - move_clauses(Clauses). + Pattern = {#bump{module=Module, _='_'}, '_'}, + MatchSpec = [{Pattern,[],['$_']}], + Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE), + do_move_module(Match). -move_clauses([{M,F,A,C,_L}|Clauses]) -> - Pattern = {#bump{module=M, function=F, arity=A, clause=C}, '_'}, - Bumps = ets:match_object(?COVER_TABLE,Pattern), +do_move_module({Bumps,Continuation}) -> lists:foreach(fun({Key,Val}) -> ets:insert(?COVER_TABLE, {Key,0}), insert_in_collection_table(Key,Val) end, Bumps), - move_clauses(Clauses); -move_clauses([]) -> + do_move_module(ets:select(Continuation)); +do_move_module('$end_of_table') -> ok. %% Given a .beam file, find the .erl file. Look first in same directory as @@ -2002,6 +2181,26 @@ splice(BeamDir, SrcFile) -> revsplit(Path) -> lists:reverse(filename:split(Path)). +analyse_list(Modules, Analysis, Level, State) -> + {LoadedMF, ImportedMF, Error} = are_loaded(Modules, State, [], [], []), + Loaded = [M || {M,_} <- LoadedMF], + Imported = [M || {M,_} <- ImportedMF], + collect(Loaded, State#main_state.nodes), + MS = [{{'$1','_'},[{'==','$1',M}],['$_']} || M <- Loaded ++ Imported], + AllClauses = ets:select(?COLLECTION_CLAUSE_TABLE,MS), + Fun = fun({Module,Clauses}) -> + do_analyse(Module, Analysis, Level, Clauses) + end, + {result, lists:flatten(pmap(Fun, AllClauses)), Error}. + +analyse_all(Analysis, Level, State) -> + collect(State#main_state.nodes), + AllClauses = ets:tab2list(?COLLECTION_CLAUSE_TABLE), + Fun = fun({Module,Clauses}) -> + do_analyse(Module, Analysis, Level, Clauses) + end, + {result, lists:flatten(pmap(Fun, AllClauses)), []}. + do_parallel_analysis(Module, Analysis, Level, Loaded, From, State) -> analyse_info(Module,State#main_state.imported), C = case Loaded of @@ -2016,7 +2215,7 @@ do_parallel_analysis(Module, Analysis, Level, Loaded, From, State) -> Clauses end, R = do_analyse(Module, Analysis, Level, C), - reply(From, R). + reply(From, {ok,R}). 
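A usage sketch for the widened analyse API served by the clauses above; the module names are hypothetical and are assumed to be cover-compiled.

%% Sketch only: the zero-, one- and two-argument forms analyse all
%% cover-compiled modules and return {result, Ok, Errors}; the
%% per-module form still returns {ok, Answer} or {error, Reason}.
analyse_examples() ->
    {result, _PerFunction, _E1} = cover:analyse(),             %% coverage, function level
    {result, _PerLine, _E2}     = cover:analyse(calls, line),  %% all modules
    {result, _Selected, _E3}    = cover:analyse([mod_a, mod_b]),
    {ok, _ModuleTotal}          = cover:analyse(mod_a, coverage, module),
    ok.
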
%% do_analyse(Module, Analysis, Level, Clauses)-> {ok,Answer} | {error,Error} %% Clauses = [{Module,Function,Arity,Clause,Lines}] @@ -2035,37 +2234,44 @@ do_analyse(Module, Analysis, line, _Clauses) -> {{Module,L}, N} end end, - Answer = lists:keysort(1, lists:map(Fun, Bumps)), - {ok, Answer}; -do_analyse(_Module, Analysis, clause, Clauses) -> - Fun = case Analysis of - coverage -> - fun({M,F,A,C,Ls}) -> - Pattern = {#bump{module=M,function=F,arity=A, - clause=C},0}, - Bumps = ets:match_object(?COLLECTION_TABLE, Pattern), - NotCov = length(Bumps), - {{M,F,A,C}, {Ls-NotCov, NotCov}} - end; - calls -> - fun({M,F,A,C,_Ls}) -> - Pattern = {#bump{module=M,function=F,arity=A, - clause=C},'_'}, - Bumps = ets:match_object(?COLLECTION_TABLE, Pattern), - {_Bump, Calls} = hd(lists:keysort(1, Bumps)), - {{M,F,A,C}, Calls} - end - end, - Answer = lists:map(Fun, Clauses), - {ok, Answer}; + lists:keysort(1, lists:map(Fun, Bumps)); +do_analyse(Module, Analysis, clause, _Clauses) -> + Pattern = {#bump{module=Module},'_'}, + Bumps = lists:keysort(1,ets:match_object(?COLLECTION_TABLE, Pattern)), + analyse_clause(Analysis,Bumps); do_analyse(Module, Analysis, function, Clauses) -> - {ok, ClauseResult} = do_analyse(Module, Analysis, clause, Clauses), - Result = merge_clauses(ClauseResult, merge_fun(Analysis)), - {ok, Result}; + ClauseResult = do_analyse(Module, Analysis, clause, Clauses), + merge_clauses(ClauseResult, merge_fun(Analysis)); do_analyse(Module, Analysis, module, Clauses) -> - {ok, FunctionResult} = do_analyse(Module, Analysis, function, Clauses), + FunctionResult = do_analyse(Module, Analysis, function, Clauses), Result = merge_functions(FunctionResult, merge_fun(Analysis)), - {ok, {Module,Result}}. + {Module,Result}. + +analyse_clause(_,[]) -> + []; +analyse_clause(coverage, + [{#bump{module=M,function=F,arity=A,clause=C},_}|_]=Bumps) -> + analyse_clause_cov(Bumps,{M,F,A,C},0,0,[]); +analyse_clause(calls,Bumps) -> + analyse_clause_calls(Bumps,{x,x,x,x},[]). + +analyse_clause_cov([{#bump{module=M,function=F,arity=A,clause=C},N}|Bumps], + {M,F,A,C}=Clause,Ls,NotCov,Acc) -> + analyse_clause_cov(Bumps,Clause,Ls+1,if N==0->NotCov+1; true->NotCov end,Acc); +analyse_clause_cov([{#bump{module=M1,function=F1,arity=A1,clause=C1},_}|_]=Bumps, + Clause,Ls,NotCov,Acc) -> + analyse_clause_cov(Bumps,{M1,F1,A1,C1},0,0,[{Clause,{Ls-NotCov,NotCov}}|Acc]); +analyse_clause_cov([],Clause,Ls,NotCov,Acc) -> + lists:reverse(Acc,[{Clause,{Ls-NotCov,NotCov}}]). + +analyse_clause_calls([{#bump{module=M,function=F,arity=A,clause=C},_}|Bumps], + {M,F,A,C}=Clause,Acc) -> + analyse_clause_calls(Bumps,Clause,Acc); +analyse_clause_calls([{#bump{module=M1,function=F1,arity=A1,clause=C1},N}|Bumps], + _Clause,Acc) -> + analyse_clause_calls(Bumps,{M1,F1,A1,C1},[{{M1,F1,A1,C1},N}|Acc]); +analyse_clause_calls([],_Clause,Acc) -> + lists:reverse(Acc). merge_fun(coverage) -> fun({Cov1,NotCov1}, {Cov2,NotCov2}) -> @@ -2094,7 +2300,50 @@ merge_functions([{_MFA,R}|Functions], MFun, Result) -> merge_functions([], _MFun, Result) -> Result. 
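The per-clause aggregation above works because the bumps are keysorted, so all counters belonging to one clause are adjacent and a single pass suffices. A self-contained sketch of that grouping idea over sorted {ClauseKey, Count} pairs (the names are made up, not cover's records):

%% Sketch: cov_by_clause([{c1,1},{c1,0},{c2,2}]) -> [{c1,{1,1}},{c2,{1,0}}],
%% i.e. {Covered, NotCovered} per clause key, in one pass over sorted input.
cov_by_clause([]) ->
    [];
cov_by_clause([{Key, N} | Rest]) ->
    cov_by_clause(Rest, Key, bump(N, {0, 0}), []).

cov_by_clause([{Key, N} | Rest], Key, Counts, Acc) ->
    cov_by_clause(Rest, Key, bump(N, Counts), Acc);
cov_by_clause([{NewKey, N} | Rest], Key, Counts, Acc) ->
    cov_by_clause(Rest, NewKey, bump(N, {0, 0}), [{Key, Counts} | Acc]);
cov_by_clause([], Key, Counts, Acc) ->
    lists:reverse([{Key, Counts} | Acc]).

bump(0, {Cov, NotCov}) -> {Cov, NotCov + 1};
bump(_, {Cov, NotCov}) -> {Cov + 1, NotCov}.
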
-do_parallel_analysis_to_file(Module, OutFile, Opts, Loaded, From, State) -> +analyse_list_to_file(Modules, Opts, State) -> + {LoadedMF, ImportedMF, Error} = are_loaded(Modules, State, [], [], []), + collect([M || {M,_} <- LoadedMF], State#main_state.nodes), + OutDir = proplists:get_value(outdir,Opts), + HTML = lists:member(html,Opts), + Fun = fun({Module,File}) -> + OutFile = outfilename(OutDir,Module,HTML), + do_analyse_to_file(Module,File,OutFile,HTML,State) + end, + {Ok,Error1} = split_ok_error(pmap(Fun, LoadedMF++ImportedMF),[],[]), + {result,Ok,Error ++ Error1}. + +analyse_all_to_file(Opts, State) -> + collect(State#main_state.nodes), + AllModules = get_all_modules(State), + OutDir = proplists:get_value(outdir,Opts), + HTML = lists:member(html,Opts), + Fun = fun({Module,File}) -> + OutFile = outfilename(OutDir,Module,HTML), + do_analyse_to_file(Module,File,OutFile,HTML,State) + end, + {Ok,Error} = split_ok_error(pmap(Fun, AllModules),[],[]), + {result,Ok,Error}. + +get_all_modules(State) -> + get_all_modules(State#main_state.compiled ++ State#main_state.imported,[]). +get_all_modules([{Module,File}|Rest],Acc) -> + get_all_modules(Rest,[{Module,File}|Acc]); +get_all_modules([{Module,File,_}|Rest],Acc) -> + case lists:keymember(Module,1,Acc) of + true -> get_all_modules(Rest,Acc); + false -> get_all_modules(Rest,[{Module,File}|Acc]) + end; +get_all_modules([],Acc) -> + Acc. + +split_ok_error([{ok,R}|Result],Ok,Error) -> + split_ok_error(Result,[R|Ok],Error); +split_ok_error([{error,R}|Result],Ok,Error) -> + split_ok_error(Result,Ok,[R|Error]); +split_ok_error([],Ok,Error) -> + {Ok,Error}. + +do_parallel_analysis_to_file(Module, Opts, Loaded, From, State) -> File = case Loaded of {loaded, File0} -> [{Module,Clauses}] = @@ -2105,24 +2354,32 @@ do_parallel_analysis_to_file(Module, OutFile, Opts, Loaded, From, State) -> {imported, File0, _} -> File0 end, + HTML = lists:member(html,Opts), + OutFile = + case proplists:get_value(outfile,Opts) of + undefined -> + outfilename(proplists:get_value(outdir,Opts),Module,HTML); + F -> + F + end, + reply(From, do_analyse_to_file(Module,File,OutFile,HTML,State)). + +do_analyse_to_file(Module,File,OutFile,HTML,State) -> case find_source(Module, File) of {beam,_BeamFile} -> - reply(From, {error,no_source_code_found}); + {error,{no_source_code_found,Module}}; ErlFile -> analyse_info(Module,State#main_state.imported), - HTML = lists:member(html,Opts), - R = do_analyse_to_file(Module,OutFile, - ErlFile,HTML), - reply(From, R) + do_analyse_to_file1(Module,OutFile,ErlFile,HTML) end. 
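A usage sketch for the reworked analyse_to_file options handled above; html, {outfile, ...} and {outdir, ...} come from the patch itself, while the module names, file name, and directory are hypothetical (the output directory is assumed to exist).

%% Sketch only: per-module HTML reports into a directory, and a single
%% module written to an explicit file name.
report_examples(Mods) ->
    {result, _Files, _Errors} =
        cover:analyse_to_file(Mods, [html, {outdir, "cover_reports"}]),
    {ok, _File} =
        cover:analyse_to_file(mod_a, [html, {outfile, "mod_a_cover.html"}]),
    ok.
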
-%% do_analyse_to_file(Module,OutFile,ErlFile) -> {ok,OutFile} | {error,Error} +%% do_analyse_to_file1(Module,OutFile,ErlFile) -> {ok,OutFile} | {error,Error} %% Module = atom() %% OutFile = ErlFile = string() -do_analyse_to_file(Module, OutFile, ErlFile, HTML) -> - case file:open(ErlFile, [read]) of +do_analyse_to_file1(Module, OutFile, ErlFile, HTML) -> + case file:open(ErlFile, [read,raw,read_ahead]) of {ok, InFd} -> - case file:open(OutFile, [write]) of + case file:open(OutFile, [write,raw,delayed_write]) of {ok, OutFd} -> if HTML -> Encoding = encoding(ErlFile), @@ -2160,9 +2417,14 @@ do_analyse_to_file(Module, OutFile, ErlFile, HTML) -> "**************************************" "\n\n"]), - print_lines(Module, InFd, OutFd, 1, HTML), + Pattern = {#bump{module=Module,line='$1',_='_'},'$2'}, + MS = [{Pattern,[],[{{'$1','$2'}}]}], + CovLines = lists:keysort(1,ets:select(?COLLECTION_TABLE, MS)), + print_lines(Module, CovLines, InFd, OutFd, 1, HTML), - if HTML -> io:format(OutFd,"</pre>\n</body>\n</html>\n",[]); + if + HTML -> + file:write(OutFd, "</pre>\n</body>\n</html>\n"); true -> ok end, @@ -2179,21 +2441,19 @@ do_analyse_to_file(Module, OutFile, ErlFile, HTML) -> {error, {file, ErlFile, Reason}} end. -print_lines(Module, InFd, OutFd, L, HTML) -> - case io:get_line(InFd, '') of + +print_lines(Module, CovLines, InFd, OutFd, L, HTML) -> + case file:read_line(InFd) of eof -> ignore; - "%"++_=Line -> %Comment line - not executed. - io:put_chars(OutFd, [tab(),escape_lt_and_gt(Line, HTML)]), - print_lines(Module, InFd, OutFd, L+1, HTML); - RawLine -> + {ok,"%"++_=Line} -> %Comment line - not executed. + file:write(OutFd, [tab(),escape_lt_and_gt(Line, HTML)]), + print_lines(Module, CovLines, InFd, OutFd, L+1, HTML); + {ok,RawLine} -> Line = escape_lt_and_gt(RawLine,HTML), - Pattern = {#bump{module=Module,line=L},'$1'}, - case ets:match(?COLLECTION_TABLE, Pattern) of - [] -> - io:put_chars(OutFd, [tab(),Line]); - Ns -> - N = lists:foldl(fun([Ni], Nacc) -> Nacc+Ni end, 0, Ns), + case CovLines of + [{L,N}|CovLines1] -> + %% N = lists:foldl(fun([Ni], Nacc) -> Nacc+Ni end, 0, Ns), if N=:=0, HTML=:=true -> LineNoNL = Line -- "\n", @@ -2201,19 +2461,22 @@ print_lines(Module, InFd, OutFd, L, HTML) -> %%Str = string:right("0", 6, 32), RedLine = ["<font color=red>",Str,fill1(), LineNoNL,"</font>\n"], - io:put_chars(OutFd, RedLine); + file:write(OutFd, RedLine); N<1000000 -> Str = string:right(integer_to_list(N), 6, 32), - io:put_chars(OutFd, [Str,fill1(),Line]); + file:write(OutFd, [Str,fill1(),Line]); N<10000000 -> Str = integer_to_list(N), - io:put_chars(OutFd, [Str,fill2(),Line]); + file:write(OutFd, [Str,fill2(),Line]); true -> Str = integer_to_list(N), - io:put_chars(OutFd, [Str,fill3(),Line]) - end - end, - print_lines(Module, InFd, OutFd, L+1, HTML) + file:write(OutFd, [Str,fill3(),Line]) + end, + print_lines(Module, CovLines1, InFd, OutFd, L+1, HTML); + _ -> + file:write(OutFd, [tab(),Line]), + print_lines(Module, CovLines, InFd, OutFd, L+1, HTML) + end end. tab() -> " | ". @@ -2223,7 +2486,7 @@ fill3() -> "| ". %%%--Export-------------------------------------------------------------- do_export(Module, OutFile, From, State) -> - case file:open(OutFile,[write,binary,raw]) of + case file:open(OutFile,[write,binary,raw,delayed_write]) of {ok,Fd} -> Reply = case Module of @@ -2362,21 +2625,21 @@ do_reset_collection_table(Module) -> ets:match_delete(?COLLECTION_TABLE, {#bump{module=Module},'_'}). 
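The switch to raw, read_ahead and delayed_write file handles above is a throughput optimization; the same pattern in isolation looks like the following sketch (file names are hypothetical).

%% Sketch: copy a text file line by line using buffered raw I/O,
%% mirroring the open modes used by do_analyse_to_file1 above.
copy_lines(FromFile, ToFile) ->
    {ok, In} = file:open(FromFile, [read, raw, read_ahead]),
    {ok, Out} = file:open(ToFile, [write, raw, delayed_write]),
    copy_loop(In, Out),
    ok = file:close(In),
    ok = file:close(Out).

copy_loop(In, Out) ->
    case file:read_line(In) of
        eof ->
            ok;
        {ok, Line} ->
            ok = file:write(Out, Line),
            copy_loop(In, Out)
    end.
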
%% do_reset(Module) -> ok -%% The reset is done on a per-clause basis to avoid building +%% The reset is done on ?CHUNK_SIZE number of bumps to avoid building %% long lists in the case of very large modules do_reset(Module) -> - [{Module,Clauses}] = ets:lookup(?COVER_CLAUSE_TABLE, Module), - do_reset2(Clauses). + Pattern = {#bump{module=Module, _='_'}, '$1'}, + MatchSpec = [{Pattern,[{'=/=','$1',0}],['$_']}], + Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE), + do_reset2(Match). -do_reset2([{M,F,A,C,_L}|Clauses]) -> - Pattern = {#bump{module=M, function=F, arity=A, clause=C}, '_'}, - Bumps = ets:match_object(?COVER_TABLE, Pattern), +do_reset2({Bumps,Continuation}) -> lists:foreach(fun({Bump,_N}) -> ets:insert(?COVER_TABLE, {Bump,0}) end, Bumps), - do_reset2(Clauses); -do_reset2([]) -> + do_reset2(ets:select(Continuation)); +do_reset2('$end_of_table') -> ok. do_clear(Module) -> @@ -2419,31 +2682,43 @@ escape_lt_and_gt1([],Acc) -> escape_lt_and_gt1([H|T],Acc) -> escape_lt_and_gt1(T,[H|Acc]). -pmap(Fun, List) -> - pmap(Fun, List, 20). -pmap(Fun, List, Limit) -> - pmap(Fun, List, [], Limit, 0, []). -pmap(Fun, [E | Rest], Pids, Limit, Cnt, Acc) when Cnt < Limit -> - Collector = self(), - Pid = spawn_link(fun() -> - ?SPAWN_DBG(pmap,E), - Collector ! {res,self(),Fun(E)} - end), - erlang:monitor(process, Pid), - pmap(Fun, Rest, Pids ++ [Pid], Limit, Cnt + 1, Acc); -pmap(Fun, List, [Pid | Pids], Limit, Cnt, Acc) -> - receive - {'DOWN', _Ref, process, X, _} when is_pid(X) -> - pmap(Fun, List, [Pid | Pids], Limit, Cnt - 1, Acc); - {res, Pid, Res} -> - pmap(Fun, List, Pids, Limit, Cnt, [Res | Acc]) - end; -pmap(_Fun, [], [], _Limit, 0, Acc) -> - lists:reverse(Acc); -pmap(Fun, [], [], Limit, Cnt, Acc) -> +%%%--Internal functions for parallelization------------------------------ +pmap(Fun,List) -> + NTot = length(List), + NProcs = erlang:system_info(schedulers) * 2, + NPerProc = (NTot div NProcs) + 1, + Mons = pmap_spawn(Fun,NPerProc,List,[]), + pmap_collect(Mons,[]). + +pmap_spawn(_,_,[],Mons) -> + Mons; +pmap_spawn(Fun,NPerProc,List,Mons) -> + {L1,L2} = if length(List)>=NPerProc -> lists:split(NPerProc,List); + true -> {List,[]} % last chunk + end, + Mon = + spawn_monitor( + fun() -> + exit({pmap_done,lists:map(Fun,L1)}) + end), + pmap_spawn(Fun,NPerProc,L2,[Mon|Mons]). + +pmap_collect([],Acc) -> + lists:append(Acc); +pmap_collect(Mons,Acc) -> receive - {'DOWN', _Ref, process, X, _} when is_pid(X) -> - pmap(Fun, [], [], Limit, Cnt - 1, Acc) + {'DOWN', Ref, process, Pid, {pmap_done,Result}} -> + pmap_collect(lists:delete({Pid,Ref},Mons),[Result|Acc]); + {'DOWN', Ref, process, Pid, Reason} = Down -> + case lists:member({Pid,Ref},Mons) of + true -> + %% Something went really wrong - don't hang! + exit(Reason); + false -> + %% This should be handled somewhere else + self() ! Down, + pmap_collect(Mons,Acc) + end end. %%%----------------------------------------------------------------- diff --git a/lib/tools/test/cover_SUITE.erl b/lib/tools/test/cover_SUITE.erl index 80807b1d38..368fa6c3d1 100644 --- a/lib/tools/test/cover_SUITE.erl +++ b/lib/tools/test/cover_SUITE.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2001-2013. All Rights Reserved. +%% Copyright Ericsson AB 2001-2015. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -33,6 +33,8 @@ -export([do_coverage/1]). +-export([distribution_performance/1]). + -include_lib("test_server/include/test_server.hrl"). 
%%---------------------------------------------------------------------- @@ -170,10 +172,15 @@ compile(Config) when is_list(Config) -> ?line {ok, CWD} = file:get_cwd(), ?line Result2 = cover:compile_directory(CWD), ?line SortedResult = lists:sort(Result2), - ?line [{error,_DFile},{ok,a},{ok,b},{ok,cc},{ok,f}] = SortedResult, + ?line [{error,DFile},{ok,a},{ok,b},{ok,cc},{ok,f}] = SortedResult, ?line [{ok,e}] = cover:compile_directory("d1"), ?line {error,enoent} = cover:compile_directory("d2"), + [] = cover:compile([]), + Result21 = cover:compile([a,b,"cc.erl",d,"f"]), + SortedResult21 = lists:sort(Result21), + [{error,DFile},{ok,a},{ok,b},{ok,cc},{ok,f}] = SortedResult21, + ?line {ok,a} = cover:compile(a), ?line {ok,b} = compile:file(b), ?line code:purge(b), @@ -213,8 +220,14 @@ compile(Config) when is_list(Config) -> ?line {error,non_existing} = cover:compile_beam(z), ?line [{ok,y}] = cover:compile_beam_directory("d"), ?line Result3 = lists:sort(cover:compile_beam_directory()), - ?line [{error,{no_abstract_code,_XBeam}},{ok,crypt},{ok,v},{ok,w}] = Result3, + ?line [{error,{no_abstract_code,XBeam}},{ok,crypt},{ok,v},{ok,w}] = Result3, ?line {error,enoent} = cover:compile_beam_directory("d2"), + + [] = cover:compile_beam([]), + Result31 = cover:compile_beam([crypt,"v.beam",w,"x"]), + SortedResult31 = lists:sort(Result31), + [{error,{no_abstract_code,XBeam}},{ok,crypt},{ok,v},{ok,w}] = SortedResult31, + ?line decompile([v,w,y]), ?line Files = lsfiles(), ?line remove(files(Files, ".beam")). @@ -239,20 +252,22 @@ analyse(Config) when is_list(Config) -> ?line done = a:start(5), - ?line {ok, {a,{17,2}}} = cover:analyse(a, coverage, module), - ?line {ok, [{{a,start,1},{6,0}}, - {{a,stop,1},{0,1}}, - {{a,pong,1},{1,0}}, - {{a,loop,3},{5,1}}, - {{a,trycatch,1},{4,0}}, - {{a,exit_kalle,0},{1,0}}]} = cover:analyse(a, coverage, function), - ?line {ok, [{{a,start,1,1},{6,0}}, - {{a,stop,1,1},{0,1}}, - {{a,pong,1,1},{1,0}}, + {ok, {a,{17,2}}=ACovMod} = cover:analyse(a, coverage, module), + {ok, [{{a,exit_kalle,0},{1,0}}, + {{a,loop,3},{5,1}}, + {{a,pong,1},{1,0}}, + {{a,start,1},{6,0}}, + {{a,stop,1},{0,1}}, + {{a,trycatch,1},{4,0}}]=ACovFunc} = + cover:analyse(a, coverage, function), + {ok, [{{a,exit_kalle,0,1},{1,0}}, {{a,loop,3,1},{3,1}}, {{a,loop,3,2},{2,0}}, - {{a,trycatch,1,1},{4,0}}, - {{a,exit_kalle,0,1},{1,0}}]} = cover:analyse(a, coverage, clause), + {{a,pong,1,1},{1,0}}, + {{a,start,1,1},{6,0}}, + {{a,stop,1,1},{0,1}}, + {{a,trycatch,1,1},{4,0}}]=ACovClause} = + cover:analyse(a, coverage, clause), ?line {ok, [{{a,9},{1,0}}, {{a,10},{1,0}}, {{a,11},{1,0}}, @@ -271,22 +286,22 @@ analyse(Config) when is_list(Config) -> {{a,47},{1,0}}, {{a,49},{1,0}}, {{a,51},{1,0}}, - {{a,55},{1,0}}]} = cover:analyse(a, coverage, line), - - ?line {ok, {a,15}} = cover:analyse(a, calls, module), - ?line {ok, [{{a,start,1},1}, - {{a,stop,1},0}, - {{a,pong,1},5}, - {{a,loop,3},6}, - {{a,trycatch,1},2}, - {{a,exit_kalle,0},1}]} = cover:analyse(a, calls, function), - ?line {ok, [{{a,start,1,1},1}, - {{a,stop,1,1},0}, - {{a,pong,1,1},5}, - {{a,loop,3,1},5}, - {{a,loop,3,2},1}, - {{a,trycatch,1,1},2}, - {{a,exit_kalle,0,1},1}]} = cover:analyse(a, calls, clause), + {{a,55},{1,0}}]=ACovLine} = cover:analyse(a, coverage, line), + + {ok, {a,15}=ACallsMod} = cover:analyse(a, calls, module), + {ok, [{{a,exit_kalle,0},1}, + {{a,loop,3},6}, + {{a,pong,1},5}, + {{a,start,1},1}, + {{a,stop,1},0}, + {{a,trycatch,1},2}]=ACallsFunc} = cover:analyse(a, calls, function), + {ok, [{{a,exit_kalle,0,1},1}, + {{a,loop,3,1},5}, + 
{{a,loop,3,2},1}, + {{a,pong,1,1},5}, + {{a,start,1,1},1}, + {{a,stop,1,1},0}, + {{a,trycatch,1,1},2}]=ACallsClause} = cover:analyse(a, calls, clause), ?line {ok, [{{a,9},1}, {{a,10},1}, {{a,11},1}, @@ -305,27 +320,85 @@ analyse(Config) when is_list(Config) -> {{a,47},1}, {{a,49},1}, {{a,51},2}, - {{a,55},1}]} = cover:analyse(a, calls, line), - - ?line {ok, [{{a,start,1},{6,0}}, - {{a,stop,1},{0,1}}, - {{a,pong,1},{1,0}}, - {{a,loop,3},{5,1}}, - {{a,trycatch,1},{4,0}}, - {{a,exit_kalle,0},{1,0}}]} = cover:analyse(a), - ?line {ok, {a,{17,2}}} = cover:analyse(a, module), - ?line {ok, [{{a,start,1},1}, - {{a,stop,1},0}, - {{a,pong,1},5}, - {{a,loop,3},6}, - {{a,trycatch,1},2}, - {{a,exit_kalle,0},1}]} = cover:analyse(a, calls), + {{a,55},1}]=ACallsLine} = cover:analyse(a, calls, line), + + {ok,ACovFunc} = cover:analyse(a), + {ok,ACovMod} = cover:analyse(a, module), + {ok,ACallsFunc} = cover:analyse(a, calls), ?line {ok, "a.COVER.out"} = cover:analyse_to_file(a), ?line {ok, "e.COVER.out"} = cover:analyse_to_file(e), ?line {ok, "a.COVER.html"} = cover:analyse_to_file(a,[html]), ?line {ok, "e.COVER.html"} = cover:analyse_to_file(e,[html]), + %% Analyse all modules + Modules = cover:modules(), + N = length(Modules), + + {result,CovFunc,[]} = cover:analyse(), % default = coverage, function + ACovFunc = [A || {{a,_,_},_}=A<-CovFunc], + + {result,CovMod,[]} = cover:analyse(coverage,module), + ACovMod = lists:keyfind(a,1,CovMod), + + {result,CovClause,[]} = cover:analyse(coverage,clause), + ACovClause = [A || {{a,_,_,_},_}=A<-CovClause], + + {result,CovLine,[]} = cover:analyse(coverage,line), + ACovLine = [A || {{a,_},_}=A<-CovLine], + + {result,CallsFunc,[]} = cover:analyse(calls,function), + ACallsFunc = [A || {{a,_,_},_}=A<-CallsFunc], + + {result,CallsMod,[]} = cover:analyse(calls,module), + ACallsMod = lists:keyfind(a,1,CallsMod), + + {result,CallsClause,[]} = cover:analyse(calls,clause), + ACallsClause = [A || {{a,_,_,_},_}=A<-CallsClause], + + {result,CallsLine,[]} = cover:analyse(calls,line), + ACallsLine = [A || {{a,_},_}=A<-CallsLine], + + {result,AllToFile,[]} = cover:analyse_to_file(), + N = length(AllToFile), + true = lists:member("a.COVER.out",AllToFile), + {result,AllToFileHtml,[]} = cover:analyse_to_file([html]), + N = length(AllToFileHtml), + true = lists:member("a.COVER.html",AllToFileHtml), + + %% Analyse list of modules + %% Listing all modules so we can compare result with above result + %% from analysing all. 
+ + {result,CovFunc1,[]} = cover:analyse(Modules), % default = coverage, function + true = lists:sort(CovFunc) == lists:sort(CovFunc1), + + {result,CovMod1,[]} = cover:analyse(Modules,coverage,module), + true = lists:sort(CovMod) == lists:sort(CovMod1), + + {result,CovClause1,[]} = cover:analyse(Modules,coverage,clause), + true = lists:sort(CovClause) == lists:sort(CovClause1), + + {result,CovLine1,[]} = cover:analyse(Modules,coverage,line), + true = lists:sort(CovLine) == lists:sort(CovLine1), + + {result,CallsFunc1,[]} = cover:analyse(Modules,calls,function), + true = lists:sort(CallsFunc1) == lists:sort(CallsFunc1), + + {result,CallsMod1,[]} = cover:analyse(Modules,calls,module), + true = lists:sort(CallsMod) == lists:sort(CallsMod1), + + {result,CallsClause1,[]} = cover:analyse(Modules,calls,clause), + true = lists:sort(CallsClause) == lists:sort(CallsClause1), + + {result,CallsLine1,[]} = cover:analyse(Modules,calls,line), + true = lists:sort(CallsLine) == lists:sort(CallsLine1), + + {result,AllToFile1,[]} = cover:analyse_to_file(Modules), + true = lists:sort(AllToFile) == lists:sort(AllToFile1), + {result,AllToFileHtml1,[]} = cover:analyse_to_file(Modules,[html]), + true = lists:sort(AllToFileHtml) == lists:sort(AllToFileHtml1), + %% analyse_to_file of file which is compiled from beam ?line {ok,f} = compile:file(f,[debug_info]), ?line code:purge(f), @@ -348,14 +421,17 @@ analyse(Config) when is_list(Config) -> {module,z} = code:load_file(z), {ok,z} = cover:compile_beam(z), ok = file:delete("z.erl"), - {error,no_source_code_found} = cover:analyse_to_file(z), + {error,{no_source_code_found,z}} = cover:analyse_to_file(z), + {result,[],[{no_source_code_found,z}]} = cover:analyse_to_file([z]), code:purge(z), code:delete(z), ?line {error,{not_cover_compiled,b}} = cover:analyse(b), ?line {error,{not_cover_compiled,g}} = cover:analyse(g), + {result,[],[{not_cover_compiled,b}]} = cover:analyse([b]), ?line {error,{not_cover_compiled,b}} = cover:analyse_to_file(b), - ?line {error,{not_cover_compiled,g}} = cover:analyse_to_file(g). + {error,{not_cover_compiled,g}} = cover:analyse_to_file(g), + {result,[],[{not_cover_compiled,g}]} = cover:analyse_to_file([g]). misc(suite) -> []; misc(Config) when is_list(Config) -> @@ -680,6 +756,119 @@ stop_node_after_disconnect(Config) -> ?t:stop_node(N1), ok. 
+distribution_performance(Config) -> + PrivDir = ?config(priv_dir,Config), + Dir = filename:join(PrivDir,"distribution_performance"), + AllFiles = filename:join(Dir,"*"), + ok = filelib:ensure_dir(AllFiles), + code:add_patha(Dir), + M = 9, % Generate M modules + F = 210, % with F functions + C = 10, % and each function of C clauses + Mods = generate_modules(M,F,C,Dir), + +% ?t:break(""), + + NodeName = cover_SUITE_distribution_performance, + {ok,N1} = ?t:start_node(NodeName,peer,[{start_cover,false}]), + %% CFun = fun() -> + %% [{ok,_} = cover:compile_beam(Mod) || Mod <- Mods] + %% end, + CFun = fun() -> cover:compile_beam(Mods) end, + {CT,CA} = timer:tc(CFun), +% erlang:display(CA), + erlang:display({compile,CT}), + + {SNT,_} = timer:tc(fun() -> {ok,[N1]} = cover:start(nodes()) end), + erlang:display({start_node,SNT}), + + [1 = rpc:call(N1,Mod,f1,[1]) || Mod <- Mods], + +% Fun = fun() -> [cover:analyse(Mod,calls,function) || Mod<-Mods] end, +% Fun = fun() -> analyse_all(Mods,calls,function) end, +% Fun = fun() -> cover:analyse('_',calls,function) end, + Fun = fun() -> cover:analyse(Mods,calls,function) end, + +% Fun = fun() -> [begin cover:analyse_to_file(Mod,[html]) end || Mod<-Mods] end, +% Fun = fun() -> analyse_all_to_file(Mods,[html]) end, +% Fun = fun() -> cover:analyse_to_file(Mods,[html]) end, +% Fun = fun() -> cover:analyse_to_file([html]) end, + +% Fun = fun() -> cover:reset() end, + + {AT,A} = timer:tc(Fun), + erlang:display({analyse,AT}), +% erlang:display(lists:sort([X || X={_MFA,N} <- lists:append([L || {ok,L}<-A]), N=/=0])), + + %% fprof:apply(Fun, [],[{procs,[whereis(cover_server)]}]), + %% fprof:profile(), + %% fprof:analyse(dest,[]), + + {SNT2,_} = timer:tc(fun() -> ?t:stop_node(N1) end), + erlang:display({stop_node,SNT2}), + + code:del_path(Dir), + Files = filelib:wildcard(AllFiles), + [ok = file:delete(File) || File <- Files], + ok = file:del_dir(Dir), + ok. + +%% Run analysis in parallel +analyse_all(Mods,Analysis,Level) -> + Pids = [begin + Pid = spawn(fun() -> + {ok,A} = cover:analyse(Mod,Analysis,Level), + exit(A) + end), + erlang:monitor(process,Pid), + Pid + end || Mod <- Mods], + get_downs(Pids,[]). + +analyse_all_to_file(Mods,Opts) -> + Pids = [begin + Pid = cover:async_analyse_to_file(Mod,Opts), + erlang:monitor(process,Pid), + Pid + end || Mod <- Mods], + get_downs(Pids,[]). + +get_downs([],Acc) -> + Acc; +get_downs(Pids,Acc) -> + receive + {'DOWN', _Ref, _Type, Pid, A} -> + get_downs(lists:delete(Pid,Pids),[A|Acc]) + end. + +generate_modules(0,_,_,_) -> + []; +generate_modules(M,F,C,Dir) -> + ModStr = "m" ++ integer_to_list(M), + Mod = list_to_atom(ModStr), + Src = ["-module(",ModStr,").\n" + "-compile(export_all).\n" | + generate_functions(F,C)], + Erl = filename:join(Dir,ModStr++".erl"), + ok = file:write_file(Erl,Src), + {ok,Mod} = compile:file(Erl,[{outdir,Dir},debug_info,report]), + [Mod | generate_modules(M-1,F,C,Dir)]. + +generate_functions(0,_) -> + []; +generate_functions(F,C) -> + Func = "f" ++ integer_to_list(F), + [generate_clauses(C,Func) | generate_functions(F-1,C)]. + +generate_clauses(0,_) -> + []; +generate_clauses(C,Func) -> + CStr = integer_to_list(C), + Sep = if C==1 -> "."; true -> ";" end, + [Func,"(",CStr,") -> ",CStr,Sep,"\n" | + generate_clauses(C-1,Func)]. 
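For reference, the generator above emits modules of the following shape; the concrete module and function numbers depend on M, F and C, so this listing is only an example for one function with three clauses.

%% Example of the generated source (illustrative):
-module(m1).
-compile(export_all).

f1(3) -> 3;
f1(2) -> 2;
f1(1) -> 1.
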
+ + export_import(suite) -> []; export_import(Config) when is_list(Config) -> ?line DataDir = ?config(data_dir, Config), @@ -788,10 +977,11 @@ otp_5031(Config) when is_list(Config) -> Dog = ?t:timetrap(?t:seconds(10)), - ?line {ok,N1} = ?t:start_node(cover_SUITE_distribution1,slave,[]), + {ok,N1} = ?t:start_node(cover_SUITE_otp_5031,slave,[]), ?line {ok,[N1]} = cover:start(N1), ?line {error,not_main_node} = rpc:call(N1,cover,modules,[]), ?line cover:stop(), + ?t:stop_node(N1), ?t:timetrap_cancel(Dog), ok. @@ -1005,6 +1195,7 @@ otp_7095(Config) when is_list(Config) -> ok. + otp_8270(doc) -> ["OTP-8270. Bug."]; otp_8270(suite) -> []; @@ -1020,7 +1211,7 @@ otp_8270(Config) when is_list(Config) -> ?line {ok,N3} = ?t:start_node(cover_n3,slave,As), timer:sleep(500), - cover:start(nodes()), + {ok,[_,_,_]} = cover:start(nodes()), Test = << "-module(m).\n" @@ -1058,6 +1249,7 @@ otp_8270(Config) when is_list(Config) -> ?line {N2,true} = {N2,is_list(N2_info)}, ?line {N3,true} = {N3,is_list(N3_info)}, + exit(Pid1,kill), ?line ?t:stop_node(N1), ?line ?t:stop_node(N2), ?line ?t:stop_node(N3), @@ -1572,7 +1764,9 @@ is_unloaded(What) -> end. check_f_calls(F1,F2) -> - {ok,[{{f,f1,0},F1},{{f,f2,0},F2}|_]} = cover:analyse(f,calls,function). + {ok,A} = cover:analyse(f,calls,function), + {_,F1} = lists:keyfind({f,f1,0},1,A), + {_,F2} = lists:keyfind({f,f2,0},1,A). cover_which_nodes(Expected) -> case cover:which_nodes() of diff --git a/lib/typer/src/typer.erl b/lib/typer/src/typer.erl index 572bf24ca4..cbad05081e 100644 --- a/lib/typer/src/typer.erl +++ b/lib/typer/src/typer.erl @@ -405,7 +405,7 @@ get_type({{M, F, A} = MFA, Range, Arg}, CodeServer, Records) -> case dialyzer_codeserver:lookup_mfa_contract(MFA, CodeServer) of error -> {{F, A}, {Range, Arg}}; - {ok, {_FileLine, Contract}} -> + {ok, {_FileLine, Contract, _Xtra}} -> Sig = erl_types:t_fun(Arg, Range), case dialyzer_contracts:check_contract(Contract, Sig) of ok -> {{F, A}, {contract, Contract}}; diff --git a/lib/wx/src/wx_object.erl b/lib/wx/src/wx_object.erl index 80f8937656..2c016e7951 100644 --- a/lib/wx/src/wx_object.erl +++ b/lib/wx/src/wx_object.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2008-2011. All Rights Reserved. +%% Copyright Ericsson AB 2008-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -102,6 +102,7 @@ %% API -export([start/3, start/4, start_link/3, start_link/4, + stop/1, stop/3, call/2, call/3, cast/2, reply/2, @@ -215,6 +216,42 @@ gen_response({ok, Pid}) -> gen_response(Reply) -> Reply. +%% @spec (Ref::wxObject()|atom()|pid()) -> ok +%% @doc Stops a generic wx_object server with reason 'normal'. +%% Invokes terminate(Reason,State) in the server. The call waits until +%% the process is terminated. If the process does not exist, an +%% exception is raised. +stop(Ref = #wx_ref{state=Pid}) when is_pid(Pid) -> + try + gen:stop(Pid) + catch _:ExitReason -> + erlang:error({ExitReason, {?MODULE, stop, [Ref]}}) + end; +stop(Name) when is_atom(Name) orelse is_pid(Name) -> + try + gen:stop(Name) + catch _:ExitReason -> + erlang:error({ExitReason, {?MODULE, stop, [Name]}}) + end. + +%% @spec (Ref::wxObject()|atom()|pid(), Reason::term(), Timeout::timeout()) -> ok +%% @doc Stops a generic wx_object server with the given Reason. +%% Invokes terminate(Reason,State) in the server. The call waits until +%% the process is terminated. 
If the call times out, or if the process +%% does not exist, an exception is raised. +stop(Ref = #wx_ref{state=Pid}, Reason, Timeout) when is_pid(Pid) -> + try + gen:stop(Pid, Reason, Timeout) + catch _:ExitReason -> + erlang:error({ExitReason, {?MODULE, stop, [Ref, Reason, Timeout]}}) + end; +stop(Name, Reason, Timeout) when is_atom(Name) orelse is_pid(Name) -> + try + gen:stop(Name, Reason, Timeout) + catch _:ExitReason -> + erlang:error({ExitReason, {?MODULE, stop, [Name, Reason, Timeout]}}) + end. + %% @spec (Ref::wxObject()|atom()|pid(), Request::term()) -> term() %% @doc Make a call to a wx_object server. %% The call waits until it gets a result. @@ -563,22 +600,10 @@ opt(_, []) -> %% @hidden debug_options(Name, Opts) -> case opt(debug, Opts) of - {ok, Options} -> dbg_options(Name, Options); - _ -> dbg_options(Name, []) + {ok, Options} -> dbg_opts(Name, Options); + _ -> [] end. %% @hidden -dbg_options(Name, []) -> - Opts = - case init:get_argument(generic_debug) of - error -> - []; - _ -> - [log, statistics] - end, - dbg_opts(Name, Opts); -dbg_options(Name, Opts) -> - dbg_opts(Name, Opts). -%% @hidden dbg_opts(Name, Opts) -> case catch sys:debug_options(Opts) of {'EXIT',_} -> diff --git a/lib/wx/test/wx_event_SUITE.erl b/lib/wx/test/wx_event_SUITE.erl index f9f8788d8f..3252547c9b 100644 --- a/lib/wx/test/wx_event_SUITE.erl +++ b/lib/wx/test/wx_event_SUITE.erl @@ -544,13 +544,14 @@ handler_clean(_Config) -> ?mt(wxFrame, Frame1), wxWindow:show(Frame1), ?m([_|_], lists:sort(wx_test_lib:flush())), - ?m({stop,_}, wx_obj_test:stop(Frame1, fun(_) -> normal end)), + ?m(ok, wx_obj_test:stop(Frame1)), ?m([{terminate,normal}], lists:sort(wx_test_lib:flush())), - Frame2 = wx_obj_test:start([{init, Init}]), + Terminate = fun({Frame,_}) -> wxWindow:destroy(Frame) end, + Frame2 = wx_obj_test:start([{init, Init}, {terminate, Terminate}]), wxWindow:show(Frame2), ?m([_|_], lists:sort(wx_test_lib:flush())), - ?m({stop,_}, wx_obj_test:stop(Frame2, fun(_) -> wxWindow:destroy(Frame2), normal end)), + ?m(ok, wx_obj_test:stop(Frame2)), ?m([{terminate,normal}], lists:sort(wx_test_lib:flush())), timer:sleep(104), ?m({[],[],[]}, white_box_check_event_handlers()), diff --git a/lib/wx/test/wx_obj_test.erl b/lib/wx/test/wx_obj_test.erl index f47f2fbc46..6c648c65f8 100644 --- a/lib/wx/test/wx_obj_test.erl +++ b/lib/wx/test/wx_obj_test.erl @@ -1,7 +1,7 @@ %% %% %CopyrightBegin% %% -%% Copyright Ericsson AB 2011-2013. All Rights Reserved. +%% Copyright Ericsson AB 2011-2014. All Rights Reserved. %% %% The contents of this file are subject to the Erlang Public License, %% Version 1.1, (the "License"); you may not use this file except in @@ -18,7 +18,7 @@ -module(wx_obj_test). -include_lib("wx/include/wx.hrl"). --export([start/1, stop/2]). +-export([start/1, stop/1]). %% wx_object callbacks -export([init/1, handle_info/2, terminate/2, code_change/3, handle_call/3, @@ -29,8 +29,8 @@ start(Opts) -> wx_object:start_link(?MODULE, [{parent, self()}| Opts], []). -stop(Object, Fun) -> - wx_object:call(Object, {stop, Fun}). +stop(Object) -> + wx_object:stop(Object). init(Opts) -> Parent = proplists:get_value(parent, Opts), @@ -61,8 +61,6 @@ handle_event(Event, State = #state{parent=Parent}) -> handle_call(What, From, State = #state{user_state=US}) when is_function(What) -> Result = What(US), {reply, {call, Result, From}, State}; -handle_call({stop, Fun}, From, State = #state{user_state=US}) -> - {stop, Fun(US), {stop, From}, State}; handle_call(What, From, State) -> {reply, {call, What, From}, State}. 
@@ -79,7 +77,13 @@ handle_info(What, State = #state{parent=Pid}) -> Pid ! {info, What}, {noreply, State}. -terminate(What, #state{parent=Pid}) -> +terminate(What, #state{parent=Pid, opts=Opts, user_state=US}) -> + case proplists:get_value(terminate, Opts) of + undefined -> + ok; + Terminate -> + Terminate(US) + end, Pid ! {terminate, What}, ok.
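A short usage sketch of the new wx_object:stop/1 and stop/3 used by the updated tests above; the object variable, stop reason, and timeout value are hypothetical.

%% Sketch only: stop a wx_object server and wait for it to terminate.
shutdown(Obj) ->
    ok = wx_object:stop(Obj).                  %% reason 'normal'

shutdown(Obj, Reason) ->
    ok = wx_object:stop(Obj, Reason, 5000).    %% raises on timeout or noproc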