Diffstat (limited to 'lib/dialyzer')
56 files changed, 545 insertions, 394 deletions
diff --git a/lib/dialyzer/RELEASE_NOTES b/lib/dialyzer/RELEASE_NOTES index 2457faa07a..299cc8642f 100644 --- a/lib/dialyzer/RELEASE_NOTES +++ b/lib/dialyzer/RELEASE_NOTES @@ -181,7 +181,7 @@ Version 1.8.0 (in Erlang/OTP R12B-2) - Dialyzer has a new warning option -Wunmatched_returns which warns for function calls that ignore the return value. This catches many common programming errors (e.g. calling file:close/1 - and not checking for the absense of errors), interface discrepancies + and not checking for the absence of errors), interface discrepancies (e.g. a function returning multiple values when in reality the function is void and only called for its side-effects), calling the wrong function (e.g. io_lib:format/1 instead of io:format/1), and even possible diff --git a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl index ae1e4d8c38..aeeb895a0c 100644 --- a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl +++ b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl @@ -114,7 +114,6 @@ loop(#server_state{parent = Parent} = State, %% The Analysis %%-------------------------------------------------------------------- -%% Calls to erlang:garbage_collect() help to reduce the heap size. analysis_start(Parent, Analysis, LegalWarnings) -> CServer = dialyzer_codeserver:new(), Plt = Analysis#analysis.plt, @@ -136,11 +135,9 @@ analysis_start(Parent, Analysis, LegalWarnings) -> %% Remote type postprocessing NewCServer = try - NewRecords = dialyzer_codeserver:get_temp_records(TmpCServer0), + TmpCServer1 = dialyzer_utils:merge_types(TmpCServer0, Plt), NewExpTypes = dialyzer_codeserver:get_temp_exported_types(TmpCServer0), - OldRecords = dialyzer_plt:get_types(Plt), OldExpTypes0 = dialyzer_plt:get_exported_types(Plt), - MergedRecords = dialyzer_utils:merge_records(NewRecords, OldRecords), RemMods = [case Analysis#analysis.start_from of byte_code -> list_to_atom(filename:basename(F, ".beam")); @@ -148,25 +145,20 @@ analysis_start(Parent, Analysis, LegalWarnings) -> end || F <- Files], OldExpTypes1 = dialyzer_utils:sets_filter(RemMods, OldExpTypes0), MergedExpTypes = sets:union(NewExpTypes, OldExpTypes1), - TmpCServer1 = dialyzer_codeserver:set_temp_records(MergedRecords, TmpCServer0), TmpCServer2 = dialyzer_codeserver:finalize_exported_types(MergedExpTypes, TmpCServer1), - erlang:garbage_collect(), + erlang:garbage_collect(), % reduce heap size ?timing(State#analysis_state.timing_server, "remote", contracts_and_records(TmpCServer2)) catch throw:{error, _ErrorMsg} = Error -> exit(Error) end, - NewPlt0 = dialyzer_plt:insert_types(Plt, dialyzer_codeserver:get_records(NewCServer)), - ExpTypes = dialyzer_codeserver:get_exported_types(NewCServer), - NewPlt1 = dialyzer_plt:insert_exported_types(NewPlt0, ExpTypes), - State0 = State#analysis_state{plt = NewPlt1}, - dump_callgraph(Callgraph, State0, Analysis), + dump_callgraph(Callgraph, State, Analysis), %% Remove all old versions of the files being analyzed AllNodes = dialyzer_callgraph:all_nodes(Callgraph), - Plt1_a = dialyzer_plt:delete_list(NewPlt1, AllNodes), + Plt1_a = dialyzer_plt:delete_list(Plt, AllNodes), Plt1 = dialyzer_plt:insert_callbacks(Plt1_a, NewCServer), - State1 = State0#analysis_state{codeserver = NewCServer, plt = Plt1}, + State1 = State#analysis_state{codeserver = NewCServer, plt = Plt1}, Exports = dialyzer_codeserver:get_exports(NewCServer), NonExports = sets:subtract(sets:from_list(AllNodes), Exports), NonExportsList = sets:to_list(NonExports), @@ -176,14 +168,17 @@ analysis_start(Parent, 
Analysis, LegalWarnings) -> false -> Callgraph end, State2 = analyze_callgraph(NewCallgraph, State1), - #analysis_state{plt = MiniPlt2, doc_plt = DocPlt} = State2, + #analysis_state{plt = MiniPlt2, + doc_plt = DocPlt, + codeserver = Codeserver0} = State2, + {Codeserver, MiniPlt3} = move_data(Codeserver0, MiniPlt2), dialyzer_callgraph:dispose_race_server(NewCallgraph), rcv_and_send_ext_types(Parent), %% Since the PLT is never used, a dummy is sent: DummyPlt = dialyzer_plt:new(), - send_codeserver_plt(Parent, CServer, DummyPlt), - MiniPlt3 = dialyzer_plt:delete_list(MiniPlt2, NonExportsList), - send_analysis_done(Parent, MiniPlt3, DocPlt). + send_codeserver_plt(Parent, Codeserver, DummyPlt), + MiniPlt4 = dialyzer_plt:delete_list(MiniPlt3, NonExportsList), + send_analysis_done(Parent, MiniPlt4, DocPlt). contracts_and_records(CodeServer) -> Fun = contrs_and_recs(CodeServer), @@ -200,15 +195,20 @@ contracts_and_records(CodeServer) -> contrs_and_recs(TmpCServer2) -> fun() -> Parent = receive {Pid, go} -> Pid end, - {TmpCServer3, RecordDict} = - dialyzer_utils:process_record_remote_types(TmpCServer2), + TmpCServer3 = dialyzer_utils:process_record_remote_types(TmpCServer2), TmpServer4 = - dialyzer_contracts:process_contract_remote_types(TmpCServer3, - RecordDict), + dialyzer_contracts:process_contract_remote_types(TmpCServer3), dialyzer_codeserver:give_away(TmpServer4, Parent), exit(TmpServer4) end. +move_data(CServer, MiniPlt) -> + {CServer1, Records} = dialyzer_codeserver:extract_records(CServer), + MiniPlt1 = dialyzer_plt:insert_types(MiniPlt, Records), + {NewCServer, ExpTypes} = dialyzer_codeserver:extract_exported_types(CServer1), + NewMiniPlt = dialyzer_plt:insert_exported_types(MiniPlt1, ExpTypes), + {NewCServer, NewMiniPlt}. + analyze_callgraph(Callgraph, #analysis_state{codeserver = Codeserver, doc_plt = DocPlt, plt = Plt, @@ -603,6 +603,7 @@ send_ext_types(Parent, ExtTypes) -> ok. send_codeserver_plt(Parent, CServer, Plt) -> + ok = dialyzer_codeserver:give_away(CServer, Parent), Parent ! {self(), cserver, CServer, Plt}, ok. diff --git a/lib/dialyzer/src/dialyzer_callgraph.erl b/lib/dialyzer/src/dialyzer_callgraph.erl index 68f3d7a240..6387f3d1e4 100644 --- a/lib/dialyzer/src/dialyzer_callgraph.erl +++ b/lib/dialyzer/src/dialyzer_callgraph.erl @@ -40,7 +40,7 @@ module_postorder_from_funs/2, new/0, get_depends_on/2, - get_required_by/2, + %% get_required_by/2, in_neighbours/2, renew_race_info/4, renew_race_code/2, @@ -250,12 +250,12 @@ get_depends_on(SCC, #callgraph{active_digraph = {'e', Out, _In, Maps}}) -> get_depends_on(SCC, #callgraph{active_digraph = {'d', DG}}) -> digraph:out_neighbours(DG, SCC). --spec get_required_by(scc() | module(), callgraph()) -> [scc()]. +%% -spec get_required_by(scc() | module(), callgraph()) -> [scc()]. -get_required_by(SCC, #callgraph{active_digraph = {'e', _Out, In, Maps}}) -> - lookup_scc(SCC, In, Maps); -get_required_by(SCC, #callgraph{active_digraph = {'d', DG}}) -> - digraph:in_neighbours(DG, SCC). +%% get_required_by(SCC, #callgraph{active_digraph = {'e', _Out, In, Maps}}) -> +%% lookup_scc(SCC, In, Maps); +%% get_required_by(SCC, #callgraph{active_digraph = {'d', DG}}) -> +%% digraph:in_neighbours(DG, SCC). 
lookup_scc(SCC, Table, Maps) -> case ets_lookup_dict({'scc', SCC}, Maps) of @@ -285,9 +285,11 @@ module_postorder(#callgraph{digraph = DG}) -> Nodes = sets:from_list([M || {M,_F,_A} <- digraph_vertices(DG)]), MDG = digraph:new([acyclic]), digraph_confirm_vertices(sets:to_list(Nodes), MDG), - Foreach = fun({M1,M2}) -> digraph:add_edge(MDG, M1, M2) end, + Foreach = fun({M1,M2}) -> _ = digraph:add_edge(MDG, M1, M2) end, lists:foreach(Foreach, sets:to_list(Edges)), - {digraph_utils:topsort(MDG), {'d', MDG}}. + %% The out-neighbors of a vertex are the vertices called directly. + %% The used vertices are to occur *before* the calling vertex: + {lists:reverse(digraph_utils:topsort(MDG)), {'d', MDG}}. edge_fold({{M1,_,_},{M2,_,_}}, Set) -> case M1 =/= M2 of @@ -305,7 +307,7 @@ module_deps(#callgraph{digraph = DG}) -> Nodes = sets:from_list([M || {M,_F,_A} <- digraph_vertices(DG)]), MDG = digraph:new(), digraph_confirm_vertices(sets:to_list(Nodes), MDG), - Foreach = fun({M1,M2}) -> digraph:add_edge(MDG, M1, M2) end, + Foreach = fun({M1,M2}) -> check_add_edge(MDG, M1, M2) end, lists:foreach(Foreach, sets:to_list(Edges)), Deps = [{N, ordsets:from_list(digraph:in_neighbours(MDG, N))} || N <- sets:to_list(Nodes)], @@ -363,7 +365,7 @@ ets_lookup_set(Key, Table) -> %% The core tree must be labeled as by cerl_trees:label/1 (or /2). %% The set of labels in the tree must be disjoint from the set of -%% labels already occuring in the callgraph. +%% labels already occurring in the callgraph. -spec scan_core_tree(cerl:c_module(), callgraph()) -> {[mfa_or_funlbl()], [callgraph_edge()]}. @@ -552,9 +554,21 @@ digraph_add_edge(From, To, DG) -> false -> digraph:add_vertex(DG, To); {To, _} -> ok end, - digraph:add_edge(DG, {From, To}, From, To, []), + check_add_edge(DG, {From, To}, From, To, []), ok. +check_add_edge(G, V1, V2) -> + case digraph:add_edge(G, V1, V2) of + {error, Error} -> exit({add_edge, V1, V2, Error}); + _Edge -> ok + end. + +check_add_edge(G, E, V1, V2, L) -> + case digraph:add_edge(G, E, V1, V2, L) of + {error, Error} -> exit({add_edge, E, V1, V2, L, Error}); + _Edge -> ok + end. + digraph_confirm_vertices([MFA|Left], DG) -> digraph:add_vertex(DG, MFA, confirmed), digraph_confirm_vertices(Left, DG); @@ -762,28 +776,53 @@ to_ps(#callgraph{} = CG, File, Args) -> ok. condensation(G) -> - SCCs = digraph_utils:strong_components(G), - %% Assign unique numbers to SCCs: - Ints = lists:seq(1, length(SCCs)), - IntToSCC = lists:zip(Ints, SCCs), - IntScc = sofs:relation(IntToSCC, [{int, scc}]), - %% Subsitute strong components for vertices in edges using the - %% unique numbers: - C2V = sofs:relation([{SC, V} || SC <- SCCs, V <- SC], [{scc, v}]), - I2V = sofs:relative_product(IntScc, C2V), % [{v, int}] - Es = sofs:relation(digraph:edges(G), [{v, v}]), - R1 = sofs:relative_product(I2V, Es), - R2 = sofs:relative_product(I2V, sofs:converse(R1)), - %% Create in- and out-neighbours: - In = sofs:relation_to_family(sofs:strict_relation(R2)), - R3 = sofs:converse(R2), - Out = sofs:relation_to_family(sofs:strict_relation(R3)), - [OutETS, InETS, MapsETS] = - [ets:new(Name,[{read_concurrency, true}]) || - Name <- [callgraph_deps_out, callgraph_deps_in, callgraph_scc_map]], - ets:insert(OutETS, sofs:to_external(Out)), - ets:insert(InETS, sofs:to_external(In)), - %% Create mappings from SCCs to unique integers, and the inverse: - ets:insert(MapsETS, lists:zip([{'scc', SCC} || SCC<- SCCs], Ints)), - ets:insert(MapsETS, IntToSCC), - {{'e', OutETS, InETS, MapsETS}, SCCs}. 
+ erlang:garbage_collect(), % reduce heap size + {Pid, Ref} = erlang:spawn_monitor(do_condensation(G, self())), + receive {'DOWN', Ref, process, Pid, Result} -> + {SCCInts, OutETS, InETS, MapsETS} = Result, + NewSCCs = [ets:lookup_element(MapsETS, SCCInt, 2) || SCCInt <- SCCInts], + {{'e', OutETS, InETS, MapsETS}, NewSCCs} + end. + +-spec do_condensation(digraph:graph(), pid()) -> fun(() -> no_return()). + +do_condensation(G, Parent) -> + fun() -> + [OutETS, InETS, MapsETS] = + [ets:new(Name,[{read_concurrency, true}]) || + Name <- [callgraph_deps_out, callgraph_deps_in, callgraph_scc_map]], + SCCs = digraph_utils:strong_components(G), + %% Assign unique numbers to SCCs: + Ints = lists:seq(1, length(SCCs)), + IntToSCC = lists:zip(Ints, SCCs), + IntScc = sofs:relation(IntToSCC, [{int, scc}]), + %% Create mapping from unique integers to SCCs: + ets:insert(MapsETS, IntToSCC), + %% Subsitute strong components for vertices in edges using the + %% unique numbers: + C2V = sofs:relation([{SC, V} || SC <- SCCs, V <- SC], [{scc, v}]), + I2V = sofs:relative_product(IntScc, C2V), % [{v, int}] + Es = sofs:relation(digraph:edges(G), [{v, v}]), + R1 = sofs:relative_product(I2V, Es), + R2 = sofs:relative_product(I2V, sofs:converse(R1)), + R2Strict = sofs:strict_relation(R2), + %% Create out-neighbours: + Out = sofs:relation_to_family(sofs:converse(R2Strict)), + ets:insert(OutETS, sofs:to_external(Out)), + %% Sort the SCCs topologically: + DG = sofs:family_to_digraph(Out), + lists:foreach(fun(I) -> digraph:add_vertex(DG, I) end, Ints), + SCCInts0 = digraph_utils:topsort(DG), + digraph:delete(DG), + %% The out-neighbors of a vertex are the vertices called directly. + %% The used vertices are to occur *before* the calling vertex: + SCCInts = lists:reverse(SCCInts0), + %% Create in-neighbours: + In = sofs:relation_to_family(R2Strict), + ets:insert(InETS, sofs:to_external(In)), + %% Create mapping from SCCs to unique integers: + ets:insert(MapsETS, lists:zip([{'scc', SCC} || SCC<- SCCs], Ints)), + lists:foreach(fun(E) -> true = ets:give_away(E, Parent, any) + end, [OutETS, InETS, MapsETS]), + exit({SCCInts, OutETS, InETS, MapsETS}) + end. diff --git a/lib/dialyzer/src/dialyzer_cl.erl b/lib/dialyzer/src/dialyzer_cl.erl index 158ee761af..8500c59ebe 100644 --- a/lib/dialyzer/src/dialyzer_cl.erl +++ b/lib/dialyzer/src/dialyzer_cl.erl @@ -30,6 +30,8 @@ -record(cl_state, {backend_pid :: pid() | 'undefined', + code_server = none :: 'none' + | dialyzer_codeserver:codeserver(), erlang_mode = false :: boolean(), external_calls = [] :: [mfa()], external_types = [] :: [mfa()], @@ -630,6 +632,9 @@ cl_loop(State, LogCache) -> {BackendPid, warnings, Warnings} -> NewState = store_warnings(State, Warnings), cl_loop(NewState, LogCache); + {BackendPid, cserver, CodeServer, _Plt} -> % Plt is ignored + NewState = State#cl_state{code_server = CodeServer}, + cl_loop(NewState, LogCache); {BackendPid, done, NewMiniPlt, _NewDocPlt} -> return_value(State, NewMiniPlt); {BackendPid, ext_calls, ExtCalls} -> @@ -647,7 +652,6 @@ cl_loop(State, LogCache) -> cl_error(State, Msg); _Other -> %% io:format("Received ~p\n", [_Other]), - %% Note: {BackendPid, cserver, CodeServer, Plt} is ignored. cl_loop(State, LogCache) end. @@ -688,18 +692,34 @@ cl_error(State, Msg) -> maybe_close_output_file(State), throw({dialyzer_error, lists:flatten(Msg)}). 
-return_value(State = #cl_state{erlang_mode = ErlangMode, +return_value(State = #cl_state{code_server = CodeServer, + erlang_mode = ErlangMode, mod_deps = ModDeps, output_plt = OutputPlt, plt_info = PltInfo, stored_warnings = StoredWarnings}, MiniPlt) -> + %% Just for now: + case CodeServer =:= none of + true -> + ok; + false -> + dialyzer_codeserver:delete(CodeServer) + end, case OutputPlt =:= none of true -> dialyzer_plt:delete(MiniPlt); false -> - Plt = dialyzer_plt:restore_full_plt(MiniPlt), - dialyzer_plt:to_file(OutputPlt, Plt, ModDeps, PltInfo) + Fun = to_file_fun(OutputPlt, MiniPlt, ModDeps, PltInfo), + {Pid, Ref} = erlang:spawn_monitor(Fun), + dialyzer_plt:give_away(MiniPlt, Pid), + Pid ! go, + receive {'DOWN', Ref, process, Pid, Result} -> + case Result of + ok -> ok; + Thrown -> throw(Thrown) + end + end end, UnknownWarnings = unknown_warnings(State), RetValue = @@ -720,6 +740,16 @@ return_value(State = #cl_state{erlang_mode = ErlangMode, {RetValue, set_warning_id(AllWarnings)} end. +-spec to_file_fun(_, _, _, _) -> fun(() -> no_return()). + +to_file_fun(Filename, MiniPlt, ModDeps, PltInfo) -> + fun() -> + receive go -> ok end, + Plt = dialyzer_plt:restore_full_plt(MiniPlt), + dialyzer_plt:to_file(Filename, Plt, ModDeps, PltInfo), + exit(ok) + end. + unknown_warnings(State = #cl_state{legal_warnings = LegalWarnings}) -> Unknown = case ordsets:is_element(?WARN_UNKNOWN, LegalWarnings) of true -> diff --git a/lib/dialyzer/src/dialyzer_codeserver.erl b/lib/dialyzer/src/dialyzer_codeserver.erl index f53c713bfe..a1a7370eff 100644 --- a/lib/dialyzer/src/dialyzer_codeserver.erl +++ b/lib/dialyzer/src/dialyzer_codeserver.erl @@ -26,18 +26,21 @@ give_away/2, finalize_contracts/1, finalize_exported_types/2, - finalize_records/2, + finalize_records/1, get_contracts/1, get_callbacks/1, get_exported_types/1, + extract_exported_types/1, get_exports/1, - get_records/1, + get_records_table/1, + extract_records/1, get_next_core_label/1, get_temp_contracts/2, - contracts_modules/1, + all_temp_modules/1, store_contracts/4, get_temp_exported_types/1, - get_temp_records/1, + get_temp_records_table/1, + lookup_temp_mod_records/2, insert/3, insert_exports/2, insert_temp_exported_types/2, @@ -52,7 +55,6 @@ lookup_meta_info/2, new/0, set_next_core_label/2, - set_temp_records/2, store_temp_records/3, translate_fake_file/3]). @@ -67,10 +69,8 @@ -type set_ets() :: ets:tid(). -type types() :: erl_types:type_table(). --type mod_records() :: erl_types:mod_records(). -type contracts() :: #{mfa() => dialyzer_contracts:file_contract()}. --type mod_contracts() :: dict:dict(module(), contracts()). %% A property-list of data compiled from -compile and -dialyzer attributes. -type meta_info() :: [{{'nowarn_function' | dial_warn_tag()}, @@ -80,8 +80,8 @@ -record(codeserver, {next_core_label = 0 :: label(), code :: dict_ets(), - exported_types :: set_ets(), % set(mfa()) - records :: map_ets(), + exported_types :: 'clean' | set_ets(), % set(mfa()) + records :: 'clean' | map_ets(), contracts :: map_ets(), callbacks :: map_ets(), fun_meta_info :: dict_ets(), % {mfa(), meta_info()} @@ -107,9 +107,6 @@ ets_map_store(Key, Element, Table) -> true = ets:insert(Table, {Key, Element}), Table. -ets_dict_store_dict(Dict, Table) -> - true = ets:insert(Table, dict:to_list(Dict)). - ets_dict_to_dict(Table) -> Fold = fun({Key,Value}, Dict) -> dict:store(Key, Value, Dict) end, ets:foldl(Fold, dict:new(), Table). @@ -164,11 +161,8 @@ new() -> -spec delete(codeserver()) -> 'ok'. 
-delete(#codeserver{code = Code, exported_types = ExportedTypes, - records = Records, contracts = Contracts, - callbacks = Callbacks}) -> - lists:foreach(fun ets:delete/1, - [Code, ExportedTypes, Records, Contracts, Callbacks]). +delete(CServer) -> + lists:foreach(fun(Table) -> true = ets:delete(Table) end, tables(CServer)). -spec insert(atom(), cerl:c_module(), codeserver()) -> codeserver(). @@ -222,6 +216,11 @@ is_exported(MFA, #codeserver{exports = Exports}) -> get_exported_types(#codeserver{exported_types = ExpTypes}) -> ets_set_to_set(ExpTypes). +-spec extract_exported_types(codeserver()) -> {codeserver(), set_ets()}. + +extract_exported_types(#codeserver{exported_types = ExpTypes} = CS) -> + {CS#codeserver{exported_types = 'clean'}, ExpTypes}. + -spec get_exports(codeserver()) -> sets:set(mfa()). get_exports(#codeserver{exports = Exports}) -> @@ -269,10 +268,15 @@ lookup_mod_records(Mod, #codeserver{records = RecDict}) when is_atom(Mod) -> {ok, Map} -> Map end. --spec get_records(codeserver()) -> mod_records(). +-spec get_records_table(codeserver()) -> map_ets(). + +get_records_table(#codeserver{records = RecDict}) -> + RecDict. -get_records(#codeserver{records = RecDict}) -> - ets_dict_to_dict(RecDict). +-spec extract_records(codeserver()) -> {codeserver(), map_ets()}. + +extract_records(#codeserver{records = RecDict} = CS) -> + {CS#codeserver{records = clean}, RecDict}. -spec store_temp_records(module(), types(), codeserver()) -> codeserver(). @@ -283,26 +287,26 @@ store_temp_records(Mod, Map, #codeserver{temp_records = TempRecDict} = CS) false -> CS#codeserver{temp_records = ets_map_store(Mod, Map, TempRecDict)} end. --spec get_temp_records(codeserver()) -> mod_records(). +-spec get_temp_records_table(codeserver()) -> map_ets(). -get_temp_records(#codeserver{temp_records = TempRecDict}) -> - ets_dict_to_dict(TempRecDict). +get_temp_records_table(#codeserver{temp_records = TempRecDict}) -> + TempRecDict. --spec set_temp_records(mod_records(), codeserver()) -> codeserver(). +-spec lookup_temp_mod_records(module(), codeserver()) -> types(). -set_temp_records(Dict, CS) -> - true = ets:delete(CS#codeserver.temp_records), - TempRecords = ets:new(dialyzer_codeserver_temp_records,[]), - true = ets_dict_store_dict(Dict, TempRecords), - CS#codeserver{temp_records = TempRecords}. +lookup_temp_mod_records(Mod, #codeserver{temp_records = TempRecDict}) -> + case ets_dict_find(Mod, TempRecDict) of + error -> maps:new(); + {ok, Map} -> Map + end. --spec finalize_records(mod_records(), codeserver()) -> codeserver(). +-spec finalize_records(codeserver()) -> codeserver(). -finalize_records(Dict, #codeserver{temp_records = TmpRecords, - records = Records} = CS) -> - true = ets:delete(TmpRecords), - true = ets_dict_store_dict(Dict, Records), - CS#codeserver{temp_records = clean}. +finalize_records(#codeserver{temp_records = TmpRecords, + records = Records} = CS) -> + true = ets:delete(Records), + ets:rename(TmpRecords, dialyzer_codeserver_records), + CS#codeserver{temp_records = clean, records = TmpRecords}. -spec lookup_mod_contracts(atom(), codeserver()) -> contracts(). @@ -331,10 +335,13 @@ lookup_meta_info(MorMFA, #codeserver{fun_meta_info = FunMetaInfo}) -> {ok, PropList} -> PropList end. --spec get_contracts(codeserver()) -> mod_contracts(). +-spec get_contracts(codeserver()) -> + dict:dict(mfa(), dialyzer_contracts:file_contract()). get_contracts(#codeserver{contracts = ContDict}) -> - ets_dict_to_dict(ContDict). 
+ dict:filter(fun({_M, _F, _A}, _) -> true; + (_, _) -> false + end, ets_dict_to_dict(ContDict)). -spec get_callbacks(codeserver()) -> list(). @@ -348,12 +355,14 @@ store_temp_contracts(Mod, SpecMap, CallbackMap, #codeserver{temp_contracts = Cn, temp_callbacks = Cb} = CS) when is_atom(Mod) -> + %% Make sure Mod is stored even if there are not callbacks or + %% contracts. CS1 = CS#codeserver{temp_contracts = ets_map_store(Mod, SpecMap, Cn)}, CS1#codeserver{temp_callbacks = ets_map_store(Mod, CallbackMap, Cb)}. --spec contracts_modules(codeserver()) -> [module()]. +-spec all_temp_modules(codeserver()) -> [module()]. -contracts_modules(#codeserver{temp_contracts = TempContTable}) -> +all_temp_modules(#codeserver{temp_contracts = TempContTable}) -> ets:select(TempContTable, [{{'$1', '$2'}, [], ['$1']}]). -spec store_contracts(module(), contracts(), contracts(), codeserver()) -> @@ -380,17 +389,25 @@ get_temp_contracts(Mod, #codeserver{temp_contracts = TempContDict, -spec give_away(codeserver(), pid()) -> 'ok'. -give_away(#codeserver{temp_records = TempRecords, - temp_contracts = TempContracts, - temp_callbacks = TempCallbacks, - records = Records, - contracts = Contracts, - callbacks = Callbacks}, Pid) -> - _ = [true = ets:give_away(Table, Pid, any) || - Table <- [TempRecords, TempContracts, TempCallbacks, - Records, Contracts, Callbacks], - Table =/= clean], - ok. +give_away(CServer, Pid) -> + lists:foreach(fun(Table) -> true = ets:give_away(Table, Pid, any) + end, tables(CServer)). + +tables(#codeserver{code = Code, + fun_meta_info = FunMetaInfo, + exports = Exports, + temp_exported_types = TempExpTypes, + temp_records = TempRecords, + temp_contracts = TempContracts, + temp_callbacks = TempCallbacks, + exported_types = ExportedTypes, + records = Records, + contracts = Contracts, + callbacks = Callbacks}) -> + [Table || Table <- [Code, FunMetaInfo, Exports, TempExpTypes, + TempRecords, TempContracts, TempCallbacks, + ExportedTypes, Records, Contracts, Callbacks], + Table =/= clean]. -spec finalize_contracts(codeserver()) -> codeserver(). diff --git a/lib/dialyzer/src/dialyzer_contracts.erl b/lib/dialyzer/src/dialyzer_contracts.erl index 2078e58ce8..5f24b5a668 100644 --- a/lib/dialyzer/src/dialyzer_contracts.erl +++ b/lib/dialyzer/src/dialyzer_contracts.erl @@ -24,7 +24,7 @@ get_contract_return/2, %% get_contract_signature/1, is_overloaded/1, - process_contract_remote_types/2, + process_contract_remote_types/1, store_tmp_contract/5]). -export_type([file_contract/0, plt_contracts/0]). @@ -139,18 +139,18 @@ sequence([], _Delimiter) -> ""; sequence([H], _Delimiter) -> H; sequence([H|T], Delimiter) -> H ++ Delimiter ++ sequence(T, Delimiter). --spec process_contract_remote_types(dialyzer_codeserver:codeserver(), - erl_types:mod_records()) -> +-spec process_contract_remote_types(dialyzer_codeserver:codeserver()) -> dialyzer_codeserver:codeserver(). 
-process_contract_remote_types(CodeServer, RecordDict) -> - Mods = dialyzer_codeserver:contracts_modules(CodeServer), +process_contract_remote_types(CodeServer) -> + Mods = dialyzer_codeserver:all_temp_modules(CodeServer), + RecordTable = dialyzer_codeserver:get_records_table(CodeServer), ExpTypes = dialyzer_codeserver:get_exported_types(CodeServer), ContractFun = fun({{_M, _F, _A}=MFA, {File, TmpContract, Xtra}}, C0) -> #tmp_contract{contract_funs = CFuns, forms = Forms} = TmpContract, {NewCs, C2} = lists:mapfoldl(fun(CFun, C1) -> - CFun(ExpTypes, RecordDict, C1) + CFun(ExpTypes, RecordTable, C1) end, C0, CFuns), Args = general_domain(NewCs), Contract = #contract{contracts = NewCs, args = Args, forms = Forms}, @@ -177,7 +177,7 @@ process_contract_remote_types(CodeServer, RecordDict) -> -type fun_types() :: dict:dict(label(), erl_types:type_table()). --spec check_contracts([{mfa(), file_contract()}], +-spec check_contracts(orddict:orddict(mfa(), file_contract()), dialyzer_callgraph:callgraph(), fun_types(), opaques_fun()) -> plt_contracts(). @@ -206,7 +206,7 @@ check_contracts(Contracts, Callgraph, FunTypes, FindOpaques) -> error -> NewContracts end end, - dict:fold(FoldFun, [], FunTypes). + orddict:from_list(dict:fold(FoldFun, [], FunTypes)). %% Checks all components of a contract -spec check_contract(#contract{}, erl_types:erl_type()) -> 'ok' | {'error', term()}. @@ -451,10 +451,10 @@ contract_from_form(Forms, MFA, RecDict, FileLine) -> contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], MFA, RecDict, FileLine, TypeAcc, FormAcc) -> TypeFun = - fun(ExpTypes, AllRecords, Cache) -> + fun(ExpTypes, RecordTable, Cache) -> {NewType, NewCache} = try - from_form_with_check(Form, ExpTypes, MFA, AllRecords, Cache) + from_form_with_check(Form, ExpTypes, MFA, RecordTable, Cache) catch throw:{error, Msg} -> {File, Line} = FileLine, @@ -472,12 +472,12 @@ contract_from_form([{type, _L1, bounded_fun, [{type, _L2, 'fun', [_, _]} = Form, Constr]}| Left], MFA, RecDict, FileLine, TypeAcc, FormAcc) -> TypeFun = - fun(ExpTypes, AllRecords, Cache) -> + fun(ExpTypes, RecordTable, Cache) -> {Constr1, VarTable, Cache1} = - process_constraints(Constr, MFA, RecDict, ExpTypes, AllRecords, + process_constraints(Constr, MFA, RecDict, ExpTypes, RecordTable, Cache), {NewType, NewCache} = - from_form_with_check(Form, ExpTypes, MFA, AllRecords, + from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache1), NewTypeNoVars = erl_types:subst_all_vars_to_any(NewType), {{NewTypeNoVars, Constr1}, NewCache} @@ -488,28 +488,28 @@ contract_from_form([{type, _L1, bounded_fun, contract_from_form([], _MFA, _RecDict, _FileLine, TypeAcc, FormAcc) -> {lists:reverse(TypeAcc), lists:reverse(FormAcc)}. -process_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords, Cache) -> +process_constraints(Constrs, MFA, RecDict, ExpTypes, RecordTable, Cache) -> {Init0, NewCache} = initialize_constraints(Constrs, MFA, RecDict, ExpTypes, - AllRecords, Cache), + RecordTable, Cache), Init = remove_cycles(Init0), - constraints_fixpoint(Init, MFA, RecDict, ExpTypes, AllRecords, NewCache). + constraints_fixpoint(Init, MFA, RecDict, ExpTypes, RecordTable, NewCache). -initialize_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords, Cache) -> - initialize_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords, +initialize_constraints(Constrs, MFA, RecDict, ExpTypes, RecordTable, Cache) -> + initialize_constraints(Constrs, MFA, RecDict, ExpTypes, RecordTable, Cache, []). 
-initialize_constraints([], _MFA, _RecDict, _ExpTypes, _AllRecords, +initialize_constraints([], _MFA, _RecDict, _ExpTypes, _RecordTable, Cache, Acc) -> {Acc, Cache}; -initialize_constraints([Constr|Rest], MFA, RecDict, ExpTypes, AllRecords, +initialize_constraints([Constr|Rest], MFA, RecDict, ExpTypes, RecordTable, Cache, Acc) -> case Constr of {type, _, constraint, [{atom, _, is_subtype}, [Type1, Type2]]} -> VarTable = erl_types:var_table__new(), {T1, NewCache} = - final_form(Type1, ExpTypes, MFA, AllRecords, VarTable, Cache), + final_form(Type1, ExpTypes, MFA, RecordTable, VarTable, Cache), Entry = {T1, Type2}, - initialize_constraints(Rest, MFA, RecDict, ExpTypes, AllRecords, + initialize_constraints(Rest, MFA, RecDict, ExpTypes, RecordTable, NewCache, [Entry|Acc]); {type, _, constraint, [{atom,_,Name}, List]} -> N = length(List), @@ -517,18 +517,18 @@ initialize_constraints([Constr|Rest], MFA, RecDict, ExpTypes, AllRecords, io_lib:format("Unsupported type guard ~w/~w\n", [Name, N])}) end. -constraints_fixpoint(Constrs, MFA, RecDict, ExpTypes, AllRecords, Cache) -> +constraints_fixpoint(Constrs, MFA, RecDict, ExpTypes, RecordTable, Cache) -> VarTable = erl_types:var_table__new(), {VarTab, NewCache} = - constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords, + constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, RecordTable, VarTable, Cache), constraints_fixpoint(VarTab, MFA, Constrs, RecDict, ExpTypes, - AllRecords, NewCache). + RecordTable, NewCache). constraints_fixpoint(OldVarTab, MFA, Constrs, RecDict, ExpTypes, - AllRecords, Cache) -> + RecordTable, Cache) -> {NewVarTab, NewCache} = - constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords, + constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, RecordTable, OldVarTab, Cache), case NewVarTab of OldVarTab -> @@ -540,38 +540,38 @@ constraints_fixpoint(OldVarTab, MFA, Constrs, RecDict, ExpTypes, {FinalConstrs, NewVarTab, NewCache}; _Other -> constraints_fixpoint(NewVarTab, MFA, Constrs, RecDict, ExpTypes, - AllRecords, NewCache) + RecordTable, NewCache) end. -final_form(Form, ExpTypes, MFA, AllRecords, VarTable, Cache) -> - from_form_with_check(Form, ExpTypes, MFA, AllRecords, VarTable, Cache). +final_form(Form, ExpTypes, MFA, RecordTable, VarTable, Cache) -> + from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache). -from_form_with_check(Form, ExpTypes, MFA, AllRecords, Cache) -> +from_form_with_check(Form, ExpTypes, MFA, RecordTable, Cache) -> VarTable = erl_types:var_table__new(), - from_form_with_check(Form, ExpTypes, MFA, AllRecords, VarTable, Cache). + from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache). -from_form_with_check(Form, ExpTypes, MFA, AllRecords, VarTable, Cache) -> +from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache) -> Site = {spec, MFA}, - C1 = erl_types:t_check_record_fields(Form, ExpTypes, Site, AllRecords, + C1 = erl_types:t_check_record_fields(Form, ExpTypes, Site, RecordTable, VarTable, Cache), - erl_types:t_from_form(Form, ExpTypes, Site, AllRecords, VarTable, C1). + erl_types:t_from_form(Form, ExpTypes, Site, RecordTable, VarTable, C1). -constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords, +constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, RecordTable, VarTab, Cache) -> {Subtypes, NewCache} = - constraints_to_subs(Constrs, MFA, RecDict, ExpTypes, AllRecords, + constraints_to_subs(Constrs, MFA, RecDict, ExpTypes, RecordTable, VarTab, Cache, []), {insert_constraints(Subtypes), NewCache}. 
-constraints_to_subs([], _MFA, _RecDict, _ExpTypes, _AllRecords, +constraints_to_subs([], _MFA, _RecDict, _ExpTypes, _RecordTable, _VarTab, Cache, Acc) -> {Acc, Cache}; -constraints_to_subs([{T1, Form2}|Rest], MFA, RecDict, ExpTypes, AllRecords, +constraints_to_subs([{T1, Form2}|Rest], MFA, RecDict, ExpTypes, RecordTable, VarTab, Cache, Acc) -> {T2, NewCache} = - final_form(Form2, ExpTypes, MFA, AllRecords, VarTab, Cache), + final_form(Form2, ExpTypes, MFA, RecordTable, VarTab, Cache), NewAcc = [{subtype, T1, T2}|Acc], - constraints_to_subs(Rest, MFA, RecDict, ExpTypes, AllRecords, + constraints_to_subs(Rest, MFA, RecDict, ExpTypes, RecordTable, VarTab, NewCache, NewAcc). %% Replaces variables with '_' when necessary to break up cycles among diff --git a/lib/dialyzer/src/dialyzer_coordinator.erl b/lib/dialyzer/src/dialyzer_coordinator.erl index 99f95a4dca..7c1bc1de5a 100644 --- a/lib/dialyzer/src/dialyzer_coordinator.erl +++ b/lib/dialyzer/src/dialyzer_coordinator.erl @@ -76,6 +76,8 @@ active = 0 :: integer(), result :: result(), next_label = 0 :: integer(), + jobs :: [job()], + job_fun :: fun(), init_data :: init_data(), regulator :: regulator(), scc_to_pid :: scc_to_pid() @@ -108,16 +110,18 @@ spawn_jobs(Mode, Jobs, InitData, Timing) -> false -> unused end, Coordinator = {Collector, Regulator, SCCtoPID}, - Fold = - fun(Job, Count) -> - Pid = dialyzer_worker:launch(Mode, Job, InitData, Coordinator), - case TypesigOrDataflow of - true -> true = ets:insert(SCCtoPID, {Job, Pid}), ok; - false -> ok - end, - Count + 1 + JobFun = + fun(Job) -> + Pid = dialyzer_worker:launch(Mode, Job, InitData, Coordinator), + case TypesigOrDataflow of + true -> true = ets:insert(SCCtoPID, {Job, Pid}); + false -> true + end end, - JobCount = lists:foldl(Fold, 0, Jobs), + JobCount = length(Jobs), + NumberOfInitJobs = min(JobCount, 20 * dialyzer_utils:parallelism()), + {InitJobs, RestJobs} = lists:split(NumberOfInitJobs, Jobs), + lists:foreach(JobFun, InitJobs), Unit = case Mode of 'typesig' -> "SCCs"; @@ -129,11 +133,13 @@ spawn_jobs(Mode, Jobs, InitData, Timing) -> 'compile' -> dialyzer_analysis_callgraph:compile_init_result(); _ -> [] end, - #state{mode = Mode, active = JobCount, result = InitResult, next_label = 0, - init_data = InitData, regulator = Regulator, scc_to_pid = SCCtoPID}. + #state{mode = Mode, active = JobCount, result = InitResult, + next_label = 0, job_fun = JobFun, jobs = RestJobs, + init_data = InitData, regulator = Regulator, scc_to_pid = SCCtoPID}. 
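The spawn_jobs/4 change above stops launching every job up front: at most min(JobCount, 20 * dialyzer_utils:parallelism()) workers are started initially, and collect_result/1 starts one more job from the saved list each time a worker reports done. A minimal, self-contained sketch of that throttling pattern follows; the module and function names (throttle_sketch, run_jobs/2, do_work/1) are illustrative only and not part of dialyzer's API.

%% Sketch only: cap the number of concurrently running workers at MaxActive
%% and start a queued job each time one finishes. All names are illustrative.
-module(throttle_sketch).
-export([run_jobs/2]).

run_jobs(Jobs, MaxActive) ->
    {First, Queued} = lists:split(min(MaxActive, length(Jobs)), Jobs),
    Parent = self(),
    Start = fun(Job) ->
                    spawn_link(fun() -> Parent ! {done, Job, do_work(Job)} end)
            end,
    lists:foreach(Start, First),
    collect(length(Jobs), Queued, Start, []).

collect(0, [], _Start, Acc) ->
    lists:reverse(Acc);
collect(Left, Queued0, Start, Acc) ->
    receive
        {done, Job, Result} ->
            %% One job finished; start the next queued one, if any.
            Queued = case Queued0 of
                         [] -> [];
                         [Next|Rest] -> Start(Next), Rest
                     end,
            collect(Left - 1, Queued, Start, [{Job, Result}|Acc])
    end.

do_work(Job) ->
    Job * Job.                                  % stand-in for real work

Calling throttle_sketch:run_jobs(lists:seq(1, 1000), 160), for example, completes all 1000 jobs while never running more than 160 of them at a time, which is the behaviour the coordinator change aims for.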
collect_result(#state{mode = Mode, active = Active, result = Result, next_label = NextLabel, init_data = InitData, + jobs = JobsLeft, job_fun = JobFun, regulator = Regulator, scc_to_pid = SCCtoPID} = State) -> receive {next_label_request, Estimation, Pid} -> @@ -141,20 +147,35 @@ collect_result(#state{mode = Mode, active = Active, result = Result, collect_result(State#state{next_label = NextLabel + Estimation}); {done, Job, Data} -> NewResult = update_result(Mode, InitData, Job, Data, Result), + TypesigOrDataflow = (Mode =:= 'typesig') orelse (Mode =:= 'dataflow'), case Active of 1 -> kill_regulator(Regulator), case Mode of 'compile' -> {NewResult, NextLabel}; - X when X =:= 'typesig'; X =:= 'dataflow' -> + _ when TypesigOrDataflow -> ets:delete(SCCtoPID), NewResult; 'warnings' -> NewResult end; N -> - collect_result(State#state{result = NewResult, active = N - 1}) + case TypesigOrDataflow of + true -> true = ets:delete(SCCtoPID, Job); + false -> true + end, + NewJobsLeft = + case JobsLeft of + [] -> []; + [NewJob|JobsLeft1] -> + JobFun(NewJob), + JobsLeft1 + end, + NewState = State#state{result = NewResult, + jobs = NewJobsLeft, + active = N - 1}, + collect_result(NewState) end end. @@ -170,18 +191,20 @@ update_result(Mode, InitData, Job, Data, Result) -> end. -spec sccs_to_pids([scc() | module()], coordinator()) -> - {[dialyzer_worker:worker()], [scc() | module()]}. + [dialyzer_worker:worker()]. sccs_to_pids(SCCs, {_Collector, _Regulator, SCCtoPID}) -> Fold = - fun(SCC, {Pids, Unknown}) -> - try ets:lookup_element(SCCtoPID, SCC, 2) of - Result -> {[Result|Pids], Unknown} - catch - _:_ -> {Pids, [SCC|Unknown]} - end + fun(SCC, Pids) -> + %% The SCCs that SCC depends on have always been started. + try ets:lookup_element(SCCtoPID, SCC, 2) of + Pid when is_pid(Pid) -> + [Pid|Pids] + catch + _:_ -> Pids + end end, - lists:foldl(Fold, {[], []}, SCCs). + lists:foldl(Fold, [], SCCs). -spec job_done(job(), job_result(), coordinator()) -> ok. diff --git a/lib/dialyzer/src/dialyzer_dataflow.erl b/lib/dialyzer/src/dialyzer_dataflow.erl index f706ebfb02..dc2238e63a 100644 --- a/lib/dialyzer/src/dialyzer_dataflow.erl +++ b/lib/dialyzer/src/dialyzer_dataflow.erl @@ -1363,7 +1363,7 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map, State, Warns) -> {{Tag, PatTypes}, false}; false -> %% Try to find out if this is a default clause in a list - %% comprehension and supress this. A real Hack(tm) + %% comprehension and suppress this. A real Hack(tm) Force0 = case is_compiler_generated(cerl:get_ann(C)) of true -> diff --git a/lib/dialyzer/src/dialyzer_plt.erl b/lib/dialyzer/src/dialyzer_plt.erl index 37c22fef48..eb63e9e695 100644 --- a/lib/dialyzer/src/dialyzer_plt.erl +++ b/lib/dialyzer/src/dialyzer_plt.erl @@ -31,9 +31,8 @@ included_files/1, from_file/1, get_default_plt/0, - get_types/1, + get_module_types/2, get_exported_types/1, - %% insert/3, insert_list/2, insert_contract_list/2, insert_callbacks/2, @@ -143,6 +142,10 @@ delete_list(#plt{info = Info, types = Types, -spec insert_contract_list(plt(), dialyzer_contracts:plt_contracts()) -> plt(). +insert_contract_list(#plt{contracts = Contracts} = PLT, List) -> + NewContracts = dict:merge(fun(_MFA, _Old, New) -> New end, + Contracts, dict:from_list(List)), + PLT#plt{contracts = NewContracts}; insert_contract_list(#mini_plt{contracts = Contracts} = PLT, List) -> true = ets:insert(Contracts, List), PLT. @@ -184,20 +187,23 @@ lookup(Plt, Label) when is_integer(Label) -> lookup_1(#mini_plt{info = Info}, MFAorLabel) -> ets_table_lookup(Info, MFAorLabel). 
--spec insert_types(plt(), erl_types:mod_records()) -> plt(). +-spec insert_types(plt(), ets:tid()) -> plt(). -insert_types(PLT, Rec) -> - PLT#plt{types = Rec}. +insert_types(MiniPLT, Records) -> + ets:rename(Records, plt_types), + MiniPLT#mini_plt{types = Records}. --spec insert_exported_types(plt(), sets:set()) -> plt(). +-spec insert_exported_types(plt(), ets:tid()) -> plt(). -insert_exported_types(PLT, Set) -> - PLT#plt{exported_types = Set}. +insert_exported_types(MiniPLT, ExpTypes) -> + ets:rename(ExpTypes, plt_exported_types), + MiniPLT#mini_plt{exported_types = ExpTypes}. --spec get_types(plt()) -> erl_types:mod_records(). +-spec get_module_types(plt(), atom()) -> + 'none' | {'value', erl_types:type_table()}. -get_types(#plt{types = Types}) -> - Types. +get_module_types(#plt{types = Types}, M) when is_atom(M) -> + table_lookup(Types, M). -spec get_exported_types(plt()) -> sets:set(). @@ -520,10 +526,12 @@ get_mini_plt(#plt{info = Info, contracts = Contracts, callbacks = Callbacks, exported_types = ExpTypes}) -> - [ETSInfo, ETSTypes, ETSContracts, ETSCallbacks, ETSExpTypes] = + [ETSInfo, ETSContracts] = [ets:new(Name, [public]) || - Name <- [plt_info, plt_types, plt_contracts, plt_callbacks, - plt_exported_types]], + Name <- [plt_info, plt_contracts]], + [ETSTypes, ETSCallbacks, ETSExpTypes] = + [ets:new(Name, [compressed, public]) || + Name <- [plt_types, plt_callbacks, plt_exported_types]], CallbackList = dict:to_list(Callbacks), CallbacksByModule = [{M, [Cb || {{M1,_,_},_} = Cb <- CallbackList, M1 =:= M]} || diff --git a/lib/dialyzer/src/dialyzer_succ_typings.erl b/lib/dialyzer/src/dialyzer_succ_typings.erl index 3c90f46e95..be685baf22 100644 --- a/lib/dialyzer/src/dialyzer_succ_typings.erl +++ b/lib/dialyzer/src/dialyzer_succ_typings.erl @@ -29,7 +29,7 @@ -export([ find_succ_types_for_scc/2, refine_one_module/2, - find_required_by/2, + %% find_required_by/2, find_depends_on/2, collect_warnings/2, lookup_names/2 @@ -236,10 +236,10 @@ refine_succ_typings(Modules, #st{codeserver = Codeserver, find_depends_on(SCC, {_Codeserver, Callgraph, _Plt, _Solvers}) -> dialyzer_callgraph:get_depends_on(SCC, Callgraph). --spec find_required_by(scc() | module(), fixpoint_init_data()) -> [scc()]. +%% -spec find_required_by(scc() | module(), fixpoint_init_data()) -> [scc()]. -find_required_by(SCC, {_Codeserver, Callgraph, _Plt, _Solvers}) -> - dialyzer_callgraph:get_required_by(SCC, Callgraph). +%% find_required_by(SCC, {_Codeserver, Callgraph, _Plt, _Solvers}) -> +%% dialyzer_callgraph:get_required_by(SCC, Callgraph). -spec lookup_names([label()], fixpoint_init_data()) -> [mfa_or_funlbl()]. diff --git a/lib/dialyzer/src/dialyzer_typesig.erl b/lib/dialyzer/src/dialyzer_typesig.erl index b33484bda4..c4f8adf7ee 100644 --- a/lib/dialyzer/src/dialyzer_typesig.erl +++ b/lib/dialyzer/src/dialyzer_typesig.erl @@ -81,7 +81,7 @@ -record(constraint_list, {type :: 'conj' | 'disj', list :: [constr()], deps :: deps(), - masks = maps:new() :: #{dep() => mask()}, + masks :: #{dep() => mask()} | 'undefined', id :: {'list', dep()} | 'undefined'}). -type constraint_list() :: #constraint_list{}. 
@@ -181,7 +181,6 @@ analyze_scc(SCC, NextLabel, CallGraph, CServer, Plt, PropTypes, Solvers0) -> M <- lists:usort([M || {M, _, _} <- SCC])], State2 = traverse_scc(SCC, CServer, DefSet, ModRecs, State1), State3 = state__finalize(State2), - erlang:garbage_collect(), Funs = state__scc(State3), pp_constrs_scc(Funs, State3), constraints_to_dot_scc(Funs, State3), @@ -202,7 +201,8 @@ traverse_scc([{M,_,_}=MFA|Left], Codeserver, DefSet, ModRecs, AccState) -> {M, Rec} = lists:keyfind(M, 1, ModRecs), TmpState1 = state__set_rec_dict(AccState, Rec), DummyLetrec = cerl:c_letrec([Def], cerl:c_atom(foo)), - {NewAccState, _} = traverse(DummyLetrec, DefSet, TmpState1), + TmpState2 = state__new_constraint_context(TmpState1), + {NewAccState, _} = traverse(DummyLetrec, DefSet, TmpState2), traverse_scc(Left, Codeserver, DefSet, ModRecs, NewAccState); traverse_scc([], _Codeserver, _DefSet, _ModRecs, AccState) -> AccState. @@ -2080,6 +2080,8 @@ v2_solve_disjunct(Disj, Map, V2State0) -> var_occurs_everywhere(V, Masks, NotFailed) -> ordsets:is_subset(NotFailed, get_mask(V, Masks)). +-dialyzer({no_improper_lists, [v2_solve_disj/10, v2_solve_conj/12]}). + v2_solve_disj([I|Is], [C|Cs], I, Map0, V2State0, UL, MapL, Eval, Uneval, Failed0) -> Id = C#constraint_list.id, @@ -2098,6 +2100,12 @@ v2_solve_disj([I|Is], [C|Cs], I, Map0, V2State0, UL, MapL, Eval, Uneval, end; v2_solve_disj([], [], _I, _Map, V2State, UL, MapL, Eval, Uneval, Failed) -> {ok, V2State, lists:reverse(Eval), UL, MapL, lists:reverse(Uneval), Failed}; +v2_solve_disj(every_i, Cs, I, Map, V2State, UL, MapL, Eval, Uneval, Failed) -> + NewIs = case Cs of + [] -> []; + _ -> [I|every_i] + end, + v2_solve_disj(NewIs, Cs, I, Map, V2State, UL, MapL, Eval, Uneval, Failed); v2_solve_disj(Is, [C|Cs], I, Map, V2State, UL, MapL, Eval, Uneval0, Failed) -> Uneval = [{I,C#constraint_list.id} || not is_failed_list(C, V2State)] ++ Uneval0, @@ -2169,7 +2177,7 @@ v2_solve_conj([I|Is], [Cs|Tail], I, Map0, Conj, IsFlat, V2State0, M = lists:keydelete(I, 1, vars_per_child(U, Masks)), {V2State2, NewF0} = save_updated_vars_list(AllCs, M, V2State1), {NewF, F} = lists:splitwith(fun(J) -> J < I end, NewF0), - Is1 = lists:umerge(Is, F), + Is1 = umerge_mask(Is, F), NewFs = [NewF|NewFs0], v2_solve_conj(Is1, Tail, I+1, Map, Conj, IsFlat, V2State2, [U|UL], NewFs, VarsUp, LastMap, LastFlags) @@ -2191,6 +2199,14 @@ v2_solve_conj([], _Cs, _I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp, v2_solve_conj(NewFlags, Cs, 1, Map, Conj, IsFlat, V2State, [], [], [U|VarsUp], Map, NewFlags) end; +v2_solve_conj(every_i, Cs, I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp, + LastMap, LastFlags) -> + NewIs = case Cs of + [] -> []; + _ -> [I|every_i] + end, + v2_solve_conj(NewIs, Cs, I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp, + LastMap, LastFlags); v2_solve_conj(Is, [_|Tail], I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp, LastMap, LastFlags) -> v2_solve_conj(Is, Tail, I+1, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp, @@ -2207,7 +2223,12 @@ report_detected_loop(_) -> add_mask_to_flags(Flags, [Im|M], I, L) when I > Im -> add_mask_to_flags(Flags, M, I, [Im|L]); add_mask_to_flags(Flags, [_|M], _I, L) -> - {lists:umerge(M, Flags), lists:reverse(L)}. + {umerge_mask(Flags, M), lists:reverse(L)}. + +umerge_mask(every_i, _F) -> + every_i; +umerge_mask(Is, F) -> + lists:umerge(Is, F). 
get_mask(V, Masks) -> case maps:find(V, Masks) of @@ -2221,7 +2242,7 @@ get_flags(#v2_state{constr_data = ConData}=V2State0, C) -> error -> ?debug("get_flags Id=~w Flags=all ~w\n", [Id, length(Cs)]), V2State = V2State0#v2_state{constr_data = maps:put(Id, {[],[]}, ConData)}, - {V2State, lists:seq(1, length(Cs))}; + {V2State, every_i}; {ok, failed} -> {V2State0, failed_list}; {ok, {Part,U}} when U =/= [] -> @@ -2901,8 +2922,9 @@ state__get_rec_var(Fun, #state{fun_map = Map}) -> maps:find(Fun, Map). state__finalize(State) -> - State1 = enumerate_constraints(State), - order_fun_constraints(State1). + State1 = state__new_constraint_context(State), + State2 = enumerate_constraints(State1), + order_fun_constraints(State2). %% ============================================================================ %% @@ -2982,7 +3004,7 @@ find_constraint_deps([Type|Tail], Acc) -> NewAcc = [[t_var_name(D) || D <- t_collect_vars(Type)]|Acc], find_constraint_deps(Tail, NewAcc); find_constraint_deps([], Acc) -> - lists:flatten(Acc). + lists:append(Acc). mk_constraint_1(Lhs, eq, Rhs, Deps) when Lhs < Rhs -> #constraint{lhs = Lhs, op = eq, rhs = Rhs, deps = Deps}; @@ -3090,8 +3112,8 @@ expand_to_conjunctions(#constraint_list{type = disj, list = List}) -> List1 = [C || C <- List, is_simple_constraint(C)], %% Just an assert. [] = [C || #constraint{} = C <- List1], - Expanded = lists:flatten([expand_to_conjunctions(C) - || #constraint_list{} = C <- List]), + Expanded = lists:append([expand_to_conjunctions(C) + || #constraint_list{} = C <- List]), ReturnList = Expanded ++ List1, if length(ReturnList) > ?DISJ_NORM_FORM_LIMIT -> throw(too_many_disj); true -> ReturnList @@ -3116,8 +3138,10 @@ calculate_deps(List) -> calculate_deps([H|Tail], Acc) -> Deps = get_deps(H), calculate_deps(Tail, [Deps|Acc]); +calculate_deps([], []) -> []; +calculate_deps([], [L]) -> L; calculate_deps([], Acc) -> - ordsets:from_list(lists:flatten(Acc)). + lists:umerge(Acc). mk_conj_constraint_list(List) -> mk_constraint_list(conj, List). 
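The calculate_deps/2 rewrite above replaces ordsets:from_list(lists:flatten(Acc)) with lists:umerge(Acc), plus fast paths for zero or one accumulated list. Assuming each per-constraint deps list is already an ordset (sorted and duplicate-free), the two expressions yield the same result, but umerge avoids re-sorting the flattened list. A shell example with made-up dependency lists:

1> Acc = [[1,3,5], [2,3,7], [1,7,9]].
[[1,3,5],[2,3,7],[1,7,9]]
2> lists:umerge(Acc).
[1,2,3,5,7,9]
3> ordsets:from_list(lists:flatten(Acc)).
[1,2,3,5,7,9]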
@@ -3185,7 +3209,8 @@ order_fun_constraints(State) -> order_fun_constraints([#constraint_ref{id = Id}|Tail], State) -> Cs = state__get_cs(Id, State), - {[NewCs], State1} = order_fun_constraints([Cs], [], [], State), + {[Cs1], State1} = order_fun_constraints([Cs], [], [], State), + NewCs = Cs1#constraint_list{deps = Cs#constraint_list.deps}, NewState = state__store_constrs(Id, NewCs, State1), order_fun_constraints(Tail, NewState); order_fun_constraints([], State) -> @@ -3193,23 +3218,31 @@ order_fun_constraints([], State) -> order_fun_constraints([#constraint_ref{} = C|Tail], Funs, Acc, State) -> order_fun_constraints(Tail, [C|Funs], Acc, State); -order_fun_constraints([#constraint_list{list = List, type = Type} = C|Tail], +order_fun_constraints([#constraint_list{list = List, + type = Type, + masks = OldMasks} = C|Tail], Funs, Acc, State) -> - {NewList, NewState} = - case Type of - conj -> order_fun_constraints(List, [], [], State); - disj -> - FoldFun = fun(X, AccState) -> - {[NewX], NewAccState} = - order_fun_constraints([X], [], [], AccState), - {NewX, NewAccState} - end, - lists:mapfoldl(FoldFun, State, List) - end, - C1 = update_constraint_list(C, NewList), - Masks = calculate_masks(NewList, 1, []), - NewAcc = [update_masks(C1, Masks)|Acc], - order_fun_constraints(Tail, Funs, NewAcc, NewState); + case OldMasks of + undefined -> + {NewList, NewState} = + case Type of + conj -> order_fun_constraints(List, [], [], State); + disj -> + FoldFun = fun(X, AccState) -> + {[NewX], NewAccState} = + order_fun_constraints([X], [], [], AccState), + {NewX, NewAccState} + end, + lists:mapfoldl(FoldFun, State, List) + end, + NewList2 = reset_deps(NewList, State), + C1 = update_constraint_list(C, NewList2), + Masks = calculate_masks(NewList, 1, []), + NewAcc = [update_masks(C1, Masks)|Acc], + order_fun_constraints(Tail, Funs, NewAcc, NewState); + M when is_map(M) -> + order_fun_constraints(Tail, Funs, [C|Acc], State) + end; order_fun_constraints([#constraint{} = C|Tail], Funs, Acc, State) -> order_fun_constraints(Tail, Funs, [C|Acc], State); order_fun_constraints([], Funs, Acc, State) -> @@ -3219,6 +3252,18 @@ order_fun_constraints([], Funs, Acc, State) -> update_masks(C, Masks) -> C#constraint_list{masks = Masks}. +reset_deps(ConstrList, #state{solvers = Solvers}) -> + case lists:member(v1, Solvers) of + true -> + ConstrList; + false -> + [reset_deps(Constr) || Constr <- ConstrList] + end. + +reset_deps(#constraint{}=C) -> C#constraint{deps = []}; +reset_deps(#constraint_list{}=C) -> C#constraint_list{deps = []}; +reset_deps(#constraint_ref{}=C) -> C#constraint_ref{deps = []}. + calculate_masks([C|Cs], I, L0) -> calculate_masks(Cs, I+1, [{V, I} || V <- get_deps(C)] ++ L0); calculate_masks([], _I, L) -> diff --git a/lib/dialyzer/src/dialyzer_utils.erl b/lib/dialyzer/src/dialyzer_utils.erl index 432d27571b..9eaf95c1a2 100644 --- a/lib/dialyzer/src/dialyzer_utils.erl +++ b/lib/dialyzer/src/dialyzer_utils.erl @@ -37,9 +37,9 @@ get_fun_meta_info/3, is_suppressed_fun/2, is_suppressed_tag/3, - merge_records/2, pp_hook/0, process_record_remote_types/1, + merge_types/2, sets_filter/2, src_compiler_opts/0, refold_pattern/1, @@ -188,7 +188,6 @@ get_core_from_abstract_code(AbstrCode, Opts) -> %% ============================================================================ -type type_table() :: erl_types:type_table(). --type mod_records() :: dict:dict(module(), type_table()). -spec get_record_and_type_info(abstract_code()) -> {'ok', type_table()} | {'error', string()}. 
@@ -289,18 +288,18 @@ get_record_fields([{record_field, _Line, Name, _Init}|Left], RecDict, Acc) -> get_record_fields([], _RecDict, Acc) -> lists:reverse(Acc). --spec process_record_remote_types(codeserver()) -> - {codeserver(), mod_records()}. +-spec process_record_remote_types(codeserver()) -> codeserver(). %% The field types are cached. Used during analysis when handling records. process_record_remote_types(CServer) -> - TempRecords = dialyzer_codeserver:get_temp_records(CServer), ExpTypes = dialyzer_codeserver:get_exported_types(CServer), - TempRecords1 = process_opaque_types0(TempRecords, ExpTypes), - %% A cache (not the field type cache) is used for speeding things up a bit. + Mods = dialyzer_codeserver:all_temp_modules(CServer), + process_opaque_types0(Mods, CServer, ExpTypes), VarTable = erl_types:var_table__new(), + RecordTable = dialyzer_codeserver:get_temp_records_table(CServer), ModuleFun = - fun({Module, Record}) -> + fun(Module) -> + RecordMap = dialyzer_codeserver:lookup_temp_mod_records(Module, CServer), RecordFun = fun({Key, Value}, C2) -> case Key of @@ -313,7 +312,7 @@ process_record_remote_types(CServer) -> {FieldT, C6} = erl_types:t_from_form (Field, ExpTypes, Site, - TempRecords1, VarTable, + RecordTable, VarTable, C5), {{FieldName, Field, FieldT}, C6} end, C4, Fields), @@ -328,30 +327,29 @@ process_record_remote_types(CServer) -> end, Cache = erl_types:cache__new(), {RecordList, _NewCache} = - lists:mapfoldl(RecordFun, Cache, maps:to_list(Record)), - {Module, maps:from_list(RecordList)} + lists:mapfoldl(RecordFun, Cache, maps:to_list(RecordMap)), + dialyzer_codeserver:store_temp_records(Module, + maps:from_list(RecordList), + CServer) end, - NewRecordsList = lists:map(ModuleFun, dict:to_list(TempRecords1)), - NewRecords = dict:from_list(NewRecordsList), - check_record_fields(NewRecords, ExpTypes), - {dialyzer_codeserver:finalize_records(NewRecords, CServer), NewRecords}. + lists:foreach(ModuleFun, Mods), + check_record_fields(Mods, CServer, ExpTypes), + dialyzer_codeserver:finalize_records(CServer). %% erl_types:t_from_form() substitutes the declaration of opaque types %% for the expanded type in some cases. To make sure the initial type, %% any(), is not used, the expansion is done twice. %% XXX: Recursive opaque types are not handled well. -process_opaque_types0(TempRecords0, TempExpTypes) -> - Cache = erl_types:cache__new(), - {TempRecords1, Cache1} = - process_opaque_types(TempRecords0, TempExpTypes, Cache), - {TempRecords, _NewCache} = - process_opaque_types(TempRecords1, TempExpTypes, Cache1), - TempRecords. - -process_opaque_types(TempRecords, TempExpTypes, Cache) -> +process_opaque_types0(AllModules, CServer, TempExpTypes) -> + process_opaque_types(AllModules, CServer, TempExpTypes), + process_opaque_types(AllModules, CServer, TempExpTypes). 
+ +process_opaque_types(AllModules, CServer, TempExpTypes) -> VarTable = erl_types:var_table__new(), + RecordTable = dialyzer_codeserver:get_temp_records_table(CServer), ModuleFun = - fun({Module, Record}, C0) -> + fun(Module) -> + RecordMap = dialyzer_codeserver:lookup_temp_mod_records(Module, CServer), RecordFun = fun({Key, Value}, C2) -> case Key of @@ -360,32 +358,32 @@ process_opaque_types(TempRecords, TempExpTypes, Cache) -> Site = {type, {Module, Name, NArgs}}, {Type, C3} = erl_types:t_from_form(Form, TempExpTypes, Site, - TempRecords, VarTable, C2), + RecordTable, VarTable, C2), {{Key, {F, Type}}, C3}; _Other -> {{Key, Value}, C2} end end, - {RecordList, C1} = - lists:mapfoldl(RecordFun, C0, maps:to_list(Record)), - {{Module, maps:from_list(RecordList)}, C1} - %% dict:map(RecordFun, Record) + C0 = erl_types:cache__new(), + {RecordList, _NewCache} = + lists:mapfoldl(RecordFun, C0, maps:to_list(RecordMap)), + dialyzer_codeserver:store_temp_records(Module, + maps:from_list(RecordList), + CServer) end, - {TempRecordList, NewCache} = - lists:mapfoldl(ModuleFun, Cache, dict:to_list(TempRecords)), - {dict:from_list(TempRecordList), NewCache}. - %% dict:map(ModuleFun, TempRecords). + lists:foreach(ModuleFun, AllModules). -check_record_fields(Records, TempExpTypes) -> - Cache = erl_types:cache__new(), +check_record_fields(AllModules, CServer, TempExpTypes) -> VarTable = erl_types:var_table__new(), + RecordTable = dialyzer_codeserver:get_temp_records_table(CServer), CheckFun = - fun({Module, Element}, C0) -> + fun(Module) -> CheckForm = fun(Form, Site, C1) -> erl_types:t_check_record_fields(Form, TempExpTypes, - Site, Records, + Site, RecordTable, VarTable, C1) end, - ElemFun = + RecordMap = dialyzer_codeserver:lookup_temp_mod_records(Module, CServer), + RecordFun = fun({Key, Value}, C2) -> case Key of {record, Name} -> @@ -406,10 +404,10 @@ check_record_fields(Records, TempExpTypes) -> msg_with_position(Fun, FileLine) end end, - lists:foldl(ElemFun, C0, maps:to_list(Element)) + C0 = erl_types:cache__new(), + _ = lists:foldl(RecordFun, C0, maps:to_list(RecordMap)) end, - _NewCache = lists:foldl(CheckFun, Cache, dict:to_list(Records)), - ok. + lists:foreach(CheckFun, AllModules). msg_with_position(Fun, FileLine) -> try Fun() @@ -421,10 +419,37 @@ msg_with_position(Fun, FileLine) -> throw({error, NewMsg}) end. --spec merge_records(mod_records(), mod_records()) -> mod_records(). +-spec merge_types(codeserver(), dialyzer_plt:plt()) -> codeserver(). -merge_records(NewRecords, OldRecords) -> - dict:merge(fun(_Key, NewVal, _OldVal) -> NewVal end, NewRecords, OldRecords). +merge_types(CServer, Plt) -> + AllNewModules = dialyzer_codeserver:all_temp_modules(CServer), + AllNewModulesSet = sets:from_list(AllNewModules), + AllOldModulesSet = dialyzer_plt:all_modules(Plt), + AllModulesSet = sets:union(AllNewModulesSet, AllOldModulesSet), + ModuleFun = + fun(Module) -> + KeepOldFun = + fun() -> + case dialyzer_plt:get_module_types(Plt, Module) of + none -> no; + {value, OldRecords} -> + case sets:is_element(Module, AllNewModulesSet) of + true -> no; + false -> {yes, OldRecords} + end + end + end, + Records = + case KeepOldFun() of + no -> + dialyzer_codeserver:lookup_temp_mod_records(Module, CServer); + {yes, OldRecords} -> + OldRecords + end, + dialyzer_codeserver:store_temp_records(Module, Records, CServer) + end, + lists:foreach(ModuleFun, sets:to_list(AllModulesSet)), + CServer. 
%% ============================================================================ %% diff --git a/lib/dialyzer/src/dialyzer_worker.erl b/lib/dialyzer/src/dialyzer_worker.erl index 418c9798b3..af0f2e9e08 100644 --- a/lib/dialyzer/src/dialyzer_worker.erl +++ b/lib/dialyzer/src/dialyzer_worker.erl @@ -56,10 +56,14 @@ launch(Mode, Job, InitData, Coordinator) -> %%-------------------------------------------------------------------- -init(#state{job = SCC, mode = Mode, init_data = InitData} = State) when +init(#state{job = SCC, mode = Mode, init_data = InitData, + coordinator = Coordinator} = State) when Mode =:= 'typesig'; Mode =:= 'dataflow' -> - DependsOn = dialyzer_succ_typings:find_depends_on(SCC, InitData), - ?debug("Deps ~p: ~p\n",[SCC, DependsOn]), + DependsOnSCCs = dialyzer_succ_typings:find_depends_on(SCC, InitData), + ?debug("~w: Deps ~p: ~p\n", [self(), SCC, DependsOnSCCs]), + Pids = dialyzer_coordinator:sccs_to_pids(DependsOnSCCs, Coordinator), + ?debug("~w: PidsDeps ~p\n", [self(), Pids]), + DependsOn = [{Pid, erlang:monitor(process, Pid)} || Pid <- Pids], loop(updating, State#state{depends_on = DependsOn}); init(#state{mode = Mode} = State) when Mode =:= 'compile'; Mode =:= 'warnings' -> @@ -67,7 +71,7 @@ init(#state{mode = Mode} = State) when loop(updating, #state{mode = Mode} = State) when Mode =:= 'typesig'; Mode =:= 'dataflow' -> - ?debug("Update: ~p\n",[State#state.job]), + ?debug("~w: Update: ~p\n", [self(), State#state.job]), NextStatus = case waits_more_success_typings(State) of true -> waiting; @@ -76,11 +80,11 @@ loop(updating, #state{mode = Mode} = State) when loop(NextStatus, State); loop(waiting, #state{mode = Mode} = State) when Mode =:= 'typesig'; Mode =:= 'dataflow' -> - ?debug("Wait: ~p\n",[State#state.job]), + ?debug("~w: Wait: ~p\n", [self(), State#state.job]), NewState = wait_for_success_typings(State), loop(updating, NewState); loop(running, #state{mode = 'compile'} = State) -> - dialyzer_coordinator:request_activation(State#state.coordinator), + request_activation(State), ?debug("Compile: ~s\n",[State#state.job]), Result = case start_compilation(State) of @@ -92,51 +96,28 @@ loop(running, #state{mode = 'compile'} = State) -> end, report_to_coordinator(Result, State); loop(running, #state{mode = 'warnings'} = State) -> - dialyzer_coordinator:request_activation(State#state.coordinator), + request_activation(State), ?debug("Warning: ~s\n",[State#state.job]), Result = collect_warnings(State), report_to_coordinator(Result, State); loop(running, #state{mode = Mode} = State) when Mode =:= 'typesig'; Mode =:= 'dataflow' -> request_activation(State), - ?debug("Run: ~p\n",[State#state.job]), + ?debug("~w: Run: ~p\n", [self(), State#state.job]), NotFixpoint = do_work(State), - ok = broadcast_done(State), report_to_coordinator(NotFixpoint, State). waits_more_success_typings(#state{depends_on = Depends}) -> Depends =/= []. -broadcast_done(#state{job = SCC, init_data = InitData, - coordinator = Coordinator}) -> - RequiredBy = dialyzer_succ_typings:find_required_by(SCC, InitData), - {Callers, Unknown} = - dialyzer_coordinator:sccs_to_pids(RequiredBy, Coordinator), - send_done(Callers, SCC), - continue_broadcast_done(Unknown, SCC, Coordinator). - -send_done(Callers, SCC) -> - ?debug("Sending ~p: ~p\n",[SCC, Callers]), - SendSTFun = fun(PID) -> PID ! {done, SCC} end, - lists:foreach(SendSTFun, Callers). 
- -continue_broadcast_done([], _SCC, _Coordinator) -> ok; -continue_broadcast_done(Rest, SCC, Coordinator) -> - %% This time limit should be greater than the time required - %% by the coordinator to spawn all processes. - timer:sleep(500), - {Callers, Unknown} = dialyzer_coordinator:sccs_to_pids(Rest, Coordinator), - send_done(Callers, SCC), - continue_broadcast_done(Unknown, SCC, Coordinator). - wait_for_success_typings(#state{depends_on = DependsOn} = State) -> receive - {done, SCC} -> - ?debug("GOT ~p: ~p\n",[State#state.job, SCC]), - State#state{depends_on = DependsOn -- [SCC]} + {'DOWN', Ref, process, Pid, _Info} -> + ?debug("~w: ~p got DOWN: ~p\n", [self(), State#state.job, Pid]), + State#state{depends_on = DependsOn -- [{Pid, Ref}]} after 5000 -> - ?debug("Still Waiting ~p: ~p\n",[State#state.job, DependsOn]), + ?debug("~w: Still Waiting ~p:\n ~p\n", [self(), State#state.job, DependsOn]), State end. @@ -150,7 +131,7 @@ do_work(#state{mode = Mode, job = Job, init_data = InitData}) -> end. report_to_coordinator(Result, #state{job = Job, coordinator = Coordinator}) -> - ?debug("Done: ~p\n",[Job]), + ?debug("~w: Done: ~p\n",[self(), Job]), dialyzer_coordinator:job_done(Job, Result, Coordinator). start_compilation(#state{job = Job, init_data = InitData}) -> diff --git a/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options b/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options index cb6a88786e..365b4798c5 100644 --- a/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options +++ b/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options @@ -1,2 +1,2 @@ {dialyzer_options, []}. -{time_limit, 2}. +{time_limit, 5}. diff --git a/lib/dialyzer/test/map_SUITE_data/dialyzer_options b/lib/dialyzer/test/map_SUITE_data/dialyzer_options index 50991c9bc5..02425c33f2 100644 --- a/lib/dialyzer/test/map_SUITE_data/dialyzer_options +++ b/lib/dialyzer/test/map_SUITE_data/dialyzer_options @@ -1 +1,2 @@ {dialyzer_options, []}. +{time_limit, 30}. 
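The dialyzer_worker.erl hunk above drops the hand-rolled {done, SCC} broadcast, with its timer:sleep(500) retry loop, in favour of standard process monitors: a worker resolves the SCCs it depends on to pids, monitors them, and then only has to collect 'DOWN' messages, which also covers the case where a dependency crashes. A self-contained sketch of the pattern with placeholder worker processes (nothing here is dialyzer's coordinator API):

    %% monitor_deps_sketch: illustrative only. Spawn a few dummy "dependency"
    %% processes, monitor them, and wait until every monitor has fired.
    -module(monitor_deps_sketch).
    -export([run/0]).

    run() ->
        Deps = [spawn(fun() -> timer:sleep(100 * N) end) || N <- [1, 2, 3]],
        DependsOn = [{Pid, erlang:monitor(process, Pid)} || Pid <- Deps],
        wait_for_deps(DependsOn).

    wait_for_deps([]) ->
        done;
    wait_for_deps(DependsOn) ->
        receive
            {'DOWN', Ref, process, Pid, _Info} ->
                %% Same removal step as the new wait_for_success_typings/1.
                wait_for_deps(DependsOn -- [{Pid, Ref}])
        after 5000 ->
                %% Still waiting; the real worker just logs and keeps waiting.
                wait_for_deps(DependsOn)
        end.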
diff --git a/lib/dialyzer/test/map_SUITE_data/results/map_galore b/lib/dialyzer/test/map_SUITE_data/results/map_galore index 6ea88f01f8..c34ba5cf30 100644 --- a/lib/dialyzer/test/map_SUITE_data/results/map_galore +++ b/lib/dialyzer/test/map_SUITE_data/results/map_galore @@ -20,9 +20,9 @@ map_galore.erl:186: The pattern #{'x':=2} can never match the type #{'x':=3} map_galore.erl:187: The pattern #{'x':=3} can never match the type {'a','b','c'} map_galore.erl:188: The pattern #{'x':=3} can never match the type #{'y':=3} map_galore.erl:189: The pattern #{'x':=3} can never match the type #{'x':=[101 | 104 | 114 | 116,...]} -map_galore.erl:2304: Cons will produce an improper list since its 2nd argument is {'b','a'} -map_galore.erl:2304: The call maps:from_list(nonempty_improper_list({'a','b'},{'b','a'})) will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}]) -map_galore.erl:2305: The call maps:from_list('a') will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}]) -map_galore.erl:2306: The call maps:from_list(42) will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}]) +map_galore.erl:2280: Cons will produce an improper list since its 2nd argument is {'b','a'} +map_galore.erl:2280: The call maps:from_list(nonempty_improper_list({'a','b'},{'b','a'})) will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}]) +map_galore.erl:2281: The call maps:from_list('a') will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}]) +map_galore.erl:2282: The call maps:from_list(42) will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}]) map_galore.erl:997: A key of type 'nonexisting' cannot exist in a map of type #{} map_galore.erl:998: A key of type 'nonexisting' cannot exist in a map of type #{1:='a', 2:='b', 4:='d', 5:='e', float()=>'c'} diff --git a/lib/dialyzer/test/map_SUITE_data/src/map_galore.erl b/lib/dialyzer/test/map_SUITE_data/src/map_galore.erl index 2611241379..99eb73a5f6 100644 --- a/lib/dialyzer/test/map_SUITE_data/src/map_galore.erl +++ b/lib/dialyzer/test/map_SUITE_data/src/map_galore.erl @@ -2070,11 +2070,8 @@ t_bif_map_values(Config) when is_list(Config) -> ok. t_erlang_hash(Config) when is_list(Config) -> - ok = t_bif_erlang_phash2(), ok = t_bif_erlang_phash(), - ok = t_bif_erlang_hash(), - ok. t_bif_erlang_phash2() -> @@ -2117,27 +2114,6 @@ t_bif_erlang_phash() -> 2620391445 = erlang:phash(M2,Sz), % 3590546636 ok. -t_bif_erlang_hash() -> - Sz = 1 bsl 27 - 1, - 39684169 = erlang:hash(#{},Sz), % 5158 - 33673142 = erlang:hash(#{ a => 1, "a" => 2, <<"a">> => 3, {a,b} => 4 },Sz), % 71555838 - 95337869 = erlang:hash(#{ 1 => a, 2 => "a", 3 => <<"a">>, 4 => {a,b} },Sz), % 5497225 - 108959561 = erlang:hash(#{ 1 => a },Sz), % 126071654 - 59623150 = erlang:hash(#{ a => 1 },Sz), % 126426236 - - 42775386 = erlang:hash(#{{} => <<>>},Sz), % 101655720 - 71692856 = erlang:hash(#{<<>> => {}},Sz), % 101655720 - - M0 = #{ a => 1, "key" => <<"value">> }, - M1 = maps:remove("key",M0), - M2 = M1#{ "key" => <<"value">> }, - - 70254632 = erlang:hash(M0,Sz), % 38260486 - 59623150 = erlang:hash(M1,Sz), % 126426236 - 70254632 = erlang:hash(M2,Sz), % 38260486 - ok. 
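The removed t_bif_erlang_hash/0 above exercised erlang:hash/2, a long-deprecated hash BIF; the surviving tests stick to erlang:phash/2 and erlang:phash2/2. For reference, a small sketch of the non-deprecated calls (results are computed at run time, not the hard-coded values from the removed test):

    %% phash_sketch: illustrative only; shows the hash BIFs that remain in use.
    -module(phash_sketch).
    -export([demo/0]).

    demo() ->
        Range = 1 bsl 27 - 1,
        Map = #{a => 1, "a" => 2, <<"a">> => 3, {a, b} => 4},
        io:format("phash2: ~p~n", [erlang:phash2(Map, Range)]),
        io:format("phash:  ~p~n", [erlang:phash(Map, Range)]),
        ok.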
- - t_map_encode_decode(Config) when is_list(Config) -> <<131,116,0,0,0,0>> = erlang:term_to_binary(#{}), Pairs = [ diff --git a/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options b/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options index 06ed52043a..cb301ff6a1 100644 --- a/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options +++ b/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options @@ -1,2 +1,2 @@ {dialyzer_options, [{warnings, [no_unused, no_return]}]}. -{time_limit, 20}. +{time_limit, 40}. diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl b/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl index 6a5b593db0..53b08cc5c9 100644 --- a/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl +++ b/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl @@ -1340,7 +1340,7 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map, State) -> {{Tag, PatTypes}, false}; false -> %% Try to find out if this is a default clause in a list - %% comprehension and supress this. A real Hack(tm) + %% comprehension and suppress this. A real Hack(tm) Force0 = case is_compiler_generated(cerl:get_ann(C)) of true -> diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl index 0108f91b7f..cf2cbe8e2b 100644 --- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl +++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl @@ -565,7 +565,7 @@ resolve_inst({make_fun2,Args},_,_,Lbls,Lambdas) -> [OldIndex] = resolve_args(Args), {value,{OldIndex,{F,A,_Lbl,_Index,NumFree,OldUniq}}} = lists:keysearch(OldIndex, 1, Lambdas), - [{_,{M,_,_}}|_] = Lbls, % Slighly kludgy. + [{_,{M,_,_}}|_] = Lbls, % Slightly kludgy. {make_fun2,{M,F,A},OldIndex,OldUniq,NumFree}; resolve_inst(Instr, Imports, Str, Lbls, _Lambdas) -> resolve_inst(Instr, Imports, Str, Lbls). diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl index 95d2076ccf..8fca202b8c 100644 --- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl +++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl @@ -951,7 +951,7 @@ i_letrec(Es, B, Xs, Ctxt, Ren, Env, S) -> %% Finally, we create new letrec-bindings for any and all %% residualised definitions. All referenced functions should have - %% been visited; the call to `visit' below is expected to retreive a + %% been visited; the call to `visit' below is expected to retrieve a %% cached expression. Rs1 = keep_referenced(Rs, S4), {Es1, S5} = mapfoldl(fun (R, S) -> @@ -997,7 +997,7 @@ i_apply(E, Ctxt, Ren, Env, S) -> %% location could be recycled after the flag has been tested, but %% there is no real advantage to that, because in practice, only %% 4-5% of all created store locations will ever be reused, while - %% there will be a noticable overhead for managing the free list.) + %% there will be a noticeable overhead for managing the free list.) case st__get_app_inlined(L, S3) of true -> %% The application was inlined, so we have the final @@ -2007,7 +2007,7 @@ residualize_operand(Opnd, E, S) -> case st__get_opnd_effect(Opnd#opnd.loc, S) of true -> %% The operand has not been visited, so we do that now, but - %% in `effect' context. (Waddell's algoritm does some stuff + %% in `effect' context. (Waddell's algorithm does some stuff %% here to account specially for the operand size, which %% appears unnecessary.) 
{E1, S1} = i(Opnd#opnd.expr, effect, Opnd#opnd.ren, diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl index 01c2512397..76ae871aee 100644 --- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl +++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl @@ -469,7 +469,7 @@ get(Key, Env) -> -define(MINIMUM_RANGE, 1000). -define(START_RANGE_FACTOR, 50). -define(MAX_RETRIES, 2). % retries before enlarging range --define(ENLARGE_FACTOR, 10). % range enlargment factor +-define(ENLARGE_FACTOR, 10). % range enlargement factor -ifdef(DEBUG). %% If you want to use these process dictionary counters, make sure to diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl index 49a95a95e5..69139cd568 100644 --- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl +++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl @@ -316,7 +316,7 @@ record_test_in_guard(Line, Term, Name, Vs, St) -> %% code bloat.) %% (4) Xref may be run on the abstract code, so the name in the %% abstract code must be erlang:is_record/3. - %% (5) To achive both (3) and (4) at the same time, set the name + %% (5) To achieve both (3) and (4) at the same time, set the name %% here to erlang:is_record/3, but mark it as compiler-generated. %% The v3_core pass will change the name to erlang:internal_is_record/3. Fs = record_fields(Name, St), diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl index 33a322b466..acb49b5faf 100644 --- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl +++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl @@ -1667,7 +1667,7 @@ bs_function({function,Name,Arity,CLabel,Asm0}=Func) -> %%% %%% Pass 1: Found out which bs_restore's that are needed. For now we assume -%%% that a bs_restore is needed unless it is directly preceeded by a bs_save. +%%% that a bs_restore is needed unless it is directly preceded by a bs_save. %%% bs_needed([{bs_save,Name},{bs_restore,Name}|T], N, _BsUsed, Dict) -> diff --git a/lib/dialyzer/test/plt_SUITE.erl b/lib/dialyzer/test/plt_SUITE.erl index 460d4e2240..fbfa979e1b 100644 --- a/lib/dialyzer/test/plt_SUITE.erl +++ b/lib/dialyzer/test/plt_SUITE.erl @@ -26,6 +26,8 @@ build_plt(Config) -> end. beam_tests(Config) when is_list(Config) -> + PrivDir = ?config(priv_dir, Config), + Plt = filename:join(PrivDir, "beam_tests.plt"), Prog = <<" -module(no_auto_import). @@ -42,10 +44,12 @@ beam_tests(Config) when is_list(Config) -> ">>, Opts = [no_auto_import], {ok, BeamFile} = compile(Config, Prog, no_auto_import, Opts), - [] = run_dialyzer(plt_build, [BeamFile], []), + [] = run_dialyzer(plt_build, [BeamFile], [{output_plt, Plt}]), ok. run_plt_check(Config) when is_list(Config) -> + PrivDir = ?config(priv_dir, Config), + Plt = filename:join(PrivDir, "run_plt_check.plt"), Mod1 = <<" -module(run_plt_check1). ">>, @@ -56,7 +60,7 @@ run_plt_check(Config) when is_list(Config) -> {ok, BeamFile1} = compile(Config, Mod1, run_plt_check1, []), {ok, BeamFile2} = compile(Config, Mod2A, run_plt_check2, []), - [] = run_dialyzer(plt_build, [BeamFile1, BeamFile2], []), + [] = run_dialyzer(plt_build, [BeamFile1, BeamFile2], [{output_plt, Plt}]), Mod2B = <<" -module(run_plt_check2). 
@@ -70,11 +74,13 @@ run_plt_check(Config) when is_list(Config) -> % callgraph warning as run_plt_check2:call/1 makes a call to unexported % function run_plt_check1:call/1. - [_] = run_dialyzer(plt_check, [], []), + [_] = run_dialyzer(plt_check, [], [{init_plt, Plt}]), ok. run_succ_typings(Config) when is_list(Config) -> + PrivDir = ?config(priv_dir, Config), + Plt = filename:join(PrivDir, "run_succ_typings.plt"), Mod1A = <<" -module(run_succ_typings1). @@ -84,7 +90,7 @@ run_succ_typings(Config) when is_list(Config) -> ">>, {ok, BeamFile1} = compile(Config, Mod1A, run_succ_typings1, []), - [] = run_dialyzer(plt_build, [BeamFile1], []), + [] = run_dialyzer(plt_build, [BeamFile1], [{output_plt, Plt}]), Mod1B = <<" -module(run_succ_typings1). @@ -107,9 +113,11 @@ run_succ_typings(Config) when is_list(Config) -> {ok, BeamFile2} = compile(Config, Mod2, run_succ_typings2, []), % contract types warning as run_succ_typings2:call/0 makes a call to % run_succ_typings1:call/0, which returns a (not b) in the PLT. - [_] = run_dialyzer(succ_typings, [BeamFile2], [{check_plt, false}]), + [_] = run_dialyzer(succ_typings, [BeamFile2], + [{check_plt, false}, {init_plt, Plt}]), % warning not returned as run_succ_typings1 is updated in the PLT. - [] = run_dialyzer(succ_typings, [BeamFile2], [{check_plt, true}]), + [] = run_dialyzer(succ_typings, [BeamFile2], + [{check_plt, true}, {init_plt, Plt}]), ok. @@ -252,12 +260,9 @@ remove_plt(Config) -> ok. bad_dialyzer_attr(Config) -> - PrivDir = ?config(priv_dir, Config), - Prog1 = <<"-module(dial). -dialyzer({no_return, [undef/0]}).">>, {ok, Beam1} = compile(Config, Prog1, dial, []), - Plt = filename:join(PrivDir, "bad_attr.plt"), {dialyzer_error, "Analysis failed with error:\n" "Could not scan the following file(s):\n" diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl index ed38b2f915..3829479a94 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl @@ -520,7 +520,7 @@ save_automatic_tagged_types([_M|Ms]) -> %% remove_in_set_imports/3 : %% input: list with tuples of each module's imports and module name %% respectively. -%% output: one list with same format but each occured import from a +%% output: one list with same format but each occurred import from a %% module in the input set (IMNameL) is removed. remove_in_set_imports([{{imports,ImpL},_ModName}|Rest],InputMNameL,Acc) -> NewImpL = remove_in_set_imports1(ImpL,InputMNameL,[]), @@ -1628,7 +1628,7 @@ tlv_tag1(<<1:1,PartialTag:7,Buffer/binary>>,Acc) -> tlv_tag1(Buffer,(Acc bsl 7) bor PartialTag). %% reads the content from the configuration file and returns the -%% selected part choosen by InfoType. Assumes that the config file +%% selected part chosen by InfoType. Assumes that the config file %% content is an Erlang term. 
read_config_file(ModuleName,InfoType) when atom(InfoType) -> CfgList = read_config_file(ModuleName), diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl index c26b8f851b..a4f39bde74 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl @@ -4028,7 +4028,7 @@ check_sequence(S,Type,Comps) -> {CRelInf,NewComps2} = componentrelation_leadingattr(S,NewComps), % io:format("CRelInf: ~p~n",[CRelInf]), % io:format("NewComps2: ~p~n",[NewComps2]), - %% CompListWithTblInf has got a lot unecessary info about + %% CompListWithTblInf has got a lot unnecessary info about %% the involved class removed, as the class of the object %% set. CompListWithTblInf = get_tableconstraint_info(S,Type,NewComps2), @@ -4686,7 +4686,7 @@ any_component_relation(_,[],_,_,Acc) -> %% evaluate_atpath/4 finds out whether the at notation refers to the %% search level. The list of referenced names in the AtNot list shall %% begin with a name that exists on the level it refers to. If the -%% found AtPath is refering to the same sub-branch as the simple table +%% found AtPath is referring to the same sub-branch as the simple table %% has, then there shall not be any leading attribute info on this %% level. evaluate_atpath(_,[],Cnames,{innermost,AtPath=[Ref|_Refs]}) -> @@ -4857,7 +4857,7 @@ innertype_comprel1(S,T = #type{def=Def,constraint=Cons,tablecinf=TCI},Path) -> case Cons of [{componentrelation,{_,_,ObjectSet},AtList}|_Rest] -> %% This AtList must have an "outermost" at sign to be - %% relevent here. + %% relevant here. [{_,AL=[#'Externalvaluereference'{value=_Attr}|_R1]}|_R2] = AtList, %% #'ObjectClassFieldType'{class=ClassDef} = Def, diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl index 392896932a..0b5ea85681 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl @@ -1259,7 +1259,7 @@ gen_dec_line(Erules,TopType,Cname,CTags,Type,OptOrMand,DecObjInf) -> end, case DecObjInf of {Cname,ObjSet} -> % this must be the component were an object is - %% choosen from the object set according to the table + %% chosen from the object set according to the table %% constraint. {[{ObjSet,Cname,asn1ct_gen:mk_var(asn1ct_name:curr(term))}], PostpDec}; diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl index 9725da4d11..fb9ffb13db 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl @@ -1096,7 +1096,7 @@ gen_dec_line(Erules,TopType,Cname,CTags,Type,OptOrMand,DecObjInf) -> end, case DecObjInf of {Cname,ObjSet} -> % this must be the component were an object is - %% choosen from the object set according to the table + %% chosen from the object set according to the table %% constraint. 
{[{ObjSet,Cname,asn1ct_gen:mk_var(asn1ct_name:curr(term))}], PostpDec}; diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl index 5f8c7a0de8..32676b3448 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl @@ -2721,7 +2721,7 @@ prioritize_error(ErrList) -> end, NewErrList), case SplitErrs of - {[],UndefPosErrs} -> % if no error with Positon exists + {[],UndefPosErrs} -> % if no error with Position exists lists:last(UndefPosErrs); {IntPosErrs,_} -> IntPosReasons = lists:map(fun(X)->element(2,X) end,IntPosErrs), diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl index 5854f8edbd..8f4d189b5a 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl @@ -1036,7 +1036,7 @@ decode_real2(Buffer0, Form, Len, RemBytes1) -> %% %% bitstring NamedBitList %% Val can be of: -%% - [identifiers] where only named identifers are set to one, +%% - [identifiers] where only named identifiers are set to one, %% the Constraint must then have some information of the %% bitlength. %% - [list of ones and zeroes] all bits diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl index 0457425445..6e12d36579 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl @@ -1034,7 +1034,7 @@ decode_real_notag(_Buffer, _Form) -> %% %% bitstring NamedBitList %% Val can be of: -%% - [identifiers] where only named identifers are set to one, +%% - [identifiers] where only named identifiers are set to one, %% the Constraint must then have some information of the %% bitlength. %% - [list of ones and zeroes] all bits diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl index b163aa24ac..97c92a2dd1 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl @@ -823,7 +823,7 @@ decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% bitstring NamedBitList %% Val can be of: -%% - [identifiers] where only named identifers are set to one, +%% - [identifiers] where only named identifiers are set to one, %% the Constraint must then have some information of the %% bitlength. %% - [list of ones and zeroes] all bits diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl index 15986cc217..aa2cf5ba88 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl @@ -1000,7 +1000,7 @@ decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% bitstring NamedBitList %% Val can be of: -%% - [identifiers] where only named identifers are set to one, +%% - [identifiers] where only named identifiers are set to one, %% the Constraint must then have some information of the %% bitlength. 
%% - [list of ones and zeroes] all bits diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl index 43d9bef54e..24f7949c21 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl @@ -1059,7 +1059,7 @@ decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% bitstring NamedBitList %% Val can be of: -%% - [identifiers] where only named identifers are set to one, +%% - [identifiers] where only named identifiers are set to one, %% the Constraint must then have some information of the %% bitlength. %% - [list of ones and zeroes] all bits diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl index 4f0ca99cce..8be5b0cd6e 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl @@ -108,7 +108,7 @@ user(Pid, User, Pass) -> gen_server:call(Pid, {user, User, Pass}, infinity). %% user(Pid, User, Pass,Acc) -%% Purpose: Login whith a supplied account name +%% Purpose: Login with a supplied account name %% Args: Pid = pid(), User = Pass = Acc = string() %% Returns: ok | {error, euser} | {error, econn} | {error, eacct} user(Pid, User, Pass,Acc) -> diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl index cf05431f5a..039960dac7 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl @@ -24,7 +24,7 @@ %%% - RFC 3310 Authentication and Key Agreement (AKA) (not yet!) %%% - HTTP/1.1 Specification Errata found at %%% http://world.std.com/~lawrence/http_errata.html -%%% Additionaly follows the following recommendations: +%%% Additionally follows the following recommendations: %%% - RFC 3143 Known HTTP Proxy/Caching Problems (not yet!) %%% - draft-nottingham-hdrreg-http-00.txt (not yet!) %%% diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl index ebefcd7ad7..28ea42c685 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl @@ -697,7 +697,7 @@ lookup(Key,Val) -> %%% This code is for parsing trailer headers in chunked messages. %%% Will be deprecated whenever I have found an alternative working solution! %%% Note: -%%% - The header names are returned slighly different from what the what +%%% - The header names are returned slightly different from what the what %%% inet_drv returns read_headers_old(Scheme,Socket,Timeout) -> read_headers_old(<<>>,Scheme,Socket,Timeout,[],[]). diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl index 45beaa84f7..d2653184aa 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl @@ -95,7 +95,7 @@ abort_session(Addr,Sid,Msg) -> next_request(Addr,Sid) -> gen_server:call(?HMACALL,{next_request,Addr,Sid},infinity). 
-%%% Session handler has succeded to set up a new session, now register +%%% Session handler has succeed to set up a new session, now register %%% the socket register_socket(Addr,Sid,Socket) -> gen_server:cast(?HMACALL,{register_socket,Addr,Sid,Socket}). diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl index 85e06f43b6..3058ac3556 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl @@ -224,7 +224,7 @@ is_blocked(ServerRef) -> %% -%% Module API. Theese functions are intended for use from modules only. +%% Module API. These functions are intended for use from modules only. %% config_lookup(Port, Query) -> diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl index d7a698d65a..07f951d057 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl @@ -109,7 +109,7 @@ get_persistens(HTTPVersion,ParsedHeader,ConfigDB)-> %%If it is version prio to 1.1 kill the conneciton [$H, $T, $T, $P, $\/, $1, $.,N] -> case httpd_util:key1search(ParsedHeader,"connection","keep-alive")of - %%if the connection isnt ordered to go down let it live + %%if the connection isn't ordered to go down let it live %%The keep-alive value is the older http/1.1 might be older %%Clients that use it. "keep-alive" when N >= 49 -> diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl index 6b872d7c95..73edcf6b92 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl @@ -60,7 +60,7 @@ % request_line, % string() Request Line headers, % #req_headers{} Parsed request headers entity_body= <<>>, % binary() Body of request - connection, % boolean() true if persistant connection + connection, % boolean() true if persistent connection status_code, % int() Status code logging % int() 0=No logging % 1=Only mod_log present diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl index e42494ff76..847d6e97c1 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl @@ -53,7 +53,7 @@ store_directory_data(Directory, DirData) -> %% API %% -%% Compability API +%% Compatibility API store_user(UserName, Password, Port, Dir, AccessPassword) -> diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl index 1203aeaa4c..a48f73274b 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl @@ -440,7 +440,7 @@ try_new_erl_scheme_method(Info,Env,Input,Mod,Func)-> %%---------------------------------------------------------------------- -%%The function recieves the data from the process that generates the page +%%The function receives the data from the process that generates the page %%and send the data to the client through the mod_cgi:send function %%---------------------------------------------------------------------- diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl index 
f600c65e92..d95c745b07 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl @@ -272,10 +272,10 @@ controlIfAllowed(AllowedNetworks,UserNetwork,IfAllowed,IfDenied)-> end. -%---------------------------------------------------------------------% -%The Denycontrol isn't neccessary to preform since the allow control % -%override the deny control % -%---------------------------------------------------------------------% +%--------------------------------------------------------------------% +%The Denycontrol isn't necessary to preform since the allow control % +%override the deny control % +%--------------------------------------------------------------------% controlDenyAllow(DeniedNetworks,AllowedNetworks,UserNetwork)-> case AllowedNetworks of [{allow,all}]-> @@ -657,7 +657,7 @@ getData2(HtAccessFileNames,SplittedPath,Info)-> %---------------------------------------------------------------------- %HtAccessFilenames is a list the names the accesssfiles can have -%Path is the shortest match agains all alias and documentroot +%Path is the shortest match against all alias and documentroot %rest of splitted path is a list of the parts of the path %Info is the mod recod from the server %---------------------------------------------------------------------- diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl index 4e6030d5e2..f2c45c4a3f 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl @@ -80,7 +80,7 @@ send_range_response(Path,Info,Ranges,FileInfo,LastModified)-> send_range_response(Path,Info,Start,Stop,FileInfo,LastModified) end. %%More than one range specified -%%Send a multipart reponse to the user +%%Send a multipart response to the user % %%An example of an multipart range response diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl index 76168f3890..a997db6880 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl @@ -48,8 +48,8 @@ do(Info) -> %%---------------------------------------------------------------------- -%%Control that the request header did not contians any limitations -%%wheather a response shall be createed or not +%%Control that the request header did not contains any limitations +%%whether a response shall be created or not %%---------------------------------------------------------------------- do_responsecontrol(Info) -> diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl index 19b571ac47..cc72a9b6fe 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl @@ -431,7 +431,7 @@ wrap_trans(State, Fun, Args, Retries, Mod, Kind) -> %% read lock is only set on the first node %% Nodes may either be a list of nodes or one node as an atom %% Mnesia on all Nodes must be connected to each other, but -%% it is not neccessary that they are up and running. +%% it is not necessary that they are up and running. 
lock(LockItem, LockKind) -> case get(mnesia_activity_state) of diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl index fdbf3e4481..a85a08e4f8 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl @@ -775,7 +775,7 @@ restore_tables([Rec | Recs], Header, Schema, State = {local, LocalTabs, L}) -> restore_tables([], _Header, _Schema, State) -> State. -%% Creates all neccessary dat files and inserts +%% Creates all necessary dat files and inserts %% the table definitions in the schema table %% %% Returns a list of local_tab tuples for all local tables diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl index 2b5c77b3ba..0403c7e978 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl @@ -332,7 +332,7 @@ really_retain(Name, Tab) -> %% %% {min, MinTabs} %% Minimize redundancy and only keep checkpoint info together with -%% one replica, preferrably at the local node. If any node involved +%% one replica, preferably at the local node. If any node involved %% the checkpoint goes down, the checkpoint is deactivated. %% %% {max, MaxTabs} @@ -345,7 +345,7 @@ really_retain(Name, Tab) -> %% {ram_overrides_dump, Tabs} %% Only applicable for ram_copies. Bool controls which versions of %% the records that should be included in the checkpoint state. -%% true means that the latest comitted records in ram (i.e. the +%% true means that the latest committed records in ram (i.e. the %% records that the application accesses) should be included %% in the checkpoint. 
false means that the records dumped to %% dat-files (the records that will be loaded at startup) should diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl index 70fee1741e..07667d73f5 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl @@ -61,7 +61,7 @@ do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == disc_copies -> Repair = mnesia_monitor:get_env(auto_repair), Args = [{keypos, 2}, public, named_table, Type], case Reason of - {dumper, _} -> %% Resources allready allocated + {dumper, _} -> %% Resources already allocated ignore; _ -> mnesia_monitor:mktab(Tab, Args), @@ -82,7 +82,7 @@ do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == disc_copies -> do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == ram_copies -> Args = [{keypos, 2}, public, named_table, Type], case Reason of - {dumper, _} -> %% Resources allready allocated + {dumper, _} -> %% Resources already allocated ignore; _ -> mnesia_monitor:mktab(Tab, Args), diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl index 701aa8f598..accb631f2a 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl @@ -170,14 +170,14 @@ loop(State) -> end; %% If test_set_sticky fails, we send this to all nodes - %% after aquiring a real write lock on Oid + %% after acquiring a real write lock on Oid {stick, {Tab, _}, N} -> ?ets_insert(mnesia_sticky_locks, {Tab, N}), loop(State); %% The caller which sends this message, must have first - %% aquired a write lock on the entire table + %% acquired a write lock on the entire table {unstick, Tab} -> ?ets_delete(mnesia_sticky_locks, Tab), loop(State); @@ -738,11 +738,11 @@ dirty_sticky_lock(Tab, Key, Nodes, Lock) -> sticky_wlock_table(Tid, Store, Tab) -> sticky_lock(Tid, Store, {Tab, ?ALL}, write). -%% aquire a wlock on Oid +%% acquire a wlock on Oid %% We store a {Tabname, write, Tid} in all locktables %% on all nodes containing a copy of Tabname %% We also store an item {{locks, Tab, Key}, write} in the -%% local store when we have aquired the lock. +%% local store when we have acquired the lock. 
%% wlock(Tid, Store, Oid) -> {Tab, Key} = Oid, diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl index d1ff09ce29..7fd5f70e23 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl @@ -144,7 +144,7 @@ check_protocol([{Node, {accept, Mon, _Version, Protocol}} | Tail], Protocols) -> end, [node(Mon) | check_protocol(Tail, Protocols)]; false -> - unlink(Mon), % Get rid of unneccessary link + unlink(Mon), % Get rid of unnecessary link check_protocol(Tail, Protocols) end; check_protocol([{Node, {reject, _Mon, Version, Protocol}} | Tail], Protocols) -> diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl index ec07e1c1ab..fbd1356a7f 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl @@ -1265,7 +1265,7 @@ make_change_table_copy_type(Tab, Node, ToS) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %% change index functions .... -%% Pos is allready added by 1 in both of these functions +%% Pos is already added by 1 in both of these functions add_table_index(Tab, Pos) -> schema_transaction(fun() -> do_add_table_index(Tab, Pos) end). diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl index 3e08354b5a..09e310530d 100644 --- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl +++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl @@ -1615,7 +1615,7 @@ commit_participant(Coord, Tid, Bin, C0, DiscNs, _RamNs) -> do_abort(Tid, Bin) when binary(Bin) -> %% Possible optimization: - %% If we want we could pass arround a flag + %% If we want we could pass around a flag %% that tells us whether the binary contains %% schema ops or not. Only if the binary %% contains schema ops there are meningful diff --git a/lib/dialyzer/test/small_SUITE_data/src/tuple1.erl b/lib/dialyzer/test/small_SUITE_data/src/tuple1.erl index d608275efe..88ac486044 100644 --- a/lib/dialyzer/test/small_SUITE_data/src/tuple1.erl +++ b/lib/dialyzer/test/small_SUITE_data/src/tuple1.erl @@ -2,7 +2,7 @@ %%% File : tuple1.erl %%% Author : Tobias Lindahl <[email protected]> %%% Description : Exposed two bugs in the analysis; -%%% one supressed warning and one crash. +%%% one suppressed warning and one crash. %%% %%% Created : 13 Nov 2006 by Tobias Lindahl <[email protected]> %%%------------------------------------------------------------------- |