Diffstat (limited to 'lib/dialyzer')
-rw-r--r--  lib/dialyzer/src/dialyzer_cl.erl                                           |  2
-rw-r--r--  lib/dialyzer/src/dialyzer_cl_parse.erl                                     |  4
-rw-r--r--  lib/dialyzer/src/dialyzer_dataflow.erl                                     |  1
-rw-r--r--  lib/dialyzer/src/dialyzer_gui_wx.erl                                       |  4
-rw-r--r--  lib/dialyzer/src/dialyzer_plt.erl                                          | 20
-rw-r--r--  lib/dialyzer/src/dialyzer_races.erl                                        | 66
-rw-r--r--  lib/dialyzer/src/dialyzer_typesig.erl                                      | 10
-rw-r--r--  lib/dialyzer/src/dialyzer_utils.erl                                        | 86
-rw-r--r--  lib/dialyzer/src/typer.erl                                                 |  4
-rw-r--r--  lib/dialyzer/test/behaviour_SUITE_data/results/gen_server_incorrect_args   |  2
-rw-r--r--  lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_typeserver.erl    | 10
-rw-r--r--  lib/dialyzer/test/dialyzer_common.erl                                      | 10
-rw-r--r--  lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_typeserver.erl       | 10
-rw-r--r--  lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_validator.erl      |  2
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/results/maps_sum                        |  2
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/results/stacktrace                      |  5
-rw-r--r--  lib/dialyzer/test/small_SUITE_data/src/stacktrace.erl                      | 73
17 files changed, 153 insertions, 158 deletions
diff --git a/lib/dialyzer/src/dialyzer_cl.erl b/lib/dialyzer/src/dialyzer_cl.erl
index 0617be6435..1e06d6e974 100644
--- a/lib/dialyzer/src/dialyzer_cl.erl
+++ b/lib/dialyzer/src/dialyzer_cl.erl
@@ -672,7 +672,7 @@ failed_anal_msg(Reason, LogCache) ->
%%
format_log_cache(LogCache) ->
Str = lists:append(lists:reverse(LogCache)),
- string:join(string:tokens(Str, "\n"), "\n ").
+ lists:join("\n ", string:lexemes(Str, "\n")).
-spec store_warnings(#cl_state{}, [raw_warning()]) -> #cl_state{}.
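
A note on the hunk above: string:tokens/2 and string:join/2 belong to the old string API deprecated in OTP 21; string:lexemes/2 and lists:join/2 are their suggested replacements. One difference worth keeping in mind is that lists:join/2 returns a deep list (iodata) rather than a flat string, which is acceptable wherever the result is consumed as iodata. A hedged shell illustration (not part of the patch):

    1> string:lexemes("a\nb\nc", "\n").
    ["a","b","c"]
    2> lists:join("\n  ", ["a","b","c"]).
    ["a","\n  ","b","\n  ","c"]
    3> lists:flatten(lists:join("\n  ", ["a","b","c"])).
    "a\n  b\n  c"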
diff --git a/lib/dialyzer/src/dialyzer_cl_parse.erl b/lib/dialyzer/src/dialyzer_cl_parse.erl
index 80c10183cf..f21eaed087 100644
--- a/lib/dialyzer/src/dialyzer_cl_parse.erl
+++ b/lib/dialyzer/src/dialyzer_cl_parse.erl
@@ -41,8 +41,8 @@ start() ->
Ret
catch
throw:{dialyzer_cl_parse_error, Msg} -> {error, Msg};
- _:R ->
- Msg = io_lib:format("~tp\n~tp\n", [R, erlang:get_stacktrace()]),
+ _:R:S ->
+ Msg = io_lib:format("~tp\n~tp\n", [R, S]),
{error, lists:flatten(Msg)}
end.
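
The _:R:S pattern above uses the catch-clause stacktrace binding introduced in Erlang/OTP 21, which replaces the now-deprecated erlang:get_stacktrace/0. A minimal, self-contained sketch of the new form (module and function names are illustrative, not part of the patch):

    -module(catch_example).
    -export([new_style/0]).

    %% OTP 21+: bind Class, Reason and Stacktrace directly in the clause head
    %% instead of calling the deprecated erlang:get_stacktrace/0 afterwards.
    new_style() ->
        try error(boom)
        catch
            Class:Reason:Stacktrace ->
                {Class, Reason, Stacktrace}
        end.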
diff --git a/lib/dialyzer/src/dialyzer_dataflow.erl b/lib/dialyzer/src/dialyzer_dataflow.erl
index ea3523a965..c5f93a3392 100644
--- a/lib/dialyzer/src/dialyzer_dataflow.erl
+++ b/lib/dialyzer/src/dialyzer_dataflow.erl
@@ -299,6 +299,7 @@ traverse(Tree, Map, State) ->
match_fail -> t_none();
raise -> t_none();
bs_init_writable -> t_from_term(<<>>);
+ build_stacktrace -> erl_bif_types:type(erlang, build_stacktrace, 0);
Other -> erlang:error({'Unsupported primop', Other})
end,
{State, Map, Type};
diff --git a/lib/dialyzer/src/dialyzer_gui_wx.erl b/lib/dialyzer/src/dialyzer_gui_wx.erl
index b4b1872c12..b8414b7d8b 100644
--- a/lib/dialyzer/src/dialyzer_gui_wx.erl
+++ b/lib/dialyzer/src/dialyzer_gui_wx.erl
@@ -475,7 +475,7 @@ gui_loop(#gui_state{backend_pid = BackendPid, doc_plt = DocPlt,
gui_loop(State);
{BackendPid, ext_types, ExtTypes} ->
Map = fun({M,F,A}) -> io_lib:format("~tp:~tp/~p",[M,F,A]) end,
- ExtTypeString = string:join(lists:map(Map, ExtTypes), "\n"),
+ ExtTypeString = lists:join("\n", lists:map(Map, ExtTypes)),
Msg = io_lib:format("The following remote types are being used "
"but information about them is not available.\n"
"The analysis might get more precise by including "
@@ -638,7 +638,7 @@ output_sms(#gui_state{frame = Frame}, Title, Message, Type) ->
free_editor(#gui_state{gui = Wx, frame = Frame}, Title, Contents0) ->
Contents = lists:flatten(Contents0),
- Tokens = string:tokens(Contents, "\n"),
+ Tokens = string:lexemes(Contents, "\n"),
NofLines = length(Tokens),
LongestLine = lists:max([length(X) || X <- Tokens]),
Height0 = NofLines * 25 + 80,
diff --git a/lib/dialyzer/src/dialyzer_plt.erl b/lib/dialyzer/src/dialyzer_plt.erl
index 95c8b5ebce..2af4534396 100644
--- a/lib/dialyzer/src/dialyzer_plt.erl
+++ b/lib/dialyzer/src/dialyzer_plt.erl
@@ -531,17 +531,19 @@ compute_md5_from_files(Files) ->
lists:keysort(1, [{F, compute_md5_from_file(F)} || F <- Files]).
compute_md5_from_file(File) ->
- case filelib:is_regular(File) of
- false ->
+ case beam_lib:all_chunks(File) of
+ {ok, _, Chunks} ->
+ %% We cannot use beam_lib:md5 because it does not consider
+ %% the debug_info chunk, where typespecs are likely stored.
+ %% So we consider almost all chunks except the useless ones.
+ Filtered = [[ID, Chunk] || {ID, Chunk} <- Chunks, ID =/= "CInf", ID =/= "Docs"],
+ erlang:md5(lists:sort(Filtered));
+ {error, beam_lib, {file_error, _, enoent}} ->
Msg = io_lib:format("Not a regular file: ~ts\n", [File]),
throw({dialyzer_error, Msg});
- true ->
- case dialyzer_utils:get_core_from_beam(File) of
- {error, Error} ->
- throw({dialyzer_error, Error});
- {ok, Core} ->
- erlang:md5(term_to_binary(Core))
- end
+ {error, beam_lib, _} ->
+ Msg = io_lib:format("Could not compute MD5 for .beam: ~ts\n", [File]),
+ throw({dialyzer_error, Msg})
end.
init_diff_list(RemoveFiles, AddFiles) ->
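
The rewritten compute_md5_from_file/1 hashes the raw chunks of the .beam file instead of its Core Erlang representation, skipping "CInf" (compile info) and "Docs", which can differ between otherwise identical compilations. A hedged, standalone sketch of the same scheme (module and function names are illustrative, not part of the patch):

    -module(beam_hash).
    -export([md5/1]).

    %% Hash every chunk of a .beam file except "CInf" and "Docs", mirroring
    %% the filtering done in dialyzer_plt:compute_md5_from_file/1 above.
    md5(Path) ->
        {ok, _Module, Chunks} = beam_lib:all_chunks(Path),
        Kept = [[Id, Data] || {Id, Data} <- Chunks,
                              Id =/= "CInf", Id =/= "Docs"],
        erlang:md5(lists:sort(Kept)).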
diff --git a/lib/dialyzer/src/dialyzer_races.erl b/lib/dialyzer/src/dialyzer_races.erl
index 7fe64c3e11..7602faa21d 100644
--- a/lib/dialyzer/src/dialyzer_races.erl
+++ b/lib/dialyzer/src/dialyzer_races.erl
@@ -1270,8 +1270,8 @@ filter_named_tables(NamesList) ->
[] -> [];
[Head|Tail] ->
NewHead =
- case string:rstr(Head, "()") of
- 0 -> [Head];
+ case string:find(Head, "()", trailing) of
+ nomatch -> [Head];
_Other -> []
end,
NewHead ++ filter_named_tables(Tail)
@@ -1558,8 +1558,8 @@ any_args(StrList) ->
case StrList of
[] -> false;
[Head|Tail] ->
- case string:rstr(Head, "()") of
- 0 -> any_args(Tail);
+ case string:find(Head, "()", trailing) of
+ nomatch -> any_args(Tail);
_Other -> true
end
end.
@@ -1765,10 +1765,8 @@ ets_list_args(MaybeList) ->
end.
ets_list_argtypes(ListStr) ->
- ListStr1 = string:strip(ListStr, left, $[),
- ListStr2 = string:strip(ListStr1, right, $]),
- ListStr3 = string:strip(ListStr2, right, $.),
- string:strip(ListStr3, right, $,).
+ ListStr1 = string:trim(ListStr, leading, "$["),
+ string:trim(ListStr1, trailing, "$]$.$,").
ets_tuple_args(MaybeTuple) ->
case is_tuple(MaybeTuple) of
@@ -1810,7 +1808,7 @@ ets_tuple_argtypes2_helper(TupleStr, ElemStr, NestingLevel) ->
{[H|ElemStr], NestingLevel, false}
end,
case Return of
- true -> string:tokens(NewElemStr, " |");
+ true -> string:lexemes(NewElemStr, " |");
false ->
ets_tuple_argtypes2_helper(T, NewElemStr, NewNestingLevel)
end
@@ -1889,44 +1887,44 @@ format_args_2(StrArgList, Call) ->
case Call of
whereis ->
lists_key_replace(2, StrArgList,
- string:tokens(lists:nth(2, StrArgList), " |"));
+ string:lexemes(lists:nth(2, StrArgList), " |"));
register ->
lists_key_replace(2, StrArgList,
- string:tokens(lists:nth(2, StrArgList), " |"));
+ string:lexemes(lists:nth(2, StrArgList), " |"));
unregister ->
lists_key_replace(2, StrArgList,
- string:tokens(lists:nth(2, StrArgList), " |"));
+ string:lexemes(lists:nth(2, StrArgList), " |"));
ets_new ->
StrArgList1 = lists_key_replace(2, StrArgList,
- string:tokens(lists:nth(2, StrArgList), " |")),
+ string:lexemes(lists:nth(2, StrArgList), " |")),
lists_key_replace(4, StrArgList1,
- string:tokens(ets_list_argtypes(lists:nth(4, StrArgList1)), " |"));
+ string:lexemes(ets_list_argtypes(lists:nth(4, StrArgList1)), " |"));
ets_lookup ->
StrArgList1 = lists_key_replace(2, StrArgList,
- string:tokens(lists:nth(2, StrArgList), " |")),
+ string:lexemes(lists:nth(2, StrArgList), " |")),
lists_key_replace(4, StrArgList1,
- string:tokens(lists:nth(4, StrArgList1), " |"));
+ string:lexemes(lists:nth(4, StrArgList1), " |"));
ets_insert ->
StrArgList1 = lists_key_replace(2, StrArgList,
- string:tokens(lists:nth(2, StrArgList), " |")),
+ string:lexemes(lists:nth(2, StrArgList), " |")),
lists_key_replace(4, StrArgList1,
ets_tuple_argtypes2(
ets_tuple_argtypes1(lists:nth(4, StrArgList1), [], [], 0),
[]));
mnesia_dirty_read1 ->
lists_key_replace(2, StrArgList,
- [mnesia_tuple_argtypes(T) || T <- string:tokens(
+ [mnesia_tuple_argtypes(T) || T <- string:lexemes(
lists:nth(2, StrArgList), " |")]);
mnesia_dirty_read2 ->
lists_key_replace(2, StrArgList,
- string:tokens(lists:nth(2, StrArgList), " |"));
+ string:lexemes(lists:nth(2, StrArgList), " |"));
mnesia_dirty_write1 ->
lists_key_replace(2, StrArgList,
- [mnesia_record_tab(R) || R <- string:tokens(
+ [mnesia_record_tab(R) || R <- string:lexemes(
lists:nth(2, StrArgList), " |")]);
mnesia_dirty_write2 ->
lists_key_replace(2, StrArgList,
- string:tokens(lists:nth(2, StrArgList), " |"));
+ string:lexemes(lists:nth(2, StrArgList), " |"));
function_call -> StrArgList
end.
@@ -1943,18 +1941,16 @@ format_type(Type, State) ->
erl_types:t_to_string(Type, R).
mnesia_record_tab(RecordStr) ->
- case string:str(RecordStr, "#") =:= 1 of
- true ->
- "'" ++
- string:sub_string(RecordStr, 2, string:str(RecordStr, "{") - 1) ++
- "'";
- false -> RecordStr
+ case erl_scan:string(RecordStr) of
+ {ok, [{'#', _}, {atom, _, Name}|_], _} ->
+ io_lib:write_string(atom_to_list(Name), $');
+ _ -> RecordStr
end.
mnesia_tuple_argtypes(TupleStr) ->
- TupleStr1 = string:strip(TupleStr, left, ${),
- [TupleStr2|_T] = string:tokens(TupleStr1, " ,"),
- lists:flatten(string:tokens(TupleStr2, " |")).
+ TupleStr1 = string:trim(TupleStr, leading, "${"),
+ [TupleStr2|_T] = string:lexemes(TupleStr1, " ,"),
+ lists:flatten(string:lexemes(TupleStr2, " |")).
-spec race_var_map(var_to_map1(), var_to_map2(), dict:dict(), op()) ->
dict:dict().
@@ -2237,7 +2233,7 @@ var_type_analysis(FunDefArgs, FunCallTypes, WarnVarArgs, RaceWarnTag,
case lists_key_member_lists(Vars, FunVarArgs) of
0 -> [Vars, WVA2, WVA3, WVA4];
N when is_integer(N) ->
- NewWVA2 = string:tokens(lists:nth(N + 1, FunVarArgs), " |"),
+ NewWVA2 = string:lexemes(lists:nth(N + 1, FunVarArgs), " |"),
[Vars, NewWVA2, WVA3, WVA4]
end;
?WARN_WHEREIS_UNREGISTER ->
@@ -2246,7 +2242,7 @@ var_type_analysis(FunDefArgs, FunCallTypes, WarnVarArgs, RaceWarnTag,
case lists_key_member_lists(Vars, FunVarArgs) of
0 -> [Vars, WVA2];
N when is_integer(N) ->
- NewWVA2 = string:tokens(lists:nth(N + 1, FunVarArgs), " |"),
+ NewWVA2 = string:lexemes(lists:nth(N + 1, FunVarArgs), " |"),
[Vars, NewWVA2]
end;
?WARN_ETS_LOOKUP_INSERT ->
@@ -2256,7 +2252,7 @@ var_type_analysis(FunDefArgs, FunCallTypes, WarnVarArgs, RaceWarnTag,
case lists_key_member_lists(Vars1, FunVarArgs) of
0 -> [Vars1, WVA2];
N1 when is_integer(N1) ->
- NewWVA2 = string:tokens(lists:nth(N1 + 1, FunVarArgs), " |"),
+ NewWVA2 = string:lexemes(lists:nth(N1 + 1, FunVarArgs), " |"),
[Vars1, NewWVA2]
end,
Vars2 =
@@ -2286,10 +2282,10 @@ var_type_analysis(FunDefArgs, FunCallTypes, WarnVarArgs, RaceWarnTag,
NewWVA2 =
case Arity of
1 ->
- [mnesia_record_tab(R) || R <- string:tokens(
+ [mnesia_record_tab(R) || R <- string:lexemes(
lists:nth(2, FunVarArgs), " |")];
2 ->
- string:tokens(lists:nth(N + 1, FunVarArgs), " |")
+ string:lexemes(lists:nth(N + 1, FunVarArgs), " |")
end,
[Vars, NewWVA2|T]
end
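
The dialyzer_races changes above follow one pattern: index-based string:rstr/2 checks become string:find/3 with the trailing option, which returns nomatch rather than 0 when the substring is absent, while string:strip/3 and string:tokens/2 give way to string:trim/3 and string:lexemes/2. A hedged shell illustration of the rstr/find difference (not part of the patch):

    1> string:rstr("ets:new()", "()").
    8
    2> string:find("ets:new()", "()", trailing).
    "()"
    3> string:find("ets:new", "()", trailing).
    nomatch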
diff --git a/lib/dialyzer/src/dialyzer_typesig.erl b/lib/dialyzer/src/dialyzer_typesig.erl
index d03326ec97..dede475f98 100644
--- a/lib/dialyzer/src/dialyzer_typesig.erl
+++ b/lib/dialyzer/src/dialyzer_typesig.erl
@@ -418,6 +418,11 @@ traverse(Tree, DefinedVars, State) ->
match_fail -> throw(error);
raise -> throw(error);
bs_init_writable -> {State, t_from_term(<<>>)};
+ build_stacktrace ->
+ V = mk_var(Tree),
+ Type = erl_bif_types:type(erlang, build_stacktrace, 0),
+ State1 = state__store_conj(V, sub, Type, State),
+ {State1, V};
Other -> erlang:error({'Unsupported primop', Other})
end;
'receive' ->
@@ -1895,9 +1900,8 @@ solver(Solver, SolveFun) ->
?debug("Solver ~w returned unexpected result:\n ~P\n",
[Solver, _R, 60]),
throw(error)
- catch E:R ->
- io:format("Solver ~w failed: ~w:~p\n ~tp\n",
- [Solver, E, R, erlang:get_stacktrace()]),
+ catch E:R:S ->
+ io:format("Solver ~w failed: ~w:~p\n ~tp\n", [Solver, E, R, S]),
throw(error)
end.
diff --git a/lib/dialyzer/src/dialyzer_utils.erl b/lib/dialyzer/src/dialyzer_utils.erl
index 9b8fbc67eb..abd89034f3 100644
--- a/lib/dialyzer/src/dialyzer_utils.erl
+++ b/lib/dialyzer/src/dialyzer_utils.erl
@@ -120,92 +120,10 @@ get_core_from_beam(File, Opts) ->
{error, " Could not get Core Erlang code for: " ++ File ++ "\n"}
end;
_ ->
- deprecated_get_core_from_beam(File, Opts)
+ {error, " Could not get Core Erlang code for: " ++ File ++ "\n" ++
+ " Recompile with +debug_info or analyze starting from source code"}
end.
-deprecated_get_core_from_beam(File, Opts) ->
- case get_abstract_code_from_beam(File) of
- error ->
- {error, " Could not get abstract code for: " ++ File ++ "\n" ++
- " Recompile with +debug_info or analyze starting from source code"};
- {ok, AbstrCode} ->
- case get_compile_options_from_beam(File) of
- error ->
- {error, " Could not get compile options for: " ++ File ++ "\n" ++
- " Recompile or analyze starting from source code"};
- {ok, CompOpts} ->
- case get_core_from_abstract_code(AbstrCode, Opts ++ CompOpts) of
- error ->
- {error, " Could not get core Erlang code for: " ++ File};
- {ok, _} = Core ->
- Core
- end
- end
- end.
-
-get_abstract_code_from_beam(File) ->
- case beam_lib:chunks(File, [abstract_code]) of
- {ok, {_, List}} ->
- case lists:keyfind(abstract_code, 1, List) of
- {abstract_code, {raw_abstract_v1, Abstr}} -> {ok, Abstr};
- _ -> error
- end;
- _ ->
- %% No or unsuitable abstract code.
- error
- end.
-
-get_compile_options_from_beam(File) ->
- case beam_lib:chunks(File, [compile_info]) of
- {ok, {_, List}} ->
- case lists:keyfind(compile_info, 1, List) of
- {compile_info, CompInfo} -> compile_info_to_options(CompInfo);
- _ -> error
- end;
- _ ->
- %% No or unsuitable compile info.
- error
- end.
-
-compile_info_to_options(CompInfo) ->
- case lists:keyfind(options, 1, CompInfo) of
- {options, CompOpts} -> {ok, CompOpts};
- _ -> error
- end.
-
-get_core_from_abstract_code(AbstrCode, Opts) ->
- %% We do not want the parse_transforms around since we already
- %% performed them. In some cases we end up in trouble when
- %% performing them again.
- AbstrCode1 = cleanup_parse_transforms(AbstrCode),
- %% Remove parse_transforms (and other options) from compile options.
- Opts2 = cleanup_compile_options(Opts),
- try compile:noenv_forms(AbstrCode1, Opts2 ++ src_compiler_opts()) of
- {ok, _, Core} -> {ok, Core};
- _What -> error
- catch
- error:_ -> error
- end.
-
-cleanup_parse_transforms([{attribute, _, compile, {parse_transform, _}}|Left]) ->
- cleanup_parse_transforms(Left);
-cleanup_parse_transforms([Other|Left]) ->
- [Other|cleanup_parse_transforms(Left)];
-cleanup_parse_transforms([]) ->
- [].
-
-cleanup_compile_options(Opts) ->
- lists:filter(fun keep_compile_option/1, Opts).
-
-%% Using abstract, not asm or core.
-keep_compile_option(from_asm) -> false;
-keep_compile_option(from_core) -> false;
-%% The parse transform will already have been applied, may cause
-%% problems if it is re-applied.
-keep_compile_option({parse_transform, _}) -> false;
-keep_compile_option(warnings_as_errors) -> false;
-keep_compile_option(_) -> true.
-
%% ============================================================================
%%
%% Typed Records
diff --git a/lib/dialyzer/src/typer.erl b/lib/dialyzer/src/typer.erl
index 16b9c8a94a..9d3d9ce438 100644
--- a/lib/dialyzer/src/typer.erl
+++ b/lib/dialyzer/src/typer.erl
@@ -164,9 +164,9 @@ get_type_info(#analysis{callgraph = CallGraph,
CodeServer),
Analysis#analysis{callgraph = StrippedCallGraph, trust_plt = NewPlt}
catch
- error:What ->
+ error:What:Stacktrace ->
fatal_error(io_lib:format("Analysis failed with message: ~tp",
- [{What, erlang:get_stacktrace()}]));
+ [{What, Stacktrace}]));
throw:{dialyzer_succ_typing_error, Msg} ->
fatal_error(io_lib:format("Analysis failed with message: ~ts", [Msg]))
end.
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/results/gen_server_incorrect_args b/lib/dialyzer/test/behaviour_SUITE_data/results/gen_server_incorrect_args
index 1eb8cd455b..1be0ce0d8c 100644
--- a/lib/dialyzer/test/behaviour_SUITE_data/results/gen_server_incorrect_args
+++ b/lib/dialyzer/test/behaviour_SUITE_data/results/gen_server_incorrect_args
@@ -1,5 +1,5 @@
gen_server_incorrect_args.erl:3: Undefined callback function handle_cast/2 (behaviour gen_server)
gen_server_incorrect_args.erl:3: Undefined callback function init/1 (behaviour gen_server)
-gen_server_incorrect_args.erl:7: The inferred return type of handle_call/3 ({'no'} | {'ok'}) has nothing in common with {'noreply',_} | {'noreply',_,'hibernate' | 'infinity' | non_neg_integer()} | {'reply',_,_} | {'stop',_,_} | {'reply',_,_,'hibernate' | 'infinity' | non_neg_integer()} | {'stop',_,_,_}, which is the expected return type for the callback of the gen_server behaviour
+gen_server_incorrect_args.erl:7: The inferred return type of handle_call/3 ({'no'} | {'ok'}) has nothing in common with {'noreply',_} | {'noreply',_,'hibernate' | 'infinity' | non_neg_integer() | {'continue',_}} | {'reply',_,_} | {'stop',_,_} | {'reply',_,_,'hibernate' | 'infinity' | non_neg_integer() | {'continue',_}} | {'stop',_,_,_}, which is the expected return type for the callback of the gen_server behaviour
gen_server_incorrect_args.erl:7: The inferred type for the 2nd argument of handle_call/3 ('boo' | 'foo') is not a supertype of {pid(),_}, which is expected type for this argument in the callback of the gen_server behaviour
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_typeserver.erl b/lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_typeserver.erl
index b16075763f..12f6532c0c 100644
--- a/lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_typeserver.erl
+++ b/lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_typeserver.erl
@@ -539,7 +539,7 @@ apply_spec_test({Mod,Fun,_Arity}=MFA, {_Domain,Range}, SpecTimeout, FalsePositiv
try apply(Mod, Fun, Args) of
X -> {ok, X}
catch
- X:Y -> {X, Y}
+ X:Y:S -> {{X, Y}, S}
end,
case Result of
{ok, Z} ->
@@ -551,15 +551,15 @@ apply_spec_test({Mod,Fun,_Arity}=MFA, {_Domain,Range}, SpecTimeout, FalsePositiv
false ->
false
end;
- Exception when is_function(FalsePositiveMFAs) ->
+ {Exception, S2} when is_function(FalsePositiveMFAs) ->
case FalsePositiveMFAs(MFA, Args, Exception) of
true ->
true;
false ->
- error(Exception, erlang:get_stacktrace())
+ error(Exception, S2)
end;
- Exception ->
- error(Exception, erlang:get_stacktrace())
+ {Exception, S3} ->
+ error(Exception, S3)
end
end).
diff --git a/lib/dialyzer/test/dialyzer_common.erl b/lib/dialyzer/test/dialyzer_common.erl
index 48083a2731..1a8403f486 100644
--- a/lib/dialyzer/test/dialyzer_common.erl
+++ b/lib/dialyzer/test/dialyzer_common.erl
@@ -221,13 +221,9 @@ get_suites(Dir) ->
end.
suffix(String, Suffix) ->
- case string:rstr(String, Suffix) of
- 0 -> no;
- Index ->
- case string:substr(String, Index) =:= Suffix of
- true -> {yes, string:sub_string(String,1,Index-1)};
- false -> no
- end
+ case string:split(String, Suffix, trailing) of
+ [Prefix,[]] -> {yes, Prefix};
+ _ -> no
end.
-spec create_suite(string()) -> 'ok'.
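
The rewritten suffix/2 relies on string:split/3 with the trailing option: when Suffix is a proper suffix of String, the split yields a two-element list whose second element is empty. A hedged shell illustration (not part of the patch):

    1> string:split("options1_SUITE", "_SUITE", trailing).
    ["options1",[]]
    2> string:split("options1_SUITE_data", "_SUITE", trailing).
    ["options1","_data"]
    3> string:split("no_match", "_SUITE", trailing).
    ["no_match"]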
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_typeserver.erl b/lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_typeserver.erl
index 1677b4efb8..529f9fba72 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_typeserver.erl
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_typeserver.erl
@@ -533,7 +533,7 @@ apply_spec_test({Mod,Fun,_Arity}=MFA, {_Domain,Range}, SpecTimeout, FalsePositiv
try apply(Mod,Fun,Args) of
X -> {ok, X}
catch
- X:Y -> {X, Y}
+ X:Y:S -> {{X, Y}, S}
end,
case Result of
{ok, Z} ->
@@ -545,15 +545,15 @@ apply_spec_test({Mod,Fun,_Arity}=MFA, {_Domain,Range}, SpecTimeout, FalsePositiv
false ->
false
end;
- Exception when is_function(FalsePositiveMFAs) ->
+ {Exception, S2} when is_function(FalsePositiveMFAs) ->
case FalsePositiveMFAs(MFA, Args, Exception) of
true ->
true;
false ->
- error(Exception, erlang:get_stacktrace())
+ error(Exception, S2)
end;
- Exception ->
- error(Exception, erlang:get_stacktrace())
+ {Exception, S3} ->
+ error(Exception, S3)
end
end).
diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_validator.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_validator.erl
index 8fe43163f6..ea92613781 100644
--- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_validator.erl
+++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_validator.erl
@@ -174,7 +174,7 @@ validate_error(Error, Name, Ar) ->
-endif.
validate_error_1(Error, Name, Ar) ->
{{'_',Name,Ar},
- {internal_error,'_',{Error,erlang:get_stacktrace()}}}.
+ {internal_error,'_',{Error,[]}}}.
-record(st, %Emulation state
{x=init_regs(0, term), %x register info.
diff --git a/lib/dialyzer/test/small_SUITE_data/results/maps_sum b/lib/dialyzer/test/small_SUITE_data/results/maps_sum
index bd192bdb93..b29ac77d88 100644
--- a/lib/dialyzer/test/small_SUITE_data/results/maps_sum
+++ b/lib/dialyzer/test/small_SUITE_data/results/maps_sum
@@ -1,4 +1,4 @@
-maps_sum.erl:15: Invalid type specification for function maps_sum:wrong1/1. The success typing is (map()) -> any()
+maps_sum.erl:15: Invalid type specification for function maps_sum:wrong1/1. The success typing is (maps:iterator() | map()) -> any()
maps_sum.erl:26: Function wrong2/1 has no local return
maps_sum.erl:27: The call lists:foldl(fun((_,_,_) -> any()),0,Data::any()) will never return since it differs in the 1st argument from the success typing arguments: (fun((_,_) -> any()),any(),[any()])
diff --git a/lib/dialyzer/test/small_SUITE_data/results/stacktrace b/lib/dialyzer/test/small_SUITE_data/results/stacktrace
new file mode 100644
index 0000000000..fd60881953
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/stacktrace
@@ -0,0 +1,5 @@
+
+stacktrace.erl:11: The pattern {'a', 'b'} can never match the type [{atom(),atom(),[any()] | byte(),[{'file',string()} | {'line',pos_integer()}]}]
+stacktrace.erl:19: The pattern ['a', 'b'] can never match the type [{atom(),atom(),[any()] | byte(),[{'file',string()} | {'line',pos_integer()}]}]
+stacktrace.erl:44: The pattern {'a', 'b'} can never match the type [{atom(),atom(),[any()] | byte(),[{'file',string()} | {'line',pos_integer()}]}]
+stacktrace.erl:53: The pattern ['a', 'b'] can never match the type [{atom(),atom(),[any()] | byte(),[{'file',string()} | {'line',pos_integer()}]}]
diff --git a/lib/dialyzer/test/small_SUITE_data/src/stacktrace.erl b/lib/dialyzer/test/small_SUITE_data/src/stacktrace.erl
new file mode 100644
index 0000000000..de79e710e9
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/stacktrace.erl
@@ -0,0 +1,73 @@
+-module(stacktrace).
+
+%% Check the stacktrace variable introduced in Erlang/OTP 21.0
+
+-export([t1/0, t2/0, t3/0, t4/0, s1/0, s2/0, s3/0, s4/0]).
+
+t1() ->
+ try foo:bar()
+ catch
+ E:P:S ->
+ {a,b} = S, % can never match
+ {E, P}
+ end.
+
+t2() ->
+ try foo:bar()
+ catch
+ E:P:S ->
+ [a,b] = S, % can never match
+ {E, P}
+ end.
+
+t3() ->
+ try foo:bar()
+ catch
+ E:P:S ->
+ [{m,f,[],[]}] = S,
+ {E, P}
+ end.
+
+t4() ->
+ try foo:bar()
+ catch
+ E:P:S ->
+ [{m,f,1,[{file,"tjo"},{line,95}]}] = S,
+ {E, P}
+ end.
+
+s1() ->
+ try foo:bar()
+ catch
+ E:P ->
+ S = erlang:get_stacktrace(),
+ {a,b} = S, % can never match
+ {E, P}
+ end.
+
+s2() ->
+ try foo:bar()
+ catch
+ E:P ->
+ S = erlang:get_stacktrace(),
+ [a,b] = S, % can never match
+ {E, P}
+ end.
+
+s3() ->
+ try foo:bar()
+ catch
+ E:P ->
+ S = erlang:get_stacktrace(),
+ [{m,f,[],[]}] = S,
+ {E, P}
+ end.
+
+s4() ->
+ try foo:bar()
+ catch
+ E:P ->
+ S = erlang:get_stacktrace(),
+ [{m,f,1,[{file,"tjo"},{line,95}]}] = S,
+ {E, P}
+ end.