author     Siri Hansen <[email protected]>  2017-10-27 10:41:37 +0200
committer  Siri Hansen <[email protected]>  2017-11-08 10:31:38 +0100
commit     4bce8d8f37f66022017061a42673a9a239158888 (patch)
tree       a3f7f67a540d769944cf2d0b44b00ddc25c31143 /lib/observer
parent     99205ab9fb89c08ce71deb02c2fd32376b15a24c (diff)
[cdv] Fix test of size limit reached on crash dump
This test failed intermittently because the randomly selected max size could be greater than the actual size of the dump, in which case the dump was not truncated. This is now corrected.
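The gist of the fix: pick a random byte limit, produce the dump with ERL_CRASH_DUMP_BYTES set to that limit, and if the dump turns out smaller than the limit (meaning no truncation took place), delete it and retry with a smaller limit. A minimal sketch of that loop, reusing the suite's existing dump_with_args/4 helper shown in the diff below; the function name dump_truncated/4 is illustrative, not from the patch:

    -include_lib("kernel/include/file.hrl").

    %% Keep shrinking the ERL_CRASH_DUMP_BYTES limit until the produced dump
    %% is at least as large as the limit, i.e. until it really was truncated.
    dump_truncated(DataDir, Rel, Name, Max) ->
        Bytes = max(15, rand:uniform(Max)),
        CD = dump_with_args(DataDir, Rel, Name,
                            "-env ERL_CRASH_DUMP_BYTES " ++ integer_to_list(Bytes)),
        {ok, #file_info{size = Size}} = file:read_file_info(CD),
        if
            Size < Bytes ->
                %% Dump was smaller than the limit, so no truncation happened;
                %% retry with a limit guaranteed to be below this dump's size.
                ok = file:delete(CD),
                dump_truncated(DataDir, Rel, Name, Size - 3);
            true ->
                CD
        end.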
Diffstat (limited to 'lib/observer')
-rw-r--r--   lib/observer/test/crashdump_viewer_SUITE.erl   30
1 file changed, 23 insertions, 7 deletions
diff --git a/lib/observer/test/crashdump_viewer_SUITE.erl b/lib/observer/test/crashdump_viewer_SUITE.erl
index 02adfb6508..29b9e406ae 100644
--- a/lib/observer/test/crashdump_viewer_SUITE.erl
+++ b/lib/observer/test/crashdump_viewer_SUITE.erl
@@ -649,13 +649,7 @@ do_create_dumps(DataDir,Rel) ->
current ->
CD3 = dump_with_args(DataDir,Rel,"instr","+Mim true"),
CD4 = dump_with_strange_module_name(DataDir,Rel,"strangemodname"),
- Tmp = dump_with_args(DataDir,Rel,"trunc_bytes",""),
- {ok,#file_info{size=Max}} = file:read_file_info(Tmp),
- ok = file:delete(Tmp),
- Bytes = max(15,rand:uniform(Max)),
- CD5 = dump_with_args(DataDir,Rel,"trunc_bytes",
- "-env ERL_CRASH_DUMP_BYTES " ++
- integer_to_list(Bytes)),
+ CD5 = dump_with_size_limit_reached(DataDir,Rel,"trunc_bytes"),
CD6 = dump_with_unicode_atoms(DataDir,Rel,"unicode"),
CD7 = dump_with_maps(DataDir,Rel,"maps"),
TruncatedDumps = truncate_dump(CD1),
@@ -768,6 +762,28 @@ dump_with_strange_module_name(DataDir,Rel,DumpName) ->
?t:stop_node(n1),
CD.
+dump_with_size_limit_reached(DataDir,Rel,DumpName) ->
+ Tmp = dump_with_args(DataDir,Rel,DumpName,""),
+ {ok,#file_info{size=Max}} = file:read_file_info(Tmp),
+ ok = file:delete(Tmp),
+ dump_with_size_limit_reached(DataDir,Rel,DumpName,Max).
+
+dump_with_size_limit_reached(DataDir,Rel,DumpName,Max) ->
+ Bytes = max(15,rand:uniform(Max)),
+ CD = dump_with_args(DataDir,Rel,DumpName,
+ "-env ERL_CRASH_DUMP_BYTES " ++
+ integer_to_list(Bytes)),
+ {ok,#file_info{size=Size}} = file:read_file_info(CD),
+ if Size < Bytes ->
+ %% This means that the dump was actually smaller than the
+ %% randomly selected truncation size, so we'll just do it
+ %% again with a smaller number
+ ok = file:delete(CD),
+ dump_with_size_limit_reached(DataDir,Rel,DumpName,Size-3);
+ true ->
+ CD
+ end.
+
dump_with_unicode_atoms(DataDir,Rel,DumpName) ->
Opt = rel_opt(Rel),
Pz = "-pz \"" ++ filename:dirname(code:which(?MODULE)) ++ "\"",