Diffstat (limited to 'lib/dialyzer')
-rw-r--r-- lib/dialyzer/Makefile | 2
-rw-r--r-- lib/dialyzer/README | 2
-rw-r--r-- lib/dialyzer/RELEASE_NOTES | 4
-rw-r--r-- lib/dialyzer/doc/about.txt | 2
-rw-r--r-- lib/dialyzer/doc/manual.txt | 4
-rw-r--r-- lib/dialyzer/doc/src/Makefile | 2
-rw-r--r-- lib/dialyzer/doc/src/book.xml | 4
-rw-r--r-- lib/dialyzer/doc/src/dialyzer.xml | 827
-rw-r--r-- lib/dialyzer/doc/src/dialyzer_chapter.xml | 305
-rw-r--r-- lib/dialyzer/doc/src/notes.xml | 432
-rw-r--r-- lib/dialyzer/doc/src/part.xml | 5
-rw-r--r-- lib/dialyzer/doc/src/part_notes.xml | 2
-rw-r--r-- lib/dialyzer/doc/src/ref_man.xml | 6
-rw-r--r-- lib/dialyzer/doc/src/typer.xml | 157
-rw-r--r-- lib/dialyzer/info | 2
-rw-r--r-- lib/dialyzer/src/Makefile | 10
-rw-r--r-- lib/dialyzer/src/dialyzer.app.src | 14
-rw-r--r-- lib/dialyzer/src/dialyzer.appup.src | 2
-rw-r--r-- lib/dialyzer/src/dialyzer.erl | 175
-rw-r--r-- lib/dialyzer/src/dialyzer.hrl | 18
-rw-r--r-- lib/dialyzer/src/dialyzer_analysis_callgraph.erl | 346
-rw-r--r-- lib/dialyzer/src/dialyzer_behaviours.erl | 20
-rw-r--r-- lib/dialyzer/src/dialyzer_callgraph.erl | 318
-rw-r--r-- lib/dialyzer/src/dialyzer_cl.erl | 137
-rw-r--r-- lib/dialyzer/src/dialyzer_cl_parse.erl | 14
-rw-r--r-- lib/dialyzer/src/dialyzer_codeserver.erl | 308
-rw-r--r-- lib/dialyzer/src/dialyzer_contracts.erl | 469
-rw-r--r-- lib/dialyzer/src/dialyzer_coordinator.erl | 108
-rw-r--r-- lib/dialyzer/src/dialyzer_dataflow.erl | 616
-rw-r--r-- lib/dialyzer/src/dialyzer_dep.erl | 69
-rw-r--r-- lib/dialyzer/src/dialyzer_explanation.erl | 10
-rw-r--r-- lib/dialyzer/src/dialyzer_gui_wx.erl | 37
-rw-r--r-- lib/dialyzer/src/dialyzer_gui_wx.hrl | 12
-rw-r--r-- lib/dialyzer/src/dialyzer_options.erl | 59
-rw-r--r-- lib/dialyzer/src/dialyzer_plt.erl | 437
-rw-r--r-- lib/dialyzer/src/dialyzer_race_data_server.erl | 127
-rw-r--r-- lib/dialyzer/src/dialyzer_races.erl | 45
-rw-r--r-- lib/dialyzer/src/dialyzer_succ_typings.erl | 196
-rw-r--r-- lib/dialyzer/src/dialyzer_timing.erl | 7
-rw-r--r-- lib/dialyzer/src/dialyzer_typesig.erl | 927
-rw-r--r-- lib/dialyzer/src/dialyzer_utils.erl | 722
-rw-r--r-- lib/dialyzer/src/dialyzer_worker.erl | 109
-rw-r--r-- lib/dialyzer/src/typer.erl | 1106
-rw-r--r-- lib/dialyzer/test/Makefile | 4
-rw-r--r-- lib/dialyzer/test/abstract_SUITE.erl | 107
-rw-r--r-- lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options | 1
-rw-r--r-- lib/dialyzer/test/behaviour_SUITE_data/results/callbacks_and_specs | 6
-rw-r--r-- lib/dialyzer/test/behaviour_SUITE_data/results/gen_event_incorrect_return | 2
-rw-r--r-- lib/dialyzer/test/behaviour_SUITE_data/results/gen_server_incorrect_args | 9
-rw-r--r-- lib/dialyzer/test/behaviour_SUITE_data/results/gen_server_missing_callbacks | 3
-rw-r--r-- lib/dialyzer/test/behaviour_SUITE_data/results/sample_behaviour | 12
-rw-r--r-- lib/dialyzer/test/behaviour_SUITE_data/results/sample_behaviour_old | 4
-rw-r--r-- lib/dialyzer/test/behaviour_SUITE_data/results/supervisor_incorrect_return | 2
-rw-r--r-- lib/dialyzer/test/behaviour_SUITE_data/results/vars_in_beh_spec | 8
-rw-r--r-- lib/dialyzer/test/behaviour_SUITE_data/src/proper/compile_flags.hrl | 2
-rw-r--r-- lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_common.hrl | 55
-rw-r--r-- lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_gen.erl | 611
-rw-r--r-- lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_internal.hrl | 98
-rw-r--r-- lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_types.erl | 1353
-rw-r--r-- lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_typeserver.erl | 2411
-rw-r--r-- lib/dialyzer/test/dialyzer_SUITE.erl | 4
-rw-r--r-- lib/dialyzer/test/dialyzer_common.erl | 11
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/dialyzer_options | 2
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/results/bad_argument | 5
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/results/contract | 7
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/results/contract_violation | 3
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/results/exact | 3
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/results/guard_update | 5
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/results/initial_dataflow | 4
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/results/is_map_guard | 5
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/results/map_galore | 28
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/results/map_in_guard | 4
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/results/map_in_guard2 | 13
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/results/map_size | 13
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/results/maps_merge | 11
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/results/opaque_key | 15
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/results/order | 17
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/results/subtract_value_flip | 0
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/results/typeflow | 4
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/results/typeflow2 | 13
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/results/typesig | 5
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/bad_argument.erl | 19
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/bug.erl | 63
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/contract.erl | 14
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/contract_violation.erl | 29
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/exact.erl | 23
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/guard_update.erl | 18
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/initial_dataflow.erl | 11
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/is_map_guard.erl | 17
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/mand_remote_val/a.erl | 9
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/mand_remote_val/b.erl | 3
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/map_galore.erl | 2800
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/map_in_guard.erl | 35
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/map_in_guard2.erl | 27
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/map_size.erl | 36
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/maps_merge.erl | 29
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/opaque_bif.erl | 13
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/opaque_key/opaque_key_adt.erl | 69
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/opaque_key/opaque_key_use.erl | 97
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/order.erl | 56
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/subtract_value_flip.erl | 9
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/typeflow.erl | 25
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/typeflow2.erl | 88
-rw-r--r-- lib/dialyzer/test/map_SUITE_data/src/typesig.erl | 9
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/dialyzer_options | 1
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/results/array | 4
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/results/crash | 10
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/results/dict | 18
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/results/ets | 6
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/results/my_queue | 2
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/results/opaque | 2
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/results/para | 10
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/results/queue | 6
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/results/rec | 4
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/results/simple | 52
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/results/timer | 4
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/results/union | 8
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/results/weird | 6
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/results/wings | 8
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/big_external_type.erl | 526
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/big_local_type.erl | 523
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/dict/dict_use.erl | 2
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/hipe_vectors/hipe_ig_moves.erl | 75
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/hipe_vectors/hipe_vectors.erl | 129
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/modules/opaque_digraph.erl | 2
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/modules/opaque_erl_scan.erl | 2
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/para/myqueue.erl | 17
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/para/myqueue_params.erl | 15
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/para/para3.erl | 2
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_common.hrl | 55
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_gen.erl | 624
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_internal.hrl | 92
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_types.erl | 1349
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_typeserver.erl | 2402
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/recrec/cerl.erl | 4595
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/recrec/core_parse.hrl | 115
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer.hrl | 173
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl | 3795
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_races.erl | 2487
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/recrec/erl_types.erl | 5735
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/simple/exact_api.erl | 2
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/simple/is_rec.erl | 6
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/simple/rec_api.erl | 8
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/simple/simple1_api.erl | 26
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/timer/timer_use.erl | 4
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning1.erl | 18
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning2.erl | 14
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning3.erl | 19
-rw-r--r-- lib/dialyzer/test/opaque_SUITE_data/src/wings/wings_util.erl | 4
-rw-r--r-- lib/dialyzer/test/options1_SUITE_data/results/compiler | 2
-rw-r--r-- lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl | 2
-rw-r--r-- lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl | 6
-rw-r--r-- lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl | 2
-rw-r--r-- lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl | 2
-rw-r--r-- lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl | 2
-rw-r--r-- lib/dialyzer/test/options2_SUITE_data/dialyzer_options | 2
-rw-r--r-- lib/dialyzer/test/options2_SUITE_data/results/unknown_type | 2
-rw-r--r-- lib/dialyzer/test/options2_SUITE_data/results/unused_unknown_type | 2
-rw-r--r-- lib/dialyzer/test/options2_SUITE_data/src/unknown_type.erl | 7
-rw-r--r-- lib/dialyzer/test/options2_SUITE_data/src/unused_unknown_type.erl | 10
-rw-r--r-- lib/dialyzer/test/plt_SUITE.erl | 237
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/results/mnesia | 5
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl | 4
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl | 6
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_response.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl | 10
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl | 4
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl | 4
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl | 4
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl | 8
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl | 2
-rw-r--r-- lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl | 2
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/results/abs | 9
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/results/app_call | 2
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/results/bif1 | 3
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/results/chars | 4
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/results/comparisons | 228
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes | 36
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes2 | 2
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/results/fun_arity | 37
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/results/guards | 2
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/results/higher_order_discrepancy | 5
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/results/literals | 15
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/results/maps1 | 4
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/results/maps_difftype | 2
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/results/maps_sum | 2
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/results/non_existing | 1
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/results/pretty_bitstring | 2
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/results/record_creation_diffs | 2
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/results/record_pat | 2
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/results/relevant_record_warning | 2
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/results/undefined | 2
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/src/abs.erl | 71
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/src/anno.erl | 18
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/src/bif1/bif1.erl | 16
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/src/bif1/bif1_adt.erl | 12
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/src/big_external_type.erl | 6
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/src/big_local_type.erl | 6
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/src/chars.erl | 32
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/src/comparisons.erl | 56
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/src/fun_arity.erl | 127
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/src/higher_order_discrepancy.erl | 5
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/src/keydel.erl | 29
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/src/literals.erl | 6
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/src/loopy.erl | 17
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/src/maps1.erl | 14
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/src/maps_redef.erl | 12
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/src/ms.erl | 8
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/src/trec.erl | 2
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/src/tuple1.erl | 2
-rw-r--r-- lib/dialyzer/test/small_SUITE_data/undefined.erl | 29
-rw-r--r-- lib/dialyzer/test/typer_SUITE.erl | 158
-rw-r--r-- lib/dialyzer/test/unmatched_returns_SUITE_data/src/send.erl | 11
-rw-r--r-- lib/dialyzer/test/user_SUITE_data/results/wpc_hlines | 3
-rw-r--r-- lib/dialyzer/test/user_SUITE_data/results/wsp_pdu | 2
-rw-r--r-- lib/dialyzer/test/user_SUITE_data/src/wpc_hlines.erl | 22
-rw-r--r-- lib/dialyzer/vsn.mk | 2
238 files changed, 37802 insertions, 3085 deletions
diff --git a/lib/dialyzer/Makefile b/lib/dialyzer/Makefile
index 0e59376240..e4f681dcd9 100644
--- a/lib/dialyzer/Makefile
+++ b/lib/dialyzer/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2006-2009. All Rights Reserved.
+# Copyright Ericsson AB 2006-2016. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/lib/dialyzer/README b/lib/dialyzer/README
index 82d0f5ec48..951a92469b 100644
--- a/lib/dialyzer/README
+++ b/lib/dialyzer/README
@@ -6,7 +6,7 @@
## Copyright: Held by the authors; all rights reserved (2004 - 2010).
##----------------------------------------------------------------------------
-The DIALYZER, a DIscrepany AnaLYZer for ERlang programs.
+The DIALYZER, a DIscrepancy AnaLYZer for ERlang programs.
-----------------------------------------------
diff --git a/lib/dialyzer/RELEASE_NOTES b/lib/dialyzer/RELEASE_NOTES
index 4e311bb543..299cc8642f 100644
--- a/lib/dialyzer/RELEASE_NOTES
+++ b/lib/dialyzer/RELEASE_NOTES
@@ -135,7 +135,7 @@ Version 1.9.1 (in Erlang/OTP R13B)
Version 1.9.0 (in Erlang/OTP R13A)
----------------------------------
- The analysis accepts opaque type declarations and detects violations of
- opaqueness of terms of such types. Starting with R13, many Erlang/OTP
+ opacity of terms of such types. Starting with R13, many Erlang/OTP
standard libraries (array, dict, digraph, ets, gb_sets, gb_trees, queue,
and sets) contain opaque type declarations of their main data types.
Dialyzer will spit out warnings in code that explicitly depends on the
@@ -181,7 +181,7 @@ Version 1.8.0 (in Erlang/OTP R12B-2)
- Dialyzer has a new warning option -Wunmatched_returns which warns for
function calls that ignore the return value.
This catches many common programming errors (e.g. calling file:close/1
- and not checking for the absense of errors), interface discrepancies
+ and not checking for the absence of errors), interface discrepancies
(e.g. a function returning multiple values when in reality the function
is void and only called for its side-effects), calling the wrong function
(e.g. io_lib:format/1 instead of io:format/1), and even possible
diff --git a/lib/dialyzer/doc/about.txt b/lib/dialyzer/doc/about.txt
index 7d9d819731..1318d9a65e 100644
--- a/lib/dialyzer/doc/about.txt
+++ b/lib/dialyzer/doc/about.txt
@@ -1,5 +1,5 @@
This is DIALYZER version 2.1.0
- DIALYZER is a DIscrepany AnaLYZer for ERlang programs.
+ DIALYZER is a DIscrepancy AnaLYZer for ERlang programs.
Copyright (C) Tobias Lindahl <[email protected]>
Kostis Sagonas <[email protected]>
diff --git a/lib/dialyzer/doc/manual.txt b/lib/dialyzer/doc/manual.txt
index 29c9518d84..a571cd2e2b 100644
--- a/lib/dialyzer/doc/manual.txt
+++ b/lib/dialyzer/doc/manual.txt
@@ -4,7 +4,7 @@
## Kostis Sagonas <[email protected]>
##----------------------------------------------------------------------------
-The DIALYZER, a DIscrepany AnaLYZer for ERlang programs.
+The DIALYZER, a DIscrepancy AnaLYZer for ERlang programs.
-----------------------------------------------
@@ -255,7 +255,7 @@ Warning options:
-Wno_match
Suppress warnings for patterns that are unused or cannot match.
-Wno_opaque
- Suppress warnings for violations of opaqueness of data types.
+ Suppress warnings for violations of opacity of data types.
-Wunmatched_returns ***
Include warnings for function calls which ignore a structured return
value or do not match against one of many possible return value(s).
diff --git a/lib/dialyzer/doc/src/Makefile b/lib/dialyzer/doc/src/Makefile
index 77d0a6fc68..8fe6cd30eb 100644
--- a/lib/dialyzer/doc/src/Makefile
+++ b/lib/dialyzer/doc/src/Makefile
@@ -34,7 +34,7 @@ RELSYSDIR = $(RELEASE_PATH)/lib/$(APPLICATION)-$(VSN)
# Target Specs
# ----------------------------------------------------
XML_APPLICATION_FILES = ref_man.xml
-XML_REF3_FILES = dialyzer.xml
+XML_REF3_FILES = dialyzer.xml typer.xml
XML_PART_FILES = part.xml part_notes.xml
XML_CHAPTER_FILES = dialyzer_chapter.xml notes.xml
diff --git a/lib/dialyzer/doc/src/book.xml b/lib/dialyzer/doc/src/book.xml
index 4cc2a9db7c..46df8b81b8 100644
--- a/lib/dialyzer/doc/src/book.xml
+++ b/lib/dialyzer/doc/src/book.xml
@@ -4,7 +4,7 @@
<book xmlns:xi="http://www.w3.org/2001/XInclude">
<header titlestyle="normal">
<copyright>
- <year>2006</year><year>2013</year>
+ <year>2006</year><year>2016</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -25,7 +25,7 @@
<title>Dialyzer</title>
<prepared></prepared>
<docno></docno>
- <date></date>
+ <date>2016-09-19</date>
<rev></rev>
<file>book.xml</file>
</header>
diff --git a/lib/dialyzer/doc/src/dialyzer.xml b/lib/dialyzer/doc/src/dialyzer.xml
index 3fd34241b2..e34ffd6def 100644
--- a/lib/dialyzer/doc/src/dialyzer.xml
+++ b/lib/dialyzer/doc/src/dialyzer.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>2006</year><year>2015</year>
+ <year>2006</year><year>2017</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -25,341 +25,472 @@
<title>dialyzer</title>
<prepared></prepared>
<docno></docno>
- <date></date>
+ <date>2016-09-20</date>
<rev></rev>
+ <file>dialyzer.xml</file>
</header>
<module>dialyzer</module>
- <modulesummary>The Dialyzer, a DIscrepancy AnalYZer for ERlang programs</modulesummary>
+ <modulesummary>Dialyzer, a DIscrepancy AnaLYZer for ERlang programs.
+ </modulesummary>
<description>
- <p>The Dialyzer is a static analysis tool that identifies software
- discrepancies such as definite type errors, code which has become
- dead or unreachable due to some programming error, unnecessary
- tests, etc. in single Erlang modules or entire (sets of)
- applications. Dialyzer starts its analysis from either
- debug-compiled BEAM bytecode or from Erlang source code. The file
- and line number of a discrepancy is reported along with an
- indication of what the discrepancy is about. Dialyzer bases its
- analysis on the concept of success typings which allows for sound
- warnings (no false positives).</p>
- <p>Read more about Dialyzer and about how to use it from the GUI
- in <seealso marker="dialyzer_chapter">Dialyzer User's
- Guide</seealso>.</p>
+ <p>Dialyzer is a static analysis tool that identifies software
+ discrepancies, such as definite type errors, code that has become dead
+ or unreachable because of programming error, and unnecessary tests,
+ in single Erlang modules or entire (sets of) applications.</p>
+
+ <p>Dialyzer starts its analysis from either
+ debug-compiled BEAM bytecode or from Erlang source code. The file
+ and line number of a discrepancy is reported along with an
+ indication of what the discrepancy is about. Dialyzer bases its
+ analysis on the concept of success typings, which allows for sound
+ warnings (no false positives).</p>
</description>
<section>
- <title>Using the Dialyzer from the command line</title>
- <p>Dialyzer also has a command line version for automated use. Below is a
- brief description of the list of its options. The same information can
- be obtained by writing</p>
- <code type="none">
- dialyzer --help</code>
- <p>in a shell. Please refer to the GUI description for more details on
- the operation of Dialyzer.</p>
- <p>The exit status of the command line version is:</p>
+ <marker id="command_line"></marker>
+ <title>Using Dialyzer from the Command Line</title>
+ <p>Dialyzer has a command-line version for automated use. This
+ section provides a brief description of the options. The same information
+ can be obtained by writing the following in a shell:</p>
+
<code type="none">
- 0 - No problems were encountered during the analysis and no
- warnings were emitted.
- 1 - Problems were encountered during the analysis.
- 2 - No problems were encountered, but warnings were emitted.</code>
- <p>Usage:</p>
+dialyzer --help</code>
+
+ <p>For more details about the operation of Dialyzer, see section
+ <seealso marker="dialyzer_chapter#dialyzer_gui">
+ Using Dialyzer from the GUI</seealso> in the User's Guide.</p>
+
+ <p><em>Exit status of the command-line version:</em></p>
+
+ <taglist>
+ <tag><c>0</c></tag>
+ <item>
+ <p>No problems were found during the analysis and no warnings were
+ emitted.</p>
+ </item>
+ <tag><c>1</c></tag>
+ <item>
+ <p>Problems were found during the analysis.</p>
+ </item>
+ <tag><c>2</c></tag>
+ <item>
+ <p>No problems were found during the analysis, but warnings were
+ emitted.</p>
+ </item>
+ </taglist>
+
+ <p><em>Usage:</em></p>
+
<code type="none">
- dialyzer [--help] [--version] [--shell] [--quiet] [--verbose]
- [-pa dir]* [--plt plt] [--plts plt*] [-Ddefine]*
- [-I include_dir]* [--output_plt file] [-Wwarn]* [--raw]
- [--src] [--gui] [files_or_dirs] [-r dirs]
- [--apps applications] [-o outfile]
- [--build_plt] [--add_to_plt] [--remove_from_plt]
- [--check_plt] [--no_check_plt] [--plt_info] [--get_warnings]
- [--dump_callgraph file] [--no_native] [--fullpath]
- [--statistics] [--no_native_cache]</code>
- <p>Options:</p>
+dialyzer [--add_to_plt] [--apps applications] [--build_plt]
+ [--check_plt] [-Ddefine]* [-Dname] [--dump_callgraph file]
+ [files_or_dirs] [--fullpath] [--get_warnings] [--gui] [--help]
+ [-I include_dir]* [--no_check_plt] [--no_native]
+ [--no_native_cache] [-o outfile] [--output_plt file] [-pa dir]*
+ [--plt plt] [--plt_info] [--plts plt*] [--quiet] [-r dirs]
+ [--raw] [--remove_from_plt] [--shell] [--src] [--statistics]
+ [--verbose] [--version] [-Wwarn]*</code>
+
+ <note>
+ <p>* denotes that multiple occurrences of the option are possible.</p>
+ </note>
+
+ <p><em>Options:</em></p>
+
<taglist>
- <tag><c><![CDATA[files_or_dirs]]></c> (for backwards compatibility also
- as: <c><![CDATA[-c files_or_dirs]]></c>)</tag>
- <item>Use Dialyzer from the command line to detect defects in the
- specified files or directories containing <c><![CDATA[.erl]]></c> or
- <c><![CDATA[.beam]]></c> files, depending on the type of the
- analysis.</item>
- <tag><c><![CDATA[-r dirs]]></c></tag>
- <item>Same as the previous but the specified directories are searched
- recursively for subdirectories containing <c><![CDATA[.erl]]></c> or
- <c><![CDATA[.beam]]></c> files in them, depending on the type of
- analysis.</item>
- <tag><c><![CDATA[--apps applications]]></c></tag>
- <item>Option typically used when building or modifying a plt as in:
+ <tag><c>--add_to_plt</c></tag>
+ <item>
+ <p>The PLT is extended to also include the files specified with
+ <c>-c</c> and <c>-r</c>. Use
+ <c>--plt</c> to specify which PLT to start from,
+ and <c>--output_plt</c> to specify where to put the PLT.
+ Notice that the analysis possibly can include files from the PLT if
+ they depend on the new files. This option only works for BEAM
+ files.</p>
+ </item>
+ <tag><c>--apps applications</c></tag>
+ <item>
+ <p>This option is typically used when building or modifying a PLT as
+ in:</p>
<code type="none">
- dialyzer --build_plt --apps erts kernel stdlib mnesia ...</code>
- to conveniently refer to library applications corresponding to the
- Erlang/OTP installation. However, the option is general and can also
- be used during analysis in order to refer to Erlang/OTP applications.
- In addition, file or directory names can also be included, as in:
+dialyzer --build_plt --apps erts kernel stdlib mnesia ...</code>
+ <p>to refer conveniently to library applications corresponding to the
+ Erlang/OTP installation. However, this option is general and can also
+ be used during analysis to refer to Erlang/OTP applications.
+ File or directory names can also be included, as in:</p>
<code type="none">
- dialyzer --apps inets ssl ./ebin ../other_lib/ebin/my_module.beam</code></item>
- <tag><c><![CDATA[-o outfile]]></c> (or
- <c><![CDATA[--output outfile]]></c>)</tag>
- <item>When using Dialyzer from the command line, send the analysis
- results to the specified outfile rather than to stdout.</item>
- <tag><c><![CDATA[--raw]]></c></tag>
- <item>When using Dialyzer from the command line, output the raw analysis
- results (Erlang terms) instead of the formatted result. The raw format
- is easier to post-process (for instance, to filter warnings or to
- output HTML pages).</item>
- <tag><c><![CDATA[--src]]></c></tag>
- <item>Override the default, which is to analyze BEAM files, and
- analyze starting from Erlang source code instead.</item>
- <tag><c><![CDATA[-Dname]]></c> (or <c><![CDATA[-Dname=value]]></c>)</tag>
- <item>When analyzing from source, pass the define to Dialyzer. (**)</item>
- <tag><c><![CDATA[-I include_dir]]></c></tag>
- <item>When analyzing from source, pass the <c><![CDATA[include_dir]]></c>
- to Dialyzer. (**)</item>
- <tag><c><![CDATA[-pa dir]]></c></tag>
- <item>Include <c><![CDATA[dir]]></c> in the path for Erlang (useful when
- analyzing files that have <c><![CDATA['-include_lib()']]></c>
- directives).</item>
- <tag><c><![CDATA[--output_plt file]]></c></tag>
- <item>Store the plt at the specified file after building it.</item>
- <tag><c><![CDATA[--plt plt]]></c></tag>
- <item>Use the specified plt as the initial plt (if the plt was built
- during setup the files will be checked for consistency).</item>
- <tag><c><![CDATA[--plts plt*]]></c></tag>
- <item>Merge the specified plts to create the initial plt -- requires
- that the plts are disjoint (i.e., do not have any module
- appearing in more than one plt).
- The plts are created in the usual way:
+dialyzer --apps inets ssl ./ebin ../other_lib/ebin/my_module.beam</code>
+ </item>
+ <tag><c>--build_plt</c></tag>
+ <item>
+ <p>The analysis starts from an empty PLT and creates a new one from
+ the files specified with <c>-c</c> and
+ <c>-r</c>. This option only works for BEAM files.
+ To override the default PLT location, use
+ <c>--plt</c> or <c>--output_plt</c>.</p>
+ </item>
+ <tag><c>--check_plt</c></tag>
+ <item>
+ <p>Check the PLT for consistency and rebuild it if it is not
+ up-to-date.</p>
+ </item>
+ <tag><c>-Dname</c> (or <c>-Dname=value</c>)</tag>
+ <item>
+ <p>When analyzing from source, pass the define to Dialyzer.
+ (**)</p>
+ </item>
+ <tag><c>--dump_callgraph file</c></tag>
+ <item>
+ <p>Dump the call graph into the specified file whose format is
+ determined by the filename extension. Supported extensions are:
+ <c>raw</c>, <c>dot</c>, and <c>ps</c>. If something else is used as
+ filename extension, default format <c>.raw</c> is used.</p>
+ </item>
+ <tag><c>files_or_dirs</c> (for backward compatibility also
+ as <c>-c files_or_dirs</c>)</tag>
+ <item>
+ <p>Use Dialyzer from the command line to detect defects in the
+ specified files or directories containing <c>.erl</c> or
+ <c>.beam</c> files, depending on the type of the
+ analysis.</p>
+ </item>
+ <tag><c>--fullpath</c></tag>
+ <item>
+ <p>Display the full path names of files for which warnings are
+ emitted.</p>
+ </item>
+ <tag><c>--get_warnings</c></tag>
+ <item>
+ <p>Make Dialyzer emit warnings even when manipulating the PLT.
+ Warnings are only emitted for files that are analyzed.</p>
+ </item>
+ <tag><c>--gui</c></tag>
+ <item>
+ <p>Use the GUI.</p></item>
+ <tag><c>--help</c> (or <c>-h</c>)</tag>
+ <item>
+ <p>Print this message and exit.</p>
+ </item>
+ <tag><c>-I include_dir</c></tag>
+ <item>
+ <p>When analyzing from source, pass the <c>include_dir</c>
+ to Dialyzer. (**)</p>
+ </item>
+ <tag><c>--no_check_plt</c></tag>
+ <item>
+ <p>Skip the PLT check when running Dialyzer. This is useful when
+ working with installed PLTs that never change.</p>
+ </item>
+ <tag><c>--no_native</c> (or <c>-nn</c>)</tag>
+ <item>
+ <p>Bypass the native code compilation of some key files that
+ Dialyzer heuristically performs when dialyzing many files.
+ This avoids the compilation time, but can result in (much) longer
+ analysis time.</p>
+ </item>
+ <tag><c>--no_native_cache</c></tag>
+ <item>
+ <p>By default, Dialyzer caches the results of native compilation
+ in directory <c>$XDG_CACHE_HOME/erlang/dialyzer_hipe_cache</c>.
+ <c>XDG_CACHE_HOME</c> defaults to <c>$HOME/.cache</c>.
+ Use this option to disable caching.</p>
+ </item>
+ <tag><c>-o outfile</c> (or
+ <c>--output outfile</c>)</tag>
+ <item>
+ <p>When using Dialyzer from the command line, send the analysis
+ results to the specified outfile rather than to <c>stdout</c>.</p>
+ </item>
+ <tag><c>--output_plt file</c></tag>
+ <item>
+ <p>Store the PLT at the specified file after building it.</p>
+ </item>
+ <tag><c>-pa dir</c></tag>
+ <item>
+ <p>Include <c>dir</c> in the path for Erlang. This is useful
+ when analyzing files that have <c>-include_lib()</c>
+ directives.</p>
+ </item>
+ <tag><c>--plt plt</c></tag>
+ <item>
+ <p>Use the specified PLT as the initial PLT. If the PLT was built
+ during setup, the files are checked for consistency.</p>
+ </item>
+ <tag><c>--plt_info</c></tag>
+ <item>
+ <p>Make Dialyzer print information about the PLT and then quit.
+ The PLT can be specified with <c>--plt(s)</c>.</p>
+ </item>
+ <tag><c>--plts plt*</c></tag>
+ <item>
+ <p>Merge the specified PLTs to create the initial PLT. This requires
+ that the PLTs are disjoint (that is, do not have any module
+ appearing in more than one PLT).
+ The PLTs are created in the usual way:</p>
<code type="none">
- dialyzer --build_plt --output_plt plt_1 files_to_include
- ...
- dialyzer --build_plt --output_plt plt_n files_to_include</code>
- and then can be used in either of the following ways:
+dialyzer --build_plt --output_plt plt_1 files_to_include
+...
+dialyzer --build_plt --output_plt plt_n files_to_include</code>
+ <p>They can then be used in either of the following ways:</p>
<code type="none">
- dialyzer files_to_analyze --plts plt_1 ... plt_n</code>
- or:
+dialyzer files_to_analyze --plts plt_1 ... plt_n</code>
+ <p>or</p>
<code type="none">
- dialyzer --plts plt_1 ... plt_n -- files_to_analyze</code>
- (Note the -- delimiter in the second case)</item>
- <tag><c><![CDATA[-Wwarn]]></c></tag>
- <item>A family of options which selectively turn on/off warnings
- (for help on the names of warnings use
- <c><![CDATA[dialyzer -Whelp]]></c>).
- Note that the options can also be given in the file with a
- <c>-dialyzer()</c> attribute. See <seealso
- marker="#suppression">Requesting or Suppressing Warnings in
- Source Files</seealso> below for details.</item>
- <tag><c><![CDATA[--shell]]></c></tag>
- <item>Do not disable the Erlang shell while running the GUI.</item>
- <tag><c><![CDATA[--version]]></c> (or <c><![CDATA[-v]]></c>)</tag>
- <item>Print the Dialyzer version and some more information and
- exit.</item>
- <tag><c><![CDATA[--help]]></c> (or <c><![CDATA[-h]]></c>)</tag>
- <item>Print this message and exit.</item>
- <tag><c><![CDATA[--quiet]]></c> (or <c><![CDATA[-q]]></c>)</tag>
- <item>Make Dialyzer a bit more quiet.</item>
- <tag><c><![CDATA[--verbose]]></c></tag>
- <item>Make Dialyzer a bit more verbose.</item>
- <tag><c><![CDATA[--statistics]]></c></tag>
- <item>Prints information about the progress of execution (analysis phases,
- time spent in each and size of the relative input).</item>
- <tag><c><![CDATA[--build_plt]]></c></tag>
- <item>The analysis starts from an empty plt and creates a new one from
- the files specified with <c><![CDATA[-c]]></c> and
- <c><![CDATA[-r]]></c>. Only works for beam files. Use
- <c><![CDATA[--plt]]></c> or <c><![CDATA[--output_plt]]></c> to
- override the default plt location.</item>
- <tag><c><![CDATA[--add_to_plt]]></c></tag>
- <item>The plt is extended to also include the files specified with
- <c><![CDATA[-c]]></c> and <c><![CDATA[-r]]></c>. Use
- <c><![CDATA[--plt]]></c> to specify which plt to start from,
- and <c><![CDATA[--output_plt]]></c> to specify where to put the plt.
- Note that the analysis might include files from the plt if they depend
- on the new files. This option only works with beam files.</item>
- <tag><c><![CDATA[--remove_from_plt]]></c></tag>
- <item>The information from the files specified with
- <c><![CDATA[-c]]></c> and <c><![CDATA[-r]]></c> is removed
- from the plt. Note that this may cause a re-analysis of the remaining
- dependent files.</item>
- <tag><c><![CDATA[--check_plt]]></c></tag>
- <item>Check the plt for consistency and rebuild it if it is not
- up-to-date.</item>
- <tag><c><![CDATA[--no_check_plt]]></c></tag>
- <item>Skip the plt check when running Dialyzer. Useful when working with
- installed plts that never change.</item>
- <tag><c><![CDATA[--plt_info]]></c></tag>
- <item>Make Dialyzer print information about the plt and then quit. The
- plt can be specified with <c><![CDATA[--plt(s)]]></c>.</item>
- <tag><c><![CDATA[--get_warnings]]></c></tag>
- <item>Make Dialyzer emit warnings even when manipulating the plt.
- Warnings are only emitted for files that are actually analyzed.</item>
- <tag><c><![CDATA[--dump_callgraph file]]></c></tag>
- <item>Dump the call graph into the specified file whose format is
- determined by the file name extension. Supported extensions are: raw,
- dot, and ps. If something else is used as file name extension, default
- format '.raw' will be used.</item>
- <tag><c><![CDATA[--no_native]]></c> (or <c><![CDATA[-nn]]></c>)</tag>
- <item>Bypass the native code compilation of some key files that Dialyzer
- heuristically performs when dialyzing many files; this avoids the
- compilation time but it may result in (much) longer analysis
- time.</item>
- <tag><c><![CDATA[--no_native_cache]]></c></tag>
- <item>By default, Dialyzer caches the results of native compilation in the
- <c>$XDG_CACHE_HOME/erlang/dialyzer_hipe_cache</c> directory.
- <c>XDG_CACHE_HOME</c> defaults to <c>$HOME/.cache</c>.
- Use this option to disable caching.</item>
- <tag><c><![CDATA[--fullpath]]></c></tag>
- <item>Display the full path names of files for which warnings are emitted.</item>
- <tag><c><![CDATA[--gui]]></c></tag>
- <item>Use the GUI.</item>
+dialyzer --plts plt_1 ... plt_n -- files_to_analyze</code>
+ <p>Notice the <c>--</c> delimiter in the second case.</p>
+ </item>
+ <tag><c>--quiet</c> (or <c>-q</c>)</tag>
+ <item>
+ <p>Make Dialyzer a bit more quiet.</p>
+ </item>
+ <tag><c>-r dirs</c></tag>
+ <item>
+ <p>Same as <c>files_or_dirs</c>, but the specified
+ directories are searched
+ recursively for subdirectories containing <c>.erl</c> or
+ <c>.beam</c> files in them, depending on the type of
+ analysis.</p>
+ </item>
+ <tag><c>--raw</c></tag>
+ <item>
+ <p>When using Dialyzer from the command line, output the raw
+ analysis results (Erlang terms) instead of the formatted result.
+ The raw format
+ is easier to post-process (for example, to filter warnings or to
+ output HTML pages).</p>
+ </item>
+ <tag><c>--remove_from_plt</c></tag>
+ <item>
+ <p>The information from the files specified with
+ <c>-c</c> and <c>-r</c> is removed from
+ the PLT. Notice that this can cause a reanalysis of the remaining
+ dependent files.</p>
+ </item>
+ <tag><c>--shell</c></tag>
+ <item>
+ <p>Do not disable the Erlang shell while running the GUI.</p>
+ </item>
+ <tag><c>--src</c></tag>
+ <item>
+ <p>Override the default, which is to analyze BEAM files, and
+ analyze starting from Erlang source code instead.</p>
+ </item>
+ <tag><c>--statistics</c></tag>
+ <item>
+ <p>Print information about the progress of execution (analysis phases,
+ time spent in each, and size of the relative input).</p>
+ </item>
+ <tag><c>--verbose</c></tag>
+ <item>
+ <p>Make Dialyzer a bit more verbose.</p>
+ </item>
+ <tag><c>--version</c> (or <c>-v</c>)</tag>
+ <item>
+ <p>Print the Dialyzer version and some more information and
+ exit.</p>
+ </item>
+ <tag><c>-Wwarn</c></tag>
+ <item>
+ <p>A family of options that selectively turn on/off warnings.
+ (For help on the names of warnings, use
+ <c>dialyzer -Whelp</c>.)
+ Notice that the options can also be specified in the file with a
+ <c>-dialyzer()</c> attribute. For details, see section <seealso
+ marker="#suppression">Requesting or Suppressing Warnings in
+ Source Files</seealso>.</p>
+ </item>
</taglist>
+
<note>
- <p>* denotes that multiple occurrences of these options are possible.</p>
- <p>** options <c><![CDATA[-D]]></c> and <c><![CDATA[-I]]></c> work both from command-line and in the Dialyzer GUI;
- the syntax of defines and includes is the same as that used by <c><![CDATA[erlc]]></c>.</p>
+ <p>** options <c>-D</c> and <c>-I</c> work both
+ from the command line and in the Dialyzer GUI; the syntax of
+ defines and includes is the same as that used by
+ <seealso marker="erts:erlc">erlc(1)</seealso>.</p>
</note>
- <p>Warning options:</p>
+
+ <p><em>Warning options:</em></p>
+
<taglist>
- <tag><c><![CDATA[-Wno_return]]></c></tag>
- <item>Suppress warnings for functions that will never return a
- value.</item>
- <tag><c><![CDATA[-Wno_unused]]></c></tag>
- <item>Suppress warnings for unused functions.</item>
- <tag><c><![CDATA[-Wno_improper_lists]]></c></tag>
- <item>Suppress warnings for construction of improper lists.</item>
- <tag><c><![CDATA[-Wno_fun_app]]></c></tag>
- <item>Suppress warnings for fun applications that will fail.</item>
- <tag><c><![CDATA[-Wno_match]]></c></tag>
- <item>Suppress warnings for patterns that are unused or cannot
- match.</item>
- <tag><c><![CDATA[-Wno_opaque]]></c></tag>
- <item>Suppress warnings for violations of opaqueness of data types.</item>
- <tag><c><![CDATA[-Wno_fail_call]]></c></tag>
- <item>Suppress warnings for failing calls.</item>
- <tag><c><![CDATA[-Wno_contracts]]></c></tag>
- <item>Suppress warnings about invalid contracts.</item>
- <tag><c><![CDATA[-Wno_behaviours]]></c></tag>
- <item>Suppress warnings about behaviour callbacks which drift from the
- published recommended interfaces.</item>
- <tag><c><![CDATA[-Wno_missing_calls]]></c></tag>
- <item>Suppress warnings about calls to missing functions.</item>
- <tag><c><![CDATA[-Wno_undefined_callbacks]]></c></tag>
- <item>Suppress warnings about behaviours that have no
- <c>-callback</c> attributes for their callbacks.</item>
- <tag><c><![CDATA[-Wunmatched_returns]]></c>***</tag>
- <item>Include warnings for function calls which ignore a structured return
- value or do not match against one of many possible return
- value(s).</item>
- <tag><c><![CDATA[-Werror_handling]]></c>***</tag>
- <item>Include warnings for functions that only return by means of an
- exception.</item>
- <tag><c><![CDATA[-Wrace_conditions]]></c>***</tag>
- <item>Include warnings for possible race conditions. Note that the
- analysis that finds data races performs intra-procedural data flow analysis
- and can sometimes explode in time. Enable it at your own risk.
- </item>
-i <tag><c><![CDATA[-Wunderspecs]]></c>***</tag>
- <item>Warn about underspecified functions
- (the -spec is strictly more allowing than the success typing).</item>
- <tag><c><![CDATA[-Wunknown]]></c>***</tag>
- <item>Let warnings about unknown functions and types affect the
- exit status of the command line version. The default is to ignore
- warnings about unknown functions and types when setting the exit
- status. When using the Dialyzer from Erlang, warnings about unknown
- functions and types are returned; the default is not to return
- these warnings.</item>
+ <tag><c>-Werror_handling</c> (***)</tag>
+ <item>
+ <p>Include warnings for functions that only return by an exception.</p>
+ </item>
+ <tag><c>-Wno_behaviours</c></tag>
+ <item>
+ <p>Suppress warnings about behavior callbacks that drift from the
+ published recommended interfaces.</p>
+ </item>
+ <tag><c>-Wno_contracts</c></tag>
+ <item>
+ <p>Suppress warnings about invalid contracts.</p>
+ </item>
+ <tag><c>-Wno_fail_call</c></tag>
+ <item>
+ <p>Suppress warnings for failing calls.</p>
+ </item>
+ <tag><c>-Wno_fun_app</c></tag>
+ <item>
+ <p>Suppress warnings for fun applications that will fail.</p>
+ </item>
+ <tag><c>-Wno_improper_lists</c></tag>
+ <item>
+ <p>Suppress warnings for construction of improper lists.</p>
+ </item>
+ <tag><c>-Wno_match</c></tag>
+ <item>
+ <p>Suppress warnings for patterns that are unused or cannot match.</p>
+ </item>
+ <tag><c>-Wno_missing_calls</c></tag>
+ <item>
+ <p>Suppress warnings about calls to missing functions.</p>
+ </item>
+ <tag><c>-Wno_opaque</c></tag>
+ <item>
+ <p>Suppress warnings for violations of opacity of data types.</p>
+ </item>
+ <tag><c>-Wno_return</c></tag>
+ <item>
+ <p>Suppress warnings for functions that will never return a value.</p>
+ </item>
+ <tag><c>-Wno_undefined_callbacks</c></tag>
+ <item>
+ <p>Suppress warnings about behaviors that have no
+ <c>-callback</c> attributes for their callbacks.</p>
+ </item>
+ <tag><c>-Wno_unused</c></tag>
+ <item>
+ <p>Suppress warnings for unused functions.</p>
+ </item>
+ <tag><c>-Wrace_conditions</c> (***)</tag>
+ <item>
+ <p>Include warnings for possible race conditions. Notice that the
+ analysis that finds data races performs intra-procedural data flow
+ analysis and can sometimes explode in time. Enable it at your own
+ risk.</p>
+ </item>
+ <tag><c>-Wunderspecs</c> (***)</tag>
+ <item>
+ <p>Warn about underspecified functions (the specification is strictly
+ more allowing than the success typing).</p>
+ </item>
+ <tag><c>-Wunknown</c> (***)</tag>
+ <item>
+ <p>Let warnings about unknown functions and types affect the
+ exit status of the command-line version. The default is to ignore
+ warnings about unknown functions and types when setting the exit
+ status. When using Dialyzer from Erlang, warnings about unknown
+ functions and types are returned; the default is not to return
+ these warnings.</p>
+ </item>
+ <tag><c>-Wunmatched_returns</c> (***)</tag>
+ <item>
+ <p>Include warnings for function calls that ignore a structured return
+ value or do not match against one of many possible return
+ value(s).</p>
+ </item>
</taglist>
- <p>The following options are also available but their use is not
- recommended: (they are mostly for Dialyzer developers and internal
- debugging)</p>
+
+ <p>The following options are also available, but their use is not
+ recommended (they are mostly for Dialyzer developers and internal
+ debugging):</p>
+
<taglist>
- <tag><c><![CDATA[-Woverspecs]]></c>***</tag>
- <item>Warn about overspecified functions
- (the -spec is strictly less allowing than the success typing).</item>
- <tag><c><![CDATA[-Wspecdiffs]]></c>***</tag>
- <item>Warn when the -spec is different than the success typing.</item>
+ <tag><c>-Woverspecs</c> (***)</tag>
+ <item>
+ <p>Warn about overspecified functions (the specification is strictly
+ less allowing than the success typing).</p>
+ </item>
+ <tag><c>-Wspecdiffs</c> (***)</tag>
+ <item>
+ <p>Warn when the specification is different than the success typing.</p>
+ </item>
</taglist>
+
<note>
- <p>*** Identifies options that turn on warnings rather than
- turning them off.</p>
+ <p>*** denotes options that turn on warnings rather than
+ turning them off.</p>
</note>
</section>
<section>
- <title>Using the Dialyzer from Erlang</title>
- <p>You can also use Dialyzer directly from Erlang. Both the GUI and the
- command line versions are available. The options are similar to the ones
- given from the command line, so please refer to the sections above for
- a description of these.</p>
+ <title>Using Dialyzer from Erlang</title>
+ <p>Dialyzer can be used directly from Erlang. Both the GUI and the
+ command-line versions are also available. The options are similar to the
+ ones given from the command line, see section
+ <seealso marker="#command_line">
+ Using Dialyzer from the Command Line</seealso>.</p>
</section>
<section>
<marker id="suppression"></marker>
<title>Requesting or Suppressing Warnings in Source Files</title>
- <p>
- The <c>-dialyzer()</c> attribute can be used for turning off
+ <p>Attribute <c>-dialyzer()</c> can be used for turning off
warnings in a module by specifying functions or warning options.
For example, to turn off all warnings for the function
- <c>f/0</c>, include the following line:
- </p>
-<code type="none">
--dialyzer({nowarn_function, f/0}).
-</code>
+ <c>f/0</c>, include the following line:</p>
+
+ <code type="none">
+-dialyzer({nowarn_function, f/0}).</code>
+
<p>To turn off warnings for improper lists, add the following line
- to the source file:
- </p>
-<code type="none">
--dialyzer(no_improper_lists).
-</code>
- <p>The <c>-dialyzer()</c> attribute is allowed after function
- declarations. Lists of warning options or functions are allowed:
- </p>
-<code type="none">
--dialyzer([{nowarn_function, [f/0]}, no_improper_lists]).
-</code>
- <p>
- Warning options can be restricted to functions:
- </p>
-<code type="none">
--dialyzer({no_improper_lists, g/0}).
-</code>
-<code type="none">
--dialyzer({[no_return, no_match], [g/0, h/0]}).
-</code>
- <p>
- For help on the warning options use <c>dialyzer -Whelp</c>. The
- options are also enumerated <seealso
- marker="#gui/1">below</seealso> (<c>WarnOpts</c>).
- </p>
- <note>
- <p>
- The <c>-dialyzer()</c> attribute is not checked by the Erlang
- Compiler, but by the Dialyzer itself.
- </p>
- </note>
+ to the source file:</p>
+
+ <code type="none">
+-dialyzer(no_improper_lists).</code>
+
+ <p>Attribute <c>-dialyzer()</c> is allowed after function
+ declarations. Lists of warning options or functions are allowed:</p>
+
+ <code type="none">
+-dialyzer([{nowarn_function, [f/0]}, no_improper_lists]).</code>
+
+ <p>Warning options can be restricted to functions:</p>
+
+ <code type="none">
+-dialyzer({no_improper_lists, g/0}).</code>
+
+ <code type="none">
+-dialyzer({[no_return, no_match], [g/0, h/0]}).</code>
+
+ <p>For help on the warning options, use <c>dialyzer -Whelp</c>. The
+ options are also enumerated, see function <seealso marker="#gui/1">
+ <c>gui/1</c></seealso> below (<c>WarnOpts</c>).</p>
+
<note>
- <p>
- The warning option <c>-Wrace_conditions</c> has no effect when
- set in source files.
- </p>
+ <p>Warning option <c>-Wrace_conditions</c> has no effect when
+ set in source files.</p>
</note>
- <p>
- The <c>-dialyzer()</c> attribute can also be used for turning on
- warnings. For instance, if a module has been fixed regarding
- unmatched returns, adding the line
- </p>
-<code type="none">
--dialyzer(unmatched_returns).
-</code>
- <p>
- can help in assuring that no new unmatched return warnings are
- introduced.
- </p>
+
+ <p>Attribute <c>-dialyzer()</c> can also be used for turning on
+ warnings. For example, if a module has been fixed regarding
+ unmatched returns, adding the following line can help in assuring
+ that no new unmatched return warnings are introduced:</p>
+
+ <code type="none">
+-dialyzer(unmatched_returns).</code>
</section>
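
As an illustrative sketch of the attribute forms described in the section above, placed together in one module (the module and function names are made up for the example and are not part of the patch):

%% Hypothetical module showing the -dialyzer() forms documented above.
-module(suppress_example).
-export([f/0, g/0]).

f() -> [1 | 2].   %% improper list; all warnings for f/0 are silenced below
g() -> [3 | 4].   %% improper list; only that warning class is silenced for g/0

%% The attribute is allowed after function declarations.
-dialyzer({nowarn_function, f/0}).     %% turn off all warnings for f/0
-dialyzer({no_improper_lists, g/0}).   %% turn off one warning class for g/0 only
-dialyzer(unmatched_returns).          %% turn a warning class on for the module
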
<funcs>
<func>
+ <name>format_warning(Msg) -> string()</name>
+ <fsummary>Get the string version of a warning message.</fsummary>
+ <type>
+ <v>Msg = {Tag, Id, msg()}</v>
+ <d>See <c>run/1</c>.</d>
+ </type>
+ <desc>
+ <p>Get a string from warnings as returned by
+ <seealso marker="#run/1"><c>run/1</c></seealso>.</p>
+ </desc>
+ </func>
+
+ <func>
<name>gui() -> ok | {error, Msg}</name>
<name>gui(OptList) -> ok | {error, Msg}</name>
- <fsummary>Dialyzer GUI version</fsummary>
+ <fsummary>Dialyzer GUI version.</fsummary>
<type>
- <v>OptList -- see below</v>
+ <v>OptList</v>
+ <d>See below.</d>
</type>
<desc>
<p>Dialyzer GUI version.</p>
@@ -368,9 +499,12 @@ OptList :: [Option]
Option :: {files, [Filename :: string()]}
| {files_rec, [DirName :: string()]}
| {defines, [{Macro :: atom(), Value :: term()}]}
- | {from, src_code | byte_code} %% Defaults to byte_code
- | {init_plt, FileName :: string()} %% If changed from default
- | {plts, [FileName :: string()]} %% If changed from default
+ | {from, src_code | byte_code}
+ %% Defaults to byte_code
+ | {init_plt, FileName :: string()}
+ %% If changed from default
+ | {plts, [FileName :: string()]}
+ %% If changed from default
| {include_dirs, [DirName :: string()]}
| {output_file, FileName :: string()}
| {output_plt, FileName :: string()}
@@ -383,76 +517,71 @@ Option :: {files, [Filename :: string()]}
| {warnings, [WarnOpts]}
| {get_warnings, bool()}
-WarnOpts :: no_return
- | no_unused
- | no_improper_lists
+WarnOpts :: error_handling
+ | no_behaviours
+ | no_contracts
+ | no_fail_call
| no_fun_app
+ | no_improper_lists
| no_match
+ | no_missing_calls
| no_opaque
- | no_fail_call
- | no_contracts
- | no_behaviours
+ | no_return
| no_undefined_callbacks
- | unmatched_returns
- | error_handling
+ | no_unused
| race_conditions
- | overspecs
| underspecs
- | specdiffs
- | unknown</code>
+ | unknown
+ | unmatched_returns
+ | overspecs
+ | specdiffs</code>
</desc>
</func>
+
<func>
- <name>run(OptList) -> Warnings</name>
- <fsummary>Dialyzer command line version</fsummary>
- <type>
- <v>OptList -- see gui/0,1</v>
- <v>Warnings -- see below </v>
- </type>
+ <name>plt_info(string()) -> {'ok', [{atom(), any()}]} | {'error', atom()}</name>
+ <fsummary>Return information about the specified PLT.</fsummary>
<desc>
- <p>Dialyzer command line version.</p>
- <code type="none">
-Warnings :: [{Tag, Id, Msg}]
-Tag :: 'warn_behaviour'
- | 'warn_bin_construction'
- | 'warn_callgraph'
- | 'warn_contract_not_equal'
- | 'warn_contract_range'
- | 'warn_contract_subtype'
- | 'warn_contract_supertype'
- | 'warn_contract_syntax'
- | 'warn_contract_types'
- | 'warn_failing_call'
- | 'warn_fun_app'
- | 'warn_matching'
- | 'warn_non_proper_list'
- | 'warn_not_called'
- | 'warn_opaque'
- | 'warn_race_condition'
- | 'warn_return_no_exit'
- | 'warn_return_only_exit'
- | 'warn_umatched_return'
- | 'warn_undefined_callbacks'
- | 'warn_unknown'
-Id = {File :: string(), Line :: integer()}
-Msg = msg() -- Undefined</code>
+ <p>Returns information about the specified PLT.</p>
</desc>
</func>
+
<func>
- <name>format_warning(Msg) -> string()</name>
- <fsummary>Get the string version of a warning message.</fsummary>
+ <name>run(OptList) -> Warnings</name>
+ <fsummary>Dialyzer command-line version.</fsummary>
<type>
- <v>Msg = {Tag, Id, msg()} -- See run/1</v>
+ <v>OptList</v>
+ <d>See <c>gui/0,1</c>.</d>
+ <v>Warnings</v>
+ <d>See below.</d>
</type>
<desc>
- <p>Get a string from warnings as returned by dialyzer:run/1.</p>
- </desc>
- </func>
- <func>
- <name>plt_info(string()) -> {'ok', [{atom(), any()}]} | {'error', atom()}</name>
- <fsummary>Returns information about the specified plt.</fsummary>
- <desc>
- <p>Returns information about the specified plt.</p>
+ <p>Dialyzer command-line version.</p>
+ <code type="none">
+Warnings :: [{Tag, Id, Msg}]
+Tag :: 'warn_behaviour'
+ | 'warn_bin_construction'
+ | 'warn_callgraph'
+ | 'warn_contract_not_equal'
+ | 'warn_contract_range'
+ | 'warn_contract_subtype'
+ | 'warn_contract_supertype'
+ | 'warn_contract_syntax'
+ | 'warn_contract_types'
+ | 'warn_failing_call'
+ | 'warn_fun_app'
+ | 'warn_matching'
+ | 'warn_non_proper_list'
+ | 'warn_not_called'
+ | 'warn_opaque'
+ | 'warn_race_condition'
+ | 'warn_return_no_exit'
+ | 'warn_return_only_exit'
+ | 'warn_umatched_return'
+ | 'warn_undefined_callbacks'
+ | 'warn_unknown'
+Id = {File :: string(), Line :: integer()}
+Msg = msg() -- Undefined</code>
</desc>
</func>
</funcs>
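
For the run/1 and format_warning/1 functions documented above, a minimal usage sketch follows; it uses only options listed in the reference text, and the directory, PLT file name, and module name are hypothetical (a PLT is assumed to have been built already):

%% Hypothetical helper: analyze the BEAM files under "ebin" against an
%% existing PLT and print each warning as a formatted string.
-module(run_dialyzer_example).
-export([check/0]).

check() ->
    Warnings = dialyzer:run([{files_rec, ["ebin"]},
                             {from, byte_code},             %% the default, shown for clarity
                             {init_plt, "my_project.plt"},  %% hypothetical PLT location
                             {warnings, [unmatched_returns]}]),
    %% Each warning is a {Tag, Id, Msg} tuple, as described for run/1.
    [io:put_chars(dialyzer:format_warning(W)) || W <- Warnings],
    ok.
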
diff --git a/lib/dialyzer/doc/src/dialyzer_chapter.xml b/lib/dialyzer/doc/src/dialyzer_chapter.xml
index 9bfb7ea2e1..b5acf3732e 100644
--- a/lib/dialyzer/doc/src/dialyzer_chapter.xml
+++ b/lib/dialyzer/doc/src/dialyzer_chapter.xml
@@ -4,7 +4,7 @@
<chapter>
<header>
<copyright>
- <year>2006</year><year>2013</year>
+ <year>2006</year><year>2016</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -25,196 +25,211 @@
<title>Dialyzer</title>
<prepared></prepared>
<docno></docno>
- <date></date>
+ <date>2016-09-19</date>
<rev></rev>
<file>dialyzer_chapter.xml</file>
</header>
<section>
<title>Introduction</title>
- <p><em>Dialyzer</em> is a static analysis tool that identifies software discrepancies
- such as type errors, unreachable code, unnecessary tests, etc in single Erlang modules
- or entire (sets of) applications.</p>
- </section>
-
- <section>
- <title>Using the Dialyzer from the GUI</title>
-
<section>
- <title>Choosing the applications or modules</title>
- <p>In the "File" window you will find a listing of the current directory.
- Click your way to the directories/modules you want to add or type the
- correct path in the entry.</p>
- <p>Mark the directories/modules you want to analyze for discrepancies and
- click "Add". You can either add the <c><![CDATA[.beam]]></c> and <c><![CDATA[.erl]]></c>-files directly, or
- you can add directories that contain these kinds of files. Note that
- you are only allowed to add the type of files that can be analyzed in
- the current mode of operation (see below), and that you cannot mix
- <c><![CDATA[.beam]]></c> and <c><![CDATA[.erl]]></c>-files.</p>
+ <title>Scope</title>
+ <p>Dialyzer is a static analysis tool that identifies software
+ discrepancies, such as definite type errors, code that has become dead
+ or unreachable because of programming error, and unnecessary tests,
+ in single Erlang modules or entire (sets of) applications.</p>
+
+ <p>Dialyzer can be called from the command line, from Erlang,
+ and from a GUI.</p>
</section>
<section>
- <title>The analysis modes</title>
- <p>Dialyzer has two modes of analysis, "Byte Code" or "Source Code".
- These are controlled by the buttons in the top-middle part of the
- main window, under "Analysis Options".</p>
- </section>
-
- <section>
- <title>Controlling the discrepancies reported by the Dialyzer</title>
- <p>Under the "Warnings" pull-down menu, there are buttons that control
- which discrepancies are reported to the user in the "Warnings" window.
- By clicking on these buttons, one can enable/disable a whole class of
- warnings. Information about the classes of warnings can be found on
- the "Warnings" item under the "Help" menu (at the rightmost top corner).</p>
- <p>If modules are compiled with inlining, spurious warnings may be emitted.
- In the "Options" menu you can choose to ignore inline-compiled modules
- when analyzing byte code. When starting from source code this is not a
- problem since the inlining is explicitly turned off by Dialyzer. The
- option causes Dialyzer to suppress all warnings from inline-compiled
- modules, since there is currently no way for Dialyzer to find what
- parts of the code have been produced by inlining. </p>
+ <title>Prerequisites</title>
+ <p>It is assumed that the reader is familiar with the Erlang programming
+ language.</p>
</section>
+ </section>
- <section>
- <title>Running the analysis</title>
- <p>Once you have chosen the modules or directories you want to analyze,
- click the "Run" button to start the analysis. If for some reason you
- want to stop the analysis while it is running, push the "Stop" button.</p>
- <p>The information from the analysis will be displayed in the Log and the
- Warnings windows.</p>
- </section>
+ <section>
+ <marker id="plt"/>
+ <title>The Persistent Lookup Table</title>
+ <p>Dialyzer stores the result of an analysis in a Persistent
+ Lookup Table (PLT). The PLT can then be used as a starting
+ point for later analyses. It is recommended to build a PLT with the
+    Erlang/OTP applications that you are using, and also to include your
+    own applications that you use frequently.</p>
+
+ <p>The PLT is built using option <c>--build_plt</c> to Dialyzer.
+ The following command builds the recommended minimal PLT for
+ Erlang/OTP:</p>
- <section>
- <title>Include directories and macro definitions</title>
- <p>When analyzing from source you might have to supply Dialyzer with a
- list of include directories and macro definitions (as you can do with
- the <c><![CDATA[erlc]]></c> flags <c><![CDATA[-I]]></c> and <c><![CDATA[-D]]></c>). This can be done either by starting Dialyzer
- with these flags from the command line as in:</p>
- <code type="none">
+ <code type="none">
+dialyzer --build_plt --apps erts kernel stdlib mnesia</code>
- dialyzer -I my_includes -DDEBUG -Dvsn=42 -I one_more_dir
- </code>
- <p>or by adding these explicitly using the "Manage Macro Definitions" or
- "Manage Include Directories" sub-menus in the "Options" menu.</p>
- </section>
+    <p>Dialyzer looks for an environment variable called
+    <c>DIALYZER_PLT</c> and, if it is set, places the PLT at that location. If no such
+ variable is set, Dialyzer places the PLT at
+ <c>$HOME/.dialyzer_plt</c>. The placement can also be specified using
+ the options <c>--plt</c> or <c>--output_plt</c>.</p>
- <section>
- <title>Saving the information on the Log and Warnings windows</title>
- <p>In the "File" menu there are options to save the contents of the Log
- and the Warnings window. Just choose the options and enter the file to
- save the contents in.</p>
- <p>There are also buttons to clear the contents of each window.</p>
- </section>
+ <p>Information can be added to an existing PLT using option
+ <c>--add_to_plt</c>. If you also want to include the Erlang compiler in
+ the PLT and place it in a new PLT, then use the following command:</p>
- <section>
- <title>Inspecting the inferred types of the analyzed functions</title>
- <p>Dialyzer stores the information of the analyzed functions in a
- Persistent Lookup Table (PLT). After an analysis you can inspect this
- information. In the PLT menu you can choose to either search the PLT
- or inspect the contents of the whole PLT. The information is presented
- in edoc format.</p>
- </section>
- </section>
+ <code type="none">
+dialyzer --add_to_plt --apps compiler --output_plt my.plt</code>
- <section>
- <title>Using the Dialyzer from the command line</title>
- <p>See <seealso marker="dialyzer">dialyzer(3)</seealso>.</p>
- </section>
+ <p>Then you can add your favorite application my_app to the new
+ PLT:</p>
- <section>
- <title>Using the Dialyzer from Erlang</title>
- <p>See <seealso marker="dialyzer">dialyzer(3)</seealso>.</p>
- </section>
+ <code type="none">
+dialyzer --add_to_plt --plt my.plt -r my_app/ebin</code>
- <section>
- <title>More on the Persistent Lookup Table (PLT)</title>
+ <p>But you realize that it is unnecessary to have the Erlang compiler in this
+ one:</p>
- <p> The persistent lookup table, or PLT, is used to store the
- result of an analysis. The PLT can then be used as a starting
- point for later analyses. It is recommended to build a PLT with
- the otp applications that you are using, but also to include your
- own applications that you are using frequently.</p>
+ <code type="none">
+dialyzer --remove_from_plt --plt my.plt --apps compiler</code>
- <p>The PLT is built using the --build_plt option to dialyzer. The
- following command builds the recommended minimal PLT for OTP.</p>
+ <p>Later, when you have fixed a bug in your application my_app,
+ you want to update the PLT so that it becomes fresh the next time
+ you run Dialyzer. In this case, run the following command:</p>
<code type="none">
+dialyzer --check_plt --plt my.plt</code>
- dialyzer --build_plt -r $ERL_TOP/lib/stdlib/ebin\
- $ERL_TOP/lib/kernel/ebin \
- $ERL_TOP/lib/mnesia/ebin
- </code>
+ <p>Dialyzer then reanalyzes the changed files
+    and the files that depend on them. Notice that this
+    consistency check is performed automatically the next time you
+    run Dialyzer with this PLT. Option <c>--check_plt</c> only
+    performs this check, without doing any other analysis.</p>
- <p>Dialyzer will look if there is an environment variable called
- $DIALYZER_PLT and place the PLT at this location. If no such
- variable is set, Dialyzer will place the PLT at
- $HOME/.dialyzer_plt. The placement can also be specified using the
- --plt, or --output_plt options.</p>
-
- <p>You can also add information to an existing plt using the
- --add_to_plt option. Suppose you want to also include the compiler
- in the PLT and place it in a new PLT, then give the command</p>
+ <p>To get information about a PLT, use the following option:</p>
<code type="none">
+dialyzer --plt_info</code>
- dialyzer --add_to_plt -r $ERL_TOP/lib/compiler/ebin --output_plt my.plt
- </code>
+ <p>To specify which PLT, use option <c>--plt</c>.</p>
- <p>Then you would like to add your favorite application my_app to
- the new plt.</p>
+ <p>To get the output printed to a file, use option <c>--output_file</c>.</p>
- <code type="none">
+ <p>Notice that when manipulating the PLT, no warnings are
+ emitted. To turn on warnings during (re)analysis of the PLT, use
+ option <c>--get_warnings</c>.</p>
+ </section>
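As an illustration, the commands above can be combined into a small project workflow; the PLT path and the application name below are placeholders:

    # Keep a project-local PLT via the DIALYZER_PLT environment variable.
    export DIALYZER_PLT=$PWD/my_project.plt
    dialyzer --build_plt --apps erts kernel stdlib
    dialyzer --add_to_plt -r my_app/ebin
    dialyzer --check_plt          # refresh the PLT after code changes
    dialyzer --plt_info           # show what the PLT contains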
- dialyzer --add_to_plt --plt my.plt -r my_app/ebin
- </code>
+ <section>
+ <title>Using Dialyzer from the Command Line</title>
+ <p>Dialyzer has a command-line version for automated use.
+ See <seealso marker="dialyzer"><c>dialyzer(3)</c></seealso>.</p>
+ </section>
- <p>But you realize that it is unnecessary to have compiler in this one.</p>
+ <section>
+ <title>Using Dialyzer from Erlang</title>
+ <p>Dialyzer can also be used directly from Erlang.
+ See <seealso marker="dialyzer"><c>dialyzer(3)</c></seealso>.</p>
+ </section>
- <code type="none">
+ <section>
+ <marker id="dialyzer_gui"/>
+ <title>Using Dialyzer from the GUI</title>
+ <section>
+ <title>Choosing the Applications or Modules</title>
+ <p>The <em>File</em> window displays a listing of the current directory.
+ Click your way to the directories/modules you want to add or type the
+ correct path in the entry.</p>
- dialyzer --remove_from_plt --plt my.plt -r $ERL_TOP/lib/compiler/ebin
- </code>
+ <p>Mark the directories/modules you want to analyze for discrepancies and
+ click <em>Add</em>. You can either add the <c>.beam</c> and
+ <c>.erl</c> files directly, or add directories that contain
+      these kinds of files. Notice that
+ you are only allowed to add the type of files that can be analyzed in
+ the current mode of operation (see below), and that you cannot mix
+ <c>.beam</c> and <c>.erl</c> files.</p>
+ </section>
- <p> Later, when you have fixed a bug in your application my_app,
- you want to update the plt so that it will be fresh the next time
- you run Dialyzer, run the command</p>
+ <section>
+ <title>Analysis Modes</title>
+ <p>Dialyzer has two analysis modes: "Byte Code" and "Source Code".
+ They are controlled by the buttons in the top-middle part of the
+ main window, under <em>Analysis Options</em>.</p>
+ </section>
- <code type="none">
+ <section>
+ <title>Controlling the Discrepancies Reported by Dialyzer</title>
+ <p>Under the <em>Warnings</em> pull-down menu, there are buttons that
+ control which discrepancies are reported to the user in the
+ <em>Warnings</em> window. By clicking these buttons, you can
+ enable/disable a whole class of warnings. Information about the classes
+ of warnings is found on the "Warnings" item under the <em>Help</em>
+ menu (in the rightmost top corner).</p>
+
+ <p>If modules are compiled with inlining, spurious warnings can be
+ emitted. In the <em>Options</em> menu you can choose to ignore
+ inline-compiled modules when analyzing byte code.
+ When starting from source code, this is not a problem because
+ inlining is explicitly turned off by Dialyzer. The option causes
+ Dialyzer to suppress all warnings from inline-compiled
+ modules, as there is currently no way for Dialyzer to find what
+ parts of the code have been produced by inlining.</p>
+ </section>
- dialyzer --check_plt --plt my.plt
- </code>
+ <section>
+ <title>Running the Analysis</title>
+ <p>Once you have chosen the modules or directories you want to analyze,
+      click the <em>Run</em> button to start the analysis. If for some
+      reason you want to stop the analysis while it is running, click the
+ <em>Stop</em> button.</p>
- <p> Dialyzer will then reanalyze the files that have been changed,
- and the files that depend on these files. Note that this
- consistency check will be performed automatically the next time
- you run Dialyzer with this plt. The --check_plt option is merely
- for doing so without doing any other analysis.</p>
+ <p>The information from the analysis is displayed in the <em>Log</em>
+ window and the <em>Warnings</em> window.</p>
+ </section>
- <p> To get some information about a plt use the option</p>
- <code type="none">
+ <section>
+ <title>Include Directories and Macro Definitions</title>
+ <p>When analyzing from source, you might have to supply Dialyzer
+ with a list of include directories and macro definitions (as you can do
+ with the <seealso marker="erts:erlc"><c>erlc</c></seealso> flags
+ <c>-I</c> and <c>-D</c>). This can be done
+ either by starting Dialyzer with these flags from the command
+ line as in:</p>
+
+ <code type="none">
+dialyzer -I my_includes -DDEBUG -Dvsn=42 -I one_more_dir</code>
- dialyzer --plt_info
- </code>
+ <p>or by adding these explicitly using submenu
+ <em>Manage Macro Definitions</em> or
+ <em>Manage Include Directories</em> in the <em>Options</em> menu.</p>
+ </section>
- <p>You can also specify which plt with the --plt option, and get the
- output printed to a file with --output_file</p>
+ <section>
+ <title>Saving the Information on the Log and Warnings Windows</title>
+ <p>The <em>File</em> menu includes options to save the contents of the
+ <em>Log</em> window and the <em>Warnings</em> window. Simply choose the
+ options and enter the file to save the contents in.</p>
- <p>Note that when manipulating the plt, no warnings are
- emitted. To turn on warnings during (re)analysis of the plt, use
- the option --get_warnings.</p>
+ <p>There are also buttons to clear the contents of each window.</p>
+ </section>
+ <section>
+ <title>Inspecting the Inferred Types of the Analyzed Functions</title>
+ <p>Dialyzer stores the information of the analyzed functions in a
+ Persistent Lookup Table (PLT), see section
+ <seealso marker="#plt">The Persistent Lookup Table</seealso>.</p>
+
+ <p>After an analysis, you can inspect this information.
+ In the <em>PLT</em> menu you can choose to either search the PLT
+ or inspect the contents of the whole PLT. The information is presented
+ in <seealso marker="edoc:edoc"><c>EDoc</c></seealso> format.</p>
+ </section>
</section>
<section>
- <title>Feedback and bug reports</title>
- <p>At this point, we very much welcome user feedback (even wish-lists!).
- If you notice something weird, especially if the Dialyzer reports any
- discrepancy that is a false positive, please send an error report
- describing the symptoms and how to reproduce them to:</p>
- <code type="none"><![CDATA[
- ]]></code>
+ <title>Feedback and Bug Reports</title>
+ <p>We very much welcome user feedback - even wishlists!
+ If you notice anything weird, especially if Dialyzer reports
+ any discrepancy that is a false positive, please send an error report
+ describing the symptoms and how to reproduce them.</p>
</section>
</chapter>
diff --git a/lib/dialyzer/doc/src/notes.xml b/lib/dialyzer/doc/src/notes.xml
index 93d3b09f07..0d2cb6c4df 100644
--- a/lib/dialyzer/doc/src/notes.xml
+++ b/lib/dialyzer/doc/src/notes.xml
@@ -4,7 +4,7 @@
<chapter>
<header>
<copyright>
- <year>2006</year><year>2013</year>
+ <year>2006</year><year>2017</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -32,6 +32,400 @@
<p>This document describes the changes made to the Dialyzer
application.</p>
+<section><title>Dialyzer 3.2</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p> The check of bad type variables in type declarations
+	    was mistakenly removed in Erlang/OTP 18, and is now
+ re-introduced. </p>
+ <p>
+ Own Id: OTP-14423 Aux Id: OTP-14323 </p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+	    Analyzing modules that construct binaries from huge
+ strings is now much faster. The compiler also compiles
+ such modules slightly faster.</p>
+ <p>
+ Own Id: OTP-14125 Aux Id: ERL-308 </p>
+ </item>
+ <item>
+ <p> The peak memory consumption is reduced. </p>
+ <p>
+ Own Id: OTP-14127</p>
+ </item>
+ <item>
+ <p> Warnings about unknown types are now also generated
+ for types not used by any function specification. </p>
+ <p>
+ Own Id: OTP-14218 Aux Id: OTP-14127 </p>
+ </item>
+ <item>
+	    <p>TypEr has been removed as a separate application and is
+ now a part of the Dialyzer application. Documentation for
+ TypEr has been added in the Dialyzer application.</p>
+ <p>
+ Own Id: OTP-14336</p>
+ </item>
+ <item>
+ <p>The format of debug information that is stored in BEAM
+ files (when <c>debug_info</c> is used) has been changed.
+ The purpose of the change is to better support other
+ BEAM-based languages such as Elixir or LFE.</p>
+ <p>All tools included in OTP (dialyzer, debugger, cover,
+ and so on) will handle both the new format and the
+ previous format. Tools that retrieve the debug
+	    information using <c>beam_lib:chunks(Beam,
+	    [abstract_code])</c> will continue to work with both the
+	    new and old format. Tools that call
+	    <c>beam_lib:chunks(Beam, ["Abst"])</c> will not work with
+	    the new format (see the sketch after this section).</p>
+ <p>For more information, see the description of
+ <c>debug_info</c> in the documentation for
+ <c>beam_lib</c> and the description of the
+ <c>{debug_info,{Backend,Data}}</c> option in the
+ documentation for <c>compile</c>.</p>
+ <p>
+ Own Id: OTP-14369 Aux Id: PR-1367 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
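As a sketch of the distinction drawn in the OTP-14369 note above, reading the debug information through the documented chunk reference keeps working across formats; the module name below is a placeholder and the file is assumed to be compiled with debug_info:

    %% Format-independent: ask for the abstract_code chunk by reference.
    {ok, {my_mod, [{abstract_code, {raw_abstract_v1, Forms}}]}} =
        beam_lib:chunks("my_mod.beam", [abstract_code]),
    io:format("~p abstract forms~n", [length(Forms)]).
    %% By contrast, beam_lib:chunks(Beam, ["Abst"]) reads the raw chunk
    %% and does not work with the new debug_info format.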
+
+<section><title>Dialyzer 3.1.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p> Report unknown types properly. A bug was introduced
+ in Erlang/OTP 19.3, where warnings about unknown types
+ were simply discarded. </p>
+ <p>
+ Own Id: OTP-14368</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Dialyzer 3.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p> Fix a bug concerning parameterized opaque types. </p>
+ <p>
+ Own Id: OTP-14130</p>
+ </item>
+ <item>
+ <p> Improve a few warnings. One of them could cause a
+ crash. </p>
+ <p>
+ Own Id: OTP-14177</p>
+ </item>
+ <item>
+ <p>The dialyzer and observer applications will now use a
+ portable way to find the home directory. That means that
+ there is no longer any need to manually set the HOME
+ environment variable on Windows.</p>
+ <p>
+ Own Id: OTP-14249 Aux Id: ERL-161 </p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p> The peak memory consumption is reduced. </p><p> The
+ evaluation of huge SCCs in <c>dialyzer_typesig</c> is
+	    optimized. </p><p> Analyzing modules that construct
+	    binaries from huge strings is now much faster. </p>
+ <p>
+ Own Id: OTP-14126 Aux Id: ERL-308 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Dialyzer 3.0.3</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p> Fix bugs regarding opaque types. </p>
+ <p>
+ Own Id: OTP-13693</p>
+ </item>
+ <item>
+ <p> Fix error handling of bad <c>-dialyzer()</c>
+ attributes. </p>
+ <p>
+ Own Id: OTP-13979 Aux Id: ERL-283 </p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p> A few warning messages have been improved. </p>
+ <p>
+ Own Id: OTP-11403</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Dialyzer 3.0.2</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p> The translation of forms to types is improved for
+ opaque types in a few cases. </p>
+ <p>
+ Own Id: OTP-13682</p>
+ </item>
+ <item>
+ <p> Add warning suppression to compiler-generated case
+ statements. Warnings about clauses that cannot match and
+ are also compiler generated are suppressed unless none of
+ the clauses return. </p>
+ <p>
+ Own Id: OTP-13723 Aux Id: ERL-159, PR-1121 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Dialyzer 3.0.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>Fix a map related bug.</p>
+ <p>
+ Own Id: OTP-13709 Aux Id: ERL-177, PR-1115 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Dialyzer 3.0</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p> Fix a bug in the translation of forms to types. </p>
+ <p>
+ Own Id: OTP-13520</p>
+ </item>
+ <item>
+ <p>Correct misspelling in Dialyzer's acronym definition.
+ </p>
+ <p>
+ Own Id: OTP-13544 Aux Id: PR-1007 </p>
+ </item>
+ <item>
+ <p>Dialyzer no longer crashes when there is an invalid
+ function call such as <c>42(7)</c> in a module being
+ analyzed. The compiler will now warn for invalid function
+ calls such as <c>X = 42, x(7)</c>.</p>
+ <p>
+ Own Id: OTP-13552 Aux Id: ERL-138 </p>
+ </item>
+ <item>
+ <p> Fix a bug that caused Dialyzer to go into an infinite
+ loop. </p>
+ <p>
+ Own Id: OTP-13653 Aux Id: ERL-157 </p>
+ </item>
+ <item>
+ <p>Fix a bug in Dialyzer related to call-site
+ analysis.</p>
+ <p>
+ Own Id: OTP-13655 Aux Id: PR-1092 </p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p> The evaluation of SCCs in <c>dialyzer_typesig</c> is
+ optimized. </p> <p> Maps are used instead of Dicts to
+ further optimize the evaluation. </p>
+ <p>
+ Own Id: OTP-10349</p>
+ </item>
+ <item>
+ <p> Since Erlang/OTP R14A, when support for parameterized
+ modules was added, <c>module()</c> has included
+	    <c>tuple()</c>, but that part has now been removed; the type
+ <c>module()</c> is now the same as <c>atom()</c>, as
+ documented in the Reference Manual. </p>
+ <p>
+ Own Id: OTP-13244</p>
+ </item>
+ <item>
+ <p> The type specification syntax for Maps is improved:
+ </p> <list> <item> <p> The association type <c>KeyType :=
+ ValueType</c> denotes an association that must be
+ present. </p> </item> <item> <p> The shorthand <c>...</c>
+ stands for the association type <c>any() => any()</c>.
+ </p> </item> </list> <p> An incompatible change is that
+ <c>#{}</c> stands for the empty map. The type
+ <c>map()</c> (a map of any size) can be written as
+ <c>#{...}</c>. </p>
+ <p>
+ *** POTENTIAL INCOMPATIBILITY ***</p>
+ <p>
+ Own Id: OTP-13542 Aux Id: PR-1014 </p>
+ </item>
+ <item>
+ <p>The translation of forms to types is improved. </p>
+ <p>
+ Own Id: OTP-13547</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Dialyzer 2.9</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p> Dialyzer no longer asserts that files and directories
+ to be removed from a PLT exist. </p>
+ <p>
+ Own Id: OTP-13103 Aux Id: ERL-40 </p>
+ </item>
+ <item>
+ <p> Fix a bug concerning parameterized opaque types. </p>
+ <p>
+ Own Id: OTP-13237</p>
+ </item>
+ <item>
+ <p>
+ Fix pretty printing of Core Maps</p>
+ <p>
+ Literal maps could cause Dialyzer to crash when pretty
+ printing the results.</p>
+ <p>
+ Own Id: OTP-13238</p>
+ </item>
+ <item>
+ <p>
+	    If a behavior module contains a non-exported function
+ with the same name as one of the behavior's callbacks,
+ the callback info was inadvertently deleted from the PLT
+	    when the <c>dialyzer_plt:delete_list/2</c> function was
+ cleaning up the callback table.</p>
+ <p>
+ Own Id: OTP-13287</p>
+ </item>
+ <item>
+ <p> Correct the contract for <c>erlang:byte_size/1</c>
+ </p> <p> Correct the handling of comparison operators for
+ map and bit string operands. </p>
+ <p>
+ Own Id: OTP-13312</p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Dialyzer recognizes calls to <c>M:F/A</c> where <c>M</c>,
+ <c>F</c>, and <c>A</c> are all literals.</p>
+ <p>
+ Own Id: OTP-13217</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Dialyzer 2.8.2</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Reintroduce the <c>erlang:make_fun/3</c> BIF in
+ erl_bif_types.</p>
+ <p>
+ Own Id: OTP-13068</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Dialyzer 2.8.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>Improve the translation of forms to types. </p>
+ <p>
+ Own Id: OTP-12865</p>
+ </item>
+ <item>
+ <p> Fix a bug concerning parameterized opaque types. </p>
+ <p>
+ Own Id: OTP-12866</p>
+ </item>
+ <item>
+ <p> Fix a bug concerning parameterized opaque types. </p>
+ <p>
+ Own Id: OTP-12940</p>
+ </item>
+ <item>
+ <p> Fix bugs concerning <c>erlang:abs/1</c>. </p>
+ <p>
+ Own Id: OTP-12948</p>
+ </item>
+ <item>
+ <p> Fix a bug concerning <c>lists:keydelete/3</c> with
+ union and opaque types. </p>
+ <p>
+ Own Id: OTP-12949</p>
+ </item>
+ <item>
+ <p>
+ Use new function <c>hipe:erts_checksum</c> to get correct
+ runtime checksum for cached beam files.</p>
+ <p>
+ Own Id: OTP-12964 Aux Id: OTP-12963, OTP-12962 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Dialyzer 2.8</title>
<section><title>Fixed Bugs and Malfunctions</title>
@@ -286,7 +680,7 @@
or modifying opaque types within the scope of a module.
</p> <p> Hitherto the shape of terms (tuple, list, etc.)
has been used to determine the opaque terms, but now the
- contracts are used for decorating types with opaqueness.
+ contracts are used for decorating types with opacity.
</p>
<p>
Own Id: OTP-10397</p>
@@ -405,22 +799,28 @@
<p>
EEP43: New data type - Maps</p>
<p>
- With Maps you may for instance: <taglist> <item><c>M0 =
- #{ a =&gt; 1, b =&gt; 2}, % create
- associations</c></item> <item><c>M1 = M0#{ a := 10 }, %
- update values</c></item> <item><c>M2 = M1#{ "hi" =&gt;
- "hello"}, % add new associations</c></item> <item><c>#{
- "hi" := V1, a := V2, b := V3} = M2. % match keys with
- values</c></item> </taglist></p>
+ With Maps you may for instance:</p>
+ <taglist>
+ <tag/> <item><c>M0 = #{ a =&gt; 1, b =&gt; 2}, % create
+ associations</c></item>
+ <tag/><item><c>M1 = M0#{ a := 10 }, % update values</c></item>
+ <tag/><item><c>M2 = M1#{ "hi" =&gt;
+ "hello"}, % add new associations</c></item>
+ <tag/><item><c>#{ "hi" := V1, a := V2, b := V3} = M2.
+ % match keys with values</c></item>
+ </taglist>
<p>
For information on how to use Maps please see Map Expressions in the
<seealso marker="doc/reference_manual:expressions#map_expressions">
Reference Manual</seealso>.</p>
<p>
The current implementation is without the following
- features: <taglist> <item>No variable keys</item>
- <item>No single value access</item> <item>No map
- comprehensions</item> </taglist></p>
+ features:</p>
+ <taglist>
+ <tag/><item>No variable keys</item>
+ <tag/><item>No single value access</item>
+ <tag/><item>No map comprehensions</item>
+ </taglist>
<p>
Note that Maps is <em>experimental</em> during OTP 17.0.</p>
<p>
@@ -734,19 +1134,17 @@
Own Id: OTP-9731</p>
</item>
<item>
- <p>
<list> <item><p>No warnings for underspecs with remote
types</p></item> <item><p> Fix crash in Typer</p></item>
<item><p>Fix Dialyzer's warning for its own
code</p></item> <item><p>Fix Dialyzer's warnings in
HiPE</p></item> <item><p>Add file/line info in a
particular Dialyzer crash</p></item> <item><p>Update
- inets test results</p></item> </list></p>
+ inets test results</p></item> </list>
<p>
Own Id: OTP-9758</p>
</item>
<item>
- <p>
<list> <item><p>Correct callback spec in application
module</p></item> <item><p>Refine warning about callback
specs with extra ranges</p></item> <item><p>Cleanup
@@ -757,7 +1155,7 @@
analysis</p></item> <item><p>Fix crash in
Dialyzer</p></item> <item><p>Variable substitution was
not generalizing any unknown variables.</p></item>
- </list></p>
+ </list>
<p>
Own Id: OTP-9776</p>
</item>
@@ -1265,7 +1663,7 @@
<list>
<item>
<p>The analysis accepts opaque type declarations and
- detects violations of opaqueness of terms of such types.
+ detects violations of opacity of terms of such types.
Starting with R13, many Erlang/OTP standard libraries
(array, dict, digraph, ets, gb_sets, gb_trees, queue, and
sets) contain opaque type declarations of their main data
diff --git a/lib/dialyzer/doc/src/part.xml b/lib/dialyzer/doc/src/part.xml
index eff8e05257..9bfcf21a66 100644
--- a/lib/dialyzer/doc/src/part.xml
+++ b/lib/dialyzer/doc/src/part.xml
@@ -4,7 +4,7 @@
<part xmlns:xi="http://www.w3.org/2001/XInclude">
<header>
<copyright>
- <year>2006</year><year>2013</year>
+ <year>2006</year><year>2016</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -25,12 +25,11 @@
<title>Dialyzer User's Guide</title>
<prepared></prepared>
<docno></docno>
- <date></date>
+ <date>2016-09-19</date>
<rev></rev>
<file>part.xml</file>
</header>
<description>
- <p><em>Dialyzer</em> is a static analysis tool that identifies software discrepancies such as type errors, unreachable code, unnecessary tests, etc in single Erlang modules or entire (sets of) applications.</p>
</description>
<xi:include href="dialyzer_chapter.xml"/>
</part>
diff --git a/lib/dialyzer/doc/src/part_notes.xml b/lib/dialyzer/doc/src/part_notes.xml
index ba8d30e1be..4a0a0af2d1 100644
--- a/lib/dialyzer/doc/src/part_notes.xml
+++ b/lib/dialyzer/doc/src/part_notes.xml
@@ -4,7 +4,7 @@
<part xmlns:xi="http://www.w3.org/2001/XInclude">
<header>
<copyright>
- <year>2006</year><year>2013</year>
+ <year>2006</year><year>2016</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
diff --git a/lib/dialyzer/doc/src/ref_man.xml b/lib/dialyzer/doc/src/ref_man.xml
index 9c3887b599..d8cf324232 100644
--- a/lib/dialyzer/doc/src/ref_man.xml
+++ b/lib/dialyzer/doc/src/ref_man.xml
@@ -4,7 +4,7 @@
<application xmlns:xi="http://www.w3.org/2001/XInclude">
<header>
<copyright>
- <year>2006</year><year>2013</year>
+ <year>2006</year><year>2017</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -25,12 +25,12 @@
<title>Dialyzer Reference Manual</title>
<prepared></prepared>
<docno></docno>
- <date></date>
+ <date>2016-09-19</date>
<rev></rev>
</header>
<description>
- <p><em>Dialyzer</em> is a static analysis tool that identifies software discrepancies such as type errors, unreachable code, unnecessary tests, etc in single Erlang modules or entire (sets of) applications.</p>
</description>
<xi:include href="dialyzer.xml"/>
+ <xi:include href="typer.xml"/>
</application>
diff --git a/lib/dialyzer/doc/src/typer.xml b/lib/dialyzer/doc/src/typer.xml
new file mode 100644
index 0000000000..1cfbe94807
--- /dev/null
+++ b/lib/dialyzer/doc/src/typer.xml
@@ -0,0 +1,157 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!DOCTYPE erlref SYSTEM "erlref.dtd">
+
+<erlref>
+ <header>
+ <copyright>
+ <year>2006</year><year>2017</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+ </legalnotice>
+
+ <title>typer</title>
+ <prepared></prepared>
+ <docno></docno>
+ <date>2017-04-13</date>
+ <rev></rev>
+    <file>typer.xml</file>
+ </header>
+ <module>typer</module>
+  <modulesummary>TypEr, a Type annotator for ERlang programs.
+ </modulesummary>
+ <description>
+ <p>TypEr shows type information for Erlang modules to the user.
+ Additionally, it can annotate the code of files with such type
+ information.</p>
+ </description>
+
+ <section>
+ <marker id="command_line"></marker>
+ <title>Using TypEr from the Command Line</title>
+    <p>TypEr is used from the command line. This section provides a
+ brief description of the options. The same information can be
+ obtained by writing the following in a shell:</p>
+
+ <code type="none">
+typer --help</code>
+
+ <p><em>Usage:</em></p>
+
+ <code type="none">
+typer [--help] [--version] [--plt PLT] [--edoc]
+ [--show | --show-exported | --annotate | --annotate-inc-files]
+ [-Ddefine]* [-I include_dir]* [-pa dir]* [-pz dir]*
+ [-T application]* [-r] file*</code>
+
+ <note>
+ <p>* denotes that multiple occurrences of the option are possible.</p>
+ </note>
+
+ <p><em>Options:</em></p>
+
+ <taglist>
+
+      <tag><c>-r</c></tag>
+ <item>
+ <p>Search directories recursively for .erl files below them.</p>
+ </item>
+ <tag><c>--show</c></tag>
+ <item>
+ <p>Print type specifications for all functions on stdout.
+ (This is the default behaviour; this option is not really
+ needed.)</p>
+ </item>
+
+      <tag><c>--show-exported</c> (or <c>--show_exported</c>)</tag>
+ <item>
+ <p>Same as <c>--show</c>, but print specifications for
+ exported functions only. Specs are displayed sorted
+        alphabetically by function name.</p>
+ </item>
+
+ <tag><c>--annotate</c></tag>
+ <item>
+ <p>Annotate the specified files with type specifications.</p>
+ </item>
+
+ <tag><c>--annotate-inc-files</c></tag>
+ <item>
+ <p>Same as <c>--annotate</c> but annotates all
+ <c>-include()</c> files as well as all .erl files. (Use this
+        option with caution; it has not been tested much.)</p>
+ </item>
+
+ <tag><c>--edoc</c></tag>
+ <item>
+ <p>Print type information as Edoc <c>@spec</c> comments, not
+ as type specs.</p>
+ </item>
+
+ <tag><c>--plt</c></tag>
+ <item>
+ <p>Use the specified dialyzer PLT file rather than the default one.</p>
+ </item>
+
+ <tag><c>-T file*</c></tag>
+ <item>
+        <p>The specified file(s) already contain type specifications
+        and these are trusted when printing specs for the
+        rest of the files. (Multiple files or directories, separated by
+        spaces, can be specified.)</p>
+ </item>
+
+ <tag><c>-Dname</c> (or <c>-Dname=value</c>)</tag>
+ <item>
+ <p>Pass the defined name(s) to TypEr. (**)</p>
+ </item>
+
+ <tag><c>-I</c></tag>
+ <item>
+ <p>Pass the include_dir to TypEr. (**)</p>
+ </item>
+
+ <tag><c>-pa dir</c></tag>
+ <item>
+ <p>Include <c>dir</c> in the path for Erlang. This is useful
+ when analyzing files that have <c>-include_lib()</c>
+ directives or use parse transforms.</p>
+ </item>
+
+ <tag><c>-pz dir</c></tag>
+ <item>
+ <p>Include <c>dir</c> in the path for Erlang. This is useful
+ when analyzing files that have <c>-include_lib()</c>
+ directives or use parse transforms.</p>
+ </item>
+
+ <tag><c>--version</c> (or <c>-v</c>)</tag>
+ <item>
+        <p>Print the TypEr version and some additional information,
+        then exit.</p>
+ </item>
+
+ </taglist>
+
+ <note>
+ <p>** options <c>-D</c> and <c>-I</c> work both
+ from the command line and in the TypEr GUI; the syntax of
+ defines and includes is the same as that used by
+ <seealso marker="erts:erlc">erlc(1)</seealso>.</p>
+ </note>
+
+ </section>
+
+</erlref>
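As a usage illustration of the options documented above (the paths and file names are placeholders):

    # Print specs for all functions in the .erl files under src/, recursively.
    typer -r src/
    # Annotate the specified files, trusting the specs already present in
    # my_specs.erl, and pass an include directory and a macro definition.
    typer --annotate -T my_specs.erl -I include -DDEBUG -r src/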
diff --git a/lib/dialyzer/info b/lib/dialyzer/info
index 9fba4b54ad..b9ea26b712 100644
--- a/lib/dialyzer/info
+++ b/lib/dialyzer/info
@@ -1,2 +1,2 @@
group: tools
-short: The DIALYZER, a DIscrepany AnaLYZer for ERlang programs.
+short: The DIALYZER, a DIscrepancy AnaLYZer for ERlang programs.
diff --git a/lib/dialyzer/src/Makefile b/lib/dialyzer/src/Makefile
index 770af2140f..fc08e7ca2f 100644
--- a/lib/dialyzer/src/Makefile
+++ b/lib/dialyzer/src/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2006-2012. All Rights Reserved.
+# Copyright Ericsson AB 2006-2017. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -61,13 +61,15 @@ MODULES = \
dialyzer_gui_wx \
dialyzer_options \
dialyzer_plt \
+ dialyzer_race_data_server \
dialyzer_races \
dialyzer_succ_typings \
dialyzer_timing \
dialyzer_typesig \
dialyzer_coordinator \
dialyzer_worker \
- dialyzer_utils
+ dialyzer_utils \
+ typer
HRL_FILES= dialyzer.hrl dialyzer_gui_wx.hrl
ERL_FILES= $(MODULES:%=%.erl)
@@ -116,6 +118,9 @@ $(EBIN)/dialyzer_plt.$(EMULATOR): dialyzer_plt.erl ../vsn.mk
$(EBIN)/dialyzer_gui_wx.$(EMULATOR): dialyzer_gui_wx.erl ../vsn.mk
$(erlc_verbose)erlc -W $(ERL_COMPILE_FLAGS) -DVSN="\"v$(VSN)\"" -o$(EBIN) dialyzer_gui_wx.erl
+$(EBIN)/typer.$(EMULATOR): typer.erl ../vsn.mk
+ $(erlc_verbose)erlc -W $(ERL_COMPILE_FLAGS) -DVSN="\"v$(VSN)\"" -o$(EBIN) typer.erl
+
$(APP_TARGET): $(APP_SRC) ../vsn.mk
$(vsn_verbose)sed -e 's;%VSN%;$(VSN);' $< > $@
@@ -140,6 +145,7 @@ $(EBIN)/dialyzer_explanation.beam: dialyzer.hrl
$(EBIN)/dialyzer_gui_wx.beam: dialyzer.hrl dialyzer_gui_wx.hrl
$(EBIN)/dialyzer_options.beam: dialyzer.hrl
$(EBIN)/dialyzer_plt.beam: dialyzer.hrl
+$(EBIN)/dialyzer_race_data_server.beam: dialyzer.hrl
$(EBIN)/dialyzer_races.beam: dialyzer.hrl
$(EBIN)/dialyzer_succ_typings.beam: dialyzer.hrl
$(EBIN)/dialyzer_typesig.beam: dialyzer.hrl
diff --git a/lib/dialyzer/src/dialyzer.app.src b/lib/dialyzer/src/dialyzer.app.src
index 6718178fae..5f803875b0 100644
--- a/lib/dialyzer/src/dialyzer.app.src
+++ b/lib/dialyzer/src/dialyzer.app.src
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -37,15 +37,17 @@
dialyzer_gui_wx,
dialyzer_options,
dialyzer_plt,
+ dialyzer_race_data_server,
dialyzer_races,
dialyzer_succ_typings,
dialyzer_typesig,
dialyzer_utils,
dialyzer_timing,
- dialyzer_worker]},
+ dialyzer_worker,
+ typer]},
{registered, []},
- {applications, [compiler, gs, hipe, kernel, stdlib, wx]},
+ {applications, [compiler, hipe, kernel, stdlib, wx]},
{env, []},
- {runtime_dependencies, ["wx-1.2","syntax_tools-1.6.14","stdlib-2.5",
- "kernel-3.0","hipe-3.10.3","erts-7.0",
- "compiler-5.0"]}]}.
+ {runtime_dependencies, ["wx-1.2","syntax_tools-2.0","stdlib-3.0",
+ "kernel-5.0","hipe-3.15.4","erts-8.0",
+ "compiler-7.0"]}]}.
diff --git a/lib/dialyzer/src/dialyzer.appup.src b/lib/dialyzer/src/dialyzer.appup.src
index 727e961629..d81ff641d7 100644
--- a/lib/dialyzer/src/dialyzer.appup.src
+++ b/lib/dialyzer/src/dialyzer.appup.src
@@ -1,7 +1,7 @@
%% -*- erlang -*-
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
diff --git a/lib/dialyzer/src/dialyzer.erl b/lib/dialyzer/src/dialyzer.erl
index 9f51dfe356..c319acb2fb 100644
--- a/lib/dialyzer/src/dialyzer.erl
+++ b/lib/dialyzer/src/dialyzer.erl
@@ -1,8 +1,4 @@
%% -*- erlang-indent-level: 2 -*-
-%%-----------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -15,9 +11,6 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
%%%-------------------------------------------------------------------
%%% File : dialyzer.erl
@@ -118,13 +111,13 @@ get_plt_info([PLT|PLTs]) ->
String =
case dialyzer_plt:included_files(PLT) of
{ok, Files} ->
- io_lib:format("The PLT ~s includes the following files:\n~p\n\n",
+ io_lib:format("The PLT ~ts includes the following files:\n~tp\n\n",
[PLT, Files]);
{error, read_error} ->
- Msg = io_lib:format("Could not read the PLT file ~p\n\n", [PLT]),
+ Msg = io_lib:format("Could not read the PLT file ~tp\n\n", [PLT]),
throw({dialyzer_error, Msg});
{error, no_such_file} ->
- Msg = io_lib:format("The PLT file ~p does not exist\n\n", [PLT]),
+ Msg = io_lib:format("The PLT file ~tp does not exist\n\n", [PLT]),
throw({dialyzer_error, Msg})
end,
String ++ get_plt_info(PLTs);
@@ -133,16 +126,16 @@ get_plt_info([]) -> "".
do_print_plt_info(PLTInfo, OutputFile) ->
case OutputFile =:= none of
true ->
- io:format("~s", [PLTInfo]),
+ io:format("~ts", [PLTInfo]),
?RET_NOTHING_SUSPICIOUS;
false ->
case file:open(OutputFile, [write]) of
{ok, FileDesc} ->
- io:format(FileDesc, "~s", [PLTInfo]),
+ io:format(FileDesc, "~ts", [PLTInfo]),
ok = file:close(FileDesc),
?RET_NOTHING_SUSPICIOUS;
{error, Reason} ->
- Msg1 = io_lib:format("Could not open output file ~p, Reason: ~p\n",
+ Msg1 = io_lib:format("Could not open output file ~tp, Reason: ~p\n",
[OutputFile, Reason]),
throw({dialyzer_error, Msg1})
end
@@ -271,7 +264,7 @@ cl_halt({ok, R = ?RET_DISCREPANCIES}, #options{output_file = Output}) ->
halt(R);
cl_halt({error, Msg1}, #options{output_file = Output}) ->
%% Msg2 = "dialyzer: Internal problems were encountered in the analysis",
- io:format("\ndialyzer: ~s\n", [Msg1]),
+ io:format("\ndialyzer: ~ts\n", [Msg1]),
cl_check_log(Output),
halt(?RET_INTERNAL_ERROR).
@@ -280,9 +273,9 @@ cl_halt({error, Msg1}, #options{output_file = Output}) ->
cl_check_log(none) ->
ok;
cl_check_log(Output) ->
- io:format(" Check output file `~s' for details\n", [Output]).
+ io:format(" Check output file `~ts' for details\n", [Output]).
--spec format_warning(raw_warning()) -> string().
+-spec format_warning(raw_warning() | dial_warning()) -> string().
format_warning(W) ->
format_warning(W, basename).
@@ -298,7 +291,7 @@ format_warning({_Tag, {File, Line}, Msg}, FOpt) when is_list(File),
basename -> filename:basename(File)
end,
String = lists:flatten(message_to_string(Msg)),
- lists:flatten(io_lib:format("~s:~w: ~s", [F, Line, String])).
+ lists:flatten(io_lib:format("~ts:~w: ~ts", [F, Line, String])).
%%-----------------------------------------------------------------------------
@@ -309,51 +302,55 @@ format_warning({_Tag, {File, Line}, Msg}, FOpt) when is_list(File),
%%----- Warnings for general discrepancies ----------------
message_to_string({apply, [Args, ArgNs, FailReason,
SigArgs, SigRet, Contract]}) ->
- io_lib:format("Fun application with arguments ~s ", [Args]) ++
+ io_lib:format("Fun application with arguments ~ts ", [Args]) ++
call_or_apply_to_string(ArgNs, FailReason, SigArgs, SigRet, Contract);
message_to_string({app_call, [M, F, Args, Culprit, ExpectedType, FoundType]}) ->
- io_lib:format("The call ~s:~s~s requires that ~s is of type ~s not ~s\n",
+ io_lib:format("The call ~s:~ts~ts requires that ~ts is of type ~ts not ~ts\n",
[M, F, Args, Culprit, ExpectedType, FoundType]);
message_to_string({bin_construction, [Culprit, Size, Seg, Type]}) ->
io_lib:format("Binary construction will fail since the ~s field ~s in"
" segment ~s has type ~s\n", [Culprit, Size, Seg, Type]);
message_to_string({call, [M, F, Args, ArgNs, FailReason,
SigArgs, SigRet, Contract]}) ->
- io_lib:format("The call ~w:~w~s ", [M, F, Args]) ++
+ io_lib:format("The call ~w:~tw~ts ", [M, F, Args]) ++
call_or_apply_to_string(ArgNs, FailReason, SigArgs, SigRet, Contract);
message_to_string({call_to_missing, [M, F, A]}) ->
- io_lib:format("Call to missing or unexported function ~w:~w/~w\n", [M, F, A]);
+ io_lib:format("Call to missing or unexported function ~w:~tw/~w\n",
+ [M, F, A]);
message_to_string({exact_eq, [Type1, Op, Type2]}) ->
- io_lib:format("The test ~s ~s ~s can never evaluate to 'true'\n",
+ io_lib:format("The test ~ts ~s ~ts can never evaluate to 'true'\n",
[Type1, Op, Type2]);
message_to_string({fun_app_args, [Args, Type]}) ->
- io_lib:format("Fun application with arguments ~s will fail"
- " since the function has type ~s\n", [Args, Type]);
+ io_lib:format("Fun application with arguments ~ts will fail"
+ " since the function has type ~ts\n", [Args, Type]);
message_to_string({fun_app_no_fun, [Op, Type, Arity]}) ->
- io_lib:format("Fun application will fail since ~s :: ~s"
+ io_lib:format("Fun application will fail since ~ts :: ~ts"
" is not a function of arity ~w\n", [Op, Type, Arity]);
message_to_string({guard_fail, []}) ->
"Clause guard cannot succeed.\n";
message_to_string({guard_fail, [Arg1, Infix, Arg2]}) ->
- io_lib:format("Guard test ~s ~s ~s can never succeed\n", [Arg1, Infix, Arg2]);
+ io_lib:format("Guard test ~ts ~s ~ts can never succeed\n", [Arg1, Infix, Arg2]);
+message_to_string({map_update, [Type, Key]}) ->
+ io_lib:format("A key of type ~ts cannot exist "
+ "in a map of type ~ts\n", [Key, Type]);
message_to_string({neg_guard_fail, [Arg1, Infix, Arg2]}) ->
- io_lib:format("Guard test not(~s ~s ~s) can never succeed\n",
+ io_lib:format("Guard test not(~ts ~s ~ts) can never succeed\n",
[Arg1, Infix, Arg2]);
message_to_string({guard_fail, [Guard, Args]}) ->
- io_lib:format("Guard test ~w~s can never succeed\n", [Guard, Args]);
+ io_lib:format("Guard test ~w~ts can never succeed\n", [Guard, Args]);
message_to_string({neg_guard_fail, [Guard, Args]}) ->
- io_lib:format("Guard test not(~w~s) can never succeed\n", [Guard, Args]);
+ io_lib:format("Guard test not(~w~ts) can never succeed\n", [Guard, Args]);
message_to_string({guard_fail_pat, [Pat, Type]}) ->
- io_lib:format("Clause guard cannot succeed. The ~s was matched"
- " against the type ~s\n", [Pat, Type]);
+ io_lib:format("Clause guard cannot succeed. The ~ts was matched"
+ " against the type ~ts\n", [Pat, Type]);
message_to_string({improper_list_constr, [TlType]}) ->
io_lib:format("Cons will produce an improper list"
- " since its 2nd argument is ~s\n", [TlType]);
+ " since its 2nd argument is ~ts\n", [TlType]);
message_to_string({no_return, [Type|Name]}) ->
NameString =
case Name of
[] -> "The created fun ";
- [F, A] -> io_lib:format("Function ~w/~w ", [F, A])
+ [F, A] -> io_lib:format("Function ~tw/~w ", [F, A])
end,
case Type of
no_match -> NameString ++ "has no clauses that will ever match\n";
@@ -362,125 +359,129 @@ message_to_string({no_return, [Type|Name]}) ->
both -> NameString ++ "has no local return\n"
end;
message_to_string({record_constr, [RecConstr, FieldDiffs]}) ->
- io_lib:format("Record construction ~s violates the"
- " declared type of field ~s\n", [RecConstr, FieldDiffs]);
+ io_lib:format("Record construction ~ts violates the"
+ " declared type of field ~ts\n", [RecConstr, FieldDiffs]);
message_to_string({record_constr, [Name, Field, Type]}) ->
- io_lib:format("Record construction violates the declared type for #~w{}"
- " since ~s cannot be of type ~s\n", [Name, Field, Type]);
+ io_lib:format("Record construction violates the declared type for #~tw{}"
+ " since ~ts cannot be of type ~ts\n", [Name, Field, Type]);
message_to_string({record_matching, [String, Name]}) ->
- io_lib:format("The ~s violates the"
- " declared type for #~w{}\n", [String, Name]);
+ io_lib:format("The ~ts violates the"
+ " declared type for #~tw{}\n", [String, Name]);
message_to_string({record_match, [Pat, Type]}) ->
- io_lib:format("Matching of ~s tagged with a record name violates the declared"
- " type of ~s\n", [Pat, Type]);
+ io_lib:format("Matching of ~ts tagged with a record name violates"
+ " the declared type of ~ts\n", [Pat, Type]);
message_to_string({pattern_match, [Pat, Type]}) ->
- io_lib:format("The ~s can never match the type ~s\n", [Pat, Type]);
+ io_lib:format("The ~ts can never match the type ~ts\n", [Pat, Type]);
message_to_string({pattern_match_cov, [Pat, Type]}) ->
- io_lib:format("The ~s can never match since previous"
- " clauses completely covered the type ~s\n",
+ io_lib:format("The ~ts can never match since previous"
+ " clauses completely covered the type ~ts\n",
[Pat, Type]);
message_to_string({unmatched_return, [Type]}) ->
- io_lib:format("Expression produces a value of type ~s,"
+ io_lib:format("Expression produces a value of type ~ts,"
" but this value is unmatched\n", [Type]);
message_to_string({unused_fun, [F, A]}) ->
- io_lib:format("Function ~w/~w will never be called\n", [F, A]);
+ io_lib:format("Function ~tw/~w will never be called\n", [F, A]);
%%----- Warnings for specs and contracts -------------------
message_to_string({contract_diff, [M, F, _A, Contract, Sig]}) ->
- io_lib:format("Type specification ~w:~w~s"
- " is not equal to the success typing: ~w:~w~s\n",
+ io_lib:format("Type specification ~w:~tw~ts"
+ " is not equal to the success typing: ~w:~tw~ts\n",
[M, F, Contract, M, F, Sig]);
message_to_string({contract_subtype, [M, F, _A, Contract, Sig]}) ->
- io_lib:format("Type specification ~w:~w~s"
- " is a subtype of the success typing: ~w:~w~s\n",
+ io_lib:format("Type specification ~w:~tw~ts"
+ " is a subtype of the success typing: ~w:~tw~ts\n",
[M, F, Contract, M, F, Sig]);
message_to_string({contract_supertype, [M, F, _A, Contract, Sig]}) ->
- io_lib:format("Type specification ~w:~w~s"
- " is a supertype of the success typing: ~w:~w~s\n",
+ io_lib:format("Type specification ~w:~tw~ts"
+ " is a supertype of the success typing: ~w:~tw~ts\n",
[M, F, Contract, M, F, Sig]);
message_to_string({contract_range, [Contract, M, F, ArgStrings, Line, CRet]}) ->
- io_lib:format("The contract ~w:~w~s cannot be right because the inferred"
- " return for ~w~s on line ~w is ~s\n",
+ io_lib:format("The contract ~w:~tw~ts cannot be right because the inferred"
+ " return for ~tw~ts on line ~w is ~ts\n",
[M, F, Contract, F, ArgStrings, Line, CRet]);
message_to_string({invalid_contract, [M, F, A, Sig]}) ->
- io_lib:format("Invalid type specification for function ~w:~w/~w."
- " The success typing is ~s\n", [M, F, A, Sig]);
+ io_lib:format("Invalid type specification for function ~w:~tw/~w."
+ " The success typing is ~ts\n", [M, F, A, Sig]);
+message_to_string({contract_with_opaque, [M, F, A, OpaqueType, SigType]}) ->
+ io_lib:format("The specification for ~w:~tw/~w"
+ " has an opaque subtype ~ts which is violated by the"
+ " success typing ~ts\n", [M, F, A, OpaqueType, SigType]);
message_to_string({extra_range, [M, F, A, ExtraRanges, SigRange]}) ->
- io_lib:format("The specification for ~w:~w/~w states that the function"
- " might also return ~s but the inferred return is ~s\n",
+ io_lib:format("The specification for ~w:~tw/~w states that the function"
+ " might also return ~ts but the inferred return is ~ts\n",
[M, F, A, ExtraRanges, SigRange]);
message_to_string({overlapping_contract, [M, F, A]}) ->
- io_lib:format("Overloaded contract for ~w:~w/~w has overlapping domains;"
+ io_lib:format("Overloaded contract for ~w:~tw/~w has overlapping domains;"
" such contracts are currently unsupported and are simply ignored\n",
[M, F, A]);
message_to_string({spec_missing_fun, [M, F, A]}) ->
- io_lib:format("Contract for function that does not exist: ~w:~w/~w\n",
+ io_lib:format("Contract for function that does not exist: ~w:~tw/~w\n",
[M, F, A]);
%%----- Warnings for opaque type violations -------------------
message_to_string({call_with_opaque, [M, F, Args, ArgNs, ExpArgs]}) ->
- io_lib:format("The call ~w:~w~s contains ~s when ~s\n",
+ io_lib:format("The call ~w:~tw~ts contains ~ts when ~ts\n",
[M, F, Args, form_positions(ArgNs), form_expected(ExpArgs)]);
message_to_string({call_without_opaque, [M, F, Args, ExpectedTriples]}) ->
- io_lib:format("The call ~w:~w~s does not have ~s\n",
+ io_lib:format("The call ~w:~tw~ts does not have ~ts\n",
[M, F, Args, form_expected_without_opaque(ExpectedTriples)]);
message_to_string({opaque_eq, [Type, _Op, OpaqueType]}) ->
- io_lib:format("Attempt to test for equality between a term of type ~s"
- " and a term of opaque type ~s\n", [Type, OpaqueType]);
+ io_lib:format("Attempt to test for equality between a term of type ~ts"
+ " and a term of opaque type ~ts\n", [Type, OpaqueType]);
message_to_string({opaque_guard, [Arg1, Infix, Arg2, ArgNs]}) ->
- io_lib:format("Guard test ~s ~s ~s contains ~s\n",
+ io_lib:format("Guard test ~ts ~s ~ts contains ~s\n",
[Arg1, Infix, Arg2, form_positions(ArgNs)]);
message_to_string({opaque_guard, [Guard, Args]}) ->
- io_lib:format("Guard test ~w~s breaks the opaqueness of its argument\n",
+ io_lib:format("Guard test ~w~ts breaks the opacity of its argument\n",
[Guard, Args]);
message_to_string({opaque_match, [Pat, OpaqueType, OpaqueTerm]}) ->
Term = if OpaqueType =:= OpaqueTerm -> "the term";
true -> OpaqueTerm
end,
- io_lib:format("The attempt to match a term of type ~s against the ~s"
- " breaks the opaqueness of ~s\n", [OpaqueType, Pat, Term]);
+ io_lib:format("The attempt to match a term of type ~s against the ~ts"
+ " breaks the opacity of ~ts\n", [OpaqueType, Pat, Term]);
message_to_string({opaque_neq, [Type, _Op, OpaqueType]}) ->
- io_lib:format("Attempt to test for inequality between a term of type ~s"
- " and a term of opaque type ~s\n", [Type, OpaqueType]);
+ io_lib:format("Attempt to test for inequality between a term of type ~ts"
+ " and a term of opaque type ~ts\n", [Type, OpaqueType]);
message_to_string({opaque_type_test, [Fun, Args, Arg, ArgType]}) ->
- io_lib:format("The type test ~s~s breaks the opaqueness of the term ~s~s\n",
+ io_lib:format("The type test ~ts~ts breaks the opacity of the term ~ts~ts\n",
[Fun, Args, Arg, ArgType]);
message_to_string({opaque_size, [SizeType, Size]}) ->
- io_lib:format("The size ~s breaks the opaqueness of ~s\n",
+ io_lib:format("The size ~ts breaks the opacity of ~ts\n",
[SizeType, Size]);
message_to_string({opaque_call, [M, F, Args, Culprit, OpaqueType]}) ->
- io_lib:format("The call ~s:~s~s breaks the opaqueness of the term ~s :: ~s\n",
+ io_lib:format("The call ~s:~ts~ts breaks the opacity of the term ~ts :: ~ts\n",
[M, F, Args, Culprit, OpaqueType]);
%%----- Warnings for concurrency errors --------------------
message_to_string({race_condition, [M, F, Args, Reason]}) ->
- io_lib:format("The call ~w:~w~s ~s\n", [M, F, Args, Reason]);
+ io_lib:format("The call ~w:~tw~ts ~ts\n", [M, F, Args, Reason]);
%%----- Warnings for behaviour errors --------------------
message_to_string({callback_type_mismatch, [B, F, A, ST, CT]}) ->
- io_lib:format("The inferred return type of ~w/~w (~s) has nothing in common"
- " with ~s, which is the expected return type for the callback of"
- " ~w behaviour\n", [F, A, ST, CT, B]);
+ io_lib:format("The inferred return type of ~tw/~w (~ts) has nothing in"
+ " common with ~ts, which is the expected return type for"
+ " the callback of the ~w behaviour\n", [F, A, ST, CT, B]);
message_to_string({callback_arg_type_mismatch, [B, F, A, N, ST, CT]}) ->
- io_lib:format("The inferred type for the ~s argument of ~w/~w (~s) is"
- " not a supertype of ~s, which is expected type for this"
+ io_lib:format("The inferred type for the ~s argument of ~tw/~w (~ts) is"
+ " not a supertype of ~ts, which is expected type for this"
" argument in the callback of the ~w behaviour\n",
[ordinal(N), F, A, ST, CT, B]);
message_to_string({callback_spec_type_mismatch, [B, F, A, ST, CT]}) ->
- io_lib:format("The return type ~s in the specification of ~w/~w is not a"
- " subtype of ~s, which is the expected return type for the"
- " callback of ~w behaviour\n", [ST, F, A, CT, B]);
+ io_lib:format("The return type ~ts in the specification of ~tw/~w is not a"
+ " subtype of ~ts, which is the expected return type for the"
+ " callback of the ~w behaviour\n", [ST, F, A, CT, B]);
message_to_string({callback_spec_arg_type_mismatch, [B, F, A, N, ST, CT]}) ->
- io_lib:format("The specified type for the ~s argument of ~w/~w (~s) is"
- " not a supertype of ~s, which is expected type for this"
+ io_lib:format("The specified type for the ~ts argument of ~tw/~w (~ts) is"
+ " not a supertype of ~ts, which is expected type for this"
" argument in the callback of the ~w behaviour\n",
[ordinal(N), F, A, ST, CT, B]);
message_to_string({callback_missing, [B, F, A]}) ->
- io_lib:format("Undefined callback function ~w/~w (behaviour '~w')\n",
+ io_lib:format("Undefined callback function ~tw/~w (behaviour ~w)\n",
[F, A, B]);
message_to_string({callback_info_missing, [B]}) ->
io_lib:format("Callback info about the ~w behaviour is not available\n", [B]);
%%----- Warnings for unknown functions, types, and behaviours -------------
message_to_string({unknown_type, {M, F, A}}) ->
- io_lib:format("Unknown type ~w:~w/~w", [M, F, A]);
+ io_lib:format("Unknown type ~w:~tw/~w", [M, F, A]);
message_to_string({unknown_function, {M, F, A}}) ->
- io_lib:format("Unknown function ~w:~w/~w", [M, F, A]);
+ io_lib:format("Unknown function ~w:~tw/~w", [M, F, A]);
message_to_string({unknown_behaviour, B}) ->
io_lib:format("Unknown behaviour ~w", [B]).
diff --git a/lib/dialyzer/src/dialyzer.hrl b/lib/dialyzer/src/dialyzer.hrl
index de236f91ab..e7cf2860b7 100644
--- a/lib/dialyzer/src/dialyzer.hrl
+++ b/lib/dialyzer/src/dialyzer.hrl
@@ -1,9 +1,5 @@
%%% This is an -*- Erlang -*- file.
%%%
-%%% %CopyrightBegin%
-%%%
-%%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
-%%%
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
@@ -16,8 +12,6 @@
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%
-%%% %CopyrightEnd%
-%%%
%%%-------------------------------------------------------------------
%%% File : dialyzer.hrl
%%% Author : Tobias Lindahl <[email protected]>
@@ -60,6 +54,7 @@
-define(WARN_BEHAVIOUR, warn_behaviour).
-define(WARN_UNDEFINED_CALLBACK, warn_undefined_callbacks).
-define(WARN_UNKNOWN, warn_unknown).
+-define(WARN_MAP_CONSTRUCTION, warn_map_construction).
%%
%% The following type has double role:
@@ -75,7 +70,8 @@
| ?WARN_CONTRACT_SUPERTYPE | ?WARN_CALLGRAPH
| ?WARN_UNMATCHED_RETURN | ?WARN_RACE_CONDITION
| ?WARN_BEHAVIOUR | ?WARN_CONTRACT_RANGE
- | ?WARN_UNDEFINED_CALLBACK | ?WARN_UNKNOWN.
+ | ?WARN_UNDEFINED_CALLBACK | ?WARN_UNKNOWN
+ | ?WARN_MAP_CONSTRUCTION.
%%
%% This is the representation of each warning as they will be returned
@@ -123,10 +119,12 @@
%% Record declarations used by various files
%%--------------------------------------------------------------------
--record(analysis, {analysis_pid :: pid(),
+-type doc_plt() :: 'undefined' | dialyzer_plt:plt().
+
+-record(analysis, {analysis_pid :: pid() | 'undefined',
type = succ_typings :: anal_type(),
defines = [] :: [dial_define()],
- doc_plt :: dialyzer_plt:plt(),
+ doc_plt :: doc_plt(),
files = [] :: [file:filename()],
include_dirs = [] :: [file:filename()],
start_from = byte_code :: start_from(),
@@ -135,7 +133,7 @@
race_detection = false :: boolean(),
behaviours_chk = false :: boolean(),
timing = false :: boolean() | 'debug',
- timing_server :: dialyzer_timing:timing_server(),
+ timing_server = none :: dialyzer_timing:timing_server(),
callgraph_file = "" :: file:filename(),
solvers :: [solver()]}).
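The added 'undefined' alternative and the new field default reflect that a record field without an initializer starts out as 'undefined', and recent Erlang/OTP releases no longer add 'undefined' to the declared field type implicitly, so it must be spelled out. A minimal sketch with a hypothetical record:

    %% Field without an initializer: the type must include 'undefined'.
    %% Field with an initializer: the type can stay exact.
    -record(worker_state, {pid :: pid() | 'undefined',
                           mode = idle :: 'idle' | 'busy'}).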
diff --git a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
index 76b43b6ff0..a4b42c9367 100644
--- a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
+++ b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
@@ -1,8 +1,4 @@
%% -*- erlang-indent-level: 2 -*-
-%%--------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -15,9 +11,6 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
%%%-------------------------------------------------------------------
%%% File : dialyzer_analysis_callgraph.erl
@@ -31,14 +24,17 @@
-export([start/3]).
+%%% Export for dialyzer_coordinator...
-export([compile_init_result/0,
- add_to_result/4,
- start_compilation/2,
+ add_to_result/4]).
+%%% ... and export for dialyzer_worker.
+-export([start_compilation/2,
continue_compilation/2]).
-export_type([compile_init_data/0,
- one_file_result/0,
- compile_result/0]).
+ one_file_mid_error/0,
+ one_file_result_ok/0,
+ compile_result/0]).
-include("dialyzer.hrl").
@@ -93,31 +89,20 @@ loop(#server_state{parent = Parent} = State,
send_log(Parent, LogMsg),
loop(State, Analysis, ExtCalls);
{AnalPid, warnings, Warnings} ->
- case Warnings of
- [] -> ok;
- SendWarnings ->
- send_warnings(Parent, SendWarnings)
- end,
+ send_warnings(Parent, Warnings),
loop(State, Analysis, ExtCalls);
{AnalPid, cserver, CServer, Plt} ->
+ skip_ets_transfer(AnalPid),
send_codeserver_plt(Parent, CServer, Plt),
loop(State, Analysis, ExtCalls);
{AnalPid, done, Plt, DocPlt} ->
- case ExtCalls =:= none of
- true ->
- send_analysis_done(Parent, Plt, DocPlt);
- false ->
- send_ext_calls(Parent, ExtCalls),
- send_analysis_done(Parent, Plt, DocPlt)
- end;
+ send_ext_calls(Parent, ExtCalls),
+ send_analysis_done(Parent, Plt, DocPlt);
{AnalPid, ext_calls, NewExtCalls} ->
loop(State, Analysis, NewExtCalls);
{AnalPid, ext_types, ExtTypes} ->
send_ext_types(Parent, ExtTypes),
loop(State, Analysis, ExtCalls);
- {AnalPid, unknown_behaviours, UnknownBehaviour} ->
- send_unknown_behaviours(Parent, UnknownBehaviour),
- loop(State, Analysis, ExtCalls);
{AnalPid, mod_deps, ModDeps} ->
send_mod_deps(Parent, ModDeps),
loop(State, Analysis, ExtCalls);
@@ -149,77 +134,120 @@ analysis_start(Parent, Analysis, LegalWarnings) ->
Files = ordsets:from_list(Analysis#analysis.files),
{Callgraph, TmpCServer0} = compile_and_store(Files, State),
%% Remote type postprocessing
- NewCServer =
- try
- NewRecords = dialyzer_codeserver:get_temp_records(TmpCServer0),
- NewExpTypes = dialyzer_codeserver:get_temp_exported_types(TmpCServer0),
- OldRecords = dialyzer_plt:get_types(Plt),
- OldExpTypes0 = dialyzer_plt:get_exported_types(Plt),
- MergedRecords = dialyzer_utils:merge_records(NewRecords, OldRecords),
- RemMods =
- [case Analysis#analysis.start_from of
- byte_code -> list_to_atom(filename:basename(F, ".beam"));
- src_code -> list_to_atom(filename:basename(F, ".erl"))
- end || F <- Files],
- OldExpTypes1 = dialyzer_utils:sets_filter(RemMods, OldExpTypes0),
- MergedExpTypes = sets:union(NewExpTypes, OldExpTypes1),
- TmpCServer1 = dialyzer_codeserver:set_temp_records(MergedRecords, TmpCServer0),
- TmpCServer2 =
- dialyzer_codeserver:insert_temp_exported_types(MergedExpTypes,
- TmpCServer1),
- TmpCServer3 = dialyzer_utils:process_record_remote_types(TmpCServer2),
- ?timing(State#analysis_state.timing_server, "remote",
- dialyzer_contracts:process_contract_remote_types(TmpCServer3))
- catch
- throw:{error, _ErrorMsg} = Error -> exit(Error)
- end,
- NewPlt0 = dialyzer_plt:insert_types(Plt, dialyzer_codeserver:get_records(NewCServer)),
- ExpTypes = dialyzer_codeserver:get_exported_types(NewCServer),
- NewPlt1 = dialyzer_plt:insert_exported_types(NewPlt0, ExpTypes),
- State0 = State#analysis_state{plt = NewPlt1},
- dump_callgraph(Callgraph, State0, Analysis),
- State1 = State0#analysis_state{codeserver = NewCServer},
+ Args = {Plt, Analysis, Parent},
+ NewCServer = remote_type_postprocessing(TmpCServer0, Args),
+ dump_callgraph(Callgraph, State, Analysis),
%% Remove all old versions of the files being analyzed
AllNodes = dialyzer_callgraph:all_nodes(Callgraph),
- Plt1 = dialyzer_plt:delete_list(NewPlt1, AllNodes),
+ Plt1_a = dialyzer_plt:delete_list(Plt, AllNodes),
+ Plt1 = dialyzer_plt:insert_callbacks(Plt1_a, NewCServer),
+ State1 = State#analysis_state{codeserver = NewCServer, plt = Plt1},
Exports = dialyzer_codeserver:get_exports(NewCServer),
+ NonExports = sets:subtract(sets:from_list(AllNodes), Exports),
+ NonExportsList = sets:to_list(NonExports),
NewCallgraph =
case Analysis#analysis.race_detection of
true -> dialyzer_callgraph:put_race_detection(true, Callgraph);
false -> Callgraph
end,
- State2 = analyze_callgraph(NewCallgraph, State1#analysis_state{plt = Plt1}),
+ State2 = analyze_callgraph(NewCallgraph, State1),
+ #analysis_state{plt = Plt2,
+ doc_plt = DocPlt,
+ codeserver = Codeserver0} = State2,
+ {Codeserver, Plt3} = move_data(Codeserver0, Plt2),
dialyzer_callgraph:dispose_race_server(NewCallgraph),
- rcv_and_send_ext_types(Parent),
- NonExports = sets:subtract(sets:from_list(AllNodes), Exports),
- NonExportsList = sets:to_list(NonExports),
- Plt2 = dialyzer_plt:delete_list(State2#analysis_state.plt, NonExportsList),
- send_codeserver_plt(Parent, CServer, State2#analysis_state.plt),
- send_analysis_done(Parent, Plt2, State2#analysis_state.doc_plt).
+ %% Since the PLT is never used, a dummy is sent:
+ DummyPlt = dialyzer_plt:new(),
+ send_codeserver_plt(Parent, Codeserver, DummyPlt),
+ dialyzer_plt:delete(DummyPlt),
+ Plt4 = dialyzer_plt:delete_list(Plt3, NonExportsList),
+ send_analysis_done(Parent, Plt4, DocPlt).
+
+remote_type_postprocessing(TmpCServer, Args) ->
+ Fun = fun() ->
+ exit(remote_type_postproc(TmpCServer, Args))
+ end,
+ {Pid, Ref} = erlang:spawn_monitor(Fun),
+ dialyzer_codeserver:give_away(TmpCServer, Pid),
+ Pid ! {self(), go},
+ receive {'DOWN', Ref, process, Pid, Return} ->
+ skip_ets_transfer(Pid),
+ case Return of
+ {error, _ErrorMsg} = Error -> exit(Error);
+ _ -> Return
+ end
+ end.
+
+remote_type_postproc(TmpCServer0, Args) ->
+ {Plt, Analysis, Parent} = Args,
+ fun() ->
+ Caller = receive {Pid, go} -> Pid end,
+ TmpCServer1 = dialyzer_utils:merge_types(TmpCServer0, Plt),
+ NewExpTypes = dialyzer_codeserver:get_temp_exported_types(TmpCServer0),
+ OldExpTypes0 = dialyzer_plt:get_exported_types(Plt),
+ #analysis{start_from = StartFrom,
+ timing_server = TimingServer} = Analysis,
+ Files = ordsets:from_list(Analysis#analysis.files),
+ RemMods =
+ [case StartFrom of
+ byte_code -> list_to_atom(filename:basename(F, ".beam"));
+ src_code -> list_to_atom(filename:basename(F, ".erl"))
+ end || F <- Files],
+ OldExpTypes1 = dialyzer_utils:sets_filter(RemMods, OldExpTypes0),
+ MergedExpTypes = sets:union(NewExpTypes, OldExpTypes1),
+ TmpCServer2 =
+ dialyzer_codeserver:finalize_exported_types(MergedExpTypes,
+ TmpCServer1),
+ TmpServer4 =
+ ?timing
+ (TimingServer, "remote",
+ begin
+ TmpCServer3 =
+ dialyzer_utils:process_record_remote_types(TmpCServer2),
+ dialyzer_contracts:process_contract_remote_types(TmpCServer3)
+ end),
+ rcv_and_send_ext_types(Caller, Parent),
+ dialyzer_codeserver:give_away(TmpServer4, Caller),
+ TmpServer4
+ end().
+
+skip_ets_transfer(Pid) ->
+ receive
+ {'ETS-TRANSFER', _Tid, Pid, _HeriData} ->
+ skip_ets_transfer(Pid)
+ after 0 ->
+ ok
+ end.
+
+move_data(CServer, Plt) ->
+ {CServer1, Records} = dialyzer_codeserver:extract_records(CServer),
+ Plt1 = dialyzer_plt:insert_types(Plt, Records),
+ {NewCServer, ExpTypes} = dialyzer_codeserver:extract_exported_types(CServer1),
+ NewPlt = dialyzer_plt:insert_exported_types(Plt1, ExpTypes),
+ {NewCServer, NewPlt}.
analyze_callgraph(Callgraph, #analysis_state{codeserver = Codeserver,
doc_plt = DocPlt,
+ plt = Plt,
timing_server = TimingServer,
parent = Parent,
solvers = Solvers} = State) ->
- Plt = dialyzer_plt:insert_callbacks(State#analysis_state.plt, Codeserver),
- {NewPlt, NewDocPlt} =
- case State#analysis_state.analysis_type of
- plt_build ->
- NewPlt0 =
- dialyzer_succ_typings:analyze_callgraph(Callgraph, Plt, Codeserver,
- TimingServer, Solvers, Parent),
- {NewPlt0, DocPlt};
- succ_typings ->
- {Warnings, NewPlt0, NewDocPlt0} =
- dialyzer_succ_typings:get_warnings(Callgraph, Plt, DocPlt, Codeserver,
- TimingServer, Solvers, Parent),
- Warnings1 = filter_warnings(Warnings, Codeserver),
- send_warnings(State#analysis_state.parent, Warnings1),
- {NewPlt0, NewDocPlt0}
- end,
- dialyzer_callgraph:delete(Callgraph),
- State#analysis_state{plt = NewPlt, doc_plt = NewDocPlt}.
+ case State#analysis_state.analysis_type of
+ plt_build ->
+ NewPlt =
+ dialyzer_succ_typings:analyze_callgraph(Callgraph, Plt, Codeserver,
+ TimingServer, Solvers, Parent),
+ dialyzer_callgraph:delete(Callgraph),
+ State#analysis_state{plt = NewPlt, doc_plt = DocPlt};
+ succ_typings ->
+ {Warnings, NewPlt, NewDocPlt} =
+ dialyzer_succ_typings:get_warnings(Callgraph, Plt, DocPlt, Codeserver,
+ TimingServer, Solvers, Parent),
+ dialyzer_callgraph:delete(Callgraph),
+ Warnings1 = filter_warnings(Warnings, Codeserver),
+ send_warnings(State#analysis_state.parent, Warnings1),
+ State#analysis_state{plt = NewPlt, doc_plt = NewDocPlt}
+ end.
%%--------------------------------------------------------------------
%% Build the callgraph and fill the codeserver.
@@ -256,6 +284,10 @@ compile_and_store(Files, #analysis_state{codeserver = CServer,
{T1, _} = statistics(wall_clock),
Callgraph = dialyzer_callgraph:new(),
CompileInit = make_compile_init(State, Callgraph),
+ %% Spawn a worker per file: each worker calls start_compilation on
+ %% its file, asks the coordinator for the next label, and calls
+ %% continue_compilation; the coordinator aggregates the results
+ %% using (compile_init_result and) add_to_result.
{{Failed, Modules}, NextLabel} =
?timing(Timing, "compile", _C1,
dialyzer_coordinator:parallel_job(compile, Files,
@@ -272,7 +304,7 @@ compile_and_store(Files, #analysis_state{codeserver = CServer,
dict:new(), Files),
check_for_duplicate_modules(ModDict);
false ->
- Msg = io_lib:format("Could not scan the following file(s):~n~s",
+ Msg = io_lib:format("Could not scan the following file(s):~n~ts",
[[Reason || {_Filename, Reason} <- Failed]]),
exit({error, Msg})
end,
@@ -287,16 +319,18 @@ compile_and_store(Files, #analysis_state{codeserver = CServer,
send_log(Parent, Msg2),
{Callgraph, CServer2}.
--type compile_init_data() :: #compile_init{}.
--type error_reason() :: string().
--type compile_result() :: {[{file:filename(), error_reason()}],
- [module()]}. %%opaque
--type one_file_result() :: {error, error_reason()} |
- {ok, [dialyzer_callgraph:callgraph_edge()],
- [mfa_or_funlbl()], module()}. %%opaque
--type compile_mid_data() :: {module(), cerl:cerl(),
- dialyzer_callgraph:callgraph(),
- dialyzer_codeserver:codeserver()}.
+-opaque compile_init_data() :: #compile_init{}.
+-type error_reason() :: string().
+-opaque compile_result() :: {[{file:filename(), error_reason()}],
+ [module()]}.
+-type one_file_mid_error() :: {error, error_reason()}.
+-opaque one_file_result_ok() :: {ok, [dialyzer_callgraph:callgraph_edge()],
+ [mfa_or_funlbl()], module()}.
+-type one_file_result() :: one_file_mid_error() |
+ one_file_result_ok().
+-type compile_mid_data() :: {module(), cerl:cerl(),
+ dialyzer_callgraph:callgraph(),
+ dialyzer_codeserver:codeserver()}.
-spec compile_init_result() -> compile_result().
@@ -368,64 +402,46 @@ compile_src(File, Includes, Defines, Callgraph, CServer, UseContracts,
DefaultIncludes = default_includes(filename:dirname(File)),
SrcCompOpts = dialyzer_utils:src_compiler_opts(),
CompOpts = SrcCompOpts ++ Includes ++ Defines ++ DefaultIncludes,
- case dialyzer_utils:get_abstract_code_from_src(File, CompOpts) of
+ case dialyzer_utils:get_core_from_src(File, CompOpts) of
{error, _Msg} = Error -> Error;
- {ok, AbstrCode} ->
- compile_common(File, AbstrCode, CompOpts, Callgraph, CServer,
- UseContracts, LegalWarnings)
+ {ok, Core} ->
+ compile_common(Core, Callgraph, CServer, UseContracts, LegalWarnings)
end.
compile_byte(File, Callgraph, CServer, UseContracts, LegalWarnings) ->
- case dialyzer_utils:get_abstract_code_from_beam(File) of
- error ->
- {error, " Could not get abstract code for: " ++ File ++ "\n" ++
- " Recompile with +debug_info or analyze starting from source code"};
- {ok, AbstrCode} ->
- compile_byte(File, AbstrCode, Callgraph, CServer, UseContracts,
- LegalWarnings)
- end.
-
-compile_byte(File, AbstrCode, Callgraph, CServer, UseContracts,
- LegalWarnings) ->
- case dialyzer_utils:get_compile_options_from_beam(File) of
- error ->
- {error, " Could not get compile options for: " ++ File ++ "\n" ++
- " Recompile or analyze starting from source code"};
- {ok, CompOpts} ->
- compile_common(File, AbstrCode, CompOpts, Callgraph, CServer,
- UseContracts, LegalWarnings)
+ case dialyzer_utils:get_core_from_beam(File) of
+ {error, _} = Error -> Error;
+ {ok, Core} ->
+ compile_common(Core, Callgraph, CServer, UseContracts, LegalWarnings)
end.
-compile_common(File, AbstrCode, CompOpts, Callgraph, CServer,
- UseContracts, LegalWarnings) ->
- case dialyzer_utils:get_core_from_abstract_code(AbstrCode, CompOpts) of
- error -> {error, " Could not get core Erlang code for: " ++ File};
- {ok, Core} ->
- Mod = cerl:concrete(cerl:module_name(Core)),
- case dialyzer_utils:get_record_and_type_info(AbstrCode) of
+compile_common(Core, Callgraph, CServer, UseContracts, LegalWarnings) ->
+ Mod = cerl:concrete(cerl:module_name(Core)),
+ case dialyzer_utils:get_record_and_type_info(Core) of
+ {error, _} = Error -> Error;
+ {ok, RecInfo} ->
+ CServer1 =
+ dialyzer_codeserver:store_temp_records(Mod, RecInfo, CServer),
+ case dialyzer_utils:get_fun_meta_info(Mod, Core, LegalWarnings) of
{error, _} = Error -> Error;
- {ok, RecInfo} ->
- CServer1 =
- dialyzer_codeserver:store_temp_records(Mod, RecInfo, CServer),
- MetaFunInfo =
- dialyzer_utils:get_fun_meta_info(Mod, AbstrCode, LegalWarnings),
- CServer2 =
- dialyzer_codeserver:insert_fun_meta_info(MetaFunInfo, CServer1),
+ MetaFunInfo ->
+ CServer2 =
+ dialyzer_codeserver:insert_fun_meta_info(MetaFunInfo, CServer1),
case UseContracts of
true ->
- case dialyzer_utils:get_spec_info(Mod, AbstrCode, RecInfo) of
- {error, _} = Error -> Error;
- {ok, SpecInfo, CallbackInfo} ->
- CServer3 =
- dialyzer_codeserver:store_temp_contracts(Mod, SpecInfo,
- CallbackInfo,
- CServer2),
- store_core(Mod, Core, Callgraph, CServer3)
+ case dialyzer_utils:get_spec_info(Mod, Core, RecInfo) of
+ {error, _} = Error -> Error;
+ {ok, SpecInfo, CallbackInfo} ->
+ CServer3 =
+ dialyzer_codeserver:store_temp_contracts(Mod, SpecInfo,
+ CallbackInfo,
+ CServer2),
+ store_core(Mod, Core, Callgraph, CServer3)
end;
false ->
store_core(Mod, Core, Callgraph, CServer2)
- end
end
+ end
end.
store_core(Mod, Core, Callgraph, CServer) ->
@@ -437,7 +453,8 @@ store_core(Mod, Core, Callgraph, CServer) ->
CoreSize = cerl_trees:size(CoreTree),
{ok, CoreSize, {Mod, CoreTree, Callgraph, CServer}}.
--spec continue_compilation(integer(), compile_mid_data()) -> one_file_result().
+-spec continue_compilation(integer(), compile_mid_data()) ->
+ one_file_result_ok().
continue_compilation(NextLabel, {Mod, CoreTree, Callgraph, CServer}) ->
{LabeledTree, _NewNextLabel} = cerl_trees:label(CoreTree, NextLabel),
@@ -525,13 +542,13 @@ default_includes(Dir) ->
%% Handle Messages
%%-------------------------------------------------------------------
-rcv_and_send_ext_types(Parent) ->
+rcv_and_send_ext_types(SendTo, Parent) ->
Self = self(),
Self ! {Self, done},
case rcv_ext_types(Self, []) of
[] -> ok;
ExtTypes ->
- Parent ! {Self, ext_types, ExtTypes},
+ Parent ! {SendTo, ext_types, ExtTypes},
ok
end.
@@ -569,6 +586,8 @@ send_analysis_done(Parent, Plt, DocPlt) ->
Parent ! {self(), done, Plt, DocPlt},
ok.
+send_ext_calls(_Parent, none) ->
+ ok;
send_ext_calls(Parent, ExtCalls) ->
Parent ! {self(), ext_calls, ExtCalls},
ok.
@@ -577,11 +596,8 @@ send_ext_types(Parent, ExtTypes) ->
Parent ! {self(), ext_types, ExtTypes},
ok.
-send_unknown_behaviours(Parent, UnknownBehaviours) ->
- Parent ! {self(), unknown_behaviours, UnknownBehaviours},
- ok.
-
-send_codeserver_plt(Parent, CServer, Plt ) ->
+send_codeserver_plt(Parent, CServer, Plt) ->
+ ok = dialyzer_codeserver:give_away(CServer, Parent),
Parent ! {self(), cserver, CServer, Plt},
ok.
@@ -600,14 +616,14 @@ format_bad_calls([{{_, _, _}, {_, module_info, A}}|Left], CodeServer, Acc)
format_bad_calls([{FromMFA, {M, F, A} = To}|Left], CodeServer, Acc) ->
{_Var, FunCode} = dialyzer_codeserver:lookup_mfa_code(FromMFA, CodeServer),
Msg = {call_to_missing, [M, F, A]},
- {File, Line} = find_call_file_and_line(FunCode, To),
+ {File, Line} = find_call_file_and_line(FromMFA, FunCode, To, CodeServer),
WarningInfo = {File, Line, FromMFA},
NewAcc = [{?WARN_CALLGRAPH, WarningInfo, Msg}|Acc],
format_bad_calls(Left, CodeServer, NewAcc);
format_bad_calls([], _CodeServer, Acc) ->
Acc.
-find_call_file_and_line(Tree, MFA) ->
+find_call_file_and_line({Module, _, _}, Tree, MFA, CodeServer) ->
Fun =
fun(SubTree, Acc) ->
case cerl:is_c_call(SubTree) of
@@ -620,7 +636,30 @@ find_call_file_and_line(Tree, MFA) ->
case {cerl:concrete(M), cerl:concrete(F), A} of
MFA ->
Ann = cerl:get_ann(SubTree),
- [{get_file(Ann), get_line(Ann)}|Acc];
+ [{get_file(CodeServer, Module, Ann), get_line(Ann)}|Acc];
+ {erlang, make_fun, 3} ->
+ [CA1, CA2, CA3] = cerl:call_args(SubTree),
+ case
+ cerl:is_c_atom(CA1) andalso
+ cerl:is_c_atom(CA2) andalso
+ cerl:is_c_int(CA3)
+ of
+ true ->
+ case
+ {cerl:concrete(CA1),
+ cerl:concrete(CA2),
+ cerl:concrete(CA3)}
+ of
+ MFA ->
+ Ann = cerl:get_ann(SubTree),
+ [{get_file(CodeServer, Module, Ann),
+ get_line(Ann)}|Acc];
+ _ ->
+ Acc
+ end;
+ false ->
+ Acc
+ end;
_ -> Acc
end;
false -> Acc
@@ -634,8 +673,10 @@ get_line([Line|_]) when is_integer(Line) -> Line;
get_line([_|Tail]) -> get_line(Tail);
get_line([]) -> -1.
-get_file([{file, File}|_]) -> File;
-get_file([_|Tail]) -> get_file(Tail).
+get_file(Codeserver, Module, [{file, FakeFile}|_]) ->
+ dialyzer_codeserver:translate_fake_file(Codeserver, Module, FakeFile);
+get_file(Codeserver, Module, [_|Tail]) ->
+ get_file(Codeserver, Module, Tail).
-spec dump_callgraph(dialyzer_callgraph:callgraph(), #analysis_state{}, #analysis{}) ->
'ok'.
@@ -658,12 +699,13 @@ dump_callgraph(CallGraph, _State, #analysis{callgraph_file = File}, ".ps") ->
Args = "-Gratio=compress -Gsize=\"100,100\"",
dialyzer_callgraph:to_ps(CallGraph, File, Args);
dump_callgraph(CallGraph, State, #analysis{callgraph_file = File}, _Ext) ->
+ %% TODO: write the graph, not the ETS table identifiers.
case file:open(File, [write]) of
{ok, Fd} ->
io:format(Fd, "~p", [CallGraph]),
ok = file:close(Fd);
{error, Reason} ->
- Msg = io_lib:format("Could not open output file ~p, Reason: ~p\n",
+ Msg = io_lib:format("Could not open output file ~tp, Reason: ~p\n",
[File, Reason]),
send_log(State#analysis_state.parent, Msg)
end.
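remote_type_postprocessing/2 above runs the postprocessing in a spawned helper: the codeserver's ETS tables are handed over with give_away, the result comes back in the exit reason carried by the monitor's 'DOWN' message, and leftover 'ETS-TRANSFER' messages are flushed afterwards. A minimal sketch of that pattern, not part of the patch; the module name, table and messages are invented:

    -module(give_away_demo).
    -export([run/0]).

    %% Minimal sketch, not part of the patch.  An ETS table is handed to a
    %% helper process with ets:give_away/3, the helper fills it in and hands
    %% it back, and its result travels in the exit reason delivered by the
    %% 'DOWN' message of the monitor.
    run() ->
        Tab = ets:new(demo, [public]),
        Self = self(),
        Fun = fun() ->
                  receive {Self, go} -> ok end,          % wait until we own the table
                  true = ets:insert(Tab, {answer, 42}),
                  true = ets:give_away(Tab, Self, back), % hand ownership back
                  exit({done, ets:info(Tab, size)})      % result rides on the exit reason
              end,
        {Pid, Ref} = erlang:spawn_monitor(Fun),
        true = ets:give_away(Tab, Pid, take),            % transfer ownership first
        Pid ! {Self, go},
        receive
            {'DOWN', Ref, process, Pid, Result} ->
                flush_transfers(Pid),                    % drop pending 'ETS-TRANSFER' messages
                {Result, ets:lookup(Tab, answer)}
        end.

    flush_transfers(Pid) ->
        receive
            {'ETS-TRANSFER', _Tab, Pid, _GiftData} -> flush_transfers(Pid)
        after 0 -> ok
        end.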
diff --git a/lib/dialyzer/src/dialyzer_behaviours.erl b/lib/dialyzer/src/dialyzer_behaviours.erl
index b6f15fdc0e..d380ab2a50 100644
--- a/lib/dialyzer/src/dialyzer_behaviours.erl
+++ b/lib/dialyzer/src/dialyzer_behaviours.erl
@@ -1,8 +1,4 @@
%% -*- erlang-indent-level: 2 -*-
-%%-----------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -15,9 +11,6 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
%%%-------------------------------------------------------------------
%%% File : dialyzer_behaviours.erl
@@ -62,9 +55,9 @@ check_callbacks(Module, Attrs, Records, Plt, Codeserver) ->
_ ->
MFA = {Module,module_info,0},
{_Var,Code} = dialyzer_codeserver:lookup_mfa_code(MFA, Codeserver),
- File = get_file(cerl:get_ann(Code)),
+ File = get_file(Codeserver, Module, cerl:get_ann(Code)),
State = #state{plt = Plt, filename = File, behlines = BehLines,
- codeserver = Codeserver, records = Records},
+ codeserver = Codeserver, records = Records},
Warnings = get_warnings(Module, Behaviours, State),
[add_tag_warning_info(Module, W, State) || W <- Warnings]
end.
@@ -213,12 +206,15 @@ add_tag_warning_info(Module, {_Tag, [_B, Fun, Arity|_R]} = Warn, State) ->
dialyzer_codeserver:lookup_mfa_code({Module, Fun, Arity},
State#state.codeserver),
Anns = cerl:get_ann(FunCode),
- WarningInfo = {get_file(Anns), get_line(Anns), {Module, Fun, Arity}},
+ File = get_file(State#state.codeserver, Module, Anns),
+ WarningInfo = {File, get_line(Anns), {Module, Fun, Arity}},
{?WARN_BEHAVIOUR, WarningInfo, Warn}.
get_line([Line|_]) when is_integer(Line) -> Line;
get_line([_|Tail]) -> get_line(Tail);
get_line([]) -> -1.
-get_file([{file, File}|_]) -> File;
-get_file([_|Tail]) -> get_file(Tail).
+get_file(Codeserver, Module, [{file, FakeFile}|_]) ->
+ dialyzer_codeserver:translate_fake_file(Codeserver, Module, FakeFile);
+get_file(Codeserver, Module, [_|Tail]) ->
+ get_file(Codeserver, Module, Tail).
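get_file/3 above maps a "fake" file annotation back to a real file name via dialyzer_codeserver:translate_fake_file/3; the names are compressed when the module is inserted (see compress_file_anno later in this diff). A minimal sketch of the idea, not part of the patch, assuming the fake file is simply an integer index into a per-module table; the actual representation is internal to the codeserver:

    -module(fake_file_demo).
    -export([compress/1, translate/2]).

    %% Minimal sketch, not part of the patch.  File names in annotation
    %% lists are replaced by small integers, together with a table that
    %% translates the integers back to the real names.
    -spec compress([[term()]]) -> {[{pos_integer(), string()}], [[term()]]}.
    compress(AnnLists) ->
        Files = lists:usort([F || Anns <- AnnLists, {file, F} <- Anns]),
        Table = lists:zip(lists:seq(1, length(Files)), Files),
        Small = [[case Ann of
                      {file, F} -> {file, index_of(F, Table)};
                      _ -> Ann
                  end || Ann <- Anns] || Anns <- AnnLists],
        {Table, Small}.

    -spec translate(pos_integer(), [{pos_integer(), string()}]) -> string().
    translate(Index, Table) ->
        {_, File} = lists:keyfind(Index, 1, Table),
        File.

    index_of(File, Table) ->
        hd([I || {I, F} <- Table, F =:= File]).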
diff --git a/lib/dialyzer/src/dialyzer_callgraph.erl b/lib/dialyzer/src/dialyzer_callgraph.erl
index a1cd2015ca..7411b1d28b 100644
--- a/lib/dialyzer/src/dialyzer_callgraph.erl
+++ b/lib/dialyzer/src/dialyzer_callgraph.erl
@@ -1,8 +1,4 @@
%% -*- erlang-indent-level: 2 -*-
-%%-----------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -15,9 +11,6 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
%%%-------------------------------------------------------------------
%%% File : dialyzer_callgraph.erl
@@ -47,7 +40,7 @@
module_postorder_from_funs/2,
new/0,
get_depends_on/2,
- get_required_by/2,
+ %% get_required_by/2,
in_neighbours/2,
renew_race_info/4,
renew_race_code/2,
@@ -96,28 +89,34 @@
%% whenever applicable.
%%-----------------------------------------------------------------------------
+%% Types with comment 'race' are due to dialyzer_races.erl.
-record(callgraph, {digraph = digraph:new() :: digraph:graph(),
- active_digraph :: active_digraph(),
- esc :: ets:tid(),
- letrec_map :: ets:tid(),
+ active_digraph :: active_digraph()
+ | 'undefined', % race
+ esc :: ets:tid()
+ | 'undefined', % race
+ letrec_map :: ets:tid()
+ | 'undefined', % race
name_map :: ets:tid(),
rev_name_map :: ets:tid(),
- rec_var_map :: ets:tid(),
- self_rec :: ets:tid(),
- calls :: ets:tid(),
+ rec_var_map :: ets:tid()
+ | 'undefined', % race
+ self_rec :: ets:tid()
+ | 'undefined', % race
+ calls :: ets:tid()
+ | 'undefined', % race
race_detection = false :: boolean(),
- race_data_server = new_race_data_server() :: pid()}).
-
--record(race_data_state, {race_code = dict:new() :: dict:dict(),
- public_tables = [] :: [label()],
- named_tables = [] :: [string()],
- beh_api_calls = [] :: [{mfa(), mfa()}]}).
+ race_data_server = dialyzer_race_data_server:new() :: pid()}).
%% Exported Types
--type callgraph() :: #callgraph{}.
+-opaque callgraph() :: #callgraph{}.
--type active_digraph() :: {'d', digraph:graph()} | {'e', ets:tid(), ets:tid()}.
+-type active_digraph() :: {'d', digraph:graph()}
+ | {'e',
+ Out :: ets:tid(),
+ In :: ets:tid(),
+ Map :: ets:tid()}.
%%----------------------------------------------------------------------
@@ -246,23 +245,29 @@ find_non_local_calls([], Set) ->
-spec get_depends_on(scc() | module(), callgraph()) -> [scc()].
-get_depends_on(SCC, #callgraph{active_digraph = {'e', Out, _In}}) ->
- case ets_lookup_dict(SCC, Out) of
- {ok, Value} -> Value;
- error -> []
- end;
+get_depends_on(SCC, #callgraph{active_digraph = {'e', Out, _In, Maps}}) ->
+ lookup_scc(SCC, Out, Maps);
get_depends_on(SCC, #callgraph{active_digraph = {'d', DG}}) ->
digraph:out_neighbours(DG, SCC).
--spec get_required_by(scc() | module(), callgraph()) -> [scc()].
-
-get_required_by(SCC, #callgraph{active_digraph = {'e', _Out, In}}) ->
- case ets_lookup_dict(SCC, In) of
- {ok, Value} -> Value;
+%% -spec get_required_by(scc() | module(), callgraph()) -> [scc()].
+
+%% get_required_by(SCC, #callgraph{active_digraph = {'e', _Out, In, Maps}}) ->
+%% lookup_scc(SCC, In, Maps);
+%% get_required_by(SCC, #callgraph{active_digraph = {'d', DG}}) ->
+%% digraph:in_neighbours(DG, SCC).
+
+lookup_scc(SCC, Table, Maps) ->
+ case ets_lookup_dict({'scc', SCC}, Maps) of
+ {ok, SCCInt} ->
+ case ets_lookup_dict(SCCInt, Table) of
+ {ok, Ints} ->
+ [ets:lookup_element(Maps, Int, 2) || Int <- Ints];
+ error ->
+ []
+ end;
error -> []
- end;
-get_required_by(SCC, #callgraph{active_digraph = {'d', DG}}) ->
- digraph:in_neighbours(DG, SCC).
+ end.
%%----------------------------------------------------------------------
%% Handling of modules & SCCs
@@ -280,9 +285,11 @@ module_postorder(#callgraph{digraph = DG}) ->
Nodes = sets:from_list([M || {M,_F,_A} <- digraph_vertices(DG)]),
MDG = digraph:new([acyclic]),
digraph_confirm_vertices(sets:to_list(Nodes), MDG),
- Foreach = fun({M1,M2}) -> digraph:add_edge(MDG, M1, M2) end,
+ Foreach = fun({M1,M2}) -> _ = digraph:add_edge(MDG, M1, M2) end,
lists:foreach(Foreach, sets:to_list(Edges)),
- {digraph_utils:topsort(MDG), {'d', MDG}}.
+ %% The out-neighbors of a vertex are the vertices called directly.
+ %% The used vertices are to occur *before* the calling vertex:
+ {lists:reverse(digraph_utils:topsort(MDG)), {'d', MDG}}.
edge_fold({{M1,_,_},{M2,_,_}}, Set) ->
case M1 =/= M2 of
@@ -300,7 +307,7 @@ module_deps(#callgraph{digraph = DG}) ->
Nodes = sets:from_list([M || {M,_F,_A} <- digraph_vertices(DG)]),
MDG = digraph:new(),
digraph_confirm_vertices(sets:to_list(Nodes), MDG),
- Foreach = fun({M1,M2}) -> digraph:add_edge(MDG, M1, M2) end,
+ Foreach = fun({M1,M2}) -> check_add_edge(MDG, M1, M2) end,
lists:foreach(Foreach, sets:to_list(Edges)),
Deps = [{N, ordsets:from_list(digraph:in_neighbours(MDG, N))}
|| N <- sets:to_list(Nodes)],
@@ -358,7 +365,7 @@ ets_lookup_set(Key, Table) ->
%% The core tree must be labeled as by cerl_trees:label/1 (or /2).
%% The set of labels in the tree must be disjoint from the set of
-%% labels already occuring in the callgraph.
+%% labels already occurring in the callgraph.
-spec scan_core_tree(cerl:c_module(), callgraph()) ->
{[mfa_or_funlbl()], [callgraph_edge()]}.
@@ -478,14 +485,37 @@ scan_one_core_fun(TopTree, FunName) ->
call ->
CalleeM = cerl:call_module(Tree),
CalleeF = cerl:call_name(Tree),
- A = length(cerl:call_args(Tree)),
+ CalleeArgs = cerl:call_args(Tree),
+ A = length(CalleeArgs),
case (cerl:is_c_atom(CalleeM) andalso
cerl:is_c_atom(CalleeF)) of
true ->
M = cerl:atom_val(CalleeM),
F = cerl:atom_val(CalleeF),
case erl_bif_types:is_known(M, F, A) of
- true -> Acc;
+ true ->
+ case {M, F, A} of
+ {erlang, make_fun, 3} ->
+ [CA1, CA2, CA3] = CalleeArgs,
+ case
+ cerl:is_c_atom(CA1) andalso
+ cerl:is_c_atom(CA2) andalso
+ cerl:is_c_int(CA3)
+ of
+ true ->
+ MM = cerl:atom_val(CA1),
+ FF = cerl:atom_val(CA2),
+ AA = cerl:int_val(CA3),
+ case erl_bif_types:is_known(MM, FF, AA) of
+ true -> Acc;
+ false -> [{FunName, {MM, FF, AA}}|Acc]
+ end;
+ false ->
+ Acc
+ end;
+ _ ->
+ Acc
+ end;
false -> [{FunName, {M, F, A}}|Acc]
end;
false ->
@@ -524,9 +554,21 @@ digraph_add_edge(From, To, DG) ->
false -> digraph:add_vertex(DG, To);
{To, _} -> ok
end,
- digraph:add_edge(DG, {From, To}, From, To, []),
+ check_add_edge(DG, {From, To}, From, To, []),
ok.
+check_add_edge(G, V1, V2) ->
+ case digraph:add_edge(G, V1, V2) of
+ {error, Error} -> exit({add_edge, V1, V2, Error});
+ _Edge -> ok
+ end.
+
+check_add_edge(G, E, V1, V2, L) ->
+ case digraph:add_edge(G, E, V1, V2, L) of
+ {error, Error} -> exit({add_edge, E, V1, V2, L, Error});
+ _Edge -> ok
+ end.
+
digraph_confirm_vertices([MFA|Left], DG) ->
digraph:add_vertex(DG, MFA, confirmed),
digraph_confirm_vertices(Left, DG);
@@ -557,9 +599,10 @@ digraph_delete(DG) ->
active_digraph_delete({'d', DG}) ->
digraph:delete(DG);
-active_digraph_delete({'e', Out, In}) ->
+active_digraph_delete({'e', Out, In, Maps}) ->
ets:delete(Out),
- ets:delete(In).
+ ets:delete(In),
+ ets:delete(Maps).
digraph_edges(DG) ->
digraph:edges(DG).
@@ -586,45 +629,30 @@ digraph_reaching_subgraph(Funs, DG) ->
renew_race_info(#callgraph{race_data_server = RaceDataServer} = CG,
RaceCode, PublicTables, NamedTables) ->
- ok = race_data_server_cast(
+ ok = dialyzer_race_data_server:cast(
{renew_race_info, {RaceCode, PublicTables, NamedTables}},
RaceDataServer),
CG.
-renew_race_info({RaceCode, PublicTables, NamedTables},
- #race_data_state{} = State) ->
- State#race_data_state{race_code = RaceCode,
- public_tables = PublicTables,
- named_tables = NamedTables}.
-
-spec renew_race_code(dialyzer_races:races(), callgraph()) -> callgraph().
renew_race_code(Races, #callgraph{race_data_server = RaceDataServer} = CG) ->
Fun = dialyzer_races:get_curr_fun(Races),
FunArgs = dialyzer_races:get_curr_fun_args(Races),
Code = lists:reverse(dialyzer_races:get_race_list(Races)),
- ok = race_data_server_cast(
+ ok = dialyzer_race_data_server:cast(
{renew_race_code, {Fun, FunArgs, Code}},
RaceDataServer),
CG.
-renew_race_code_handler({Fun, FunArgs, Code},
- #race_data_state{race_code = RaceCode} = State) ->
- State#race_data_state{race_code = dict:store(Fun, [FunArgs, Code], RaceCode)}.
-
-spec renew_race_public_tables(label(), callgraph()) -> callgraph().
renew_race_public_tables(VarLabel,
#callgraph{race_data_server = RaceDataServer} = CG) ->
ok =
- race_data_server_cast({renew_race_public_tables, VarLabel}, RaceDataServer),
+ dialyzer_race_data_server:cast({renew_race_public_tables, VarLabel}, RaceDataServer),
CG.
-renew_race_public_tables_handler(VarLabel,
- #race_data_state{public_tables = PT}
- = State) ->
- State#race_data_state{public_tables = ordsets:add_element(VarLabel, PT)}.
-
-spec cleanup(callgraph()) -> callgraph().
cleanup(#callgraph{digraph = Digraph,
@@ -634,18 +662,18 @@ cleanup(#callgraph{digraph = Digraph,
#callgraph{digraph = Digraph,
name_map = NameMap,
rev_name_map = RevNameMap,
- race_data_server = race_data_server_call(dup, RaceDataServer)}.
+ race_data_server = dialyzer_race_data_server:duplicate(RaceDataServer)}.
-spec duplicate(callgraph()) -> callgraph().
duplicate(#callgraph{race_data_server = RaceDataServer} = Callgraph) ->
Callgraph#callgraph{
- race_data_server = race_data_server_call(dup, RaceDataServer)}.
+ race_data_server = dialyzer_race_data_server:duplicate(RaceDataServer)}.
-spec dispose_race_server(callgraph()) -> ok.
dispose_race_server(#callgraph{race_data_server = RaceDataServer}) ->
- race_data_server_cast(stop, RaceDataServer).
+ dialyzer_race_data_server:stop(RaceDataServer).
-spec get_digraph(callgraph()) -> digraph:graph().
@@ -655,17 +683,17 @@ get_digraph(#callgraph{digraph = Digraph}) ->
-spec get_named_tables(callgraph()) -> [string()].
get_named_tables(#callgraph{race_data_server = RaceDataServer}) ->
- race_data_server_call(get_named_tables, RaceDataServer).
+ dialyzer_race_data_server:call(get_named_tables, RaceDataServer).
-spec get_public_tables(callgraph()) -> [label()].
get_public_tables(#callgraph{race_data_server = RaceDataServer}) ->
- race_data_server_call(get_public_tables, RaceDataServer).
+ dialyzer_race_data_server:call(get_public_tables, RaceDataServer).
-spec get_race_code(callgraph()) -> dict:dict().
get_race_code(#callgraph{race_data_server = RaceDataServer}) ->
- race_data_server_call(get_race_code, RaceDataServer).
+ dialyzer_race_data_server:call(get_race_code, RaceDataServer).
-spec get_race_detection(callgraph()) -> boolean().
@@ -675,12 +703,12 @@ get_race_detection(#callgraph{race_detection = RD}) ->
-spec get_behaviour_api_calls(callgraph()) -> [{mfa(), mfa()}].
get_behaviour_api_calls(#callgraph{race_data_server = RaceDataServer}) ->
- race_data_server_call(get_behaviour_api_calls, RaceDataServer).
+ dialyzer_race_data_server:call(get_behaviour_api_calls, RaceDataServer).
-spec race_code_new(callgraph()) -> callgraph().
race_code_new(#callgraph{race_data_server = RaceDataServer} = CG) ->
- ok = race_data_server_cast(race_code_new, RaceDataServer),
+ ok = dialyzer_race_data_server:cast(race_code_new, RaceDataServer),
CG.
-spec put_digraph(digraph:graph(), callgraph()) -> callgraph().
@@ -691,7 +719,7 @@ put_digraph(Digraph, Callgraph) ->
-spec put_race_code(dict:dict(), callgraph()) -> callgraph().
put_race_code(RaceCode, #callgraph{race_data_server = RaceDataServer} = CG) ->
- ok = race_data_server_cast({put_race_code, RaceCode}, RaceDataServer),
+ ok = dialyzer_race_data_server:cast({put_race_code, RaceCode}, RaceDataServer),
CG.
-spec put_race_detection(boolean(), callgraph()) -> callgraph().
@@ -703,78 +731,23 @@ put_race_detection(RaceDetection, Callgraph) ->
put_named_tables(NamedTables,
#callgraph{race_data_server = RaceDataServer} = CG) ->
- ok = race_data_server_cast({put_named_tables, NamedTables}, RaceDataServer),
+ ok = dialyzer_race_data_server:cast({put_named_tables, NamedTables}, RaceDataServer),
CG.
-spec put_public_tables([label()], callgraph()) -> callgraph().
put_public_tables(PublicTables,
#callgraph{race_data_server = RaceDataServer} = CG) ->
- ok = race_data_server_cast({put_public_tables, PublicTables}, RaceDataServer),
+ ok = dialyzer_race_data_server:cast({put_public_tables, PublicTables}, RaceDataServer),
CG.
-spec put_behaviour_api_calls([{mfa(), mfa()}], callgraph()) -> callgraph().
put_behaviour_api_calls(Calls,
#callgraph{race_data_server = RaceDataServer} = CG) ->
- ok = race_data_server_cast({put_behaviour_api_calls, Calls}, RaceDataServer),
+ ok = dialyzer_race_data_server:cast({put_behaviour_api_calls, Calls}, RaceDataServer),
CG.
-
-new_race_data_server() ->
- spawn_link(fun() -> race_data_server_loop(#race_data_state{}) end).
-
-race_data_server_loop(State) ->
- receive
- {call, From, Ref, Query} ->
- Reply = race_data_server_handle_call(Query, State),
- From ! {Ref, Reply},
- race_data_server_loop(State);
- {cast, stop} ->
- ok;
- {cast, Message} ->
- NewState = race_data_server_handle_cast(Message, State),
- race_data_server_loop(NewState)
- end.
-
-race_data_server_call(Query, Server) ->
- Ref = make_ref(),
- Server ! {call, self(), Ref, Query},
- receive
- {Ref, Reply} -> Reply
- end.
-
-race_data_server_cast(Message, Server) ->
- Server ! {cast, Message},
- ok.
-
-race_data_server_handle_cast(race_code_new, State) ->
- State#race_data_state{race_code = dict:new()};
-race_data_server_handle_cast({Tag, Data}, State) ->
- case Tag of
- renew_race_info -> renew_race_info(Data, State);
- renew_race_code -> renew_race_code_handler(Data, State);
- renew_race_public_tables -> renew_race_public_tables_handler(Data, State);
- put_race_code -> State#race_data_state{race_code = Data};
- put_public_tables -> State#race_data_state{public_tables = Data};
- put_named_tables -> State#race_data_state{named_tables = Data};
- put_behaviour_api_calls -> State#race_data_state{beh_api_calls = Data}
- end.
-
-race_data_server_handle_call(Query,
- #race_data_state{race_code = RaceCode,
- public_tables = PublicTables,
- named_tables = NamedTables,
- beh_api_calls = BehApiCalls}
- = State) ->
- case Query of
- dup -> spawn_link(fun() -> race_data_server_loop(State) end);
- get_race_code -> RaceCode;
- get_public_tables -> PublicTables;
- get_named_tables -> NamedTables;
- get_behaviour_api_calls -> BehApiCalls
- end.
-
%%=============================================================================
%% Utilities for 'dot'
%%=============================================================================
@@ -782,6 +755,7 @@ race_data_server_handle_call(Query,
-spec to_dot(callgraph(), file:filename()) -> 'ok'.
to_dot(#callgraph{digraph = DG, esc = Esc} = CG, File) ->
+ %% TODO: handle Unicode names.
Fun = fun(L) ->
case lookup_name(L, CG) of
error -> L;
@@ -796,44 +770,60 @@ to_dot(#callgraph{digraph = DG, esc = Esc} = CG, File) ->
-spec to_ps(callgraph(), file:filename(), string()) -> 'ok'.
to_ps(#callgraph{} = CG, File, Args) ->
+ %% TODO: handle Unicode names.
Dot_File = filename:rootname(File) ++ ".dot",
to_dot(CG, Dot_File),
- Command = io_lib:format("dot -Tps ~s -o ~s ~s", [Args, File, Dot_File]),
+ Command = io_lib:format("dot -Tps ~ts -o ~ts ~ts", [Args, File, Dot_File]),
_ = os:cmd(Command),
ok.
condensation(G) ->
- SCs = digraph_utils:strong_components(G),
- V2I = ets:new(condensation_v2i, []),
- I2C = ets:new(condensation_i2c, []),
- I2I = ets:new(condensation_i2i, [bag]),
- CFun =
- fun(SC, N) ->
- lists:foreach(fun(V) -> true = ets:insert(V2I, {V,N}) end, SC),
- true = ets:insert(I2C, {N, SC}),
- N + 1
- end,
- lists:foldl(CFun, 1, SCs),
- Fun1 =
- fun({V1, V2}) ->
- I1 = ets:lookup_element(V2I, V1, 2),
- I2 = ets:lookup_element(V2I, V2, 2),
- I1 =:= I2 orelse ets:insert(I2I, {I1, I2})
- end,
- lists:foreach(Fun1, digraph:edges(G)),
- Fun3 =
- fun({I1, I2}, {Out, In}) ->
- SC1 = ets:lookup_element(I2C, I1, 2),
- SC2 = ets:lookup_element(I2C, I2, 2),
- {dict:append(SC1, SC2, Out), dict:append(SC2, SC1, In)}
- end,
- {OutDict, InDict} = ets:foldl(Fun3, {dict:new(), dict:new()}, I2I),
- [OutETS, InETS] =
- [ets:new(Name,[{read_concurrency, true}]) ||
- Name <- [callgraph_deps_out, callgraph_deps_in]],
- ets:insert(OutETS, dict:to_list(OutDict)),
- ets:insert(InETS, dict:to_list(InDict)),
- ets:delete(V2I),
- ets:delete(I2C),
- ets:delete(I2I),
- {{'e', OutETS, InETS}, SCs}.
+ {Pid, Ref} = erlang:spawn_monitor(do_condensation(G, self())),
+ receive {'DOWN', Ref, process, Pid, Result} ->
+ {SCCInts, OutETS, InETS, MapsETS} = Result,
+ NewSCCs = [ets:lookup_element(MapsETS, SCCInt, 2) || SCCInt <- SCCInts],
+ {{'e', OutETS, InETS, MapsETS}, NewSCCs}
+ end.
+
+-spec do_condensation(digraph:graph(), pid()) -> fun(() -> no_return()).
+
+do_condensation(G, Parent) ->
+ fun() ->
+ [OutETS, InETS, MapsETS] =
+ [ets:new(Name,[{read_concurrency, true}]) ||
+ Name <- [callgraph_deps_out, callgraph_deps_in, callgraph_scc_map]],
+ SCCs = digraph_utils:strong_components(G),
+ %% Assign unique numbers to SCCs:
+ Ints = lists:seq(1, length(SCCs)),
+ IntToSCC = lists:zip(Ints, SCCs),
+ IntScc = sofs:relation(IntToSCC, [{int, scc}]),
+ %% Create mapping from unique integers to SCCs:
+ ets:insert(MapsETS, IntToSCC),
+ %% Subsitute strong components for vertices in edges using the
+ %% unique numbers:
+ C2V = sofs:relation([{SC, V} || SC <- SCCs, V <- SC], [{scc, v}]),
+ I2V = sofs:relative_product(IntScc, C2V), % [{v, int}]
+ Es = sofs:relation(digraph:edges(G), [{v, v}]),
+ R1 = sofs:relative_product(I2V, Es),
+ R2 = sofs:relative_product(I2V, sofs:converse(R1)),
+ R2Strict = sofs:strict_relation(R2),
+ %% Create out-neighbours:
+ Out = sofs:relation_to_family(sofs:converse(R2Strict)),
+ ets:insert(OutETS, sofs:to_external(Out)),
+ %% Sort the SCCs topologically:
+ DG = sofs:family_to_digraph(Out),
+ lists:foreach(fun(I) -> digraph:add_vertex(DG, I) end, Ints),
+ SCCInts0 = digraph_utils:topsort(DG),
+ digraph:delete(DG),
+ %% The out-neighbors of a vertex are the vertices called directly.
+ %% The used vertices are to occur *before* the calling vertex:
+ SCCInts = lists:reverse(SCCInts0),
+ %% Create in-neighbours:
+ In = sofs:relation_to_family(R2Strict),
+ ets:insert(InETS, sofs:to_external(In)),
+ %% Create mapping from SCCs to unique integers:
+ ets:insert(MapsETS, lists:zip([{'scc', SCC} || SCC <- SCCs], Ints)),
+ lists:foreach(fun(E) -> true = ets:give_away(E, Parent, any)
+ end, [OutETS, InETS, MapsETS]),
+ exit({SCCInts, OutETS, InETS, MapsETS})
+ end.
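condensation/1 above now collapses the call graph into strongly connected components with sofs, numbers the SCCs, and stores the out-/in-neighbour relations in ETS tables that are given away to the caller. A minimal sketch, not part of the patch, of what is being computed, using digraph_utils instead of sofs; vertices a-d are invented:

    -module(condensation_demo).
    -export([demo/0]).

    %% Minimal sketch, not part of the patch.  It shows the semantics of the
    %% condensation via digraph_utils; the patch computes the same relations
    %% with sofs and keeps them in ETS tables so they can be shared between
    %% processes.
    demo() ->
        G = digraph:new(),
        [digraph:add_vertex(G, V) || V <- [a, b, c, d]],
        [digraph:add_edge(G, V1, V2) || {V1, V2} <- [{a, b}, {b, a}, {b, c}, {c, d}]],
        SCCs = digraph_utils:strong_components(G),        % e.g. [[d], [c], [a, b]]
        CG = digraph_utils:condensation(G),               % one vertex per SCC
        Order = lists:reverse(digraph_utils:topsort(CG)), % callees before callers
        digraph:delete(G),
        digraph:delete(CG),
        {SCCs, Order}.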
diff --git a/lib/dialyzer/src/dialyzer_cl.erl b/lib/dialyzer/src/dialyzer_cl.erl
index 55302d5869..0617be6435 100644
--- a/lib/dialyzer/src/dialyzer_cl.erl
+++ b/lib/dialyzer/src/dialyzer_cl.erl
@@ -1,8 +1,4 @@
%% -*- erlang-indent-level: 2 -*-
-%%-------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -15,9 +11,6 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
%%%-------------------------------------------------------------------
%%% File : dialyzer_cl.erl
@@ -36,7 +29,9 @@
-include_lib("kernel/include/file.hrl"). % needed for #file_info{}
-record(cl_state,
- {backend_pid :: pid(),
+ {backend_pid :: pid() | 'undefined',
+ code_server = none :: 'none'
+ | dialyzer_codeserver:codeserver(),
erlang_mode = false :: boolean(),
external_calls = [] :: [mfa()],
external_types = [] :: [mfa()],
@@ -49,8 +44,7 @@
plt_info = none :: 'none' | dialyzer_plt:plt_info(),
report_mode = normal :: rep_mode(),
return_status= ?RET_NOTHING_SUSPICIOUS :: dial_ret(),
- stored_warnings = [] :: [raw_warning()],
- unknown_behaviours = [] :: [dialyzer_behaviours:behaviour()]
+ stored_warnings = [] :: [raw_warning()]
}).
%%--------------------------------------------------------------------
@@ -83,7 +77,7 @@ init_opts_for_build(Opts) ->
[] -> Opts#options{output_plt = get_default_output_plt()};
[Plt] -> Opts#options{init_plts = [], output_plt = Plt};
Plts ->
- Msg = io_lib:format("Could not build multiple PLT files: ~s\n",
+ Msg = io_lib:format("Could not build multiple PLT files: ~ts\n",
[format_plts(Plts)]),
cl_error(Msg)
end;
@@ -105,7 +99,7 @@ init_opts_for_add(Opts) ->
init_plts = get_default_init_plt()};
[Plt] -> Opts#options{output_plt = Plt};
Plts ->
- Msg = io_lib:format("Could not add to multiple PLT files: ~s\n",
+ Msg = io_lib:format("Could not add to multiple PLT files: ~ts\n",
[format_plts(Plts)]),
cl_error(Msg)
end;
@@ -171,7 +165,7 @@ init_opts_for_remove(Opts) ->
init_plts = get_default_init_plt()};
[Plt] -> Opts#options{output_plt = Plt};
Plts ->
- Msg = io_lib:format("Could not remove from multiple PLT files: ~s\n",
+ Msg = io_lib:format("Could not remove from multiple PLT files: ~ts\n",
[format_plts(Plts)]),
cl_error(Msg)
end;
@@ -218,19 +212,19 @@ plt_common(#options{init_plts = [InitPlt]} = Opts, RemoveFiles, AddFiles) ->
do_analysis(AnalFiles, Opts, Plt, {Md5, ModDeps1})
end;
{error, no_such_file} ->
- Msg = io_lib:format("Could not find the PLT: ~s\n~s",
+ Msg = io_lib:format("Could not find the PLT: ~ts\n~s",
[InitPlt, default_plt_error_msg()]),
cl_error(Msg);
{error, not_valid} ->
- Msg = io_lib:format("The file: ~s is not a valid PLT file\n~s",
+ Msg = io_lib:format("The file: ~ts is not a valid PLT file\n~s",
[InitPlt, default_plt_error_msg()]),
cl_error(Msg);
{error, read_error} ->
- Msg = io_lib:format("Could not read the PLT: ~s\n~s",
+ Msg = io_lib:format("Could not read the PLT: ~ts\n~s",
[InitPlt, default_plt_error_msg()]),
cl_error(Msg);
{error, {no_file_to_remove, F}} ->
- Msg = io_lib:format("Could not remove the file ~s from the PLT: ~s\n",
+ Msg = io_lib:format("Could not remove the file ~ts from the PLT: ~ts\n",
[F, InitPlt]),
cl_error(Msg)
end.
@@ -270,7 +264,7 @@ report_check(#options{report_mode = ReportMode, init_plts = [InitPlt]}) ->
case ReportMode of
quiet -> ok;
_ ->
- io:format(" Checking whether the PLT ~s is up-to-date...", [InitPlt])
+ io:format(" Checking whether the PLT ~ts is up-to-date...", [InitPlt])
end.
report_old_version(#options{report_mode = ReportMode, init_plts = [InitPlt]}) ->
@@ -278,7 +272,7 @@ report_old_version(#options{report_mode = ReportMode, init_plts = [InitPlt]}) ->
quiet -> ok;
_ ->
io:put_chars(" no\n"),
- io:format(" (the PLT ~s was built with an old version of Dialyzer)\n",
+ io:format(" (the PLT ~ts was built with an old version of Dialyzer)\n",
[InitPlt])
end.
@@ -306,19 +300,19 @@ report_analysis_start(#options{analysis_type = Type,
plt_add ->
[InitPlt] = InitPlts,
case InitPlt =:= OutputPlt of
- true -> io:format("Adding information to ~s...", [OutputPlt]);
- false -> io:format("Adding information from ~s to ~s...",
+ true -> io:format("Adding information to ~ts...", [OutputPlt]);
+ false -> io:format("Adding information from ~ts to ~ts...",
[InitPlt, OutputPlt])
end;
plt_build ->
- io:format("Creating PLT ~s ...", [OutputPlt]);
+ io:format("Creating PLT ~ts ...", [OutputPlt]);
plt_check ->
- io:format("Rebuilding the information in ~s...", [OutputPlt]);
+ io:format("Rebuilding the information in ~ts...", [OutputPlt]);
plt_remove ->
[InitPlt] = InitPlts,
case InitPlt =:= OutputPlt of
- true -> io:format("Removing information from ~s...", [OutputPlt]);
- false -> io:format("Removing information from ~s to ~s...",
+ true -> io:format("Removing information from ~ts...", [OutputPlt]);
+ false -> io:format("Removing information from ~ts to ~ts...",
[InitPlt, OutputPlt])
end;
succ_typings -> io:format("Proceeding with analysis...")
@@ -426,7 +420,7 @@ assert_writable(PltFile) ->
case check_if_writable(PltFile) of
true -> ok;
false ->
- Msg = io_lib:format(" The PLT file ~s is not writable", [PltFile]),
+ Msg = io_lib:format(" The PLT file ~ts is not writable", [PltFile]),
cl_error(Msg)
end.
@@ -547,13 +541,13 @@ hc(Mod, Cache) ->
hc_cache(Mod) ->
CacheBase = cache_base_dir(),
- %% Use HiPE architecture and version in directory name, to avoid
- %% clashes between incompatible binaries.
+ %% Use HiPE architecture, version and erts checksum in directory name,
+ %% to avoid clashes between incompatible binaries.
HipeArchVersion =
lists:concat(
[erlang:system_info(hipe_architecture), "-",
hipe:version(), "-",
- hipe_bifs:system_crc()]),
+ hipe:erts_checksum()]),
CacheDir = filename:join(CacheBase, HipeArchVersion),
OrigBeamFile = code:which(Mod),
{ok, {Mod, <<Checksum:128>>}} = beam_lib:md5(OrigBeamFile),
@@ -602,9 +596,11 @@ init_output(State0, #options{output_file = OutFile,
false ->
case file:open(OutFile, [write]) of
{ok, File} ->
+ %% Warnings and errors can include Unicode characters.
+ ok = io:setopts(File, [{encoding, unicode}]),
State#cl_state{output = File};
{error, Reason} ->
- Msg = io_lib:format("Could not open output file ~p, Reason: ~p\n",
+ Msg = io_lib:format("Could not open output file ~tp, Reason: ~p\n",
[OutFile, Reason]),
cl_error(State, lists:flatten(Msg))
end
@@ -638,8 +634,8 @@ cl_loop(State, LogCache) ->
{BackendPid, warnings, Warnings} ->
NewState = store_warnings(State, Warnings),
cl_loop(NewState, LogCache);
- {BackendPid, unknown_behaviours, Behaviours} ->
- NewState = store_unknown_behaviours(State, Behaviours),
+ {BackendPid, cserver, CodeServer, _Plt} -> % Plt is ignored
+ NewState = State#cl_state{code_server = CodeServer},
cl_loop(NewState, LogCache);
{BackendPid, done, NewPlt, _NewDocPlt} ->
return_value(State, NewPlt);
@@ -683,11 +679,6 @@ format_log_cache(LogCache) ->
store_warnings(#cl_state{stored_warnings = StoredWarnings} = St, Warnings) ->
St#cl_state{stored_warnings = StoredWarnings ++ Warnings}.
--spec store_unknown_behaviours(#cl_state{}, [dialyzer_behaviours:behaviour()]) -> #cl_state{}.
-
-store_unknown_behaviours(#cl_state{unknown_behaviours = Behs} = St, Beh) ->
- St#cl_state{unknown_behaviours = Beh ++ Behs}.
-
-spec cl_error(string()) -> no_return().
cl_error(Msg) ->
@@ -698,20 +689,30 @@ cl_error(Msg) ->
cl_error(State, Msg) ->
case State#cl_state.output of
standard_io -> ok;
- Outfile -> io:format(Outfile, "\n~s\n", [Msg])
+ Outfile -> io:format(Outfile, "\n~ts\n", [Msg])
end,
maybe_close_output_file(State),
throw({dialyzer_error, lists:flatten(Msg)}).
-return_value(State = #cl_state{erlang_mode = ErlangMode,
+return_value(State = #cl_state{code_server = CodeServer,
+ erlang_mode = ErlangMode,
mod_deps = ModDeps,
output_plt = OutputPlt,
plt_info = PltInfo,
stored_warnings = StoredWarnings},
Plt) ->
+ %% Just for now:
+ case CodeServer =:= none of
+ true ->
+ ok;
+ false ->
+ dialyzer_codeserver:delete(CodeServer)
+ end,
case OutputPlt =:= none of
- true -> ok;
- false -> dialyzer_plt:to_file(OutputPlt, Plt, ModDeps, PltInfo)
+ true ->
+ dialyzer_plt:delete(Plt);
+ false ->
+ dialyzer_plt:to_file(OutputPlt, Plt, ModDeps, PltInfo)
end,
UnknownWarnings = unknown_warnings(State),
RetValue =
@@ -724,7 +725,6 @@ return_value(State = #cl_state{erlang_mode = ErlangMode,
print_warnings(State),
print_ext_calls(State),
print_ext_types(State),
- print_unknown_behaviours(State),
maybe_close_output_file(State),
{RetValue, []};
true ->
@@ -737,8 +737,7 @@ unknown_warnings(State = #cl_state{legal_warnings = LegalWarnings}) ->
Unknown = case ordsets:is_element(?WARN_UNKNOWN, LegalWarnings) of
true ->
unknown_functions(State) ++
- unknown_types(State) ++
- unknown_behaviours(State);
+ unknown_types(State);
false -> []
end,
WarningInfo = {_Filename = "", _Line = 0, _MorMFA = ''},
@@ -776,7 +775,7 @@ print_ext_calls(#cl_state{output = Output,
end.
do_print_ext_calls(Output, [{M,F,A}|T], Before) ->
- io:format(Output, "~s~p:~p/~p\n", [Before,M,F,A]),
+ io:format(Output, "~s~tp:~tp/~p\n", [Before,M,F,A]),
do_print_ext_calls(Output, T, Before);
do_print_ext_calls(_, [], _) ->
ok.
@@ -809,53 +808,11 @@ print_ext_types(#cl_state{output = Output,
end.
do_print_ext_types(Output, [{M,F,A}|T], Before) ->
- io:format(Output, "~s~p:~p/~p\n", [Before,M,F,A]),
+ io:format(Output, "~s~tp:~tp/~p\n", [Before,M,F,A]),
do_print_ext_types(Output, T, Before);
do_print_ext_types(_, [], _) ->
ok.
-unknown_behaviours(#cl_state{unknown_behaviours = DupBehaviours,
- legal_warnings = LegalWarnings}) ->
- case ordsets:is_element(?WARN_BEHAVIOUR, LegalWarnings) of
- false -> [];
- true ->
- Behaviours = lists:usort(DupBehaviours),
- [{unknown_behaviour, B} || B <- Behaviours]
- end.
-
-%%print_unknown_behaviours(#cl_state{report_mode = quiet}) ->
-%% ok;
-print_unknown_behaviours(#cl_state{output = Output,
- external_calls = Calls,
- external_types = Types,
- stored_warnings = Warnings,
- unknown_behaviours = DupBehaviours,
- legal_warnings = LegalWarnings,
- output_format = Format}) ->
- case ordsets:is_element(?WARN_BEHAVIOUR, LegalWarnings)
- andalso DupBehaviours =/= [] of
- false -> ok;
- true ->
- Behaviours = lists:usort(DupBehaviours),
- case Warnings =:= [] andalso Calls =:= [] andalso Types =:= [] of
- true -> io:nl(Output); %% Need to do a newline first
- false -> ok
- end,
- {Prompt, Prefix} =
- case Format of
- formatted -> {"Unknown behaviours:\n"," "};
- raw -> {"%% Unknown behaviours:\n","%% "}
- end,
- io:put_chars(Output, Prompt),
- do_print_unknown_behaviours(Output, Behaviours, Prefix)
- end.
-
-do_print_unknown_behaviours(Output, [B|T], Before) ->
- io:format(Output, "~s~p\n", [Before,B]),
- do_print_unknown_behaviours(Output, T, Before);
-do_print_unknown_behaviours(_, [], _) ->
- ok.
-
print_warnings(#cl_state{stored_warnings = []}) ->
ok;
print_warnings(#cl_state{output = Output,
@@ -870,10 +827,10 @@ print_warnings(#cl_state{output = Output,
formatted ->
[dialyzer:format_warning(W, FOpt) || W <- PrWarnings];
raw ->
- [io_lib:format("~p. \n",
+ [io_lib:format("~tp. \n",
[W]) || W <- set_warning_id(PrWarnings)]
end,
- io:format(Output, "\n~s", [S])
+ io:format(Output, "\n~ts", [S])
end.
-spec process_warnings([raw_warning()]) -> [raw_warning()].
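init_output/2 above sets {encoding, unicode} on the output file so that warnings printed with ~ts can contain code points above 255. A minimal sketch, not part of the patch; the module and function names are invented:

    -module(out_enc_demo).
    -export([write_warning/2]).

    %% Minimal sketch, not part of the patch.  A file opened with
    %% file:open/2 defaults to latin1, so the encoding must be switched
    %% to unicode before writing warning text that may contain code
    %% points above 255.
    -spec write_warning(file:filename(), unicode:chardata()) -> ok.
    write_warning(OutFile, Warning) ->
        {ok, Fd} = file:open(OutFile, [write]),
        ok = io:setopts(Fd, [{encoding, unicode}]),  % accept Unicode output
        io:format(Fd, "~ts\n", [Warning]),
        ok = file:close(Fd).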
diff --git a/lib/dialyzer/src/dialyzer_cl_parse.erl b/lib/dialyzer/src/dialyzer_cl_parse.erl
index 934351aeeb..a456d38e64 100644
--- a/lib/dialyzer/src/dialyzer_cl_parse.erl
+++ b/lib/dialyzer/src/dialyzer_cl_parse.erl
@@ -1,8 +1,4 @@
%% -*- erlang-indent-level: 2 -*-
-%%-----------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -15,9 +11,6 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-module(dialyzer_cl_parse).
@@ -44,11 +37,12 @@ start() ->
init(),
Args = init:get_plain_arguments(),
try
- cl(Args)
+ Ret = cl(Args),
+ Ret
catch
throw:{dialyzer_cl_parse_error, Msg} -> {error, Msg};
_:R ->
- Msg = io_lib:format("~p\n~p\n", [R, erlang:get_stacktrace()]),
+ Msg = io_lib:format("~tp\n~tp\n", [R, erlang:get_stacktrace()]),
{error, lists:flatten(Msg)}
end.
@@ -510,7 +504,7 @@ warning_options_msg() ->
-Wno_match
Suppress warnings for patterns that are unused or cannot match.
-Wno_opaque
- Suppress warnings for violations of opaqueness of data types.
+ Suppress warnings for violations of opacity of data types.
-Wno_fail_call
Suppress warnings for failing calls.
-Wno_contracts
diff --git a/lib/dialyzer/src/dialyzer_codeserver.erl b/lib/dialyzer/src/dialyzer_codeserver.erl
index 978ecd3843..5587cf2bdf 100644
--- a/lib/dialyzer/src/dialyzer_codeserver.erl
+++ b/lib/dialyzer/src/dialyzer_codeserver.erl
@@ -1,8 +1,4 @@
%% -*- erlang-indent-level: 2 -*-
-%%-----------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -15,9 +11,6 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
%%%-------------------------------------------------------------------
%%% File : dialyzer_codeserver.erl
@@ -29,18 +22,25 @@
-module(dialyzer_codeserver).
-export([delete/1,
- finalize_contracts/3,
+ store_temp_contracts/4,
+ give_away/2,
+ finalize_contracts/1,
finalize_exported_types/2,
- finalize_records/2,
+ finalize_records/1,
get_contracts/1,
get_callbacks/1,
get_exported_types/1,
+ extract_exported_types/1,
get_exports/1,
- get_records/1,
+ get_records_table/1,
+ extract_records/1,
get_next_core_label/1,
- get_temp_contracts/1,
+ get_temp_contracts/2,
+ all_temp_modules/1,
+ store_contracts/4,
get_temp_exported_types/1,
- get_temp_records/1,
+ get_temp_records_table/1,
+ lookup_temp_mod_records/2,
insert/3,
insert_exports/2,
insert_temp_exported_types/2,
@@ -48,30 +48,29 @@
is_exported/2,
lookup_mod_code/2,
lookup_mfa_code/2,
+ lookup_mfa_var_label/2,
lookup_mod_records/2,
lookup_mod_contracts/2,
lookup_mfa_contract/2,
lookup_meta_info/2,
new/0,
set_next_core_label/2,
- set_temp_records/2,
store_temp_records/3,
- store_temp_contracts/4]).
+ translate_fake_file/3]).
--export_type([codeserver/0, fun_meta_info/0]).
+-export_type([codeserver/0, fun_meta_info/0, contracts/0]).
-include("dialyzer.hrl").
%%--------------------------------------------------------------------
-type dict_ets() :: ets:tid().
+-type map_ets() :: ets:tid().
-type set_ets() :: ets:tid().
-type types() :: erl_types:type_table().
--type mod_records() :: dict:dict(module(), types()).
--type contracts() :: dict:dict(mfa(),dialyzer_contracts:file_contract()).
--type mod_contracts() :: dict:dict(module(), contracts()).
+-type contracts() :: #{mfa() => dialyzer_contracts:file_contract()}.
%% A property-list of data compiled from -compile and -dialyzer attributes.
-type meta_info() :: [{{'nowarn_function' | dial_warn_tag()},
@@ -81,16 +80,16 @@
-record(codeserver, {next_core_label = 0 :: label(),
code :: dict_ets(),
- exported_types :: set_ets(), % set(mfa())
- records :: dict_ets(),
- contracts :: dict_ets(),
- callbacks :: dict_ets(),
+ exported_types :: 'clean' | set_ets(), % set(mfa())
+ records :: 'clean' | map_ets(),
+ contracts :: map_ets(),
+ callbacks :: map_ets(),
fun_meta_info :: dict_ets(), % {mfa(), meta_info()}
exports :: 'clean' | set_ets(), % set(mfa())
temp_exported_types :: 'clean' | set_ets(), % set(mfa())
- temp_records :: 'clean' | dict_ets(),
- temp_contracts :: 'clean' | dict_ets(),
- temp_callbacks :: 'clean' | dict_ets()
+ temp_records :: 'clean' | map_ets(),
+ temp_contracts :: 'clean' | map_ets(),
+ temp_callbacks :: 'clean' | map_ets()
}).
-opaque codeserver() :: #codeserver{}.
@@ -104,13 +103,10 @@ ets_dict_find(Key, Table) ->
_:_ -> error
end.
-ets_dict_store(Key, Element, Table) ->
+ets_map_store(Key, Element, Table) ->
true = ets:insert(Table, {Key, Element}),
Table.
-ets_dict_store_dict(Dict, Table) ->
- true = ets:insert(Table, dict:to_list(Dict)).
-
ets_dict_to_dict(Table) ->
Fold = fun({Key,Value}, Dict) -> dict:store(Key, Value, Dict) end,
ets:foldl(Fold, dict:new(), Table).
@@ -128,9 +124,6 @@ ets_set_to_set(Table) ->
Fold = fun({E}, Set) -> sets:add_element(E, Set) end,
ets:foldl(Fold, sets:new(), Table).
-ets_read_concurrent_table(Name) ->
- ets:new(Name, [{read_concurrency, true}]).
-
%%--------------------------------------------------------------------
-spec new() -> codeserver().
@@ -138,6 +131,13 @@ ets_read_concurrent_table(Name) ->
new() ->
CodeOptions = [compressed, public, {read_concurrency, true}],
Code = ets:new(dialyzer_codeserver_code, CodeOptions),
+ ReadOptions = [compressed, {read_concurrency, true}],
+ [Contracts, Callbacks, Records, ExportedTypes] =
+ [ets:new(Name, ReadOptions) ||
+ Name <- [dialyzer_codeserver_contracts,
+ dialyzer_codeserver_callbacks,
+ dialyzer_codeserver_records,
+ dialyzer_codeserver_exported_types]],
TempOptions = [public, {write_concurrency, true}],
[Exports, FunMetaInfo, TempExportedTypes, TempRecords, TempContracts,
TempCallbacks] =
@@ -150,6 +150,10 @@ new() ->
#codeserver{code = Code,
exports = Exports,
fun_meta_info = FunMetaInfo,
+ exported_types = ExportedTypes,
+ records = Records,
+ contracts = Contracts,
+ callbacks = Callbacks,
temp_exported_types = TempExportedTypes,
temp_records = TempRecords,
temp_contracts = TempContracts,
@@ -157,11 +161,8 @@ new() ->
-spec delete(codeserver()) -> 'ok'.
-delete(#codeserver{code = Code, exported_types = ExportedTypes,
- records = Records, contracts = Contracts,
- callbacks = Callbacks}) ->
- lists:foreach(fun ets:delete/1,
- [Code, ExportedTypes, Records, Contracts, Callbacks]).
+delete(CServer) ->
+ lists:foreach(fun(Table) -> true = ets:delete(Table) end, tables(CServer)).
-spec insert(atom(), cerl:c_module(), codeserver()) -> codeserver().
@@ -170,13 +171,15 @@ insert(Mod, ModCode, CS) ->
Exports = cerl:module_exports(ModCode),
Attrs = cerl:module_attrs(ModCode),
Defs = cerl:module_defs(ModCode),
+ {Files, SmallDefs} = compress_file_anno(Defs),
As = cerl:get_ann(ModCode),
Funs =
[{{Mod, cerl:fname_id(Var), cerl:fname_arity(Var)},
- Val} || Val = {Var, _Fun} <- Defs],
- Keys = [Key || {Key, _Value} <- Funs],
+ Val, {Var, cerl_trees:get_label(Fun)}} || Val = {Var, Fun} <- SmallDefs],
+ Keys = [Key || {Key, _Value, _Label} <- Funs],
ModEntry = {Mod, {Name, Exports, Attrs, Keys, As}},
- true = ets:insert(CS#codeserver.code, [ModEntry|Funs]),
+ ModFileEntry = {{mod, Mod}, Files},
+ true = ets:insert(CS#codeserver.code, [ModEntry, ModFileEntry|Funs]),
CS.
-spec get_temp_exported_types(codeserver()) -> sets:set(mfa()).
@@ -213,6 +216,11 @@ is_exported(MFA, #codeserver{exports = Exports}) ->
get_exported_types(#codeserver{exported_types = ExpTypes}) ->
ets_set_to_set(ExpTypes).
+-spec extract_exported_types(codeserver()) -> {codeserver(), set_ets()}.
+
+extract_exported_types(#codeserver{exported_types = ExpTypes} = CS) ->
+ {CS#codeserver{exported_types = 'clean'}, ExpTypes}.
+
-spec get_exports(codeserver()) -> sets:set(mfa()).
get_exports(#codeserver{exports = Exports}) ->
@@ -220,12 +228,12 @@ get_exports(#codeserver{exports = Exports}) ->
-spec finalize_exported_types(sets:set(mfa()), codeserver()) -> codeserver().
-finalize_exported_types(Set, CS) ->
- ExportedTypes = ets_read_concurrent_table(dialyzer_codeserver_exported_types),
+finalize_exported_types(Set,
+ #codeserver{exported_types = ExportedTypes,
+ temp_exported_types = TempETypes} = CS) ->
true = ets_set_insert_set(Set, ExportedTypes),
- TempExpTypes = CS#codeserver.temp_exported_types,
- true = ets:delete(TempExpTypes),
- CS#codeserver{exported_types = ExportedTypes, temp_exported_types = clean}.
+ true = ets:delete(TempETypes),
+ CS#codeserver{temp_exported_types = clean}.
-spec lookup_mod_code(atom(), codeserver()) -> cerl:c_module().
@@ -237,6 +245,11 @@ lookup_mod_code(Mod, CS) when is_atom(Mod) ->
lookup_mfa_code({_M, _F, _A} = MFA, CS) ->
table__lookup(CS#codeserver.code, MFA).
+-spec lookup_mfa_var_label(mfa(), codeserver()) -> {cerl:c_var(), label()}.
+
+lookup_mfa_var_label({_M, _F, _A} = MFA, CS) ->
+ ets:lookup_element(CS#codeserver.code, MFA, 3).
+
-spec get_next_core_label(codeserver()) -> label().
get_next_core_label(#codeserver{next_core_label = NCL}) ->
@@ -251,53 +264,78 @@ set_next_core_label(NCL, CS) ->
lookup_mod_records(Mod, #codeserver{records = RecDict}) when is_atom(Mod) ->
case ets_dict_find(Mod, RecDict) of
- error -> dict:new();
- {ok, Dict} -> Dict
+ error -> maps:new();
+ {ok, Map} -> Map
end.
--spec get_records(codeserver()) -> mod_records().
+-spec get_records_table(codeserver()) -> map_ets().
+
+get_records_table(#codeserver{records = RecDict}) ->
+ RecDict.
+
+-spec extract_records(codeserver()) -> {codeserver(), map_ets()}.
-get_records(#codeserver{records = RecDict}) ->
- ets_dict_to_dict(RecDict).
+extract_records(#codeserver{records = RecDict} = CS) ->
+ {CS#codeserver{records = clean}, RecDict}.
-spec store_temp_records(module(), types(), codeserver()) -> codeserver().
-store_temp_records(Mod, Dict, #codeserver{temp_records = TempRecDict} = CS)
+store_temp_records(Mod, Map, #codeserver{temp_records = TempRecDict} = CS)
when is_atom(Mod) ->
- case dict:size(Dict) =:= 0 of
+ case maps:size(Map) =:= 0 of
true -> CS;
- false -> CS#codeserver{temp_records = ets_dict_store(Mod, Dict, TempRecDict)}
+ false -> CS#codeserver{temp_records = ets_map_store(Mod, Map, TempRecDict)}
end.
--spec get_temp_records(codeserver()) -> mod_records().
+-spec get_temp_records_table(codeserver()) -> map_ets().
-get_temp_records(#codeserver{temp_records = TempRecDict}) ->
- ets_dict_to_dict(TempRecDict).
+get_temp_records_table(#codeserver{temp_records = TempRecDict}) ->
+ TempRecDict.
--spec set_temp_records(mod_records(), codeserver()) -> codeserver().
+-spec lookup_temp_mod_records(module(), codeserver()) -> types().
-set_temp_records(Dict, CS) ->
- true = ets:delete(CS#codeserver.temp_records),
- TempRecords = ets:new(dialyzer_codeserver_temp_records,[]),
- true = ets_dict_store_dict(Dict, TempRecords),
- CS#codeserver{temp_records = TempRecords}.
-
--spec finalize_records(mod_records(), codeserver()) -> codeserver().
+lookup_temp_mod_records(Mod, #codeserver{temp_records = TempRecDict}) ->
+ case ets_dict_find(Mod, TempRecDict) of
+ error -> maps:new();
+ {ok, Map} -> Map
+ end.
-finalize_records(Dict, CS) ->
- true = ets:delete(CS#codeserver.temp_records),
- Records = ets_read_concurrent_table(dialyzer_codeserver_records),
- true = ets_dict_store_dict(Dict, Records),
- CS#codeserver{records = Records, temp_records = clean}.
+-spec finalize_records(codeserver()) -> codeserver().
+
+finalize_records(#codeserver{temp_records = TmpRecords,
+ records = Records} = CS) ->
+ %% The annotations of the abstract code are reset as they are no
+ %% longer needed, which makes the ETS table compression better.
+ A0 = erl_anno:new(0),
+ AFun = fun(_) -> A0 end,
+ FFun = fun({F, Abs, Type}) ->
+ NewAbs = erl_parse:map_anno(AFun, Abs),
+ {F, NewAbs, Type}
+ end,
+ ArFun = fun({Arity, Fields}) -> {Arity, lists:map(FFun, Fields)} end,
+ List = dialyzer_utils:ets_tab2list(TmpRecords),
+ true = ets:delete(TmpRecords),
+ Fun = fun({Mod, Map}) ->
+ MFun =
+ fun({record, _}, {FileLine, ArityFields}) ->
+ {FileLine, lists:map(ArFun, ArityFields)};
+ (_, {{M, FileLine, Abs, Args}, Type}) ->
+ {{M, FileLine, erl_parse:map_anno(AFun, Abs), Args}, Type}
+ end,
+ {Mod, maps:map(MFun, Map)}
+ end,
+ NewList = lists:map(Fun, List),
+ true = ets:insert(Records, NewList),
+ CS#codeserver{temp_records = clean}.
-spec lookup_mod_contracts(atom(), codeserver()) -> contracts().
lookup_mod_contracts(Mod, #codeserver{contracts = ContDict})
when is_atom(Mod) ->
case ets_dict_find(Mod, ContDict) of
- error -> dict:new();
+ error -> maps:new();
{ok, Keys} ->
- dict:from_list([get_file_contract(Key, ContDict)|| Key <- Keys])
+ maps:from_list([get_file_contract(Key, ContDict)|| Key <- Keys])
end.
get_file_contract(Key, ContDict) ->
@@ -317,10 +355,13 @@ lookup_meta_info(MorMFA, #codeserver{fun_meta_info = FunMetaInfo}) ->
{ok, PropList} -> PropList
end.
--spec get_contracts(codeserver()) -> mod_contracts().
+-spec get_contracts(codeserver()) ->
+ dict:dict(mfa(), dialyzer_contracts:file_contract()).
get_contracts(#codeserver{contracts = ContDict}) ->
- ets_dict_to_dict(ContDict).
+ dict:filter(fun({_M, _F, _A}, _) -> true;
+ (_, _) -> false
+ end, ets_dict_to_dict(ContDict)).
-spec get_callbacks(codeserver()) -> list().
@@ -330,48 +371,79 @@ get_callbacks(#codeserver{callbacks = CallbDict}) ->
-spec store_temp_contracts(module(), contracts(), contracts(), codeserver()) ->
codeserver().
-store_temp_contracts(Mod, SpecDict, CallbackDict,
+store_temp_contracts(Mod, SpecMap, CallbackMap,
#codeserver{temp_contracts = Cn,
temp_callbacks = Cb} = CS)
when is_atom(Mod) ->
- CS1 =
- case dict:size(SpecDict) =:= 0 of
- true -> CS;
- false ->
- CS#codeserver{temp_contracts = ets_dict_store(Mod, SpecDict, Cn)}
- end,
- case dict:size(CallbackDict) =:= 0 of
- true -> CS1;
- false ->
- CS1#codeserver{temp_callbacks = ets_dict_store(Mod, CallbackDict, Cb)}
- end.
-
--spec get_temp_contracts(codeserver()) -> {mod_contracts(), mod_contracts()}.
+ %% Make sure Mod is stored even if there are no callbacks or
+ %% contracts.
+ CS1 = CS#codeserver{temp_contracts = ets_map_store(Mod, SpecMap, Cn)},
+ CS1#codeserver{temp_callbacks = ets_map_store(Mod, CallbackMap, Cb)}.
-get_temp_contracts(#codeserver{temp_contracts = TempContDict,
- temp_callbacks = TempCallDict}) ->
- {ets_dict_to_dict(TempContDict), ets_dict_to_dict(TempCallDict)}.
+-spec all_temp_modules(codeserver()) -> [module()].
--spec finalize_contracts(mod_contracts(), mod_contracts(), codeserver()) ->
- codeserver().
+all_temp_modules(#codeserver{temp_contracts = TempContTable}) ->
+ ets:select(TempContTable, [{{'$1', '$2'}, [], ['$1']}]).
-finalize_contracts(SpecDict, CallbackDict, CS) ->
- Contracts = ets_read_concurrent_table(dialyzer_codeserver_contracts),
- Callbacks = ets_read_concurrent_table(dialyzer_codeserver_callbacks),
- Contracts = dict:fold(fun decompose_spec_dict/3, Contracts, SpecDict),
- Callbacks = dict:fold(fun decompose_cb_dict/3, Callbacks, CallbackDict),
- CS#codeserver{contracts = Contracts, callbacks = Callbacks,
- temp_contracts = clean, temp_callbacks = clean}.
+-spec store_contracts(module(), contracts(), contracts(), codeserver()) ->
+ codeserver().
-decompose_spec_dict(Mod, Dict, Table) ->
- Keys = dict:fetch_keys(Dict),
- true = ets:insert(Table, dict:to_list(Dict)),
- true = ets:insert(Table, {Mod, Keys}),
- Table.
+store_contracts(Mod, SpecMap, CallbackMap, CS) ->
+ #codeserver{contracts = SpecDict, callbacks = CallbackDict} = CS,
+ Keys = maps:keys(SpecMap),
+ true = ets:insert(SpecDict, maps:to_list(SpecMap)),
+ true = ets:insert(SpecDict, {Mod, Keys}),
+ true = ets:insert(CallbackDict, maps:to_list(CallbackMap)),
+ CS.
-decompose_cb_dict(_Mod, Dict, Table) ->
- true = ets:insert(Table, dict:to_list(Dict)),
- Table.
+-spec get_temp_contracts(module(), codeserver()) ->
+ {contracts(), contracts()}.
+
+get_temp_contracts(Mod, #codeserver{temp_contracts = TempContDict,
+ temp_callbacks = TempCallDict}) ->
+ [{Mod, Contracts}] = ets:lookup(TempContDict, Mod),
+ true = ets:delete(TempContDict, Mod),
+ [{Mod, Callbacks}] = ets:lookup(TempCallDict, Mod),
+ true = ets:delete(TempCallDict, Mod),
+ {Contracts, Callbacks}.
+
+-spec give_away(codeserver(), pid()) -> 'ok'.
+
+give_away(CServer, Pid) ->
+ lists:foreach(fun(Table) -> true = ets:give_away(Table, Pid, any)
+ end, tables(CServer)).
+
+tables(#codeserver{code = Code,
+ fun_meta_info = FunMetaInfo,
+ exports = Exports,
+ temp_exported_types = TempExpTypes,
+ temp_records = TempRecords,
+ temp_contracts = TempContracts,
+ temp_callbacks = TempCallbacks,
+ exported_types = ExportedTypes,
+ records = Records,
+ contracts = Contracts,
+ callbacks = Callbacks}) ->
+ [Table || Table <- [Code, FunMetaInfo, Exports, TempExpTypes,
+ TempRecords, TempContracts, TempCallbacks,
+ ExportedTypes, Records, Contracts, Callbacks],
+ Table =/= clean].
+
+-spec finalize_contracts(codeserver()) -> codeserver().
+
+finalize_contracts(#codeserver{temp_contracts = TempContDict,
+ temp_callbacks = TempCallDict} = CS) ->
+ true = ets:delete(TempContDict),
+ true = ets:delete(TempCallDict),
+ CS#codeserver{temp_contracts = clean, temp_callbacks = clean}.
+
+-spec translate_fake_file(codeserver(), module(), file:filename()) ->
+ file:filename().
+
+translate_fake_file(#codeserver{code = Code}, Module, FakeFile) ->
+ Files = ets:lookup_element(Code, {mod, Module}, 2),
+ {FakeFile, File} = lists:keyfind(FakeFile, 1, Files),
+ File.
table__lookup(TablePid, M) when is_atom(M) ->
{Name, Exports, Attrs, Keys, As} = ets:lookup_element(TablePid, M, 2),
@@ -379,3 +451,25 @@ table__lookup(TablePid, M) when is_atom(M) ->
cerl:ann_c_module(As, Name, Exports, Attrs, Defs);
table__lookup(TablePid, MFA) ->
ets:lookup_element(TablePid, MFA, 2).
+
+compress_file_anno(Term) ->
+ {Files, SmallTerm} = compress_file_anno(Term, []),
+ {[{FakeFile, File} || {File, {file, FakeFile}} <- Files], SmallTerm}.
+
+compress_file_anno({file, F}, Fs) when is_list(F) ->
+ case lists:keyfind(F, 1, Fs) of
+ false ->
+ I = integer_to_list(length(Fs)),
+ FileI = {file, I},
+ NFs = [{F, FileI}|Fs],
+ {NFs, FileI};
+ {F, FileI} -> {Fs, FileI}
+ end;
+compress_file_anno(T, Fs) when is_tuple(T) ->
+ {NFs, NL} = compress_file_anno(tuple_to_list(T), Fs),
+ {NFs, list_to_tuple(NL)};
+compress_file_anno([E|L], Fs) ->
+ {Fs1, NE} = compress_file_anno(E, Fs),
+ {NFs, NL} = compress_file_anno(L, Fs1),
+ {NFs, [NE|NL]};
+compress_file_anno(T, Fs) -> {Fs, T}.
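
The give_away/2 and tables/1 functions added above hand every live ETS table of the code server over to another process, skipping fields already set to 'clean'. They build on the standard ets:give_away/3 ownership transfer; the following is a minimal stand-alone sketch of that mechanism, not part of the patch, with a made-up module and table name.

    -module(give_away_sketch).
    -export([run/0]).

    %% Transfer an ETS table to a fresh process: the receiving process gets
    %% an 'ETS-TRANSFER' message carrying the table id, the old owner's pid
    %% and the gift data passed to ets:give_away/3.
    run() ->
        Tab = ets:new(sketch_tab, [public]),
        true = ets:insert(Tab, {key, value}),
        NewOwner = spawn(fun() ->
                             receive
                                 {'ETS-TRANSFER', T, _FromPid, _GiftData} ->
                                     [{key, value}] = ets:lookup(T, key)
                             end
                         end),
        true = ets:give_away(Tab, NewOwner, any),
        ok.
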
diff --git a/lib/dialyzer/src/dialyzer_contracts.erl b/lib/dialyzer/src/dialyzer_contracts.erl
index 0c29d524b5..b554ebc2cc 100644
--- a/lib/dialyzer/src/dialyzer_contracts.erl
+++ b/lib/dialyzer/src/dialyzer_contracts.erl
@@ -1,8 +1,4 @@
%% -*- erlang-indent-level: 2 -*-
-%%-----------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -15,9 +11,6 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-module(dialyzer_contracts).
@@ -25,7 +18,7 @@
check_contracts/4,
contracts_without_fun/3,
contract_to_string/1,
- get_invalid_contract_warnings/4,
+ get_invalid_contract_warnings/3,
get_contract_args/1,
get_contract_return/1,
get_contract_return/2,
@@ -46,14 +39,16 @@
-type file_contract() :: {file_line(), #contract{}, Extra :: [_]}.
--type plt_contracts() :: [{mfa(), #contract{}}]. % actually, an orddict()
+-type plt_contracts() :: orddict:orddict(mfa(), #contract{}).
%%-----------------------------------------------------------------------
%% Internal record for contracts whose components have not been processed
%% to expand records and/or remote types that they might contain.
%%-----------------------------------------------------------------------
--type tmp_contract_fun() :: fun((sets:set(mfa()), types()) -> contract_pair()).
+-type cache() :: ets:tid().
+-type tmp_contract_fun() ::
+ fun((sets:set(mfa()), types(), cache()) -> contract_pair()).
-record(tmp_contract, {contract_funs = [] :: [tmp_contract_fun()],
forms = [] :: [{_, _}]}).
@@ -126,58 +121,72 @@ butlast([H|T]) -> [H|butlast(T)].
constraints_to_string([]) ->
"";
-constraints_to_string([{type, _, constraint, [{atom, _, What}, Types]}]) ->
- atom_to_list(What) ++ "(" ++
- sequence([erl_types:t_form_to_string(T) || T <- Types], ",") ++ ")";
constraints_to_string([{type, _, constraint, [{atom, _, What}, Types]}|Rest]) ->
- atom_to_list(What) ++ "(" ++
- sequence([erl_types:t_form_to_string(T) || T <- Types], ",")
- ++ "), " ++ constraints_to_string(Rest).
+ S = constraint_to_string(What, Types),
+ case Rest of
+ [] -> S;
+ _ -> S ++ ", " ++ constraints_to_string(Rest)
+ end.
+
+constraint_to_string(is_subtype, [{var, _, Var}, T]) ->
+ atom_to_list(Var) ++ " :: " ++ erl_types:t_form_to_string(T);
+constraint_to_string(What, Types) ->
+ atom_to_list(What) ++ "("
+ ++ sequence([erl_types:t_form_to_string(T) || T <- Types], ",")
+ ++ ")".
sequence([], _Delimiter) -> "";
sequence([H], _Delimiter) -> H;
sequence([H|T], Delimiter) -> H ++ Delimiter ++ sequence(T, Delimiter).
-spec process_contract_remote_types(dialyzer_codeserver:codeserver()) ->
- dialyzer_codeserver:codeserver().
+ dialyzer_codeserver:codeserver().
process_contract_remote_types(CodeServer) ->
- {TmpContractDict, TmpCallbackDict} =
- dialyzer_codeserver:get_temp_contracts(CodeServer),
+ Mods = dialyzer_codeserver:all_temp_modules(CodeServer),
+ RecordTable = dialyzer_codeserver:get_records_table(CodeServer),
ExpTypes = dialyzer_codeserver:get_exported_types(CodeServer),
- RecordDict = dialyzer_codeserver:get_records(CodeServer),
ContractFun =
- fun({_M, _F, _A}, {File, #tmp_contract{contract_funs = CFuns, forms = Forms}, Xtra}) ->
- NewCs = [CFun(ExpTypes, RecordDict) || CFun <- CFuns],
- Args = general_domain(NewCs),
- {File, #contract{contracts = NewCs, args = Args, forms = Forms}, Xtra}
+ fun({{_M, _F, _A}=MFA, {File, TmpContract, Xtra}}, C0) ->
+ #tmp_contract{contract_funs = CFuns, forms = Forms} = TmpContract,
+ {NewCs, C2} = lists:mapfoldl(fun(CFun, C1) ->
+ CFun(ExpTypes, RecordTable, C1)
+ end, C0, CFuns),
+ Args = general_domain(NewCs),
+ Contract = #contract{contracts = NewCs, args = Args, forms = Forms},
+ {{MFA, {File, Contract, Xtra}}, C2}
end,
ModuleFun =
- fun(_ModuleName, ContractDict) ->
- dict:map(ContractFun, ContractDict)
+ fun(ModuleName) ->
+ Cache = erl_types:cache__new(),
+ {ContractMap, CallbackMap} =
+ dialyzer_codeserver:get_temp_contracts(ModuleName, CodeServer),
+ {NewContractList, Cache1} =
+ lists:mapfoldl(ContractFun, Cache, maps:to_list(ContractMap)),
+ {NewCallbackList, _NewCache} =
+ lists:mapfoldl(ContractFun, Cache1, maps:to_list(CallbackMap)),
+ dialyzer_codeserver:store_contracts(ModuleName,
+ maps:from_list(NewContractList),
+ maps:from_list(NewCallbackList),
+ CodeServer)
end,
- NewContractDict = dict:map(ModuleFun, TmpContractDict),
- NewCallbackDict = dict:map(ModuleFun, TmpCallbackDict),
- dialyzer_codeserver:finalize_contracts(NewContractDict, NewCallbackDict,
- CodeServer).
-
--type opaques() :: [erl_types:erl_type()] | 'universe'.
--type opaques_fun() :: fun((module()) -> opaques()).
+ lists:foreach(ModuleFun, Mods),
+ dialyzer_codeserver:finalize_contracts(CodeServer).
--type fun_types() :: dict:dict(label(), erl_types:type_table()).
+-type fun_types() :: orddict:orddict(label(), erl_types:type_table()).
--spec check_contracts([{mfa(), file_contract()}],
+-spec check_contracts(orddict:orddict(mfa(), #contract{}),
dialyzer_callgraph:callgraph(), fun_types(),
- opaques_fun()) -> plt_contracts().
+ erl_types:opaques()) -> plt_contracts().
-check_contracts(Contracts, Callgraph, FunTypes, FindOpaques) ->
+check_contracts(Contracts, Callgraph, FunTypes, ModOpaques) ->
FoldFun =
- fun(Label, Type, NewContracts) ->
+ fun({Label, Type}, NewContracts) ->
case dialyzer_callgraph:lookup_name(Label, Callgraph) of
{ok, {M,F,A} = MFA} ->
case orddict:find(MFA, Contracts) of
- {ok, {_FileLine, Contract, _Xtra}} ->
- Opaques = FindOpaques(M),
+ {ok, Contract} ->
+ {M, Opaques} = lists:keyfind(M, 1, ModOpaques),
case check_contract(Contract, Type, Opaques) of
ok ->
case erl_bif_types:is_known(M, F, A) of
@@ -195,7 +204,7 @@ check_contracts(Contracts, Callgraph, FunTypes, FindOpaques) ->
error -> NewContracts
end
end,
- dict:fold(FoldFun, [], FunTypes).
+ orddict:from_list(lists:foldl(FoldFun, [], orddict:to_list(FunTypes))).
%% Checks all components of a contract
-spec check_contract(#contract{}, erl_types:erl_type()) -> 'ok' | {'error', term()}.
@@ -203,18 +212,21 @@ check_contracts(Contracts, Callgraph, FunTypes, FindOpaques) ->
check_contract(Contract, SuccType) ->
check_contract(Contract, SuccType, 'universe').
+-spec check_contract(#contract{}, erl_types:erl_type(), erl_types:opaques()) ->
+ 'ok' | {'error', term()}.
+
check_contract(#contract{contracts = Contracts}, SuccType, Opaques) ->
try
- Contracts1 = [{Contract, insert_constraints(Constraints, dict:new())}
+ Contracts1 = [{Contract, insert_constraints(Constraints)}
|| {Contract, Constraints} <- Contracts],
- Contracts2 = [erl_types:t_subst(Contract, Dict)
- || {Contract, Dict} <- Contracts1],
+ Contracts2 = [erl_types:t_subst(Contract, Map)
+ || {Contract, Map} <- Contracts1],
GenDomains = [erl_types:t_fun_args(C) || C <- Contracts2],
case check_domains(GenDomains) of
error ->
{error, {overlapping_contract, []}};
ok ->
- InfList = [erl_types:t_inf(Contract, SuccType, Opaques)
+ InfList = [{Contract, erl_types:t_inf(Contract, SuccType, Opaques)}
|| Contract <- Contracts2],
case check_contract_inf_list(InfList, SuccType, Opaques) of
{error, _} = Invalid -> Invalid;
@@ -238,10 +250,21 @@ check_domains([Dom|Doms]) ->
%% Allow a contract if one of the overloaded contracts is possible.
%% We used to be more strict, e.g., all overloaded contracts had to be
%% possible.
-check_contract_inf_list([FunType|Left], SuccType, Opaques) ->
+check_contract_inf_list(List, SuccType, Opaques) ->
+ case check_contract_inf_list(List, SuccType, Opaques, []) of
+ ok -> ok;
+ {error, []} -> {error, invalid_contract};
+ {error, [{SigRange, ContrRange}|_]} ->
+ case erl_types:t_find_opaque_mismatch(SigRange, ContrRange, Opaques) of
+ error -> {error, invalid_contract};
+ {ok, _T1, T2} -> {error, {opaque_mismatch, T2}}
+ end
+ end.
+
+check_contract_inf_list([{Contract, FunType}|Left], SuccType, Opaques, OM) ->
FunArgs = erl_types:t_fun_args(FunType),
case lists:any(fun erl_types:t_is_none_or_unit/1, FunArgs) of
- true -> check_contract_inf_list(Left, SuccType, Opaques);
+ true -> check_contract_inf_list(Left, SuccType, Opaques, OM);
false ->
STRange = erl_types:t_fun_range(SuccType),
case erl_types:t_is_none_or_unit(STRange) of
@@ -249,13 +272,16 @@ check_contract_inf_list([FunType|Left], SuccType, Opaques) ->
false ->
Range = erl_types:t_fun_range(FunType),
case erl_types:t_is_none(erl_types:t_inf(STRange, Range)) of
- true -> check_contract_inf_list(Left, SuccType, Opaques);
+ true ->
+ CR = erl_types:t_fun_range(Contract),
+ NewOM = [{STRange, CR}|OM],
+ check_contract_inf_list(Left, SuccType, Opaques, NewOM);
false -> ok
end
end
end;
-check_contract_inf_list([], _SuccType, _Opaques) ->
- {error, invalid_contract}.
+check_contract_inf_list([], _SuccType, _Opaques, OM) ->
+ {error, OM}.
check_extraneous([], _SuccType) -> ok;
check_extraneous([C|Cs], SuccType) ->
@@ -268,32 +294,49 @@ check_extraneous_1(Contract, SuccType) ->
CRng = erl_types:t_fun_range(Contract),
CRngs = erl_types:t_elements(CRng),
STRng = erl_types:t_fun_range(SuccType),
- ?debug("CR = ~p\nSR = ~p\n", [CRngs, STRng]),
+ ?debug("CR = ~tp\nSR = ~tp\n", [CRngs, STRng]),
case [CR || CR <- CRngs,
erl_types:t_is_none(erl_types:t_inf(CR, STRng))] of
[] ->
- CRngList = list_part(CRng),
- STRngList = list_part(STRng),
- case is_not_nil_list(CRngList) andalso is_not_nil_list(STRngList) of
- false -> ok;
- true ->
- CRngElements = erl_types:t_list_elements(CRngList),
- STRngElements = erl_types:t_list_elements(STRngList),
- Inf = erl_types:t_inf(CRngElements, STRngElements),
- case erl_types:t_is_none(Inf) of
- true -> {error, invalid_contract};
- false -> ok
- end
+ case bad_extraneous_list(CRng, STRng)
+ orelse bad_extraneous_map(CRng, STRng)
+ of
+ true -> {error, invalid_contract};
+ false -> ok
end;
CRs -> {error, {extra_range, erl_types:t_sup(CRs), STRng}}
end.
+bad_extraneous_list(CRng, STRng) ->
+ CRngList = list_part(CRng),
+ STRngList = list_part(STRng),
+ case is_not_nil_list(CRngList) andalso is_not_nil_list(STRngList) of
+ false -> false;
+ true ->
+ CRngElements = erl_types:t_list_elements(CRngList),
+ STRngElements = erl_types:t_list_elements(STRngList),
+ Inf = erl_types:t_inf(CRngElements, STRngElements),
+ erl_types:t_is_none(Inf)
+ end.
+
list_part(Type) ->
erl_types:t_inf(erl_types:t_list(), Type).
is_not_nil_list(Type) ->
erl_types:t_is_list(Type) andalso not erl_types:t_is_nil(Type).
+bad_extraneous_map(CRng, STRng) ->
+ CRngMap = map_part(CRng),
+ STRngMap = map_part(STRng),
+ (not is_empty_map(CRngMap)) andalso (not is_empty_map(STRngMap))
+ andalso is_empty_map(erl_types:t_inf(CRngMap, STRngMap)).
+
+map_part(Type) ->
+ erl_types:t_inf(erl_types:t_map(), Type).
+
+is_empty_map(Type) ->
+ erl_types:t_is_equal(Type, erl_types:t_from_term(#{})).
+
%% This is the heart of the "range function"
-spec process_contracts([contract_pair()], [erl_types:erl_type()]) ->
erl_types:erl_type().
@@ -318,19 +361,19 @@ process_contract({Contract, Constraints}, CallTypes0) ->
CallTypesFun = erl_types:t_fun(CallTypes0, erl_types:t_any()),
ContArgsFun = erl_types:t_fun(erl_types:t_fun_args(Contract),
erl_types:t_any()),
- ?debug("Instance: Contract: ~s\n Arguments: ~s\n",
+ ?debug("Instance: Contract: ~ts\n Arguments: ~ts\n",
[erl_types:t_to_string(ContArgsFun),
erl_types:t_to_string(CallTypesFun)]),
case solve_constraints(ContArgsFun, CallTypesFun, Constraints) of
- {ok, VarDict} ->
- {ok, erl_types:t_subst(erl_types:t_fun_range(Contract), VarDict)};
+ {ok, VarMap} ->
+ {ok, erl_types:t_subst(erl_types:t_fun_range(Contract), VarMap)};
error -> error
end.
solve_constraints(Contract, Call, Constraints) ->
%% First make sure the call follows the constraints
- CDict = insert_constraints(Constraints, dict:new()),
- Contract1 = erl_types:t_subst(Contract, CDict),
+ CMap = insert_constraints(Constraints),
+ Contract1 = erl_types:t_subst(Contract, CMap),
%% Just a safe over-approximation.
%% TODO: Find the types for type variables properly
ContrArgs = erl_types:t_fun_args(Contract1),
@@ -338,7 +381,7 @@ solve_constraints(Contract, Call, Constraints) ->
InfList = erl_types:t_inf_lists(ContrArgs, CallArgs),
case erl_types:any_none_or_unit(InfList) of
true -> error;
- false -> {ok, CDict}
+ false -> {ok, CMap}
end.
%%Inf = erl_types:t_inf(Contract1, Call),
%% Then unify with the constrained call type.
@@ -348,7 +391,7 @@ solve_constraints(Contract, Call, Constraints) ->
%% ?debug("Inf: ~s\n", [erl_types:t_to_string(Inf)]),
%% erl_types:t_assign_variables_to_subtype(Contract, Inf).
--type contracts() :: dict:dict(mfa(),dialyzer_contracts:file_contract()).
+-type contracts() :: dialyzer_codeserver:contracts().
%% Checks the contracts for functions that are not implemented
-spec contracts_without_fun(contracts(), [_], dialyzer_callgraph:callgraph()) ->
@@ -358,33 +401,36 @@ contracts_without_fun(Contracts, AllFuns0, Callgraph) ->
AllFuns1 = [{dialyzer_callgraph:lookup_name(Label, Callgraph), Arity}
|| {Label, Arity} <- AllFuns0],
AllFuns2 = [{M, F, A} || {{ok, {M, F, _}}, A} <- AllFuns1],
- AllContractMFAs = dict:fetch_keys(Contracts),
+ AllContractMFAs = maps:keys(Contracts),
ErrorContractMFAs = AllContractMFAs -- AllFuns2,
[warn_spec_missing_fun(MFA, Contracts) || MFA <- ErrorContractMFAs].
warn_spec_missing_fun({M, F, A} = MFA, Contracts) ->
- {{File, Line}, _Contract, _Xtra} = dict:fetch(MFA, Contracts),
+ {{File, Line}, _Contract, _Xtra} = maps:get(MFA, Contracts),
WarningInfo = {File, Line, MFA},
{?WARN_CONTRACT_SYNTAX, WarningInfo, {spec_missing_fun, [M, F, A]}}.
%% This treats the "when" constraints. It will be extended, we hope.
-insert_constraints([{subtype, Type1, Type2}|Left], Dict) ->
+insert_constraints(Constraints) ->
+ insert_constraints(Constraints, maps:new()).
+
+insert_constraints([{subtype, Type1, Type2}|Left], Map) ->
case erl_types:t_is_var(Type1) of
true ->
Name = erl_types:t_var_name(Type1),
- Dict1 = case dict:find(Name, Dict) of
- error ->
- dict:store(Name, Type2, Dict);
- {ok, VarType} ->
- dict:store(Name, erl_types:t_inf(VarType, Type2), Dict)
- end,
- insert_constraints(Left, Dict1);
+ Map1 = case maps:find(Name, Map) of
+ error ->
+ maps:put(Name, Type2, Map);
+ {ok, VarType} ->
+ maps:put(Name, erl_types:t_inf(VarType, Type2), Map)
+ end,
+ insert_constraints(Left, Map1);
false ->
%% A lot of things should change to add supertypes
throw({error, io_lib:format("First argument of is_subtype constraint "
- "must be a type variable: ~p\n", [Type1])})
+ "must be a type variable: ~tp\n", [Type1])})
end;
-insert_constraints([], Dict) -> Dict.
+insert_constraints([], Map) -> Map.
-type types() :: erl_types:type_table().
@@ -393,121 +439,141 @@ insert_constraints([], Dict) -> Dict.
-spec store_tmp_contract(mfa(), file_line(), spec_data(), contracts(), types()) ->
contracts().
-store_tmp_contract(MFA, FileLine, {TypeSpec, Xtra}, SpecDict, RecordsDict) ->
- %% io:format("contract from form: ~p\n", [TypeSpec]),
- {Module, _, _} = MFA,
- TmpContract = contract_from_form(TypeSpec, Module, RecordsDict, FileLine),
- %% io:format("contract: ~p\n", [TmpContract]),
- dict:store(MFA, {FileLine, TmpContract, Xtra}, SpecDict).
+store_tmp_contract(MFA, FileLine, {TypeSpec, Xtra}, SpecMap, RecordsDict) ->
+ %% io:format("contract from form: ~tp\n", [TypeSpec]),
+ TmpContract = contract_from_form(TypeSpec, MFA, RecordsDict, FileLine),
+ %% io:format("contract: ~tp\n", [TmpContract]),
+ maps:put(MFA, {FileLine, TmpContract, Xtra}, SpecMap).
-contract_from_form(Forms, Module, RecDict, FileLine) ->
- {CFuns, Forms1} = contract_from_form(Forms, Module, RecDict, FileLine, [], []),
+contract_from_form(Forms, MFA, RecDict, FileLine) ->
+ {CFuns, Forms1} = contract_from_form(Forms, MFA, RecDict, FileLine, [], []),
#tmp_contract{contract_funs = CFuns, forms = Forms1}.
-contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], Module, RecDict,
+contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], MFA, RecDict,
FileLine, TypeAcc, FormAcc) ->
TypeFun =
- fun(ExpTypes, AllRecords) ->
- NewType =
+ fun(ExpTypes, RecordTable, Cache) ->
+ {NewType, NewCache} =
try
- from_form_with_check(Form, ExpTypes, Module, AllRecords)
+ from_form_with_check(Form, ExpTypes, MFA, RecordTable, Cache)
catch
throw:{error, Msg} ->
{File, Line} = FileLine,
- NewMsg = io_lib:format("~s:~p: ~s", [filename:basename(File),
- Line, Msg]),
+ NewMsg = io_lib:format("~ts:~p: ~ts", [filename:basename(File),
+ Line, Msg]),
throw({error, NewMsg})
end,
NewTypeNoVars = erl_types:subst_all_vars_to_any(NewType),
- {NewTypeNoVars, []}
+ {{NewTypeNoVars, []}, NewCache}
end,
NewTypeAcc = [TypeFun | TypeAcc],
NewFormAcc = [{Form, []} | FormAcc],
- contract_from_form(Left, Module, RecDict, FileLine, NewTypeAcc, NewFormAcc);
+ contract_from_form(Left, MFA, RecDict, FileLine, NewTypeAcc, NewFormAcc);
contract_from_form([{type, _L1, bounded_fun,
[{type, _L2, 'fun', [_, _]} = Form, Constr]}| Left],
- Module, RecDict, FileLine, TypeAcc, FormAcc) ->
+ MFA, RecDict, FileLine, TypeAcc, FormAcc) ->
TypeFun =
- fun(ExpTypes, AllRecords) ->
- {Constr1, VarDict} =
- process_constraints(Constr, Module, RecDict, ExpTypes, AllRecords),
- NewType = from_form_with_check(Form, ExpTypes, Module, AllRecords,
- VarDict),
+ fun(ExpTypes, RecordTable, Cache) ->
+ {Constr1, VarTable, Cache1} =
+ process_constraints(Constr, MFA, RecDict, ExpTypes, RecordTable,
+ Cache),
+ {NewType, NewCache} =
+ from_form_with_check(Form, ExpTypes, MFA, RecordTable,
+ VarTable, Cache1),
NewTypeNoVars = erl_types:subst_all_vars_to_any(NewType),
- {NewTypeNoVars, Constr1}
+ {{NewTypeNoVars, Constr1}, NewCache}
end,
NewTypeAcc = [TypeFun | TypeAcc],
NewFormAcc = [{Form, Constr} | FormAcc],
- contract_from_form(Left, Module, RecDict, FileLine, NewTypeAcc, NewFormAcc);
-contract_from_form([], _Module, _RecDict, _FileLine, TypeAcc, FormAcc) ->
+ contract_from_form(Left, MFA, RecDict, FileLine, NewTypeAcc, NewFormAcc);
+contract_from_form([], _MFA, _RecDict, _FileLine, TypeAcc, FormAcc) ->
{lists:reverse(TypeAcc), lists:reverse(FormAcc)}.
-process_constraints(Constrs, Module, RecDict, ExpTypes, AllRecords) ->
- Init0 = initialize_constraints(Constrs, Module, RecDict, ExpTypes, AllRecords),
+process_constraints(Constrs, MFA, RecDict, ExpTypes, RecordTable, Cache) ->
+ {Init0, NewCache} = initialize_constraints(Constrs, MFA, RecDict, ExpTypes,
+ RecordTable, Cache),
Init = remove_cycles(Init0),
- constraints_fixpoint(Init, Module, RecDict, ExpTypes, AllRecords).
+ constraints_fixpoint(Init, MFA, RecDict, ExpTypes, RecordTable, NewCache).
-initialize_constraints(Constrs, Module, RecDict, ExpTypes, AllRecords) ->
- initialize_constraints(Constrs, Module, RecDict, ExpTypes, AllRecords, []).
+initialize_constraints(Constrs, MFA, RecDict, ExpTypes, RecordTable, Cache) ->
+ initialize_constraints(Constrs, MFA, RecDict, ExpTypes, RecordTable,
+ Cache, []).
-initialize_constraints([], _Module, _RecDict, _ExpTypes, _AllRecords, Acc) ->
- Acc;
-initialize_constraints([Constr|Rest], Module, RecDict, ExpTypes, AllRecords, Acc) ->
+initialize_constraints([], _MFA, _RecDict, _ExpTypes, _RecordTable,
+ Cache, Acc) ->
+ {Acc, Cache};
+initialize_constraints([Constr|Rest], MFA, RecDict, ExpTypes, RecordTable,
+ Cache, Acc) ->
case Constr of
{type, _, constraint, [{atom, _, is_subtype}, [Type1, Type2]]} ->
- T1 = final_form(Type1, ExpTypes, Module, AllRecords, dict:new()),
+ VarTable = erl_types:var_table__new(),
+ {T1, NewCache} =
+ final_form(Type1, ExpTypes, MFA, RecordTable, VarTable, Cache),
Entry = {T1, Type2},
- initialize_constraints(Rest, Module, RecDict, ExpTypes, AllRecords, [Entry|Acc]);
+ initialize_constraints(Rest, MFA, RecDict, ExpTypes, RecordTable,
+ NewCache, [Entry|Acc]);
{type, _, constraint, [{atom,_,Name}, List]} ->
N = length(List),
throw({error,
- io_lib:format("Unsupported type guard ~w/~w\n", [Name, N])})
+ io_lib:format("Unsupported type guard ~tw/~w\n", [Name, N])})
end.
-constraints_fixpoint(Constrs, Module, RecDict, ExpTypes, AllRecords) ->
- VarDict =
- constraints_to_dict(Constrs, Module, RecDict, ExpTypes, AllRecords, dict:new()),
- constraints_fixpoint(VarDict, Module, Constrs, RecDict, ExpTypes, AllRecords).
-
-constraints_fixpoint(OldVarDict, Module, Constrs, RecDict, ExpTypes, AllRecords) ->
- NewVarDict =
- constraints_to_dict(Constrs, Module, RecDict, ExpTypes, AllRecords, OldVarDict),
- case NewVarDict of
- OldVarDict ->
- DictFold =
+constraints_fixpoint(Constrs, MFA, RecDict, ExpTypes, RecordTable, Cache) ->
+ VarTable = erl_types:var_table__new(),
+ {VarTab, NewCache} =
+ constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, RecordTable,
+ VarTable, Cache),
+ constraints_fixpoint(VarTab, MFA, Constrs, RecDict, ExpTypes,
+ RecordTable, NewCache).
+
+constraints_fixpoint(OldVarTab, MFA, Constrs, RecDict, ExpTypes,
+ RecordTable, Cache) ->
+ {NewVarTab, NewCache} =
+ constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, RecordTable,
+ OldVarTab, Cache),
+ case NewVarTab of
+ OldVarTab ->
+ Fun =
fun(Key, Value, Acc) ->
[{subtype, erl_types:t_var(Key), Value}|Acc]
end,
- FinalConstrs = dict:fold(DictFold, [], NewVarDict),
- {FinalConstrs, NewVarDict};
+ FinalConstrs = maps:fold(Fun, [], NewVarTab),
+ {FinalConstrs, NewVarTab, NewCache};
_Other ->
- constraints_fixpoint(NewVarDict, Module, Constrs, RecDict, ExpTypes, AllRecords)
+ constraints_fixpoint(NewVarTab, MFA, Constrs, RecDict, ExpTypes,
+ RecordTable, NewCache)
end.
-final_form(Form, ExpTypes, Module, AllRecords, VarDict) ->
- from_form_with_check(Form, ExpTypes, Module, AllRecords, VarDict).
-
-from_form_with_check(Form, ExpTypes, Module, AllRecords) ->
- erl_types:t_check_record_fields(Form, ExpTypes, Module, AllRecords),
- erl_types:t_from_form(Form, ExpTypes, Module, AllRecords).
-
-from_form_with_check(Form, ExpTypes, Module, AllRecords, VarDict) ->
- erl_types:t_check_record_fields(Form, ExpTypes, Module, AllRecords,
- VarDict),
- erl_types:t_from_form(Form, ExpTypes, Module, AllRecords, VarDict).
-
-constraints_to_dict(Constrs, Module, RecDict, ExpTypes, AllRecords, VarDict) ->
- Subtypes =
- constraints_to_subs(Constrs, Module, RecDict, ExpTypes, AllRecords, VarDict, []),
- insert_constraints(Subtypes, dict:new()).
-
-constraints_to_subs([], _Module, _RecDict, _ExpTypes, _AllRecords, _VarDict, Acc) ->
- Acc;
-constraints_to_subs([C|Rest], Module, RecDict, ExpTypes, AllRecords, VarDict, Acc) ->
- {T1, Form2} = C,
- T2 = final_form(Form2, ExpTypes, Module, AllRecords, VarDict),
+final_form(Form, ExpTypes, MFA, RecordTable, VarTable, Cache) ->
+ from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache).
+
+from_form_with_check(Form, ExpTypes, MFA, RecordTable, Cache) ->
+ VarTable = erl_types:var_table__new(),
+ from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache).
+
+from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache) ->
+ Site = {spec, MFA},
+ C1 = erl_types:t_check_record_fields(Form, ExpTypes, Site, RecordTable,
+ VarTable, Cache),
+ erl_types:t_from_form(Form, ExpTypes, Site, RecordTable, VarTable, C1).
+
+constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, RecordTable,
+ VarTab, Cache) ->
+ {Subtypes, NewCache} =
+ constraints_to_subs(Constrs, MFA, RecDict, ExpTypes, RecordTable,
+ VarTab, Cache, []),
+ {insert_constraints(Subtypes), NewCache}.
+
+constraints_to_subs([], _MFA, _RecDict, _ExpTypes, _RecordTable,
+ _VarTab, Cache, Acc) ->
+ {Acc, Cache};
+constraints_to_subs([{T1, Form2}|Rest], MFA, RecDict, ExpTypes, RecordTable,
+ VarTab, Cache, Acc) ->
+ {T2, NewCache} =
+ final_form(Form2, ExpTypes, MFA, RecordTable, VarTab, Cache),
NewAcc = [{subtype, T1, T2}|Acc],
- constraints_to_subs(Rest, Module, RecDict, ExpTypes, AllRecords, VarDict, NewAcc).
+ constraints_to_subs(Rest, MFA, RecDict, ExpTypes, RecordTable,
+ VarTab, NewCache, NewAcc).
%% Replaces variables with '_' when necessary to break up cycles among
%% the constraints.
@@ -565,10 +631,13 @@ remove_uses([{Var, Use}|ToRemove], Constrs0) ->
remove_uses(_Var, _Use, []) -> [];
remove_uses(Var, Use, [Constr|Constrs]) ->
{V, Form} = Constr,
- case erl_types:t_var_name(V) =:= Var of
- true -> [{V, remove_use(Form, Use)}|Constrs];
- false -> [Constr|remove_uses(Var, Use, Constrs)]
- end.
+ NewConstr = case erl_types:t_var_name(V) =:= Var of
+ true ->
+ {V, remove_use(Form, Use)};
+ false ->
+ Constr
+ end,
+ [NewConstr|remove_uses(Var, Use, Constrs)].
remove_use({var, L, V}, V) -> {var, L, '_'};
remove_use(T, V) when is_tuple(T) ->
@@ -584,8 +653,8 @@ general_domain(List) ->
general_domain(List, erl_types:t_none()).
general_domain([{Sig, Constraints}|Left], AccSig) ->
- Dict = insert_constraints(Constraints, dict:new()),
- Sig1 = erl_types:t_subst(Sig, Dict),
+ Map = insert_constraints(Constraints),
+ Sig1 = erl_types:t_subst(Sig, Map),
general_domain(Left, erl_types:t_sup(AccSig, Sig1));
general_domain([], AccSig) ->
%% Get rid of all variables in the domain.
@@ -594,43 +663,52 @@ general_domain([], AccSig) ->
-spec get_invalid_contract_warnings([module()],
dialyzer_codeserver:codeserver(),
- dialyzer_plt:plt(),
- opaques_fun()) -> [raw_warning()].
+ dialyzer_plt:plt()) -> [raw_warning()].
-get_invalid_contract_warnings(Modules, CodeServer, Plt, FindOpaques) ->
- get_invalid_contract_warnings_modules(Modules, CodeServer, Plt, FindOpaques, []).
+get_invalid_contract_warnings(Modules, CodeServer, Plt) ->
+ get_invalid_contract_warnings_modules(Modules, CodeServer, Plt, []).
-get_invalid_contract_warnings_modules([Mod|Mods], CodeServer, Plt, FindOpaques, Acc) ->
+get_invalid_contract_warnings_modules([Mod|Mods], CodeServer, Plt, Acc) ->
Contracts1 = dialyzer_codeserver:lookup_mod_contracts(Mod, CodeServer),
- Contracts2 = dict:to_list(Contracts1),
- Records = dialyzer_codeserver:lookup_mod_records(Mod, CodeServer),
- NewAcc = get_invalid_contract_warnings_funs(Contracts2, Plt, Records, FindOpaques, Acc),
- get_invalid_contract_warnings_modules(Mods, CodeServer, Plt, FindOpaques, NewAcc);
-get_invalid_contract_warnings_modules([], _CodeServer, _Plt, _FindOpaques, Acc) ->
+ NewAcc =
+ case maps:size(Contracts1) =:= 0 of
+ true -> Acc;
+ false ->
+ Contracts2 = maps:to_list(Contracts1),
+ Records = dialyzer_codeserver:lookup_mod_records(Mod, CodeServer),
+ Opaques = erl_types:t_opaque_from_records(Records),
+ get_invalid_contract_warnings_funs(Contracts2, Plt, Records,
+ Opaques, Acc)
+ end,
+ get_invalid_contract_warnings_modules(Mods, CodeServer, Plt, NewAcc);
+get_invalid_contract_warnings_modules([], _CodeServer, _Plt, Acc) ->
Acc.
get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract, _Xtra}}|Left],
- Plt, RecDict, FindOpaques, Acc) ->
+ Plt, RecDict, Opaques, Acc) ->
case dialyzer_plt:lookup(Plt, MFA) of
none ->
%% This must be a contract for a non-available function. Just accept it.
- get_invalid_contract_warnings_funs(Left, Plt, RecDict, FindOpaques, Acc);
+ get_invalid_contract_warnings_funs(Left, Plt, RecDict, Opaques, Acc);
{value, {Ret, Args}} ->
Sig = erl_types:t_fun(Args, Ret),
{M, _F, _A} = MFA,
- Opaques = FindOpaques(M),
+ %% io:format("MFA ~tp~n", [MFA]),
{File, Line} = FileLine,
WarningInfo = {File, Line, MFA},
NewAcc =
case check_contract(Contract, Sig, Opaques) of
{error, invalid_contract} ->
[invalid_contract_warning(MFA, WarningInfo, Sig, RecDict)|Acc];
+ {error, {opaque_mismatch, T2}} ->
+ W = contract_opaque_warning(MFA, WarningInfo, T2, Sig, RecDict),
+ [W|Acc];
{error, {overlapping_contract, []}} ->
[overlapping_contract_warning(MFA, WarningInfo)|Acc];
{error, {extra_range, ExtraRanges, STRange}} ->
Warn =
case t_from_forms_without_remote(Contract#contract.forms,
- RecDict) of
+ MFA, RecDict) of
{ok, NoRemoteType} ->
CRet = erl_types:t_fun_range(NoRemoteType),
erl_types:t_is_subtype(ExtraRanges, CRet);
@@ -669,15 +747,21 @@ get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract, _Xtra}}|Left],
RecDict, Acc)
end
end,
- get_invalid_contract_warnings_funs(Left, Plt, RecDict, FindOpaques, NewAcc)
+ get_invalid_contract_warnings_funs(Left, Plt, RecDict, Opaques, NewAcc)
end;
-get_invalid_contract_warnings_funs([], _Plt, _RecDict, _FindOpaques, Acc) ->
+get_invalid_contract_warnings_funs([], _Plt, _RecDict, _Opaques, Acc) ->
Acc.
invalid_contract_warning({M, F, A}, WarningInfo, SuccType, RecDict) ->
SuccTypeStr = dialyzer_utils:format_sig(SuccType, RecDict),
{?WARN_CONTRACT_TYPES, WarningInfo, {invalid_contract, [M, F, A, SuccTypeStr]}}.
+contract_opaque_warning({M, F, A}, WarningInfo, OpType, SuccType, RecDict) ->
+ OpaqueStr = erl_types:t_to_string(OpType),
+ SuccTypeStr = dialyzer_utils:format_sig(SuccType, RecDict),
+ {?WARN_CONTRACT_TYPES, WarningInfo,
+ {contract_with_opaque, [M, F, A, OpaqueStr, SuccTypeStr]}}.
+
overlapping_contract_warning({M, F, A}, WarningInfo) ->
{?WARN_CONTRACT_TYPES, WarningInfo, {overlapping_contract, [M, F, A]}}.
@@ -705,23 +789,14 @@ picky_contract_check(CSig0, Sig0, MFA, WarningInfo, Contract, RecDict, Acc) ->
end
end.
-extra_contract_warning({M, F, A}, WarningInfo, Contract, CSig, Sig, RecDict) ->
- %% We do not want to depend upon erl_types:t_to_string() possibly
- %% hiding the contents of opaque types.
- SigUnopaque = erl_types:t_unopaque(Sig),
- CSigUnopaque = erl_types:t_unopaque(CSig),
- SigString0 =
- lists:flatten(dialyzer_utils:format_sig(SigUnopaque, RecDict)),
- ContractString0 =
- lists:flatten(dialyzer_utils:format_sig(CSigUnopaque, RecDict)),
- %% The only difference is in record fields containing 'undefined' or not.
- IsUndefRecordFieldsRelated = SigString0 =:= ContractString0,
+extra_contract_warning(MFA, WarningInfo, Contract, CSig, Sig, RecDict) ->
{IsRemoteTypesRelated, SubtypeRelation} =
- is_remote_types_related(Contract, CSig, Sig, RecDict),
- case IsUndefRecordFieldsRelated orelse IsRemoteTypesRelated of
+ is_remote_types_related(Contract, CSig, Sig, MFA, RecDict),
+ case IsRemoteTypesRelated of
true ->
no_warning;
false ->
+ {M, F, A} = MFA,
SigString = lists:flatten(dialyzer_utils:format_sig(Sig, RecDict)),
ContractString = contract_to_string(Contract),
{Tag, Msg} =
@@ -739,14 +814,15 @@ extra_contract_warning({M, F, A}, WarningInfo, Contract, CSig, Sig, RecDict) ->
{warning, {Tag, WarningInfo, Msg}}
end.
-is_remote_types_related(Contract, CSig, Sig, RecDict) ->
+is_remote_types_related(Contract, CSig, Sig, MFA, RecDict) ->
case erl_types:t_is_subtype(CSig, Sig) of
true ->
{false, contract_is_subtype};
false ->
case erl_types:t_is_subtype(Sig, CSig) of
true ->
- case t_from_forms_without_remote(Contract#contract.forms, RecDict) of
+ case t_from_forms_without_remote(Contract#contract.forms, MFA,
+ RecDict) of
{ok, NoRemoteTypeSig} ->
case blame_remote(CSig, NoRemoteTypeSig, Sig) of
true ->
@@ -762,13 +838,14 @@ is_remote_types_related(Contract, CSig, Sig, RecDict) ->
end
end.
-t_from_forms_without_remote([{FType, []}], RecDict) ->
- Type1 = erl_types:t_from_form_without_remote(FType, RecDict),
+t_from_forms_without_remote([{FType, []}], MFA, RecDict) ->
+ Site = {spec, MFA},
+ {Type1, _} = erl_types:t_from_form_without_remote(FType, Site, RecDict),
{ok, erl_types:subst_all_vars_to_any(Type1)};
-t_from_forms_without_remote([{_FType, _Constrs}], _RecDict) ->
+t_from_forms_without_remote([{_FType, _Constrs}], _MFA, _RecDict) ->
%% 'When' constraints
unsupported;
-t_from_forms_without_remote(_Forms, _RecDict) ->
+t_from_forms_without_remote(_Forms, _MFA, _RecDict) ->
%% Lots of forms
unsupported.
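
The constraint_to_string/2 clause and the reworked contract_from_form/4 above operate on the abstract forms that erl_parse produces for bounded fun specs. A minimal sketch, not part of the patch, of what those forms look like for a made-up spec; the module name is illustrative.

    -module(constraint_form_sketch).
    -export([run/0]).

    %% Parse a made-up bounded spec and match the bounded_fun/constraint
    %% abstract forms that erl_parse produces for it.
    run() ->
        {ok, Toks, _} = erl_scan:string("-spec f(X) -> X when X :: integer().\n"),
        {ok, {attribute, _, spec, {{f, 1}, [Spec]}}} = erl_parse:parse_form(Toks),
        {type, _, bounded_fun,
         [{type, _, 'fun', [_Args, _Range]}, Constrs]} = Spec,
        [{type, _, constraint,
          [{atom, _, is_subtype}, [{var, _, 'X'}, _IntegerType]]}] = Constrs,
        Constrs.

The is_subtype constraint matched at the end is the form that the new constraint_to_string(is_subtype, ...) clause prints as "X :: integer()" rather than the old "is_subtype(X, integer())" rendering.
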
diff --git a/lib/dialyzer/src/dialyzer_coordinator.erl b/lib/dialyzer/src/dialyzer_coordinator.erl
index 612f379d32..7c1bc1de5a 100644
--- a/lib/dialyzer/src/dialyzer_coordinator.erl
+++ b/lib/dialyzer/src/dialyzer_coordinator.erl
@@ -1,8 +1,4 @@
%% -*- erlang-indent-level: 2 -*-
-%%-----------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2012. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -15,9 +11,6 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
%%%-------------------------------------------------------------------
%%% File : dialyzer_coordinator.erl
@@ -29,8 +22,8 @@
%%% Export for dialyzer main process
-export([parallel_job/4]).
-%%% Exports for all possible workers
--export([wait_activation/0, job_done/3]).
+%%% Export for all possible workers
+-export([job_done/3]).
%%% Exports for the typesig and dataflow analysis workers
-export([sccs_to_pids/2, request_activation/1]).
@@ -38,7 +31,7 @@
%%% Exports for the compilation workers
-export([get_next_label/2]).
--export_type([coordinator/0, mode/0, init_data/0, result/0]).
+-export_type([coordinator/0, mode/0, init_data/0, result/0, job/0]).
%%--------------------------------------------------------------------
@@ -46,16 +39,18 @@
-type regulator() :: pid().
-type scc_to_pid() :: ets:tid() | 'unused'.
--type coordinator() :: {collector(), regulator(), scc_to_pid()}. %%opaque
+-opaque coordinator() :: {collector(), regulator(), scc_to_pid()}.
-type timing() :: dialyzer_timing:timing_server().
-type scc() :: [mfa_or_funlbl()].
-type mode() :: 'typesig' | 'dataflow' | 'compile' | 'warnings'.
--type compile_jobs() :: [file:filename()].
--type typesig_jobs() :: [scc()].
--type dataflow_jobs() :: [module()].
--type warnings_jobs() :: [module()].
+-type compile_job() :: file:filename().
+-type typesig_job() :: scc().
+-type dataflow_job() :: module().
+-type warnings_job() :: module().
+
+-type job() :: compile_job() | typesig_job() | dataflow_job() | warnings_job().
-type compile_init_data() :: dialyzer_analysis_callgraph:compile_init_data().
-type typesig_init_data() :: dialyzer_succ_typings:typesig_init_data().
@@ -73,14 +68,16 @@
-type result() :: compile_result() | typesig_result() |
dataflow_result() | warnings_result().
--type job() :: scc() | module() | file:filename().
--type job_result() :: dialyzer_analysis_callgraph:one_file_result() |
- typesig_result() | dataflow_result() | warnings_result().
+-type job_result() :: dialyzer_analysis_callgraph:one_file_mid_error() |
+ dialyzer_analysis_callgraph:one_file_result_ok() |
+ typesig_result() | dataflow_result() | warnings_result().
-record(state, {mode :: mode(),
active = 0 :: integer(),
result :: result(),
next_label = 0 :: integer(),
+ jobs :: [job()],
+ job_fun :: fun(),
init_data :: init_data(),
regulator :: regulator(),
scc_to_pid :: scc_to_pid()
@@ -90,13 +87,13 @@
%%--------------------------------------------------------------------
--spec parallel_job('compile', compile_jobs(), compile_init_data(), timing()) ->
+-spec parallel_job('compile', [compile_job()], compile_init_data(), timing()) ->
{compile_result(), integer()};
- ('typesig', typesig_jobs(), typesig_init_data(), timing()) ->
+ ('typesig', [typesig_job()], typesig_init_data(), timing()) ->
typesig_result();
- ('dataflow', dataflow_jobs(), dataflow_init_data(),
+ ('dataflow', [dataflow_job()], dataflow_init_data(),
timing()) -> dataflow_result();
- ('warnings', warnings_jobs(), warnings_init_data(),
+ ('warnings', [warnings_job()], warnings_init_data(),
timing()) -> warnings_result().
parallel_job(Mode, Jobs, InitData, Timing) ->
@@ -113,16 +110,18 @@ spawn_jobs(Mode, Jobs, InitData, Timing) ->
false -> unused
end,
Coordinator = {Collector, Regulator, SCCtoPID},
- Fold =
- fun(Job, Count) ->
- Pid = dialyzer_worker:launch(Mode, Job, InitData, Coordinator),
- case TypesigOrDataflow of
- true -> true = ets:insert(SCCtoPID, {Job, Pid}), ok;
- false -> request_activation(Regulator, Pid)
- end,
- Count + 1
+ JobFun =
+ fun(Job) ->
+ Pid = dialyzer_worker:launch(Mode, Job, InitData, Coordinator),
+ case TypesigOrDataflow of
+ true -> true = ets:insert(SCCtoPID, {Job, Pid});
+ false -> true
+ end
end,
- JobCount = lists:foldl(Fold, 0, Jobs),
+ JobCount = length(Jobs),
+ NumberOfInitJobs = min(JobCount, 20 * dialyzer_utils:parallelism()),
+ {InitJobs, RestJobs} = lists:split(NumberOfInitJobs, Jobs),
+ lists:foreach(JobFun, InitJobs),
Unit =
case Mode of
'typesig' -> "SCCs";
@@ -134,11 +133,13 @@ spawn_jobs(Mode, Jobs, InitData, Timing) ->
'compile' -> dialyzer_analysis_callgraph:compile_init_result();
_ -> []
end,
- #state{mode = Mode, active = JobCount, result = InitResult, next_label = 0,
- init_data = InitData, regulator = Regulator, scc_to_pid = SCCtoPID}.
+ #state{mode = Mode, active = JobCount, result = InitResult,
+ next_label = 0, job_fun = JobFun, jobs = RestJobs,
+ init_data = InitData, regulator = Regulator, scc_to_pid = SCCtoPID}.
collect_result(#state{mode = Mode, active = Active, result = Result,
next_label = NextLabel, init_data = InitData,
+ jobs = JobsLeft, job_fun = JobFun,
regulator = Regulator, scc_to_pid = SCCtoPID} = State) ->
receive
{next_label_request, Estimation, Pid} ->
@@ -146,20 +147,35 @@ collect_result(#state{mode = Mode, active = Active, result = Result,
collect_result(State#state{next_label = NextLabel + Estimation});
{done, Job, Data} ->
NewResult = update_result(Mode, InitData, Job, Data, Result),
+ TypesigOrDataflow = (Mode =:= 'typesig') orelse (Mode =:= 'dataflow'),
case Active of
1 ->
kill_regulator(Regulator),
case Mode of
'compile' ->
{NewResult, NextLabel};
- X when X =:= 'typesig'; X =:= 'dataflow' ->
+ _ when TypesigOrDataflow ->
ets:delete(SCCtoPID),
NewResult;
'warnings' ->
NewResult
end;
N ->
- collect_result(State#state{result = NewResult, active = N - 1})
+ case TypesigOrDataflow of
+ true -> true = ets:delete(SCCtoPID, Job);
+ false -> true
+ end,
+ NewJobsLeft =
+ case JobsLeft of
+ [] -> [];
+ [NewJob|JobsLeft1] ->
+ JobFun(NewJob),
+ JobsLeft1
+ end,
+ NewState = State#state{result = NewResult,
+ jobs = NewJobsLeft,
+ active = N - 1},
+ collect_result(NewState)
end
end.
@@ -175,18 +191,20 @@ update_result(Mode, InitData, Job, Data, Result) ->
end.
-spec sccs_to_pids([scc() | module()], coordinator()) ->
- {[dialyzer_worker:worker()], [scc() | module()]}.
+ [dialyzer_worker:worker()].
sccs_to_pids(SCCs, {_Collector, _Regulator, SCCtoPID}) ->
Fold =
- fun(SCC, {Pids, Unknown}) ->
- try ets:lookup_element(SCCtoPID, SCC, 2) of
- Result -> {[Result|Pids], Unknown}
- catch
- _:_ -> {Pids, [SCC|Unknown]}
- end
+ fun(SCC, Pids) ->
+ %% The SCCs that SCC depends on have always been started.
+ try ets:lookup_element(SCCtoPID, SCC, 2) of
+ Pid when is_pid(Pid) ->
+ [Pid|Pids]
+ catch
+ _:_ -> Pids
+ end
end,
- lists:foldl(Fold, {[], []}, SCCs).
+ lists:foldl(Fold, [], SCCs).
-spec job_done(job(), job_result(), coordinator()) -> ok.
@@ -217,10 +235,6 @@ request_activation({_Collector, Regulator, _SCCtoPID}) ->
Regulator ! {req, self()},
wait_activation().
-request_activation(Regulator, Pid) ->
- Regulator ! {req, Pid},
- ok.
-
spawn_regulator() ->
InitTickets = dialyzer_utils:parallelism(),
spawn_link(fun() -> regulator_loop(InitTickets, queue:new()) end).
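
With the changes to spawn_jobs/4 and collect_result/1 above, only min(JobCount, 20 * dialyzer_utils:parallelism()) workers are launched up front, and a further job is started each time one reports done. The following is a minimal, self-contained sketch of that bounded job-window pattern, not part of the patch; the module, function names and the Limit argument are illustrative.

    -module(job_window_sketch).
    -export([run_bounded/3]).

    %% Run WorkerFun over Jobs with at most Limit concurrent workers,
    %% starting a new job whenever a running one reports {done, Result}.
    %% Returns the results in reverse completion order.
    run_bounded(Jobs, Limit, WorkerFun) when is_integer(Limit), Limit > 0 ->
        {InitJobs, RestJobs} = lists:split(min(Limit, length(Jobs)), Jobs),
        Parent = self(),
        Start = fun(Job) ->
                    spawn(fun() -> Parent ! {done, WorkerFun(Job)} end)
                end,
        lists:foreach(Start, InitJobs),
        collect(length(Jobs), RestJobs, Start, []).

    collect(0, [], _Start, Acc) ->
        Acc;
    collect(Active, Rest, Start, Acc) ->
        receive
            {done, Result} ->
                NewRest = case Rest of
                              [] -> [];
                              [Next | More] -> _ = Start(Next), More
                          end,
                collect(Active - 1, NewRest, Start, [Result | Acc])
        end.

In the patch itself the refill happens inside collect_result/1, and typesig/dataflow jobs are additionally registered in the SCCtoPID table so that sccs_to_pids/2 can still find the workers that are running.
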
diff --git a/lib/dialyzer/src/dialyzer_dataflow.erl b/lib/dialyzer/src/dialyzer_dataflow.erl
index cabc5e9e0d..8367432ac5 100644
--- a/lib/dialyzer/src/dialyzer_dataflow.erl
+++ b/lib/dialyzer/src/dialyzer_dataflow.erl
@@ -1,8 +1,4 @@
%% -*- erlang-indent-level: 2 -*-
-%%--------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -15,9 +11,6 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
%%%-------------------------------------------------------------------
%%% File : dialyzer_dataflow.erl
@@ -43,7 +36,6 @@
-include("dialyzer.hrl").
-%%-import(helper, %% 'helper' could be any module doing sanity checks...
-import(erl_types,
[t_inf/2, t_inf/3, t_inf_lists/2, t_inf_lists/3,
t_inf_lists/3, t_is_equal/2, t_is_subtype/2, t_subtract/2,
@@ -72,7 +64,7 @@
t_tuple/0, t_tuple/1, t_tuple_args/1, t_tuple_args/2,
t_tuple_subtypes/2,
t_unit/0, t_unopaque/2,
- t_map/1
+ t_map/0, t_map/1, t_is_singleton/2
]).
%%-define(DEBUG, true).
@@ -99,25 +91,35 @@
-define(BITS, 128).
--record(state, {callgraph :: dialyzer_callgraph:callgraph(),
- codeserver :: dialyzer_codeserver:codeserver(),
- envs :: env_tab(),
- fun_tab :: fun_tab(),
- fun_homes :: dict:dict(label(), mfa()),
- plt :: dialyzer_plt:plt(),
- opaques :: [type()],
+%% Types with comment 'race' are due to dialyzer_races.erl.
+-record(state, {callgraph :: dialyzer_callgraph:callgraph()
+ | 'undefined', % race
+ codeserver :: dialyzer_codeserver:codeserver()
+ | 'undefined', % race
+ envs :: env_tab()
+ | 'undefined', % race
+ fun_tab :: fun_tab()
+ | 'undefined', % race
+ fun_homes :: dict:dict(label(), mfa())
+ | 'undefined', % race
+ plt :: dialyzer_plt:plt()
+ | 'undefined', % race
+ opaques :: [type()]
+ | 'undefined', % race
races = dialyzer_races:new() :: dialyzer_races:races(),
records = dict:new() :: types(),
- tree_map :: dict:dict(label(), cerl:cerl()),
+ tree_map :: dict:dict(label(), cerl:cerl())
+ | 'undefined', % race
warning_mode = false :: boolean(),
warnings = [] :: [raw_warning()],
- work :: {[_], [_], sets:set()},
+ work :: {[_], [_], sets:set()}
+ | 'undefined', % race
module :: module(),
curr_fun :: curr_fun()
}).
--record(map, {dict = dict:new() :: type_tab(),
- subst = dict:new() :: subst_tab(),
+-record(map, {map = maps:new() :: type_tab(),
+ subst = maps:new() :: subst_tab(),
modified = [] :: [Key :: term()],
modified_stack = [] :: [{[Key :: term()],reference()}],
ref = undefined :: reference() | undefined}).
@@ -125,10 +127,10 @@
-type env_tab() :: dict:dict(label(), #map{}).
-type fun_entry() :: {Args :: [type()], RetType :: type()}.
-type fun_tab() :: dict:dict('top' | label(),
- {'not_handled', fun_entry()} | fun_entry()).
+ {'not_handled', fun_entry()} | fun_entry()).
-type key() :: label() | cerl:cerl().
--type type_tab() :: dict:dict(key(), type()).
--type subst_tab() :: dict:dict(key(), cerl:cerl()).
+-type type_tab() :: #{key() => type()}.
+-type subst_tab() :: #{key() => cerl:cerl()}.
%% Exported Types
@@ -136,7 +138,7 @@
%%--------------------------------------------------------------------
--type fun_types() :: dict:dict(label(), type()).
+-type fun_types() :: orddict:orddict(label(), type()).
-spec get_warnings(cerl:c_module(), dialyzer_plt:plt(),
dialyzer_callgraph:callgraph(),
@@ -189,19 +191,19 @@ analyze_loop(State) ->
{ArgTypes, IsCalled} = state__get_args_and_status(Fun, NewState1),
case not IsCalled of
true ->
- ?debug("Not handling (not called) ~w: ~s\n",
+ ?debug("Not handling (not called) ~w: ~ts\n",
[NewState1#state.curr_fun,
t_to_string(t_product(ArgTypes))]),
analyze_loop(NewState1);
false ->
case state__fun_env(Fun, NewState1) of
none ->
- ?debug("Not handling (no env) ~w: ~s\n",
+ ?debug("Not handling (no env) ~w: ~ts\n",
[NewState1#state.curr_fun,
t_to_string(t_product(ArgTypes))]),
analyze_loop(NewState1);
Map ->
- ?debug("Handling fun ~p: ~s\n",
+ ?debug("Handling fun ~p: ~ts\n",
[NewState1#state.curr_fun,
t_to_string(state__fun_type(Fun, NewState1))]),
Vars = cerl:fun_vars(Fun),
@@ -220,7 +222,7 @@ analyze_loop(State) ->
end,
{NewState4, _Map2, BodyType} =
traverse(Body, Map1, NewState3),
- ?debug("Done analyzing: ~w:~s\n",
+ ?debug("Done analyzing: ~w:~ts\n",
[NewState1#state.curr_fun,
t_to_string(t_fun(ArgTypes, BodyType))]),
NewState5 =
@@ -230,7 +232,7 @@ analyze_loop(State) ->
end,
NewState6 =
state__update_fun_entry(Fun, ArgTypes, BodyType, NewState5),
- ?debug("done adding stuff for ~w\n",
+ ?debug("done adding stuff for ~tw\n",
[state__lookup_name(get_label(Fun), State)]),
analyze_loop(NewState6)
end
@@ -332,8 +334,6 @@ traverse(Tree, Map, State) ->
handle_tuple(Tree, Map, State);
map ->
handle_map(Tree, Map, State);
- map_pair ->
- handle_map_pair(Tree, Map, State);
values ->
Elements = cerl:values_es(Tree),
{State1, Map1, EsType} = traverse_list(Elements, Map, State),
@@ -423,17 +423,35 @@ handle_apply(Tree, Map, State) ->
handle_apply_or_call(FunInfoList, Args, ArgTypes, Map, Tree, State) ->
None = t_none(),
+ %% Call-site analysis may be inaccurate and consider more funs than those that
+ %% are actually possible. If all of them are incorrect, then warnings can be
+ %% emitted. If at least one fun is ok, however, then no warning is emitted,
+ %% just in case the bad ones are not really possible. The last argument is
+ %% used for this, with the following encoding:
+ %% Initial value: {none, []}
+ %% First fun checked: {one, <List of warns>}
+ %% More funs checked: {many, <List of warns>}
+ %% A '{one, []}' can only become '{many, []}'.
+ %% If at any point a fun does not add warnings, then the list is also
+ %% replaced with an empty list.
handle_apply_or_call(FunInfoList, Args, ArgTypes, Map, Tree, State,
- [None || _ <- ArgTypes], None, false).
+ [None || _ <- ArgTypes], None, false, {none, []}).
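%% A minimal sketch of the accumulation described above; acc_warns/2 is a
%% hypothetical helper used only for illustration (the real logic is
%% inlined in the clauses below):
acc_warns({none, _}, NewWarnings) -> {one, NewWarnings};
acc_warns({_, []}, _NewWarnings)  -> {many, []};
acc_warns({_, _Old}, [])          -> {many, []};
acc_warns({_, Old}, NewWarnings)  -> {many, NewWarnings ++ Old}.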
handle_apply_or_call([{local, external}|Left], Args, ArgTypes, Map, Tree, State,
- _AccArgTypes, _AccRet, _HadExternal) ->
+ _AccArgTypes, _AccRet, _HadExternal, Warns) ->
+ {HowMany, _} = Warns,
+ NewHowMany =
+ case HowMany of
+ none -> one;
+ _ -> many
+ end,
+ NewWarns = {NewHowMany, []},
handle_apply_or_call(Left, Args, ArgTypes, Map, Tree, State,
- ArgTypes, t_any(), true);
+ ArgTypes, t_any(), true, NewWarns);
handle_apply_or_call([{TypeOfApply, {Fun, Sig, Contr, LocalRet}}|Left],
Args, ArgTypes, Map, Tree,
#state{opaques = Opaques} = State,
- AccArgTypes, AccRet, HadExternal) ->
+ AccArgTypes, AccRet, HadExternal, Warns) ->
Any = t_any(),
AnyArgs = [Any || _ <- Args],
GenSig = {AnyArgs, fun(_) -> t_any() end},
@@ -469,23 +487,23 @@ handle_apply_or_call([{TypeOfApply, {Fun, Sig, Contr, LocalRet}}|Left],
end,
?debug("--------------------------------------------------------\n", []),
- ?debug("Fun: ~p\n", [state__lookup_name(Fun, State)]),
+ ?debug("Fun: ~tp\n", [state__lookup_name(Fun, State)]),
?debug("Module ~p\n", [State#state.module]),
- ?debug("CArgs ~s\n", [erl_types:t_to_string(t_product(CArgs))]),
- ?debug("ArgTypes ~s\n", [erl_types:t_to_string(t_product(ArgTypes))]),
- ?debug("BifArgs ~p\n", [erl_types:t_to_string(t_product(BifArgs))]),
+ ?debug("CArgs ~ts\n", [erl_types:t_to_string(t_product(CArgs))]),
+ ?debug("ArgTypes ~ts\n", [erl_types:t_to_string(t_product(ArgTypes))]),
+ ?debug("BifArgs ~tp\n", [erl_types:t_to_string(t_product(BifArgs))]),
NewArgsSig = t_inf_lists(SigArgs, ArgTypes, Opaques),
- ?debug("SigArgs ~s\n", [erl_types:t_to_string(t_product(SigArgs))]),
- ?debug("NewArgsSig: ~s\n", [erl_types:t_to_string(t_product(NewArgsSig))]),
+ ?debug("SigArgs ~ts\n", [erl_types:t_to_string(t_product(SigArgs))]),
+ ?debug("NewArgsSig: ~ts\n", [erl_types:t_to_string(t_product(NewArgsSig))]),
NewArgsContract = t_inf_lists(CArgs, ArgTypes, Opaques),
- ?debug("NewArgsContract: ~s\n",
+ ?debug("NewArgsContract: ~ts\n",
[erl_types:t_to_string(t_product(NewArgsContract))]),
NewArgsBif = t_inf_lists(BifArgs, ArgTypes, Opaques),
- ?debug("NewArgsBif: ~s\n", [erl_types:t_to_string(t_product(NewArgsBif))]),
+ ?debug("NewArgsBif: ~ts\n", [erl_types:t_to_string(t_product(NewArgsBif))]),
NewArgTypes0 = t_inf_lists(NewArgsSig, NewArgsContract),
NewArgTypes = t_inf_lists(NewArgTypes0, NewArgsBif, Opaques),
- ?debug("NewArgTypes ~s\n", [erl_types:t_to_string(t_product(NewArgTypes))]),
+ ?debug("NewArgTypes ~ts\n", [erl_types:t_to_string(t_product(NewArgTypes))]),
?debug("\n", []),
BifRet = BifRange(NewArgTypes),
@@ -493,18 +511,18 @@ handle_apply_or_call([{TypeOfApply, {Fun, Sig, Contr, LocalRet}}|Left],
RetWithoutContr = t_inf(SigRange, BifRet),
RetWithoutLocal = t_inf(ContrRet, RetWithoutContr),
- ?debug("RetWithoutContr: ~s\n",[erl_types:t_to_string(RetWithoutContr)]),
- ?debug("RetWithoutLocal: ~s\n", [erl_types:t_to_string(RetWithoutLocal)]),
- ?debug("BifRet: ~s\n", [erl_types:t_to_string(BifRange(NewArgTypes))]),
- ?debug("SigRange: ~s\n", [erl_types:t_to_string(SigRange)]),
- ?debug("ContrRet: ~s\n", [erl_types:t_to_string(CRange(NewArgTypes))]),
- ?debug("LocalRet: ~s\n", [erl_types:t_to_string(LocalRet)]),
+ ?debug("RetWithoutContr: ~ts\n",[erl_types:t_to_string(RetWithoutContr)]),
+ ?debug("RetWithoutLocal: ~ts\n", [erl_types:t_to_string(RetWithoutLocal)]),
+ ?debug("BifRet: ~ts\n", [erl_types:t_to_string(BifRange(NewArgTypes))]),
+ ?debug("SigRange: ~ts\n", [erl_types:t_to_string(SigRange)]),
+ ?debug("ContrRet: ~ts\n", [erl_types:t_to_string(ContrRet)]),
+ ?debug("LocalRet: ~ts\n", [erl_types:t_to_string(LocalRet)]),
State1 =
case is_race_analysis_enabled(State) of
true ->
Ann = cerl:get_ann(Tree),
- File = get_file(Ann),
+ File = get_file(Ann, State),
Line = abs(get_line(Ann)),
dialyzer_races:store_race_call(Fun, ArgTypes, Args,
{File, Line}, State);
@@ -578,17 +596,33 @@ handle_apply_or_call([{TypeOfApply, {Fun, Sig, Contr, LocalRet}}|Left],
false -> t_inf(RetWithoutLocal, LocalRet)
end,
NewAccRet = t_sup(AccRet, TotalRet),
- ?debug("NewAccRet: ~s\n", [t_to_string(NewAccRet)]),
+ ?debug("NewAccRet: ~ts\n", [t_to_string(NewAccRet)]),
+ {NewWarnings, State4} = state__remove_added_warnings(State, State3),
+ {HowMany, OldWarnings} = Warns,
+ NewWarns =
+ case HowMany of
+ none -> {one, NewWarnings};
+ _ ->
+ case OldWarnings =:= [] of
+ true -> {many, []};
+ false ->
+ case NewWarnings =:= [] of
+ true -> {many, []};
+ false -> {many, NewWarnings ++ OldWarnings}
+ end
+ end
+ end,
handle_apply_or_call(Left, Args, ArgTypes, Map, Tree,
- State3, NewAccArgTypes, NewAccRet, HadExternal);
+ State4, NewAccArgTypes, NewAccRet, HadExternal, NewWarns);
handle_apply_or_call([], Args, _ArgTypes, Map, _Tree, State,
- AccArgTypes, AccRet, HadExternal) ->
+ AccArgTypes, AccRet, HadExternal, {_, Warnings}) ->
+ State1 = state__add_warnings(Warnings, State),
case HadExternal of
false ->
NewMap = enter_type_lists(Args, AccArgTypes, Map),
- {State, NewMap, AccRet};
+ {State1, NewMap, AccRet};
true ->
- {had_external, State}
+ {had_external, State1}
end.
apply_fail_reason(FailedSig, FailedBif, FailedContract) ->
@@ -937,12 +971,21 @@ handle_case(Tree, Map, State) ->
false -> State1
end,
Map2 = join_maps_begin(Map1),
- {MapList, State3, Type} =
+ {MapList, State3, Type, Warns} =
handle_clauses(Clauses, Arg, ArgType, ArgType, State2,
- [], Map2, [], []),
+ [], Map2, [], [], []),
+ %% Non-Erlang BEAM languages, such as Elixir, expand language constructs
+ %% into case statements. In that case, we do not want to warn on
+ %% individual clauses not matching unless none of them can.
+ SupressForced = is_compiler_generated(cerl:get_ann(Tree))
+ andalso not (t_is_none(Type)),
+ State4 = lists:foldl(fun({T,R,M,F}, S) ->
+ state__add_warning(
+ S,T,R,M,F andalso (not SupressForced))
+ end, State3, Warns),
Map3 = join_maps_end(MapList, Map2),
debug_pp_map(Map3),
- {State3, Map3, Type}
+ {State4, Map3, Type}
end.
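%% A minimal sketch of the suppression rule applied by the fold above; the
%% names are illustrative and not part of this module. A clause warning is
%% forced only when the case is hand-written or when no clause can match.
force_clause_warning(Force, CompilerGenerated, SomeClauseMatches) ->
  Force andalso not (CompilerGenerated andalso SomeClauseMatches).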
%%----------------------------------------
@@ -1041,22 +1084,24 @@ handle_receive(Tree, Map, State) ->
RaceListSize + 1, State);
false -> State
end,
- {MapList, State2, ReceiveType} =
+ {MapList, State2, ReceiveType, Warns} =
handle_clauses(Clauses, ?no_arg, t_any(), t_any(), State1, [], Map,
- [], []),
+ [], [], []),
+ State3 = lists:foldl(fun({T,R,M,F}, S) -> state__add_warning(S,T,R,M,F) end,
+ State2, Warns),
Map1 = join_maps(MapList, Map),
- {State3, Map2, TimeoutType} = traverse(Timeout, Map1, State2),
- Opaques = State3#state.opaques,
+ {State4, Map2, TimeoutType} = traverse(Timeout, Map1, State3),
+ Opaques = State4#state.opaques,
case (t_is_atom(TimeoutType, Opaques) andalso
(t_atom_vals(TimeoutType, Opaques) =:= ['infinity'])) of
true ->
- {State3, Map2, ReceiveType};
+ {State4, Map2, ReceiveType};
false ->
Action = cerl:receive_action(Tree),
- {State4, Map3, ActionType} = traverse(Action, Map, State3),
+ {State5, Map3, ActionType} = traverse(Action, Map, State4),
Map4 = join_maps([Map3, Map1], Map),
Type = t_sup(ReceiveType, ActionType),
- {State4, Map4, Type}
+ {State5, Map4, Type}
end.
%%----------------------------------------
@@ -1092,15 +1137,54 @@ handle_try(Tree, Map, State) ->
%%----------------------------------------
handle_map(Tree,Map,State) ->
- Pairs = cerl:map_es(Tree),
- {State1, Map1, TypePairs} = traverse_list(Pairs,Map,State),
- {State1, Map1, t_map(TypePairs)}.
+ Pairs = cerl:map_es(Tree),
+ Arg = cerl:map_arg(Tree),
+ {State1, Map1, ArgType} = traverse(Arg, Map, State),
+ ArgType1 = t_inf(t_map(), ArgType),
+ case t_is_none_or_unit(ArgType1) of
+ true ->
+ {State1, Map1, ArgType1};
+ false ->
+ {State2, Map2, TypePairs, ExactKeys} =
+ traverse_map_pairs(Pairs, Map1, State1, t_none(), [], []),
+ InsertPair = fun({KV,assoc,_},Acc) -> erl_types:t_map_put(KV,Acc);
+ ({KV,exact,KVTree},Acc) ->
+ case t_is_none(T=erl_types:t_map_update(KV,Acc)) of
+ true -> throw({none, Acc, KV, KVTree});
+ false -> T
+ end
+ end,
+ try lists:foldl(InsertPair, ArgType1, TypePairs)
+ of ResT ->
+ BindT = t_map([{K, t_any()} || K <- ExactKeys]),
+ case bind_pat_vars_reverse([Arg], [BindT], [], Map2, State2) of
+ {error, _, _, _, _} -> {State2, Map2, ResT};
+ {Map3, _} -> {State2, Map3, ResT}
+ end
+ catch {none, MapType, {K,_}, KVTree} ->
+ Msg2 = {map_update, [format_type(MapType, State2),
+ format_type(K, State2)]},
+ {state__add_warning(State2, ?WARN_MAP_CONSTRUCTION, KVTree, Msg2),
+ Map2, t_none()}
+ end
+ end.
-handle_map_pair(Tree,Map,State) ->
- Key = cerl:map_pair_key(Tree),
- Val = cerl:map_pair_val(Tree),
+traverse_map_pairs([], Map, State, _ShadowKeys, PairAcc, KeyAcc) ->
+ {State, Map, lists:reverse(PairAcc), KeyAcc};
+traverse_map_pairs([Pair|Pairs], Map, State, ShadowKeys, PairAcc, KeyAcc) ->
+ Key = cerl:map_pair_key(Pair),
+ Val = cerl:map_pair_val(Pair),
+ Op = cerl:map_pair_op(Pair),
{State1, Map1, [K,V]} = traverse_list([Key,Val],Map,State),
- {State1, Map1, {K,V}}.
+ KeyAcc1 =
+ case cerl:is_literal(Op) andalso cerl:concrete(Op) =:= exact andalso
+ t_is_singleton(K, State#state.opaques) andalso
+ t_is_none(t_inf(ShadowKeys, K)) of
+ true -> [K|KeyAcc];
+ false -> KeyAcc
+ end,
+ traverse_map_pairs(Pairs, Map1, State1, t_sup(K, ShadowKeys),
+ [{{K,V},cerl:concrete(Op),Pair}|PairAcc], KeyAcc1).
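%% A self-contained illustration (assumed example, not part of this module)
%% of the assoc/exact distinction modelled by the fold in handle_map/3:
%% '=>' may introduce a new key, while ':=' requires the key to be present,
%% so an exact update of a map that cannot contain the key becomes t_none()
%% and triggers a map_update warning.
map_update_example() ->
  M0 = #{a => 1},      %% assoc ('=>'): inserts key 'a'
  M1 = M0#{a := 2},    %% exact (':='): ok, 'a' is already present
  catch M1#{b := 3}.   %% exact on a missing key fails with {badkey,b}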
%%----------------------------------------
@@ -1120,7 +1204,7 @@ handle_tuple(Tree, Map, State) ->
TagVal = cerl:atom_val(Tag),
case state__lookup_record(TagVal, length(Left), State1) of
error -> {State1, Map1, TupleType};
- {ok, RecType} ->
+ {ok, RecType, FieldNames} ->
InfTupleType = t_inf(RecType, TupleType),
case t_is_none(InfTupleType) of
true ->
@@ -1141,10 +1225,13 @@ handle_tuple(Tree, Map, State) ->
Tree, Msg),
{State2, Map1, t_none()};
{error, opaque, ErrorPat, ErrorType, OpaqueType} ->
+ OpaqueStr = format_type(OpaqueType, State1),
+ Name = field_name(Elements, ErrorPat, FieldNames),
Msg = {opaque_match,
- [format_patterns(ErrorPat),
- format_type(ErrorType, State1),
- format_type(OpaqueType, State1)]},
+ ["record field" ++ Name ++
+ " declared to be of type " ++
+ format_type(ErrorType, State1),
+ OpaqueStr, OpaqueStr]},
State2 = state__add_warning(State1, ?WARN_OPAQUE,
Tree, Msg),
{State2, Map1, t_none()};
@@ -1161,11 +1248,20 @@ handle_tuple(Tree, Map, State) ->
end
end.
+field_name(Elements, ErrorPat, FieldNames) ->
+ try
+ [Pat] = ErrorPat,
+ Take = lists:takewhile(fun(X) -> X =/= Pat end, Elements),
+ " " ++ format_atom(lists:nth(length(Take), FieldNames))
+ catch
+ _:_ -> ""
+ end.
+
%%----------------------------------------
%% Clauses
%%
handle_clauses([C|Left], Arg, ArgType, OrigArgType, State, CaseTypes, MapIn,
- Acc, ClauseAcc) ->
+ Acc, ClauseAcc, WarnAcc0) ->
IsRaceAnalysisEnabled = is_race_analysis_enabled(State),
State1 =
case IsRaceAnalysisEnabled of
@@ -1178,8 +1274,8 @@ handle_clauses([C|Left], Arg, ArgType, OrigArgType, State, CaseTypes, MapIn,
State);
false -> State
end,
- {State2, ClauseMap, BodyType, NewArgType} =
- do_clause(C, Arg, ArgType, OrigArgType, MapIn, State1),
+ {State2, ClauseMap, BodyType, NewArgType, WarnAcc} =
+ do_clause(C, Arg, ArgType, OrigArgType, MapIn, State1, WarnAcc0),
{NewClauseAcc, State3} =
case IsRaceAnalysisEnabled of
true ->
@@ -1197,9 +1293,9 @@ handle_clauses([C|Left], Arg, ArgType, OrigArgType, State, CaseTypes, MapIn,
false -> {[BodyType|CaseTypes], [ClauseMap|Acc]}
end,
handle_clauses(Left, Arg, NewArgType, OrigArgType, State3,
- NewCaseTypes, MapIn, NewAcc, NewClauseAcc);
+ NewCaseTypes, MapIn, NewAcc, NewClauseAcc, WarnAcc);
handle_clauses([], _Arg, _ArgType, _OrigArgType, State, CaseTypes, _MapIn, Acc,
- ClauseAcc) ->
+ ClauseAcc, WarnAcc) ->
State1 =
case is_race_analysis_enabled(State) of
true ->
@@ -1209,9 +1305,9 @@ handle_clauses([], _Arg, _ArgType, _OrigArgType, State, CaseTypes, _MapIn, Acc,
RaceListSize + 1, State);
false -> State
end,
- {lists:reverse(Acc), State1, t_sup(CaseTypes)}.
+ {lists:reverse(Acc), State1, t_sup(CaseTypes), WarnAcc}.
-do_clause(C, Arg, ArgType0, OrigArgType, Map, State) ->
+do_clause(C, Arg, ArgType0, OrigArgType, Map, State, Warns) ->
Pats = cerl:clause_pats(C),
Guard = cerl:clause_guard(C),
Body = cerl:clause_body(C),
@@ -1239,17 +1335,15 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map, State) ->
end,
case BindRes of
{error, ErrorType, NewPats, Type, OpaqueTerm} ->
- ?debug("Failed binding pattern: ~s\nto ~s\n",
+ ?debug("Failed binding pattern: ~ts\nto ~ts\n",
[cerl_prettypr:format(C), format_type(ArgType0, State1)]),
case state__warning_mode(State1) of
false ->
- {State1, Map, t_none(), ArgType0};
+ {State1, Map, t_none(), ArgType0, Warns};
true ->
{Msg, Force} =
case t_is_none(ArgType0) of
true ->
- PatString = format_patterns(Pats),
- PatTypes = [PatString, format_type(OrigArgType, State1)],
%% See if this is covered by an earlier clause or if it
%% simply cannot match
OrigArgTypes =
@@ -1257,17 +1351,27 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map, State) ->
true -> Any = t_any(), [Any || _ <- Pats];
false -> t_to_tlist(OrigArgType)
end,
+ PatString = format_patterns(Pats),
+ ArgTypeString = format_type(OrigArgType, State1),
+ BindResOrig =
+ bind_pat_vars(Pats, OrigArgTypes, [], Map1, State1),
Tag =
- case bind_pat_vars(Pats, OrigArgTypes, [], Map1, State1) of
+ case BindResOrig of
{error, bind, _, _, _} -> pattern_match;
{error, record, _, _, _} -> record_match;
{error, opaque, _, _, _} -> opaque_match;
{_, _} -> pattern_match_cov
end,
- {{Tag, PatTypes}, false};
+ PatTypes = case BindResOrig of
+ {error, opaque, _, _, OpaqueType} ->
+ [PatString, ArgTypeString,
+ format_type(OpaqueType, State1)];
+ _ -> [PatString, ArgTypeString]
+ end,
+ {{Tag, PatTypes}, false};
false ->
%% Try to find out if this is a default clause in a list
- %% comprehension and supress this. A real Hack(tm)
+ %% comprehension and suppress this. A real Hack(tm)
Force0 =
case is_compiler_generated(cerl:get_ann(C)) of
true ->
@@ -1323,8 +1427,7 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map, State) ->
{record_match, _} -> ?WARN_MATCHING;
{pattern_match_cov, _} -> ?WARN_MATCHING
end,
- {state__add_warning(State1, WarnType, C, Msg, Force),
- Map, t_none(), ArgType0}
+ {State1, Map, t_none(), ArgType0, [{WarnType, C, Msg, Force}|Warns]}
end;
{Map2, PatTypes} ->
Map3 =
@@ -1348,7 +1451,7 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map, State) ->
end,
case bind_guard(Guard, Map3, State1) of
{error, Reason} ->
- ?debug("Failed guard: ~s\n",
+ ?debug("Failed guard: ~ts\n",
[cerl_prettypr:format(C, [{hook, cerl_typean:pp_hook()}])]),
PatString = format_patterns(Pats),
DefaultMsg =
@@ -1357,9 +1460,9 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map, State) ->
false ->
{guard_fail_pat, [PatString, format_type(ArgType0, State1)]}
end,
- State2 =
+ Warn =
case Reason of
- none -> state__add_warning(State1, ?WARN_MATCHING, C, DefaultMsg);
+ none -> {?WARN_MATCHING, C, DefaultMsg, false};
{FailGuard, Msg} ->
case is_compiler_generated(cerl:get_ann(FailGuard)) of
false ->
@@ -1368,15 +1471,15 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map, State) ->
{neg_guard_fail, _} -> ?WARN_MATCHING;
{opaque_guard, _} -> ?WARN_OPAQUE
end,
- state__add_warning(State1, WarnType, FailGuard, Msg);
+ {WarnType, FailGuard, Msg, false};
true ->
- state__add_warning(State1, ?WARN_MATCHING, C, Msg)
+ {?WARN_MATCHING, C, Msg, false}
end
end,
- {State2, Map, t_none(), NewArgType};
+ {State1, Map, t_none(), NewArgType, [Warn|Warns]};
Map4 ->
{RetState, RetMap, BodyType} = traverse(Body, Map4, State1),
- {RetState, RetMap, BodyType, NewArgType}
+ {RetState, RetMap, BodyType, NewArgType, Warns}
end
end.
@@ -1429,13 +1532,15 @@ bind_pat_vars_reverse(Pats, Types, Acc, Map, State) ->
end.
bind_pat_vars([Pat|PatLeft], [Type|TypeLeft], Acc, Map, State, Rev) ->
- ?debug("Binding pat: ~w to ~s\n", [cerl:type(Pat), format_type(Type, State)]
+ ?debug("Binding pat: ~tw to ~ts\n", [cerl:type(Pat), format_type(Type, State)]
),
Opaques = State#state.opaques,
{NewMap, TypeOut} =
case cerl:type(Pat) of
alias ->
- AliasPat = cerl:alias_pat(Pat),
+ %% Map patterns are more permissive than the type of their literal. We
+ %% must unfold AliasPat if it is a literal.
+ AliasPat = dialyzer_utils:refold_pattern(cerl:alias_pat(Pat)),
Var = cerl:alias_var(Pat),
Map1 = enter_subst(Var, AliasPat, Map),
{Map2, [PatType]} = bind_pat_vars([AliasPat], [Type], [],
@@ -1476,14 +1581,59 @@ bind_pat_vars([Pat|PatLeft], [Type|TypeLeft], Acc, Map, State, Rev) ->
{Map1, t_cons(HdType, TlType)}
end;
literal ->
- Literal = literal_type(Pat),
- case t_is_none(t_inf(Literal, Type, Opaques)) of
+ Pat0 = dialyzer_utils:refold_pattern(Pat),
+ case cerl:is_literal(Pat0) of
true ->
- bind_opaque_pats(Literal, Type, Pat, State);
- false -> {Map, Literal}
+ Literal = literal_type(Pat),
+ case t_is_none(t_inf(Literal, Type, Opaques)) of
+ true ->
+ bind_opaque_pats(Literal, Type, Pat, State);
+ false -> {Map, Literal}
+ end;
+ false ->
+ %% Retry with the unfolded pattern
+ {Map1, [PatType]}
+ = bind_pat_vars([Pat0], [Type], [], Map, State, Rev),
+ {Map1, PatType}
end;
map ->
- {Map, t_map([])};
+ MapT = t_inf(Type, t_map(), Opaques),
+ case t_is_none(MapT) of
+ true ->
+ bind_opaque_pats(t_map(), Type, Pat, State);
+ false ->
+ case Rev of
+ %% TODO: Reverse matching (propagating a matched subset back to a value)
+ true -> {Map, MapT};
+ false ->
+ FoldFun =
+ fun(Pair, {MapAcc, ListAcc}) ->
+ %% Only exact (:=) can appear in patterns
+ exact = cerl:concrete(cerl:map_pair_op(Pair)),
+ Key = cerl:map_pair_key(Pair),
+ KeyType =
+ case cerl:type(Key) of
+ var ->
+ case state__lookup_type_for_letrec(Key, State) of
+ error -> lookup_type(Key, MapAcc);
+ {ok, RecType} -> RecType
+ end;
+ literal ->
+ literal_type(Key)
+ end,
+ Bind = erl_types:t_map_get(KeyType, MapT),
+ {MapAcc1, [ValType]} =
+ bind_pat_vars([cerl:map_pair_val(Pair)],
+ [Bind], [], MapAcc, State, Rev),
+ case t_is_singleton(KeyType, Opaques) of
+ true -> {MapAcc1, [{KeyType, ValType}|ListAcc]};
+ false -> {MapAcc1, ListAcc}
+ end
+ end,
+ {Map1, Pairs} = lists:foldl(FoldFun, {Map, []}, cerl:map_es(Pat)),
+ {Map1, t_inf(MapT, t_map(Pairs))}
+ end
+ end;
tuple ->
Es = cerl:tuple_es(Pat),
{TypedRecord, Prototype} =
@@ -1495,7 +1645,7 @@ bind_pat_vars([Pat|PatLeft], [Type|TypeLeft], Acc, Map, State, Rev) ->
TagAtom = cerl:atom_val(Tag),
case state__lookup_record(TagAtom, length(Left), State) of
error -> {false, t_tuple(length(Es))};
- {ok, Record} ->
+ {ok, Record, _FieldNames} ->
[_Head|AnyTail] = [t_any() || _ <- Es],
UntypedRecord = t_tuple([t_atom(TagAtom)|AnyTail]),
{not t_is_equal(Record, UntypedRecord), Record}
@@ -1672,7 +1822,7 @@ bind_opaque_pats(GenType, Type, Pat, State) ->
%%
bind_guard(Guard, Map, State) ->
- try bind_guard(Guard, Map, dict:new(), pos, State) of
+ try bind_guard(Guard, Map, maps:new(), pos, State) of
{Map1, _Type} -> Map1
catch
throw:{fail, Warning} -> {error, Warning};
@@ -1680,7 +1830,7 @@ bind_guard(Guard, Map, State) ->
end.
bind_guard(Guard, Map, Env, Eval, State) ->
- ?debug("Handling ~w guard: ~s\n",
+ ?debug("Handling ~tw guard: ~ts\n",
[Eval, cerl_prettypr:format(Guard, [{noann, true}])]),
case cerl:type(Guard) of
binary ->
@@ -1700,20 +1850,63 @@ bind_guard(Guard, Map, Env, Eval, State) ->
'try' ->
Arg = cerl:try_arg(Guard),
[Var] = cerl:try_vars(Guard),
+ EVars = cerl:try_evars(Guard),
%%?debug("Storing: ~w\n", [Var]),
- NewEnv = dict:store(get_label(Var), Arg, Env),
- bind_guard(cerl:try_body(Guard), Map, NewEnv, Eval, State);
+ Map1 = join_maps_begin(Map),
+ Map2 = mark_as_fresh(EVars, Map1),
+ %% Visit handler first so we know if it should be ignored
+ {{HandlerMap, HandlerType}, HandlerE} =
+ try {bind_guard(cerl:try_handler(Guard), Map2, Env, Eval, State), none}
+ catch throw:HE ->
+ {{Map2, t_none()}, HE}
+ end,
+ BodyEnv = maps:put(get_label(Var), Arg, Env),
+ Wanted = case Eval of pos -> t_atom(true); neg -> t_atom(false);
+ dont_know -> t_any() end,
+ case t_is_none(t_inf(HandlerType, Wanted)) of
+ %% Handler won't save us; pretend it does not exist
+ true -> bind_guard(cerl:try_body(Guard), Map, BodyEnv, Eval, State);
+ false ->
+ {{BodyMap, BodyType}, BodyE} =
+ try {bind_guard(cerl:try_body(Guard), Map1, BodyEnv,
+ Eval, State), none}
+ catch throw:BE ->
+ {{Map1, t_none()}, BE}
+ end,
+ Map3 = join_maps_end([BodyMap, HandlerMap], Map1),
+ case t_is_none(Sup = t_sup(BodyType, HandlerType)) of
+ true ->
+ %% Pick a reason. N.B. We assume that the handler is always
+ %% compiler-generated if the body is; that way, we won't need to
+ %% check.
+ Fatality = case {BodyE, HandlerE} of
+ {{fatal_fail, _}, _} -> fatal_fail;
+ {_, {fatal_fail, _}} -> fatal_fail;
+ _ -> fail
+ end,
+ throw({Fatality,
+ case {BodyE, HandlerE} of
+ {{_, Rsn}, _} when Rsn =/= none -> Rsn;
+ {_, {_,Rsn}} -> Rsn;
+ _ -> none
+ end});
+ false -> {Map3, Sup}
+ end
+ end;
tuple ->
Es0 = cerl:tuple_es(Guard),
{Map1, Es} = bind_guard_list(Es0, Map, Env, dont_know, State),
{Map1, t_tuple(Es)};
map ->
- {Map, t_map([])};
+ case Eval of
+ dont_know -> handle_guard_map(Guard, Map, Env, State);
+ _PosOrNeg -> {Map, t_none()} %% Map exprs do not produce bools
+ end;
'let' ->
Arg = cerl:let_arg(Guard),
[Var] = cerl:let_vars(Guard),
%%?debug("Storing: ~w\n", [Var]),
- NewEnv = dict:store(get_label(Var), Arg, Env),
+ NewEnv = maps:put(get_label(Var), Arg, Env),
bind_guard(cerl:let_body(Guard), Map, NewEnv, Eval, State);
values ->
Es = cerl:values_es(Guard),
@@ -1722,7 +1915,7 @@ bind_guard(Guard, Map, Env, Eval, State) ->
{Map, Type};
var ->
?debug("Looking for var(~w)...", [cerl_trees:get_label(Guard)]),
- case dict:find(get_label(Guard), Env) of
+ case maps:find(get_label(Guard), Env) of
error ->
?debug("Did not find it\n", []),
Type = lookup_type(Guard, Map),
@@ -1751,7 +1944,7 @@ handle_guard_call(Guard, Map, Env, Eval, State) ->
{erlang, F, 1} when F =:= is_atom; F =:= is_boolean;
F =:= is_binary; F =:= is_bitstring;
F =:= is_float; F =:= is_function;
- F =:= is_integer; F =:= is_list;
+ F =:= is_integer; F =:= is_list; F =:= is_map;
F =:= is_number; F =:= is_pid; F =:= is_port;
F =:= is_reference; F =:= is_tuple ->
handle_guard_type_test(Guard, F, Map, Env, Eval, State);
@@ -1816,7 +2009,7 @@ handle_guard_type_test(Guard, F, Map, Env, Eval, State) ->
?debug("Type test: ~w failed\n", [F]),
signal_guard_fail(Eval, Guard, [ArgType], State);
{ok, NewArgType, Ret} ->
- ?debug("Type test: ~w succeeded, NewType: ~s, Ret: ~s\n",
+ ?debug("Type test: ~w succeeded, NewType: ~ts, Ret: ~ts\n",
[F, t_to_string(NewArgType), t_to_string(Ret)]),
{enter_type(Arg, NewArgType, Map1), Ret}
end.
@@ -1831,6 +2024,7 @@ bind_type_test(Eval, TypeTest, ArgType, State) ->
is_function -> t_fun();
is_integer -> t_integer();
is_list -> t_maybe_improper_list();
+ is_map -> t_map();
is_number -> t_number();
is_pid -> t_pid();
is_port -> t_port();
@@ -1979,7 +2173,7 @@ handle_guard_is_record(Guard, Map, Env, Eval, State) ->
TupleType =
case state__lookup_record(Tag, ArityMin1, State) of
error -> Tuple;
- {ok, Prototype} -> Prototype
+ {ok, Prototype, _FieldNames} -> Prototype
end,
Type = t_inf(TupleType, RecType, State#state.opaques),
case t_is_none(Type) of
@@ -2101,8 +2295,8 @@ handle_guard_eqeq(Guard, Map, Env, Eval, State) ->
bind_eqeq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State) ->
{Map1, Type1} = bind_guard(Arg1, Map, Env, dont_know, State),
{Map2, Type2} = bind_guard(Arg2, Map1, Env, dont_know, State),
- ?debug("Types are:~s =:= ~s\n", [t_to_string(Type1),
- t_to_string(Type2)]),
+ ?debug("Types are:~ts =:= ~ts\n", [t_to_string(Type1),
+ t_to_string(Type2)]),
Opaques = State#state.opaques,
Inf = t_inf(Type1, Type2, Opaques),
case t_is_none(Inf) of
@@ -2339,6 +2533,30 @@ bind_guard_list([G|Gs], Map, Env, Eval, State, Acc) ->
bind_guard_list([], Map, _Env, _Eval, _State, Acc) ->
{Map, lists:reverse(Acc)}.
+handle_guard_map(Guard, Map, Env, State) ->
+ Pairs = cerl:map_es(Guard),
+ Arg = cerl:map_arg(Guard),
+ {Map1, ArgType0} = bind_guard(Arg, Map, Env, dont_know, State),
+ ArgType1 = t_inf(t_map(), ArgType0),
+ case t_is_none_or_unit(ArgType1) of
+ true -> {Map1, t_none()};
+ false ->
+ {Map2, TypePairs} = bind_guard_map_pairs(Pairs, Map1, Env, State, []),
+ {Map2, lists:foldl(fun({KV,assoc},Acc) -> erl_types:t_map_put(KV,Acc);
+ ({KV,exact},Acc) -> erl_types:t_map_update(KV,Acc)
+ end, ArgType1, TypePairs)}
+ end.
+
+bind_guard_map_pairs([], Map, _Env, _State, PairAcc) ->
+ {Map, lists:reverse(PairAcc)};
+bind_guard_map_pairs([Pair|Pairs], Map, Env, State, PairAcc) ->
+ Key = cerl:map_pair_key(Pair),
+ Val = cerl:map_pair_val(Pair),
+ Op = cerl:map_pair_op(Pair),
+ {Map1, [K,V]} = bind_guard_list([Key,Val],Map,Env,dont_know,State),
+ bind_guard_map_pairs(Pairs, Map1, Env, State,
+ [{{K,V},cerl:concrete(Op)}|PairAcc]).
+
-type eval() :: 'pos' | 'neg' | 'dont_know'.
-spec signal_guard_fail(eval(), cerl:c_call(), [type()],
@@ -2405,13 +2623,15 @@ bind_guard_case_clauses(Arg, Clauses, Map0, Env, Eval, State) ->
Map = join_maps_begin(Map0),
{GenMap, GenArgType} = bind_guard(Arg, Map, Env, dont_know, State),
bind_guard_case_clauses(GenArgType, GenMap, Arg, Clauses1, Map, Env, Eval,
- t_none(), [], State).
+ t_none(), [], [], State).
filter_fail_clauses([Clause|Left]) ->
case (cerl:clause_pats(Clause) =:= []) of
true ->
Body = cerl:clause_body(Clause),
- case cerl:is_literal(Body) andalso (cerl:concrete(Body) =:= fail) of
+ case cerl:is_literal(Body) andalso (cerl:concrete(Body) =:= fail) orelse
+ cerl:is_c_primop(Body) andalso
+ (cerl:atom_val(cerl:primop_name(Body)) =:= match_fail) of
true -> filter_fail_clauses(Left);
false -> [Clause|filter_fail_clauses(Left)]
end;
@@ -2422,7 +2642,7 @@ filter_fail_clauses([]) ->
[].
bind_guard_case_clauses(GenArgType, GenMap, ArgExpr, [Clause|Left],
- Map, Env, Eval, AccType, AccMaps, State) ->
+ Map, Env, Eval, AccType, AccMaps, Throws, State) ->
Pats = cerl:clause_pats(Clause),
{NewMap0, ArgType} =
case Pats of
@@ -2466,9 +2686,9 @@ bind_guard_case_clauses(GenArgType, GenMap, ArgExpr, [Clause|Left],
case (NewMap1 =:= none) orelse t_is_none(GenArgType) of
true ->
bind_guard_case_clauses(NewGenArgType, GenMap, ArgExpr, Left, Map, Env,
- Eval, AccType, AccMaps, State);
+ Eval, AccType, AccMaps, Throws, State);
false ->
- {NewAccType, NewAccMaps} =
+ {NewAccType, NewAccMaps, NewThrows} =
try
{NewMap2, GuardType} = bind_guard(Guard, NewMap1, Env, pos, State),
case t_is_none(t_inf(t_atom(true), GuardType)) of
@@ -2492,17 +2712,26 @@ bind_guard_case_clauses(GenArgType, GenMap, ArgExpr, [Clause|Left],
dont_know ->
ok
end,
- {t_sup(AccType, CType), [NewMap3|AccMaps]}
+ {t_sup(AccType, CType), [NewMap3|AccMaps], Throws}
catch
- throw:{fail, _What} -> {AccType, AccMaps}
+ throw:{fail, Reason} ->
+ Throws1 = case Reason of
+ none -> Throws;
+ _ -> Throws ++ [Reason]
+ end,
+ {AccType, AccMaps, Throws1}
end,
bind_guard_case_clauses(NewGenArgType, GenMap, ArgExpr, Left, Map, Env,
- Eval, NewAccType, NewAccMaps, State)
+ Eval, NewAccType, NewAccMaps, NewThrows, State)
end;
bind_guard_case_clauses(_GenArgType, _GenMap, _ArgExpr, [], Map, _Env, _Eval,
- AccType, AccMaps, _State) ->
+ AccType, AccMaps, Throws, _State) ->
case t_is_none(AccType) of
- true -> throw({fail, none});
+ true ->
+ case Throws of
+ [Throw|_] -> throw({fail, Throw});
+ [] -> throw({fail, none})
+ end;
false -> {join_maps_end(AccMaps, Map), AccType}
end.
@@ -2525,10 +2754,10 @@ join_maps_end(Maps, MapOut) ->
#map{ref = Ref, modified_stack = [{M1,R1} | S]} = MapOut,
true = lists:all(fun(M) -> M#map.ref =:= Ref end, Maps), % sanity
Keys0 = lists:usort(lists:append([M#map.modified || M <- Maps])),
- #map{dict = Dict, subst = Subst} = MapOut,
+ #map{map = Map, subst = Subst} = MapOut,
Keys = [Key ||
Key <- Keys0,
- dict:is_key(Key, Dict) orelse dict:is_key(Key, Subst)],
+ maps:is_key(Key, Map) orelse maps:is_key(Key, Subst)],
Out = case Maps of
[] -> join_maps(Maps, MapOut);
_ -> join_maps(Keys, Maps, MapOut)
@@ -2539,8 +2768,8 @@ join_maps_end(Maps, MapOut) ->
modified_stack = S}.
join_maps(Maps, MapOut) ->
- #map{dict = Dict, subst = Subst} = MapOut,
- Keys = ordsets:from_list(dict:fetch_keys(Dict) ++ dict:fetch_keys(Subst)),
+ #map{map = Map, subst = Subst} = MapOut,
+ Keys = ordsets:from_list(maps:keys(Map) ++ maps:keys(Subst)),
join_maps(Keys, Maps, MapOut).
join_maps(Keys, Maps, MapOut) ->
@@ -2569,11 +2798,11 @@ join_maps_one_key([], _Key, AccType) ->
-ifdef(DEBUG).
debug_join_check(Maps, MapOut, Out) ->
- #map{dict = Dict, subst = Subst} = Out,
- #map{dict = Dict2, subst = Subst2} = join_maps(Maps, MapOut),
- F = fun(D) -> lists:keysort(1, dict:to_list(D)) end,
+ #map{map = Map, subst = Subst} = Out,
+ #map{map = Map2, subst = Subst2} = join_maps(Maps, MapOut),
+ F = fun(D) -> lists:keysort(1, maps:to_list(D)) end,
[throw({bug, join_maps}) ||
- F(Dict) =/= F(Dict2) orelse F(Subst) =/= F(Subst2)].
+ F(Map) =/= F(Map2) orelse F(Subst) =/= F(Subst2)].
-else.
debug_join_check(_Maps, _MapOut, _Out) -> ok.
-endif.
@@ -2604,15 +2833,15 @@ enter_type(Key, Val, MS) ->
enter_type_lists(Keys, t_to_tlist(Val), MS)
end;
false ->
- #map{dict = Dict, subst = Subst} = MS,
+ #map{map = Map, subst = Subst} = MS,
KeyLabel = get_label(Key),
- case dict:find(KeyLabel, Subst) of
+ case maps:find(KeyLabel, Subst) of
{ok, NewKey} ->
?debug("Binding ~p to ~p\n", [KeyLabel, NewKey]),
enter_type(NewKey, Val, MS);
error ->
- ?debug("Entering ~p :: ~s\n", [KeyLabel, t_to_string(Val)]),
- case dict:find(KeyLabel, Dict) of
+ ?debug("Entering ~p :: ~ts\n", [KeyLabel, t_to_string(Val)]),
+ case maps:find(KeyLabel, Map) of
{ok, Value} ->
case erl_types:t_is_equal(Val, Value) of
true -> MS;
@@ -2624,13 +2853,14 @@ enter_type(Key, Val, MS) ->
end
end.
-store_map(Key, Val, #map{dict = Dict, ref = undefined} = Map) ->
- Map#map{dict = dict:store(Key, Val, Dict)};
-store_map(Key, Val, #map{dict = Dict, modified = Mod} = Map) ->
- Map#map{dict = dict:store(Key, Val, Dict), modified = [Key | Mod]}.
+store_map(Key, Val, #map{map = Map, ref = undefined} = MapRec) ->
+ MapRec#map{map = maps:put(Key, Val, Map)};
+store_map(Key, Val, #map{map = Map, modified = Mod} = MapRec) ->
+ MapRec#map{map = maps:put(Key, Val, Map), modified = [Key | Mod]}.
-enter_subst(Key, Val, #map{subst = Subst} = MS) ->
+enter_subst(Key, Val0, #map{subst = Subst} = MS) ->
KeyLabel = get_label(Key),
+ Val = dialyzer_utils:refold_pattern(Val0),
case cerl:is_literal(Val) of
true ->
store_map(KeyLabel, literal_type(Val), MS);
@@ -2639,7 +2869,7 @@ enter_subst(Key, Val, #map{subst = Subst} = MS) ->
false -> MS;
true ->
ValLabel = get_label(Val),
- case dict:find(ValLabel, Subst) of
+ case maps:find(ValLabel, Subst) of
{ok, NewVal} ->
enter_subst(Key, NewVal, MS);
error ->
@@ -2653,22 +2883,22 @@ enter_subst(Key, Val, #map{subst = Subst} = MS) ->
end.
store_subst(Key, Val, #map{subst = S, ref = undefined} = Map) ->
- Map#map{subst = dict:store(Key, Val, S)};
+ Map#map{subst = maps:put(Key, Val, S)};
store_subst(Key, Val, #map{subst = S, modified = Mod} = Map) ->
- Map#map{subst = dict:store(Key, Val, S), modified = [Key | Mod]}.
+ Map#map{subst = maps:put(Key, Val, S), modified = [Key | Mod]}.
-lookup_type(Key, #map{dict = Dict, subst = Subst}) ->
- lookup(Key, Dict, Subst, t_none()).
+lookup_type(Key, #map{map = Map, subst = Subst}) ->
+ lookup(Key, Map, Subst, t_none()).
-lookup(Key, Dict, Subst, AnyNone) ->
+lookup(Key, Map, Subst, AnyNone) ->
case cerl:is_literal(Key) of
true -> literal_type(Key);
false ->
Label = get_label(Key),
- case dict:find(Label, Subst) of
- {ok, NewKey} -> lookup(NewKey, Dict, Subst, AnyNone);
+ case maps:find(Label, Subst) of
+ {ok, NewKey} -> lookup(NewKey, Map, Subst, AnyNone);
error ->
- case dict:find(Label, Dict) of
+ case maps:find(Label, Map) of
{ok, Val} -> Val;
error -> AnyNone
end
@@ -2693,6 +2923,9 @@ mark_as_fresh([Tree|Left], Map) ->
bitstr ->
%% The Size field is not fresh.
{SubTrees1 -- [cerl:bitstr_size(Tree)], Map};
+ map_pair ->
+ %% The keys are not fresh
+ {SubTrees1 -- [cerl:map_pair_key(Tree)], Map};
var ->
{SubTrees1, enter_type(Tree, t_any(), Map)};
_ ->
@@ -2703,12 +2936,12 @@ mark_as_fresh([], Map) ->
Map.
-ifdef(DEBUG).
-debug_pp_map(#map{dict = Dict}=Map) ->
- Keys = dict:fetch_keys(Dict),
+debug_pp_map(#map{map = Map}=MapRec) ->
+ Keys = maps:keys(Map),
io:format("Map:\n", []),
lists:foreach(fun (Key) ->
- io:format("\t~w :: ~s\n",
- [Key, t_to_string(lookup_type(Key, Map))])
+ io:format("\t~w :: ~ts\n",
+ [Key, t_to_string(lookup_type(Key, MapRec))])
end, Keys),
ok.
-else.
@@ -2753,11 +2986,15 @@ is_call_to_send(Tree) ->
Arity = cerl:call_arity(Tree),
cerl:is_c_atom(Mod)
andalso cerl:is_c_atom(Name)
- andalso (cerl:atom_val(Name) =:= '!')
+ andalso is_send(cerl:atom_val(Name))
andalso (cerl:atom_val(Mod) =:= erlang)
andalso (Arity =:= 2)
end.
+is_send('!') -> true;
+is_send(send) -> true;
+is_send(_) -> false.
+
is_lc_simple_list(Tree, TreeType, State) ->
Opaques = State#state.opaques,
Ann = cerl:get_ann(Tree),
@@ -2854,23 +3091,36 @@ state__add_warning(#state{warnings = Warnings, warning_mode = true} = State,
Ann = cerl:get_ann(Tree),
case Force of
true ->
- WarningInfo = {get_file(Ann),
+ WarningInfo = {get_file(Ann, State),
abs(get_line(Ann)),
State#state.curr_fun},
Warn = {Tag, WarningInfo, Msg},
- ?debug("MSG ~s\n", [dialyzer:format_warning(Warn)]),
+ ?debug("MSG ~ts\n", [dialyzer:format_warning(Warn)]),
State#state{warnings = [Warn|Warnings]};
false ->
case is_compiler_generated(Ann) of
true -> State;
false ->
- WarningInfo = {get_file(Ann), get_line(Ann), State#state.curr_fun},
+ WarningInfo = {get_file(Ann, State),
+ get_line(Ann),
+ State#state.curr_fun},
Warn = {Tag, WarningInfo, Msg},
- ?debug("MSG ~s\n", [dialyzer:format_warning(Warn)]),
+ case Tag of
+ ?WARN_CONTRACT_RANGE -> ok;
+ _ -> ?debug("MSG ~ts\n", [dialyzer:format_warning(Warn)])
+ end,
State#state{warnings = [Warn|Warnings]}
end
end.
+state__remove_added_warnings(OldState, NewState) ->
+ #state{warnings = OldWarnings} = OldState,
+ #state{warnings = NewWarnings} = NewState,
+ {NewWarnings -- OldWarnings, NewState#state{warnings = OldWarnings}}.
+
+state__add_warnings(Warns, #state{warnings = Warnings} = State) ->
+ State#state{warnings = Warns ++ Warnings}.
+
-spec state__set_curr_fun(curr_fun(), state()) -> state().
state__set_curr_fun(undefined, State) ->
@@ -2981,7 +3231,8 @@ state__lookup_record(Tag, Arity, #state{records = Records}) ->
RecType =
t_tuple([t_atom(Tag)|
[FieldType || {_FieldName, _Abstr, FieldType} <- Fields]]),
- {ok, RecType};
+ FieldNames = [FieldName || {FieldName, _Abstr, _FieldType} <- Fields],
+ {ok, RecType, FieldNames};
error ->
error
end.
@@ -3066,7 +3317,9 @@ state__clean_not_called(#state{fun_tab = FunTab} = State) ->
state__all_fun_types(State) ->
#state{fun_tab = FunTab} = state__clean_not_called(State),
Tab1 = dict:erase(top, FunTab),
- dict:map(fun(_Fun, {Args, Ret}) -> t_fun(Args, Ret)end, Tab1).
+ List = [{Fun, t_fun(Args, Ret)} ||
+ {Fun, {Args, Ret}} <- dict:to_list(Tab1)],
+ orddict:from_list(List).
state__fun_type(Fun, #state{fun_tab = FunTab}) ->
Label =
@@ -3100,14 +3353,14 @@ state__update_fun_entry(Tree, ArgTypes, Out0,
SameOut = t_is_equal(OldOut, Out),
if
SameArgs, SameOut ->
- ?debug("Fixpoint for ~w: ~s\n",
+ ?debug("Fixpoint for ~tw: ~ts\n",
[state__lookup_name(Fun, State),
t_to_string(t_fun(ArgTypes, Out))]),
State;
true ->
%% Can only happen in self-recursive functions.
NewEntry = {OldArgTypes, Out},
- ?debug("New Entry for ~w: ~s\n",
+ ?debug("New Entry for ~tw: ~ts\n",
[state__lookup_name(Fun, State),
t_to_string(t_fun(OldArgTypes, Out))]),
NewFunTab = dict:store(Fun, NewEntry, FunTab),
@@ -3129,7 +3382,7 @@ state__add_work_from_fun(Tree, #state{callgraph = Callgraph,
|| MFA <- MFAList],
%% Must filter the result for results in this module.
FilteredList = [L || {ok, L} <- LabelList, dict:is_key(L, TreeMap)],
- ?debug("~w: Will try to add:~w\n",
+ ?debug("~tw: Will try to add:~tw\n",
[state__lookup_name(Label, State), MFAList]),
lists:foldl(fun(L, AccState) ->
state__add_work(L, AccState)
@@ -3176,7 +3429,7 @@ state__fun_info(Fun, #state{callgraph = CG, fun_tab = FunTab, plt = PLT}) ->
{not_handled, {_Args, Ret}} -> Ret;
{_Args, Ret} -> Ret
end,
- ?debug("LocalRet: ~s\n", [t_to_string(LocalRet)]),
+ ?debug("LocalRet: ~ts\n", [t_to_string(LocalRet)]),
{Fun, Sig, Contract, LocalRet}.
forward_args(Fun, ArgTypes, #state{work = Work, fun_tab = FunTab} = State) ->
@@ -3194,7 +3447,7 @@ forward_args(Fun, ArgTypes, #state{work = Work, fun_tab = FunTab} = State) ->
NewArgTypes = [t_sup(X, Y) ||
{X, Y} <- lists:zip(ArgTypes, OldArgTypes)],
NewWork = add_work(Fun, Work),
- ?debug("~w: forwarding args ~s\n",
+ ?debug("~tw: forwarding args ~ts\n",
[state__lookup_name(Fun, State),
t_to_string(t_product(NewArgTypes))]),
NewFunTab = dict:store(Fun, {NewArgTypes, OldOut}, FunTab),
@@ -3251,6 +3504,12 @@ state__put_races(Races, State) ->
state__records_only(#state{records = Records}) ->
#state{records = Records}.
+-spec state__translate_file(file:filename(), state()) -> file:filename().
+
+state__translate_file(FakeFile, State) ->
+ #state{codeserver = CodeServer, module = Module} = State,
+ dialyzer_codeserver:translate_fake_file(CodeServer, Module, FakeFile).
+
%%% ===========================================================================
%%%
%%% Races
@@ -3322,9 +3581,11 @@ get_line([Line|_]) when is_integer(Line) -> Line;
get_line([_|Tail]) -> get_line(Tail);
get_line([]) -> -1.
-get_file([]) -> [];
-get_file([{file, File}|_]) -> File;
-get_file([_|Tail]) -> get_file(Tail).
+get_file([], _State) -> [];
+get_file([{file, FakeFile}|_], State) ->
+ state__translate_file(FakeFile, State);
+get_file([_|Tail], State) ->
+ get_file(Tail, State).
is_compiler_generated(Ann) ->
lists:member(compiler_generated, Ann) orelse (get_line(Ann) < 1).
@@ -3359,6 +3620,7 @@ format_arg(Arg) ->
case cerl:var_name(Arg) of
Atom when is_atom(Atom) ->
case atom_to_list(Atom) of
+ "@"++_ -> Default;
"cor"++_ -> Default;
"rec"++_ -> Default;
Name -> Name ++ "::"
@@ -3419,6 +3681,7 @@ map_pats(Pats) ->
case cerl:var_name(Tree) of
Atom when is_atom(Atom) ->
case atom_to_list(Atom) of
+ "@"++_ -> cerl:c_var('');
"cor"++_ -> cerl:c_var('');
"rec"++_ -> cerl:c_var('');
_ -> cerl:set_ann(Tree, [])
@@ -3434,6 +3697,9 @@ map_pats(Pats) ->
fold_literals(TreeList) ->
[cerl:fold_literal(Tree) || Tree <- TreeList].
+format_atom(A) ->
+ format_cerl(cerl:c_atom(A)).
+
type(Tree) ->
Folded = cerl:fold_literal(Tree),
case cerl:type(Folded) of
diff --git a/lib/dialyzer/src/dialyzer_dep.erl b/lib/dialyzer/src/dialyzer_dep.erl
index a7bc074d02..36cdc0876c 100644
--- a/lib/dialyzer/src/dialyzer_dep.erl
+++ b/lib/dialyzer/src/dialyzer_dep.erl
@@ -1,8 +1,4 @@
%% -*- erlang-indent-level: 2 -*-
-%%-----------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -15,9 +11,6 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
%%%-------------------------------------------------------------------
%%% File : dialyzer_dep.erl
@@ -59,8 +52,14 @@
%% separately.
%%
--spec analyze(cerl:c_module()) ->
- {dict:dict(), ordsets:ordset('external' | label()), dict:dict(), dict:dict()}.
+-type dep_ordset() :: ordsets:ordset(label() | 'external').
+
+-type deps() :: dict:dict(label() | 'external' | 'top', dep_ordset()).
+-type esc() :: dep_ordset().
+-type calls() :: dict:dict(label(), ordsets:ordset(label())).
+-type letrecs() :: dict:dict(label(), label()).
+
+-spec analyze(cerl:c_module()) -> {deps(), esc(), calls(), letrecs()}.
analyze(Tree) ->
%% io:format("Handling ~w\n", [cerl:atom_val(cerl:module_name(Tree))]),
@@ -79,22 +78,26 @@ traverse(Tree, Out, State, CurrentFun) ->
apply ->
Op = cerl:apply_op(Tree),
Args = cerl:apply_args(Tree),
- %% Op is always a variable and should not be marked as escaping
- %% based on its use.
case var =:= cerl:type(Op) of
- false -> erlang:error({apply_op_not_a_variable, cerl:type(Op)});
- true -> ok
- end,
- OpFuns = case map__lookup(cerl_trees:get_label(Op), Out) of
- none -> output(none);
- {value, OF} -> OF
- end,
- {ArgFuns, State2} = traverse_list(Args, Out, State, CurrentFun),
- State3 = state__add_esc(merge_outs(ArgFuns), State2),
- State4 = state__add_deps(CurrentFun, OpFuns, State3),
- State5 = state__store_callsite(cerl_trees:get_label(Tree),
- OpFuns, length(Args), State4),
- {output(set__singleton(external)), State5};
+ false ->
+ %% We have discovered an error here, but we ignore it and let
+ %% later passes handle it; we do not modify the dependencies.
+ %% erlang:error({apply_op_not_a_variable, cerl:type(Op)});
+ {output(none), State};
+ true ->
+ %% Op is a variable and should not be marked as escaping
+ %% based on its use.
+ OpFuns = case map__lookup(cerl_trees:get_label(Op), Out) of
+ none -> output(none);
+ {value, OF} -> OF
+ end,
+ {ArgFuns, State2} = traverse_list(Args, Out, State, CurrentFun),
+ State3 = state__add_esc(merge_outs(ArgFuns), State2),
+ State4 = state__add_deps(CurrentFun, OpFuns, State3),
+ State5 = state__store_callsite(cerl_trees:get_label(Tree),
+ OpFuns, length(Args), State4),
+ {output(set__singleton(external)), State5}
+ end;
binary ->
{output(none), State};
'case' ->
@@ -481,11 +484,11 @@ all_vars(Tree, AccIn) ->
-type local_set() :: 'none' | #set{}.
--record(state, {deps :: dict:dict(),
+-record(state, {deps :: deps(),
esc :: local_set(),
- call :: dict:dict(),
- arities :: dict:dict(),
- letrecs :: dict:dict()}).
+ calls :: calls(),
+ arities :: dict:dict(label() | 'top', arity()),
+ letrecs :: letrecs()}).
state__new(Tree) ->
Exports = set__from_list([X || X <- cerl:module_exports(Tree)]),
@@ -503,7 +506,7 @@ state__new(Tree) ->
%% init the escaping function labels to exported + called from on_load
InitEsc = set__from_list(OnLoadLs ++ ExpLs),
Arities = cerl_trees:fold(fun find_arities/2, dict:new(), Tree),
- #state{deps = map__new(), esc = InitEsc, call = map__new(),
+ #state{deps = map__new(), esc = InitEsc, calls = map__new(),
arities = Arities, letrecs = map__new()}.
find_arities(Tree, AccMap) ->
@@ -518,7 +521,7 @@ find_arities(Tree, AccMap) ->
state__add_deps(_From, #output{content = none}, State) ->
State;
-state__add_deps(From, #output{type = single, content=To},
+state__add_deps(From, #output{type = single, content = To},
#state{deps = Map} = State) ->
%% io:format("Adding deps from ~w to ~w\n", [From, set__to_ordsets(To)]),
State#state{deps = map__add(From, To, Map)}.
@@ -544,16 +547,16 @@ state__esc(#state{esc = Esc}) ->
state__store_callsite(_From, #output{content = none}, _CallArity, State) ->
State;
state__store_callsite(From, To, CallArity,
- #state{call = Calls, arities = Arities} = State) ->
+ #state{calls = Calls, arities = Arities} = State) ->
Filter = fun(external) -> true;
(Fun) -> CallArity =:= dict:fetch(Fun, Arities)
end,
case filter_outs(To, Filter) of
#output{content = none} -> State;
- To1 -> State#state{call = map__store(From, To1, Calls)}
+ To1 -> State#state{calls = map__store(From, To1, Calls)}
end.
-state__calls(#state{call = Calls}) ->
+state__calls(#state{calls = Calls}) ->
Calls.
%%------------------------------------------------------------
diff --git a/lib/dialyzer/src/dialyzer_explanation.erl b/lib/dialyzer/src/dialyzer_explanation.erl
index 20fbcfed35..10b3ef8ea5 100644
--- a/lib/dialyzer/src/dialyzer_explanation.erl
+++ b/lib/dialyzer/src/dialyzer_explanation.erl
@@ -1,9 +1,5 @@
%% -*- erlang-indent-level: 2 -*-
-%%-----------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2009. All Rights Reserved.
-%%
+%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
@@ -15,9 +11,6 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
%%%-------------------------------------------------------------------
%%% File : dialyzer_explanation.erl
@@ -50,4 +43,3 @@ expl_loop(Parent, CServer, Plt) ->
send_explanation(Parent, Expl) ->
Parent ! {self(), explanation, Expl},
ok.
-
diff --git a/lib/dialyzer/src/dialyzer_gui_wx.erl b/lib/dialyzer/src/dialyzer_gui_wx.erl
index ff54a91ce1..538327d4d1 100644
--- a/lib/dialyzer/src/dialyzer_gui_wx.erl
+++ b/lib/dialyzer/src/dialyzer_gui_wx.erl
@@ -1,8 +1,4 @@
%% -*- erlang-indent-level: 2 -*-
-%%------------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2009-2013. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -15,9 +11,6 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
%%%-----------------------------------------------------------------------
%%% File : dialyzer_gui_wx.erl
@@ -52,7 +45,6 @@
add_dir :: wx:wx_object(),
add_rec :: wx:wx_object(),
chosen_box :: wx:wx_object(),
- analysis_pid :: pid(),
del_file :: wx:wx_object(),
doc_plt :: dialyzer_plt:plt(),
clear_chosen :: wx:wx_object(),
@@ -72,11 +64,11 @@
stop :: wx:wx_object(),
frame :: wx:wx_object(),
warnings_box :: wx:wx_object(),
- explanation_box :: wx:wx_object(),
+ explanation_box :: wx:wx_object() | 'undefined',
wantedWarnings :: list(),
rawWarnings :: list(),
- backend_pid :: pid(),
- expl_pid :: pid()}).
+ backend_pid :: pid() | 'undefined',
+ expl_pid :: pid() | 'undefined'}).
%%------------------------------------------------------------------------
@@ -311,7 +303,7 @@ createWarningsMenu() ->
addCheckedItem(WarningsMenu, ?menuID_WARN_FAIL_FUN_CALLS,
"Failing function calls"),
addCheckedItem(WarningsMenu, ?menuID_WARN_BAD_FUN, "Bad fun applications"),
- addCheckedItem(WarningsMenu, ?menuID_WARN_OPAQUE, "Opaqueness violations"),
+ addCheckedItem(WarningsMenu, ?menuID_WARN_OPAQUE, "Opacity violations"),
addCheckedItem(WarningsMenu, ?menuID_WARN_LIST_CONSTR,
"Improper list constructions"),
addCheckedItem(WarningsMenu, ?menuID_WARN_UNUSED_FUN, "Unused functions"),
@@ -423,7 +415,7 @@ gui_loop(#gui_state{backend_pid = BackendPid, doc_plt = DocPlt,
#wx{id=?menuID_HELP_ABOUT, obj=Frame,
event=#wxCommand{type=command_menu_selected}} ->
Message = " This is DIALYZER version " ++ ?VSN ++ " \n"++
- "DIALYZER is a DIscrepany AnaLYZer for ERlang programs.\n\n"++
+ "DIALYZER is a DIscrepancy AnaLYZer for ERlang programs.\n\n"++
" Copyright (C) Tobias Lindahl <[email protected]>\n"++
" Kostis Sagonas <[email protected]>\n\n",
output_sms(State, "About Dialyzer", Message, info),
@@ -477,18 +469,18 @@ gui_loop(#gui_state{backend_pid = BackendPid, doc_plt = DocPlt,
Msg = io_lib:format("The following functions are called "
"but type information about them is not available.\n"
"The analysis might get more precise by including "
- "the modules containing these functions:\n\n\t~p\n",
+ "the modules containing these functions:\n\n\t~tp\n",
[ExtCalls]),
free_editor(State,"Analysis Done", Msg),
gui_loop(State);
{BackendPid, ext_types, ExtTypes} ->
- Map = fun({M,F,A}) -> io_lib:format("~p:~p/~p",[M,F,A]) end,
+ Map = fun({M,F,A}) -> io_lib:format("~tp:~tp/~p",[M,F,A]) end,
ExtTypeString = string:join(lists:map(Map, ExtTypes), "\n"),
Msg = io_lib:format("The following remote types are being used "
"but information about them is not available.\n"
"The analysis might get more precise by including "
"the modules containing these types and making sure "
- "that they are exported:\n~s\n", [ExtTypeString]),
+ "that they are exported:\n~ts\n", [ExtTypeString]),
free_editor(State, "Analysis done", Msg),
gui_loop(State);
{BackendPid, log, LogMsg} ->
@@ -506,8 +498,9 @@ gui_loop(#gui_state{backend_pid = BackendPid, doc_plt = DocPlt,
end,
ExplanationPid = spawn_link(Fun),
gui_loop(State#gui_state{expl_pid = ExplanationPid});
- {BackendPid, done, _NewPlt, NewDocPlt} ->
+ {BackendPid, done, NewPlt, NewDocPlt} ->
message(State, "Analysis done"),
+ dialyzer_plt:delete(NewPlt),
config_gui_stop(State),
gui_loop(State#gui_state{doc_plt = NewDocPlt});
{'EXIT', BackendPid, {error, Reason}} ->
@@ -515,7 +508,7 @@ gui_loop(#gui_state{backend_pid = BackendPid, doc_plt = DocPlt,
config_gui_stop(State),
gui_loop(State);
{'EXIT', BackendPid, Reason} when Reason =/= 'normal' ->
- free_editor(State, ?DIALYZER_ERROR_TITLE, io_lib:format("~p", [Reason])),
+ free_editor(State, ?DIALYZER_ERROR_TITLE, io_lib:format("~tp", [Reason])),
config_gui_stop(State),
gui_loop(State)
end.
@@ -933,7 +926,7 @@ include_dialog(#gui_state{gui = Wx, frame = Frame, options = Options}) ->
wxButton:connect(DeleteAllButton, command_button_clicked),
wxButton:connect(Ok, command_button_clicked),
wxButton:connect(Cancel, command_button_clicked),
- Dirs = [io_lib:format("~s", [X]) || X <- Options#options.include_dirs],
+ Dirs = [io_lib:format("~ts", [X]) || X <- Options#options.include_dirs],
wxListBox:set(Box, Dirs),
Layout = wxBoxSizer:new(?wxVERTICAL),
Buttons = wxBoxSizer:new(?wxHORIZONTAL),
@@ -1125,13 +1118,13 @@ handle_help(State, Title, Txt) ->
case file:open(FileName, [read]) of
{error, Reason} ->
error_sms(State,
- io_lib:format("Could not find doc/~s file!\n\n ~p",
+ io_lib:format("Could not find doc/~ts file!\n\n ~tp",
[Txt, Reason]));
{ok, _Handle} ->
case file:read_file(FileName) of
{error, Reason} ->
error_sms(State,
- io_lib:format("Could not read doc/~s file!\n\n ~p",
+ io_lib:format("Could not read doc/~ts file!\n\n ~tp",
[Txt, Reason]));
{ok, Binary} ->
Contents = binary_to_list(Binary),
@@ -1230,7 +1223,7 @@ update_explanation(#gui_state{explanation_box = Box}, Explanation) ->
wxTextCtrl:appendText(Box, ExplString).
format_explanation({function_return, {M, F, A}, NewList}) ->
- io_lib:format("The function ~p: ~p/~p returns ~p\n",
+ io_lib:format("The function ~w:~tw/~w returns ~ts\n",
[M, F, A, erl_types:t_to_string(NewList)]);
format_explanation(Explanation) ->
io_lib:format("~p\n", [Explanation]).
diff --git a/lib/dialyzer/src/dialyzer_gui_wx.hrl b/lib/dialyzer/src/dialyzer_gui_wx.hrl
index e81eeb1ab5..fe763a9abc 100644
--- a/lib/dialyzer/src/dialyzer_gui_wx.hrl
+++ b/lib/dialyzer/src/dialyzer_gui_wx.hrl
@@ -1,3 +1,15 @@
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+
-include_lib("wx/include/wx.hrl").
diff --git a/lib/dialyzer/src/dialyzer_options.erl b/lib/dialyzer/src/dialyzer_options.erl
index ce84c17f43..03040f8e38 100644
--- a/lib/dialyzer/src/dialyzer_options.erl
+++ b/lib/dialyzer/src/dialyzer_options.erl
@@ -1,8 +1,4 @@
%% -*- erlang-indent-level: 2 -*-
-%%-----------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -16,16 +12,9 @@
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
-%% %CopyrightEnd%
-%%
-
-%%%----------------------------------------------------------------------
-%%% File : dialyzer_options.erl
-%%% Authors : Richard Carlsson <[email protected]>
-%%% Description : Provides a better way to start Dialyzer from a script.
-%%%
-%%% Created : 17 Oct 2004 by Richard Carlsson <[email protected]>
-%%%----------------------------------------------------------------------
+%% @copyright 2004 Richard Carlsson
+%% @author Richard Carlsson <[email protected]>
+%% @doc Provides a better way to start Dialyzer from a script.
-module(dialyzer_options).
@@ -47,6 +36,7 @@ build(Opts) ->
?WARN_CALLGRAPH,
?WARN_FAILING_CALL,
?WARN_BIN_CONSTRUCTION,
+ ?WARN_MAP_CONSTRUCTION,
?WARN_CONTRACT_RANGE,
?WARN_CONTRACT_TYPES,
?WARN_CONTRACT_SYNTAX,
@@ -72,9 +62,15 @@ preprocess_opts([Opt|Opts]) ->
[Opt|preprocess_opts(Opts)].
postprocess_opts(Opts = #options{}) ->
+ check_file_existence(Opts),
Opts1 = check_output_plt(Opts),
adapt_get_warnings(Opts1).
+check_file_existence(#options{analysis_type = plt_remove}) -> ok;
+check_file_existence(#options{files = Files, files_rec = FilesRec}) ->
+ assert_filenames_exist(Files),
+ assert_filenames_exist(FilesRec).
+
check_output_plt(Opts = #options{analysis_type = Mode, from = From,
output_plt = OutPLT}) ->
case is_plt_mode(Mode) of
@@ -116,7 +112,7 @@ adapt_get_warnings(Opts = #options{analysis_type = Mode,
-spec bad_option(string(), term()) -> no_return().
bad_option(String, Term) ->
- Msg = io_lib:format("~s: ~P", [String, Term, 25]),
+ Msg = io_lib:format("~ts: ~tP", [String, Term, 25]),
throw({dialyzer_options_error, lists:flatten(Msg)}).
build_options([{OptName, undefined}|Rest], Options) when is_atom(OptName) ->
@@ -126,14 +122,14 @@ build_options([{OptionName, Value} = Term|Rest], Options) ->
apps ->
OldValues = Options#options.files_rec,
AppDirs = get_app_dirs(Value),
- assert_filenames(Term, AppDirs),
+ assert_filenames_form(Term, AppDirs),
build_options(Rest, Options#options{files_rec = AppDirs ++ OldValues});
files ->
- assert_filenames(Term, Value),
+ assert_filenames_form(Term, Value),
build_options(Rest, Options#options{files = Value});
files_rec ->
OldValues = Options#options.files_rec,
- assert_filenames(Term, Value),
+ assert_filenames_form(Term, Value),
build_options(Rest, Options#options{files_rec = Value ++ OldValues});
analysis_type ->
NewOptions =
@@ -210,16 +206,26 @@ get_app_dirs(Apps) when is_list(Apps) ->
get_app_dirs(Apps) ->
bad_option("Use a list of otp applications", Apps).
-assert_filenames(Term, [FileName|Left]) when length(FileName) >= 0 ->
+assert_filenames(Term, Files) ->
+ assert_filenames_form(Term, Files),
+ assert_filenames_exist(Files).
+
+assert_filenames_form(Term, [FileName|Left]) when length(FileName) >= 0 ->
+ assert_filenames_form(Term, Left);
+assert_filenames_form(_Term, []) ->
+ ok;
+assert_filenames_form(Term, [_|_]) ->
+ bad_option("Malformed or non-existing filename", Term).
+
+assert_filenames_exist([FileName|Left]) ->
case filelib:is_file(FileName) orelse filelib:is_dir(FileName) of
true -> ok;
- false -> bad_option("No such file, directory or application", FileName)
+ false ->
+ bad_option("No such file, directory or application", FileName)
end,
- assert_filenames(Term, Left);
-assert_filenames(_Term, []) ->
- ok;
-assert_filenames(Term, [_|_]) ->
- bad_option("Malformed or non-existing filename", Term).
+ assert_filenames_exist(Left);
+assert_filenames_exist([]) ->
+ ok.
assert_filename(FileName) when length(FileName) >= 0 ->
ok;
@@ -271,6 +277,9 @@ assert_solvers([Term|_]) ->
-spec build_warnings([atom()], dial_warn_tags()) -> dial_warn_tags().
+%% The warning options are checked by the code linter.
+%% The function erl_lint:is_module_dialyzer_option/1 must
+%% be updated if options are added or removed.
build_warnings([Opt|Opts], Warnings) ->
NewWarnings =
case Opt of
diff --git a/lib/dialyzer/src/dialyzer_plt.erl b/lib/dialyzer/src/dialyzer_plt.erl
index 634871b2eb..47994fc35b 100644
--- a/lib/dialyzer/src/dialyzer_plt.erl
+++ b/lib/dialyzer/src/dialyzer_plt.erl
@@ -1,8 +1,4 @@
%% -*- erlang-indent-level: 2 -*-
-%%----------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -15,9 +11,6 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
%%%-------------------------------------------------------------------
%%% File : dialyzer_plt.erl
@@ -38,15 +31,15 @@
included_files/1,
from_file/1,
get_default_plt/0,
- get_types/1,
+ get_module_types/2,
get_exported_types/1,
- %% insert/3,
insert_list/2,
insert_contract_list/2,
insert_callbacks/2,
insert_types/2,
insert_exported_types/2,
lookup/2,
+ is_contract/2,
lookup_contract/2,
lookup_callbacks/2,
lookup_module/2,
@@ -57,8 +50,7 @@
get_specs/1,
get_specs/4,
to_file/4,
- get_mini_plt/1,
- restore_full_plt/2
+ delete/1
]).
%% Debug utilities
@@ -66,6 +58,8 @@
-export_type([plt/0, plt_info/0]).
+-include_lib("stdlib/include/ms_transform.hrl").
+
%%----------------------------------------------------------------------
-type mod_deps() :: dialyzer_callgraph:mod_deps().
@@ -81,18 +75,16 @@
%%----------------------------------------------------------------------
--record(plt, {info = table_new() :: dict:dict(),
- types = table_new() :: dict:dict(),
- contracts = table_new() :: dict:dict(),
- callbacks = table_new() :: dict:dict(),
- exported_types = sets:new() :: sets:set()}).
-
--record(mini_plt, {info :: ets:tid(),
- contracts :: ets:tid(),
- callbacks :: ets:tid()
- }).
+-record(plt, {info :: ets:tid(), %% {mfa() | integer(), ret_args_types()}
+ types :: ets:tid(), %% {module(), erl_types:type_table()}
+ contracts :: ets:tid(), %% {mfa(), #contract{}}
+ callbacks :: ets:tid(), %% {module(),
+ %% [{mfa(),
+ %% dialyzer_contracts:file_contract()}]
+ exported_types :: ets:tid() %% {module(), sets:set()}
+ }).
--opaque plt() :: #plt{} | #mini_plt{}.
+-opaque plt() :: #plt{}.
-include("dialyzer.hrl").
@@ -114,7 +106,17 @@
-spec new() -> plt().
new() ->
- #plt{}.
+ [ETSInfo, ETSContracts] =
+ [ets:new(Name, [public]) ||
+ Name <- [plt_info, plt_contracts]],
+ [ETSTypes, ETSCallbacks, ETSExpTypes] =
+ [ets:new(Name, [compressed, public]) ||
+ Name <- [plt_types, plt_callbacks, plt_exported_types]],
+ #plt{info = ETSInfo,
+ types = ETSTypes,
+ contracts = ETSContracts,
+ callbacks = ETSCallbacks,
+ exported_types = ETSExpTypes}.
-spec delete_module(plt(), atom()) -> plt().
@@ -125,50 +127,55 @@ delete_module(#plt{info = Info, types = Types,
#plt{info = table_delete_module(Info, Mod),
types = table_delete_module2(Types, Mod),
contracts = table_delete_module(Contracts, Mod),
- callbacks = table_delete_module(Callbacks, Mod),
+ callbacks = table_delete_module2(Callbacks, Mod),
exported_types = table_delete_module1(ExpTypes, Mod)}.
-spec delete_list(plt(), [mfa() | integer()]) -> plt().
-delete_list(#plt{info = Info, types = Types,
- contracts = Contracts,
- callbacks = Callbacks,
- exported_types = ExpTypes}, List) ->
- #plt{info = table_delete_list(Info, List),
- types = Types,
- contracts = table_delete_list(Contracts, List),
- callbacks = table_delete_list(Callbacks, List),
- exported_types = ExpTypes}.
+delete_list(#plt{info = Info,
+ contracts = Contracts}=Plt, List) ->
+ Plt#plt{info = ets_table_delete_list(Info, List),
+ contracts = ets_table_delete_list(Contracts, List)}.
-spec insert_contract_list(plt(), dialyzer_contracts:plt_contracts()) -> plt().
-insert_contract_list(#mini_plt{contracts = Contracts} = PLT, List) ->
+insert_contract_list(#plt{contracts = Contracts} = PLT, List) ->
true = ets:insert(Contracts, List),
PLT.
-spec insert_callbacks(plt(), dialyzer_codeserver:codeserver()) -> plt().
insert_callbacks(#plt{callbacks = Callbacks} = Plt, Codeserver) ->
- List = dialyzer_codeserver:get_callbacks(Codeserver),
- Plt#plt{callbacks = table_insert_list(Callbacks, List)}.
+ CallbacksList = dialyzer_codeserver:get_callbacks(Codeserver),
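+  %% Group the callbacks by module; the callbacks table is keyed on module().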
+ CallbacksByModule =
+ [{M, [Cb || {{M1,_,_},_} = Cb <- CallbacksList, M1 =:= M]} ||
+ M <- lists:usort([M || {{M,_,_},_} <- CallbacksList])],
+ true = ets:insert(Callbacks, CallbacksByModule),
+ Plt.
+
+-spec is_contract(plt(), mfa()) -> boolean().
+
+is_contract(#plt{contracts = ETSContracts},
+ {M, F, _} = MFA) when is_atom(M), is_atom(F) ->
+ ets:member(ETSContracts, MFA).
-spec lookup_contract(plt(), mfa_patt()) -> 'none' | {'value', #contract{}}.
-lookup_contract(#mini_plt{contracts = ETSContracts},
+lookup_contract(#plt{contracts = ETSContracts},
{M, F, _} = MFA) when is_atom(M), is_atom(F) ->
ets_table_lookup(ETSContracts, MFA).
-spec lookup_callbacks(plt(), module()) ->
'none' | {'value', [{mfa(), dialyzer_contracts:file_contract()}]}.
-lookup_callbacks(#mini_plt{callbacks = ETSCallbacks}, Mod) when is_atom(Mod) ->
+lookup_callbacks(#plt{callbacks = ETSCallbacks}, Mod) when is_atom(Mod) ->
ets_table_lookup(ETSCallbacks, Mod).
-type ret_args_types() :: {erl_types:erl_type(), [erl_types:erl_type()]}.
-spec insert_list(plt(), [{mfa() | integer(), ret_args_types()}]) -> plt().
-insert_list(#mini_plt{info = Info} = PLT, List) ->
+insert_list(#plt{info = Info} = PLT, List) ->
true = ets:insert(Info, List),
PLT.
@@ -180,28 +187,31 @@ lookup(Plt, {M, F, _} = MFA) when is_atom(M), is_atom(F) ->
lookup(Plt, Label) when is_integer(Label) ->
lookup_1(Plt, Label).
-lookup_1(#mini_plt{info = Info}, MFAorLabel) ->
+lookup_1(#plt{info = Info}, MFAorLabel) ->
ets_table_lookup(Info, MFAorLabel).
--spec insert_types(plt(), dict:dict()) -> plt().
+-spec insert_types(plt(), ets:tid()) -> plt().
-insert_types(PLT, Rec) ->
- PLT#plt{types = Rec}.
+insert_types(PLT, Records) ->
+ ok = dialyzer_utils:ets_move(Records, PLT#plt.types),
+ PLT.
--spec insert_exported_types(plt(), sets:set()) -> plt().
+-spec insert_exported_types(plt(), ets:tid()) -> plt().
-insert_exported_types(PLT, Set) ->
- PLT#plt{exported_types = Set}.
+insert_exported_types(PLT, ExpTypes) ->
+ ok = dialyzer_utils:ets_move(ExpTypes, PLT#plt.exported_types),
+ PLT.
--spec get_types(plt()) -> dict:dict().
+-spec get_module_types(plt(), atom()) ->
+ 'none' | {'value', erl_types:type_table()}.
-get_types(#plt{types = Types}) ->
- Types.
+get_module_types(#plt{types = Types}, M) when is_atom(M) ->
+ ets_table_lookup(Types, M).
-spec get_exported_types(plt()) -> sets:set().
-get_exported_types(#plt{exported_types = ExpTypes}) ->
- ExpTypes.
+get_exported_types(#plt{exported_types = ETSExpTypes}) ->
+ sets:from_list([E || {E} <- table_to_list(ETSExpTypes)]).
-type mfa_types() :: {mfa(), erl_types:erl_type(), [erl_types:erl_type()]}.
@@ -218,20 +228,15 @@ all_modules(#plt{info = Info, contracts = Cs}) ->
-spec contains_mfa(plt(), mfa()) -> boolean().
contains_mfa(#plt{info = Info, contracts = Contracts}, MFA) ->
- (table_lookup(Info, MFA) =/= none)
- orelse (table_lookup(Contracts, MFA) =/= none).
+ ets:member(Info, MFA) orelse ets:member(Contracts, MFA).
-spec get_default_plt() -> file:filename().
get_default_plt() ->
case os:getenv("DIALYZER_PLT") of
false ->
- case os:getenv("HOME") of
- false ->
- plt_error("The HOME environment variable needs to be set " ++
- "so that Dialyzer knows where to find the default PLT");
- HomeDir -> filename:join(HomeDir, ".dialyzer_plt")
- end;
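+      %% Use the home directory passed to the emulator (-home) instead of
+      %% the HOME environment variable.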
+ {ok,[[HomeDir]]} = init:get_argument(home),
+ filename:join(HomeDir, ".dialyzer_plt");
UserSpecPlt -> UserSpecPlt
end.
@@ -246,30 +251,60 @@ from_file(FileName) ->
from_file(FileName, false).
from_file(FileName, ReturnInfo) ->
+ Plt = new(),
+ Fun = fun() -> from_file1(Plt, FileName, ReturnInfo) end,
+ case subproc(Fun) of
+ {ok, Return} ->
+ Return;
+ {error, Msg} ->
+ delete(Plt),
+ plt_error(Msg)
+ end.
+
+from_file1(Plt, FileName, ReturnInfo) ->
case get_record_from_file(FileName) of
{ok, Rec} ->
case check_version(Rec) of
error ->
- Msg = io_lib:format("Old PLT file ~s\n", [FileName]),
- plt_error(Msg);
+ Msg = io_lib:format("Old PLT file ~ts\n", [FileName]),
+ {error, Msg};
ok ->
- Plt = #plt{info = Rec#file_plt.info,
- types = Rec#file_plt.types,
- contracts = Rec#file_plt.contracts,
- callbacks = Rec#file_plt.callbacks,
- exported_types = Rec#file_plt.exported_types},
+ #file_plt{info = FileInfo,
+ contracts = FileContracts,
+ callbacks = FileCallbacks,
+ types = FileTypes,
+ exported_types = FileExpTypes} = Rec,
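+          %% The PLT file stores dict()s and a set(); convert them to the
+          %% ETS-based in-memory representation.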
+ Types = [{Mod, maps:from_list(dict:to_list(Types))} ||
+ {Mod, Types} <- dict:to_list(FileTypes)],
+ CallbacksList = dict:to_list(FileCallbacks),
+ CallbacksByModule =
+ [{M, [Cb || {{M1,_,_},_} = Cb <- CallbacksList, M1 =:= M]} ||
+ M <- lists:usort([M || {{M,_,_},_} <- CallbacksList])],
+ #plt{info = ETSInfo,
+ types = ETSTypes,
+ contracts = ETSContracts,
+ callbacks = ETSCallbacks,
+ exported_types = ETSExpTypes} = Plt,
+ [true, true, true] =
+ [ets:insert(ETS, Data) ||
+ {ETS, Data} <- [{ETSInfo, dict:to_list(FileInfo)},
+ {ETSTypes, Types},
+ {ETSContracts, dict:to_list(FileContracts)}]],
+ true = ets:insert(ETSCallbacks, CallbacksByModule),
+ true = ets:insert(ETSExpTypes, [{ET} ||
+ ET <- sets:to_list(FileExpTypes)]),
case ReturnInfo of
- false -> Plt;
+ false -> {ok, Plt};
true ->
PltInfo = {Rec#file_plt.file_md5_list,
Rec#file_plt.mod_deps},
- {Plt, PltInfo}
+ {ok, {Plt, PltInfo}}
end
end;
{error, Reason} ->
- Msg = io_lib:format("Could not read PLT file ~s: ~p\n",
+ Msg = io_lib:format("Could not read PLT file ~ts: ~p\n",
[FileName, Reason]),
- plt_error(Msg)
+ {error, Msg}
end.
-type err_rsn() :: 'not_valid' | 'no_such_file' | 'read_error'.
@@ -278,6 +313,10 @@ from_file(FileName, ReturnInfo) ->
| {'error', err_rsn()}.
included_files(FileName) ->
+ Fun = fun() -> included_files1(FileName) end,
+ subproc(Fun).
+
+included_files1(FileName) ->
case get_record_from_file(FileName) of
{ok, #file_plt{file_md5_list = Md5}} ->
{ok, [File || {File, _} <- Md5]};
@@ -310,6 +349,9 @@ get_record_from_file(FileName) ->
-spec merge_plts([plt()]) -> plt().
+%% One of the PLTs of the list is augmented with the contents of the
+%% other PLTs, and returned. The other PLTs are deleted.
+
merge_plts(List) ->
{InfoList, TypesList, ExpTypesList, ContractsList, CallbacksList} =
group_fields(List),
@@ -322,6 +364,12 @@ merge_plts(List) ->
-spec merge_disj_plts([plt()]) -> plt().
+%% One of the PLTs of the list is augmented with the contents of the
+%% other PLTs, and returned. The other PLTs are deleted.
+%%
+%% The keys are compared when checking for disjointness. Sometimes the
+%% key is a module(), sometimes an mfa(). It boils down to checking if
+%% any module occurs more than once.
merge_disj_plts(List) ->
{InfoList, TypesList, ExpTypesList, ContractsList, CallbacksList} =
group_fields(List),
@@ -351,7 +399,7 @@ merge_plts_or_report_conflicts(PltFiles, Plts) ->
ConfFiles = find_duplicates(IncFiles),
Msg = io_lib:format("Could not merge PLTs since they are not disjoint\n"
"The following files are included in more than one "
- "PLTs:\n~p\n", [ConfFiles]),
+ "PLTs:\n~tp\n", [ConfFiles]),
plt_error(Msg)
end.
@@ -362,21 +410,42 @@ find_duplicates(List) ->
-spec to_file(file:filename(), plt(), mod_deps(), {[file_md5()], mod_deps()}) -> 'ok'.
-to_file(FileName,
- #plt{info = Info, types = Types, contracts = Contracts,
- callbacks = Callbacks, exported_types = ExpTypes},
+%% Write the PLT to a file, and delete the PLT.
+to_file(FileName, Plt, ModDeps, MD5_OldModDeps) ->
+ Fun = fun() -> to_file1(FileName, Plt, ModDeps, MD5_OldModDeps) end,
+ Return = subproc(Fun),
+ delete(Plt),
+ case Return of
+ ok -> ok;
+ Thrown -> throw(Thrown)
+ end.
+
+to_file1(FileName,
+ #plt{info = ETSInfo, types = ETSTypes, contracts = ETSContracts,
+ callbacks = ETSCallbacks, exported_types = ETSExpTypes},
ModDeps, {MD5, OldModDeps}) ->
NewModDeps = dict:merge(fun(_Key, OldVal, NewVal) ->
ordsets:union(OldVal, NewVal)
end,
OldModDeps, ModDeps),
ImplMd5 = compute_implementation_md5(),
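+  %% Convert the ETS tables back to the dict()/set() representation
+  %% stored in the PLT file.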
+ CallbacksList =
+ [Cb ||
+ {_M, Cbs} <- tab2list(ETSCallbacks),
+ Cb <- Cbs],
+ Callbacks = dict:from_list(CallbacksList),
+ Info = dict:from_list(tab2list(ETSInfo)),
+ Types = tab2list(ETSTypes),
+ Contracts = dict:from_list(tab2list(ETSContracts)),
+ ExpTypes = sets:from_list([E || {E} <- tab2list(ETSExpTypes)]),
+ FileTypes = dict:from_list([{Mod, dict:from_list(maps:to_list(MTypes))} ||
+ {Mod, MTypes} <- Types]),
Record = #file_plt{version = ?VSN,
file_md5_list = MD5,
info = Info,
contracts = Contracts,
callbacks = Callbacks,
- types = Types,
+ types = FileTypes,
exported_types = ExpTypes,
mod_deps = NewModDeps,
implementation_md5 = ImplMd5},
@@ -384,9 +453,9 @@ to_file(FileName,
case file:write_file(FileName, Bin) of
ok -> ok;
{error, Reason} ->
- Msg = io_lib:format("Could not write PLT file ~s: ~w\n",
+ Msg = io_lib:format("Could not write PLT file ~ts: ~w\n",
[FileName, Reason]),
- throw({dialyzer_error, Msg})
+ {dialyzer_error, Msg}
end.
-type md5_diff() :: [{'differ', atom()} | {'removed', atom()}].
@@ -399,6 +468,10 @@ to_file(FileName,
| {'old_version', [file_md5()]}.
check_plt(FileName, RemoveFiles, AddFiles) ->
+ Fun = fun() -> check_plt1(FileName, RemoveFiles, AddFiles) end,
+ subproc(Fun).
+
+check_plt1(FileName, RemoveFiles, AddFiles) ->
case get_record_from_file(FileName) of
{ok, #file_plt{file_md5_list = Md5, mod_deps = ModDeps} = Rec} ->
case check_version(Rec) of
@@ -460,15 +533,14 @@ compute_md5_from_files(Files) ->
compute_md5_from_file(File) ->
case filelib:is_regular(File) of
false ->
- Msg = io_lib:format("Not a regular file: ~s\n", [File]),
+ Msg = io_lib:format("Not a regular file: ~ts\n", [File]),
throw({dialyzer_error, Msg});
true ->
- case dialyzer_utils:get_abstract_code_from_beam(File) of
- error ->
- Msg = io_lib:format("Could not get abstract code for file: ~s (please recompile it with +debug_info)\n", [File]),
- throw({dialyzer_error, Msg});
- {ok, Abs} ->
- erlang:md5(term_to_binary(Abs))
+ case dialyzer_utils:get_core_from_beam(File) of
+ {error, Error} ->
+ throw({dialyzer_error, Error});
+ {ok, Core} ->
+ erlang:md5(term_to_binary(Core))
end
end.
@@ -508,33 +580,29 @@ init_md5_list_1([], DiffList, Acc) ->
init_md5_list_1(Md5List, [], Acc) ->
{ok, lists:reverse(Acc, Md5List)}.
--spec get_mini_plt(plt()) -> plt().
-
-get_mini_plt(#plt{info = Info, contracts = Contracts, callbacks = Callbacks}) ->
- [ETSInfo, ETSContracts, ETSCallbacks] =
- [ets:new(Name, [public]) || Name <- [plt_info, plt_contracts, plt_callbacks]],
- CallbackList = dict:to_list(Callbacks),
- CallbacksByModule =
- [{M, [Cb || {{M1,_,_},_} = Cb <- CallbackList, M1 =:= M]} ||
- M <- lists:usort([M || {{M,_,_},_} <- CallbackList])],
- [true, true] =
- [ets:insert(ETS, dict:to_list(Data)) ||
- {ETS, Data} <- [{ETSInfo, Info}, {ETSContracts, Contracts}]],
- true = ets:insert(ETSCallbacks, CallbacksByModule),
- #mini_plt{info = ETSInfo, contracts = ETSContracts, callbacks = ETSCallbacks};
-get_mini_plt(undefined) ->
- undefined.
-
--spec restore_full_plt(plt(), plt()) -> plt().
-
-restore_full_plt(#mini_plt{info = ETSInfo, contracts = ETSContracts}, Plt) ->
- Info = dict:from_list(ets:tab2list(ETSInfo)),
- Contracts = dict:from_list(ets:tab2list(ETSContracts)),
- ets:delete(ETSContracts),
- ets:delete(ETSInfo),
- Plt#plt{info = Info, contracts = Contracts};
-restore_full_plt(undefined, undefined) ->
- undefined.
+-spec delete(plt()) -> 'ok'.
+
+delete(#plt{info = ETSInfo,
+ types = ETSTypes,
+ contracts = ETSContracts,
+ callbacks = ETSCallbacks,
+ exported_types = ETSExpTypes}) ->
+ true = ets:delete(ETSContracts),
+ true = ets:delete(ETSTypes),
+ true = ets:delete(ETSInfo),
+ true = ets:delete(ETSCallbacks),
+ true = ets:delete(ETSExpTypes),
+ ok.
+
+tab2list(Tab) ->
+ dialyzer_utils:ets_tab2list(Tab).
+
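+%% Run Fun in a temporary process and return its result, which is
+%% passed back as the exit reason of the process; the heap used while
+%% running Fun is reclaimed when the process exits.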
+subproc(Fun) ->
+ F = fun() -> exit(Fun()) end,
+ {Pid, Ref} = erlang:spawn_monitor(F),
+ receive {'DOWN', Ref, process, Pid, Return} ->
+ Return
+ end.
%%---------------------------------------------------------------------------
%% Edoc
@@ -543,7 +611,8 @@ restore_full_plt(undefined, undefined) ->
get_specs(#plt{info = Info}) ->
%% TODO: Should print contracts as well.
- L = lists:sort([{MFA, Val} || {{_,_,_} = MFA, Val} <- table_to_list(Info)]),
+ L = lists:sort([{MFA, Val} ||
+ {{_,_,_} = MFA, Val} <- table_to_list(Info)]),
lists:flatten(create_specs(L, [])).
beam_file_to_module(Filename) ->
@@ -553,13 +622,13 @@ beam_file_to_module(Filename) ->
get_specs(#plt{info = Info}, M, F, A) when is_atom(M), is_atom(F) ->
MFA = {M, F, A},
- case table_lookup(Info, MFA) of
+ case ets_table_lookup(Info, MFA) of
none -> none;
{value, Val} -> lists:flatten(create_specs([{MFA, Val}], []))
end.
create_specs([{{M, F, _A}, {Ret, Args}}|Left], M) ->
- [io_lib:format("-spec ~w(~s) -> ~s\n",
+ [io_lib:format("-spec ~tw(~ts) -> ~ts\n",
[F, expand_args(Args), erl_types:t_to_string(Ret)])
| create_specs(Left, M)];
create_specs(List = [{{M, _F, _A}, {_Ret, _Args}}| _], _M) ->
@@ -590,41 +659,30 @@ plt_error(Msg) ->
%%---------------------------------------------------------------------------
%% Ets table
-table_new() ->
- dict:new().
-
table_to_list(Plt) ->
- dict:to_list(Plt).
-
-table_delete_module(Plt, Mod) ->
- dict:filter(fun({M, _F, _A}, _Val) -> M =/= Mod;
- (_, _) -> true
- end, Plt).
-
-table_delete_module1(Plt, Mod) ->
- sets:filter(fun({M, _F, _A}) -> M =/= Mod end, Plt).
-
-table_delete_module2(Plt, Mod) ->
- dict:filter(fun(M, _Val) -> M =/= Mod end, Plt).
-
-table_delete_list(Plt, [H|T]) ->
- table_delete_list(dict:erase(H, Plt), T);
-table_delete_list(Plt, []) ->
- Plt.
-
-table_insert_list(Plt, [{Key, Val}|Left]) ->
- table_insert_list(table_insert(Plt, Key, Val), Left);
-table_insert_list(Plt, []) ->
- Plt.
-
-table_insert(Plt, Key, {_File, #contract{}, _Xtra} = C) ->
- dict:store(Key, C, Plt).
-
-table_lookup(Plt, Obj) ->
- case dict:find(Obj, Plt) of
- error -> none;
- {ok, Val} -> {value, Val}
- end.
+ ets:tab2list(Plt).
+
+table_delete_module(Tab, Mod) ->
+ MS = ets:fun2ms(fun({{M, _F, _A}, _Val}) -> M =:= Mod;
+ ({_, _}) -> false
+ end),
+ _NumDeleted = ets:select_delete(Tab, MS),
+ Tab.
+
+table_delete_module1(Tab, Mod) ->
+ MS = ets:fun2ms(fun({{M, _F, _A}}) -> M =:= Mod end),
+ _NumDeleted = ets:select_delete(Tab, MS),
+ Tab.
+
+table_delete_module2(Tab, Mod) ->
+ true = ets:delete(Tab, Mod),
+ Tab.
+
+ets_table_delete_list(Tab, [H|T]) ->
+ ets:delete(Tab, H),
+ ets_table_delete_list(Tab, T);
+ets_table_delete_list(Tab, []) ->
+ Tab.
ets_table_lookup(Plt, Obj) ->
try ets:lookup_element(Plt, Obj, 2) of
@@ -633,25 +691,28 @@ ets_table_lookup(Plt, Obj) ->
_:_ -> none
end.
-table_lookup_module(Plt, Mod) ->
- List = dict:fold(fun(Key, Val, Acc) ->
- case Key of
- {Mod, _F, _A} -> [{Key, element(1, Val),
- element(2, Val)}|Acc];
- _ -> Acc
- end
- end, [], Plt),
+table_lookup_module(Tab, Mod) ->
+ MS = ets:fun2ms(fun({{M, F, A}, V}) when M =:= Mod ->
+ {{M, F, A}, V} end),
+ List = [begin
+ {V1, V2} = V,
+ {MFA, V1, V2}
+ end || {MFA, V} <- ets:select(Tab, MS)],
case List =:= [] of
true -> none;
false -> {value, List}
end.
-table_all_modules(Plt) ->
- Fold =
- fun({M, _F, _A}, _Val, Acc) -> sets:add_element(M, Acc);
- (_, _, Acc) -> Acc
- end,
- dict:fold(Fold, sets:new(), Plt).
+table_all_modules(Tab) ->
+ Ks = ets:match(Tab, {'$1', '_'}, 100),
+ all_mods(Ks, sets:new()).
+
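+%% Collect the modules occurring in {M,F,A} keys, traversing the table
+%% in chunks of 100 keys using ets:match/3 continuations.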
+all_mods('$end_of_table', S) ->
+ S;
+all_mods({ListsOfKeys, Cont}, S) ->
+ S1 = lists:foldl(fun([{M, _F, _A}], S0) -> sets:add_element(M, S0)
+ end, S, ListsOfKeys),
+ all_mods(ets:match(Cont), S1).
table_merge([H|T]) ->
table_merge(T, H).
@@ -659,7 +720,7 @@ table_merge([H|T]) ->
table_merge([], Acc) ->
Acc;
table_merge([Plt|Plts], Acc) ->
- NewAcc = dict:merge(fun(_Key, Val, Val) -> Val end, Plt, Acc),
+ NewAcc = merge_tables(Plt, Acc),
table_merge(Plts, NewAcc).
table_disj_merge([H|T]) ->
@@ -670,24 +731,18 @@ table_disj_merge([], Acc) ->
table_disj_merge([Plt|Plts], Acc) ->
case table_is_disjoint(Plt, Acc) of
true ->
- NewAcc = dict:merge(fun(_Key, _Val1, _Val2) -> gazonk end,
- Plt, Acc),
+ NewAcc = merge_tables(Plt, Acc),
table_disj_merge(Plts, NewAcc);
false -> throw({dialyzer_error, not_disjoint_plts})
end.
-table_is_disjoint(T1, T2) ->
- K1 = dict:fetch_keys(T1),
- K2 = dict:fetch_keys(T2),
- lists:all(fun(E) -> not lists:member(E, K2) end, K1).
-
sets_merge([H|T]) ->
sets_merge(T, H).
sets_merge([], Acc) ->
Acc;
sets_merge([Plt|Plts], Acc) ->
- NewAcc = sets:union(Plt, Acc),
+ NewAcc = merge_tables(Plt, Acc),
sets_merge(Plts, NewAcc).
sets_disj_merge([H|T]) ->
@@ -696,13 +751,39 @@ sets_disj_merge([H|T]) ->
sets_disj_merge([], Acc) ->
Acc;
sets_disj_merge([Plt|Plts], Acc) ->
- case sets:is_disjoint(Plt, Acc) of
+ case table_is_disjoint(Plt, Acc) of
true ->
- NewAcc = sets:union(Plt, Acc),
+ NewAcc = merge_tables(Plt, Acc),
sets_disj_merge(Plts, NewAcc);
false -> throw({dialyzer_error, not_disjoint_plts})
end.
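+%% Two tables are disjoint if no key of T1 also occurs in T2.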
+table_is_disjoint(T1, T2) ->
+ tab_is_disj(ets:first(T1), T1, T2).
+
+tab_is_disj('$end_of_table', _T1, _T2) ->
+ true;
+tab_is_disj(K1, T1, T2) ->
+ case ets:member(T2, K1) of
+ false ->
+ tab_is_disj(ets:next(T1, K1), T1, T2);
+ true ->
+ false
+ end.
+
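+%% Move every object of T1 into T2, then delete T1.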
+merge_tables(T1, T2) ->
+ tab_merge(ets:first(T1), T1, T2).
+
+tab_merge('$end_of_table', T1, T2) ->
+ true = ets:delete(T1),
+ T2;
+tab_merge(K1, T1, T2) ->
+ Vs = ets:lookup(T1, K1),
+ NextK1 = ets:next(T1, K1),
+ true = ets:delete(T1, K1),
+ true = ets:insert(T2, Vs),
+ tab_merge(NextK1, T1, T2).
+
%%---------------------------------------------------------------------------
%% Debug utilities.
@@ -720,7 +801,7 @@ pp_non_returning() ->
io:format("= Loops =\n"),
io:format("=========================================\n\n"),
lists:foreach(fun({{M, F, _}, Type}) ->
- io:format("~w:~w~s.\n",
+ io:format("~w:~tw~ts.\n",
[M, F, dialyzer_utils:format_sig(Type)])
end, lists:sort(Unit)),
io:format("\n"),
@@ -730,7 +811,8 @@ pp_non_returning() ->
lists:foreach(fun({{M, F, _}, Type}) ->
io:format("~w:~w~s.\n",
[M, F, dialyzer_utils:format_sig(Type)])
- end, lists:sort(None)).
+ end, lists:sort(None)),
+ delete(Plt).
-spec pp_mod(atom()) -> 'ok'.
@@ -742,8 +824,9 @@ pp_mod(Mod) when is_atom(Mod) ->
lists:foreach(fun({{_, F, _}, Ret, Args}) ->
T = erl_types:t_fun(Args, Ret),
S = dialyzer_utils:format_sig(T),
- io:format("-spec ~w~s.\n", [F, S])
+ io:format("-spec ~tw~ts.\n", [F, S])
end, lists:sort(List));
none ->
io:format("dialyzer: Found no module named '~s' in the PLT\n", [Mod])
- end.
+ end,
+ delete(Plt).
diff --git a/lib/dialyzer/src/dialyzer_race_data_server.erl b/lib/dialyzer/src/dialyzer_race_data_server.erl
new file mode 100644
index 0000000000..953cebfdf1
--- /dev/null
+++ b/lib/dialyzer/src/dialyzer_race_data_server.erl
@@ -0,0 +1,127 @@
+%% -*- erlang-indent-level: 2 -*-
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+
+%%%-------------------------------------------------------------------
+%%% File : dialyzer_race_data_server.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 18 Sep 2015 by Luca Favatella <[email protected]>
+%%%-------------------------------------------------------------------
+-module(dialyzer_race_data_server).
+
+-export([new/0,
+ duplicate/1,
+ stop/1,
+ call/2,
+ cast/2]).
+
+-include("dialyzer.hrl").
+
+%%----------------------------------------------------------------------
+
+-record(state, {race_code = dict:new() :: dict:dict(),
+ public_tables = [] :: [label()],
+ named_tables = [] :: [string()],
+ beh_api_calls = [] :: [{mfa(), mfa()}]}).
+
+%%----------------------------------------------------------------------
+
+-spec new() -> pid().
+
+new() ->
+ spawn_link(fun() -> loop(#state{}) end).
+
+-spec duplicate(pid()) -> pid().
+
+duplicate(Server) ->
+ call(dup, Server).
+
+-spec stop(pid()) -> ok.
+
+stop(Server) ->
+ cast(stop, Server).
+
+-spec call(atom(), pid()) -> term().
+
+call(Query, Server) ->
+ Ref = make_ref(),
+ Server ! {call, self(), Ref, Query},
+ receive
+ {Ref, Reply} -> Reply
+ end.
+
+-spec cast(atom() | {atom(), term()}, pid()) -> ok.
+
+cast(Message, Server) ->
+ Server ! {cast, Message},
+ ok.
+
+%%----------------------------------------------------------------------
+
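+%% Plain server loop: {call, From, Ref, Query} messages are answered
+%% with {Ref, Reply}; {cast, Message} updates the state, and
+%% {cast, stop} terminates the loop.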
+loop(State) ->
+ receive
+ {call, From, Ref, Query} ->
+ Reply = handle_call(Query, State),
+ From ! {Ref, Reply},
+ loop(State);
+ {cast, stop} ->
+ ok;
+ {cast, Message} ->
+ NewState = handle_cast(Message, State),
+ loop(NewState)
+ end.
+
+handle_cast(race_code_new, State) ->
+ State#state{race_code = dict:new()};
+handle_cast({Tag, Data}, State) ->
+ case Tag of
+ renew_race_info -> renew_race_info_handler(Data, State);
+ renew_race_code -> renew_race_code_handler(Data, State);
+ renew_race_public_tables -> renew_race_public_tables_handler(Data, State);
+ put_race_code -> State#state{race_code = Data};
+ put_public_tables -> State#state{public_tables = Data};
+ put_named_tables -> State#state{named_tables = Data};
+ put_behaviour_api_calls -> State#state{beh_api_calls = Data}
+ end.
+
+handle_call(Query,
+ #state{race_code = RaceCode,
+ public_tables = PublicTables,
+ named_tables = NamedTables,
+ beh_api_calls = BehApiCalls}
+ = State) ->
+ case Query of
+ dup -> spawn_link(fun() -> loop(State) end);
+ get_race_code -> RaceCode;
+ get_public_tables -> PublicTables;
+ get_named_tables -> NamedTables;
+ get_behaviour_api_calls -> BehApiCalls
+ end.
+
+%%----------------------------------------------------------------------
+
+renew_race_info_handler({RaceCode, PublicTables, NamedTables},
+ #state{} = State) ->
+ State#state{race_code = RaceCode,
+ public_tables = PublicTables,
+ named_tables = NamedTables}.
+
+renew_race_code_handler({Fun, FunArgs, Code},
+ #state{race_code = RaceCode} = State) ->
+ State#state{race_code = dict:store(Fun, [FunArgs, Code], RaceCode)}.
+
+renew_race_public_tables_handler(VarLabel,
+ #state{public_tables = PT} = State) ->
+ State#state{public_tables = ordsets:add_element(VarLabel, PT)}.
diff --git a/lib/dialyzer/src/dialyzer_races.erl b/lib/dialyzer/src/dialyzer_races.erl
index 39de071bde..7fe64c3e11 100644
--- a/lib/dialyzer/src/dialyzer_races.erl
+++ b/lib/dialyzer/src/dialyzer_races.erl
@@ -1,8 +1,4 @@
%% -*- erlang-indent-level: 2 -*-
-%%-----------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2014. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -15,9 +11,6 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
%%%----------------------------------------------------------------------
%%% File : dialyzer_races.erl
@@ -92,27 +85,28 @@
-type race_warn_tag() :: ?WARN_WHEREIS_REGISTER | ?WARN_WHEREIS_UNREGISTER
| ?WARN_ETS_LOOKUP_INSERT | ?WARN_MNESIA_DIRTY_READ_WRITE.
--record(beg_clause, {arg :: var_to_map1(),
- pats :: var_to_map1(),
- guard :: cerl:cerl()}).
--record(end_clause, {arg :: var_to_map1(),
- pats :: var_to_map1(),
- guard :: cerl:cerl()}).
+-record(beg_clause, {arg :: var_to_map1() | 'undefined',
+ pats :: var_to_map1() | 'undefined',
+ guard :: cerl:cerl() | 'undefined'}).
+-record(end_clause, {arg :: var_to_map1() | 'undefined',
+ pats :: var_to_map1() | 'undefined',
+ guard :: cerl:cerl() | 'undefined'}).
-record(end_case, {clauses :: [#end_clause{}]}).
--record(curr_fun, {status :: 'in' | 'out',
- mfa :: dialyzer_callgraph:mfa_or_funlbl(),
- label :: label(),
- def_vars :: [core_vars()],
- arg_types :: [erl_types:erl_type()],
- call_vars :: [core_vars()],
- var_map :: dict:dict()}).
+-record(curr_fun, {status :: 'in' | 'out' | 'undefined',
+ mfa :: dialyzer_callgraph:mfa_or_funlbl()
+ | 'undefined',
+ label :: label() | 'undefined',
+ def_vars :: [core_vars()] | 'undefined',
+ arg_types :: [erl_types:erl_type()] | 'undefined',
+ call_vars :: [core_vars()] | 'undefined',
+ var_map :: dict:dict() | 'undefined'}).
-record(dep_call, {call_name :: dep_calls(),
- args :: args(),
+ args :: args() | 'undefined',
arg_types :: [erl_types:erl_type()],
vars :: [core_vars()],
state :: dialyzer_dataflow:state(),
file_line :: file_line(),
- var_map :: dict:dict()}).
+ var_map :: dict:dict() | 'undefined'}).
-record(fun_call, {caller :: dialyzer_callgraph:mfa_or_funlbl(),
callee :: dialyzer_callgraph:mfa_or_funlbl(),
arg_types :: [erl_types:erl_type()],
@@ -121,7 +115,7 @@
arg :: var_to_map1()}).
-record(warn_call, {call_name :: warn_calls(),
args :: args(),
- var_map :: dict:dict()}).
+ var_map :: dict:dict() | 'undefined'}).
-type case_tags() :: 'beg_case' | #beg_clause{} | #end_clause{} | #end_case{}.
-type code() :: [#dep_call{} | #fun_call{} | #warn_call{} |
@@ -139,8 +133,9 @@
fun_mfa :: dialyzer_callgraph:mfa_or_funlbl(),
fun_label :: label()}).
--record(races, {curr_fun :: dialyzer_callgraph:mfa_or_funlbl(),
- curr_fun_label :: label(),
+-record(races, {curr_fun :: dialyzer_callgraph:mfa_or_funlbl()
+ | 'undefined',
+ curr_fun_label :: label() | 'undefined',
curr_fun_args = 'empty' :: core_args(),
new_table = 'no_t' :: table(),
race_list = [] :: code(),
diff --git a/lib/dialyzer/src/dialyzer_succ_typings.erl b/lib/dialyzer/src/dialyzer_succ_typings.erl
index 18f02e6742..48115bb683 100644
--- a/lib/dialyzer/src/dialyzer_succ_typings.erl
+++ b/lib/dialyzer/src/dialyzer_succ_typings.erl
@@ -1,8 +1,4 @@
%% -*- erlang-indent-level: 2 -*-
-%%-----------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -15,9 +11,6 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
%%%-------------------------------------------------------------------
%%% File : dialyzer_succ_typings.erl
@@ -36,7 +29,7 @@
-export([
find_succ_types_for_scc/2,
refine_one_module/2,
- find_required_by/2,
+ %% find_required_by/2,
find_depends_on/2,
collect_warnings/2,
lookup_names/2
@@ -96,7 +89,7 @@ analyze_callgraph(Callgraph, Plt, Codeserver, TimingServer, Solvers, Parent) ->
NewState =
init_state_and_get_success_typings(Callgraph, Plt, Codeserver,
TimingServer, Solvers, Parent),
- dialyzer_plt:restore_full_plt(NewState#st.plt, Plt).
+ NewState#st.plt.
%%--------------------------------------------------------------------
@@ -104,7 +97,7 @@ init_state_and_get_success_typings(Callgraph, Plt, Codeserver,
TimingServer, Solvers, Parent) ->
{SCCs, Callgraph1} =
?timing(TimingServer, "order", dialyzer_callgraph:finalize(Callgraph)),
- State = #st{callgraph = Callgraph1, plt = dialyzer_plt:get_mini_plt(Plt),
+ State = #st{callgraph = Callgraph1, plt = Plt,
codeserver = Codeserver, parent = Parent,
timing_server = TimingServer, solvers = Solvers},
get_refined_success_typings(SCCs, State).
@@ -134,7 +127,6 @@ get_refined_success_typings(SCCs, #st{callgraph = Callgraph,
end
end.
--type doc_plt() :: 'undefined' | dialyzer_plt:plt().
-spec get_warnings(dialyzer_callgraph:callgraph(), dialyzer_plt:plt(),
doc_plt(), dialyzer_codeserver:codeserver(),
dialyzer_timing:timing_server(), [solver()], pid()) ->
@@ -146,18 +138,15 @@ get_warnings(Callgraph, Plt, DocPlt, Codeserver,
init_state_and_get_success_typings(Callgraph, Plt, Codeserver,
TimingServer, Solvers, Parent),
Mods = dialyzer_callgraph:modules(InitState#st.callgraph),
- MiniPlt = InitState#st.plt,
- FindOpaques = lookup_and_find_opaques_fun(Codeserver),
+ Plt = InitState#st.plt,
CWarns =
- dialyzer_contracts:get_invalid_contract_warnings(Mods, Codeserver,
- MiniPlt, FindOpaques),
- MiniDocPlt = dialyzer_plt:get_mini_plt(DocPlt),
+ dialyzer_contracts:get_invalid_contract_warnings(Mods, Codeserver, Plt),
ModWarns =
?timing(TimingServer, "warning",
- get_warnings_from_modules(Mods, InitState, MiniDocPlt)),
+ get_warnings_from_modules(Mods, InitState, DocPlt)),
{postprocess_warnings(CWarns ++ ModWarns, Codeserver),
- dialyzer_plt:restore_full_plt(MiniPlt, Plt),
- dialyzer_plt:restore_full_plt(MiniDocPlt, DocPlt)}.
+ Plt,
+ DocPlt}.
get_warnings_from_modules(Mods, State, DocPlt) ->
#st{callgraph = Callgraph, codeserver = Codeserver,
@@ -169,16 +158,16 @@ get_warnings_from_modules(Mods, State, DocPlt) ->
collect_warnings(M, {Codeserver, Callgraph, Plt, DocPlt}) ->
ModCode = dialyzer_codeserver:lookup_mod_code(M, Codeserver),
- Records = dialyzer_codeserver:lookup_mod_records(M, Codeserver),
Contracts = dialyzer_codeserver:lookup_mod_contracts(M, Codeserver),
AllFuns = collect_fun_info([ModCode]),
%% Check if there are contracts for functions that do not exist
Warnings1 =
dialyzer_contracts:contracts_without_fun(Contracts, AllFuns, Callgraph),
+ Attrs = cerl:module_attrs(ModCode),
+ Records = dialyzer_codeserver:lookup_mod_records(M, Codeserver),
{Warnings2, FunTypes} =
dialyzer_dataflow:get_warnings(ModCode, Plt, Callgraph, Codeserver,
Records),
- Attrs = cerl:module_attrs(ModCode),
Warnings3 =
dialyzer_behaviours:check_callbacks(M, Attrs, Records, Plt, Codeserver),
DocPlt = insert_into_doc_plt(FunTypes, Callgraph, DocPlt),
@@ -243,10 +232,10 @@ refine_succ_typings(Modules, #st{codeserver = Codeserver,
find_depends_on(SCC, {_Codeserver, Callgraph, _Plt, _Solvers}) ->
dialyzer_callgraph:get_depends_on(SCC, Callgraph).
--spec find_required_by(scc() | module(), fixpoint_init_data()) -> [scc()].
+%% -spec find_required_by(scc() | module(), fixpoint_init_data()) -> [scc()].
-find_required_by(SCC, {_Codeserver, Callgraph, _Plt, _Solvers}) ->
- dialyzer_callgraph:get_required_by(SCC, Callgraph).
+%% find_required_by(SCC, {_Codeserver, Callgraph, _Plt, _Solvers}) ->
+%% dialyzer_callgraph:get_required_by(SCC, Callgraph).
-spec lookup_names([label()], fixpoint_init_data()) -> [mfa_or_funlbl()].
@@ -258,24 +247,22 @@ lookup_names(Labels, {_Codeserver, Callgraph, _Plt, _Solvers}) ->
refine_one_module(M, {CodeServer, Callgraph, Plt, _Solvers}) ->
ModCode = dialyzer_codeserver:lookup_mod_code(M, CodeServer),
AllFuns = collect_fun_info([ModCode]),
- Records = dialyzer_codeserver:lookup_mod_records(M, CodeServer),
FunTypes = get_fun_types_from_plt(AllFuns, Callgraph, Plt),
+ Records = dialyzer_codeserver:lookup_mod_records(M, CodeServer),
NewFunTypes =
dialyzer_dataflow:get_fun_types(ModCode, Plt, Callgraph, CodeServer, Records),
- Contracts1 = dialyzer_codeserver:lookup_mod_contracts(M, CodeServer),
- Contracts = orddict:from_list(dict:to_list(Contracts1)),
- FindOpaques = find_opaques_fun(Records),
- DecoratedFunTypes =
- decorate_succ_typings(Contracts, Callgraph, NewFunTypes, FindOpaques),
- %% ?debug("NewFunTypes ~p\n ~n", [dict:to_list(NewFunTypes)]),
- %% ?debug("refine DecoratedFunTypes ~p\n ~n", [dict:to_list(DecoratedFunTypes)]),
+ {FunMFAContracts, ModOpaques} =
+ prepare_decoration(NewFunTypes, Callgraph, CodeServer),
+ DecoratedFunTypes = decorate_succ_typings(FunMFAContracts, ModOpaques),
+    %% ?debug("NewFunTypes ~tp\n ~n", [NewFunTypes]),
+ %% ?debug("refine DecoratedFunTypes ~tp\n ~n", [DecoratedFunTypes]),
debug_pp_functions("Refine", NewFunTypes, DecoratedFunTypes, Callgraph),
case reached_fixpoint(FunTypes, DecoratedFunTypes) of
true -> [];
{false, NotFixpoint} ->
?debug("Not fixpoint\n", []),
- Plt = insert_into_plt(dict:from_list(NotFixpoint), Callgraph, Plt),
+ Plt = insert_into_plt(orddict:from_list(NotFixpoint), Callgraph, Plt),
[FunLbl || {FunLbl,_Type} <- NotFixpoint]
end.
@@ -289,22 +276,20 @@ reached_fixpoint_strict(OldTypes, NewTypes) ->
end.
reached_fixpoint(OldTypes0, NewTypes0, Strict) ->
- MapFun = fun(_Key, Type) ->
+ MapFun = fun({Key, Type}) ->
case is_failed_or_not_called_fun(Type) of
- true -> failed_fun;
- false -> erl_types:t_limit(Type, ?TYPE_LIMIT)
+ true -> {Key, failed_fun};
+ false -> {Key, erl_types:t_limit(Type, ?TYPE_LIMIT)}
end
end,
- OldTypes = dict:map(MapFun, OldTypes0),
- NewTypes = dict:map(MapFun, NewTypes0),
+ OldTypes = lists:map(MapFun, orddict:to_list(OldTypes0)),
+ NewTypes = lists:map(MapFun, orddict:to_list(NewTypes0)),
compare_types(OldTypes, NewTypes, Strict).
is_failed_or_not_called_fun(Type) ->
erl_types:any_none([erl_types:t_fun_range(Type)|erl_types:t_fun_args(Type)]).
-compare_types(Dict1, Dict2, Strict) ->
- List1 = lists:keysort(1, dict:to_list(Dict1)),
- List2 = lists:keysort(1, dict:to_list(Dict2)),
+compare_types(List1, List2, Strict) ->
compare_types_1(List1, List2, Strict, []).
compare_types_1([{X, _Type1}|Left1], [{X, failed_fun}|Left2],
@@ -321,7 +306,7 @@ compare_types_1([{X, Type1}|Left1], [{X, Type2}|Left2], Strict, NotFixpoint) ->
case Res of
true -> compare_types_1(Left1, Left2, Strict, NotFixpoint);
false ->
- ?debug("Failed fixpoint for ~w: ~s =/= ~s\n",
+ ?debug("Failed fixpoint for ~w: ~ts =/= ~ts\n",
[X, erl_types:t_to_string(Type1), erl_types:t_to_string(Type2)]),
compare_types_1(Left1, Left2, Strict, [{X, Type2}|NotFixpoint])
end;
@@ -349,88 +334,95 @@ find_succ_typings(SCCs, #st{codeserver = Codeserver, callgraph = Callgraph,
-spec find_succ_types_for_scc(scc(), typesig_init_data()) -> [mfa_or_funlbl()].
-find_succ_types_for_scc(SCC, {Codeserver, Callgraph, Plt, Solvers}) ->
- SCC_Info = [{MFA,
- dialyzer_codeserver:lookup_mfa_code(MFA, Codeserver),
- dialyzer_codeserver:lookup_mod_records(M, Codeserver)}
- || {M, _, _} = MFA <- SCC],
- Contracts1 = [{MFA, dialyzer_codeserver:lookup_mfa_contract(MFA, Codeserver)}
- || {_, _, _} = MFA <- SCC],
- Contracts2 = [{MFA, Contract} || {MFA, {ok, Contract}} <- Contracts1],
- Contracts3 = orddict:from_list(Contracts2),
+find_succ_types_for_scc(SCC0, {Codeserver, Callgraph, Plt, Solvers}) ->
+ SCC = [MFA || {_, _, _} = MFA <- SCC0],
Label = dialyzer_codeserver:get_next_core_label(Codeserver),
- AllFuns = collect_fun_info([Fun || {_MFA, {_Var, Fun}, _Rec} <- SCC_Info]),
+ AllFuns = lists:append(
+ [begin
+ {_Var, Fun} =
+ dialyzer_codeserver:lookup_mfa_code(MFA, Codeserver),
+ collect_fun_info([Fun])
+ end || MFA <- SCC]),
PropTypes = get_fun_types_from_plt(AllFuns, Callgraph, Plt),
%% Assume that the PLT contains the current propagated types
- FunTypes = dialyzer_typesig:analyze_scc(SCC_Info, Label, Callgraph,
- Plt, PropTypes, Solvers),
+ FunTypes = dialyzer_typesig:analyze_scc(SCC, Label, Callgraph,
+ Codeserver, Plt, PropTypes,
+ Solvers),
AllFunSet = sets:from_list([X || {X, _} <- AllFuns]),
FilteredFunTypes =
- dict:filter(fun(X, _) -> sets:is_element(X, AllFunSet) end, FunTypes),
- FindOpaques = lookup_and_find_opaques_fun(Codeserver),
- DecoratedFunTypes =
- decorate_succ_typings(Contracts3, Callgraph, FilteredFunTypes, FindOpaques),
+ orddict:filter(fun(F, _T) -> sets:is_element(F, AllFunSet)
+ end, FunTypes),
+ {FunMFAContracts, ModOpaques} =
+ prepare_decoration(FilteredFunTypes, Callgraph, Codeserver),
+ DecoratedFunTypes = decorate_succ_typings(FunMFAContracts, ModOpaques),
%% Check contracts
+ Contracts = orddict:from_list([{MFA, Contract} ||
+ {_, {MFA, Contract}} <- FunMFAContracts]),
PltContracts =
- dialyzer_contracts:check_contracts(Contracts3, Callgraph,
- DecoratedFunTypes, FindOpaques),
- %% ?debug("FilteredFunTypes ~p\n ~n", [dict:to_list(FilteredFunTypes)]),
- %% ?debug("SCC DecoratedFunTypes ~p\n ~n", [dict:to_list(DecoratedFunTypes)]),
+ dialyzer_contracts:check_contracts(Contracts, Callgraph,
+ DecoratedFunTypes,
+ ModOpaques),
+ %% ?debug("FilteredFunTypes ~tp\n ~n", [FilteredFunTypes]),
+ %% ?debug("SCC DecoratedFunTypes ~tp\n ~n", [DecoratedFunTypes]),
debug_pp_functions("SCC", FilteredFunTypes, DecoratedFunTypes, Callgraph),
- ContractFixpoint =
- lists:all(fun({MFA, _C}) ->
- %% Check the non-deleted PLT
- case dialyzer_plt:lookup_contract(Plt, MFA) of
- none -> false;
- {value, _} -> true
- end
- end, PltContracts),
+ NewPltContracts = [MC ||
+ {MFA, _C}=MC <- PltContracts,
+ %% Check the non-deleted PLT
+ not dialyzer_plt:is_contract(Plt, MFA)],
+ ContractFixpoint = NewPltContracts =:= [],
Plt = insert_into_plt(DecoratedFunTypes, Callgraph, Plt),
- Plt = dialyzer_plt:insert_contract_list(Plt, PltContracts),
+ Plt = dialyzer_plt:insert_contract_list(Plt, NewPltContracts),
case (ContractFixpoint andalso
reached_fixpoint_strict(PropTypes, DecoratedFunTypes)) of
true -> [];
false ->
- ?debug("Not fixpoint for: ~w\n", [AllFuns]),
+ ?debug("Not fixpoint for: ~tw\n", [AllFuns]),
[Fun || {Fun, _Arity} <- AllFuns]
end.
-decorate_succ_typings(Contracts, Callgraph, FunTypes, FindOpaques) ->
- F = fun(Label, Type) ->
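+%% Pair each {Label, Type} with the contract of its MFA (if any) and
+%% collect the opaque types of each module involved, so that the
+%% success typings can be decorated with opaque types.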
+prepare_decoration(FunTypes, Callgraph, Codeserver) ->
+ F = fun({Label, _Type}=LabelType, Acc) ->
case dialyzer_callgraph:lookup_name(Label, Callgraph) of
{ok, MFA} ->
- case orddict:find(MFA, Contracts) of
+ case dialyzer_codeserver:lookup_mfa_contract(MFA, Codeserver) of
{ok, {_FileLine, Contract, _Xtra}} ->
- Args = dialyzer_contracts:get_contract_args(Contract),
- Ret = dialyzer_contracts:get_contract_return(Contract),
- C = erl_types:t_fun(Args, Ret),
- {M, _, _} = MFA,
- Opaques = FindOpaques(M),
- erl_types:t_decorate_with_opaque(Type, C, Opaques);
- error -> Type
+ [{LabelType, {MFA, Contract}}|Acc];
+ error -> [{LabelType, no}|Acc]
end;
- error -> Type
+ error -> [{LabelType, no}|Acc]
end
end,
- dict:map(F, FunTypes).
-
-lookup_and_find_opaques_fun(Codeserver) ->
- fun(Module) ->
- Records = dialyzer_codeserver:lookup_mod_records(Module, Codeserver),
- (find_opaques_fun(Records))(Module)
- end.
+ Contracts = lists:foldl(F, [], orddict:to_list(FunTypes)),
+ ModOpaques =
+ [{M, lookup_opaques(M, Codeserver)} ||
+ M <- lists:usort([M || {_LabelType, {{M, _, _}, _Con}} <- Contracts])],
+ {Contracts, orddict:from_list(ModOpaques)}.
+
+decorate_succ_typings(FunTypesContracts, ModOpaques) ->
+ F = fun({{Label, Type}, {{M, _, _}, Contract}}) ->
+ Args = dialyzer_contracts:get_contract_args(Contract),
+ Ret = dialyzer_contracts:get_contract_return(Contract),
+ C = erl_types:t_fun(Args, Ret),
+ {M, Opaques} = lists:keyfind(M, 1, ModOpaques),
+ R = erl_types:t_decorate_with_opaque(Type, C, Opaques),
+ {Label, R};
+ ({LabelType, no}) ->
+ LabelType
+ end,
+ orddict:from_list(lists:map(F, FunTypesContracts)).
-find_opaques_fun(Records) ->
- fun(_Module) -> erl_types:t_opaque_from_records(Records) end.
+lookup_opaques(Module, Codeserver) ->
+ Records = dialyzer_codeserver:lookup_mod_records(Module, Codeserver),
+ erl_types:t_opaque_from_records(Records).
get_fun_types_from_plt(FunList, Callgraph, Plt) ->
- get_fun_types_from_plt(FunList, Callgraph, Plt, dict:new()).
+ get_fun_types_from_plt(FunList, Callgraph, Plt, []).
get_fun_types_from_plt([{FunLabel, Arity}|Left], Callgraph, Plt, Map) ->
Type = lookup_fun_type(FunLabel, Arity, Callgraph, Plt),
- get_fun_types_from_plt(Left, Callgraph, Plt, dict:store(FunLabel, Type, Map));
+ get_fun_types_from_plt(Left, Callgraph, Plt, [{FunLabel, Type}|Map]);
get_fun_types_from_plt([], _Callgraph, _Plt, Map) ->
- Map.
+ orddict:from_list(Map).
collect_fun_info(Trees) ->
collect_fun_info(Trees, []).
@@ -466,7 +458,7 @@ insert_into_plt(SuccTypes0, Callgraph, Plt) ->
dialyzer_plt:insert_list(Plt, SuccTypes).
format_succ_types(SuccTypes, Callgraph) ->
- format_succ_types(dict:to_list(SuccTypes), Callgraph, []).
+ format_succ_types(SuccTypes, Callgraph, []).
format_succ_types([{Label, Type0}|Left], Callgraph, Acc) ->
Type = erl_types:t_limit(Type0, ?TYPE_LIMIT+1),
@@ -479,28 +471,26 @@ format_succ_types([], _Callgraph, Acc) ->
-ifdef(DEBUG).
debug_pp_succ_typings(SuccTypes) ->
?debug("Succ typings:\n", []),
- [?debug(" ~w :: ~s\n",
+ [?debug(" ~tw :: ~ts\n",
[MFA, erl_types:t_to_string(erl_types:t_fun(ArgT, RetT))])
|| {MFA, {RetT, ArgT}} <- SuccTypes],
?debug("Contracts:\n", []),
- [?debug(" ~w :: ~s\n",
+ [?debug(" ~tw :: ~ts\n",
[MFA, erl_types:t_to_string(erl_types:t_fun(ArgT, RetFun(ArgT)))])
|| {MFA, {contract, RetFun, ArgT}} <- SuccTypes],
?debug("\n", []),
ok.
-debug_pp_functions(Header, FunTypes, DecoratedFunTypes, Callgraph) ->
+debug_pp_functions(Header, FTypes, DTypes, Callgraph) ->
?debug("FunTypes (~s)\n", [Header]),
- FTypes = lists:keysort(1, dict:to_list(FunTypes)),
- DTypes = lists:keysort(1, dict:to_list(DecoratedFunTypes)),
Fun = fun({{Label, Type},{Label, DecoratedType}}) ->
Name = lookup_name(Label, Callgraph),
- ?debug("~w (~w): ~s\n",
+ ?debug("~tw (~w): ~ts\n",
[Name, Label, erl_types:t_to_string(Type)]),
case erl_types:t_is_equal(Type, DecoratedType) of
true -> ok;
false ->
- ?debug(" With opaque types: ~s\n",
+ ?debug(" With opaque types: ~ts\n",
[erl_types:t_to_string(DecoratedType)])
end
end,
diff --git a/lib/dialyzer/src/dialyzer_timing.erl b/lib/dialyzer/src/dialyzer_timing.erl
index 33fd008732..056ccd51d8 100644
--- a/lib/dialyzer/src/dialyzer_timing.erl
+++ b/lib/dialyzer/src/dialyzer_timing.erl
@@ -1,8 +1,4 @@
%% -*- erlang-indent-level: 2 -*-
-%%-------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2012. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -15,9 +11,6 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
%%%-------------------------------------------------------------------
%%% File : dialyzer_timing.erl
diff --git a/lib/dialyzer/src/dialyzer_typesig.erl b/lib/dialyzer/src/dialyzer_typesig.erl
index 0b8b244cc9..c4d8f45447 100644
--- a/lib/dialyzer/src/dialyzer_typesig.erl
+++ b/lib/dialyzer/src/dialyzer_typesig.erl
@@ -1,8 +1,4 @@
%% -*- erlang-indent-level: 2 -*-
-%%-----------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -15,9 +11,6 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
%%%-------------------------------------------------------------------
%%% File : dialyzer_typesig.erl
@@ -29,7 +22,7 @@
-module(dialyzer_typesig).
--export([analyze_scc/6]).
+-export([analyze_scc/7]).
-export([get_safe_underapprox/2]).
%%-import(helper, %% 'helper' could be any module doing sanity checks...
@@ -48,6 +41,7 @@
t_is_float/1, t_is_fun/1,
t_is_integer/1, t_non_neg_integer/0,
t_is_list/1, t_is_nil/1, t_is_none/1, t_is_number/1,
+ t_is_singleton/1,
t_limit/2, t_list/0, t_list/1,
t_list_elements/1, t_nonempty_list/1, t_maybe_improper_list/0,
@@ -57,7 +51,7 @@
t_timeout/0, t_tuple/0, t_tuple/1,
t_var/1, t_var_name/1,
t_none/0, t_unit/0,
- t_map/1
+ t_map/0, t_map/1, t_map_get/2, t_map_put/2
]).
-include("dialyzer.hrl").
@@ -65,10 +59,12 @@
%%-----------------------------------------------------------------------------
-type dep() :: integer(). %% type variable names used as constraint ids
+-type deps() :: ordsets:ordset(dep()).
+
-type type_var() :: erl_types:erl_type(). %% actually: {'c','var',_,_}
--record(fun_var, {'fun' :: fun((_) -> erl_types:erl_type()), deps :: [dep()],
- origin :: integer()}).
+-record(fun_var, {'fun' :: fun((_) -> erl_types:erl_type()), deps :: deps(),
+ origin :: integer() | 'undefined'}).
-type constr_op() :: 'eq' | 'sub'.
-type fvar_or_type() :: #fun_var{} | erl_types:erl_type().
@@ -76,20 +72,21 @@
-record(constraint, {lhs :: erl_types:erl_type(),
op :: constr_op(),
rhs :: fvar_or_type(),
- deps :: [dep()]}).
+ deps :: deps()}).
-type constraint() :: #constraint{}.
+-type mask() :: ordsets:ordset(non_neg_integer()).
+
-record(constraint_list, {type :: 'conj' | 'disj',
list :: [constr()],
- deps :: [dep()],
- masks :: [{dep(),[non_neg_integer()]}] |
- {'d',dict:dict(dep(), [non_neg_integer()])},
- id :: {'list', dep()}}).
+ deps :: deps(),
+ masks :: #{dep() => mask()} | 'undefined',
+ id :: {'list', dep()} | 'undefined'}).
-type constraint_list() :: #constraint_list{}.
--record(constraint_ref, {id :: type_var(), deps :: [dep()]}).
+-record(constraint_ref, {id :: type_var(), deps :: deps()}).
-type constraint_ref() :: #constraint_ref{}.
@@ -97,33 +94,35 @@
-type types() :: erl_types:type_table().
--type typesig_scc() :: [{mfa(), {cerl:c_var(), cerl:c_fun()}, types()}].
--type typesig_funmap() :: [{type_var(), type_var()}]. %% Orddict
-
--type prop_types() :: dict:dict(label(), types()).
-
--type dict_or_ets() :: {'d', prop_types()} | {'e', ets:tid()}.
-
--record(state, {callgraph :: dialyzer_callgraph:callgraph(),
- cs = [] :: [constr()],
- cmap = {'d', dict:new()} :: dict_or_ets(),
- fun_map = [] :: typesig_funmap(),
- fun_arities = dict:new() :: dict:dict(type_var(), arity()),
- in_match = false :: boolean(),
- in_guard = false :: boolean(),
- module :: module(),
- name_map = dict:new() :: dict:dict(mfa(),
- cerl:c_fun()),
- next_label = 0 :: label(),
- self_rec :: 'false' | erl_types:erl_type(),
- plt :: dialyzer_plt:plt(),
- prop_types = {'d', dict:new()} :: dict_or_ets(),
- records = dict:new() :: types(),
- scc = [] :: [type_var()],
- mfas :: [tuple()],
- solvers = [] :: [solver()]
+-type typesig_funmap() :: #{type_var() => type_var()}.
+
+-type prop_types() :: orddict:orddict(label(), erl_types:erl_type()).
+-type dict_prop_types() :: dict:dict(label(), erl_types:erl_type()).
+
+-record(state, {callgraph :: dialyzer_callgraph:callgraph()
+ | 'undefined',
+ cserver :: dialyzer_codeserver:codeserver(),
+ cs = [] :: [constr()],
+ cmap = maps:new() :: #{type_var() => constr()},
+ fun_map = maps:new() :: typesig_funmap(),
+ fun_arities = maps:new() :: #{type_var() => arity()},
+ in_match = false :: boolean(),
+ in_guard = false :: boolean(),
+ module :: module(),
+ name_map = maps:new() :: #{mfa() => cerl:c_fun()},
+ next_label = 0 :: label(),
+ self_rec :: 'false' | erl_types:erl_type(),
+ plt :: dialyzer_plt:plt()
+ | 'undefined',
+ prop_types = dict:new() :: dict_prop_types(),
+ mod_records = [] :: [{module(), types()}],
+ scc = [] :: ordsets:ordset(type_var()),
+ mfas :: [mfa()],
+ solvers = [] :: [solver()]
}).
+-type state() :: #state{}.
+
%%-----------------------------------------------------------------------------
-define(TYPE_LIMIT, 4).
@@ -141,9 +140,11 @@
-ifdef(DEBUG).
-define(debug(__String, __Args), io:format(__String, __Args)).
-define(mk_fun_var(Fun, Vars), mk_fun_var(?LINE, Fun, Vars)).
+-define(pp_map(S, M), pp_map(S, M)).
-else.
-define(debug(__String, __Args), ok).
-define(mk_fun_var(Fun, Vars), mk_fun_var(Fun, Vars)).
+-define(pp_map(S, M), ok).
-endif.
%% ============================================================================
@@ -155,11 +156,10 @@
%%-----------------------------------------------------------------------------
%% Analysis of strongly connected components.
%%
-%% analyze_scc(SCC, NextLabel, CallGraph, PLT, PropTypes, Solvers) -> FunTypes
+%% analyze_scc(SCC, NextLabel, CallGraph, CodeServer,
+%% PLT, PropTypes, Solvers) -> FunTypes
%%
-%% SCC - [{MFA, Def, Records}]
-%% where Def = {Var, Fun} as in the Core Erlang module definitions.
-%% Records = dict(RecName, {Arity, [{FieldName, FieldType}]})
+%% SCC - [{MFA}]
%% NextLabel - An integer that is higher than any label in the code.
%% CallGraph - A callgraph as produced by dialyzer_callgraph.erl
%% Note: The callgraph must have been built with all the
@@ -171,26 +171,23 @@
%% Solvers - User specified solvers.
%%-----------------------------------------------------------------------------
--spec analyze_scc(typesig_scc(), label(),
+-spec analyze_scc([mfa()], label(),
dialyzer_callgraph:callgraph(),
+ dialyzer_codeserver:codeserver(),
dialyzer_plt:plt(), prop_types(), [solver()]) -> prop_types().
-analyze_scc(SCC, NextLabel, CallGraph, Plt, PropTypes, Solvers0) ->
+analyze_scc(SCC, NextLabel, CallGraph, CServer, Plt, PropTypes, Solvers0) ->
Solvers = solvers(Solvers0),
- assert_format_of_scc(SCC),
- State1 = new_state(SCC, NextLabel, CallGraph, Plt, PropTypes, Solvers),
- DefSet = add_def_list([Var || {_MFA, {Var, _Fun}, _Rec} <- SCC], sets:new()),
- State2 = traverse_scc(SCC, DefSet, State1),
+ State1 = new_state(SCC, NextLabel, CallGraph, CServer, Plt, PropTypes,
+ Solvers),
+ DefSet = add_def_list(maps:values(State1#state.name_map), sets:new()),
+ State2 = traverse_scc(SCC, CServer, DefSet, State1),
State3 = state__finalize(State2),
Funs = state__scc(State3),
pp_constrs_scc(Funs, State3),
constraints_to_dot_scc(Funs, State3),
- solve(Funs, State3).
-
-assert_format_of_scc([{_MFA, {_Var, _Fun}, _Records}|Left]) ->
- assert_format_of_scc(Left);
-assert_format_of_scc([]) ->
- ok.
+ T = solve(Funs, State3),
+ orddict:from_list(maps:to_list(T)).
solvers([]) -> [v2];
solvers(Solvers) -> Solvers.
@@ -201,12 +198,14 @@ solvers(Solvers) -> Solvers.
%%
%% ============================================================================
-traverse_scc([{_MFA, Def, Rec}|Left], DefSet, AccState) ->
- TmpState1 = state__set_rec_dict(AccState, Rec),
+traverse_scc([{M,_,_}=MFA|Left], Codeserver, DefSet, AccState) ->
+ TmpState1 = state__set_module(AccState, M),
+ Def = dialyzer_codeserver:lookup_mfa_code(MFA, Codeserver),
DummyLetrec = cerl:c_letrec([Def], cerl:c_atom(foo)),
- {NewAccState, _} = traverse(DummyLetrec, DefSet, TmpState1),
- traverse_scc(Left, DefSet, NewAccState);
-traverse_scc([], _DefSet, AccState) ->
+ TmpState2 = state__new_constraint_context(TmpState1),
+ {NewAccState, _} = traverse(DummyLetrec, DefSet, TmpState2),
+ traverse_scc(Left, Codeserver, DefSet, NewAccState);
+traverse_scc([], _Codeserver, _DefSet, AccState) ->
AccState.
traverse(Tree, DefinedVars, State) ->
@@ -309,7 +308,7 @@ traverse(Tree, DefinedVars, State) ->
Hd = cerl:cons_hd(Tree),
Tl = cerl:cons_tl(Tree),
{State1, [HdVar, TlVar]} = traverse_list([Hd, Tl], DefinedVars, State),
- case cerl:is_literal(cerl:fold_literal(Tree)) of
+ case cerl:is_literal(fold_literal_maybe_match(Tree, State)) of
true ->
%% We do not need to do anything more here.
{State, t_cons(HdVar, TlVar)};
@@ -390,8 +389,18 @@ traverse(Tree, DefinedVars, State) ->
{State2, _} = traverse_list(Funs, DefinedVars1, State1),
traverse(Body, DefinedVars1, State2);
literal ->
- Type = t_from_term(cerl:concrete(Tree)),
- {State, Type};
+ %% Maps are special; a literal pattern matches more than just the value
+ %% constructed by the literal. For example #{} constructs the empty map,
+ %% but matches every map.
+ case state__is_in_match(State) of
+ true ->
+ Tree1 = dialyzer_utils:refold_pattern(Tree),
+ case cerl:is_literal(Tree1) of
+ false -> traverse(Tree1, DefinedVars, State);
+ true -> {State, t_from_term(cerl:concrete(Tree))}
+ end;
+ _ -> {State, t_from_term(cerl:concrete(Tree))}
+ end;
module ->
Defs = cerl:module_defs(Tree),
Funs = [Fun || {_Var, Fun} <- Defs],
@@ -435,7 +444,7 @@ traverse(Tree, DefinedVars, State) ->
Elements = cerl:tuple_es(Tree),
{State1, EVars} = traverse_list(Elements, DefinedVars, State),
{State2, TupleType} =
- case cerl:is_literal(cerl:fold_literal(Tree)) of
+ case cerl:is_literal(fold_literal_maybe_match(Tree, State1)) of
true ->
%% We do not need to do anything more here.
{State, t_tuple(EVars)};
@@ -462,19 +471,122 @@ traverse(Tree, DefinedVars, State) ->
true ->
%% Check if a record is constructed.
Arity = length(Fields),
- Records = State2#state.records,
- case lookup_record(Records, cerl:atom_val(Tag), Arity) of
- error -> {State2, TupleType};
- {ok, RecType} ->
- State3 = state__store_conj(TupleType, sub, RecType, State2),
- {State3, TupleType}
+ case lookup_record(State2, cerl:atom_val(Tag), Arity) of
+ {error, State3} -> {State3, TupleType};
+ {ok, RecType, State3} ->
+ State4 = state__store_conj(TupleType, sub, RecType, State3),
+ {State4, TupleType}
end;
false -> {State2, TupleType}
end;
[] -> {State2, TupleType}
end;
map ->
- {State, t_map([])};
+ Entries = cerl:map_es(Tree),
+ MapFoldFun = fun(Entry, AccState) ->
+ AccState1 = state__set_in_match(AccState, false),
+ {AccState2, KeyVar} = traverse(cerl:map_pair_key(Entry),
+ DefinedVars, AccState1),
+ AccState3 = state__set_in_match(
+ AccState2, state__is_in_match(AccState)),
+ {AccState4, ValVar} = traverse(cerl:map_pair_val(Entry),
+ DefinedVars, AccState3),
+ {{KeyVar, ValVar}, AccState4}
+ end,
+ {Pairs, State1} = lists:mapfoldl(MapFoldFun, State, Entries),
+ %% We mustn't recurse into the map argument of a match: not only is it
+ %% syntactically restricted to the literal #{}, but recursing would also
+ %% cause infinite recursion, since traverse/3 unfolds literals containing
+ %% maps using dialyzer_utils:refold_pattern/1.
+ {State2, ArgVar} =
+ case state__is_in_match(State) of
+ false -> traverse(cerl:map_arg(Tree), DefinedVars, State1);
+ true -> {State1, t_map()}
+ end,
+ MapVar = mk_var(Tree),
+ MapType = ?mk_fun_var(
+ fun(Map) ->
+ lists:foldl(
+ fun({K,V}, TypeAcc) ->
+ t_map_put({lookup_type(K, Map),
+ lookup_type(V, Map)},
+ TypeAcc)
+ end, t_inf(t_map(), lookup_type(ArgVar, Map)),
+ Pairs)
+ end, [ArgVar | lists:append([[K,V] || {K,V} <- Pairs])]),
+ %% TODO: does the "same element appearing several times" problem apply
+ %% here too?
+ Fun =
+ fun({KeyVar, ValVar}, {AccState, ShadowKeys}) ->
+ %% If Val is known to be the last association of Key (i.e. Key
+ %% is not in ShadowKeys), Val must be a subtype of the value
+ %% associated with Key in Tree.
+ TypeFun =
+ fun(Map) ->
+ KeyType = lookup_type(KeyVar, Map),
+ case t_is_singleton(KeyType) of
+ false -> t_any();
+ true ->
+ MT = t_inf(lookup_type(MapVar, Map), t_map()),
+ case t_is_none(MT) of
+ true -> t_none();
+ false ->
+ DisjointFromKeyType =
+ fun(ShadowKey) ->
+ t_is_none(t_inf(lookup_type(ShadowKey, Map),
+ KeyType))
+ end,
+ case lists:all(DisjointFromKeyType, ShadowKeys) of
+ true -> t_map_get(KeyType, MT);
+ %% A later association might shadow this one
+ false -> t_any()
+ end
+ end
+ end
+ end,
+ ValType = ?mk_fun_var(TypeFun, [KeyVar, MapVar | ShadowKeys]),
+ {state__store_conj(ValVar, sub, ValType, AccState),
+ [KeyVar | ShadowKeys]}
+ end,
+ %% Accumulate shadowing keys right-to-left
+ {State3, _} = lists:foldr(Fun, {State2, []}, Pairs),
+ %% In a map expression, Arg must contain all keys that are inserted with
+ %% the exact (:=) operator and are known (i.e., are not in ShadowedKeys)
+ %% not to have been introduced by a previous association.
+ State4 =
+ case state__is_in_match(State) of
+ true -> State3;
+ false ->
+ ArgFun =
+ fun(Map) ->
+ FoldFun =
+ fun({{KeyVar, _}, Entry}, {AccType, ShadowedKeys}) ->
+ OpTree = cerl:map_pair_op(Entry),
+ KeyType = lookup_type(KeyVar, Map),
+ AccType1 =
+ case cerl:is_literal(OpTree) andalso
+ cerl:concrete(OpTree) =:= exact of
+ true ->
+ case t_is_none(t_inf(ShadowedKeys, KeyType)) of
+ true ->
+ t_map_put({KeyType, t_any()}, AccType);
+ false ->
+ AccType
+ end;
+ false ->
+ AccType
+ end,
+ {AccType1, t_sup(KeyType, ShadowedKeys)}
+ end,
+ %% Accumulate shadowed keys left-to-right
+ {ResType, _} = lists:foldl(FoldFun, {t_map(), t_none()},
+ lists:zip(Pairs, Entries)),
+ ResType
+ end,
+ ArgType = ?mk_fun_var(ArgFun, [KeyVar || {KeyVar, _} <- Pairs]),
+ state__store_conj(ArgVar, sub, ArgType, State3)
+ end,
+ {state__store_conj(MapVar, sub, MapType, State4), MapVar};
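For context, a minimal shell sketch (not part of the patch) of the => / := distinction and the key shadowing that the constraints above encode:

    M0 = #{a => 1},
    #{a := 2} = M0#{a := 2},             % ':=' updates a key that must already exist
    #{a := 1, b := 2} = M0#{b => 2},     % '=>' may also insert a new key
    #{a := 2} = #{a => 1, a => 2},       % a later association shadows an earlier one
    {'EXIT', _} = (catch M0#{b := 3}).   % ':=' on a missing key fails (badkey)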
values ->
%% We can get into trouble when unifying products that have the
%% same element appearing several times. Handle these cases by
@@ -659,7 +771,7 @@ handle_call(Call, DefinedVars, State) ->
{ok, Var} ->
%% This is part of the SCC currently analyzed.
%% Intercept and change this to an apply instead.
- ?debug("Found the call to ~w\n", [MFA]),
+ ?debug("Found the call to ~tw\n", [MFA]),
Label = cerl_trees:get_label(Call),
Apply = cerl:ann_c_apply([{label, Label}], Var, Args),
traverse(Apply, DefinedVars, State)
@@ -825,11 +937,11 @@ get_safe_underapprox(Pats, Guard) ->
Map1 = cerl_trees:fold(fun(X, Acc) ->
case cerl:is_c_var(X) of
true ->
- dict:store(cerl_trees:get_label(X), t_any(),
- Acc);
+ maps:put(cerl_trees:get_label(X), t_any(),
+ Acc);
false -> Acc
end
- end, dict:new(), cerl:c_values(Pats)),
+ end, maps:new(), cerl:c_values(Pats)),
{Type, Map2} = get_underapprox_from_guard(Guard, Map1),
Map3 = case t_is_none(t_inf(t_from_term(true), Type)) of
true -> throw(dont_know);
@@ -837,8 +949,8 @@ get_safe_underapprox(Pats, Guard) ->
case cerl:is_c_var(Guard) of
false -> Map2;
true ->
- dict:store(cerl_trees:get_label(Guard),
- t_from_term(true), Map2)
+ maps:put(cerl_trees:get_label(Guard),
+ t_from_term(true), Map2)
end
end,
{Ts, _Map4} = get_safe_underapprox_1(Pats, [], Map3),
@@ -864,7 +976,7 @@ get_underapprox_from_guard(Tree, Map) ->
case t_is_none(Inf) of
true -> throw(dont_know);
false ->
- {True, dict:store(cerl_trees:get_label(Fun), Inf, Map1)}
+ {True, maps:put(cerl_trees:get_label(Fun), Inf, Map1)}
end
end;
MFA ->
@@ -880,7 +992,7 @@ get_underapprox_from_guard(Tree, Map) ->
case cerl:is_literal(Arg) of
true -> {True, Map1};
false ->
- {True, dict:store(cerl_trees:get_label(Arg), Inf, Map1)}
+ {True, maps:put(cerl_trees:get_label(Arg), Inf, Map1)}
end
end;
error ->
@@ -912,7 +1024,7 @@ get_underapprox_from_guard(Tree, Map) ->
end;
var ->
Type =
- case dict:find(cerl_trees:get_label(Tree), Map) of
+ case maps:find(cerl_trees:get_label(Tree), Map) of
error -> throw(dont_know);
{ok, T} -> T
end,
@@ -946,6 +1058,7 @@ get_type_test({erlang, is_float, 1}) -> {ok, t_float()};
get_type_test({erlang, is_function, 1}) -> {ok, t_fun()};
get_type_test({erlang, is_integer, 1}) -> {ok, t_integer()};
get_type_test({erlang, is_list, 1}) -> {ok, t_list()};
+get_type_test({erlang, is_map, 1}) -> {ok, t_map()};
get_type_test({erlang, is_number, 1}) -> {ok, t_number()};
get_type_test({erlang, is_pid, 1}) -> {ok, t_pid()};
get_type_test({erlang, is_port, 1}) -> {ok, t_port()};
@@ -1002,7 +1115,9 @@ bitstr_val_constr(SizeType, UnitVal, Flags) ->
end
end.
-get_safe_underapprox_1([Pat|Left], Acc, Map) ->
+get_safe_underapprox_1([Pat0|Left], Acc, Map) ->
+ %% Maps should be treated as patterns, not as literals
+ Pat = dialyzer_utils:refold_pattern(Pat0),
case cerl:type(Pat) of
alias ->
APat = cerl:alias_pat(Pat),
@@ -1013,7 +1128,7 @@ get_safe_underapprox_1([Pat|Left], Acc, Map) ->
case t_is_none(Inf) of
true -> throw(dont_know);
false ->
- Map3 = dict:store(cerl_trees:get_label(AVar), Inf, Map2),
+ Map3 = maps:put(cerl_trees:get_label(AVar), Inf, Map2),
get_safe_underapprox_1(Left, [Inf|Acc], Map3)
end;
binary ->
@@ -1046,15 +1161,42 @@ get_safe_underapprox_1([Pat|Left], Acc, Map) ->
Type = t_tuple(Ts),
get_safe_underapprox_1(Left, [Type|Acc], Map1);
map ->
- %% TODO: Can maybe do something here
- throw(dont_know);
+ %% Some assertions in case the syntax gets more permissive in the future
+ true = #{} =:= cerl:concrete(cerl:map_arg(Pat)),
+ true = lists:all(fun(P) ->
+ cerl:is_literal(Op = cerl:map_pair_op(P)) andalso
+ exact =:= cerl:concrete(Op)
+ end, cerl:map_es(Pat)),
+ KeyTrees = lists:map(fun cerl:map_pair_key/1, cerl:map_es(Pat)),
+ ValTrees = lists:map(fun cerl:map_pair_val/1, cerl:map_es(Pat)),
+ %% Keys must not be underapproximated. Overapproximations are safe.
+ Keys = get_safe_overapprox(KeyTrees),
+ {Vals, Map1} = get_safe_underapprox_1(ValTrees, [], Map),
+ case lists:all(fun erl_types:t_is_singleton/1, Keys) of
+ false -> throw(dont_know);
+ true -> ok
+ end,
+ SortedPairs = lists:sort(lists:zip(Keys, Vals)),
+ %% We need to deal with duplicates ourselves
+ SquashDuplicates =
+ fun SquashDuplicates([{K,First},{K,Second}|List]) ->
+ case t_is_none(Inf = t_inf(First, Second)) of
+ true -> throw(dont_know);
+ false -> [{K, Inf}|SquashDuplicates(List)]
+ end;
+ SquashDuplicates([Good|Rest]) ->
+ [Good|SquashDuplicates(Rest)];
+ SquashDuplicates([]) -> []
+ end,
+ Type = t_map(SquashDuplicates(SortedPairs)),
+ get_safe_underapprox_1(Left, [Type|Acc], Map1);
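A self-contained sketch (not part of the patch) of the duplicate-key squashing above, using ordsets of concrete values in place of erl_types intersections:

    Squash = fun Sq([{K, A}, {K, B} | Rest]) ->
                     case ordsets:intersection(A, B) of
                         []  -> throw(dont_know);      % no value satisfies both
                         Inf -> [{K, Inf} | Sq(Rest)]
                     end;
                 Sq([Pair | Rest]) -> [Pair | Sq(Rest)];
                 Sq([]) -> []
             end,
    [{k, [2]}] = Squash([{k, [1, 2]}, {k, [2, 3]}]),
    dont_know  = (catch Squash([{k, [1]}, {k, [2]}])).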
values ->
Es = cerl:values_es(Pat),
{Ts, Map1} = get_safe_underapprox_1(Es, [], Map),
Type = t_product(Ts),
get_safe_underapprox_1(Left, [Type|Acc], Map1);
var ->
- case dict:find(cerl_trees:get_label(Pat), Map) of
+ case maps:find(cerl_trees:get_label(Pat), Map) of
error -> throw(dont_know);
{ok, VarType} -> get_safe_underapprox_1(Left, [VarType|Acc], Map)
end
@@ -1062,6 +1204,15 @@ get_safe_underapprox_1([Pat|Left], Acc, Map) ->
get_safe_underapprox_1([], Acc, Map) ->
{lists:reverse(Acc), Map}.
+get_safe_overapprox(Pats) ->
+ lists:map(fun get_safe_overapprox_1/1, Pats).
+
+get_safe_overapprox_1(Pat) ->
+ case cerl:is_literal(Lit = cerl:fold_literal(Pat)) of
+ true -> t_from_term(cerl:concrete(Lit));
+ false -> t_any()
+ end.
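For illustration (not part of the patch), the key over-approximation above hinges on whether a pattern folds to a literal; anything else falls back to any():

    true  = cerl:is_literal(cerl:fold_literal(cerl:abstract({tag, 42}))),
    false = cerl:is_literal(cerl:fold_literal(cerl:c_tuple([cerl:c_var(x)]))).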
+
%%----------------------------------------
%% Guards
%%
@@ -1261,6 +1412,8 @@ get_bif_constr({erlang, is_integer, 1}, Dst, [Arg], State) ->
get_bif_test_constr(Dst, Arg, t_integer(), State);
get_bif_constr({erlang, is_list, 1}, Dst, [Arg], State) ->
get_bif_test_constr(Dst, Arg, t_maybe_improper_list(), State);
+get_bif_constr({erlang, is_map, 1}, Dst, [Arg], State) ->
+ get_bif_test_constr(Dst, Arg, t_map(), State);
get_bif_constr({erlang, is_number, 1}, Dst, [Arg], State) ->
get_bif_test_constr(Dst, Arg, t_number(), State);
get_bif_constr({erlang, is_pid, 1}, Dst, [Arg], State) ->
@@ -1287,7 +1440,6 @@ get_bif_constr({erlang, is_record, 2}, Dst, [Var, Tag] = Args, _State) ->
mk_constraint(Var, sub, ArgV)]);
get_bif_constr({erlang, is_record, 3}, Dst, [Var, Tag, Arity] = Args, State) ->
%% TODO: Revise this to make it precise for Tag and Arity.
- Records = State#state.records,
ArgFun =
fun(Map) ->
case t_is_any_atom(true, lookup_type(Dst, Map)) of
@@ -1304,10 +1456,10 @@ get_bif_constr({erlang, is_record, 3}, Dst, [Var, Tag, Arity] = Args, State) ->
GenRecord = t_tuple([TagType|AnyElems]),
case t_atom_vals(TagType) of
[TagVal] ->
- case lookup_record(Records, TagVal, ArityVal - 1) of
- {ok, Type} ->
+ case lookup_record(State, TagVal, ArityVal - 1) of
+ {ok, Type, _NewState} ->
Type;
- error -> GenRecord
+ {error, _NewState} -> GenRecord
end;
_ -> GenRecord
end;
@@ -1636,18 +1788,22 @@ get_bif_test_constr(Dst, Arg, Type, _State) ->
%%=============================================================================
solve([Fun], State) ->
- ?debug("============ Analyzing Fun: ~w ===========\n",
+ ?debug("============ Analyzing Fun: ~tw ===========\n",
[debug_lookup_name(Fun)]),
solve_fun(Fun, map_new(), State);
solve([_|_] = SCC, State) ->
- ?debug("============ Analyzing SCC: ~w ===========\n",
+ ?debug("============ Analyzing SCC: ~tw ===========\n",
[[debug_lookup_name(F) || F <- SCC]]),
- {Parallel, NewState} =
- case parallel_split(SCC) of
- false -> {false, State};
- SplitSCC -> {SplitSCC, minimize_state(State)}
- end,
- solve_scc(SCC, Parallel, map_new(), NewState, false).
+ Users = comp_users(SCC, State),
+ solve_scc(SCC, map_new(), State, Users, _ToSolve=SCC, false).
+
+comp_users(SCC, State) ->
+ Vars0 = [{Fun, state__get_rec_var(Fun, State)} || Fun <- SCC],
+ Vars = lists:sort([t_var_name(Var) || {_, {ok, Var}} <- Vars0]),
+ family([{t_var(V), F} ||
+ F <- SCC,
+ V <- ordsets:intersection(get_deps(state__get_cs(F, State)),
+ Vars)]).
solve_fun(Fun, FunMap, State) ->
Cs = state__get_cs(Fun, State),
@@ -1662,7 +1818,7 @@ solve_fun(Fun, FunMap, State) ->
end,
enter_type(Fun, NewType, NewFunMap1).
-solve_scc(SCC, Parallel, Map, State, TryingUnit) ->
+solve_scc(SCC, Map, State, Users, ToSolve, TryingUnit) ->
Vars0 = [{Fun, state__get_rec_var(Fun, State)} || Fun <- SCC],
Vars = [Var || {_, {ok, Var}} <- Vars0],
Funs = [Fun || {Fun, {ok, _}} <- Vars0],
@@ -1670,18 +1826,15 @@ solve_scc(SCC, Parallel, Map, State, TryingUnit) ->
RecTypes = [t_limit(Type, ?TYPE_LIMIT) || Type <- Types],
CleanMap = lists:foldl(fun(Fun, AccFunMap) ->
erase_type(t_var_name(Fun), AccFunMap)
- end, Map, SCC),
+ end, Map, ToSolve),
Map1 = enter_type_lists(Vars, RecTypes, CleanMap),
- ?debug("Checking SCC: ~w\n", [[debug_lookup_name(F) || F <- SCC]]),
- FunSet = ordsets:from_list([t_var_name(F) || F <- SCC]),
- Map2 =
- case Parallel of
- false -> solve_whole_scc(SCC, Map1, State);
- SplitSCC -> solve_whole_scc_parallel(SplitSCC, Map1, State)
- end,
- case maps_are_equal(Map2, Map, FunSet) of
+ ?debug("Checking SCC: ~tw\n", [[debug_lookup_name(F) || F <- SCC]]),
+ SolveFun = fun(X, Y) -> scc_fold_fun(X, Y, State) end,
+ Map2 = lists:foldl(SolveFun, Map1, ToSolve),
+ Updated = updated_vars_only(Vars, Map, Map2),
+ case Updated =:= [] of
true ->
- ?debug("SCC ~w reached fixpoint\n", [SCC]),
+ ?debug("SCC ~tw reached fixpoint\n", [SCC]),
NewTypes = unsafe_lookup_type_list(Funs, Map2),
case erl_types:any_none([t_fun_range(T) || T <- NewTypes])
andalso TryingUnit =:= false of
@@ -1692,127 +1845,21 @@ solve_scc(SCC, Parallel, Map, State, TryingUnit) ->
true -> t_fun(t_fun_args(T), t_unit())
end || T <- NewTypes],
Map3 = enter_type_lists(Funs, UnitTypes, Map2),
- solve_scc(SCC, Parallel, Map3, State, true);
+ solve_scc(SCC, Map3, State, Users, SCC, true);
false ->
- case Parallel of
- false -> true;
- _ -> dispose_state(State)
- end,
Map2
end;
false ->
- ?debug("SCC ~w did not reach fixpoint\n", [SCC]),
- solve_scc(SCC, Parallel, Map2, State, TryingUnit)
- end.
-
-solve_whole_scc(SCC, Map, State) ->
- SolveFun = fun(X, Y) -> scc_fold_fun(X, Y, State) end,
- lists:foldl(SolveFun, Map, SCC).
-
-%%------------------------------------------------------------------------------
-
--define(worth_it, 42).
-
-parallel_split(SCC) ->
- Length = length(SCC),
- case Length > 2*?worth_it of
- false -> false;
- true ->
- case min(dialyzer_utils:parallelism(), 8) of
- 1 -> false;
- CPUs ->
- FullShare = Length div CPUs + 1,
- Unit = max(FullShare, ?worth_it),
- split(SCC, Unit, [])
- end
+ ?debug("SCC ~tw did not reach fixpoint\n", [SCC]),
+ ToSolve1 = affected(Updated, Users),
+ solve_scc(SCC, Map2, State, Users, ToSolve1, TryingUnit)
end.
-minimize_state(#state{
- cmap = {d, CMap},
- fun_map = FunMap,
- fun_arities = FunArities,
- self_rec = SelfRec,
- prop_types = {d, PropTypes},
- solvers = Solvers
- }) ->
- Opts = [{read_concurrency, true}],
- ETSCMap = ets:new(cmap, Opts),
- ETSPropTypes = ets:new(prop_types, Opts),
- true = ets:insert(ETSCMap, dict:to_list(CMap)),
- true = ets:insert(ETSPropTypes, dict:to_list(PropTypes)),
- #state
- {cmap = {e, ETSCMap},
- fun_map = FunMap,
- fun_arities = FunArities,
- self_rec = SelfRec,
- prop_types = {e, ETSPropTypes},
- solvers = Solvers
- }.
-
-dispose_state(#state{cmap = {e, ETSCMap},
- prop_types = {e, ETSPropTypes}}) ->
- true = ets:delete(ETSCMap),
- true = ets:delete(ETSPropTypes).
-
-solve_whole_scc_parallel(SplitSCC, Map, State) ->
- Workers = spawn_workers(SplitSCC, Map, State),
- wait_results(Workers, Map, fold_res_fun(State)).
-
-spawn_workers(SplitSCC, Map, State) ->
- Spawner = solve_scc_spawner(self(), Map, State),
- lists:foreach(Spawner, SplitSCC),
- length(SplitSCC).
-
-wait_results(0, Map, _FoldResFun) ->
- Map;
-wait_results(Pending, Map, FoldResFun) ->
- Res = receive_scc_result(),
- NewMap = lists:foldl(FoldResFun, Map, Res),
- wait_results(Pending-1, NewMap, FoldResFun).
-
-solve_scc_spawner(Parent, Map, State) ->
- fun(SCCPart) ->
- spawn_link(fun() -> solve_scc_worker(Parent, SCCPart, Map, State) end)
- end.
-
-split([], _Unit, Acc) ->
- Acc;
-split(List, Unit, Acc) ->
- {Taken, Rest} =
- try
- lists:split(Unit, List)
- catch
- _:_ -> {List, []}
- end,
- split(Rest, Unit, [Taken|Acc]).
-
-solve_scc_worker(Parent, SCCPart, Map, State) ->
- SolveFun = fun(X, Y) -> scc_fold_fun(X, Y, State) end,
- FinalMap = lists:foldl(SolveFun, Map, SCCPart),
- Res =
- [{F, t_limit(unsafe_lookup_type(F, FinalMap), ?TYPE_LIMIT)} ||
- F <- SCCPart],
- send_scc_result(Parent, Res).
-
-fold_res_fun(State) ->
- fun({F, Type}, Map) ->
- case state__get_rec_var(F, State) of
- {ok, R} ->
- enter_type(R, Type, enter_type(F, Type, Map));
- error ->
- enter_type(F, Type, Map)
- end
- end.
-
-receive_scc_result() ->
- receive
- {scc_fun, Res} -> Res
- end.
-
-send_scc_result(Parent, Res) ->
- Parent ! {scc_fun, Res}.
-
-%%------------------------------------------------------------------------------
+affected(Updated, Users) ->
+ lists:umerge([case lists:keyfind(V, 1, Users) of
+ {V, Vs} -> Vs;
+ false -> []
+ end || V <- Updated]).
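A minimal, self-contained sketch (not part of the patch; every name in it is made up) of the worklist idea behind comp_users/2 and affected/2: after each pass, only the users of variables that actually changed are re-solved, until a pass changes nothing.

    Users = #{a => [b], b => [a]},               % who depends on whom
    Step  = fun(a, M) -> M#{a := min(maps:get(b, M) + 1, 3)};
               (b, M) -> M#{b := min(maps:get(a, M) + 1, 3)}
            end,
    Loop  = fun Go(ToSolve, M0) ->
                M1 = lists:foldl(Step, M0, ToSolve),
                case [V || V <- ToSolve, maps:get(V, M0) =/= maps:get(V, M1)] of
                    []      -> M1;               % fixpoint reached
                    Changed -> Go(lists:usort(lists:append(
                                    [maps:get(C, Users) || C <- Changed])), M1)
                end
            end,
    #{a := 3, b := 3} = Loop([a, b], #{a => 0, b => 0}).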
scc_fold_fun(F, FunMap, State) ->
Deps = get_deps(state__get_cs(F, State)),
@@ -1827,8 +1874,8 @@ scc_fold_fun(F, FunMap, State) ->
error ->
enter_type(F, NewType, FunMap)
end,
- ?debug("Done solving for function ~w :: ~s\n", [debug_lookup_name(F),
- format_type(NewType)]),
+ ?debug("Done solving for function ~tw :: ~ts\n", [debug_lookup_name(F),
+ format_type(NewType)]),
NewFunMap.
solve(Fun, Cs, FunMap, State) ->
@@ -1847,14 +1894,14 @@ solver(Solver, SolveFun) ->
[Solver, _R, 60]),
throw(error)
catch E:R ->
- io:format("Solver ~w failed: ~w:~p\n ~p\n",
+ io:format("Solver ~w failed: ~w:~p\n ~tp\n",
[Solver, E, R, erlang:get_stacktrace()]),
throw(error)
end.
solve_fun(v1, _Fun, Cs, FunMap, State) ->
fun() ->
- {ok, _MapDict, NewMap} = solve_ref_or_list(Cs, FunMap, dict:new(), State),
+ {ok, _MapDict, NewMap} = solve_ref_or_list(Cs, FunMap, map_new(), State),
{ok, NewMap}
end;
solve_fun(v2, Fun, _Cs, FunMap, State) ->
@@ -1869,8 +1916,8 @@ check_solutions([{S1,Map1,_Time1}|Maps], Fun, S, Map) ->
check_solutions(Maps, Fun, S1, Map1);
false ->
?debug("Constraint solvers do not agree on ~w\n", [Fun]),
- pp_map(atom_to_list(S), Map),
- pp_map(atom_to_list(S1), Map1),
+ ?pp_map(atom_to_list(S), Map),
+ ?pp_map(atom_to_list(S1), Map1),
io:format("A bug was found. Please report it, and use the option "
"`--solver v1' until the bug has been fixed.\n"),
throw(error)
@@ -1884,9 +1931,9 @@ sane_maps(Map1, Map2, Keys, _S1, _S2) ->
true -> true;
false ->
?debug("Constraint solvers do not agree on ~w\n", [Key]),
- ?debug("~w: ~s\n",
+ ?debug("~w: ~ts\n",
[_S1, format_type(unsafe_lookup_type(Key, Map1))]),
- ?debug("~w: ~s\n",
+ ?debug("~w: ~ts\n",
[_S2, format_type(unsafe_lookup_type(Key, Map2))]),
false
end
@@ -1894,8 +1941,8 @@ sane_maps(Map1, Map2, Keys, _S1, _S2) ->
%% Solver v2
--record(v2_state, {constr_data = dict:new() :: dict:dict(),
- state :: #state{}}).
+-record(v2_state, {constr_data = maps:new() :: map(),
+ state :: state()}).
v2_solve_ref(Fun, Map, State) ->
V2State = #v2_state{state = State},
@@ -1918,8 +1965,8 @@ v2_solve(#constraint_ref{id = Id}, Map, V2State) ->
v2_solve_reference(Id, Map, V2State).
v2_solve_reference(Id, Map, V2State0) ->
- ?debug("Checking ref to fun: ~w\n", [debug_lookup_name(Id)]),
- pp_map("Map", Map),
+ ?debug("Checking ref to fun: ~tw\n", [debug_lookup_name(Id)]),
+ ?pp_map("Map", Map),
pp_constr_data("solve_ref", V2State0),
Map1 = restore_local_map(V2State0, Id, Map),
State = V2State0#v2_state.state,
@@ -1932,7 +1979,7 @@ v2_solve_reference(Id, Map, V2State0) ->
{FunType, V2State} =
case Res of
{error, V2State1} ->
- ?debug("Error solving for function ~p\n", [debug_lookup_name(Id)]),
+ ?debug("Error solving for function ~tp\n", [debug_lookup_name(Id)]),
Arity = state__fun_arity(Id, State),
FunType0 =
case state__prop_domain(t_var_name(Id), State) of
@@ -1941,12 +1988,12 @@ v2_solve_reference(Id, Map, V2State0) ->
end,
{FunType0, V2State1};
{ok, NewMap, V2State1, U} ->
- ?debug("Done solving fun: ~p\n", [debug_lookup_name(Id)]),
+ ?debug("Done solving fun: ~tp\n", [debug_lookup_name(Id)]),
FunType0 = lookup_type(Id, NewMap),
V2State2 = save_local_map(V2State1, Id, U, NewMap),
{FunType0, V2State2}
end,
- ?debug("ref Id=~w Assigned ~s\n", [Id, format_type(FunType)]),
+ ?debug("ref Id=~w Assigned ~ts\n", [Id, format_type(FunType)]),
{NewMap1, U1} = enter_var_type(Id, FunType, Map),
{NewMap2, U2} =
case state__get_rec_var(Id, State) of
@@ -1956,10 +2003,10 @@ v2_solve_reference(Id, Map, V2State0) ->
{ok, NewMap2, V2State, lists:umerge(U1, U2)}.
v2_solve_self_recursive(Cs, Map, Id, RecType0, V2State0) ->
- ?debug("Solving self recursive ~w\n", [debug_lookup_name(Id)]),
+ ?debug("Solving self recursive ~tw\n", [debug_lookup_name(Id)]),
State = V2State0#v2_state.state,
{ok, RecVar} = state__get_rec_var(Id, State),
- ?debug("OldRecType ~s\n", [format_type(RecType0)]),
+ ?debug("OldRecType ~ts\n", [format_type(RecType0)]),
RecType = t_limit(RecType0, ?TYPE_LIMIT),
{Map1, U0} = enter_var_type(RecVar, RecType, Map),
V2State1 = save_updated_vars1(V2State0, Cs, U0), % Probably not necessary
@@ -1975,7 +2022,7 @@ v2_solve_self_recursive(Cs, Map, Id, RecType0, V2State0) ->
Error
end;
{ok, NewMap, V2State, U} ->
- pp_map("recursive finished", NewMap),
+ ?pp_map("recursive finished", NewMap),
NewRecType = unsafe_lookup_type(Id, NewMap),
case is_equal(NewRecType, RecType0) of
true ->
@@ -1993,7 +2040,7 @@ enter_var_type(Var, Type, Map0) ->
v2_solve_disjunct(Disj, Map, V2State0) ->
#constraint_list{type = disj, id = _Id, list = Cs, masks = Masks} = Disj,
?debug("disjunct Id=~w~n", [_Id]),
- pp_map("Map", Map),
+ ?pp_map("Map", Map),
pp_constr_data("disjunct", V2State0),
case get_flags(V2State0, Disj) of
{V2State1, failed_list} -> {error, V2State1}; % cannot happen
@@ -2021,7 +2068,7 @@ v2_solve_disjunct(Disj, Map, V2State0) ->
U1 = [V || V <- U0,
var_occurs_everywhere(V, Masks, NotFailed)],
NewMap = join_maps(U1, MapL, Map),
- pp_map("NewMap", NewMap),
+ ?pp_map("NewMap", NewMap),
U = updated_vars_only(U1, Map, NewMap),
?debug("disjunct finished _Id=~w\n", [_Id]),
{ok, NewMap, V2State, U}
@@ -2044,43 +2091,49 @@ v2_solve_disj([I|Is], [C|Cs], I, Map0, V2State0, UL, MapL, Eval, Uneval,
{ok, Map, V2State1, U} ->
?debug("disj I=~w U=~w~n", [I, U]),
V2State = save_local_map(V2State1, Id, U, Map),
- pp_map("DMap", Map),
+ ?pp_map("DMap", Map),
v2_solve_disj(Is, Cs, I+1, Map0, V2State, [U|UL], [Map|MapL],
[I|Eval], Uneval, Failed0)
end;
v2_solve_disj([], [], _I, _Map, V2State, UL, MapL, Eval, Uneval, Failed) ->
{ok, V2State, lists:reverse(Eval), UL, MapL, lists:reverse(Uneval), Failed};
+v2_solve_disj([every_i], Cs, I, Map, V2State, UL, MapL, Eval, Uneval, Failed) ->
+ NewIs = case Cs of
+ [] -> [];
+ _ -> [I, every_i]
+ end,
+ v2_solve_disj(NewIs, Cs, I, Map, V2State, UL, MapL, Eval, Uneval, Failed);
v2_solve_disj(Is, [C|Cs], I, Map, V2State, UL, MapL, Eval, Uneval0, Failed) ->
Uneval = [{I,C#constraint_list.id} ||
not is_failed_list(C, V2State)] ++ Uneval0,
v2_solve_disj(Is, Cs, I+1, Map, V2State, UL, MapL, Eval, Uneval, Failed).
save_local_map(#v2_state{constr_data = ConData}=V2State, Id, U, Map) ->
- Part0 = [{V,dict:fetch(V, Map)} || V <- U],
+ Part0 = [{V,maps:get(V, Map)} || V <- U],
Part1 =
- case dict:find(Id, ConData) of
+ case maps:find(Id, ConData) of
error -> []; % cannot happen
{ok, {Part2,[]}} -> Part2
end,
?debug("save local map Id=~w:\n", [Id]),
Part = lists:ukeymerge(1, lists:keysort(1, Part0), Part1),
- pp_map("New Part", dict:from_list(Part0)),
- pp_map("Old Part", dict:from_list(Part1)),
- pp_map(" => Part", dict:from_list(Part)),
- V2State#v2_state{constr_data = dict:store(Id, {Part,[]}, ConData)}.
+ ?pp_map("New Part", maps:from_list(Part0)),
+ ?pp_map("Old Part", maps:from_list(Part1)),
+ ?pp_map(" => Part", maps:from_list(Part)),
+ V2State#v2_state{constr_data = maps:put(Id, {Part,[]}, ConData)}.
restore_local_map(#v2_state{constr_data = ConData}, Id, Map0) ->
- case dict:find(Id, ConData) of
+ case maps:find(Id, ConData) of
error -> Map0;
{ok, failed} -> Map0;
{ok, {[],_}} -> Map0;
{ok, {Part0,U}} ->
Part = [KV || {K,_V} = KV <- Part0, not lists:member(K, U)],
?debug("restore local map Id=~w U=~w\n", [Id, U]),
- pp_map("Part", dict:from_list(Part)),
- pp_map("Map0", Map0),
- Map = lists:foldl(fun({K,V}, D) -> dict:store(K, V, D) end, Map0, Part),
- pp_map("Map", Map),
+ ?pp_map("Part", maps:from_list(Part)),
+ ?pp_map("Map0", Map0),
+ Map = lists:foldl(fun({K,V}, D) -> maps:put(K, V, D) end, Map0, Part),
+ ?pp_map("Map", Map),
Map
end.
@@ -2121,7 +2174,7 @@ v2_solve_conj([I|Is], [Cs|Tail], I, Map0, Conj, IsFlat, V2State0,
M = lists:keydelete(I, 1, vars_per_child(U, Masks)),
{V2State2, NewF0} = save_updated_vars_list(AllCs, M, V2State1),
{NewF, F} = lists:splitwith(fun(J) -> J < I end, NewF0),
- Is1 = lists:umerge(Is, F),
+ Is1 = umerge_mask(Is, F),
NewFs = [NewF|NewFs0],
v2_solve_conj(Is1, Tail, I+1, Map, Conj, IsFlat, V2State2,
[U|UL], NewFs, VarsUp, LastMap, LastFlags)
@@ -2143,6 +2196,14 @@ v2_solve_conj([], _Cs, _I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp,
v2_solve_conj(NewFlags, Cs, 1, Map, Conj, IsFlat, V2State,
[], [], [U|VarsUp], Map, NewFlags)
end;
+v2_solve_conj([every_i], Cs, I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp,
+ LastMap, LastFlags) ->
+ NewIs = case Cs of
+ [] -> [];
+ _ -> [I, every_i]
+ end,
+ v2_solve_conj(NewIs, Cs, I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp,
+ LastMap, LastFlags);
v2_solve_conj(Is, [_|Tail], I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp,
LastMap, LastFlags) ->
v2_solve_conj(Is, Tail, I+1, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp,
@@ -2159,31 +2220,31 @@ report_detected_loop(_) ->
add_mask_to_flags(Flags, [Im|M], I, L) when I > Im ->
add_mask_to_flags(Flags, M, I, [Im|L]);
add_mask_to_flags(Flags, [_|M], _I, L) ->
- {lists:umerge(Flags, M), lists:reverse(L)}.
+ {umerge_mask(Flags, M), lists:reverse(L)}.
+
+umerge_mask([every_i]=Is, _F) ->
+ Is;
+umerge_mask(Is, F) ->
+ lists:umerge(Is, F).
-get_mask(V, {d, Masks}) ->
- case dict:find(V, Masks) of
+get_mask(V, Masks) ->
+ case maps:find(V, Masks) of
error -> [];
{ok, M} -> M
- end;
-get_mask(V, Masks) ->
- case lists:keyfind(V, 1, Masks) of
- false -> [];
- {V, M} -> M
end.
get_flags(#v2_state{constr_data = ConData}=V2State0, C) ->
#constraint_list{id = Id, list = Cs, masks = Masks} = C,
- case dict:find(Id, ConData) of
+ case maps:find(Id, ConData) of
error ->
?debug("get_flags Id=~w Flags=all ~w\n", [Id, length(Cs)]),
- V2State = V2State0#v2_state{constr_data = dict:store(Id, {[],[]}, ConData)},
- {V2State, lists:seq(1, length(Cs))};
+ V2State = V2State0#v2_state{constr_data = maps:put(Id, {[],[]}, ConData)},
+ {V2State, [every_i]};
{ok, failed} ->
{V2State0, failed_list};
{ok, {Part,U}} when U =/= [] ->
?debug("get_flags Id=~w U=~w\n", [Id, U]),
- V2State = V2State0#v2_state{constr_data = dict:store(Id, {Part,[]}, ConData)},
+ V2State = V2State0#v2_state{constr_data = maps:put(Id, {Part,[]}, ConData)},
save_updated_vars_list(Cs, vars_per_child(U, Masks), V2State)
end.
@@ -2212,13 +2273,13 @@ save_updated_vars(#constraint_ref{id = Id}, U, V2State) ->
save_updated_vars1(V2State, C, U) ->
#v2_state{constr_data = ConData} = V2State,
#constraint_list{id = Id} = C,
- case dict:find(Id, ConData) of
+ case maps:find(Id, ConData) of
error -> V2State; % error means everything is flagged
{ok, failed} -> V2State;
{ok, {Part,U0}} ->
%% Duplicates are not so common; let masks/2 remove them.
U1 = U ++ U0,
- V2State#v2_state{constr_data = dict:store(Id, {Part,U1}, ConData)}
+ V2State#v2_state{constr_data = maps:put(Id, {Part,U1}, ConData)}
end.
-ifdef(DEBUG).
@@ -2228,12 +2289,12 @@ pp_constr_data(_Tag, #v2_state{constr_data = D}) ->
case _PartU of
{_Part, _U} ->
io:format("Id: ~w Vars: ~w\n", [_Id, _U]),
- [pp_map("Part", dict:from_list(_Part)) || _Part =/= []];
+ [?pp_map("Part", maps:from_list(_Part)) || _Part =/= []];
failed ->
io:format("Id: ~w failed list\n", [_Id])
end
end ||
- {_Id, _PartU} <- lists:keysort(1, dict:to_list(D))],
+ {_Id, _PartU} <- lists:keysort(1, maps:to_list(D))],
ok.
-else.
@@ -2243,21 +2304,21 @@ pp_constr_data(_Tag, _V2State) ->
failed_list(#constraint_list{id = Id}, #v2_state{constr_data = D}=V2State) ->
?debug("error list ~w~n", [Id]),
- V2State#v2_state{constr_data = dict:store(Id, failed, D)}.
+ V2State#v2_state{constr_data = maps:put(Id, failed, D)}.
is_failed_list(#constraint_list{id = Id}, #v2_state{constr_data = D}) ->
- dict:find(Id, D) =:= {ok, failed}.
+ maps:find(Id, D) =:= {ok, failed}.
%% Solver v1
solve_ref_or_list(#constraint_ref{id = Id, deps = Deps},
Map, MapDict, State) ->
{OldLocalMap, Check} =
- case dict:find(Id, MapDict) of
+ case maps:find(Id, MapDict) of
error -> {map_new(), false};
{ok, M} -> {M, true}
end,
- ?debug("Checking ref to fun: ~w\n", [debug_lookup_name(Id)]),
+ ?debug("Checking ref to fun: ~tw\n", [debug_lookup_name(Id)]),
%% Note: mk_constraint_ref() has already removed Id from Deps. The
%% reason for doing it there is that it makes it easy for
%% calculate_masks() to make the corresponding adjustment for
@@ -2279,7 +2340,7 @@ solve_ref_or_list(#constraint_ref{id = Id, deps = Deps},
{NewMapDict, FunType} =
case Res of
{error, NewMapDict0} ->
- ?debug("Error solving for function ~p\n", [debug_lookup_name(Id)]),
+ ?debug("Error solving for function ~tp\n", [debug_lookup_name(Id)]),
Arity = state__fun_arity(Id, State),
FunType0 =
case state__prop_domain(t_var_name(Id), State) of
@@ -2288,23 +2349,23 @@ solve_ref_or_list(#constraint_ref{id = Id, deps = Deps},
end,
{NewMapDict0, FunType0};
{ok, NewMapDict0, NewMap} ->
- ?debug("Done solving fun: ~p\n", [debug_lookup_name(Id)]),
+ ?debug("Done solving fun: ~tp\n", [debug_lookup_name(Id)]),
FunType0 = lookup_type(Id, NewMap),
{NewMapDict0, FunType0}
end,
- ?debug(" Id=~w Assigned ~s\n", [Id, format_type(FunType)]),
+ ?debug(" Id=~w Assigned ~ts\n", [Id, format_type(FunType)]),
NewMap1 = enter_type(Id, FunType, Map),
NewMap2 =
case state__get_rec_var(Id, State) of
{ok, Var} -> enter_type(Var, FunType, NewMap1);
error -> NewMap1
end,
- {ok, dict:store(Id, NewMap2, NewMapDict), NewMap2}
+ {ok, maps:put(Id, NewMap2, NewMapDict), NewMap2}
end;
solve_ref_or_list(#constraint_list{type=Type, list = Cs, deps = Deps, id = Id},
Map, MapDict, State) ->
{OldLocalMap, Check} =
- case dict:find(Id, MapDict) of
+ case maps:find(Id, MapDict) of
error -> {map_new(), false};
{ok, M} -> {M, true}
end,
@@ -2314,21 +2375,21 @@ solve_ref_or_list(#constraint_list{type=Type, list = Cs, deps = Deps, id = Id},
true ->
case Check andalso maps_are_equal(OldLocalMap, Map, Deps) of
true ->
- ?debug("~w equal ~w\n", [Type, Id]),
+ ?debug("~tw equal ~w\n", [Type, Id]),
{ok, MapDict, Map};
false ->
- ?debug("~w not equal: ~w. Solving\n", [Type, Id]),
+ ?debug("~tw not equal: ~w. Solving\n", [Type, Id]),
solve_clist(Cs, Type, Id, Deps, MapDict, Map, State)
end
end.
solve_self_recursive(Cs, Map, MapDict, Id, RecType0, State) ->
- ?debug("Solving self recursive ~w\n", [debug_lookup_name(Id)]),
+ ?debug("Solving self recursive ~tw\n", [debug_lookup_name(Id)]),
{ok, RecVar} = state__get_rec_var(Id, State),
- ?debug("OldRecType ~s\n", [format_type(RecType0)]),
+ ?debug("OldRecType ~ts\n", [format_type(RecType0)]),
RecType = t_limit(RecType0, ?TYPE_LIMIT),
Map1 = enter_type(RecVar, RecType, erase_type(t_var_name(Id), Map)),
- pp_map("Map1", Map1),
+ ?pp_map("Map1", Map1),
case solve_ref_or_list(Cs, Map1, MapDict, State) of
{error, _} = Error ->
case t_is_none(RecType0) of
@@ -2341,7 +2402,7 @@ solve_self_recursive(Cs, Map, MapDict, Id, RecType0, State) ->
Error
end;
{ok, NewMapDict, NewMap} ->
- pp_map("NewMap", NewMap),
+ ?pp_map("NewMap", NewMap),
NewRecType = unsafe_lookup_type(Id, NewMap),
case is_equal(NewRecType, RecType0) of
true ->
@@ -2354,7 +2415,7 @@ solve_self_recursive(Cs, Map, MapDict, Id, RecType0, State) ->
solve_clist(Cs, conj, Id, Deps, MapDict, Map, State) ->
case solve_cs(Cs, Map, MapDict, State) of
{error, NewMapDict} ->
- {error, dict:store(Id, error, NewMapDict)};
+ {error, maps:put(Id, error, NewMapDict)};
{ok, NewMapDict, NewMap} = Ret ->
case Cs of
[_] ->
@@ -2362,7 +2423,7 @@ solve_clist(Cs, conj, Id, Deps, MapDict, Map, State) ->
Ret;
_ ->
case maps_are_equal(Map, NewMap, Deps) of
- true -> {ok, dict:store(Id, NewMap, NewMapDict), NewMap};
+ true -> {ok, maps:put(Id, NewMap, NewMapDict), NewMap};
false -> solve_clist(Cs, conj, Id, Deps, NewMapDict, NewMap, State)
end
end
@@ -2376,10 +2437,10 @@ solve_clist(Cs, disj, Id, _Deps, MapDict, Map, State) ->
end,
{Maps, NewMapDict} = lists:mapfoldl(Fun, MapDict, Cs),
case [X || {ok, X} <- Maps] of
- [] -> {error, dict:store(Id, error, NewMapDict)};
+ [] -> {error, maps:put(Id, error, NewMapDict)};
MapList ->
NewMap = join_maps(MapList),
- {ok, dict:store(Id, NewMap, NewMapDict), NewMap}
+ {ok, maps:put(Id, NewMap, NewMapDict), NewMap}
end.
solve_cs([#constraint_ref{} = C|Tail], Map, MapDict, State) ->
@@ -2407,7 +2468,7 @@ solve_one_c(#constraint{lhs = Lhs, rhs = Rhs, op = Op}, Map) ->
LhsType = lookup_type(Lhs, Map),
RhsType = lookup_type(Rhs, Map),
Inf = t_inf(LhsType, RhsType),
- ?debug("Solving: ~s :: ~s ~w ~s :: ~s\n\tInf: ~s\n",
+ ?debug("Solving: ~ts :: ~ts ~w ~ts :: ~ts\n\tInf: ~ts\n",
[format_type(Lhs), format_type(LhsType), Op,
format_type(Rhs), format_type(RhsType), format_type(Inf)]),
case t_is_none(Inf) of
@@ -2439,14 +2500,14 @@ solve_subtype(Type, Inf, Map) ->
{_, List} -> {ok, enter_type_list(List, Map)}
catch
throw:{mismatch, _T1, _T2} ->
- ?debug("Mismatch between ~s and ~s\n",
+ ?debug("Mismatch between ~ts and ~ts\n",
[format_type(_T1), format_type(_T2)]),
error
end.
%% end.
report_failed_constraint(_C, _Map) ->
- ?debug("+++++++++++\nFailed: ~s :: ~s ~w ~s :: ~s\n+++++++++++\n",
+ ?debug("+++++++++++\nFailed: ~ts :: ~ts ~w ~ts :: ~ts\n+++++++++++\n",
[format_type(_C#constraint.lhs),
format_type(lookup_type(_C#constraint.lhs, _Map)),
_C#constraint.op,
@@ -2460,7 +2521,7 @@ report_failed_constraint(_C, _Map) ->
%% ============================================================================
map_new() ->
- dict:new().
+ maps:new().
join_maps([Map]) ->
Map;
@@ -2470,9 +2531,9 @@ join_maps(Maps) ->
constrained_keys(Maps) ->
lists:foldl(fun(TmpMap, AccKeys) ->
- [Key || Key <- AccKeys, dict:is_key(Key, TmpMap)]
+ [Key || Key <- AccKeys, maps:is_key(Key, TmpMap)]
end,
- dict:fetch_keys(hd(Maps)), tl(Maps)).
+ maps:keys(hd(Maps)), tl(Maps)).
join_maps([Key|Left], Maps = [Map|MapsLeft], AccMap) ->
NewType = join_one_key(Key, MapsLeft, lookup_type(Key, Map)),
@@ -2504,7 +2565,7 @@ maps_are_equal_1(Map1, Map2, [H|Tail]) ->
case is_equal(T1, T2) of
true -> maps_are_equal_1(Map1, Map2, Tail);
false ->
- ?debug("~w: ~s =/= ~s\n", [H, format_type(T1), format_type(T2)]),
+ ?debug("~w: ~ts =/= ~ts\n", [H, format_type(T1), format_type(T2)]),
false
end;
maps_are_equal_1(_Map1, _Map2, []) ->
@@ -2518,11 +2579,11 @@ prune_keys(Map1, Map2, Deps) ->
NofDeps = length(Deps),
case NofDeps > ?PRUNE_LIMIT of
true ->
- Keys1 = dict:fetch_keys(Map1),
+ Keys1 = maps:keys(Map1),
case length(Keys1) > NofDeps of
true ->
Set1 = lists:sort(Keys1),
- Set2 = lists:sort(dict:fetch_keys(Map2)),
+ Set2 = lists:sort(maps:keys(Map2)),
ordsets:intersection(ordsets:union(Set1, Set2), Deps);
false ->
Deps
@@ -2532,7 +2593,7 @@ prune_keys(Map1, Map2, Deps) ->
end.
enter_type(Key, Val, Map) when is_integer(Key) ->
- ?debug("Entering ~s :: ~s\n", [format_type(t_var(Key)), format_type(Val)]),
+ ?debug("Entering ~ts :: ~ts\n", [format_type(t_var(Key)), format_type(Val)]),
%% Keep any() in the map if it is opaque:
case is_equal(Val, t_any()) of
true ->
@@ -2541,9 +2602,9 @@ enter_type(Key, Val, Map) when is_integer(Key) ->
LimitedVal = t_limit(Val, ?INTERNAL_TYPE_LIMIT),
case is_equal(LimitedVal, Val) of
true -> ok;
- false -> ?debug("LimitedVal ~s\n", [format_type(LimitedVal)])
+ false -> ?debug("LimitedVal ~ts\n", [format_type(LimitedVal)])
end,
- case dict:find(Key, Map) of
+ case maps:find(Key, Map) of
{ok, Value} ->
case is_equal(Value, LimitedVal) of
true -> Map;
@@ -2576,17 +2637,17 @@ enter_type2(Key, Val, Map) ->
{Map1, [Key || not is_same(Key, Map, Map1)]}.
map_store(Key, Val, Map) ->
- ?debug("Storing ~w :: ~s\n", [Key, format_type(Val)]),
- dict:store(Key, Val, Map).
+ ?debug("Storing ~tw :: ~ts\n", [Key, format_type(Val)]),
+ maps:put(Key, Val, Map).
erase_type(Key, Map) ->
- dict:erase(Key, Map).
+ maps:remove(Key, Map).
lookup_type_list(List, Map) ->
[lookup_type(X, Map) || X <- List].
unsafe_lookup_type(Key, Map) ->
- case dict:find(t_var_name(Key), Map) of
+ case maps:find(t_var_name(Key), Map) of
{ok, Type} -> Type;
error -> t_none()
end.
@@ -2595,7 +2656,7 @@ unsafe_lookup_type_list(List, Map) ->
[unsafe_lookup_type(X, Map) || X <- List].
lookup_type(Key, Map) when is_integer(Key) ->
- case dict:find(Key, Map) of
+ case maps:find(Key, Map) of
error -> t_any();
{ok, Val} -> Val
end;
@@ -2640,22 +2701,20 @@ is_same(Key, Map1, Map2) ->
is_equal(Type1, Type2) ->
t_is_equal(Type1, Type2).
-pp_map(_S, _Map) ->
- ?debug("\t~s: ~p\n",
- [_S, [{X, lists:flatten(format_type(Y))} ||
- {X, Y} <- lists:keysort(1, dict:to_list(_Map))]]).
-
%% ============================================================================
%%
%% The State.
%%
%% ============================================================================
-new_state(SCC0, NextLabel, CallGraph, Plt, PropTypes, Solvers) ->
- List = [{MFA, Var} || {MFA, {Var, _Fun}, _Rec} <- SCC0],
- NameMap = dict:from_list(List),
- MFAs = [MFA || {MFA, _Var} <- List],
- SCC = [mk_var(Fun) || {_MFA, {_Var, Fun}, _Rec} <- SCC0],
+new_state(MFAs, NextLabel, CallGraph, CServer, Plt, PropTypes0, Solvers) ->
+ List_SCC =
+ [begin
+ {Var, Label} = dialyzer_codeserver:lookup_mfa_var_label(MFA, CServer),
+ {{MFA, Var}, t_var(Label)}
+ end || MFA <- MFAs],
+ {List, SCC} = lists:unzip(List_SCC),
+ NameMap = maps:from_list(List),
SelfRec =
case SCC of
[OneF] ->
@@ -2666,12 +2725,14 @@ new_state(SCC0, NextLabel, CallGraph, Plt, PropTypes, Solvers) ->
end;
_Many -> false
end,
+ PropTypes = dict:from_list(PropTypes0),
#state{callgraph = CallGraph, name_map = NameMap, next_label = NextLabel,
- prop_types = {d, PropTypes}, plt = Plt, scc = ordsets:from_list(SCC),
- mfas = MFAs, self_rec = SelfRec, solvers = Solvers}.
+ prop_types = PropTypes, plt = Plt, scc = ordsets:from_list(SCC),
+ mfas = MFAs, self_rec = SelfRec, solvers = Solvers,
+ cserver = CServer}.
-state__set_rec_dict(State, RecDict) ->
- State#state{records = RecDict}.
+state__set_module(State, Module) ->
+ State#state{module = Module}.
state__set_in_match(State, Bool) ->
State#state{in_match = Bool}.
@@ -2695,15 +2756,15 @@ state__get_fun_prototype(Op, Arity, State) ->
end.
state__lookup_rec_var_in_scope(MFA, #state{name_map = NameMap}) ->
- dict:find(MFA, NameMap).
+ maps:find(MFA, NameMap).
state__store_fun_arity(Tree, #state{fun_arities = Map} = State) ->
Arity = length(cerl:fun_vars(Tree)),
Id = mk_var(Tree),
- State#state{fun_arities = dict:store(Id, Arity, Map)}.
+ State#state{fun_arities = maps:put(Id, Arity, Map)}.
state__fun_arity(Id, #state{fun_arities = Map}) ->
- dict:fetch(Id, Map).
+ maps:get(Id, Map).
state__lookup_undef_var(Tree, #state{callgraph = CG, plt = Plt}) ->
Label = cerl_trees:get_label(Tree),
@@ -2763,21 +2824,14 @@ state__plt(#state{plt = PLT}) ->
state__new_constraint_context(State) ->
State#state{cs = []}.
-state__prop_domain(FunLabel, #state{prop_types = {e, ETSPropTypes}}) ->
- try ets:lookup_element(ETSPropTypes, FunLabel, 2) of
- {_Range_Fun, Dom} -> {ok, Dom};
- FunType -> {ok, t_fun_args(FunType)}
- catch
- _:_ -> error
- end;
-state__prop_domain(FunLabel, #state{prop_types = {d, PropTypes}}) ->
+state__prop_domain(FunLabel, #state{prop_types = PropTypes}) ->
case dict:find(FunLabel, PropTypes) of
error -> error;
{ok, {_Range_Fun, Dom}} -> {ok, Dom};
{ok, FunType} -> {ok, t_fun_args(FunType)}
end.
-state__add_prop_constrs(Tree, #state{prop_types = {d, PropTypes}} = State) ->
+state__add_prop_constrs(Tree, #state{prop_types = PropTypes} = State) ->
Label = cerl_trees:get_label(Tree),
case dict:find(Label, PropTypes) of
error -> State;
@@ -2788,7 +2842,7 @@ state__add_prop_constrs(Tree, #state{prop_types = {d, PropTypes}} = State) ->
case erl_types:any_none(ArgTypes) of
true -> not_called;
false ->
- ?debug("Adding propagated constr: ~s for function ~w\n",
+ ?debug("Adding propagated constr: ~ts for function ~tw\n",
[format_type(FunType), debug_lookup_name(mk_var(Tree))]),
FunVar = mk_var(Tree),
state__store_conj(FunVar, sub, FunType, State)
@@ -2840,14 +2894,12 @@ state__mk_vars(N, #state{next_label = NL} = State) ->
Vars = [t_var(X) || X <- lists:seq(NL, NewLabel-1)],
{State#state{next_label = NewLabel}, Vars}.
-state__store_constrs(Id, Cs, #state{cmap = {d, Dict}} = State) ->
- NewDict = dict:store(Id, Cs, Dict),
- State#state{cmap = {d, NewDict}}.
+state__store_constrs(Id, Cs, #state{cmap = Map} = State) ->
+ NewMap = maps:put(Id, Cs, Map),
+ State#state{cmap = NewMap}.
-state__get_cs(Var, #state{cmap = {e, ETSDict}}) ->
- ets:lookup_element(ETSDict, Var, 2);
-state__get_cs(Var, #state{cmap = {d, Dict}}) ->
- dict:fetch(Var, Dict).
+state__get_cs(Var, #state{cmap = Map}) ->
+ maps:get(Var, Map).
state__is_self_rec(Fun, #state{self_rec = SelfRec}) ->
not (SelfRec =:= 'false') andalso is_equal(Fun, SelfRec).
@@ -2856,19 +2908,17 @@ state__store_funs(Vars0, Funs0, #state{fun_map = Map} = State) ->
debug_make_name_map(Vars0, Funs0),
Vars = mk_var_list(Vars0),
Funs = mk_var_list(Funs0),
- NewMap = lists:foldl(fun({Var, Fun}, MP) -> orddict:store(Var, Fun, MP) end,
+ NewMap = lists:foldl(fun({Var, Fun}, MP) -> maps:put(Fun, Var, MP) end,
Map, lists:zip(Vars, Funs)),
State#state{fun_map = NewMap}.
state__get_rec_var(Fun, #state{fun_map = Map}) ->
- case [V || {V, FV} <- Map, FV =:= Fun] of
- [Var] -> {ok, Var};
- [] -> error
- end.
+ maps:find(Fun, Map).
state__finalize(State) ->
- State1 = enumerate_constraints(State),
- order_fun_constraints(State1).
+ State1 = state__new_constraint_context(State),
+ State2 = enumerate_constraints(State1),
+ order_fun_constraints(State2).
%% ============================================================================
%%
@@ -2884,8 +2934,7 @@ mk_constraint(Lhs, Op, Rhs) ->
case t_is_any(Lhs) orelse constraint_opnd_is_any(Rhs) of
false ->
Deps = find_constraint_deps([Lhs, Rhs]),
- C0 = mk_constraint_1(Lhs, Op, Rhs),
- C = C0#constraint{deps = Deps},
+ C = mk_constraint_1(Lhs, Op, Rhs, Deps),
case Deps =:= [] of
true ->
%% This constraint is constant. Solve it immediately.
@@ -2903,8 +2952,7 @@ mk_constraint(Lhs, Op, Rhs) ->
end.
mk_constraint_any(Op) ->
- C = mk_constraint_1(t_any(), Op, t_any()),
- C#constraint{deps = []}.
+ mk_constraint_1(t_any(), Op, t_any(), []).
%% the following function is used so that we do not call
%% erl_types:t_is_any/1 with a term other than an erl_type()
@@ -2923,6 +2971,11 @@ mk_fun_var(Line, Fun, Types) ->
Deps = [t_var_name(Var) || Var <- t_collect_vars(t_product(Types))],
#fun_var{'fun' = Fun, deps = ordsets:from_list(Deps), origin = Line}.
+pp_map(S, Map) ->
+ ?debug("\t~s: ~p\n",
+ [S, [{X, lists:flatten(format_type(Y))} ||
+ {X, Y} <- lists:keysort(1, maps:to_list(Map))]]).
+
-else.
-spec mk_fun_var(fun((_) -> erl_types:erl_type()), [erl_types:erl_type()]) -> #fun_var{}.
@@ -2933,13 +2986,13 @@ mk_fun_var(Fun, Types) ->
-endif.
--spec get_deps(constr()) -> [dep()].
+-spec get_deps(constr()) -> deps().
get_deps(#constraint{deps = D}) -> D;
get_deps(#constraint_list{deps = D}) -> D;
get_deps(#constraint_ref{deps = D}) -> D.
--spec find_constraint_deps([fvar_or_type()]) -> [dep()].
+-spec find_constraint_deps([fvar_or_type()]) -> deps().
find_constraint_deps(List) ->
ordsets:from_list(find_constraint_deps(List, [])).
@@ -2950,14 +3003,14 @@ find_constraint_deps([Type|Tail], Acc) ->
NewAcc = [[t_var_name(D) || D <- t_collect_vars(Type)]|Acc],
find_constraint_deps(Tail, NewAcc);
find_constraint_deps([], Acc) ->
- lists:flatten(Acc).
+ lists:append(Acc).
-mk_constraint_1(Lhs, eq, Rhs) when Lhs < Rhs ->
- #constraint{lhs = Lhs, op = eq, rhs = Rhs};
-mk_constraint_1(Lhs, eq, Rhs) ->
- #constraint{lhs = Rhs, op = eq, rhs = Lhs};
-mk_constraint_1(Lhs, Op, Rhs) ->
- #constraint{lhs = Lhs, op = Op, rhs = Rhs}.
+mk_constraint_1(Lhs, eq, Rhs, Deps) when Lhs < Rhs ->
+ #constraint{lhs = Lhs, op = eq, rhs = Rhs, deps = Deps};
+mk_constraint_1(Lhs, eq, Rhs, Deps) ->
+ #constraint{lhs = Rhs, op = eq, rhs = Lhs, deps = Deps};
+mk_constraint_1(Lhs, Op, Rhs, Deps) ->
+ #constraint{lhs = Lhs, op = Op, rhs = Rhs, deps = Deps}.
mk_constraints([Lhs|LhsTail], Op, [Rhs|RhsTail]) ->
[mk_constraint(Lhs, Op, Rhs) |
@@ -2972,13 +3025,24 @@ mk_constraint_ref(Id, Deps) ->
mk_constraint_list(Type, List) ->
List1 = ordsets:from_list(lift_lists(Type, List)),
- List2 = ordsets:filter(fun(X) -> get_deps(X) =/= [] end, List1),
- Deps = calculate_deps(List2),
+ case Type of
+ conj ->
+ List2 = ordsets:filter(fun(X) -> get_deps(X) =/= [] end, List1),
+ mk_constraint_list_cont(Type, List2);
+ disj ->
+ case lists:any(fun(X) -> get_deps(X) =:= [] end, List1) of
+ true -> mk_constraint_list_cont(Type, [mk_constraint_any(eq)]);
+ false -> mk_constraint_list_cont(Type, List1)
+ end
+ end.
+
+mk_constraint_list_cont(Type, List) ->
+ Deps = calculate_deps(List),
case Deps =:= [] of
true -> #constraint_list{type = conj,
list = [mk_constraint_any(eq)],
deps = []};
- false -> #constraint_list{type = Type, list = List2, deps = Deps}
+ false -> #constraint_list{type = Type, list = List, deps = Deps}
end.
lift_lists(Type, List) ->
@@ -3047,8 +3111,8 @@ expand_to_conjunctions(#constraint_list{type = disj, list = List}) ->
List1 = [C || C <- List, is_simple_constraint(C)],
%% Just an assert.
[] = [C || #constraint{} = C <- List1],
- Expanded = lists:flatten([expand_to_conjunctions(C)
- || #constraint_list{} = C <- List]),
+ Expanded = lists:append([expand_to_conjunctions(C)
+ || #constraint_list{} = C <- List]),
ReturnList = Expanded ++ List1,
if length(ReturnList) > ?DISJ_NORM_FORM_LIMIT -> throw(too_many_disj);
true -> ReturnList
@@ -3073,8 +3137,10 @@ calculate_deps(List) ->
calculate_deps([H|Tail], Acc) ->
Deps = get_deps(H),
calculate_deps(Tail, [Deps|Acc]);
+calculate_deps([], []) -> [];
+calculate_deps([], [L]) -> L;
calculate_deps([], Acc) ->
- ordsets:from_list(lists:flatten(Acc)).
+ lists:umerge(Acc).
mk_conj_constraint_list(List) ->
mk_constraint_list(conj, List).
@@ -3142,7 +3208,8 @@ order_fun_constraints(State) ->
order_fun_constraints([#constraint_ref{id = Id}|Tail], State) ->
Cs = state__get_cs(Id, State),
- {[NewCs], State1} = order_fun_constraints([Cs], [], [], State),
+ {[Cs1], State1} = order_fun_constraints([Cs], [], [], State),
+ NewCs = Cs1#constraint_list{deps = Cs#constraint_list.deps},
NewState = state__store_constrs(Id, NewCs, State1),
order_fun_constraints(Tail, NewState);
order_fun_constraints([], State) ->
@@ -3150,23 +3217,31 @@ order_fun_constraints([], State) ->
order_fun_constraints([#constraint_ref{} = C|Tail], Funs, Acc, State) ->
order_fun_constraints(Tail, [C|Funs], Acc, State);
-order_fun_constraints([#constraint_list{list = List, type = Type} = C|Tail],
+order_fun_constraints([#constraint_list{list = List,
+ type = Type,
+ masks = OldMasks} = C|Tail],
Funs, Acc, State) ->
- {NewList, NewState} =
- case Type of
- conj -> order_fun_constraints(List, [], [], State);
- disj ->
- FoldFun = fun(X, AccState) ->
- {[NewX], NewAccState} =
- order_fun_constraints([X], [], [], AccState),
- {NewX, NewAccState}
- end,
- lists:mapfoldl(FoldFun, State, List)
- end,
- C1 = update_constraint_list(C, NewList),
- Masks = calculate_masks(NewList, 1, []),
- NewAcc = [update_masks(C1, Masks)|Acc],
- order_fun_constraints(Tail, Funs, NewAcc, NewState);
+ case OldMasks of
+ undefined ->
+ {NewList, NewState} =
+ case Type of
+ conj -> order_fun_constraints(List, [], [], State);
+ disj ->
+ FoldFun = fun(X, AccState) ->
+ {[NewX], NewAccState} =
+ order_fun_constraints([X], [], [], AccState),
+ {NewX, NewAccState}
+ end,
+ lists:mapfoldl(FoldFun, State, List)
+ end,
+ NewList2 = reset_deps(NewList, State),
+ C1 = update_constraint_list(C, NewList2),
+ Masks = calculate_masks(NewList, 1, []),
+ NewAcc = [update_masks(C1, Masks)|Acc],
+ order_fun_constraints(Tail, Funs, NewAcc, NewState);
+ M when is_map(M) ->
+ order_fun_constraints(Tail, Funs, [C|Acc], State)
+ end;
order_fun_constraints([#constraint{} = C|Tail], Funs, Acc, State) ->
order_fun_constraints(Tail, Funs, [C|Acc], State);
order_fun_constraints([], Funs, Acc, State) ->
@@ -3176,18 +3251,23 @@ order_fun_constraints([], Funs, Acc, State) ->
update_masks(C, Masks) ->
C#constraint_list{masks = Masks}.
--define(VARS_LIMIT, 50).
+reset_deps(ConstrList, #state{solvers = Solvers}) ->
+ case lists:member(v1, Solvers) of
+ true ->
+ ConstrList;
+ false ->
+ [reset_deps(Constr) || Constr <- ConstrList]
+ end.
+
+reset_deps(#constraint{}=C) -> C#constraint{deps = []};
+reset_deps(#constraint_list{}=C) -> C#constraint_list{deps = []};
+reset_deps(#constraint_ref{}=C) -> C#constraint_ref{deps = []}.
calculate_masks([C|Cs], I, L0) ->
calculate_masks(Cs, I+1, [{V, I} || V <- get_deps(C)] ++ L0);
calculate_masks([], _I, L) ->
M = family(L),
- case length(M) > ?VARS_LIMIT of
- true ->
- {d, dict:from_list(M)};
- false ->
- M
- end.
+ maps:from_list(M).
%% ============================================================================
%%
@@ -3260,15 +3340,34 @@ find_constraint(Tuple, [#constraint_list{list = List}|Cs]) ->
find_constraint(Tuple, [_|Cs]) ->
find_constraint(Tuple, Cs).
-lookup_record(Records, Tag, Arity) ->
- case erl_types:lookup_record(Tag, Arity, Records) of
+-spec fold_literal_maybe_match(cerl:cerl(), state()) -> cerl:cerl().
+
+fold_literal_maybe_match(Tree0, State) ->
+ Tree1 = cerl:fold_literal(Tree0),
+ case state__is_in_match(State) of
+ false -> Tree1;
+ true -> dialyzer_utils:refold_pattern(Tree1)
+ end.
+
+lookup_record(State, Tag, Arity) ->
+ #state{module = M, mod_records = ModRecs, cserver = CServer} = State,
+ {State1, Rec} =
+ case lists:keyfind(M, 1, ModRecs) of
+ {M, Rec0} ->
+ {State, Rec0};
+ false ->
+ Rec0 = dialyzer_codeserver:lookup_mod_records(M, CServer),
+ NewModRecs = [{M, Rec0}|ModRecs],
+ {State#state{mod_records = NewModRecs}, Rec0}
+ end,
+ case erl_types:lookup_record(Tag, Arity, Rec) of
{ok, Fields} ->
RecType =
t_tuple([t_from_term(Tag)|
[FieldType || {_FieldName, _Abstr, FieldType} <- Fields]]),
- {ok, RecType};
+ {ok, RecType, State1};
error ->
- error
+ {error, State1}
end.
is_literal_record(Tree) ->
@@ -3293,10 +3392,10 @@ family(L) ->
-ifdef(DEBUG).
format_type(#fun_var{deps = Deps, origin = Origin}) ->
L = [format_type(t_var(X)) || X <- Deps],
- io_lib:format("Fun@L~p(~s)", [Origin, join_chars(L, ",")]);
+ io_lib:format("Fun@L~p(~ts)", [Origin, join_chars(L, ",")]);
format_type(Type) ->
case cerl:is_literal(Type) of
- true -> io_lib:format("~w", [cerl:concrete(Type)]);
+ true -> io_lib:format("~tw", [cerl:concrete(Type)]);
false -> erl_types:t_to_string(Type)
end.
@@ -3306,7 +3405,7 @@ join_chars([H|T], Sep) ->
[H|[[Sep,X] || X <- T]].
debug_lookup_name(Var) ->
- case dict:find(t_var_name(Var), get(dialyzer_typesig_map)) of
+ case maps:find(t_var_name(Var), get(dialyzer_typesig_map)) of
error -> Var;
{ok, Name} -> Name
end.
@@ -3316,7 +3415,7 @@ debug_lookup_name(Var) ->
debug_make_name_map(Vars, Funs) ->
Map = get(dialyzer_typesig_map),
NewMap =
- if Map =:= undefined -> debug_make_name_map(Vars, Funs, dict:new());
+ if Map =:= undefined -> debug_make_name_map(Vars, Funs, maps:new());
true -> debug_make_name_map(Vars, Funs, Map)
end,
put(dialyzer_typesig_map, NewMap).
@@ -3324,7 +3423,7 @@ debug_make_name_map(Vars, Funs) ->
debug_make_name_map([Var|VarLeft], [Fun|FunLeft], Map) ->
Name = {cerl:fname_id(Var), cerl:fname_arity(Var)},
FunLabel = cerl_trees:get_label(Fun),
- debug_make_name_map(VarLeft, FunLeft, dict:store(FunLabel, Name, Map));
+ debug_make_name_map(VarLeft, FunLeft, maps:put(FunLabel, Name, Map));
debug_make_name_map([], [], Map) ->
Map.
@@ -3338,7 +3437,7 @@ pp_constrs_scc(SCC, State) ->
[pp_constrs(Fun, state__get_cs(Fun, State), State) || Fun <- SCC].
pp_constrs(Fun, Cs, State) ->
- io:format("Constraints for fun: ~w", [debug_lookup_name(Fun)]),
+ io:format("Constraints for fun: ~tw", [debug_lookup_name(Fun)]),
MaxDepth = pp_constraints(Cs, State),
io:format("Depth: ~w\n", [MaxDepth]).
@@ -3376,7 +3475,7 @@ pp_constraints([#constraint_list{type = Type, list = List, id = Id}|Tail],
pp_op(#constraint{lhs = Lhs, op = Op, rhs = Rhs}, Level) ->
pp_indent(Level),
- io:format("~s ~w ~s", [format_type(Lhs), Op, format_type(Rhs)]).
+ io:format("~ts ~w ~ts", [format_type(Lhs), Op, format_type(Rhs)]).
pp_indent(Level) ->
io:format("\n~*s", [Level*2, ""]).
@@ -3388,19 +3487,25 @@ pp_constrs_scc(_SCC, _State) ->
-ifdef(TO_DOT).
constraints_to_dot_scc(SCC, State) ->
- io:format("SCC: ~p\n", [SCC]),
- Name = lists:flatten([io_lib:format("'~w'", [debug_lookup_name(Fun)])
- || Fun <- SCC]),
+ %% TODO: handle Unicode names.
+ io:format("SCC: ~tp\n", [SCC]),
+ Name = lists:flatten([format_lookup_name(debug_lookup_name(Fun))
+ || Fun <- SCC]),
Cs = [state__get_cs(Fun, State) || Fun <- SCC],
constraints_to_dot(Cs, Name, State).
+format_lookup_name({FunName, Arity}) ->
+ TupleS = io_lib:format("{~ts,~w}", [atom_to_list(FunName), Arity]),
+ io_lib:format("~tw", [list_to_atom(lists:flatten(TupleS))]).
+
constraints_to_dot(Cs0, Name, State) ->
NofCs = length(Cs0),
Cs = lists:zip(lists:seq(1, NofCs), Cs0),
{Graph, Opts, _N} = constraints_to_nodes(Cs, NofCs + 1, 1, [], [], State),
hipe_dot:translate_list(Graph, "/tmp/cs.dot", "foo", Opts),
+ %% "-T ps" works for Latin-1. hipe_dot cannot handle UTF-8 either.
Res = os:cmd("dot -o /tmp/"++ Name ++ ".ps -T ps /tmp/cs.dot"),
- io:format("Res: ~p~n", [Res]),
+ io:format("Res: ~ts~n", [Res]),
ok.
constraints_to_nodes([{Name, #constraint_list{type = Type, list = List, id=Id}}
@@ -3419,7 +3524,7 @@ constraints_to_nodes([{Name, #constraint_list{type = Type, list = List, id=Id}}
constraints_to_nodes(Left, N2, Level, NewGraph, NewOpts, State);
constraints_to_nodes([{Name, #constraint{lhs = Lhs, op = Op, rhs = Rhs}}|Left],
N, Level, Graph, Opts, State) ->
- Label = lists:flatten(io_lib:format("~s ~w ~s",
+ Label = lists:flatten(io_lib:format("~ts ~w ~ts",
[format_type(Lhs), Op,
format_type(Rhs)])),
ThisNode = [{Name, Opt} || Opt <- [{label, Label}, {level, Level}]],
diff --git a/lib/dialyzer/src/dialyzer_utils.erl b/lib/dialyzer/src/dialyzer_utils.erl
index b585646fde..511a6d66bf 100644
--- a/lib/dialyzer/src/dialyzer_utils.erl
+++ b/lib/dialyzer/src/dialyzer_utils.erl
@@ -1,8 +1,4 @@
%% -*- erlang-indent-level: 2 -*-
-%%-----------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -15,9 +11,6 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
%%%-------------------------------------------------------------------
%%% File : dialyzer_utils.erl
@@ -31,24 +24,23 @@
-export([
format_sig/1,
format_sig/2,
- get_abstract_code_from_beam/1,
- get_compile_options_from_beam/1,
- get_abstract_code_from_src/1,
- get_abstract_code_from_src/2,
- get_core_from_abstract_code/1,
- get_core_from_abstract_code/2,
get_core_from_src/1,
get_core_from_src/2,
+ get_core_from_beam/1,
+ get_core_from_beam/2,
get_record_and_type_info/1,
get_spec_info/3,
get_fun_meta_info/3,
is_suppressed_fun/2,
is_suppressed_tag/3,
- merge_records/2,
pp_hook/0,
process_record_remote_types/1,
+ merge_types/2,
sets_filter/2,
src_compiler_opts/0,
+ refold_pattern/1,
+ ets_tab2list/1,
+ ets_move/2,
parallelism/0,
family/1
]).
@@ -66,15 +58,15 @@ print_types1([], _) ->
ok;
print_types1([{type, _Name, _NArgs} = Key|T], RecDict) ->
{ok, {{_Mod, _FileLine, _Form, _Args}, Type}} = dict:find(Key, RecDict),
- io:format("\n~w: ~w\n", [Key, Type]),
+ io:format("\n~tw: ~tw\n", [Key, Type]),
print_types1(T, RecDict);
print_types1([{opaque, _Name, _NArgs} = Key|T], RecDict) ->
{ok, {{_Mod, _FileLine, _Form, _Args}, Type}} = dict:find(Key, RecDict),
- io:format("\n~w: ~w\n", [Key, Type]),
+ io:format("\n~tw: ~tw\n", [Key, Type]),
print_types1(T, RecDict);
print_types1([{record, _Name} = Key|T], RecDict) ->
{ok, {_FileLine, [{_Arity, _Fields} = AF]}} = dict:find(Key, RecDict),
- io:format("~w: ~w\n\n", [Key, AF]),
+ io:format("~tw: ~tw\n\n", [Key, AF]),
print_types1(T, RecDict).
-define(debug(D_), print_types(D_)).
-else.
@@ -83,9 +75,7 @@ print_types1([{record, _Name} = Key|T], RecDict) ->
%% ----------------------------------------------------------------------------
--type abstract_code() :: [tuple()]. %% XXX: import from somewhere
-type comp_options() :: [compile:option()].
--type mod_or_fname() :: module() | file:filename().
-type fa() :: {atom(), arity()}.
-type codeserver() :: dialyzer_codeserver:codeserver().
@@ -95,63 +85,82 @@ print_types1([{record, _Name} = Key|T], RecDict) ->
%%
%% ============================================================================
--spec get_abstract_code_from_src(mod_or_fname()) ->
- {'ok', abstract_code()} | {'error', [string()]}.
+-type get_core_from_src_ret() :: {'ok', cerl:c_module()} | {'error', string()}.
-get_abstract_code_from_src(File) ->
- get_abstract_code_from_src(File, src_compiler_opts()).
+-spec get_core_from_src(file:filename()) -> get_core_from_src_ret().
+
+get_core_from_src(File) ->
+ get_core_from_src(File, []).
--spec get_abstract_code_from_src(mod_or_fname(), comp_options()) ->
- {'ok', abstract_code()} | {'error', [string()]}.
+-spec get_core_from_src(file:filename(), comp_options()) -> get_core_from_src_ret().
-get_abstract_code_from_src(File, Opts) ->
- case compile:noenv_file(File, [to_pp, binary|Opts]) of
+get_core_from_src(File, Opts) ->
+ case compile:noenv_file(File, Opts ++ src_compiler_opts()) of
error -> {error, []};
{error, Errors, _} -> {error, format_errors(Errors)};
- {ok, _, AbstrCode} -> {ok, AbstrCode}
+ {ok, _, Core} -> {ok, Core}
end.
--type get_core_from_src_ret() :: {'ok', cerl:c_module()} | {'error', string()}.
+-type get_core_from_beam_ret() :: {'ok', cerl:c_module()} | {'error', string()}.
--spec get_core_from_src(file:filename()) -> get_core_from_src_ret().
+-spec get_core_from_beam(file:filename()) -> get_core_from_beam_ret().
-get_core_from_src(File) ->
- get_core_from_src(File, []).
+get_core_from_beam(File) ->
+ get_core_from_beam(File, []).
--spec get_core_from_src(file:filename(), comp_options()) -> get_core_from_src_ret().
+-spec get_core_from_beam(file:filename(), comp_options()) -> get_core_from_beam_ret().
-get_core_from_src(File, Opts) ->
- case get_abstract_code_from_src(File, Opts) of
- {error, _} = Error -> Error;
+get_core_from_beam(File, Opts) ->
+ case beam_lib:chunks(File, [debug_info]) of
+ {ok, {Module, [{debug_info, {debug_info_v1, Backend, Metadata}}]}} ->
+ case Backend:debug_info(core_v1, Module, Metadata, Opts ++ src_compiler_opts()) of
+ {ok, Core} ->
+ {ok, Core};
+ {error, _} ->
+ {error, " Could not get Core Erlang code for: " ++ File ++ "\n"}
+ end;
+ _ ->
+ deprecated_get_core_from_beam(File, Opts)
+ end.
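The new primary path above pulls Core Erlang straight out of the BEAM file's debug_info chunk and only falls back to the old abstract_code route below for files built by older compilers. A minimal sketch of calling the exported wrapper, assuming some module m.erl compiled with +debug_info (the module and file names are illustrative, not part of the patch):

    %% erlc +debug_info m.erl
    case dialyzer_utils:get_core_from_beam("m.beam") of
      {ok, Core}   -> io:format("core for ~p\n", [cerl:concrete(cerl:module_name(Core))]);
      {error, Msg} -> io:format("~s\n", [Msg])
    end.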
+
+deprecated_get_core_from_beam(File, Opts) ->
+ case get_abstract_code_from_beam(File) of
+ error ->
+ {error, " Could not get abstract code for: " ++ File ++ "\n" ++
+ " Recompile with +debug_info or analyze starting from source code"};
{ok, AbstrCode} ->
- case get_core_from_abstract_code(AbstrCode, Opts) of
- error -> {error, " Could not get Core Erlang code from abstract code"};
- {ok, _Core} = C -> C
+ case get_compile_options_from_beam(File) of
+ error ->
+ {error, " Could not get compile options for: " ++ File ++ "\n" ++
+ " Recompile or analyze starting from source code"};
+ {ok, CompOpts} ->
+ case get_core_from_abstract_code(AbstrCode, Opts ++ CompOpts) of
+ error ->
+ {error, " Could not get core Erlang code for: " ++ File};
+ {ok, _} = Core ->
+ Core
+ end
end
end.
--spec get_abstract_code_from_beam(file:filename()) -> 'error' | {'ok', abstract_code()}.
-
get_abstract_code_from_beam(File) ->
case beam_lib:chunks(File, [abstract_code]) of
{ok, {_, List}} ->
case lists:keyfind(abstract_code, 1, List) of
- {abstract_code, {raw_abstract_v1, Abstr}} -> {ok, Abstr};
- _ -> error
+ {abstract_code, {raw_abstract_v1, Abstr}} -> {ok, Abstr};
+ _ -> error
end;
_ ->
%% No or unsuitable abstract code.
error
end.
--spec get_compile_options_from_beam(file:filename()) -> 'error' | {'ok', [compile:option()]}.
-
get_compile_options_from_beam(File) ->
case beam_lib:chunks(File, [compile_info]) of
{ok, {_, List}} ->
case lists:keyfind(compile_info, 1, List) of
- {compile_info, CompInfo} -> compile_info_to_options(CompInfo);
- _ -> error
+ {compile_info, CompInfo} -> compile_info_to_options(CompInfo);
+ _ -> error
end;
_ ->
%% No or unsuitable compile info.
@@ -164,15 +173,6 @@ compile_info_to_options(CompInfo) ->
_ -> error
end.
--type get_core_from_abs_ret() :: {'ok', cerl:c_module()} | 'error'.
-
--spec get_core_from_abstract_code(abstract_code()) -> get_core_from_abs_ret().
-
-get_core_from_abstract_code(AbstrCode) ->
- get_core_from_abstract_code(AbstrCode, []).
-
--spec get_core_from_abstract_code(abstract_code(), comp_options()) -> get_core_from_abs_ret().
-
get_core_from_abstract_code(AbstrCode, Opts) ->
%% We do not want the parse_transforms around since we already
%% performed them. In some cases we end up in trouble when
@@ -181,67 +181,83 @@ get_core_from_abstract_code(AbstrCode, Opts) ->
%% Remove parse_transforms (and other options) from compile options.
Opts2 = cleanup_compile_options(Opts),
try compile:noenv_forms(AbstrCode1, Opts2 ++ src_compiler_opts()) of
- {ok, _, Core} -> {ok, Core};
- _What -> error
+ {ok, _, Core} -> {ok, Core};
+ _What -> error
catch
error:_ -> error
end.
+cleanup_parse_transforms([{attribute, _, compile, {parse_transform, _}}|Left]) ->
+ cleanup_parse_transforms(Left);
+cleanup_parse_transforms([Other|Left]) ->
+ [Other|cleanup_parse_transforms(Left)];
+cleanup_parse_transforms([]) ->
+ [].
+
+cleanup_compile_options(Opts) ->
+ lists:filter(fun keep_compile_option/1, Opts).
+
+%% Using abstract, not asm or core.
+keep_compile_option(from_asm) -> false;
+keep_compile_option(from_core) -> false;
+%% The parse transform will already have been applied, may cause
+%% problems if it is re-applied.
+keep_compile_option({parse_transform, _}) -> false;
+keep_compile_option(warnings_as_errors) -> false;
+keep_compile_option(_) -> true.
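These option-cleanup helpers are moved up unchanged (their old copies, together with the now-dropped specs, are deleted further down in this file). The net effect of the filter is easiest to see on a small illustrative option list (my_transform is a made-up name):

    Opts = [debug_info, {parse_transform, my_transform}, warnings_as_errors, {d, 'TEST'}],
    Kept = lists:filter(fun(from_asm)             -> false;
                           (from_core)            -> false;
                           ({parse_transform, _}) -> false;
                           (warnings_as_errors)   -> false;
                           (_)                    -> true
                        end, Opts),
    Kept = [debug_info, {d, 'TEST'}].  %% transforms and warnings_as_errors are stripped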
+
%% ============================================================================
%%
%% Typed Records
%%
%% ============================================================================
--spec get_record_and_type_info(abstract_code()) ->
- {'ok', dict:dict()} | {'error', string()}.
-
-get_record_and_type_info(AbstractCode) ->
- Module = get_module(AbstractCode),
- get_record_and_type_info(AbstractCode, Module, dict:new()).
+-type type_table() :: erl_types:type_table().
--spec get_record_and_type_info(abstract_code(), module(), dict:dict()) ->
- {'ok', dict:dict()} | {'error', string()}.
+-spec get_record_and_type_info(cerl:c_module()) ->
+ {'ok', type_table()} | {'error', string()}.
-get_record_and_type_info(AbstractCode, Module, RecDict) ->
- get_record_and_type_info(AbstractCode, Module, RecDict, "nofile").
+get_record_and_type_info(Core) ->
+ Module = cerl:concrete(cerl:module_name(Core)),
+ Tuples = core_to_attr_tuples(Core),
+ get_record_and_type_info(Tuples, Module, maps:new(), "nofile").
-get_record_and_type_info([{attribute, A, record, {Name, Fields0}}|Left],
+get_record_and_type_info([{record, Line, [{Name, Fields0}]}|Left],
Module, RecDict, File) ->
{ok, Fields} = get_record_fields(Fields0, RecDict),
Arity = length(Fields),
- FN = {File, erl_anno:line(A)},
- NewRecDict = dict:store({record, Name}, {FN, [{Arity,Fields}]}, RecDict),
+ FN = {File, Line},
+ NewRecDict = maps:put({record, Name}, {FN, [{Arity,Fields}]}, RecDict),
get_record_and_type_info(Left, Module, NewRecDict, File);
-get_record_and_type_info([{attribute, A, type, {{record, Name}, Fields0, []}}
+get_record_and_type_info([{type, Line, [{{record, Name}, Fields0, []}]}
|Left], Module, RecDict, File) ->
%% This overrides the original record declaration.
{ok, Fields} = get_record_fields(Fields0, RecDict),
Arity = length(Fields),
- FN = {File, erl_anno:line(A)},
- NewRecDict = dict:store({record, Name}, {FN, [{Arity, Fields}]}, RecDict),
+ FN = {File, Line},
+ NewRecDict = maps:put({record, Name}, {FN, [{Arity, Fields}]}, RecDict),
get_record_and_type_info(Left, Module, NewRecDict, File);
-get_record_and_type_info([{attribute, A, Attr, {Name, TypeForm}}|Left],
+get_record_and_type_info([{Attr, Line, [{Name, TypeForm}]}|Left],
Module, RecDict, File)
when Attr =:= 'type'; Attr =:= 'opaque' ->
- FN = {File, erl_anno:line(A)},
+ FN = {File, Line},
try add_new_type(Attr, Name, TypeForm, [], Module, FN, RecDict) of
NewRecDict ->
get_record_and_type_info(Left, Module, NewRecDict, File)
catch
throw:{error, _} = Error -> Error
end;
-get_record_and_type_info([{attribute, A, Attr, {Name, TypeForm, Args}}|Left],
+get_record_and_type_info([{Attr, Line, [{Name, TypeForm, Args}]}|Left],
Module, RecDict, File)
when Attr =:= 'type'; Attr =:= 'opaque' ->
- FN = {File, erl_anno:line(A)},
+ FN = {File, Line},
try add_new_type(Attr, Name, TypeForm, Args, Module, FN, RecDict) of
NewRecDict ->
get_record_and_type_info(Left, Module, NewRecDict, File)
catch
throw:{error, _} = Error -> Error
end;
-get_record_and_type_info([{attribute, _, file, {IncludeFile, _}}|Left],
+get_record_and_type_info([{file, _, [{IncludeFile, _}]}|Left],
Module, RecDict, _File) ->
get_record_and_type_info(Left, Module, RecDict, IncludeFile);
get_record_and_type_info([_Other|Left], Module, RecDict, File) ->
@@ -254,17 +270,17 @@ add_new_type(TypeOrOpaque, Name, TypeForm, ArgForms, Module, FN,
Arity = length(ArgForms),
case erl_types:type_is_defined(TypeOrOpaque, Name, Arity, RecDict) of
true ->
- Msg = flat_format("Type ~s/~w already defined\n", [Name, Arity]),
+ Msg = flat_format("Type ~ts/~w already defined\n", [Name, Arity]),
throw({error, Msg});
false ->
try erl_types:t_var_names(ArgForms) of
ArgNames ->
- dict:store({TypeOrOpaque, Name, Arity},
- {{Module, FN, TypeForm, ArgNames},
- erl_types:t_any()}, RecDict)
+ maps:put({TypeOrOpaque, Name, Arity},
+ {{Module, FN, TypeForm, ArgNames},
+ erl_types:t_any()}, RecDict)
catch
_:_ ->
- throw({error, flat_format("Type declaration for ~w does not "
+ throw({error, flat_format("Type declaration for ~tw does not "
"have variables as parameters", [Name])})
end
end.
@@ -296,90 +312,137 @@ get_record_fields([], _RecDict, Acc) ->
%% The field types are cached. Used during analysis when handling records.
process_record_remote_types(CServer) ->
- TempRecords = dialyzer_codeserver:get_temp_records(CServer),
- TempExpTypes = dialyzer_codeserver:get_temp_exported_types(CServer),
- TempRecords1 = process_opaque_types0(TempRecords, TempExpTypes),
+ ExpTypes = dialyzer_codeserver:get_exported_types(CServer),
+ Mods = dialyzer_codeserver:all_temp_modules(CServer),
+ process_opaque_types0(Mods, CServer, ExpTypes),
+ VarTable = erl_types:var_table__new(),
+ RecordTable = dialyzer_codeserver:get_temp_records_table(CServer),
ModuleFun =
- fun(Module, Record) ->
+ fun(Module) ->
+ RecordMap = dialyzer_codeserver:lookup_temp_mod_records(Module, CServer),
RecordFun =
- fun(Key, Value) ->
+ fun({Key, Value}, C2) ->
case Key of
- {record, _Name} ->
+ {record, Name} ->
FieldFun =
- fun(_Arity, Fields) ->
- [{Name, Field,
- erl_types:t_from_form(Field,
- TempExpTypes,
- Module,
- TempRecords1)}
- || {Name, Field, _} <- Fields]
+ fun({Arity, Fields}, C4) ->
+ Site = {record, {Module, Name, Arity}},
+ {Fields1, C7} =
+ lists:mapfoldl(fun({FieldName, Field, _}, C5) ->
+ {FieldT, C6} =
+ erl_types:t_from_form
+ (Field, ExpTypes, Site,
+ RecordTable, VarTable,
+ C5),
+ {{FieldName, Field, FieldT}, C6}
+ end, C4, Fields),
+ {{Arity, Fields1}, C7}
end,
{FileLine, Fields} = Value,
- {FileLine, orddict:map(FieldFun, Fields)};
- _Other -> Value
+ {FieldsList, C3} =
+ lists:mapfoldl(FieldFun, C2, orddict:to_list(Fields)),
+ {{Key, {FileLine, orddict:from_list(FieldsList)}}, C3};
+ {type, Name, NArgs} ->
+ %% Make sure warnings about unknown types are output
+ %% also for types unused by specs.
+ Site = {type, {Module, Name, NArgs}},
+ L = erl_anno:new(0),
+ Args = lists:duplicate(NArgs, {var, L, '_'}),
+ UserType = {user_type, L, Name, Args},
+ {_NewType, C3} =
+ erl_types:t_from_form(UserType, ExpTypes, Site,
+ RecordTable, VarTable, C2),
+ {{Key, Value}, C3};
+ {opaque, _Name, _NArgs} ->
+ {{Key, Value}, C2}
end
end,
- dict:map(RecordFun, Record)
+ Cache = erl_types:cache__new(),
+ {RecordList, _NewCache} =
+ lists:mapfoldl(RecordFun, Cache, maps:to_list(RecordMap)),
+ dialyzer_codeserver:store_temp_records(Module,
+ maps:from_list(RecordList),
+ CServer)
end,
- NewRecords = dict:map(ModuleFun, TempRecords1),
- ok = check_record_fields(NewRecords, TempExpTypes),
- CServer1 = dialyzer_codeserver:finalize_records(NewRecords, CServer),
- dialyzer_codeserver:finalize_exported_types(TempExpTypes, CServer1).
+ lists:foreach(ModuleFun, Mods),
+ check_record_fields(Mods, CServer, ExpTypes),
+ dialyzer_codeserver:finalize_records(CServer).
%% erl_types:t_from_form() substitutes the declaration of opaque types
%% for the expanded type in some cases. To make sure the initial type,
%% any(), is not used, the expansion is done twice.
%% XXX: Recursive opaque types are not handled well.
-process_opaque_types0(TempRecords0, TempExpTypes) ->
- TempRecords1 = process_opaque_types(TempRecords0, TempExpTypes),
- process_opaque_types(TempRecords1, TempExpTypes).
+process_opaque_types0(AllModules, CServer, TempExpTypes) ->
+ process_opaque_types(AllModules, CServer, TempExpTypes),
+ process_opaque_types(AllModules, CServer, TempExpTypes).
-process_opaque_types(TempRecords, TempExpTypes) ->
+process_opaque_types(AllModules, CServer, TempExpTypes) ->
+ VarTable = erl_types:var_table__new(),
+ RecordTable = dialyzer_codeserver:get_temp_records_table(CServer),
ModuleFun =
- fun(Module, Record) ->
+ fun(Module) ->
+ RecordMap = dialyzer_codeserver:lookup_temp_mod_records(Module, CServer),
RecordFun =
- fun(Key, Value) ->
+ fun({Key, Value}, C2) ->
case Key of
- {opaque, _Name, _NArgs} ->
+ {opaque, Name, NArgs} ->
{{_Module, _FileLine, Form, _ArgNames}=F, _Type} = Value,
- Type = erl_types:t_from_form(Form, TempExpTypes, Module,
- TempRecords),
- {F, Type};
- _Other -> Value
+ Site = {type, {Module, Name, NArgs}},
+ {Type, C3} =
+ erl_types:t_from_form(Form, TempExpTypes, Site,
+ RecordTable, VarTable, C2),
+ {{Key, {F, Type}}, C3};
+ {type, _Name, _NArgs} ->
+ {{Key, Value}, C2};
+ {record, _RecName} ->
+ {{Key, Value}, C2}
end
end,
- dict:map(RecordFun, Record)
+ C0 = erl_types:cache__new(),
+ {RecordList, _NewCache} =
+ lists:mapfoldl(RecordFun, C0, maps:to_list(RecordMap)),
+ dialyzer_codeserver:store_temp_records(Module,
+ maps:from_list(RecordList),
+ CServer)
end,
- dict:map(ModuleFun, TempRecords).
+ lists:foreach(ModuleFun, AllModules).
-check_record_fields(Records, TempExpTypes) ->
+check_record_fields(AllModules, CServer, TempExpTypes) ->
+ VarTable = erl_types:var_table__new(),
+ RecordTable = dialyzer_codeserver:get_temp_records_table(CServer),
CheckFun =
- fun({Module, Element}) ->
- CheckForm = fun(F) ->
- erl_types:t_check_record_fields(F, TempExpTypes,
- Module, Records)
+ fun(Module) ->
+ CheckForm = fun(Form, Site, C1) ->
+ erl_types:t_check_record_fields(Form, TempExpTypes,
+ Site, RecordTable,
+ VarTable, C1)
end,
- ElemFun =
- fun({Key, Value}) ->
+ RecordMap = dialyzer_codeserver:lookup_temp_mod_records(Module, CServer),
+ RecordFun =
+ fun({Key, Value}, C2) ->
case Key of
- {record, _Name} ->
+ {record, Name} ->
FieldFun =
- fun({_Arity, Fields}) ->
- _ = [ok = CheckForm(Field) || {_, Field, _} <- Fields],
- ok
+ fun({Arity, Fields}, C3) ->
+ Site = {record, {Module, Name, Arity}},
+ lists:foldl(fun({_, Field, _}, C4) ->
+ CheckForm(Field, Site, C4)
+ end, C3, Fields)
end,
{FileLine, Fields} = Value,
- Fun = fun() -> lists:foreach(FieldFun, Fields) end,
+ Fun = fun() -> lists:foldl(FieldFun, C2, Fields) end,
msg_with_position(Fun, FileLine);
- {_OpaqueOrType, _Name, _} ->
+ {_OpaqueOrType, Name, NArgs} ->
+ Site = {type, {Module, Name, NArgs}},
{{_Module, FileLine, Form, _ArgNames}, _Type} = Value,
- Fun = fun() -> ok = CheckForm(Form) end,
+ Fun = fun() -> CheckForm(Form, Site, C2) end,
msg_with_position(Fun, FileLine)
end
end,
- lists:foreach(ElemFun, dict:to_list(Element))
+ C0 = erl_types:cache__new(),
+ _ = lists:foldl(RecordFun, C0, maps:to_list(RecordMap))
end,
- lists:foreach(CheckFun, dict:to_list(Records)).
+ lists:foreach(CheckFun, AllModules).
msg_with_position(Fun, FileLine) ->
try Fun()
@@ -387,14 +450,41 @@ msg_with_position(Fun, FileLine) ->
throw:{error, Msg} ->
{File, Line} = FileLine,
BaseName = filename:basename(File),
- NewMsg = io_lib:format("~s:~p: ~s", [BaseName, Line, Msg]),
+ NewMsg = io_lib:format("~ts:~p: ~ts", [BaseName, Line, Msg]),
throw({error, NewMsg})
end.
--spec merge_records(dict:dict(), dict:dict()) -> dict:dict().
+-spec merge_types(codeserver(), dialyzer_plt:plt()) -> codeserver().
-merge_records(NewRecords, OldRecords) ->
- dict:merge(fun(_Key, NewVal, _OldVal) -> NewVal end, NewRecords, OldRecords).
+merge_types(CServer, Plt) ->
+ AllNewModules = dialyzer_codeserver:all_temp_modules(CServer),
+ AllNewModulesSet = sets:from_list(AllNewModules),
+ AllOldModulesSet = dialyzer_plt:all_modules(Plt),
+ AllModulesSet = sets:union(AllNewModulesSet, AllOldModulesSet),
+ ModuleFun =
+ fun(Module) ->
+ KeepOldFun =
+ fun() ->
+ case dialyzer_plt:get_module_types(Plt, Module) of
+ none -> no;
+ {value, OldRecords} ->
+ case sets:is_element(Module, AllNewModulesSet) of
+ true -> no;
+ false -> {yes, OldRecords}
+ end
+ end
+ end,
+ Records =
+ case KeepOldFun() of
+ no ->
+ dialyzer_codeserver:lookup_temp_mod_records(Module, CServer);
+ {yes, OldRecords} ->
+ OldRecords
+ end,
+ dialyzer_codeserver:store_temp_records(Module, Records, CServer)
+ end,
+ lists:foreach(ModuleFun, sets:to_list(AllModulesSet)),
+ CServer.
%% ============================================================================
%%
@@ -402,26 +492,21 @@ merge_records(NewRecords, OldRecords) ->
%%
%% ============================================================================
--type spec_dict() :: dict:dict().
--type callback_dict() :: dict:dict().
+-type spec_map() :: dialyzer_codeserver:contracts().
+-type callback_map() :: dialyzer_codeserver:contracts().
--spec get_spec_info(module(), abstract_code(), dict:dict()) ->
- {'ok', spec_dict(), callback_dict()} | {'error', string()}.
+-spec get_spec_info(module(), cerl:c_module(), type_table()) ->
+ {'ok', spec_map(), callback_map()} | {'error', string()}.
-get_spec_info(ModName, AbstractCode, RecordsDict) ->
- OptionalCallbacks0 = get_optional_callbacks(AbstractCode, ModName),
+get_spec_info(ModName, Core, RecordsMap) ->
+ Tuples = core_to_attr_tuples(Core),
+ OptionalCallbacks0 = get_optional_callbacks(Tuples, ModName),
OptionalCallbacks = gb_sets:from_list(OptionalCallbacks0),
- get_spec_info(AbstractCode, dict:new(), dict:new(),
- RecordsDict, ModName, OptionalCallbacks, "nofile").
-
-get_optional_callbacks(Abs, ModName) ->
- [{ModName, F, A} || {F, A} <- get_optional_callbacks(Abs)].
+ get_spec_info(Tuples, maps:new(), maps:new(),
+ RecordsMap, ModName, OptionalCallbacks, "nofile").
-get_optional_callbacks(Abs) ->
- L = [O ||
- {attribute, _, optional_callbacks, O} <- Abs,
- is_fa_list(O)],
- lists:append(L).
+get_optional_callbacks(Tuples, ModName) ->
+ [{ModName, F, A} || {optional_callbacks, _, O} <- Tuples, is_fa_list(O), {F, A} <- O].
%% TypeSpec is a list of conditional contracts for a function.
%% Each contract is of the form {[Argument], Range, [Constraint]} where
@@ -429,68 +514,82 @@ get_optional_callbacks(Abs) ->
%% - Constraint is of the form {subtype, T1, T2} where T1 and T2
%% are erl_types:erl_type()
-get_spec_info([{attribute, Anno, Contract, {Id, TypeSpec}}|Left],
- SpecDict, CallbackDict, RecordsDict, ModName, OptCb, File)
+get_spec_info([{Contract, Ln, [{Id, TypeSpec}]}|Left],
+ SpecMap, CallbackMap, RecordsMap, ModName, OptCb, File)
when ((Contract =:= 'spec') or (Contract =:= 'callback')),
is_list(TypeSpec) ->
- Ln = erl_anno:line(Anno),
MFA = case Id of
{_, _, _} = T -> T;
{F, A} -> {ModName, F, A}
end,
Xtra = [optional_callback || gb_sets:is_member(MFA, OptCb)],
- ActiveDict =
+ ActiveMap =
case Contract of
- spec -> SpecDict;
- callback -> CallbackDict
+ spec -> SpecMap;
+ callback -> CallbackMap
end,
- try dict:find(MFA, ActiveDict) of
+ try maps:find(MFA, ActiveMap) of
error ->
SpecData = {TypeSpec, Xtra},
- NewActiveDict =
+ NewActiveMap =
dialyzer_contracts:store_tmp_contract(MFA, {File, Ln}, SpecData,
- ActiveDict, RecordsDict),
- {NewSpecDict, NewCallbackDict} =
+ ActiveMap, RecordsMap),
+ {NewSpecMap, NewCallbackMap} =
case Contract of
- spec -> {NewActiveDict, CallbackDict};
- callback -> {SpecDict, NewActiveDict}
+ spec -> {NewActiveMap, CallbackMap};
+ callback -> {SpecMap, NewActiveMap}
end,
- get_spec_info(Left, NewSpecDict, NewCallbackDict,
- RecordsDict, ModName, OptCb, File);
+ get_spec_info(Left, NewSpecMap, NewCallbackMap,
+ RecordsMap, ModName, OptCb, File);
{ok, {{OtherFile, L}, _D}} ->
{Mod, Fun, Arity} = MFA,
- Msg = flat_format(" Contract/callback for function ~w:~w/~w "
- "already defined in ~s:~w\n",
+ Msg = flat_format(" Contract/callback for function ~w:~tw/~w "
+ "already defined in ~ts:~w\n",
[Mod, Fun, Arity, OtherFile, L]),
throw({error, Msg})
catch
throw:{error, Error} ->
- {error, flat_format(" Error while parsing contract in line ~w: ~s\n",
+ {error, flat_format(" Error while parsing contract in line ~w: ~ts\n",
[Ln, Error])}
end;
-get_spec_info([{attribute, _, file, {IncludeFile, _}}|Left],
- SpecDict, CallbackDict, RecordsDict, ModName, OptCb, _File) ->
- get_spec_info(Left, SpecDict, CallbackDict,
- RecordsDict, ModName, OptCb, IncludeFile);
-get_spec_info([_Other|Left], SpecDict, CallbackDict,
- RecordsDict, ModName, OptCb, File) ->
- get_spec_info(Left, SpecDict, CallbackDict,
- RecordsDict, ModName, OptCb, File);
-get_spec_info([], SpecDict, CallbackDict,
- _RecordsDict, _ModName, _OptCb, _File) ->
- {ok, SpecDict, CallbackDict}.
-
--spec get_fun_meta_info(module(), abstract_code(), [dial_warn_tag()]) ->
- dialyzer_codeserver:fun_meta_info().
-
-get_fun_meta_info(M, Abs, LegalWarnings) ->
- NoWarn = get_nowarn_unused_function(M, Abs),
- FuncSupp = get_func_suppressions(M, Abs),
- Warnings0 = get_options(Abs, LegalWarnings),
- Warnings = ordsets:to_list(Warnings0),
- ModuleWarnings = [{M, W} || W <- Warnings],
- RawProps = lists:append([NoWarn, FuncSupp, ModuleWarnings]),
- process_options(dialyzer_utils:family(RawProps), Warnings0).
+get_spec_info([{file, _, [{IncludeFile, _}]}|Left],
+ SpecMap, CallbackMap, RecordsMap, ModName, OptCb, _File) ->
+ get_spec_info(Left, SpecMap, CallbackMap,
+ RecordsMap, ModName, OptCb, IncludeFile);
+get_spec_info([_Other|Left], SpecMap, CallbackMap,
+ RecordsMap, ModName, OptCb, File) ->
+ get_spec_info(Left, SpecMap, CallbackMap,
+ RecordsMap, ModName, OptCb, File);
+get_spec_info([], SpecMap, CallbackMap,
+ _RecordsMap, _ModName, _OptCb, _File) ->
+ {ok, SpecMap, CallbackMap}.
+
+core_to_attr_tuples(Core) ->
+ [{cerl:concrete(Key), get_core_line(cerl:get_ann(Key)), cerl:concrete(Value)} ||
+ {Key, Value} <- cerl:module_attrs(Core)].
+
+get_core_line([L | _As]) when is_integer(L) -> L;
+get_core_line([_ | As]) -> get_core_line(As);
+get_core_line([]) -> undefined.
+
+-spec get_fun_meta_info(module(), cerl:c_module(), [dial_warn_tag()]) ->
+ dialyzer_codeserver:fun_meta_info() | {'error', string()}.
+
+get_fun_meta_info(M, Core, LegalWarnings) ->
+ Functions = lists:map(fun cerl:var_name/1, cerl:module_vars(Core)),
+ try
+ {get_nowarn_unused_function(M, Core, Functions),
+ get_func_suppressions(M, Core, Functions)}
+ of
+ {NoWarn, FuncSupp} ->
+ Warnings0 = get_options(Core, LegalWarnings),
+ Warnings = ordsets:to_list(Warnings0),
+ ModuleWarnings = [{M, W} || W <- Warnings],
+ RawProps = lists:append([NoWarn, FuncSupp, ModuleWarnings]),
+ process_options(dialyzer_utils:family(RawProps), Warnings0)
+ catch throw:{error, _} = Error ->
+ Error
+ end.
process_options([{M, _}=Mod|Left], Warnings) when is_atom(M) ->
[Mod|process_options(Left, Warnings)];
@@ -509,74 +608,75 @@ process_options([{{_M, _F, _A}=MFA, Opts}|Left], Warnings) ->
end;
process_options([], _Warnings) -> [].
--spec get_nowarn_unused_function(module(), abstract_code()) ->
+-spec get_nowarn_unused_function(module(), cerl:c_module(), [fa()]) ->
[{mfa(), 'no_unused'}].
-get_nowarn_unused_function(M, Abs) ->
- Opts = get_options_with_tag(compile, Abs),
+get_nowarn_unused_function(M, Core, Functions) ->
+ Opts = get_options_with_tag(compile, Core),
Warn = erl_lint:bool_option(warn_unused_function, nowarn_unused_function,
true, Opts),
- Functions = [{F, A} || {function, _, F, A, _} <- Abs],
- AttrFile = collect_attribute(Abs, compile),
+ AttrFile = collect_attribute(Core, compile),
TagsFaList = check_fa_list(AttrFile, nowarn_unused_function, Functions),
FAs = case Warn of
false -> Functions;
true ->
- [FA || {{nowarn_unused_function,_L,_File}, FA} <- TagsFaList]
+ [FA || {{[nowarn_unused_function],_L,_File}, FA} <- TagsFaList]
end,
[{{M, F, A}, no_unused} || {F, A} <- FAs].
--spec get_func_suppressions(module(), abstract_code()) ->
+-spec get_func_suppressions(module(), cerl:c_module(), [fa()]) ->
[{mfa(), 'nowarn_function' | dial_warn_tag()}].
-get_func_suppressions(M, Abs) ->
- Functions = [{F, A} || {function, _, F, A, _} <- Abs],
- AttrFile = collect_attribute(Abs, dialyzer),
+get_func_suppressions(M, Core, Functions) ->
+ AttrFile = collect_attribute(Core, dialyzer),
TagsFAs = check_fa_list(AttrFile, '*', Functions),
%% Check the options:
- Fun = fun({{nowarn_function, _L, _File}, _FA}) -> ok;
+ Fun = fun({{[nowarn_function], _L, _File}, _FA}) -> ok;
({OptLFile, _FA}) ->
_ = get_options1([OptLFile], ordsets:new())
end,
lists:foreach(Fun, TagsFAs),
- [{{M, F, A}, W} || {{W, _L, _File}, {F, A}} <- TagsFAs].
+ [{{M, F, A}, W} || {{Warnings, _L, _File}, {F, A}} <- TagsFAs, W <- Warnings].
--spec get_options(abstract_code(), [dial_warn_tag()]) ->
+-spec get_options(cerl:c_module(), [dial_warn_tag()]) ->
ordsets:ordset(dial_warn_tag()).
-get_options(Abs, LegalWarnings) ->
- AttrFile = collect_attribute(Abs, dialyzer),
+get_options(Core, LegalWarnings) ->
+ AttrFile = collect_attribute(Core, dialyzer),
get_options1(AttrFile, LegalWarnings).
get_options1([{Args, L, File}|Left], Warnings) ->
- Opts = [O ||
- O <- lists:flatten([Args]),
- is_atom(O)],
+ Opts = [O || O <- Args, is_atom(O)],
try dialyzer_options:build_warnings(Opts, Warnings) of
NewWarnings ->
get_options1(Left, NewWarnings)
catch
throw:{dialyzer_options_error, Msg} ->
- Msg1 = flat_format(" ~s:~w: ~s", [File, L, Msg]),
+ Msg1 = flat_format(" ~ts:~w: ~ts", [File, L, Msg]),
throw({error, Msg1})
end;
get_options1([], Warnings) ->
Warnings.
-type collected_attribute() ::
- {Args :: term(), erl_anno:line(), file:filename()}.
-
-collect_attribute(Abs, Tag) ->
- collect_attribute(Abs, Tag, "nofile").
-
-collect_attribute([{attribute, L, Tag, Args}|Left], Tag, File) ->
- CollAttr = {Args, L, File},
- [CollAttr | collect_attribute(Left, Tag, File)];
-collect_attribute([{attribute, _, file, {IncludeFile, _}}|Left], Tag, _) ->
- collect_attribute(Left, Tag, IncludeFile);
-collect_attribute([_Other|Left], Tag, File) ->
- collect_attribute(Left, Tag, File);
-collect_attribute([], _Tag, _File) -> [].
+ {Args :: [term()], erl_anno:line(), file:filename()}.
+
+collect_attribute(Core, Tag) ->
+ collect_attribute(cerl:module_attrs(Core), Tag, "nofile").
+
+collect_attribute([{Key, Value}|T], Tag, File) ->
+ case cerl:concrete(Key) of
+ Tag ->
+ [{cerl:concrete(Value), get_core_line(cerl:get_ann(Key)), File} |
+ collect_attribute(T, Tag, File)];
+ file ->
+ [{IncludeFile, _}] = cerl:concrete(Value),
+ collect_attribute(T, Tag, IncludeFile);
+ _ ->
+ collect_attribute(T, Tag, File)
+ end;
+collect_attribute([], _Tag, _File) ->
+ [].
-spec is_suppressed_fun(mfa(), codeserver()) -> boolean().
@@ -627,40 +727,11 @@ src_compiler_opts() ->
no_inline, strict_record_tests, strict_record_updates,
dialyzer].
--spec get_module(abstract_code()) -> module().
-
-get_module([{attribute, _, module, {M, _As}} | _]) -> M;
-get_module([{attribute, _, module, M} | _]) -> M;
-get_module([_ | Rest]) -> get_module(Rest).
-
--spec cleanup_parse_transforms(abstract_code()) -> abstract_code().
-
-cleanup_parse_transforms([{attribute, _, compile, {parse_transform, _}}|Left]) ->
- cleanup_parse_transforms(Left);
-cleanup_parse_transforms([Other|Left]) ->
- [Other|cleanup_parse_transforms(Left)];
-cleanup_parse_transforms([]) ->
- [].
-
--spec cleanup_compile_options([compile:option()]) -> [compile:option()].
-
-cleanup_compile_options(Opts) ->
- lists:filter(fun keep_compile_option/1, Opts).
-
-%% Using abstract, not asm or core.
-keep_compile_option(from_asm) -> false;
-keep_compile_option(from_core) -> false;
-%% The parse transform will already have been applied, may cause
-%% problems if it is re-applied.
-keep_compile_option({parse_transform, _}) -> false;
-keep_compile_option(warnings_as_errors) -> false;
-keep_compile_option(_) -> true.
-
-spec format_errors([{module(), string()}]) -> [string()].
format_errors([{Mod, Errors}|Left]) ->
FormatedError =
- [io_lib:format("~s:~w: ~s\n", [Mod, Line, M:format_error(Desc)])
+ [io_lib:format("~ts:~w: ~ts\n", [Mod, Line, M:format_error(Desc)])
|| {Line, M, Desc} <- Errors],
[lists:flatten(FormatedError) | format_errors(Left)];
format_errors([]) ->
@@ -669,9 +740,9 @@ format_errors([]) ->
-spec format_sig(erl_types:erl_type()) -> string().
format_sig(Type) ->
- format_sig(Type, dict:new()).
+ format_sig(Type, maps:new()).
--spec format_sig(erl_types:erl_type(), dict:dict()) -> string().
+-spec format_sig(erl_types:erl_type(), type_table()) -> string().
format_sig(Type, RecDict) ->
"fun(" ++ Sig = lists:flatten(erl_types:t_to_string(Type, RecDict)),
@@ -681,10 +752,12 @@ format_sig(Type, RecDict) ->
flat_format(Fmt, Lst) ->
lists:flatten(io_lib:format(Fmt, Lst)).
--spec get_options_with_tag(atom(), abstract_code()) -> [term()].
+-spec get_options_with_tag(atom(), cerl:c_module()) -> [term()].
-get_options_with_tag(Tag, Abs) ->
- lists:flatten([O || {attribute, _, Tag0, O} <- Abs, Tag =:= Tag0]).
+get_options_with_tag(Tag, Core) ->
+ [O || {Key, Value} <- cerl:module_attrs(Core),
+ cerl:concrete(Key) =:= Tag,
+ O <- cerl:concrete(Value)].
%% Check F/A, and collect (unchecked) warning tags with line and file.
-spec check_fa_list([collected_attribute()], atom(), [fa()]) ->
@@ -695,22 +768,22 @@ check_fa_list(AttrFile, Tag, Functions) ->
check_fa_list1(AttrFile, Tag, FuncTab).
check_fa_list1([{Args, L, File}|Left], Tag, Funcs) ->
- TermsL = [{{Tag0, L, File}, Term} ||
- {Tags, Terms0} <- lists:flatten([Args]),
+ TermsL = [{{[Tag0], L, File}, Term} ||
+ {Tags, Terms0} <- Args,
Tag0 <- lists:flatten([Tags]),
Tag =:= '*' orelse Tag =:= Tag0,
Term <- lists:flatten([Terms0])],
case lists:dropwhile(fun({_, T}) -> is_fa(T) end, TermsL) of
[] -> ok;
[{_, Bad}|_] ->
- Msg1 = flat_format(" Bad function ~w in line ~s:~w",
+ Msg1 = flat_format(" Bad function ~tw in line ~ts:~w",
[Bad, File, L]),
throw({error, Msg1})
end,
case lists:dropwhile(fun({_, FA}) -> is_known(FA, Funcs) end, TermsL) of
[] -> ok;
[{_, {F, A}}|_] ->
- Msg2 = flat_format(" Unknown function ~w/~w in line ~s:~w",
+ Msg2 = flat_format(" Unknown function ~tw/~w in line ~ts:~w",
[F, A, File, L]),
throw({error, Msg2})
end,
@@ -748,6 +821,13 @@ pp_hook(Node, Ctxt, Cont) ->
pp_binary(Node, Ctxt, Cont);
bitstr ->
pp_segment(Node, Ctxt, Cont);
+ map ->
+ pp_map(Node, Ctxt, Cont);
+ literal ->
+ case is_map(cerl:concrete(Node)) of
+ true -> pp_map(Node, Ctxt, Cont);
+ false -> Cont(Node, Ctxt)
+ end;
_ ->
Cont(Node, Ctxt)
end.
@@ -828,14 +908,122 @@ pp_atom(Atom) ->
String = atom_to_list(cerl:atom_val(Atom)),
prettypr:text(String).
+pp_map(Node, Ctxt, Cont) ->
+ Arg = cerl:map_arg(Node),
+ Before = case cerl:is_c_map_empty(Arg) of
+ true -> prettypr:floating(prettypr:text("#{"));
+ false ->
+ prettypr:beside(Cont(Arg,Ctxt),
+ prettypr:floating(prettypr:text("#{")))
+ end,
+ prettypr:beside(
+ Before, prettypr:beside(
+ prettypr:par(seq(cerl:map_es(Node),
+ prettypr:floating(prettypr:text(",")),
+ Ctxt, Cont)),
+ prettypr:floating(prettypr:text("}")))).
+
+seq([H | T], Separator, Ctxt, Fun) ->
+ case T of
+ [] -> [Fun(H, Ctxt)];
+ _ -> [prettypr:beside(Fun(H, Ctxt), Separator)
+ | seq(T, Separator, Ctxt, Fun)]
+ end;
+seq([], _, _, _) ->
+ [prettypr:empty()].
+
+%%------------------------------------------------------------------------------
+
+-spec refold_pattern(cerl:cerl()) -> cerl:cerl().
+
+refold_pattern(Pat) ->
+ %% Avoid the churn of unfolding and refolding
+ case cerl:is_literal(Pat) andalso find_map(cerl:concrete(Pat)) of
+ true ->
+ Tree = refold_concrete_pat(cerl:concrete(Pat)),
+ PatAnn = cerl:get_ann(Pat),
+ case proplists:is_defined(label, PatAnn) of
+ %% Literals are not normally annotated with a label, but can be if, for
+ %% example, they were created by cerl:fold_literal/1.
+ true -> cerl:set_ann(Tree, PatAnn);
+ false ->
+ [{label, Label}] = cerl:get_ann(Tree),
+ cerl:set_ann(Tree, [{label, Label}|PatAnn])
+ end;
+ false -> Pat
+ end.
+
+find_map(#{}) -> true;
+find_map(Tuple) when is_tuple(Tuple) -> find_map(tuple_to_list(Tuple));
+find_map([H|T]) -> find_map(H) orelse find_map(T);
+find_map(_) -> false.
+
+refold_concrete_pat(Val) ->
+ case Val of
+ _ when is_tuple(Val) ->
+ Els = lists:map(fun refold_concrete_pat/1, tuple_to_list(Val)),
+ case lists:all(fun cerl:is_literal/1, Els) of
+ true -> cerl:abstract(Val);
+ false -> label(cerl:c_tuple_skel(Els))
+ end;
+ [H|T] ->
+ case cerl:is_literal(HP=refold_concrete_pat(H))
+ and cerl:is_literal(TP=refold_concrete_pat(T))
+ of
+ true -> cerl:abstract(Val);
+ false -> label(cerl:c_cons_skel(HP, TP))
+ end;
+ M when is_map(M) ->
+ %% Map patterns are not generated by the parser(!), but they have a
+ %% property we want, namely that they are never folded into literals.
+ %% N.B.: The key in a map pattern is an expression, *not* a pattern.
+ label(cerl:c_map_pattern([cerl:c_map_pair_exact(cerl:abstract(K),
+ refold_concrete_pat(V))
+ || {K, V} <- maps:to_list(M)]));
+ _ ->
+ cerl:abstract(Val)
+ end.
+
+label(Tree) ->
+ %% Sigh
+ Label = -erlang:unique_integer([positive]),
+ cerl:set_ann(Tree, [{label, Label}]).
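refold_pattern/1 only rebuilds literal patterns that contain a map somewhere inside; anything else comes back untouched, as the find_map/1 walk above shows. A minimal sketch with illustrative literals:

    Plain = cerl:abstract({ok, [1, 2, 3]}),
    Plain = dialyzer_utils:refold_pattern(Plain),    %% no map inside: same node back

    WithMap  = cerl:abstract({tag, #{k => 1}}),
    Refolded = dialyzer_utils:refold_pattern(WithMap),
    false = cerl:is_literal(Refolded),               %% refolded into a tuple skeleton
    tuple = cerl:type(Refolded).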
+
%%------------------------------------------------------------------------------
+-spec ets_tab2list(ets:tid()) -> list().
+
+%% Deletes the contents of the table. Use:
+%% ets_tab2list(T), ets:delete(T)
+%% instead of:
+%% ets:tab2list(T), ets:delete(T)
+%% to save some memory at the expense of somewhat longer execution time.
+ets_tab2list(T) ->
+ F = fun(Vs, A) -> Vs ++ A end,
+ ets_take(ets:first(T), T, F, []).
+
+-spec ets_move(From :: ets:tid(), To :: ets:tid()) -> 'ok'.
+
+ets_move(T1, T2) ->
+ F = fun(Es, A) -> true = ets:insert(T2, Es), A end,
+ [] = ets_take(ets:first(T1), T1, F, []),
+ ok.
+
+ets_take('$end_of_table', T, F, A) ->
+ case ets:first(T) of % no safe_fixtable()...
+ '$end_of_table' -> A;
+ Key -> ets_take(Key, T, F, A)
+ end;
+ets_take(Key, T, F, A) ->
+ Vs = ets:lookup(T, Key),
+ Key1 = ets:next(T, Key),
+ true = ets:delete(T, Key),
+ ets_take(Key1, T, F, F(Vs, A)).
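As the comment above says, both helpers drain the source table as they read it, trading a little time for memory. A minimal sketch (table names are illustrative):

    T1 = ets:new(src, [public]),
    T2 = ets:new(dst, [public]),
    true = ets:insert(T1, [{a, 1}, {b, 2}]),
    ok = dialyzer_utils:ets_move(T1, T2),              %% T1 is drained into T2
    [] = ets:tab2list(T1),
    2 = length(dialyzer_utils:ets_tab2list(T2)),       %% reads T2 and empties it
    true = ets:delete(T1),
    true = ets:delete(T2).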
+
-spec parallelism() -> integer().
parallelism() ->
- CPUs = erlang:system_info(logical_processors_available),
- Schedulers = erlang:system_info(schedulers),
- min(CPUs, Schedulers).
+ erlang:system_info(schedulers_online).
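The replacement asks the runtime directly for the number of schedulers that are online; the result is always a plain integer. For illustration:

    N = erlang:system_info(schedulers_online),
    true = is_integer(N) andalso N >= 1.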
-spec family([{K,V}]) -> [{K,[V]}].
diff --git a/lib/dialyzer/src/dialyzer_worker.erl b/lib/dialyzer/src/dialyzer_worker.erl
index 4be93c75bf..af0f2e9e08 100644
--- a/lib/dialyzer/src/dialyzer_worker.erl
+++ b/lib/dialyzer/src/dialyzer_worker.erl
@@ -1,8 +1,4 @@
%% -*- erlang-indent-level: 2 -*-
-%%-----------------------------------------------------------------------
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2006-2012. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -15,26 +11,23 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-module(dialyzer_worker).
--export([launch/4, sequential/4]).
+-export([launch/4]).
-export_type([worker/0]).
--type worker() :: pid(). %%opaque
+-opaque worker() :: pid().
-type mode() :: dialyzer_coordinator:mode().
-type coordinator() :: dialyzer_coordinator:coordinator().
-type init_data() :: dialyzer_coordinator:init_data().
--type result() :: dialyzer_coordinator:result().
+-type job() :: dialyzer_coordinator:job().
-record(state, {
mode :: mode(),
- job :: mfa_or_funlbl() | file:filename(),
+ job :: job(),
coordinator :: coordinator(),
init_data :: init_data(),
depends_on = [] :: list()
@@ -52,40 +45,46 @@
%%--------------------------------------------------------------------
--spec launch(mode(), [mfa_or_funlbl()], init_data(), coordinator()) -> worker().
+-spec launch(mode(), job(), init_data(), coordinator()) -> worker().
launch(Mode, Job, InitData, Coordinator) ->
State = #state{mode = Mode,
job = Job,
init_data = InitData,
coordinator = Coordinator},
- InitState =
- case Mode of
- X when X =:= 'typesig'; X =:= 'dataflow' -> initializing;
- X when X =:= 'compile'; X =:= 'warnings' -> running
- end,
- spawn_link(fun() -> loop(InitState, State) end).
+ spawn_link(fun() -> init(State) end).
%%--------------------------------------------------------------------
-loop(updating, State) ->
- ?debug("Update: ~p\n",[State#state.job]),
+init(#state{job = SCC, mode = Mode, init_data = InitData,
+ coordinator = Coordinator} = State) when
+ Mode =:= 'typesig'; Mode =:= 'dataflow' ->
+ DependsOnSCCs = dialyzer_succ_typings:find_depends_on(SCC, InitData),
+ ?debug("~w: Deps ~p: ~p\n", [self(), SCC, DependsOnSCCs]),
+ Pids = dialyzer_coordinator:sccs_to_pids(DependsOnSCCs, Coordinator),
+ ?debug("~w: PidsDeps ~p\n", [self(), Pids]),
+ DependsOn = [{Pid, erlang:monitor(process, Pid)} || Pid <- Pids],
+ loop(updating, State#state{depends_on = DependsOn});
+init(#state{mode = Mode} = State) when
+ Mode =:= 'compile'; Mode =:= 'warnings' ->
+ loop(running, State).
+
+loop(updating, #state{mode = Mode} = State) when
+ Mode =:= 'typesig'; Mode =:= 'dataflow' ->
+ ?debug("~w: Update: ~p\n", [self(), State#state.job]),
NextStatus =
case waits_more_success_typings(State) of
true -> waiting;
false -> running
end,
loop(NextStatus, State);
-loop(initializing, #state{job = SCC, init_data = InitData} = State) ->
- DependsOn = dialyzer_succ_typings:find_depends_on(SCC, InitData),
- ?debug("Deps ~p: ~p\n",[State#state.job, DependsOn]),
- loop(updating, State#state{depends_on = DependsOn});
-loop(waiting, State) ->
- ?debug("Wait: ~p\n",[State#state.job]),
+loop(waiting, #state{mode = Mode} = State) when
+ Mode =:= 'typesig'; Mode =:= 'dataflow' ->
+ ?debug("~w: Wait: ~p\n", [self(), State#state.job]),
NewState = wait_for_success_typings(State),
loop(updating, NewState);
loop(running, #state{mode = 'compile'} = State) ->
- dialyzer_coordinator:wait_activation(),
+ request_activation(State),
?debug("Compile: ~s\n",[State#state.job]),
Result =
case start_compilation(State) of
@@ -97,51 +96,28 @@ loop(running, #state{mode = 'compile'} = State) ->
end,
report_to_coordinator(Result, State);
loop(running, #state{mode = 'warnings'} = State) ->
- dialyzer_coordinator:wait_activation(),
+ request_activation(State),
?debug("Warning: ~s\n",[State#state.job]),
Result = collect_warnings(State),
report_to_coordinator(Result, State);
loop(running, #state{mode = Mode} = State) when
Mode =:= 'typesig'; Mode =:= 'dataflow' ->
request_activation(State),
- ?debug("Run: ~p\n",[State#state.job]),
+ ?debug("~w: Run: ~p\n", [self(), State#state.job]),
NotFixpoint = do_work(State),
- ok = broadcast_done(State),
report_to_coordinator(NotFixpoint, State).
waits_more_success_typings(#state{depends_on = Depends}) ->
Depends =/= [].
-broadcast_done(#state{job = SCC, init_data = InitData,
- coordinator = Coordinator}) ->
- RequiredBy = dialyzer_succ_typings:find_required_by(SCC, InitData),
- {Callers, Unknown} =
- dialyzer_coordinator:sccs_to_pids(RequiredBy, Coordinator),
- send_done(Callers, SCC),
- continue_broadcast_done(Unknown, SCC, Coordinator).
-
-send_done(Callers, SCC) ->
- ?debug("Sending ~p: ~p\n",[SCC, Callers]),
- SendSTFun = fun(PID) -> PID ! {done, SCC} end,
- lists:foreach(SendSTFun, Callers).
-
-continue_broadcast_done([], _SCC, _Coordinator) -> ok;
-continue_broadcast_done(Rest, SCC, Coordinator) ->
- %% This time limit should be greater than the time required
- %% by the coordinator to spawn all processes.
- timer:sleep(500),
- {Callers, Unknown} = dialyzer_coordinator:sccs_to_pids(Rest, Coordinator),
- send_done(Callers, SCC),
- continue_broadcast_done(Unknown, SCC, Coordinator).
-
wait_for_success_typings(#state{depends_on = DependsOn} = State) ->
receive
- {done, SCC} ->
- ?debug("GOT ~p: ~p\n",[State#state.job, SCC]),
- State#state{depends_on = DependsOn -- [SCC]}
+ {'DOWN', Ref, process, Pid, _Info} ->
+ ?debug("~w: ~p got DOWN: ~p\n", [self(), State#state.job, Pid]),
+ State#state{depends_on = DependsOn -- [{Pid, Ref}]}
after
5000 ->
- ?debug("Still Waiting ~p: ~p\n",[State#state.job, DependsOn]),
+ ?debug("~w: Still Waiting ~p:\n ~p\n", [self(), State#state.job, DependsOn]),
State
end.
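With the explicit {done, SCC} broadcast gone (see the deleted broadcast_done/send_done just above), a worker now learns that a dependency has finished from the 'DOWN' message of the monitor it set up in init/1. The pattern in miniature, with purely illustrative processes:

    Pids = [spawn(fun() -> ok end) || _ <- [1, 2, 3]],
    Deps = [{Pid, erlang:monitor(process, Pid)} || Pid <- Pids],
    Wait = fun WaitLoop([])   -> done;
               WaitLoop(Left) ->
                 receive
                   {'DOWN', Ref, process, Pid, _Info} ->
                     WaitLoop(Left -- [{Pid, Ref}])
                 end
           end,
    done = Wait(Deps).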
@@ -155,7 +131,7 @@ do_work(#state{mode = Mode, job = Job, init_data = InitData}) ->
end.
report_to_coordinator(Result, #state{job = Job, coordinator = Coordinator}) ->
- ?debug("Done: ~p\n",[Job]),
+ ?debug("~w: Done: ~p\n",[self(), Job]),
dialyzer_coordinator:job_done(Job, Result, Coordinator).
start_compilation(#state{job = Job, init_data = InitData}) ->
@@ -169,22 +145,3 @@ continue_compilation(Label, Data) ->
collect_warnings(#state{job = Job, init_data = InitData}) ->
dialyzer_succ_typings:collect_warnings(Job, InitData).
-
-%%------------------------------------------------------------------------------
-
--type extra() :: label() | 'unused'.
-
--spec sequential(mode(), [mfa_or_funlbl()], init_data(), extra()) -> result().
-
-sequential('compile', Job, InitData, Extra) ->
- case dialyzer_analysis_callgraph:start_compilation(Job, InitData) of
- {ok, EstimatedSize, Data} ->
- {EstimatedSize, continue_compilation(Extra, Data)};
- {error, _Reason} = Error -> {0, Error}
- end;
-sequential('typesig', Job, InitData, _Extra) ->
- dialyzer_succ_typings:find_succ_types_for_scc(Job, InitData);
-sequential('dataflow', Job, InitData, _Extra) ->
- dialyzer_succ_typings:refine_one_module(Job, InitData);
-sequential('warnings', Job, InitData, _Extra) ->
- dialyzer_succ_typings:collect_warnings(Job, InitData).
diff --git a/lib/dialyzer/src/typer.erl b/lib/dialyzer/src/typer.erl
new file mode 100644
index 0000000000..bf5484e5f6
--- /dev/null
+++ b/lib/dialyzer/src/typer.erl
@@ -0,0 +1,1106 @@
+%% -*- erlang-indent-level: 2 -*-
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+
+%%-----------------------------------------------------------------------
+%% File : typer.erl
+%% Author(s) : The first version of typer was written by Bingwen He
+%% with guidance from Kostis Sagonas and Tobias Lindahl.
+%% Since June 2008 typer is maintained by Kostis Sagonas.
+%% Description : An Erlang/OTP application that shows type information
+%% for Erlang modules to the user. Additionally, it can
+%% annotate the code of files with such type information.
+%%-----------------------------------------------------------------------
+
+-module(typer).
+
+-export([start/0]).
+
+%%-----------------------------------------------------------------------
+
+-define(SHOW, show).
+-define(SHOW_EXPORTED, show_exported).
+-define(ANNOTATE, annotate).
+-define(ANNOTATE_INC_FILES, annotate_inc_files).
+
+-type mode() :: ?SHOW | ?SHOW_EXPORTED | ?ANNOTATE | ?ANNOTATE_INC_FILES.
+
+%%-----------------------------------------------------------------------
+
+-type files() :: [file:filename()].
+-type callgraph() :: dialyzer_callgraph:callgraph().
+-type codeserver() :: dialyzer_codeserver:codeserver().
+-type plt() :: dialyzer_plt:plt().
+
+-record(analysis,
+ {mode :: mode() | 'undefined',
+ macros = [] :: [{atom(), term()}],
+ includes = [] :: files(),
+ codeserver = dialyzer_codeserver:new():: codeserver(),
+ callgraph = dialyzer_callgraph:new() :: callgraph(),
+ files = [] :: files(), % absolute names
+ plt = none :: 'none' | file:filename(),
+ no_spec = false :: boolean(),
+ show_succ = false :: boolean(),
+ %% For choosing between specs or edoc @spec comments
+ edoc = false :: boolean(),
+ %% Files in 'fms' are compilable with option 'to_pp'; we keep them
+ %% as {FileName, ModuleName} in case the ModuleName is different
+ fms = [] :: [{file:filename(), module()}],
+ ex_func = map__new() :: map_dict(),
+ record = map__new() :: map_dict(),
+ func = map__new() :: map_dict(),
+ inc_func = map__new() :: map_dict(),
+ trust_plt = dialyzer_plt:new() :: plt()}).
+-type analysis() :: #analysis{}.
+
+-record(args, {files = [] :: files(),
+ files_r = [] :: files(),
+ trusted = [] :: files()}).
+-type args() :: #args{}.
+
+%%--------------------------------------------------------------------
+
+-spec start() -> no_return().
+
+start() ->
+  _ = io:setopts(standard_error, [{encoding,unicode}]),
+ {Args, Analysis} = process_cl_args(),
+ %% io:format("Args: ~p\n", [Args]),
+ %% io:format("Analysis: ~p\n", [Analysis]),
+ Timer = dialyzer_timing:init(false),
+ TrustedFiles = filter_fd(Args#args.trusted, [], fun is_erl_file/1),
+ Analysis2 = extract(Analysis, TrustedFiles),
+ All_Files = get_all_files(Args),
+ %% io:format("All_Files: ~tp\n", [All_Files]),
+ Analysis3 = Analysis2#analysis{files = All_Files},
+ Analysis4 = collect_info(Analysis3),
+ %% io:format("Final: ~p\n", [Analysis4#analysis.fms]),
+ TypeInfo = get_type_info(Analysis4),
+ dialyzer_timing:stop(Timer),
+ show_or_annotate(TypeInfo),
+ %% io:format("\nTyper analysis finished\n"),
+ erlang:halt(0).
+
+%%--------------------------------------------------------------------
+
+-spec extract(analysis(), files()) -> analysis().
+
+extract(#analysis{macros = Macros,
+ includes = Includes,
+ trust_plt = TrustPLT} = Analysis, TrustedFiles) ->
+ %% io:format("--- Extracting trusted typer_info... "),
+ Ds = [{d, Name, Value} || {Name, Value} <- Macros],
+ CodeServer = dialyzer_codeserver:new(),
+ Fun =
+ fun(File, CS) ->
+ %% We include one more dir; the one above the one we are trusting
+ %% E.g, for /home/tests/typer_ann/test.ann.erl, we should include
+ %% /home/tests/ rather than /home/tests/typer_ann/
+ AllIncludes = [filename:dirname(filename:dirname(File)) | Includes],
+ Is = [{i, Dir} || Dir <- AllIncludes],
+ CompOpts = dialyzer_utils:src_compiler_opts() ++ Is ++ Ds,
+ case dialyzer_utils:get_core_from_src(File, CompOpts) of
+ {ok, Core} ->
+ case dialyzer_utils:get_record_and_type_info(Core) of
+ {ok, RecDict} ->
+ Mod = list_to_atom(filename:basename(File, ".erl")),
+ case dialyzer_utils:get_spec_info(Mod, Core, RecDict) of
+ {ok, SpecDict, CbDict} ->
+ CS1 = dialyzer_codeserver:store_temp_records(Mod, RecDict, CS),
+ dialyzer_codeserver:store_temp_contracts(Mod, SpecDict, CbDict, CS1);
+ {error, Reason} -> compile_error([Reason])
+ end;
+ {error, Reason} -> compile_error([Reason])
+ end;
+ {error, Reason} -> compile_error(Reason)
+ end
+ end,
+ CodeServer1 = lists:foldl(Fun, CodeServer, TrustedFiles),
+ %% Process remote types
+ NewCodeServer =
+ try
+ CodeServer2 =
+ dialyzer_utils:merge_types(CodeServer1,
+ TrustPLT), % XXX change to the PLT?
+ NewExpTypes = dialyzer_codeserver:get_temp_exported_types(CodeServer1),
+ case sets:size(NewExpTypes) of 0 -> ok end,
+ CodeServer3 = dialyzer_codeserver:finalize_exported_types(NewExpTypes, CodeServer2),
+ CodeServer4 = dialyzer_utils:process_record_remote_types(CodeServer3),
+ dialyzer_contracts:process_contract_remote_types(CodeServer4)
+ catch
+ throw:{error, ErrorMsg} ->
+ compile_error(ErrorMsg)
+ end,
+ %% Create TrustPLT
+ ContractsDict = dialyzer_codeserver:get_contracts(NewCodeServer),
+ Contracts = orddict:from_list(dict:to_list(ContractsDict)),
+ NewTrustPLT = dialyzer_plt:insert_contract_list(TrustPLT, Contracts),
+ Analysis#analysis{trust_plt = NewTrustPLT}.
+
+%%--------------------------------------------------------------------
+
+-spec get_type_info(analysis()) -> analysis().
+
+get_type_info(#analysis{callgraph = CallGraph,
+ trust_plt = TrustPLT,
+ codeserver = CodeServer} = Analysis) ->
+ StrippedCallGraph = remove_external(CallGraph, TrustPLT),
+ %% io:format("--- Analyzing callgraph... "),
+ try
+ NewPlt = dialyzer_succ_typings:analyze_callgraph(StrippedCallGraph,
+ TrustPLT,
+ CodeServer),
+ Analysis#analysis{callgraph = StrippedCallGraph, trust_plt = NewPlt}
+ catch
+ error:What ->
+ fatal_error(io_lib:format("Analysis failed with message: ~tp",
+ [{What, erlang:get_stacktrace()}]));
+ throw:{dialyzer_succ_typing_error, Msg} ->
+ fatal_error(io_lib:format("Analysis failed with message: ~ts", [Msg]))
+ end.
+
+-spec remove_external(callgraph(), plt()) -> callgraph().
+
+remove_external(CallGraph, PLT) ->
+ {StrippedCG0, Ext} = dialyzer_callgraph:remove_external(CallGraph),
+ case get_external(Ext, PLT) of
+ [] -> ok;
+ Externals ->
+ msg(io_lib:format(" Unknown functions: ~tp\n", [lists:usort(Externals)])),
+ ExtTypes = rcv_ext_types(),
+ case ExtTypes of
+ [] -> ok;
+ _ -> msg(io_lib:format(" Unknown types: ~tp\n", [ExtTypes]))
+ end
+ end,
+ StrippedCG0.
+
+-spec get_external([{mfa(), mfa()}], plt()) -> [mfa()].
+
+get_external(Exts, Plt) ->
+ Fun = fun ({_From, To = {M, F, A}}, Acc) ->
+ case dialyzer_plt:contains_mfa(Plt, To) of
+ false ->
+ case erl_bif_types:is_known(M, F, A) of
+ true -> Acc;
+ false -> [To|Acc]
+ end;
+ true -> Acc
+ end
+ end,
+ lists:foldl(Fun, [], Exts).
+
+%%--------------------------------------------------------------------
+%% Showing type information or annotating files with such information.
+%%--------------------------------------------------------------------
+
+-define(TYPER_ANN_DIR, "typer_ann").
+
+-type line() :: non_neg_integer().
+-type fa() :: {atom(), arity()}.
+-type func_info() :: {line(), atom(), arity()}.
+
+-record(info, {records = maps:new() :: erl_types:type_table(),
+ functions = [] :: [func_info()],
+ types = map__new() :: map_dict(),
+ edoc = false :: boolean()}).
+-record(inc, {map = map__new() :: map_dict(), filter = [] :: files()}).
+-type inc() :: #inc{}.
+
+-spec show_or_annotate(analysis()) -> 'ok'.
+
+show_or_annotate(#analysis{mode = Mode, fms = Files} = Analysis) ->
+ case Mode of
+ ?SHOW -> show(Analysis);
+ ?SHOW_EXPORTED -> show(Analysis);
+ ?ANNOTATE ->
+ Fun = fun ({File, Module}) ->
+ Info = get_final_info(File, Module, Analysis),
+ write_typed_file(File, Info)
+ end,
+ lists:foreach(Fun, Files);
+ ?ANNOTATE_INC_FILES ->
+ IncInfo = write_and_collect_inc_info(Analysis),
+ write_inc_files(IncInfo)
+ end.
+
+write_and_collect_inc_info(Analysis) ->
+ Fun = fun ({File, Module}, Inc) ->
+ Info = get_final_info(File, Module, Analysis),
+ write_typed_file(File, Info),
+ IncFuns = get_functions(File, Analysis),
+ collect_imported_functions(IncFuns, Info#info.types, Inc)
+ end,
+ NewInc = lists:foldl(Fun, #inc{}, Analysis#analysis.fms),
+ clean_inc(NewInc).
+
+write_inc_files(Inc) ->
+ Fun =
+ fun (File) ->
+ Val = map__lookup(File, Inc#inc.map),
+ %% Val is function with its type info
+ %% in form [{{Line,F,A},Type}]
+ Functions = [Key || {Key, _} <- Val],
+ Val1 = [{{F,A},Type} || {{_Line,F,A},Type} <- Val],
+ Info = #info{types = map__from_list(Val1),
+ records = maps:new(),
+ %% Note we need to sort functions here!
+ functions = lists:keysort(1, Functions)},
+ %% io:format("Types ~tp\n", [Info#info.types]),
+ %% io:format("Functions ~tp\n", [Info#info.functions]),
+ %% io:format("Records ~tp\n", [Info#info.records]),
+ write_typed_file(File, Info)
+ end,
+ lists:foreach(Fun, dict:fetch_keys(Inc#inc.map)).
+
+show(Analysis) ->
+ Fun = fun ({File, Module}) ->
+ Info = get_final_info(File, Module, Analysis),
+ show_type_info(File, Info)
+ end,
+ lists:foreach(Fun, Analysis#analysis.fms).
+
+get_final_info(File, Module, Analysis) ->
+ Records = get_records(File, Analysis),
+ Types = get_types(Module, Analysis, Records),
+ Functions = get_functions(File, Analysis),
+ Edoc = Analysis#analysis.edoc,
+ #info{records = Records, functions = Functions, types = Types, edoc = Edoc}.
+
+collect_imported_functions(Functions, Types, Inc) ->
+  %% Coming from other sources, including:
+ %% FIXME: How to deal with yecc-generated file????
+ %% --.yrl (yecc-generated file)???
+ %% -- yeccpre.hrl (yecc-generated file)???
+ %% -- other cases
+ Fun = fun ({File, _} = Obj, I) ->
+ case is_yecc_gen(File, I) of
+ {true, NewI} -> NewI;
+ {false, NewI} ->
+ check_imported_functions(Obj, NewI, Types)
+ end
+ end,
+ lists:foldl(Fun, Inc, Functions).
+
+-spec is_yecc_gen(file:filename(), inc()) -> {boolean(), inc()}.
+
+is_yecc_gen(File, #inc{filter = Fs} = Inc) ->
+ case lists:member(File, Fs) of
+ true -> {true, Inc};
+ false ->
+ case filename:extension(File) of
+ ".yrl" ->
+ Rootname = filename:rootname(File, ".yrl"),
+ Obj = Rootname ++ ".erl",
+ case lists:member(Obj, Fs) of
+ true -> {true, Inc};
+ false ->
+ NewInc = Inc#inc{filter = [Obj|Fs]},
+ {true, NewInc}
+ end;
+ _ ->
+ case filename:basename(File) of
+ "yeccpre.hrl" -> {true, Inc};
+ _ -> {false, Inc}
+ end
+ end
+ end.
+
+check_imported_functions({File, {Line, F, A}}, Inc, Types) ->
+ IncMap = Inc#inc.map,
+ FA = {F, A},
+ Type = get_type_info(FA, Types),
+ case map__lookup(File, IncMap) of
+ none -> %% File is not added. Add it
+ Obj = {File,[{FA, {Line, Type}}]},
+ NewMap = map__insert(Obj, IncMap),
+ Inc#inc{map = NewMap};
+ Val -> %% File is already in. Check.
+ case lists:keyfind(FA, 1, Val) of
+ false ->
+ %% Function is not in; add it
+ Obj = {File, Val ++ [{FA, {Line, Type}}]},
+ NewMap = map__insert(Obj, IncMap),
+ Inc#inc{map = NewMap};
+ Type ->
+ %% Function is in and with same type
+ Inc;
+ _ ->
+ %% Function is in but with diff type
+ inc_warning(FA, File),
+ Elem = lists:keydelete(FA, 1, Val),
+ NewMap = case Elem of
+ [] -> map__remove(File, IncMap);
+ _ -> map__insert({File, Elem}, IncMap)
+ end,
+ Inc#inc{map = NewMap}
+ end
+ end.
+
+inc_warning({F, A}, File) ->
+ io:format(" ***Warning: Skip function ~tp/~p ", [F, A]),
+ io:format("in file ~tp because of inconsistent type\n", [File]).
+
+clean_inc(Inc) ->
+ Inc1 = remove_yecc_generated_file(Inc),
+ normalize_obj(Inc1).
+
+remove_yecc_generated_file(#inc{filter = Filter} = Inc) ->
+ Fun = fun (Key, #inc{map = Map} = I) ->
+ I#inc{map = map__remove(Key, Map)}
+ end,
+ lists:foldl(Fun, Inc, Filter).
+
+normalize_obj(TmpInc) ->
+ Fun = fun (Key, Val, Inc) ->
+ NewVal = [{{Line,F,A},Type} || {{F,A},{Line,Type}} <- Val],
+ map__insert({Key, NewVal}, Inc)
+ end,
+ TmpInc#inc{map = map__fold(Fun, map__new(), TmpInc#inc.map)}.
+
+get_records(File, Analysis) ->
+ map__lookup(File, Analysis#analysis.record).
+
+get_types(Module, Analysis, Records) ->
+ TypeInfoPlt = Analysis#analysis.trust_plt,
+ TypeInfo =
+ case dialyzer_plt:lookup_module(TypeInfoPlt, Module) of
+ none -> [];
+ {value, List} -> List
+ end,
+ CodeServer = Analysis#analysis.codeserver,
+ TypeInfoList =
+ case Analysis#analysis.show_succ of
+ true ->
+ [convert_type_info(I) || I <- TypeInfo];
+ false ->
+ [get_type(I, CodeServer, Records) || I <- TypeInfo]
+ end,
+ map__from_list(TypeInfoList).
+
+convert_type_info({{_M, F, A}, Range, Arg}) ->
+ {{F, A}, {Range, Arg}}.
+
+get_type({{M, F, A} = MFA, Range, Arg}, CodeServer, Records) ->
+ case dialyzer_codeserver:lookup_mfa_contract(MFA, CodeServer) of
+ error ->
+ {{F, A}, {Range, Arg}};
+ {ok, {_FileLine, Contract, _Xtra}} ->
+ Sig = erl_types:t_fun(Arg, Range),
+ case dialyzer_contracts:check_contract(Contract, Sig) of
+ ok -> {{F, A}, {contract, Contract}};
+ {error, {extra_range, _, _}} ->
+ {{F, A}, {contract, Contract}};
+ {error, {overlapping_contract, []}} ->
+ {{F, A}, {contract, Contract}};
+ {error, invalid_contract} ->
+ CString = dialyzer_contracts:contract_to_string(Contract),
+ SigString = dialyzer_utils:format_sig(Sig, Records),
+ Msg = io_lib:format("Error in contract of function ~w:~tw/~w\n"
+ "\t The contract is: " ++ CString ++ "\n" ++
+ "\t but the inferred signature is: ~ts",
+ [M, F, A, SigString]),
+ fatal_error(Msg);
+ {error, ErrorStr} when is_list(ErrorStr) -> % ErrorStr is a string()
+ Msg = io_lib:format("Error in contract of function ~w:~tw/~w: ~ts",
+ [M, F, A, ErrorStr]),
+ fatal_error(Msg)
+ end
+ end.
+
+get_functions(File, Analysis) ->
+ case Analysis#analysis.mode of
+ ?SHOW ->
+ Funcs = map__lookup(File, Analysis#analysis.func),
+ Inc_Funcs = map__lookup(File, Analysis#analysis.inc_func),
+ remove_module_info(Funcs) ++ normalize_incFuncs(Inc_Funcs);
+ ?SHOW_EXPORTED ->
+ Ex_Funcs = map__lookup(File, Analysis#analysis.ex_func),
+ remove_module_info(Ex_Funcs);
+ ?ANNOTATE ->
+ Funcs = map__lookup(File, Analysis#analysis.func),
+ remove_module_info(Funcs);
+ ?ANNOTATE_INC_FILES ->
+ map__lookup(File, Analysis#analysis.inc_func)
+ end.
+
+normalize_incFuncs(Functions) ->
+ [FunInfo || {_FileName, FunInfo} <- Functions].
+
+-spec remove_module_info([func_info()]) -> [func_info()].
+
+remove_module_info(FunInfoList) ->
+ F = fun ({_,module_info,0}) -> false;
+ ({_,module_info,1}) -> false;
+ ({Line,F,A}) when is_integer(Line), is_atom(F), is_integer(A) -> true
+ end,
+ lists:filter(F, FunInfoList).
+
+write_typed_file(File, Info) ->
+ io:format(" Processing file: ~tp\n", [File]),
+ Dir = filename:dirname(File),
+ RootName = filename:basename(filename:rootname(File)),
+ Ext = filename:extension(File),
+ TyperAnnDir = filename:join(Dir, ?TYPER_ANN_DIR),
+ TmpNewFilename = lists:concat([RootName, ".ann", Ext]),
+ NewFileName = filename:join(TyperAnnDir, TmpNewFilename),
+ case file:make_dir(TyperAnnDir) of
+ {error, Reason} ->
+ case Reason of
+ eexist -> %% TypEr dir exists; remove old typer files if they exist
+ case file:delete(NewFileName) of
+ ok -> ok;
+ {error, enoent} -> ok;
+ {error, _} ->
+ Msg = io_lib:format("Error in deleting file ~ts\n", [NewFileName]),
+ fatal_error(Msg)
+ end,
+ write_typed_file(File, Info, NewFileName);
+ enospc ->
+ Msg = io_lib:format("Not enough space in ~tp\n", [Dir]),
+ fatal_error(Msg);
+ eacces ->
+ Msg = io_lib:format("No write permission in ~tp\n", [Dir]),
+ fatal_error(Msg);
+ _ ->
+ Msg = io_lib:format("Unhandled error ~ts when writing ~tp\n",
+ [Reason, Dir]),
+ fatal_error(Msg)
+ end;
+ ok -> %% TypEr dir does NOT exist
+ write_typed_file(File, Info, NewFileName)
+ end.
+
+write_typed_file(File, Info, NewFileName) ->
+ {ok, Binary} = file:read_file(File),
+ Chars = binary_to_list(Binary),
+ write_typed_file(Chars, NewFileName, Info, 1, []),
+ io:format(" Saved as: ~tp\n", [NewFileName]).
+
+write_typed_file(Chars, File, #info{functions = []}, _LNo, _Acc) ->
+ ok = file:write_file(File, list_to_binary(Chars), [append]);
+write_typed_file([Ch|Chs] = Chars, File, Info, LineNo, Acc) ->
+ [{Line,F,A}|RestFuncs] = Info#info.functions,
+ case Line of
+ 1 -> %% This will happen only for inc files
+ ok = raw_write(F, A, Info, File, []),
+ NewInfo = Info#info{functions = RestFuncs},
+ NewAcc = [],
+ write_typed_file(Chars, File, NewInfo, Line, NewAcc);
+ _ ->
+ case Ch of
+ 10 ->
+ NewLineNo = LineNo + 1,
+ {NewInfo, NewAcc} =
+ case NewLineNo of
+ Line ->
+ ok = raw_write(F, A, Info, File, [Ch|Acc]),
+ {Info#info{functions = RestFuncs}, []};
+ _ ->
+ {Info, [Ch|Acc]}
+ end,
+ write_typed_file(Chs, File, NewInfo, NewLineNo, NewAcc);
+ _ ->
+ write_typed_file(Chs, File, Info, LineNo, [Ch|Acc])
+ end
+ end.
+
+raw_write(F, A, Info, File, Content) ->
+ TypeInfo = get_type_string(F, A, Info, file),
+ ContentList = lists:reverse(Content) ++ TypeInfo ++ "\n",
+ ContentBin = list_to_binary(ContentList),
+ file:write_file(File, ContentBin, [append]).
+
+get_type_string(F, A, Info, Mode) ->
+ Type = get_type_info({F,A}, Info#info.types),
+ TypeStr =
+ case Type of
+ {contract, C} ->
+ dialyzer_contracts:contract_to_string(C);
+ {RetType, ArgType} ->
+ Sig = erl_types:t_fun(ArgType, RetType),
+ dialyzer_utils:format_sig(Sig, Info#info.records)
+ end,
+ case Info#info.edoc of
+ false ->
+ case {Mode, Type} of
+ {file, {contract, _}} -> "";
+ _ ->
+ Prefix = lists:concat(["-spec ", erl_types:atom_to_string(F)]),
+ lists:concat([Prefix, TypeStr, "."])
+ end;
+ true ->
+ Prefix = lists:concat(["%% @spec ", F]),
+ lists:concat([Prefix, TypeStr, "."])
+ end.
+
+show_type_info(File, Info) ->
+ io:format("\n%% File: ~tp\n%% ", [File]),
+ OutputString = lists:concat(["~.", length(File)+8, "c~n"]),
+ io:fwrite(OutputString, [$-]),
+ Fun = fun ({_LineNo, F, A}) ->
+ TypeInfo = get_type_string(F, A, Info, show),
+ io:format("~ts\n", [TypeInfo])
+ end,
+ lists:foreach(Fun, Info#info.functions).
+
+get_type_info(Func, Types) ->
+ case map__lookup(Func, Types) of
+ none ->
+ %% Note: type info for every function should exist in
+ %% the result returned by Dialyzer; otherwise something
+ %% *must* be wrong with the analysis
+ Msg = io_lib:format("No type info for function: ~tp\n", [Func]),
+ fatal_error(Msg);
+ {contract, _Fun} = C -> C;
+ {_RetType, _ArgType} = RA -> RA
+ end.
+
+%%--------------------------------------------------------------------
+%% Processing of command-line options and arguments.
+%%--------------------------------------------------------------------
+
+-spec process_cl_args() -> {args(), analysis()}.
+
+process_cl_args() ->
+ ArgList = init:get_plain_arguments(),
+ %% io:format("Args is ~tp\n", [ArgList]),
+ {Args, Analysis} = analyze_args(ArgList, #args{}, #analysis{}),
+ %% if the mode has not been set, set it to the default mode (show)
+ {Args, case Analysis#analysis.mode of
+ undefined -> Analysis#analysis{mode = ?SHOW};
+ Mode when is_atom(Mode) -> Analysis
+ end}.
+
+analyze_args([], Args, Analysis) ->
+ {Args, Analysis};
+analyze_args(ArgList, Args, Analysis) ->
+ {Result, Rest} = cl(ArgList),
+ {NewArgs, NewAnalysis} = analyze_result(Result, Args, Analysis),
+ analyze_args(Rest, NewArgs, NewAnalysis).
+
+cl(["-h"|_]) -> help_message();
+cl(["--help"|_]) -> help_message();
+cl(["-v"|_]) -> version_message();
+cl(["--version"|_]) -> version_message();
+cl(["--edoc"|Opts]) -> {edoc, Opts};
+cl(["--show"|Opts]) -> {{mode, ?SHOW}, Opts};
+cl(["--show_exported"|Opts]) -> {{mode, ?SHOW_EXPORTED}, Opts};
+cl(["--show-exported"|Opts]) -> {{mode, ?SHOW_EXPORTED}, Opts};
+cl(["--show_success_typings"|Opts]) -> {show_succ, Opts};
+cl(["--show-success-typings"|Opts]) -> {show_succ, Opts};
+cl(["--annotate"|Opts]) -> {{mode, ?ANNOTATE}, Opts};
+cl(["--annotate-inc-files"|Opts]) -> {{mode, ?ANNOTATE_INC_FILES}, Opts};
+cl(["--no_spec"|Opts]) -> {no_spec, Opts};
+cl(["--plt",Plt|Opts]) -> {{plt, Plt}, Opts};
+cl(["-D"++Def|Opts]) ->
+ case Def of
+ "" -> fatal_error("no variable name specified after -D");
+ _ ->
+ DefPair = process_def_list(re:split(Def, "=", [{return, list}])),
+ {{def, DefPair}, Opts}
+ end;
+cl(["-I",Dir|Opts]) -> {{inc, Dir}, Opts};
+cl(["-I"++Dir|Opts]) ->
+ case Dir of
+ "" -> fatal_error("no include directory specified after -I");
+ _ -> {{inc, Dir}, Opts}
+ end;
+cl(["-T"|Opts]) ->
+ {Files, RestOpts} = dialyzer_cl_parse:collect_args(Opts),
+ case Files of
+ [] -> fatal_error("no file or directory specified after -T");
+ [_|_] -> {{trusted, Files}, RestOpts}
+ end;
+cl(["-r"|Opts]) ->
+ {Files, RestOpts} = dialyzer_cl_parse:collect_args(Opts),
+ {{files_r, Files}, RestOpts};
+cl(["-pa",Dir|Opts]) -> {{pa,Dir}, Opts};
+cl(["-pz",Dir|Opts]) -> {{pz,Dir}, Opts};
+cl(["-"++H|_]) -> fatal_error("unknown option -"++H);
+cl(Opts) ->
+ {Files, RestOpts} = dialyzer_cl_parse:collect_args(Opts),
+ {{files, Files}, RestOpts}.
+
+process_def_list(L) ->
+ case L of
+ [Name, Value] ->
+ {ok, Tokens, _} = erl_scan:string(Value ++ "."),
+ {ok, ErlValue} = erl_parse:parse_term(Tokens),
+ {list_to_atom(Name), ErlValue};
+ [Name] ->
+ {list_to_atom(Name), true}
+ end.
+
+%% Get information about files that the user trusts and wants to analyze
+analyze_result({files, Val}, Args, Analysis) ->
+ NewVal = Args#args.files ++ Val,
+ {Args#args{files = NewVal}, Analysis};
+analyze_result({files_r, Val}, Args, Analysis) ->
+ NewVal = Args#args.files_r ++ Val,
+ {Args#args{files_r = NewVal}, Analysis};
+analyze_result({trusted, Val}, Args, Analysis) ->
+ NewVal = Args#args.trusted ++ Val,
+ {Args#args{trusted = NewVal}, Analysis};
+analyze_result(edoc, Args, Analysis) ->
+ {Args, Analysis#analysis{edoc = true}};
+%% Get useful information for actual analysis
+analyze_result({mode, Mode}, Args, Analysis) ->
+ case Analysis#analysis.mode of
+ undefined -> {Args, Analysis#analysis{mode = Mode}};
+ OldMode -> mode_error(OldMode, Mode)
+ end;
+analyze_result({def, Val}, Args, Analysis) ->
+ NewVal = Analysis#analysis.macros ++ [Val],
+ {Args, Analysis#analysis{macros = NewVal}};
+analyze_result({inc, Val}, Args, Analysis) ->
+ NewVal = Analysis#analysis.includes ++ [Val],
+ {Args, Analysis#analysis{includes = NewVal}};
+analyze_result({plt, Plt}, Args, Analysis) ->
+ {Args, Analysis#analysis{plt = Plt}};
+analyze_result(show_succ, Args, Analysis) ->
+ {Args, Analysis#analysis{show_succ = true}};
+analyze_result(no_spec, Args, Analysis) ->
+ {Args, Analysis#analysis{no_spec = true}};
+analyze_result({pa, Dir}, Args, Analysis) ->
+ true = code:add_patha(Dir),
+ {Args, Analysis};
+analyze_result({pz, Dir}, Args, Analysis) ->
+ true = code:add_pathz(Dir),
+ {Args, Analysis}.
+
+%%--------------------------------------------------------------------
+%% File processing.
+%%--------------------------------------------------------------------
+
+-spec get_all_files(args()) -> [file:filename(),...].
+
+get_all_files(#args{files = Fs, files_r = Ds}) ->
+ case filter_fd(Fs, Ds, fun test_erl_file_exclude_ann/1) of
+ [] -> fatal_error("no file(s) to analyze");
+ AllFiles -> AllFiles
+ end.
+
+-spec test_erl_file_exclude_ann(file:filename()) -> boolean().
+
+test_erl_file_exclude_ann(File) ->
+ case is_erl_file(File) of
+ true -> %% Exclude files ending with ".ann.erl"
+ case re:run(File, "[\.]ann[\.]erl$") of
+ {match, _} -> false;
+ nomatch -> true
+ end;
+ false -> false
+ end.
+
+-spec is_erl_file(file:filename()) -> boolean().
+
+is_erl_file(File) ->
+ filename:extension(File) =:= ".erl".
+
+-type test_file_fun() :: fun((file:filename()) -> boolean()).
+
+-spec filter_fd(files(), files(), test_file_fun()) -> files().
+
+filter_fd(File_Dir, Dir_R, Fun) ->
+ All_File_1 = process_file_and_dir(File_Dir, Fun),
+ All_File_2 = process_dir_rec(Dir_R, Fun),
+ remove_dup(All_File_1 ++ All_File_2).
+
+-spec process_file_and_dir(files(), test_file_fun()) -> files().
+
+process_file_and_dir(File_Dir, TestFun) ->
+ Fun =
+ fun (Elem, Acc) ->
+ case filelib:is_regular(Elem) of
+ true -> process_file(Elem, TestFun, Acc);
+ false -> check_dir(Elem, false, Acc, TestFun)
+ end
+ end,
+ lists:foldl(Fun, [], File_Dir).
+
+-spec process_dir_rec(files(), test_file_fun()) -> files().
+
+process_dir_rec(Dirs, TestFun) ->
+ Fun = fun (Dir, Acc) -> check_dir(Dir, true, Acc, TestFun) end,
+ lists:foldl(Fun, [], Dirs).
+
+-spec check_dir(file:filename(), boolean(), files(), test_file_fun()) -> files().
+
+check_dir(Dir, Recursive, Acc, Fun) ->
+ case file:list_dir(Dir) of
+ {ok, Files} ->
+ {TmpDirs, TmpFiles} = split_dirs_and_files(Files, Dir),
+ case Recursive of
+ false ->
+ FinalFiles = process_file_and_dir(TmpFiles, Fun),
+ Acc ++ FinalFiles;
+ true ->
+ TmpAcc1 = process_file_and_dir(TmpFiles, Fun),
+ TmpAcc2 = process_dir_rec(TmpDirs, Fun),
+ Acc ++ TmpAcc1 ++ TmpAcc2
+ end;
+ {error, eacces} ->
+ fatal_error("no access permission to dir \""++Dir++"\"");
+ {error, enoent} ->
+ fatal_error("cannot access "++Dir++": No such file or directory");
+ {error, _Reason} ->
+ fatal_error("error involving a use of file:list_dir/1")
+ end.
+
+%% Same order as the input list
+-spec process_file(file:filename(), test_file_fun(), files()) -> files().
+
+process_file(File, TestFun, Acc) ->
+ case TestFun(File) of
+ true -> Acc ++ [File];
+ false -> Acc
+ end.
+
+%% Same order as the input list
+-spec split_dirs_and_files(files(), file:filename()) -> {files(), files()}.
+
+split_dirs_and_files(Elems, Dir) ->
+ Test_Fun =
+ fun (Elem, {DirAcc, FileAcc}) ->
+ File = filename:join(Dir, Elem),
+ case filelib:is_regular(File) of
+ false -> {[File|DirAcc], FileAcc};
+ true -> {DirAcc, [File|FileAcc]}
+ end
+ end,
+ {Dirs, Files} = lists:foldl(Test_Fun, {[], []}, Elems),
+ {lists:reverse(Dirs), lists:reverse(Files)}.
+
+%% Removes duplicate filenames but keeps the order of the input list
+-spec remove_dup(files()) -> files().
+
+remove_dup(Files) ->
+ Test_Dup = fun (File, Acc) ->
+ case lists:member(File, Acc) of
+ true -> Acc;
+ false -> [File|Acc]
+ end
+ end,
+ Reversed_Elems = lists:foldl(Test_Dup, [], Files),
+ lists:reverse(Reversed_Elems).
+
+%%--------------------------------------------------------------------
+%% Collect information.
+%%--------------------------------------------------------------------
+
+-type inc_file_info() :: {file:filename(), func_info()}.
+
+-record(tmpAcc, {file :: file:filename(),
+ module :: atom(),
+ funcAcc = [] :: [func_info()],
+ incFuncAcc = [] :: [inc_file_info()],
+ dialyzerObj = [] :: [{mfa(), {_, _}}]}).
+
+-spec collect_info(analysis()) -> analysis().
+
+collect_info(Analysis) ->
+ NewPlt =
+ try get_dialyzer_plt(Analysis) of
+ DialyzerPlt ->
+ dialyzer_plt:merge_plts([Analysis#analysis.trust_plt, DialyzerPlt])
+ catch
+ throw:{dialyzer_error,_Reason} ->
+ fatal_error("Dialyzer's PLT is missing or is not up-to-date; please (re)create it")
+ end,
+ NewAnalysis = lists:foldl(fun collect_one_file_info/2,
+ Analysis#analysis{trust_plt = NewPlt},
+ Analysis#analysis.files),
+ %% Process Remote Types
+ TmpCServer = NewAnalysis#analysis.codeserver,
+ NewCServer =
+ try
+ TmpCServer1 = dialyzer_utils:merge_types(TmpCServer, NewPlt),
+ NewExpTypes = dialyzer_codeserver:get_temp_exported_types(TmpCServer),
+ OldExpTypes = dialyzer_plt:get_exported_types(NewPlt),
+ MergedExpTypes = sets:union(NewExpTypes, OldExpTypes),
+ TmpCServer2 =
+ dialyzer_codeserver:finalize_exported_types(MergedExpTypes, TmpCServer1),
+ TmpCServer3 = dialyzer_utils:process_record_remote_types(TmpCServer2),
+ dialyzer_contracts:process_contract_remote_types(TmpCServer3)
+ catch
+ throw:{error, ErrorMsg} ->
+ fatal_error(ErrorMsg)
+ end,
+ NewAnalysis#analysis{codeserver = NewCServer}.
+
+collect_one_file_info(File, Analysis) ->
+ Ds = [{d,Name,Val} || {Name,Val} <- Analysis#analysis.macros],
+ %% Current directory should also be included in "Includes".
+ Includes = [filename:dirname(File)|Analysis#analysis.includes],
+ Is = [{i,Dir} || Dir <- Includes],
+ Options = dialyzer_utils:src_compiler_opts() ++ Is ++ Ds,
+ case dialyzer_utils:get_core_from_src(File, Options) of
+ {error, Reason} ->
+ %% io:format("File=~tp\n,Options=~p\n,Error=~p\n", [File,Options,Reason]),
+ compile_error(Reason);
+ {ok, Core} ->
+ case dialyzer_utils:get_record_and_type_info(Core) of
+ {error, Reason} -> compile_error([Reason]);
+ {ok, Records} ->
+ Mod = cerl:concrete(cerl:module_name(Core)),
+ case dialyzer_utils:get_spec_info(Mod, Core, Records) of
+ {error, Reason} -> compile_error([Reason]);
+ {ok, SpecInfo, CbInfo} ->
+ ExpTypes = get_exported_types_from_core(Core),
+ analyze_core_tree(Core, Records, SpecInfo, CbInfo,
+ ExpTypes, Analysis, File)
+ end
+ end
+ end.
+
+analyze_core_tree(Core, Records, SpecInfo, CbInfo, ExpTypes, Analysis, File) ->
+ Module = cerl:concrete(cerl:module_name(Core)),
+ TmpTree = cerl:from_records(Core),
+ CS1 = Analysis#analysis.codeserver,
+ NextLabel = dialyzer_codeserver:get_next_core_label(CS1),
+ {Tree, NewLabel} = cerl_trees:label(TmpTree, NextLabel),
+ CS2 = dialyzer_codeserver:insert(Module, Tree, CS1),
+ CS3 = dialyzer_codeserver:set_next_core_label(NewLabel, CS2),
+ CS4 = dialyzer_codeserver:store_temp_records(Module, Records, CS3),
+ CS5 =
+ case Analysis#analysis.no_spec of
+ true -> CS4;
+ false ->
+ dialyzer_codeserver:store_temp_contracts(Module, SpecInfo, CbInfo, CS4)
+ end,
+ OldExpTypes = dialyzer_codeserver:get_temp_exported_types(CS5),
+ MergedExpTypes = sets:union(ExpTypes, OldExpTypes),
+ CS6 = dialyzer_codeserver:insert_temp_exported_types(MergedExpTypes, CS5),
+ Ex_Funcs = [{0,F,A} || {_,_,{F,A}} <- cerl:module_exports(Tree)],
+ CG = Analysis#analysis.callgraph,
+ {V, E} = dialyzer_callgraph:scan_core_tree(Tree, CG),
+ dialyzer_callgraph:add_edges(E, V, CG),
+ Fun = fun analyze_one_function/2,
+ All_Defs = cerl:module_defs(Tree),
+ Acc = lists:foldl(Fun, #tmpAcc{file = File, module = Module}, All_Defs),
+ Exported_FuncMap = map__insert({File, Ex_Funcs}, Analysis#analysis.ex_func),
+ %% we must sort all functions that originate from this file
+ %% in *numerical order* of their line numbers
+ Sorted_Functions = lists:keysort(1, Acc#tmpAcc.funcAcc),
+ FuncMap = map__insert({File, Sorted_Functions}, Analysis#analysis.func),
+ %% we do not need to sort functions which are imported from included files
+ IncFuncMap = map__insert({File, Acc#tmpAcc.incFuncAcc},
+ Analysis#analysis.inc_func),
+ FMs = Analysis#analysis.fms ++ [{File, Module}],
+ RecordMap = map__insert({File, Records}, Analysis#analysis.record),
+ Analysis#analysis{fms = FMs,
+ callgraph = CG,
+ codeserver = CS6,
+ ex_func = Exported_FuncMap,
+ inc_func = IncFuncMap,
+ record = RecordMap,
+ func = FuncMap}.
+
+analyze_one_function({Var, FunBody} = Function, Acc) ->
+ F = cerl:fname_id(Var),
+ A = cerl:fname_arity(Var),
+ TmpDialyzerObj = {{Acc#tmpAcc.module, F, A}, Function},
+ NewDialyzerObj = Acc#tmpAcc.dialyzerObj ++ [TmpDialyzerObj],
+ Anno = cerl:get_ann(FunBody),
+ LineNo = get_line(Anno),
+ FileName = get_file(Anno),
+ BaseName = filename:basename(FileName),
+ FuncInfo = {LineNo, F, A},
+ OriginalName = Acc#tmpAcc.file,
+ {FuncAcc, IncFuncAcc} =
+ case (FileName =:= OriginalName) orelse (BaseName =:= OriginalName) of
+ true -> %% Coming from original file
+ %% io:format("Added function ~tp\n", [{LineNo, F, A}]),
+ {Acc#tmpAcc.funcAcc ++ [FuncInfo], Acc#tmpAcc.incFuncAcc};
+ false ->
+ %% Coming from other sources, including:
+ %% -- .yrl (yecc-generated file)
+ %% -- yeccpre.hrl (yecc-generated file)
+ %% -- other cases
+ {Acc#tmpAcc.funcAcc, Acc#tmpAcc.incFuncAcc ++ [{FileName, FuncInfo}]}
+ end,
+ Acc#tmpAcc{funcAcc = FuncAcc,
+ incFuncAcc = IncFuncAcc,
+ dialyzerObj = NewDialyzerObj}.
+
+get_line([Line|_]) when is_integer(Line) -> Line;
+get_line([_|T]) -> get_line(T);
+get_line([]) -> none.
+
+get_file([{file,File}|_]) -> File;
+get_file([_|T]) -> get_file(T);
+get_file([]) -> "no_file". % should not happen
+
+-spec get_dialyzer_plt(analysis()) -> plt().
+
+get_dialyzer_plt(#analysis{plt = PltFile0}) ->
+ PltFile =
+ case PltFile0 =:= none of
+ true -> dialyzer_plt:get_default_plt();
+ false -> PltFile0
+ end,
+ dialyzer_plt:from_file(PltFile).
+
+%% Exported Types
+
+get_exported_types_from_core(Core) ->
+ Attrs = cerl:module_attrs(Core),
+ ExpTypes1 = [cerl:concrete(L2) || {L1, L2} <- Attrs,
+ cerl:is_literal(L1),
+ cerl:is_literal(L2),
+ cerl:concrete(L1) =:= 'export_type'],
+ ExpTypes2 = lists:flatten(ExpTypes1),
+ M = cerl:atom_val(cerl:module_name(Core)),
+ sets:from_list([{M, F, A} || {F, A} <- ExpTypes2]).
+
+%%--------------------------------------------------------------------
+%% Utilities for error reporting.
+%%--------------------------------------------------------------------
+
+-spec fatal_error(string()) -> no_return().
+
+fatal_error(Slogan) ->
+ msg(io_lib:format("typer: ~ts\n", [Slogan])),
+ erlang:halt(1).
+
+-spec mode_error(mode(), mode()) -> no_return().
+
+mode_error(OldMode, NewMode) ->
+ Msg = io_lib:format("Mode was previously set to '~s'; "
+ "can not set it to '~s' now",
+ [OldMode, NewMode]),
+ fatal_error(Msg).
+
+-spec compile_error([string()]) -> no_return().
+
+compile_error(Reason) ->
+ JoinedString = lists:flatten([X ++ "\n" || X <- Reason]),
+ Msg = "Analysis failed with error report:\n" ++ JoinedString,
+ fatal_error(Msg).
+
+-spec msg(string()) -> 'ok'.
+
+msg(Msg) ->
+ io:format(standard_error, "~ts", [Msg]).
+
+%%--------------------------------------------------------------------
+%% Version and help messages.
+%%--------------------------------------------------------------------
+
+-spec version_message() -> no_return().
+
+version_message() ->
+ io:format("TypEr version "++?VSN++"\n"),
+ erlang:halt(0).
+
+-spec help_message() -> no_return().
+
+help_message() ->
+ S = <<" Usage: typer [--help] [--version] [--plt PLT] [--edoc]
+ [--show | --show-exported | --annotate | --annotate-inc-files]
+ [-Ddefine]* [-I include_dir]* [-pa dir]* [-pz dir]*
+ [-T application]* [-r] file*
+
+ Options:
+ -r dir*
+ search directories recursively for .erl files below them
+ --show
+ Prints type specifications for all functions on stdout.
+ (this is the default behaviour; this option is not really needed)
+ --show-exported (or --show_exported)
+ Same as --show, but prints specifications for exported functions only
+ Specs are displayed sorted alphabetically by the function's name
+ --annotate
+ Annotates the specified files with type specifications
+ --annotate-inc-files
+ Same as --annotate but annotates all -include() files as well as
+ all .erl files (use this option with caution - has not been tested much)
+ --edoc
+ Prints type information as Edoc @spec comments, not as type specs
+ --plt PLT
+ Use the specified dialyzer PLT file rather than the default one
+ -T file*
+ The specified file(s) already contain type specifications and these
+ are to be trusted in order to print specs for the rest of the files
+ (Multiple files or dirs, separated by spaces, can be specified.)
+ -Dname (or -Dname=value)
+ pass the defined name(s) to TypEr
+ (The syntax of defines is the same as that used by \"erlc\".)
+ -I include_dir
+ pass the include_dir to TypEr
+ (The syntax of includes is the same as that used by \"erlc\".)
+ -pa dir
+ -pz dir
+ Set code path options to TypEr
+ (This is useful for files that use parse transforms.)
+ --version (or -v)
+ prints the TypEr version and exits
+ --help (or -h)
+ prints this message and exits
+
+ Note:
+ * denotes that multiple occurrences of these options are possible.
+">>,
+ io:put_chars(S),
+ erlang:halt(0).
+
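The usage text above doubles as the reference for TypEr's command-line options. As an illustration only (file and directory names are hypothetical), a run that trusts an already-specced module and recursively annotates a source tree might look like:

    typer --annotate -I include -pa ebin -T src/trusted.erl -r src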
+%%--------------------------------------------------------------------
+%% Handle messages.
+%%--------------------------------------------------------------------
+
+rcv_ext_types() ->
+ Self = self(),
+ Self ! {Self, done},
+ rcv_ext_types(Self, []).
+
+rcv_ext_types(Self, ExtTypes) ->
+ receive
+ {Self, ext_types, ExtType} ->
+ rcv_ext_types(Self, [ExtType|ExtTypes]);
+ {Self, done} ->
+ lists:usort(ExtTypes)
+ end.
+
+%%--------------------------------------------------------------------
+%% A convenient abstraction of a Key-Value mapping data structure
+%% specialized for the uses in this module
+%%--------------------------------------------------------------------
+
+-type map_dict() :: dict:dict().
+
+-spec map__new() -> map_dict().
+map__new() ->
+ dict:new().
+
+-spec map__insert({term(), term()}, map_dict()) -> map_dict().
+map__insert(Object, Map) ->
+ {Key, Value} = Object,
+ dict:store(Key, Value, Map).
+
+-spec map__lookup(term(), map_dict()) -> term().
+map__lookup(Key, Map) ->
+ try dict:fetch(Key, Map) catch error:_ -> none end.
+
+-spec map__from_list([{fa(), term()}]) -> map_dict().
+map__from_list(List) ->
+ dict:from_list(List).
+
+-spec map__remove(term(), map_dict()) -> map_dict().
+map__remove(Key, Dict) ->
+ dict:erase(Key, Dict).
+
+-spec map__fold(fun((term(), term(), term()) -> map_dict()), map_dict(), map_dict()) -> map_dict().
+map__fold(Fun, Acc0, Dict) ->
+ dict:fold(Fun, Acc0, Dict).
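The map__* functions above are a thin, module-local veneer over dict; the rest of typer.erl only needs insert/lookup/remove/fold over per-file data. A minimal sketch of that usage pattern (illustrative only, with made-up keys and values; not part of the patch):

    demo_map_usage() ->
        M0 = map__new(),
        %% store a {File, FunctionInfo} pair, as analyze_core_tree/7 does
        M1 = map__insert({"foo.erl", [{3, bar, 0}]}, M0),
        [{3, bar, 0}] = map__lookup("foo.erl", M1),
        none = map__lookup("missing.erl", M1),   %% absent keys come back as 'none'
        M2 = map__remove("foo.erl", M1),
        none = map__lookup("foo.erl", M2),
        ok.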
diff --git a/lib/dialyzer/test/Makefile b/lib/dialyzer/test/Makefile
index f43e04dd59..43c8a61ce1 100644
--- a/lib/dialyzer/test/Makefile
+++ b/lib/dialyzer/test/Makefile
@@ -12,7 +12,9 @@ AUXILIARY_FILES=\
dialyzer_common.erl\
file_utils.erl\
dialyzer_SUITE.erl\
- plt_SUITE.erl
+ abstract_SUITE.erl\
+ plt_SUITE.erl\
+ typer_SUITE.erl
# ----------------------------------------------------
# Release directory specification
diff --git a/lib/dialyzer/test/abstract_SUITE.erl b/lib/dialyzer/test/abstract_SUITE.erl
new file mode 100644
index 0000000000..37fb39cf27
--- /dev/null
+++ b/lib/dialyzer/test/abstract_SUITE.erl
@@ -0,0 +1,107 @@
+%% This suite contains cases that cannot be written
+%% in Erlang itself and must be done via the abstract
+%% format.
+
+-module(abstract_SUITE).
+
+-include_lib("common_test/include/ct.hrl").
+-include("dialyzer_test_constants.hrl").
+
+-export([suite/0, all/0, init_per_suite/0, init_per_suite/1, end_per_suite/1]).
+-export([generated_case/1]).
+
+suite() ->
+ [{timetrap, {minutes, 1}}].
+all() ->
+ [generated_case].
+
+init_per_suite() ->
+ [{timetrap, ?plt_timeout}].
+init_per_suite(Config) ->
+ OutDir = ?config(priv_dir, Config),
+ case dialyzer_common:check_plt(OutDir) of
+ fail -> {skip, "Plt creation/check failed."};
+ ok -> [{dialyzer_options, []}|Config]
+ end.
+
+end_per_suite(_Config) ->
+ %% This function is required since init_per_suite/1 exists.
+ ok.
+
+generated_case(Config) when is_list(Config) ->
+ %% Equivalent to:
+ %%
+ %% -module(foo).
+ %% -export([bar/0]).
+ %% bar() ->
+ %% Arg = sample,
+ %% case Arg of
+ %% #{} -> map;
+ %% _ -> Arg:fn()
+ %% end.
+ %%
+ %% Except the case statement and its clauses are marked as autogenerated.
+ [] =
+ test([{attribute,1,module,foo},
+ {attribute,2,export,[{bar,0}]},
+ {function,3,bar,0,
+ [{clause,3,[],[],
+ [{match,4,{var,4,'Arg'},{atom,4,sample}},
+ {'case',[{location,5},{generated,true}],{var,5,'Arg'},
+ [{clause,[{location,6},{generated,true}],[{map,6,[]}],[],
+ [{atom,6,map}]},
+ {clause,[{location,7},{generated,true}],[{var,7,'_'}],[],
+ [{call,7,{remote,7,{var,7,'Arg'},{atom,7,fn}},[]}]}]}]}]}],
+ Config, [], []),
+ %% With the first clause not auto-generated
+ [{warn_matching,{_,6},_}] =
+ test([{attribute,1,module,foo},
+ {attribute,2,export,[{bar,0}]},
+ {function,3,bar,0,
+ [{clause,3,[],[],
+ [{match,4,{var,4,'Arg'},{atom,4,sample}},
+ {'case',[{location,5},{generated,true}],{var,5,'Arg'},
+ [{clause,6,[{map,6,[]}],[],
+ [{atom,6,map}]},
+ {clause,[{location,7},{generated,true}],[{var,7,'_'}],[],
+ [{call,7,{remote,7,{var,7,'Arg'},{atom,7,fn}},[]}]}]}]}]}],
+ Config, [], []),
+ %% With Arg set to [] so neither clause matches
+ [{warn_return_no_exit,{_,3},_},
+ {warn_matching,{_,6},_},
+ {warn_failing_call,{_,7},_}] =
+ test([{attribute,1,module,foo},
+ {attribute,2,export,[{bar,0}]},
+ {function,3,bar,0,
+ [{clause,3,[],[],
+ [{match,4,{var,4,'Arg'},{nil,4}},
+ {'case',[{location,5},{generated,true}],{var,5,'Arg'},
+ [{clause,[{location,6},{generated,true}],[{map,6,[]}],[],
+ [{atom,6,map}]},
+ {clause,[{location,7},{generated,true}],[{var,7,'_'}],[],
+ [{call,7,{remote,7,{var,7,'Arg'},{atom,7,fn}},[]}]}]}]}]}],
+ Config, [], []),
+ ok.
+
+test(Prog0, Config, COpts, DOpts) ->
+ Prog = erl_parse:anno_from_term(Prog0),
+ {ok, BeamFile} = compile(Config, Prog, COpts),
+ run_dialyzer(Config, succ_typings, [BeamFile], DOpts).
+
+compile(Config, Prog, CompileOpts) ->
+ OutDir = ?config(priv_dir, Config),
+ Opts = [{outdir, OutDir}, debug_info, return_errors | CompileOpts],
+ {ok, Module, Source} = compile:forms(Prog, Opts),
+ BeamFile = filename:join([OutDir, lists:concat([Module, ".beam"])]),
+ ok = file:write_file(BeamFile, Source),
+ {ok, BeamFile}.
+
+run_dialyzer(Config, Analysis, Files, Opts) ->
+ OutDir = ?config(priv_dir, Config),
+ PltFilename = dialyzer_common:plt_file(OutDir),
+ dialyzer:run([{analysis_type, Analysis},
+ {files, Files},
+ {init_plt, PltFilename},
+ {check_plt, false},
+ {from, byte_code} |
+ Opts]).
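For reference, the helpers above boil down to a small round-trip: convert the hand-written forms with erl_parse:anno_from_term/1, compile them to a BEAM with debug info, write the binary out, and point dialyzer:run/1 at it. A condensed sketch of the same flow outside of common_test (the output directory and PLT path are hypothetical):

    check_forms(Forms0, PltFile, OutDir) ->
        Forms = erl_parse:anno_from_term(Forms0),
        {ok, Mod, Bin} = compile:forms(Forms, [debug_info, return_errors]),
        Beam = filename:join(OutDir, atom_to_list(Mod) ++ ".beam"),
        ok = file:write_file(Beam, Bin),
        %% returns the list of raw warnings, as matched on in generated_case/1
        dialyzer:run([{analysis_type, succ_typings},
                      {files, [Beam]},
                      {init_plt, PltFile},
                      {check_plt, false},
                      {from, byte_code}]).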
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options b/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options
index 50991c9bc5..365b4798c5 100644
--- a/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options
+++ b/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options
@@ -1 +1,2 @@
{dialyzer_options, []}.
+{time_limit, 5}.
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/results/callbacks_and_specs b/lib/dialyzer/test/behaviour_SUITE_data/results/callbacks_and_specs
index 33d135048e..843d26ee59 100644
--- a/lib/dialyzer/test/behaviour_SUITE_data/results/callbacks_and_specs
+++ b/lib/dialyzer/test/behaviour_SUITE_data/results/callbacks_and_specs
@@ -1,5 +1,5 @@
-my_callbacks_wrong.erl:26: The return type #state{parent::'undefined' | pid(),status::'closed' | 'init' | 'open',subscribe::[{pid(),integer()}],counter::integer()} in the specification of callback_init/1 is not a subtype of {'ok',_}, which is the expected return type for the callback of my_behaviour behaviour
-my_callbacks_wrong.erl:28: The inferred return type of callback_init/1 (#state{parent::'undefined' | pid(),status::'init',subscribe::[],counter::1}) has nothing in common with {'ok',_}, which is the expected return type for the callback of my_behaviour behaviour
-my_callbacks_wrong.erl:30: The return type {'reply',#state{parent::'undefined' | pid(),status::'closed' | 'init' | 'open',subscribe::[{pid(),integer()}],counter::integer()}} in the specification of callback_cast/3 is not a subtype of {'noreply',_}, which is the expected return type for the callback of my_behaviour behaviour
+my_callbacks_wrong.erl:26: The return type #state{parent::pid(),status::'closed' | 'init' | 'open',subscribe::[{pid(),integer()}],counter::integer()} in the specification of callback_init/1 is not a subtype of {'ok',_}, which is the expected return type for the callback of the my_behaviour behaviour
+my_callbacks_wrong.erl:28: The inferred return type of callback_init/1 (#state{parent::pid(),status::'init',subscribe::[],counter::1}) has nothing in common with {'ok',_}, which is the expected return type for the callback of the my_behaviour behaviour
+my_callbacks_wrong.erl:30: The return type {'reply',#state{parent::pid(),status::'closed' | 'init' | 'open',subscribe::[{pid(),integer()}],counter::integer()}} in the specification of callback_cast/3 is not a subtype of {'noreply',_}, which is the expected return type for the callback of the my_behaviour behaviour
my_callbacks_wrong.erl:39: The specified type for the 2nd argument of callback_call/3 (atom()) is not a supertype of pid(), which is expected type for this argument in the callback of the my_behaviour behaviour
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/results/gen_event_incorrect_return b/lib/dialyzer/test/behaviour_SUITE_data/results/gen_event_incorrect_return
index e646eea383..c3ebee05da 100644
--- a/lib/dialyzer/test/behaviour_SUITE_data/results/gen_event_incorrect_return
+++ b/lib/dialyzer/test/behaviour_SUITE_data/results/gen_event_incorrect_return
@@ -1,2 +1,2 @@
-gen_event_incorrect_return.erl:16: The inferred return type of init/1 ('error') has nothing in common with {'error',_} | {'ok',_} | {'ok',_,'hibernate'}, which is the expected return type for the callback of gen_event behaviour
+gen_event_incorrect_return.erl:16: The inferred return type of init/1 ('error') has nothing in common with {'error',_} | {'ok',_} | {'ok',_,'hibernate'}, which is the expected return type for the callback of the gen_event behaviour
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/results/gen_server_incorrect_args b/lib/dialyzer/test/behaviour_SUITE_data/results/gen_server_incorrect_args
index 3e98da785f..1eb8cd455b 100644
--- a/lib/dialyzer/test/behaviour_SUITE_data/results/gen_server_incorrect_args
+++ b/lib/dialyzer/test/behaviour_SUITE_data/results/gen_server_incorrect_args
@@ -1,8 +1,5 @@
-gen_server_incorrect_args.erl:3: Undefined callback function code_change/3 (behaviour 'gen_server')
-gen_server_incorrect_args.erl:3: Undefined callback function handle_cast/2 (behaviour 'gen_server')
-gen_server_incorrect_args.erl:3: Undefined callback function handle_info/2 (behaviour 'gen_server')
-gen_server_incorrect_args.erl:3: Undefined callback function init/1 (behaviour 'gen_server')
-gen_server_incorrect_args.erl:3: Undefined callback function terminate/2 (behaviour 'gen_server')
-gen_server_incorrect_args.erl:7: The inferred return type of handle_call/3 ({'no'} | {'ok'}) has nothing in common with {'noreply',_} | {'noreply',_,'hibernate' | 'infinity' | non_neg_integer()} | {'reply',_,_} | {'stop',_,_} | {'reply',_,_,'hibernate' | 'infinity' | non_neg_integer()} | {'stop',_,_,_}, which is the expected return type for the callback of gen_server behaviour
+gen_server_incorrect_args.erl:3: Undefined callback function handle_cast/2 (behaviour gen_server)
+gen_server_incorrect_args.erl:3: Undefined callback function init/1 (behaviour gen_server)
+gen_server_incorrect_args.erl:7: The inferred return type of handle_call/3 ({'no'} | {'ok'}) has nothing in common with {'noreply',_} | {'noreply',_,'hibernate' | 'infinity' | non_neg_integer()} | {'reply',_,_} | {'stop',_,_} | {'reply',_,_,'hibernate' | 'infinity' | non_neg_integer()} | {'stop',_,_,_}, which is the expected return type for the callback of the gen_server behaviour
gen_server_incorrect_args.erl:7: The inferred type for the 2nd argument of handle_call/3 ('boo' | 'foo') is not a supertype of {pid(),_}, which is expected type for this argument in the callback of the gen_server behaviour
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/results/gen_server_missing_callbacks b/lib/dialyzer/test/behaviour_SUITE_data/results/gen_server_missing_callbacks
index 5e0ed5fd27..188f7822fa 100644
--- a/lib/dialyzer/test/behaviour_SUITE_data/results/gen_server_missing_callbacks
+++ b/lib/dialyzer/test/behaviour_SUITE_data/results/gen_server_missing_callbacks
@@ -1,3 +1,2 @@
-gen_server_missing_callbacks.erl:3: Undefined callback function handle_cast/2 (behaviour 'gen_server')
-gen_server_missing_callbacks.erl:3: Undefined callback function handle_info/2 (behaviour 'gen_server')
+gen_server_missing_callbacks.erl:3: Undefined callback function handle_cast/2 (behaviour gen_server)
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/results/sample_behaviour b/lib/dialyzer/test/behaviour_SUITE_data/results/sample_behaviour
index 8cecabccaa..68adf4cfa0 100644
--- a/lib/dialyzer/test/behaviour_SUITE_data/results/sample_behaviour
+++ b/lib/dialyzer/test/behaviour_SUITE_data/results/sample_behaviour
@@ -1,9 +1,9 @@
-sample_callback_wrong.erl:16: The inferred return type of sample_callback_2/0 (42) has nothing in common with atom(), which is the expected return type for the callback of sample_behaviour behaviour
-sample_callback_wrong.erl:17: The inferred return type of sample_callback_3/0 ('fair') has nothing in common with 'fail' | {'ok',1..255}, which is the expected return type for the callback of sample_behaviour behaviour
-sample_callback_wrong.erl:18: The inferred return type of sample_callback_4/1 ('fail') has nothing in common with 'ok', which is the expected return type for the callback of sample_behaviour behaviour
-sample_callback_wrong.erl:20: The inferred return type of sample_callback_5/1 (string()) has nothing in common with 'fail' | 'ok', which is the expected return type for the callback of sample_behaviour behaviour
+sample_callback_wrong.erl:16: The inferred return type of sample_callback_2/0 (42) has nothing in common with atom(), which is the expected return type for the callback of the sample_behaviour behaviour
+sample_callback_wrong.erl:17: The inferred return type of sample_callback_3/0 ('fair') has nothing in common with 'fail' | {'ok',1..255}, which is the expected return type for the callback of the sample_behaviour behaviour
+sample_callback_wrong.erl:18: The inferred return type of sample_callback_4/1 ('fail') has nothing in common with 'ok', which is the expected return type for the callback of the sample_behaviour behaviour
+sample_callback_wrong.erl:20: The inferred return type of sample_callback_5/1 (string()) has nothing in common with 'fail' | 'ok', which is the expected return type for the callback of the sample_behaviour behaviour
sample_callback_wrong.erl:20: The inferred type for the 1st argument of sample_callback_5/1 (atom()) is not a supertype of 1..255, which is expected type for this argument in the callback of the sample_behaviour behaviour
-sample_callback_wrong.erl:22: The inferred return type of sample_callback_6/3 ({'okk',number()}) has nothing in common with 'fail' | {'ok',1..255}, which is the expected return type for the callback of sample_behaviour behaviour
+sample_callback_wrong.erl:22: The inferred return type of sample_callback_6/3 ({'okk',number()}) has nothing in common with 'fail' | {'ok',1..255}, which is the expected return type for the callback of the sample_behaviour behaviour
sample_callback_wrong.erl:22: The inferred type for the 3rd argument of sample_callback_6/3 (atom()) is not a supertype of string(), which is expected type for this argument in the callback of the sample_behaviour behaviour
-sample_callback_wrong.erl:4: Undefined callback function sample_callback_1/0 (behaviour 'sample_behaviour')
+sample_callback_wrong.erl:4: Undefined callback function sample_callback_1/0 (behaviour sample_behaviour)
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/results/sample_behaviour_old b/lib/dialyzer/test/behaviour_SUITE_data/results/sample_behaviour_old
index f0181bb59c..67aa872984 100644
--- a/lib/dialyzer/test/behaviour_SUITE_data/results/sample_behaviour_old
+++ b/lib/dialyzer/test/behaviour_SUITE_data/results/sample_behaviour_old
@@ -1,4 +1,4 @@
incorrect_args_callback.erl:12: The inferred type for the 2nd argument of bar/2 ('yes') is not a supertype of [any()], which is expected type for this argument in the callback of the correct_behaviour behaviour
-incorrect_return_callback.erl:9: The inferred return type of foo/0 ('error') has nothing in common with 'no' | 'yes', which is the expected return type for the callback of correct_behaviour behaviour
-missing_callback.erl:5: Undefined callback function foo/0 (behaviour 'correct_behaviour')
+incorrect_return_callback.erl:9: The inferred return type of foo/0 ('error') has nothing in common with 'no' | 'yes', which is the expected return type for the callback of the correct_behaviour behaviour
+missing_callback.erl:5: Undefined callback function foo/0 (behaviour correct_behaviour)
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/results/supervisor_incorrect_return b/lib/dialyzer/test/behaviour_SUITE_data/results/supervisor_incorrect_return
index 4103a2d8b4..5a063a12b8 100644
--- a/lib/dialyzer/test/behaviour_SUITE_data/results/supervisor_incorrect_return
+++ b/lib/dialyzer/test/behaviour_SUITE_data/results/supervisor_incorrect_return
@@ -1,2 +1,2 @@
-supervisor_incorrect_return.erl:14: The inferred return type of init/1 ({'ok',{{'one_against_one',0,1},[{_,_,_,_,_,_},...]}}) has nothing in common with 'ignore' | {'ok',{{'one_for_all',non_neg_integer(),pos_integer()} | {'one_for_one',non_neg_integer(),pos_integer()} | {'rest_for_one',non_neg_integer(),pos_integer()} | {'simple_one_for_one',non_neg_integer(),pos_integer()} | #{},[{_,{atom() | tuple(),atom(),'undefined' | [any()]},'permanent' | 'temporary' | 'transient','brutal_kill' | 'infinity' | non_neg_integer(),'supervisor' | 'worker','dynamic' | [atom() | tuple()]} | #{}]}}, which is the expected return type for the callback of supervisor behaviour
+supervisor_incorrect_return.erl:14: The inferred return type of init/1 ({'ok',{{'one_against_one',0,1},[{_,_,_,_,_,_},...]}}) has nothing in common with 'ignore' | {'ok',{{'one_for_all',non_neg_integer(),pos_integer()} | {'one_for_one',non_neg_integer(),pos_integer()} | {'rest_for_one',non_neg_integer(),pos_integer()} | {'simple_one_for_one',non_neg_integer(),pos_integer()} | #{'intensity'=>non_neg_integer(), 'period'=>pos_integer(), 'strategy'=>'one_for_all' | 'one_for_one' | 'rest_for_one' | 'simple_one_for_one'},[{_,{atom(),atom(),'undefined' | [any()]},'permanent' | 'temporary' | 'transient','brutal_kill' | 'infinity' | non_neg_integer(),'supervisor' | 'worker','dynamic' | [atom()]} | #{'id':=_, 'start':={atom(),atom(),'undefined' | [any()]}, 'modules'=>'dynamic' | [atom()], 'restart'=>'permanent' | 'temporary' | 'transient', 'shutdown'=>'brutal_kill' | 'infinity' | non_neg_integer(), 'type'=>'supervisor' | 'worker'}]}}, which is the expected return type for the callback of the supervisor behaviour
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/results/vars_in_beh_spec b/lib/dialyzer/test/behaviour_SUITE_data/results/vars_in_beh_spec
index 5284e412f0..f313c24420 100644
--- a/lib/dialyzer/test/behaviour_SUITE_data/results/vars_in_beh_spec
+++ b/lib/dialyzer/test/behaviour_SUITE_data/results/vars_in_beh_spec
@@ -1,6 +1,4 @@
-vars_in_beh_spec.erl:3: Undefined callback function handle_call/3 (behaviour 'gen_server')
-vars_in_beh_spec.erl:3: Undefined callback function handle_cast/2 (behaviour 'gen_server')
-vars_in_beh_spec.erl:3: Undefined callback function handle_info/2 (behaviour 'gen_server')
-vars_in_beh_spec.erl:3: Undefined callback function init/1 (behaviour 'gen_server')
-vars_in_beh_spec.erl:3: Undefined callback function terminate/2 (behaviour 'gen_server')
+vars_in_beh_spec.erl:3: Undefined callback function handle_call/3 (behaviour gen_server)
+vars_in_beh_spec.erl:3: Undefined callback function handle_cast/2 (behaviour gen_server)
+vars_in_beh_spec.erl:3: Undefined callback function init/1 (behaviour gen_server)
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/src/proper/compile_flags.hrl b/lib/dialyzer/test/behaviour_SUITE_data/src/proper/compile_flags.hrl
new file mode 100644
index 0000000000..e5ee44ace1
--- /dev/null
+++ b/lib/dialyzer/test/behaviour_SUITE_data/src/proper/compile_flags.hrl
@@ -0,0 +1,2 @@
+-define(AT_LEAST_19, 1).
+-define(AT_LEAST_17, 1).
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_common.hrl b/lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_common.hrl
new file mode 100644
index 0000000000..c10626c5cc
--- /dev/null
+++ b/lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_common.hrl
@@ -0,0 +1,55 @@
+%%% Copyright 2010-2013 Manolis Papadakis <[email protected]>,
+%%% Eirini Arvaniti <[email protected]>
+%%% and Kostis Sagonas <[email protected]>
+%%%
+%%% This file is part of PropEr.
+%%%
+%%% PropEr is free software: you can redistribute it and/or modify
+%%% it under the terms of the GNU General Public License as published by
+%%% the Free Software Foundation, either version 3 of the License, or
+%%% (at your option) any later version.
+%%%
+%%% PropEr is distributed in the hope that it will be useful,
+%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
+%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+%%% GNU General Public License for more details.
+%%%
+%%% You should have received a copy of the GNU General Public License
+%%% along with PropEr. If not, see <http://www.gnu.org/licenses/>.
+
+%%% @copyright 2010-2013 Manolis Papadakis, Eirini Arvaniti and Kostis Sagonas
+%%% @version {@version}
+%%% @author Manolis Papadakis
+%%% @doc Common parts of user and internal header files
+
+
+%%------------------------------------------------------------------------------
+%% Test generation macros
+%%------------------------------------------------------------------------------
+
+-define(FORALL(X,RawType,Prop), proper:forall(RawType,fun(X) -> Prop end)).
+-define(IMPLIES(Pre,Prop), proper:implies(Pre,?DELAY(Prop))).
+-define(WHENFAIL(Action,Prop), proper:whenfail(?DELAY(Action),?DELAY(Prop))).
+-define(TRAPEXIT(Prop), proper:trapexit(?DELAY(Prop))).
+-define(TIMEOUT(Limit,Prop), proper:timeout(Limit,?DELAY(Prop))).
+%% TODO: -define(ALWAYS(Tests,Prop), proper:always(Tests,?DELAY(Prop))).
+%% TODO: -define(SOMETIMES(Tests,Prop), proper:sometimes(Tests,?DELAY(Prop))).
+
+
+%%------------------------------------------------------------------------------
+%% Generator macros
+%%------------------------------------------------------------------------------
+
+-define(FORCE(X), (X)()).
+-define(DELAY(X), fun() -> X end).
+-define(LAZY(X), proper_types:lazy(?DELAY(X))).
+-define(SIZED(SizeArg,Gen), proper_types:sized(fun(SizeArg) -> Gen end)).
+-define(LET(X,RawType,Gen), proper_types:bind(RawType,fun(X) -> Gen end,false)).
+-define(SHRINK(Gen,AltGens),
+ proper_types:shrinkwith(?DELAY(Gen),?DELAY(AltGens))).
+-define(LETSHRINK(Xs,RawType,Gen),
+ proper_types:bind(RawType,fun(Xs) -> Gen end,true)).
+-define(SUCHTHAT(X,RawType,Condition),
+ proper_types:add_constraint(RawType,fun(X) -> Condition end,true)).
+-define(SUCHTHATMAYBE(X,RawType,Condition),
+ proper_types:add_constraint(RawType,fun(X) -> Condition end,false)).
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_gen.erl b/lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_gen.erl
new file mode 100644
index 0000000000..b64a139e4d
--- /dev/null
+++ b/lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_gen.erl
@@ -0,0 +1,611 @@
+%%% Copyright 2010-2015 Manolis Papadakis <[email protected]>,
+%%% Eirini Arvaniti <[email protected]>
+%%% and Kostis Sagonas <[email protected]>
+%%%
+%%% This file is part of PropEr.
+%%%
+%%% PropEr is free software: you can redistribute it and/or modify
+%%% it under the terms of the GNU General Public License as published by
+%%% the Free Software Foundation, either version 3 of the License, or
+%%% (at your option) any later version.
+%%%
+%%% PropEr is distributed in the hope that it will be useful,
+%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
+%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+%%% GNU General Public License for more details.
+%%%
+%%% You should have received a copy of the GNU General Public License
+%%% along with PropEr. If not, see <http://www.gnu.org/licenses/>.
+
+%%% @copyright 2010-2015 Manolis Papadakis, Eirini Arvaniti and Kostis Sagonas
+%%% @version {@version}
+%%% @author Manolis Papadakis
+
+%%% @doc Generator subsystem and generators for basic types.
+%%%
+%%% You can use <a href="#index">these</a> functions to try out the random
+%%% instance generation and shrinking subsystems.
+%%%
+%%% CAUTION: These functions should never be used inside properties. They are
+%%% meant for demonstration purposes only.
+
+-module(proper_gen).
+-export([pick/1, pick/2, pick/3,
+ sample/1, sample/3, sampleshrink/1, sampleshrink/2]).
+
+-export([safe_generate/1]).
+-export([generate/1, normal_gen/1, alt_gens/1, clean_instance/1,
+ get_ret_type/1]).
+-export([integer_gen/3, float_gen/3, atom_gen/1, atom_rev/1, binary_gen/1,
+ binary_rev/1, binary_len_gen/1, bitstring_gen/1, bitstring_rev/1,
+ bitstring_len_gen/1, list_gen/2, distlist_gen/3, vector_gen/2,
+ union_gen/1, weighted_union_gen/1, tuple_gen/1, loose_tuple_gen/2,
+ loose_tuple_rev/2, exactly_gen/1, fixed_list_gen/1, function_gen/2,
+ any_gen/1, native_type_gen/2, safe_weighted_union_gen/1,
+ safe_union_gen/1]).
+
+-export_type([instance/0, imm_instance/0, sized_generator/0, nosize_generator/0,
+ generator/0, reverse_gen/0, combine_fun/0, alt_gens/0]).
+
+-include("proper_internal.hrl").
+
+%%-----------------------------------------------------------------------------
+%% Types
+%%-----------------------------------------------------------------------------
+
+%% TODO: update imm_instance() when adding more types: be careful when reading
+%% anything that returns it
+%% @private_type
+-type imm_instance() :: proper_types:raw_type()
+ | instance()
+ | {'$used', imm_instance(), imm_instance()}
+ | {'$to_part', imm_instance()}.
+-type instance() :: term().
+%% A value produced by the random instance generator.
+-type error_reason() :: 'arity_limit' | 'cant_generate' | {'typeserver',term()}.
+
+%% @private_type
+-type sized_generator() :: fun((size()) -> imm_instance()).
+%% @private_type
+-type typed_sized_generator() :: {'typed',
+ fun((proper_types:type(),size()) ->
+ imm_instance())}.
+%% @private_type
+-type nosize_generator() :: fun(() -> imm_instance()).
+%% @private_type
+-type typed_nosize_generator() :: {'typed',
+ fun((proper_types:type()) ->
+ imm_instance())}.
+%% @private_type
+-type generator() :: sized_generator()
+ | typed_sized_generator()
+ | nosize_generator()
+ | typed_nosize_generator().
+%% @private_type
+-type plain_reverse_gen() :: fun((instance()) -> imm_instance()).
+%% @private_type
+-type typed_reverse_gen() :: {'typed',
+ fun((proper_types:type(),instance()) ->
+ imm_instance())}.
+%% @private_type
+-type reverse_gen() :: plain_reverse_gen() | typed_reverse_gen().
+%% @private_type
+-type combine_fun() :: fun((instance()) -> imm_instance()).
+%% @private_type
+-type alt_gens() :: fun(() -> [imm_instance()]).
+%% @private_type
+-type fun_seed() :: {non_neg_integer(),non_neg_integer()}.
+
+
+%%-----------------------------------------------------------------------------
+%% Instance generation functions
+%%-----------------------------------------------------------------------------
+
+%% @private
+-spec safe_generate(proper_types:raw_type()) ->
+ {'ok',imm_instance()} | {'error',error_reason()}.
+safe_generate(RawType) ->
+ try generate(RawType) of
+ ImmInstance -> {ok, ImmInstance}
+ catch
+ throw:'$arity_limit' -> {error, arity_limit};
+ throw:'$cant_generate' -> {error, cant_generate};
+ throw:{'$typeserver',SubReason} -> {error, {typeserver,SubReason}}
+ end.
+
+%% @private
+-spec generate(proper_types:raw_type()) -> imm_instance().
+generate(RawType) ->
+ Type = proper_types:cook_outer(RawType),
+ ok = add_parameters(Type),
+ Instance = generate(Type, get('$constraint_tries'), none),
+ ok = remove_parameters(Type),
+ Instance.
+
+-spec add_parameters(proper_types:type()) -> 'ok'.
+add_parameters(Type) ->
+ case proper_types:find_prop(parameters, Type) of
+ {ok, Params} ->
+ OldParams = erlang:get('$parameters'),
+ case OldParams of
+ undefined ->
+ erlang:put('$parameters', Params);
+ _ ->
+ erlang:put('$parameters', Params ++ OldParams)
+ end,
+ ok;
+ _ ->
+ ok
+ end.
+
+-spec remove_parameters(proper_types:type()) -> 'ok'.
+remove_parameters(Type) ->
+ case proper_types:find_prop(parameters, Type) of
+ {ok, Params} ->
+ AllParams = erlang:get('$parameters'),
+ case AllParams of
+ Params->
+ erlang:erase('$parameters');
+ _ ->
+ erlang:put('$parameters', AllParams -- Params)
+ end,
+ ok;
+ _ ->
+ ok
+ end.
+
+-spec generate(proper_types:type(), non_neg_integer(),
+ 'none' | {'ok',imm_instance()}) -> imm_instance().
+generate(_Type, 0, none) ->
+ throw('$cant_generate');
+generate(_Type, 0, {ok,Fallback}) ->
+ Fallback;
+generate(Type, TriesLeft, Fallback) ->
+ ImmInstance =
+ case proper_types:get_prop(kind, Type) of
+ constructed ->
+ PartsType = proper_types:get_prop(parts_type, Type),
+ Combine = proper_types:get_prop(combine, Type),
+ ImmParts = generate(PartsType),
+ Parts = clean_instance(ImmParts),
+ ImmInstance1 = Combine(Parts),
+ %% TODO: We can just generate the internal type: if it's not
+ %% a type, it will turn into an exactly.
+ ImmInstance2 =
+ case proper_types:is_raw_type(ImmInstance1) of
+ true -> generate(ImmInstance1);
+ false -> ImmInstance1
+ end,
+ {'$used',ImmParts,ImmInstance2};
+ _ ->
+ ImmInstance1 = normal_gen(Type),
+ case proper_types:is_raw_type(ImmInstance1) of
+ true -> generate(ImmInstance1);
+ false -> ImmInstance1
+ end
+ end,
+ case proper_types:satisfies_all(clean_instance(ImmInstance), Type) of
+ {_,true} -> ImmInstance;
+ {true,false} -> generate(Type, TriesLeft - 1, {ok,ImmInstance});
+ {false,false} -> generate(Type, TriesLeft - 1, Fallback)
+ end.
+
+%% @equiv pick(Type, 10)
+-spec pick(Type::proper_types:raw_type()) -> {'ok',instance()} | 'error'.
+pick(RawType) ->
+ pick(RawType, 10).
+
+%% @equiv pick(Type, Size, os:timestamp())
+-spec pick(Type::proper_types:raw_type(), size()) -> {'ok',instance()} | 'error'.
+pick(RawType, Size) ->
+ pick(RawType, Size, os:timestamp()).
+
+%% @doc Generates a random instance of `Type', of size `Size' with seed `Seed'.
+-spec pick(Type::proper_types:raw_type(), size(), seed()) ->
+ {'ok',instance()} | 'error'.
+pick(RawType, Size, Seed) ->
+ proper:global_state_init_size_seed(Size, Seed),
+ case clean_instance(safe_generate(RawType)) of
+ {ok,Instance} = Result ->
+ Msg = "WARNING: Some garbage has been left in the process registry "
+ "and the code server~n"
+ "to allow for the returned function(s) to run normally.~n"
+ "Please run proper:global_state_erase() when done.~n",
+ case contains_fun(Instance) of
+ true -> io:format(Msg, []);
+ false -> proper:global_state_erase()
+ end,
+ Result;
+ {error,Reason} ->
+ proper:report_error(Reason, fun io:format/2),
+ proper:global_state_erase(),
+ error
+ end.
+
+%% @equiv sample(Type, 10, 20)
+-spec sample(Type::proper_types:raw_type()) -> 'ok'.
+sample(RawType) ->
+ sample(RawType, 10, 20).
+
+%% @doc Generates and prints one random instance of `Type' for each size from
+%% `StartSize' up to `EndSize'.
+-spec sample(Type::proper_types:raw_type(), size(), size()) -> 'ok'.
+sample(RawType, StartSize, EndSize) when StartSize =< EndSize ->
+ Tests = EndSize - StartSize + 1,
+ Prop = ?FORALL(X, RawType, begin io:format("~p~n",[X]), true end),
+ Opts = [quiet,{start_size,StartSize},{max_size,EndSize},{numtests,Tests}],
+ _ = proper:quickcheck(Prop, Opts),
+ ok.
+
+%% @equiv sampleshrink(Type, 10)
+-spec sampleshrink(Type::proper_types:raw_type()) -> 'ok'.
+sampleshrink(RawType) ->
+ sampleshrink(RawType, 10).
+
+%% @doc Generates a random instance of `Type', of size `Size', then shrinks it
+%% as far as it goes. The value produced on each step of the shrinking process
+%% is printed on the screen.
+-spec sampleshrink(Type::proper_types:raw_type(), size()) -> 'ok'.
+sampleshrink(RawType, Size) ->
+ proper:global_state_init_size(Size),
+ Type = proper_types:cook_outer(RawType),
+ case safe_generate(Type) of
+ {ok,ImmInstance} ->
+ Shrunk = keep_shrinking(ImmInstance, [], Type),
+ PrintInst = fun(I) -> io:format("~p~n",[clean_instance(I)]) end,
+ lists:foreach(PrintInst, Shrunk);
+ {error,Reason} ->
+ proper:report_error(Reason, fun io:format/2)
+ end,
+ proper:global_state_erase(),
+ ok.
+
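The pick/sample/sampleshrink functions above are the module's demonstration entry points (and, per the CAUTION in the module doc, must not be used inside properties). A hedged illustration of calling them with types from the accompanying proper_types module; the generated values are random, and pick/1 can also return 'error' if generation fails:

    demo() ->
        {ok, _Int} = proper_gen:pick(proper_types:integer()),
        ok = proper_gen:sample(proper_types:list(proper_types:atom())),
        ok = proper_gen:sampleshrink(proper_types:integer(), 10).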
+-spec keep_shrinking(imm_instance(), [imm_instance()], proper_types:type()) ->
+ [imm_instance(),...].
+keep_shrinking(ImmInstance, Acc, Type) ->
+ keep_shrinking(ImmInstance, Acc, Type, init).
+
+keep_shrinking(ImmInstance, Acc, Type, State) ->
+ case proper_shrink:shrink(ImmInstance, Type, State) of
+ {[], done} -> %% no more shrinkers
+ lists:reverse([ImmInstance|Acc]);
+ {[], NewState} ->
+ %% try next shrinker
+ keep_shrinking(ImmInstance, Acc, Type, NewState);
+ {[Shrunk|_Rest], _NewState} ->
+ Acc2 = [ImmInstance|Acc],
+ case lists:member(Shrunk, Acc2) of
+ true ->
+ %% Avoid infinite loops
+ lists:reverse(Acc2);
+ false ->
+ keep_shrinking(Shrunk, Acc2, Type)
+ end
+ end.
+
+-spec contains_fun(term()) -> boolean().
+contains_fun(List) when is_list(List) ->
+ proper_arith:safe_any(fun contains_fun/1, List);
+contains_fun(Tuple) when is_tuple(Tuple) ->
+ contains_fun(tuple_to_list(Tuple));
+contains_fun(Fun) when is_function(Fun) ->
+ true;
+contains_fun(_Term) ->
+ false.
+
+
+%%-----------------------------------------------------------------------------
+%% Utility functions
+%%-----------------------------------------------------------------------------
+
+%% @private
+-spec normal_gen(proper_types:type()) -> imm_instance().
+normal_gen(Type) ->
+ case proper_types:get_prop(generator, Type) of
+ {typed, Gen} ->
+ if
+ is_function(Gen, 1) -> Gen(Type);
+ is_function(Gen, 2) -> Gen(Type, proper:get_size(Type))
+ end;
+ Gen ->
+ if
+ is_function(Gen, 0) -> Gen();
+ is_function(Gen, 1) -> Gen(proper:get_size(Type))
+ end
+ end.
+
+%% @private
+-spec alt_gens(proper_types:type()) -> [imm_instance()].
+alt_gens(Type) ->
+ case proper_types:find_prop(alt_gens, Type) of
+ {ok, AltGens} -> ?FORCE(AltGens);
+ error -> []
+ end.
+
+%% @private
+-spec clean_instance(imm_instance()) -> instance().
+clean_instance({'$used',_ImmParts,ImmInstance}) ->
+ clean_instance(ImmInstance);
+clean_instance({'$to_part',ImmInstance}) ->
+ clean_instance(ImmInstance);
+clean_instance(ImmInstance) ->
+ if
+ is_list(ImmInstance) ->
+ %% CAUTION: this must handle improper lists
+ proper_arith:safe_map(fun clean_instance/1, ImmInstance);
+ is_tuple(ImmInstance) ->
+ proper_arith:tuple_map(fun clean_instance/1, ImmInstance);
+ true ->
+ ImmInstance
+ end.
+
+
+%%-----------------------------------------------------------------------------
+%% Basic type generators
+%%-----------------------------------------------------------------------------
+
+%% @private
+-spec integer_gen(size(), proper_types:extint(), proper_types:extint()) ->
+ integer().
+integer_gen(Size, inf, inf) ->
+ proper_arith:rand_int(Size);
+integer_gen(Size, inf, High) ->
+ High - proper_arith:rand_non_neg_int(Size);
+integer_gen(Size, Low, inf) ->
+ Low + proper_arith:rand_non_neg_int(Size);
+integer_gen(Size, Low, High) ->
+ proper_arith:smart_rand_int(Size, Low, High).
+
+%% @private
+-spec float_gen(size(), proper_types:extnum(), proper_types:extnum()) ->
+ float().
+float_gen(Size, inf, inf) ->
+ proper_arith:rand_float(Size);
+float_gen(Size, inf, High) ->
+ High - proper_arith:rand_non_neg_float(Size);
+float_gen(Size, Low, inf) ->
+ Low + proper_arith:rand_non_neg_float(Size);
+float_gen(_Size, Low, High) ->
+ proper_arith:rand_float(Low, High).
+
+%% @private
+-spec atom_gen(size()) -> proper_types:type().
+%% We make sure we never clash with internal atoms by checking that the first
+%% character is not '$'.
+atom_gen(Size) ->
+ ?LET(Str,
+ ?SUCHTHAT(X,
+ proper_types:resize(Size,
+ proper_types:list(proper_types:byte())),
+ X =:= [] orelse hd(X) =/= $$),
+ list_to_atom(Str)).
+
+%% @private
+-spec atom_rev(atom()) -> imm_instance().
+atom_rev(Atom) ->
+ {'$used', atom_to_list(Atom), Atom}.
+
+%% @private
+-spec binary_gen(size()) -> proper_types:type().
+binary_gen(Size) ->
+ ?LET(Bytes,
+ proper_types:resize(Size,
+ proper_types:list(proper_types:byte())),
+ list_to_binary(Bytes)).
+
+%% @private
+-spec binary_rev(binary()) -> imm_instance().
+binary_rev(Binary) ->
+ {'$used', binary_to_list(Binary), Binary}.
+
+%% @private
+-spec binary_len_gen(length()) -> proper_types:type().
+binary_len_gen(Len) ->
+ ?LET(Bytes,
+ proper_types:vector(Len, proper_types:byte()),
+ list_to_binary(Bytes)).
+
+%% @private
+-spec bitstring_gen(size()) -> proper_types:type().
+bitstring_gen(Size) ->
+ ?LET({BytesHead, NumBits, TailByte},
+ {proper_types:resize(Size,proper_types:binary()),
+ proper_types:range(0,7), proper_types:range(0,127)},
+ <<BytesHead/binary, TailByte:NumBits>>).
+
+%% @private
+-spec bitstring_rev(bitstring()) -> imm_instance().
+bitstring_rev(BitString) ->
+ List = bitstring_to_list(BitString),
+ {BytesList, BitsTail} = lists:splitwith(fun erlang:is_integer/1, List),
+ {NumBits, TailByte} = case BitsTail of
+ [] -> {0, 0};
+ [Bits] -> N = bit_size(Bits),
+ <<Byte:N>> = Bits,
+ {N, Byte}
+ end,
+ {'$used',
+ {{'$used',BytesList,list_to_binary(BytesList)}, NumBits, TailByte},
+ BitString}.
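+%% For example, bitstring_rev(<<42, 5:3>>) returns
+%% {'$used', {{'$used', [42], <<42>>}, 3, 5}, <<42, 5:3>>}.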
+
+%% @private
+-spec bitstring_len_gen(length()) -> proper_types:type().
+bitstring_len_gen(Len) ->
+ BytesLen = Len div 8,
+ BitsLen = Len rem 8,
+ ?LET({BytesHead, NumBits, TailByte},
+ {proper_types:binary(BytesLen), BitsLen,
+ proper_types:range(0, 1 bsl BitsLen - 1)},
+ <<BytesHead/binary, TailByte:NumBits>>).
+
+%% @private
+-spec list_gen(size(), proper_types:type()) -> [imm_instance()].
+list_gen(Size, ElemType) ->
+ Len = proper_arith:rand_int(0, Size),
+ vector_gen(Len, ElemType).
+
+%% @private
+-spec distlist_gen(size(), sized_generator(), boolean()) -> [imm_instance()].
+distlist_gen(RawSize, Gen, NonEmpty) ->
+ Len = case NonEmpty of
+ true -> proper_arith:rand_int(1, erlang:max(1,RawSize));
+ false -> proper_arith:rand_int(0, RawSize)
+ end,
+ Size = case Len of
+ 1 -> RawSize - 1;
+ _ -> RawSize
+ end,
+ %% TODO: this produces a lot of types: maybe a simple 'div' is sufficient?
+ Sizes = proper_arith:distribute(Size, Len),
+ InnerTypes = [Gen(S) || S <- Sizes],
+ fixed_list_gen(InnerTypes).
+
+%% @private
+-spec vector_gen(length(), proper_types:type()) -> [imm_instance()].
+vector_gen(Len, ElemType) ->
+ vector_gen_tr(Len, ElemType, []).
+
+-spec vector_gen_tr(length(), proper_types:type(), [imm_instance()]) ->
+ [imm_instance()].
+vector_gen_tr(0, _ElemType, AccList) ->
+ AccList;
+vector_gen_tr(Left, ElemType, AccList) ->
+ vector_gen_tr(Left - 1, ElemType, [generate(ElemType) | AccList]).
+
+%% @private
+-spec union_gen([proper_types:type(),...]) -> imm_instance().
+union_gen(Choices) ->
+ {_Choice,Type} = proper_arith:rand_choose(Choices),
+ generate(Type).
+
+%% @private
+-spec weighted_union_gen([{frequency(),proper_types:type()},...]) ->
+ imm_instance().
+weighted_union_gen(FreqChoices) ->
+ {_Choice,Type} = proper_arith:freq_choose(FreqChoices),
+ generate(Type).
+
+%% @private
+-spec safe_union_gen([proper_types:type(),...]) -> imm_instance().
+safe_union_gen(Choices) ->
+ {Choice,Type} = proper_arith:rand_choose(Choices),
+ try generate(Type)
+ catch
+ error:_ ->
+ safe_union_gen(proper_arith:list_remove(Choice, Choices))
+ end.
+
+%% @private
+-spec safe_weighted_union_gen([{frequency(),proper_types:type()},...]) ->
+ imm_instance().
+safe_weighted_union_gen(FreqChoices) ->
+ {Choice,Type} = proper_arith:freq_choose(FreqChoices),
+ try generate(Type)
+ catch
+ error:_ ->
+ safe_weighted_union_gen(proper_arith:list_remove(Choice,
+ FreqChoices))
+ end.
+
+%% @private
+-spec tuple_gen([proper_types:type()]) -> tuple().
+tuple_gen(Fields) ->
+ list_to_tuple(fixed_list_gen(Fields)).
+
+%% @private
+-spec loose_tuple_gen(size(), proper_types:type()) -> proper_types:type().
+loose_tuple_gen(Size, ElemType) ->
+ ?LET(L,
+ proper_types:resize(Size, proper_types:list(ElemType)),
+ list_to_tuple(L)).
+
+%% @private
+-spec loose_tuple_rev(tuple(), proper_types:type()) -> imm_instance().
+loose_tuple_rev(Tuple, ElemType) ->
+ CleanList = tuple_to_list(Tuple),
+ List = case proper_types:find_prop(reverse_gen, ElemType) of
+ {ok,{typed, ReverseGen}} ->
+ [ReverseGen(ElemType,X) || X <- CleanList];
+ {ok,ReverseGen} -> [ReverseGen(X) || X <- CleanList];
+ error -> CleanList
+ end,
+ {'$used', List, Tuple}.
+
+%% @private
+-spec exactly_gen(T) -> T.
+exactly_gen(X) ->
+ X.
+
+%% @private
+-spec fixed_list_gen([proper_types:type()]) -> imm_instance()
+ ; ({[proper_types:type()],proper_types:type()}) ->
+ maybe_improper_list(imm_instance(), imm_instance() | []).
+fixed_list_gen({ProperHead,ImproperTail}) ->
+ [generate(F) || F <- ProperHead] ++ generate(ImproperTail);
+fixed_list_gen(ProperFields) ->
+ [generate(F) || F <- ProperFields].
+
+%% @private
+-spec function_gen(arity(), proper_types:type()) -> function().
+function_gen(Arity, RetType) ->
+ FunSeed = {proper_arith:rand_int(0, ?SEED_RANGE - 1),
+ proper_arith:rand_int(0, ?SEED_RANGE - 1)},
+ create_fun(Arity, RetType, FunSeed).
+
+%% @private
+-spec any_gen(size()) -> imm_instance().
+any_gen(Size) ->
+ case get('$any_type') of
+ undefined -> real_any_gen(Size);
+ {type,AnyType} -> generate(proper_types:resize(Size, AnyType))
+ end.
+
+-spec real_any_gen(size()) -> imm_instance().
+real_any_gen(0) ->
+ SimpleTypes = [proper_types:integer(), proper_types:float(),
+ proper_types:atom()],
+ union_gen(SimpleTypes);
+real_any_gen(Size) ->
+ FreqChoices = [{?ANY_SIMPLE_PROB,simple}, {?ANY_BINARY_PROB,binary},
+ {?ANY_EXPAND_PROB,expand}],
+ case proper_arith:freq_choose(FreqChoices) of
+ {_,simple} ->
+ real_any_gen(0);
+ {_,binary} ->
+ generate(proper_types:resize(Size, proper_types:bitstring()));
+ {_,expand} ->
+ %% TODO: statistics of produced terms?
+ NumElems = proper_arith:rand_int(0, Size - 1),
+ ElemSizes = proper_arith:distribute(Size - 1, NumElems),
+ ElemTypes = [?LAZY(real_any_gen(S)) || S <- ElemSizes],
+ case proper_arith:rand_int(1,2) of
+ 1 -> fixed_list_gen(ElemTypes);
+ 2 -> tuple_gen(ElemTypes)
+ end
+ end.
+
+%% @private
+-spec native_type_gen(mod_name(), string()) -> proper_types:type().
+native_type_gen(Mod, TypeStr) ->
+ case proper_typeserver:translate_type({Mod,TypeStr}) of
+ {ok,Type} -> Type;
+ {error,Reason} -> throw({'$typeserver',Reason})
+ end.
+
+
+%%------------------------------------------------------------------------------
+%% Function-generation functions
+%%------------------------------------------------------------------------------
+
+-spec create_fun(arity(), proper_types:type(), fun_seed()) -> function().
+create_fun(_Arity, _RetType, _FunSeed) ->
+ fun() -> throw('$arity_limit') end.
+
+%% @private
+-spec get_ret_type(function()) -> proper_types:type().
+get_ret_type(Fun) ->
+ {arity,Arity} = erlang:fun_info(Fun, arity),
+ put('$get_ret_type', true),
+ RetType = apply(Fun, lists:duplicate(Arity,dummy)),
+ erase('$get_ret_type'),
+ RetType.
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_internal.hrl b/lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_internal.hrl
new file mode 100644
index 0000000000..89e6b34296
--- /dev/null
+++ b/lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_internal.hrl
@@ -0,0 +1,98 @@
+%%% Copyright 2010-2013 Manolis Papadakis <[email protected]>,
+%%% Eirini Arvaniti <[email protected]>
+%%% and Kostis Sagonas <[email protected]>
+%%%
+%%% This file is part of PropEr.
+%%%
+%%% PropEr is free software: you can redistribute it and/or modify
+%%% it under the terms of the GNU General Public License as published by
+%%% the Free Software Foundation, either version 3 of the License, or
+%%% (at your option) any later version.
+%%%
+%%% PropEr is distributed in the hope that it will be useful,
+%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
+%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+%%% GNU General Public License for more details.
+%%%
+%%% You should have received a copy of the GNU General Public License
+%%% along with PropEr. If not, see <http://www.gnu.org/licenses/>.
+
+%%% @copyright 2010-2016 Manolis Papadakis, Eirini Arvaniti and Kostis Sagonas
+%%% @version {@version}
+%%% @author Manolis Papadakis
+%%% @doc Internal header file: This header is included in all PropEr source
+%%% files.
+
+-include("compile_flags.hrl").
+-include("proper_common.hrl").
+
+
+%%------------------------------------------------------------------------------
+%% Activate strip_types parse transform
+%%------------------------------------------------------------------------------
+
+-ifdef(NO_TYPES).
+-compile({parse_transform, strip_types}).
+-endif.
+
+%%------------------------------------------------------------------------------
+%% Random generator selection
+%%------------------------------------------------------------------------------
+
+-ifdef(USE_SFMT).
+-define(RANDOM_MOD, sfmt).
+-define(SEED_NAME, sfmt_seed).
+-else.
+-define(RANDOM_MOD, random).
+-define(SEED_NAME, random_seed).
+-endif.
+
+%%------------------------------------------------------------------------------
+%% Macros
+%%------------------------------------------------------------------------------
+
+-define(PROPERTY_PREFIX, "prop_").
+
+
+%%------------------------------------------------------------------------------
+%% Constants
+%%------------------------------------------------------------------------------
+
+-define(SEED_RANGE, 4294967296).
+-define(MAX_ARITY, 20).
+-define(MAX_TRIES_FACTOR, 5).
+-define(ANY_SIMPLE_PROB, 3).
+-define(ANY_BINARY_PROB, 1).
+-define(ANY_EXPAND_PROB, 8).
+-define(SMALL_RANGE_THRESHOLD, 16#FFFF).
+
+
+%%------------------------------------------------------------------------------
+%% Common type aliases
+%%------------------------------------------------------------------------------
+
+%% TODO: Perhaps these should be moved inside modules.
+-type mod_name() :: atom().
+-type fun_name() :: atom().
+-type size() :: non_neg_integer().
+-type length() :: non_neg_integer().
+-type position() :: pos_integer().
+-type frequency() :: pos_integer().
+-type seed() :: {non_neg_integer(), non_neg_integer(), non_neg_integer()}.
+
+-type abs_form() :: erl_parse:abstract_form().
+-type abs_expr() :: erl_parse:abstract_expr().
+-type abs_clause() :: erl_parse:abstract_clause().
+
+%% TODO: Replace these with the appropriate types from stdlib.
+-ifdef(AT_LEAST_19).
+-type abs_type() :: erl_parse:abstract_type().
+-type abs_rec_field() :: term(). % erl_parse:af_field_decl().
+-else.
+-type abs_type() :: term().
+-type abs_rec_field() :: term().
+-endif.
+
+-type loose_tuple(T) :: {} | {T} | {T,T} | {T,T,T} | {T,T,T,T} | {T,T,T,T,T}
+ | {T,T,T,T,T,T} | {T,T,T,T,T,T,T} | {T,T,T,T,T,T,T,T}
+ | {T,T,T,T,T,T,T,T,T} | {T,T,T,T,T,T,T,T,T,T} | tuple().
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_types.erl b/lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_types.erl
new file mode 100644
index 0000000000..6b154b813b
--- /dev/null
+++ b/lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_types.erl
@@ -0,0 +1,1353 @@
+%%% Copyright 2010-2013 Manolis Papadakis <[email protected]>,
+%%% Eirini Arvaniti <[email protected]>
+%%% and Kostis Sagonas <[email protected]>
+%%%
+%%% This file is part of PropEr.
+%%%
+%%% PropEr is free software: you can redistribute it and/or modify
+%%% it under the terms of the GNU General Public License as published by
+%%% the Free Software Foundation, either version 3 of the License, or
+%%% (at your option) any later version.
+%%%
+%%% PropEr is distributed in the hope that it will be useful,
+%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
+%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+%%% GNU General Public License for more details.
+%%%
+%%% You should have received a copy of the GNU General Public License
+%%% along with PropEr. If not, see <http://www.gnu.org/licenses/>.
+
+%%% @copyright 2010-2013 Manolis Papadakis, Eirini Arvaniti and Kostis Sagonas
+%%% @version {@version}
+%%% @author Manolis Papadakis
+
+%%% @doc Type manipulation functions and predefined types.
+%%%
+%%% == Basic types ==
+%%% This module defines all the basic types of the PropEr type system as
+%%% functions. See the <a href="#index">function index</a> for an overview.
+%%%
+%%% Types can be combined in tuples or lists to produce other types. Exact
+%%% values (such as exact numbers, atoms, binaries and strings) can be combined
+%%% with types inside such structures, like in this example of the type of a
+%%% tagged tuple: ``{'result', integer()}''.
+%%%
+%%% When including the PropEr header file, all
+%%% <a href="#index">API functions</a> of this module are automatically
+%%% imported, unless `PROPER_NO_IMPORTS' is defined.
+%%%
+%%% == Customized types ==
+%%% The following operators can be applied to basic types in order to produce
+%%% new ones:
+%%%
+%%% <dl>
+%%% <dt>`?LET(<Xs>, <Xs_type>, <In>)'</dt>
+%%% <dd>To produce an instance of this type, all appearances of the variables
+%%% in `<Xs>' are replaced inside `<In>' by their corresponding values in a
+%%% randomly generated instance of `<Xs_type>'. It's OK for the `<In>' part to
+%%% evaluate to a type - in that case, an instance of the inner type is
+%%% generated recursively.</dd>
+%%% <dt>`?SUCHTHAT(<X>, <Type>, <Condition>)'</dt>
+%%% <dd>This produces a specialization of `<Type>', which only includes those
+%%% members of `<Type>' that satisfy the constraint `<Condition>' - that is,
+%%% those members for which the function `fun(<X>) -> <Condition> end' returns
+%%% `true'. If the constraint is very strict - that is, only a small
+%%% percentage of instances of `<Type>' pass the test - it will take a lot of
+%%% tries for the instance generation subsystem to randomly produce a valid
+%%% instance. This will result in slower testing, and testing may even be
+%%% stopped short, in case the `constraint_tries' limit is reached (see the
+%%% "Options" section in the documentation of the {@link proper} module). If
+%%% this is the case, it would be more appropriate to generate valid instances
+%%% of the specialized type using the `?LET' macro. Also make sure that even
+%%% small instances can satisfy the constraint, since PropEr will only try
+%%% small instances at the start of testing. If this is not possible, you can
+%%% instruct PropEr to start at a larger size, by supplying a suitable value
+%%% for the `start_size' option (see the "Options" section in the
+%%% documentation of the {@link proper} module).</dd>
+%%% <dt>`?SUCHTHATMAYBE(<X>, <Type>, <Condition>)'</dt>
+%%% <dd>Equivalent to the `?SUCHTHAT' macro, but the constraint `<Condition>'
+%%% is considered non-strict: if the `constraint_tries' limit is reached, the
+%%% generator will just return an instance of `<Type>' instead of failing,
+%%% even if that instance doesn't satisfy the constraint.</dd>
+%%% <dt>`?SHRINK(<Generator>, <List_of_alt_gens>)'</dt>
+%%% <dd>This creates a type whose instances are generated by evaluating the
+%%% statement block `<Generator>' (this may evaluate to a type, which will
+%%% then be generated recursively). If an instance of such a type is to be
+%%% shrunk, the generators in `<List_of_alt_gens>' are first run to produce
+%%% hopefully simpler instances of the type. Thus, the generators in the
+%%% second argument should be simpler than the default. The simplest ones
+%%% should be at the front of the list, since those are the generators
+%%% preferred by the shrinking subsystem. Like the main `<Generator>', the
+%%% alternatives may also evaluate to a type, which is generated recursively.
+%%% </dd>
+%%% <dt>`?LETSHRINK(<List_of_variables>, <List_of_types>, <Generator>)'</dt>
+%%% <dd>This is created by combining a `?LET' and a `?SHRINK' macro. Instances
+%%% are generated by applying a randomly generated list of values inside
+%%% `<Generator>' (just like a `?LET', with the added constraint that the
+%%% variables and types must be provided in a list - alternatively,
+%%% `<List_of_types>' may be a list or vector type). When shrinking instances
+%%% of such a type, the sub-instances that were combined to produce it are
+%%% first tried in place of the failing instance.</dd>
+%%% <dt>`?LAZY(<Generator>)'</dt>
+%%% <dd>This construct returns a type whose only purpose is to delay the
+%%% evaluation of `<Generator>' (`<Generator>' can return a type, which will
+%%% be generated recursively). Using this, you can simulate the lazy
+%%% generation of instances:
+%%% ``` stream() -> ?LAZY(frequency([ {1,[]}, {3,[0|stream()]} ])). '''
+%%% The above type produces lists of zeroes with an average length of 3. Note
+%%% that, had we not enclosed the generator in a `?LAZY' macro, the
+%%% evaluation would continue indefinitely, due to Erlang's eager evaluation
+%%% semantics.</dd>
+%%% <dt>`non_empty(<List_or_binary_type>)'</dt>
+%%% <dd>See the documentation for {@link non_empty/1}.</dd>
+%%% <dt>`noshrink(<Type>)'</dt>
+%%% <dd>See the documentation for {@link noshrink/1}.</dd>
+%%% <dt>`default(<Default_value>, <Type>)'</dt>
+%%% <dd>See the documentation for {@link default/2}.</dd>
+%%% <dt>`with_parameter(<Parameter>, <Value>, <Type>)'</dt>
+%%% <dd>See the documentation for {@link with_parameter/3}.</dd>
+%%% <dt>`with_parameters(<Param_value_pairs>, <Type>)'</dt>
+%%% <dd>See the documentation for {@link with_parameters/2}.</dd>
+%%% </dl>
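+%%%
+%%% As a simple illustration of combining these operators, a type of sorted,
+%%% non-empty integer lists could be written as:
+%%% ``` ?LET(L, non_empty(list(integer())), lists:sort(L)) '''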
+%%%
+%%% == Size manipulation ==
+%%% The following operators are related to the `size' parameter, which controls
+%%% the maximum size of produced instances. The actual size of a produced
+%%% instance is chosen randomly, but can never exceed the value of the `size'
+%%% parameter at the moment of generation. A more accurate definition is the
+%%% following: the maximum instance of `size S' can never be smaller than the
+%%% maximum instance of `size S-1'. The actual size of an instance is measured
+%%% differently for each type: the actual size of a list is its length, while
+%%% the actual size of a tree may be the number of its internal nodes. Some
+%%% types, e.g. unions, have no notion of size, thus their generation is not
+%%% influenced by the value of `size'. The `size' parameter starts at 1 and
+%%% grows automatically during testing.
+%%%
+%%% <dl>
+%%% <dt>`?SIZED(<S>, <Generator>)'</dt>
+%%% <dd>Creates a new type, whose instances are produced by replacing all
+%%% appearances of the `<S>' parameter inside the statement block
+%%% `<Generator>' with the value of the `size' parameter. It's OK for the
+%%% `<Generator>' to return a type - in that case, an instance of the inner
+%%% type is generated recursively.</dd>
+%%% <dt>`resize(<New_size>, <Type>)'</dt>
+%%% <dd>See the documentation for {@link resize/2}.</dd>
+%%% </dl>
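+%%%
+%%% For instance, ``?SIZED(S, vector(S, integer()))'' describes integer lists
+%%% whose length always equals the current value of the `size' parameter.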
+
+-module(proper_types).
+-export([is_inst/2, is_inst/3]).
+
+-export([integer/2, float/2, atom/0, binary/0, binary/1, bitstring/0,
+ bitstring/1, list/1, vector/2, union/1, weighted_union/1, tuple/1,
+ loose_tuple/1, exactly/1, fixed_list/1, function/2, any/0,
+ shrink_list/1, safe_union/1, safe_weighted_union/1]).
+-export([integer/0, non_neg_integer/0, pos_integer/0, neg_integer/0, range/2,
+ float/0, non_neg_float/0, number/0, boolean/0, byte/0, char/0,
+ list/0, tuple/0, string/0, wunion/1, term/0, timeout/0, arity/0]).
+-export([int/0, nat/0, largeint/0, real/0, bool/0, choose/2, elements/1,
+ oneof/1, frequency/1, return/1, default/2, orderedlist/1, function0/1,
+ function1/1, function2/1, function3/1, function4/1,
+ weighted_default/2]).
+-export([resize/2, non_empty/1, noshrink/1]).
+
+-export([cook_outer/1, is_type/1, equal_types/2, is_raw_type/1, to_binary/1,
+ from_binary/1, get_prop/2, find_prop/2, safe_is_instance/2,
+ is_instance/2, unwrap/1, weakly/1, strongly/1, satisfies_all/2,
+ new_type/2, subtype/2]).
+-export([lazy/1, sized/1, bind/3, shrinkwith/2, add_constraint/3,
+ native_type/2, distlist/3, with_parameter/3, with_parameters/2,
+ parameter/1, parameter/2]).
+-export([le/2]).
+
+-export_type([type/0, raw_type/0, extint/0, extnum/0]).
+
+-include("proper_internal.hrl").
+
+
+%%------------------------------------------------------------------------------
+%% Comparison with erl_types
+%%------------------------------------------------------------------------------
+
+%% Missing types
+%% -------------------
+%% will do:
+%% records, maybe_improper_list(T,S), nonempty_improper_list(T,S)
+%% maybe_improper_list(), maybe_improper_list(T), iolist, iodata
+%% don't need:
+%% nonempty_{list,string,maybe_improper_list}
+%% won't do:
+%% pid, port, ref, identifier, none, no_return, module, mfa, node
+%% array, dict, digraph, set, gb_tree, gb_set, queue, tid
+
+%% Missing type information
+%% ------------------------
+%% bin types:
+%% other unit sizes? what about size info?
+%% functions:
+%% generally some fun, unspecified number of arguments but specified
+%% return type
+%% any:
+%% doesn't cover functions and improper lists
+
+
+%%------------------------------------------------------------------------------
+%% Type declaration macros
+%%------------------------------------------------------------------------------
+
+-define(BASIC(PropList), new_type(PropList,basic)).
+-define(WRAPPER(PropList), new_type(PropList,wrapper)).
+-define(CONSTRUCTED(PropList), new_type(PropList,constructed)).
+-define(CONTAINER(PropList), new_type(PropList,container)).
+-define(SUBTYPE(Type,PropList), subtype(PropList,Type)).
+
+
+%%------------------------------------------------------------------------------
+%% Types
+%%------------------------------------------------------------------------------
+
+-type type_kind() :: 'basic' | 'wrapper' | 'constructed' | 'container' | atom().
+-type instance_test() :: fun((proper_gen:imm_instance()) -> boolean())
+ | {'typed',
+ fun((proper_types:type(),
+ proper_gen:imm_instance()) -> boolean())}.
+-type index() :: pos_integer().
+%% @alias
+-type value() :: term().
+%% @private_type
+%% @alias
+-type extint() :: integer() | 'inf'.
+%% @private_type
+%% @alias
+-type extnum() :: number() | 'inf'.
+-type constraint_fun() :: fun((proper_gen:instance()) -> boolean()).
+
+-opaque type() :: {'$type', [type_prop()]}.
+%% A type of the PropEr type system
+%% @type raw_type(). You can consider this as an equivalent of {@type type()}.
+-type raw_type() :: type() | [raw_type()] | loose_tuple(raw_type()) | term().
+-type type_prop_name() :: 'kind' | 'generator' | 'reverse_gen' | 'parts_type'
+ | 'combine' | 'alt_gens' | 'shrink_to_parts'
+ | 'size_transform' | 'is_instance' | 'shrinkers'
+ | 'noshrink' | 'internal_type' | 'internal_types'
+ | 'get_length' | 'split' | 'join' | 'get_indices'
+ | 'remove' | 'retrieve' | 'update' | 'constraints'
+ | 'parameters' | 'env' | 'subenv'.
+
+-type type_prop_value() :: term().
+-type type_prop() ::
+ {'kind', type_kind()}
+ | {'generator', proper_gen:generator()}
+ | {'reverse_gen', proper_gen:reverse_gen()}
+ | {'parts_type', type()}
+ | {'combine', proper_gen:combine_fun()}
+ | {'alt_gens', proper_gen:alt_gens()}
+ | {'shrink_to_parts', boolean()}
+ | {'size_transform', fun((size()) -> size())}
+ | {'is_instance', instance_test()}
+ | {'shrinkers', [proper_shrink:shrinker()]}
+ | {'noshrink', boolean()}
+ | {'internal_type', raw_type()}
+ | {'internal_types', tuple() | maybe_improper_list(type(),type() | [])}
+ %% The items returned by 'remove' must be of this type.
+ | {'get_length', fun((proper_gen:imm_instance()) -> length())}
+ %% If this is a container type, this should return the number of elements
+ %% it contains.
+ | {'split', fun((proper_gen:imm_instance()) -> [proper_gen:imm_instance()])
+ | fun((length(),proper_gen:imm_instance()) ->
+ {proper_gen:imm_instance(),proper_gen:imm_instance()})}
+ %% If present, the appropriate form depends on whether get_length is
+ %% defined: if get_length is undefined, this must be in the one-argument
+ %% form (e.g. a tree should be split into its subtrees), else it must be
+ %% in the two-argument form (e.g. a list should be split in two at the
+ %% index provided).
+ | {'join', fun((proper_gen:imm_instance(),proper_gen:imm_instance()) ->
+ proper_gen:imm_instance())}
+ | {'get_indices', fun((proper_types:type(),
+ proper_gen:imm_instance()) -> [index()])}
+ %% If this is a container type, this should return a list of indices we
+ %% can use to remove or insert elements from the given instance.
+ | {'remove', fun((index(),proper_gen:imm_instance()) ->
+ proper_gen:imm_instance())}
+ | {'retrieve', fun((index(), proper_gen:imm_instance() | tuple()
+ | maybe_improper_list(type(),type() | [])) ->
+ value() | type())}
+ | {'update', fun((index(),value(),proper_gen:imm_instance()) ->
+ proper_gen:imm_instance())}
+ | {'constraints', [{constraint_fun(), boolean()}]}
+ %% A list of constraints on instances of this type: each constraint is a
+ %% tuple of a fun that must return 'true' for each valid instance and a
+ %% boolean field that specifies whether the condition is strict.
+ | {'parameters', [{atom(),value()}]}
+ | {'env', term()}
+ | {'subenv', term()}.
+
+
+%%------------------------------------------------------------------------------
+%% Type manipulation functions
+%%------------------------------------------------------------------------------
+
+%% TODO: We shouldn't need the fully qualified type name in the range of these
+%% functions.
+
+%% @private
+%% TODO: just cook/1 ?
+-spec cook_outer(raw_type()) -> proper_types:type().
+cook_outer(Type = {'$type',_Props}) ->
+ Type;
+cook_outer(RawType) ->
+ if
+ is_tuple(RawType) -> tuple(tuple_to_list(RawType));
+ %% CAUTION: this must handle improper lists
+ is_list(RawType) -> fixed_list(RawType);
+ %% default case (covers integers, floats, atoms, binaries, ...):
+ true -> exactly(RawType)
+ end.
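+%% For example, cook_outer({'result', integer()}) is equivalent to
+%% tuple([exactly('result'), integer()]), while cook_outer(42) is simply
+%% exactly(42).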
+
+%% @private
+-spec is_type(term()) -> boolean().
+is_type({'$type',_Props}) ->
+ true;
+is_type(_) ->
+ false.
+
+%% @private
+-spec equal_types(proper_types:type(), proper_types:type()) -> boolean().
+equal_types(SameType, SameType) ->
+ true;
+equal_types(_, _) ->
+ false.
+
+%% @private
+-spec is_raw_type(term()) -> boolean().
+is_raw_type({'$type',_TypeProps}) ->
+ true;
+is_raw_type(X) ->
+ if
+ is_tuple(X) -> is_raw_type_list(tuple_to_list(X));
+ is_list(X) -> is_raw_type_list(X);
+ true -> false
+ end.
+
+-spec is_raw_type_list(maybe_improper_list()) -> boolean().
+%% CAUTION: this must handle improper lists
+is_raw_type_list(List) ->
+ proper_arith:safe_any(fun is_raw_type/1, List).
+
+%% @private
+-spec to_binary(proper_types:type()) -> binary().
+to_binary(Type) ->
+ term_to_binary(Type).
+
+%% @private
+-ifdef(AT_LEAST_17).
+-spec from_binary(binary()) -> proper_types:type().
+-endif.
+from_binary(Binary) ->
+ binary_to_term(Binary).
+
+-spec type_from_list([type_prop()]) -> proper_types:type().
+type_from_list(KeyValueList) ->
+ {'$type',KeyValueList}.
+
+-spec add_prop(type_prop_name(), type_prop_value(), proper_types:type()) ->
+ proper_types:type().
+add_prop(PropName, Value, {'$type',Props}) ->
+ {'$type',lists:keystore(PropName, 1, Props, {PropName, Value})}.
+
+-spec add_props([type_prop()], proper_types:type()) -> proper_types:type().
+add_props(PropList, {'$type',OldProps}) ->
+ {'$type', lists:foldl(fun({N,_}=NV,Acc) ->
+ lists:keystore(N, 1, Acc, NV)
+ end, OldProps, PropList)}.
+
+-spec append_to_prop(type_prop_name(), type_prop_value(),
+ proper_types:type()) -> proper_types:type().
+append_to_prop(PropName, Value, {'$type',Props}) ->
+ Val = case lists:keyfind(PropName, 1, Props) of
+ {PropName, V} ->
+ V;
+ _ ->
+ []
+ end,
+ {'$type', lists:keystore(PropName, 1, Props,
+ {PropName, lists:reverse([Value|Val])})}.
+
+-spec append_list_to_prop(type_prop_name(), [type_prop_value()],
+ proper_types:type()) -> proper_types:type().
+append_list_to_prop(PropName, List, {'$type',Props}) ->
+ {PropName, Val} = lists:keyfind(PropName, 1, Props),
+ {'$type', lists:keystore(PropName, 1, Props, {PropName, Val++List})}.
+
+%% @private
+-spec get_prop(type_prop_name(), proper_types:type()) -> type_prop_value().
+get_prop(PropName, {'$type',Props}) ->
+ {_PropName, Val} = lists:keyfind(PropName, 1, Props),
+ Val.
+
+%% @private
+-spec find_prop(type_prop_name(), proper_types:type()) ->
+ {'ok',type_prop_value()} | 'error'.
+find_prop(PropName, {'$type',Props}) ->
+ case lists:keyfind(PropName, 1, Props) of
+ {PropName, Value} ->
+ {ok, Value};
+ _ ->
+ error
+ end.
+
+%% @private
+-spec new_type([type_prop()], type_kind()) -> proper_types:type().
+new_type(PropList, Kind) ->
+ Type = type_from_list(PropList),
+ add_prop(kind, Kind, Type).
+
+%% @private
+-spec subtype([type_prop()], proper_types:type()) -> proper_types:type().
+%% TODO: should the 'is_instance' function etc. be reset for subtypes?
+subtype(PropList, Type) ->
+ add_props(PropList, Type).
+
+%% @private
+-spec is_inst(proper_gen:instance(), raw_type()) ->
+ boolean() | {'error',{'typeserver',term()}}.
+is_inst(Instance, RawType) ->
+ is_inst(Instance, RawType, 10).
+
+%% @private
+-spec is_inst(proper_gen:instance(), raw_type(), size()) ->
+ boolean() | {'error',{'typeserver',term()}}.
+is_inst(Instance, RawType, Size) ->
+ proper:global_state_init_size(Size),
+ Result = safe_is_instance(Instance, RawType),
+ proper:global_state_erase(),
+ Result.
+
+%% @private
+-spec safe_is_instance(proper_gen:imm_instance(), raw_type()) ->
+ boolean() | {'error',{'typeserver',term()}}.
+safe_is_instance(ImmInstance, RawType) ->
+ try is_instance(ImmInstance, RawType) catch
+ throw:{'$typeserver',SubReason} -> {error, {typeserver,SubReason}}
+ end.
+
+%% @private
+-spec is_instance(proper_gen:imm_instance(), raw_type()) -> boolean().
+%% TODO: If the second argument is not a type, let it pass (don't even check for
+%% term equality?) - if it's a raw type, don't cook it, instead recurse
+%% into it.
+is_instance(ImmInstance, RawType) ->
+ CleanInstance = proper_gen:clean_instance(ImmInstance),
+ Type = cook_outer(RawType),
+ (case get_prop(kind, Type) of
+ wrapper -> wrapper_test(ImmInstance, Type);
+ constructed -> constructed_test(ImmInstance, Type);
+ _ -> false
+ end
+ orelse
+ case find_prop(is_instance, Type) of
+ {ok,{typed, IsInstance}} -> IsInstance(Type, ImmInstance);
+ {ok,IsInstance} -> IsInstance(ImmInstance);
+ error -> false
+ end)
+ andalso weakly(satisfies_all(CleanInstance, Type)).
+
+-spec wrapper_test(proper_gen:imm_instance(), proper_types:type()) -> boolean().
+wrapper_test(ImmInstance, Type) ->
+ %% TODO: check if it's actually a raw type that's returned?
+ lists:any(fun(T) -> is_instance(ImmInstance, T) end, unwrap(Type)).
+
+%% @private
+-ifdef(AT_LEAST_17).
+-spec unwrap(proper_types:type()) -> [proper_types:type(),...].
+-endif.
+%% TODO: check if it's actually a raw type that's returned?
+unwrap(Type) ->
+ RawInnerTypes = proper_gen:alt_gens(Type) ++ [proper_gen:normal_gen(Type)],
+ [cook_outer(T) || T <- RawInnerTypes].
+
+-spec constructed_test(proper_gen:imm_instance(), proper_types:type()) ->
+ boolean().
+constructed_test({'$used',ImmParts,ImmInstance}, Type) ->
+ PartsType = get_prop(parts_type, Type),
+ Combine = get_prop(combine, Type),
+ is_instance(ImmParts, PartsType) andalso
+ begin
+ %% TODO: check if it's actually a raw type that's returned?
+ %% TODO: move construction code to proper_gen
+ %% TODO: non-type => should we check for strict term equality?
+ RawInnerType = Combine(proper_gen:clean_instance(ImmParts)),
+ is_instance(ImmInstance, RawInnerType)
+ end;
+constructed_test({'$to_part',ImmInstance}, Type) ->
+ PartsType = get_prop(parts_type, Type),
+ get_prop(shrink_to_parts, Type) =:= true andalso
+ %% TODO: we reject non-container types
+ get_prop(kind, PartsType) =:= container andalso
+ case {find_prop(internal_type,PartsType),
+ find_prop(internal_types,PartsType)} of
+ {{ok,EachPartType},error} ->
+ %% The parts are in a list or a vector.
+ is_instance(ImmInstance, EachPartType);
+ {error,{ok,PartTypesList}} ->
+ %% The parts are in a fixed list.
+ %% TODO: It should always be a proper list.
+ lists:any(fun(T) -> is_instance(ImmInstance,T) end, PartTypesList)
+ end;
+constructed_test(_CleanInstance, _Type) ->
+ %% TODO: can we do anything better?
+ false.
+
+%% @private
+-spec weakly({boolean(),boolean()}) -> boolean().
+weakly({B1,_B2}) -> B1.
+
+%% @private
+-spec strongly({boolean(),boolean()}) -> boolean().
+strongly({_B1,B2}) -> B2.
+
+-spec satisfies(proper_gen:instance(), {constraint_fun(),boolean()})
+ -> {boolean(),boolean()}.
+satisfies(Instance, {Test,false}) ->
+ {true,Test(Instance)};
+satisfies(Instance, {Test,true}) ->
+ Result = Test(Instance),
+ {Result,Result}.
+
+%% @private
+-spec satisfies_all(proper_gen:instance(), proper_types:type()) ->
+ {boolean(),boolean()}.
+satisfies_all(Instance, Type) ->
+ case find_prop(constraints, Type) of
+ {ok, Constraints} ->
+ L = [satisfies(Instance, C) || C <- Constraints],
+ {L1,L2} = lists:unzip(L),
+ {lists:all(fun(B) -> B end, L1), lists:all(fun(B) -> B end, L2)};
+ error ->
+ {true,true}
+ end.
+
+
+%%------------------------------------------------------------------------------
+%% Type definition functions
+%%------------------------------------------------------------------------------
+
+%% @private
+-spec lazy(proper_gen:nosize_generator()) -> proper_types:type().
+lazy(Gen) ->
+ ?WRAPPER([
+ {generator, Gen}
+ ]).
+
+%% @private
+-spec sized(proper_gen:sized_generator()) -> proper_types:type().
+sized(Gen) ->
+ ?WRAPPER([
+ {generator, Gen}
+ ]).
+
+%% @private
+-spec bind(raw_type(), proper_gen:combine_fun(), boolean()) ->
+ proper_types:type().
+bind(RawPartsType, Combine, ShrinkToParts) ->
+ PartsType = cook_outer(RawPartsType),
+ ?CONSTRUCTED([
+ {parts_type, PartsType},
+ {combine, Combine},
+ {shrink_to_parts, ShrinkToParts}
+ ]).
+
+%% @private
+-spec shrinkwith(proper_gen:nosize_generator(), proper_gen:alt_gens()) ->
+ proper_types:type().
+shrinkwith(Gen, DelaydAltGens) ->
+ ?WRAPPER([
+ {generator, Gen},
+ {alt_gens, DelaydAltGens}
+ ]).
+
+%% @private
+-spec add_constraint(raw_type(), constraint_fun(), boolean()) ->
+ proper_types:type().
+add_constraint(RawType, Condition, IsStrict) ->
+ Type = cook_outer(RawType),
+ append_to_prop(constraints, {Condition,IsStrict}, Type).
+
+%% @private
+-spec native_type(mod_name(), string()) -> proper_types:type().
+native_type(Mod, TypeStr) ->
+ ?WRAPPER([
+ {generator, fun() -> proper_gen:native_type_gen(Mod,TypeStr) end}
+ ]).
+
+
+%%------------------------------------------------------------------------------
+%% Basic types
+%%------------------------------------------------------------------------------
+
+%% @doc All integers between `Low' and `High', bounds included.
+%% `Low' and `High' must be Erlang expressions that evaluate to integers, with
+%% `Low =< High'. Additionally, `Low' and `High' may have the value `inf', in
+%% which case they represent minus infinity and plus infinity respectively.
+%% Instances shrink towards 0 if `Low =< 0 =< High', or towards the bound with
+%% the smallest absolute value otherwise.
+-spec integer(extint(), extint()) -> proper_types:type().
+integer(Low, High) ->
+ ?BASIC([
+ {env, {Low, High}},
+ {generator, {typed, fun integer_gen/2}},
+ {is_instance, {typed, fun integer_is_instance/2}},
+ {shrinkers, [fun number_shrinker/3]}
+ ]).
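+%% For example, integer(1, 100) generates integers between 1 and 100 and
+%% shrinks towards 1, while integer(-10, 10) shrinks towards 0.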
+
+integer_gen(Type, Size) ->
+ {Low, High} = get_prop(env, Type),
+ proper_gen:integer_gen(Size, Low, High).
+
+integer_is_instance(Type, X) ->
+ {Low, High} = get_prop(env, Type),
+ is_integer(X) andalso le(Low, X) andalso le(X, High).
+
+number_shrinker(X, Type, S) ->
+ {Low, High} = get_prop(env, Type),
+ proper_shrink:number_shrinker(X, Low, High, S).
+
+%% @doc All floats between `Low' and `High', bounds included.
+%% `Low' and `High' must be Erlang expressions that evaluate to floats, with
+%% `Low =< High'. Additionally, `Low' and `High' may have the value `inf', in
+%% which case they represent minus infinity and plus infinity respectively.
+%% Instances shrink towards 0.0 if `Low =< 0.0 =< High', or towards the bound
+%% with the smallest absolute value otherwise.
+-spec float(extnum(), extnum()) -> proper_types:type().
+float(Low, High) ->
+ ?BASIC([
+ {env, {Low, High}},
+ {generator, {typed, fun float_gen/2}},
+ {is_instance, {typed, fun float_is_instance/2}},
+ {shrinkers, [fun number_shrinker/3]}
+ ]).
+
+float_gen(Type, Size) ->
+ {Low, High} = get_prop(env, Type),
+ proper_gen:float_gen(Size, Low, High).
+
+float_is_instance(Type, X) ->
+ {Low, High} = get_prop(env, Type),
+ is_float(X) andalso le(Low, X) andalso le(X, High).
+
+%% @private
+-spec le(extnum(), extnum()) -> boolean().
+le(inf, _B) -> true;
+le(_A, inf) -> true;
+le(A, B) -> A =< B.
+
+%% @doc All atoms. All atoms used internally by PropEr start with a '`$'', so
+%% such atoms will never be produced as instances of this type. You should also
+%% refrain from using such atoms in your code, to avoid a potential clash.
+%% Instances shrink towards the empty atom, ''.
+-spec atom() -> proper_types:type().
+atom() ->
+ ?WRAPPER([
+ {generator, fun proper_gen:atom_gen/1},
+ {reverse_gen, fun proper_gen:atom_rev/1},
+ {size_transform, fun(Size) -> erlang:min(Size,255) end},
+ {is_instance, fun atom_is_instance/1}
+ ]).
+
+atom_is_instance(X) ->
+ is_atom(X)
+ %% We return false for atoms starting with '$', since these are
+ %% atoms used internally and never produced by the atom generator.
+ andalso (X =:= '' orelse hd(atom_to_list(X)) =/= $$).
+
+%% @doc All binaries. Instances shrink towards the empty binary, `<<>>'.
+-spec binary() -> proper_types:type().
+binary() ->
+ ?WRAPPER([
+ {generator, fun proper_gen:binary_gen/1},
+ {reverse_gen, fun proper_gen:binary_rev/1},
+ {is_instance, fun erlang:is_binary/1}
+ ]).
+
+%% @doc All binaries with a byte size of `Len'.
+%% `Len' must be an Erlang expression that evaluates to a non-negative integer.
+%% Instances shrink towards binaries of zeroes.
+-spec binary(length()) -> proper_types:type().
+binary(Len) ->
+ ?WRAPPER([
+ {env, Len},
+ {generator, {typed, fun binary_len_gen/1}},
+ {reverse_gen, fun proper_gen:binary_rev/1},
+ {is_instance, {typed, fun binary_len_is_instance/2}}
+ ]).
+
+binary_len_gen(Type) ->
+ Len = get_prop(env, Type),
+ proper_gen:binary_len_gen(Len).
+
+binary_len_is_instance(Type, X) ->
+ Len = get_prop(env, Type),
+ is_binary(X) andalso byte_size(X) =:= Len.
+
+%% @doc All bitstrings. Instances shrink towards the empty bitstring, `<<>>'.
+-spec bitstring() -> proper_types:type().
+bitstring() ->
+ ?WRAPPER([
+ {generator, fun proper_gen:bitstring_gen/1},
+ {reverse_gen, fun proper_gen:bitstring_rev/1},
+ {is_instance, fun erlang:is_bitstring/1}
+ ]).
+
+%% @doc All bitstrings with a bit size of `Len'.
+%% `Len' must be an Erlang expression that evaluates to a non-negative integer.
+%% Instances shrink towards bitstrings of zeroes.
+-spec bitstring(length()) -> proper_types:type().
+bitstring(Len) ->
+ ?WRAPPER([
+ {env, Len},
+ {generator, {typed, fun bitstring_len_gen/1}},
+ {reverse_gen, fun proper_gen:bitstring_rev/1},
+ {is_instance, {typed, fun bitstring_len_is_instance/2}}
+ ]).
+
+bitstring_len_gen(Type) ->
+ Len = get_prop(env, Type),
+ proper_gen:bitstring_len_gen(Len).
+
+bitstring_len_is_instance(Type, X) ->
+ Len = get_prop(env, Type),
+ is_bitstring(X) andalso bit_size(X) =:= Len.
+
+%% @doc All lists containing elements of type `ElemType'.
+%% Instances shrink towards the empty list, `[]'.
+-spec list(ElemType::raw_type()) -> proper_types:type().
+% TODO: subtyping would be useful here (list, vector, fixed_list)
+list(RawElemType) ->
+ ElemType = cook_outer(RawElemType),
+ ?CONTAINER([
+ {generator, {typed, fun list_gen/2}},
+ {is_instance, {typed, fun list_is_instance/2}},
+ {internal_type, ElemType},
+ {get_length, fun erlang:length/1},
+ {split, fun lists:split/2},
+ {join, fun lists:append/2},
+ {get_indices, fun list_get_indices/2},
+ {remove, fun proper_arith:list_remove/2},
+ {retrieve, fun lists:nth/2},
+ {update, fun proper_arith:list_update/3}
+ ]).
+
+list_gen(Type, Size) ->
+ ElemType = get_prop(internal_type, Type),
+ proper_gen:list_gen(Size, ElemType).
+
+list_is_instance(Type, X) ->
+ ElemType = get_prop(internal_type, Type),
+ list_test(X, ElemType).
+
+%% @doc A type that generates exactly the list `List'. Instances shrink towards
+%% shorter sublists of the original list.
+-spec shrink_list([term()]) -> proper_types:type().
+shrink_list(List) ->
+ ?CONTAINER([
+ {env, List},
+ {generator, {typed, fun shrink_list_gen/1}},
+ {is_instance, {typed, fun shrink_list_is_instance/2}},
+ {get_length, fun erlang:length/1},
+ {split, fun lists:split/2},
+ {join, fun lists:append/2},
+ {get_indices, fun list_get_indices/2},
+ {remove, fun proper_arith:list_remove/2}
+ ]).
+
+shrink_list_gen(Type) ->
+ get_prop(env, Type).
+
+shrink_list_is_instance(Type, X) ->
+ List = get_prop(env, Type),
+ is_sublist(X, List).
+
+-spec is_sublist([term()], [term()]) -> boolean().
+is_sublist([], _) -> true;
+is_sublist(_, []) -> false;
+is_sublist([H|T1], [H|T2]) -> is_sublist(T1, T2);
+is_sublist(Slice, [_|T2]) -> is_sublist(Slice, T2).
+
+-spec list_test(proper_gen:imm_instance(), proper_types:type()) -> boolean().
+list_test(X, ElemType) ->
+ is_list(X) andalso lists:all(fun(E) -> is_instance(E, ElemType) end, X).
+
+%% @private
+-spec list_get_indices(proper_gen:generator(), list()) -> [position()].
+list_get_indices(_, List) ->
+ lists:seq(1, length(List)).
+
+%% @private
+%% This assumes that:
+%% - instances of size S are always valid instances of size >S
+%% - any recursive calls inside Gen are lazy
+-spec distlist(size(), proper_gen:sized_generator(), boolean()) ->
+ proper_types:type().
+distlist(Size, Gen, NonEmpty) ->
+ ParentType = case NonEmpty of
+ true -> non_empty(list(Gen(Size)));
+ false -> list(Gen(Size))
+ end,
+ ?SUBTYPE(ParentType, [
+ {subenv, {Size, Gen, NonEmpty}},
+ {generator, {typed, fun distlist_gen/1}}
+ ]).
+
+distlist_gen(Type) ->
+ {Size, Gen, NonEmpty} = get_prop(subenv, Type),
+ proper_gen:distlist_gen(Size, Gen, NonEmpty).
+
+%% @doc All lists of length `Len' containing elements of type `ElemType'.
+%% `Len' must be an Erlang expression that evaluates to a non-negative integer.
+-spec vector(length(), ElemType::raw_type()) -> proper_types:type().
+vector(Len, RawElemType) ->
+ ElemType = cook_outer(RawElemType),
+ ?CONTAINER([
+ {env, Len},
+ {generator, {typed, fun vector_gen/1}},
+ {is_instance, {typed, fun vector_is_instance/2}},
+ {internal_type, ElemType},
+ {get_indices, fun vector_get_indices/2},
+ {retrieve, fun lists:nth/2},
+ {update, fun proper_arith:list_update/3}
+ ]).
+
+vector_gen(Type) ->
+ Len = get_prop(env, Type),
+ ElemType = get_prop(internal_type, Type),
+ proper_gen:vector_gen(Len, ElemType).
+
+vector_is_instance(Type, X) ->
+ Len = get_prop(env, Type),
+ ElemType = get_prop(internal_type, Type),
+ is_list(X)
+ andalso length(X) =:= Len
+ andalso lists:all(fun(E) -> is_instance(E, ElemType) end, X).
+
+vector_get_indices(Type, _X) ->
+ lists:seq(1, get_prop(env, Type)).
+
+%% @doc The union of all types in `ListOfTypes'. `ListOfTypes' can't be empty.
+%% The random instance generator is equally likely to choose any one of the
+%% types in `ListOfTypes'. The shrinking subsystem will always try to shrink an
+%% instance of a type union to an instance of the first type in `ListOfTypes',
+%% thus you should write the simplest case first.
+-spec union(ListOfTypes::[raw_type(),...]) -> proper_types:type().
+union(RawChoices) ->
+ Choices = [cook_outer(C) || C <- RawChoices],
+ ?BASIC([
+ {env, Choices},
+ {generator, {typed, fun union_gen/1}},
+ {is_instance, {typed, fun union_is_instance/2}},
+ {shrinkers, [fun union_shrinker_1/3, fun union_shrinker_2/3]}
+ ]).
+
+union_gen(Type) ->
+ Choices = get_prop(env,Type),
+ proper_gen:union_gen(Choices).
+
+union_is_instance(Type, X) ->
+ Choices = get_prop(env, Type),
+ lists:any(fun(C) -> is_instance(X, C) end, Choices).
+
+union_shrinker_1(X, Type, S) ->
+ Choices = get_prop(env, Type),
+ proper_shrink:union_first_choice_shrinker(X, Choices, S).
+
+union_shrinker_2(X, Type, S) ->
+ Choices = get_prop(env, Type),
+ proper_shrink:union_recursive_shrinker(X, Choices, S).
+
+%% @doc A specialization of {@link union/1}, where each type in `ListOfTypes' is
+%% assigned a frequency. Frequencies must be Erlang expressions that evaluate to
+%% positive integers. Types with larger frequencies are more likely to be chosen
+%% by the random instance generator. The shrinking subsystem will ignore the
+%% frequencies and try to shrink towards the first type in the list.
+-spec weighted_union(ListOfTypes::[{frequency(),raw_type()},...]) ->
+ proper_types:type().
+weighted_union(RawFreqChoices) ->
+ CookFreqType = fun({Freq,RawType}) -> {Freq,cook_outer(RawType)} end,
+ FreqChoices = lists:map(CookFreqType, RawFreqChoices),
+ Choices = [T || {_F,T} <- FreqChoices],
+ ?SUBTYPE(union(Choices), [
+ {subenv, FreqChoices},
+ {generator, {typed, fun weighted_union_gen/1}}
+ ]).
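+%% For example, weighted_union([{3,a}, {1,b}]) generates the atom 'a' about
+%% three times as often as 'b', and always tries to shrink towards 'a'.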
+
+weighted_union_gen(Gen) ->
+ FreqChoices = get_prop(subenv, Gen),
+ proper_gen:weighted_union_gen(FreqChoices).
+
+%% @private
+-spec safe_union([raw_type(),...]) -> proper_types:type().
+safe_union(RawChoices) ->
+ Choices = [cook_outer(C) || C <- RawChoices],
+ subtype(
+ [{subenv, Choices},
+ {generator, {typed, fun safe_union_gen/1}}],
+ union(Choices)).
+
+safe_union_gen(Type) ->
+ Choices = get_prop(subenv, Type),
+ proper_gen:safe_union_gen(Choices).
+
+%% @private
+-spec safe_weighted_union([{frequency(),raw_type()},...]) ->
+ proper_types:type().
+safe_weighted_union(RawFreqChoices) ->
+ CookFreqType = fun({Freq,RawType}) ->
+ {Freq,cook_outer(RawType)} end,
+ FreqChoices = lists:map(CookFreqType, RawFreqChoices),
+ Choices = [T || {_F,T} <- FreqChoices],
+ subtype([{subenv, FreqChoices},
+ {generator, {typed, fun safe_weighted_union_gen/1}}],
+ union(Choices)).
+
+safe_weighted_union_gen(Type) ->
+ FreqChoices = get_prop(subenv, Type),
+ proper_gen:safe_weighted_union_gen(FreqChoices).
+
+%% @doc All tuples whose i-th element is an instance of the type at index i of
+%% `ListOfTypes'. Also written simply as a tuple of types.
+-spec tuple(ListOfTypes::[raw_type()]) -> proper_types:type().
+tuple(RawFields) ->
+ Fields = [cook_outer(F) || F <- RawFields],
+ ?CONTAINER([
+ {env, Fields},
+ {generator, {typed, fun tuple_gen/1}},
+ {is_instance, {typed, fun tuple_is_instance/2}},
+ {internal_types, list_to_tuple(Fields)},
+ {get_indices, fun tuple_get_indices/2},
+ {retrieve, fun erlang:element/2},
+ {update, fun tuple_update/3}
+ ]).
+
+tuple_gen(Type) ->
+ Fields = get_prop(env, Type),
+ proper_gen:tuple_gen(Fields).
+
+tuple_is_instance(Type, X) ->
+ Fields = get_prop(env, Type),
+ is_tuple(X) andalso fixed_list_test(tuple_to_list(X), Fields).
+
+tuple_get_indices(Type, _X) ->
+ lists:seq(1, length(get_prop(env, Type))).
+
+-spec tuple_update(index(), value(), tuple()) -> tuple().
+tuple_update(Index, NewElem, Tuple) ->
+ setelement(Index, Tuple, NewElem).
+
+%% @doc Tuples whose elements are all of type `ElemType'.
+%% Instances shrink towards the 0-size tuple, `{}'.
+-spec loose_tuple(ElemType::raw_type()) -> proper_types:type().
+loose_tuple(RawElemType) ->
+ ElemType = cook_outer(RawElemType),
+ ?WRAPPER([
+ {env, ElemType},
+ {generator, {typed, fun loose_tuple_gen/2}},
+ {reverse_gen, {typed, fun loose_tuple_rev/2}},
+ {is_instance, {typed, fun loose_tuple_is_instance/2}}
+ ]).
+
+loose_tuple_gen(Type, Size) ->
+ ElemType = get_prop(env, Type),
+ proper_gen:loose_tuple_gen(Size, ElemType).
+
+loose_tuple_rev(Type, X) ->
+ ElemType = get_prop(env, Type),
+ proper_gen:loose_tuple_rev(X, ElemType).
+
+loose_tuple_is_instance(Type, X) ->
+ ElemType = get_prop(env, Type),
+ is_tuple(X) andalso list_test(tuple_to_list(X), ElemType).
+
+%% @doc Singleton type consisting only of `E'. `E' must be an evaluated term.
+%% Also written simply as `E'.
+-spec exactly(term()) -> proper_types:type().
+exactly(E) ->
+ ?BASIC([
+ {env, E},
+ {generator, {typed, fun exactly_gen/1}},
+ {is_instance, {typed, fun exactly_is_instance/2}}
+ ]).
+
+exactly_gen(Type) ->
+ E = get_prop(env, Type),
+ proper_gen:exactly_gen(E).
+
+exactly_is_instance(Type, X) ->
+ E = get_prop(env, Type),
+ X =:= E.
+
+%% @doc All lists whose i-th element is an instance of the type at index i of
+%% `ListOfTypes'. Also written simply as a list of types.
+-spec fixed_list(ListOfTypes::maybe_improper_list(raw_type(),raw_type()|[])) ->
+ proper_types:type().
+fixed_list(MaybeImproperRawFields) ->
+ %% CAUTION: must handle improper lists
+ {Fields, Internal, Len, Retrieve, Update} =
+ case proper_arith:cut_improper_tail(MaybeImproperRawFields) of
+ % TODO: have cut_improper_tail return the length and use it in test?
+ {ProperRawHead, ImproperRawTail} ->
+ HeadLen = length(ProperRawHead),
+ CookedHead = [cook_outer(F) || F <- ProperRawHead],
+ CookedTail = cook_outer(ImproperRawTail),
+ {{CookedHead,CookedTail},
+ CookedHead ++ CookedTail,
+ HeadLen + 1,
+ fun(I,L) -> improper_list_retrieve(I, L, HeadLen) end,
+ fun(I,V,L) -> improper_list_update(I, V, L, HeadLen) end};
+ ProperRawFields ->
+ LocalFields = [cook_outer(F) || F <- ProperRawFields],
+ {LocalFields,
+ LocalFields,
+ length(ProperRawFields),
+ fun lists:nth/2,
+ fun proper_arith:list_update/3}
+ end,
+ ?CONTAINER([
+ {env, {Fields, Len}},
+ {generator, {typed, fun fixed_list_gen/1}},
+ {is_instance, {typed, fun fixed_list_is_instance/2}},
+ {internal_types, Internal},
+ {get_indices, fun fixed_list_get_indices/2},
+ {retrieve, Retrieve},
+ {update, Update}
+ ]).
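+%% For example, fixed_list([atom(), integer()]) generates two-element lists
+%% such as [ok, 7], while fixed_list([byte() | binary()]) generates improper
+%% lists such as [42 | <<1,2,3>>].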
+
+fixed_list_gen(Type) ->
+ {Fields, _} = get_prop(env, Type),
+ proper_gen:fixed_list_gen(Fields).
+
+fixed_list_is_instance(Type, X) ->
+ {Fields, _} = get_prop(env, Type),
+ fixed_list_test(X, Fields).
+
+fixed_list_get_indices(Type, _X) ->
+ {_, Len} = get_prop(env, Type),
+ lists:seq(1, Len).
+
+-spec fixed_list_test(proper_gen:imm_instance(),
+ [proper_types:type()] | {[proper_types:type()],
+ proper_types:type()}) ->
+ boolean().
+fixed_list_test(X, {ProperHead,ImproperTail}) ->
+ is_list(X) andalso
+ begin
+ ProperHeadLen = length(ProperHead),
+ proper_arith:head_length(X) >= ProperHeadLen andalso
+ begin
+ {XHead,XTail} = lists:split(ProperHeadLen, X),
+ fixed_list_test(XHead, ProperHead)
+ andalso is_instance(XTail, ImproperTail)
+ end
+ end;
+fixed_list_test(X, ProperFields) ->
+ is_list(X)
+ andalso length(X) =:= length(ProperFields)
+ andalso lists:all(fun({E,T}) -> is_instance(E, T) end,
+ lists:zip(X, ProperFields)).
+
+%% TODO: Move these 2 functions to proper_arith?
+-spec improper_list_retrieve(index(), nonempty_improper_list(value(),value()),
+ pos_integer()) -> value().
+improper_list_retrieve(Index, List, HeadLen) ->
+ case Index =< HeadLen of
+ true -> lists:nth(Index, List);
+ false -> lists:nthtail(HeadLen, List)
+ end.
+
+-spec improper_list_update(index(), value(),
+ nonempty_improper_list(value(),value()),
+ pos_integer()) ->
+ nonempty_improper_list(value(),value()).
+improper_list_update(Index, Value, List, HeadLen) ->
+ case Index =< HeadLen of
+ %% TODO: This happens to work, but is not implied by list_update's spec.
+ true -> proper_arith:list_update(Index, Value, List);
+ false -> lists:sublist(List, HeadLen) ++ Value
+ end.
+
+%% @doc All pure functions that map instances of `ArgTypes' to instances of
+%% `RetType'. The syntax `function(Arity, RetType)' is also acceptable.
+-spec function(ArgTypes::[raw_type()] | arity(), RetType::raw_type()) ->
+ proper_types:type().
+function(Arity, RawRetType) when is_integer(Arity), Arity >= 0, Arity =< 255 ->
+ RetType = cook_outer(RawRetType),
+ ?BASIC([
+ {env, {Arity, RetType}},
+ {generator, {typed, fun function_gen/1}},
+ {is_instance, {typed, fun function_is_instance/2}}
+ ]);
+function(RawArgTypes, RawRetType) ->
+ function(length(RawArgTypes), RawRetType).
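+%% For example, function(2, boolean()) and function([integer(), atom()],
+%% boolean()) describe the same type: only the arity of the argument list is
+%% taken into account.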
+
+function_gen(Type) ->
+ {Arity, RetType} = get_prop(env, Type),
+ proper_gen:function_gen(Arity, RetType).
+
+function_is_instance(Type, X) ->
+ {Arity, RetType} = get_prop(env, Type),
+ is_function(X, Arity)
+ %% TODO: what if it's not a function we produced?
+ andalso equal_types(RetType, proper_gen:get_ret_type(X)).
+
+%% @doc All Erlang terms (that PropEr can produce). For reasons of efficiency,
+%% functions are never produced as instances of this type.<br />
+%% CAUTION: Instances of this type are expensive to produce, shrink and
+%% instance-check, both in terms of processing time and consumed memory. Only
+%% use this type if you are certain that you need it.
+-spec any() -> proper_types:type().
+any() ->
+ AllTypes = [integer(),float(),atom(),bitstring(),?LAZY(loose_tuple(any())),
+ ?LAZY(list(any()))],
+ ?SUBTYPE(union(AllTypes), [
+ {generator, fun proper_gen:any_gen/1}
+ ]).
+
+
+%%------------------------------------------------------------------------------
+%% Type aliases
+%%------------------------------------------------------------------------------
+
+%% @equiv integer(inf, inf)
+-spec integer() -> proper_types:type().
+integer() -> integer(inf, inf).
+
+%% @equiv integer(0, inf)
+-spec non_neg_integer() -> proper_types:type().
+non_neg_integer() -> integer(0, inf).
+
+%% @equiv integer(1, inf)
+-spec pos_integer() -> proper_types:type().
+pos_integer() -> integer(1, inf).
+
+%% @equiv integer(inf, -1)
+-spec neg_integer() -> proper_types:type().
+neg_integer() -> integer(inf, -1).
+
+%% @equiv integer(Low, High)
+-spec range(extint(), extint()) -> proper_types:type().
+range(Low, High) -> integer(Low, High).
+
+%% @equiv float(inf, inf)
+-spec float() -> proper_types:type().
+float() -> float(inf, inf).
+
+%% @equiv float(0.0, inf)
+-spec non_neg_float() -> proper_types:type().
+non_neg_float() -> float(0.0, inf).
+
+%% @equiv union([integer(), float()])
+-spec number() -> proper_types:type().
+number() -> union([integer(), float()]).
+
+%% @doc The atoms `true' and `false'. Instances shrink towards `false'.
+-spec boolean() -> proper_types:type().
+boolean() -> union(['false', 'true']).
+
+%% @equiv integer(0, 255)
+-spec byte() -> proper_types:type().
+byte() -> integer(0, 255).
+
+%% @equiv integer(0, 16#10ffff)
+-spec char() -> proper_types:type().
+char() -> integer(0, 16#10ffff).
+
+%% @equiv list(any())
+-spec list() -> proper_types:type().
+list() -> list(any()).
+
+%% @equiv loose_tuple(any())
+-spec tuple() -> proper_types:type().
+tuple() -> loose_tuple(any()).
+
+%% @equiv list(char())
+-spec string() -> proper_types:type().
+string() -> list(char()).
+
+%% @equiv weighted_union(FreqChoices)
+-spec wunion([{frequency(),raw_type()},...]) -> proper_types:type().
+wunion(FreqChoices) -> weighted_union(FreqChoices).
+
+%% @equiv any()
+-spec term() -> proper_types:type().
+term() -> any().
+
+%% @equiv union([non_neg_integer(), 'infinity'])
+-spec timeout() -> proper_types:type().
+timeout() -> union([non_neg_integer(), 'infinity']).
+
+%% @equiv integer(0, 255)
+-spec arity() -> proper_types:type().
+arity() -> integer(0, 255).
+
+
+%%------------------------------------------------------------------------------
+%% QuickCheck compatibility types
+%%------------------------------------------------------------------------------
+
+%% @doc Small integers (bound by the current value of the `size' parameter).
+%% Instances shrink towards `0'.
+-spec int() -> proper_types:type().
+int() -> ?SIZED(Size, integer(-Size,Size)).
+
+%% @doc Small non-negative integers (bound by the current value of the `size'
+%% parameter). Instances shrink towards `0'.
+-spec nat() -> proper_types:type().
+nat() -> ?SIZED(Size, integer(0,Size)).
+
+%% @equiv integer()
+-spec largeint() -> proper_types:type().
+largeint() -> integer().
+
+%% @equiv float()
+-spec real() -> proper_types:type().
+real() -> float().
+
+%% @equiv boolean()
+-spec bool() -> proper_types:type().
+bool() -> boolean().
+
+%% @equiv integer(Low, High)
+-spec choose(extint(), extint()) -> proper_types:type().
+choose(Low, High) -> integer(Low, High).
+
+%% @equiv union(Choices)
+-spec elements([raw_type(),...]) -> proper_types:type().
+elements(Choices) -> union(Choices).
+
+%% @equiv union(Choices)
+-spec oneof([raw_type(),...]) -> proper_types:type().
+oneof(Choices) -> union(Choices).
+
+%% @equiv weighted_union(Choices)
+-spec frequency([{frequency(),raw_type()},...]) -> proper_types:type().
+frequency(FreqChoices) -> weighted_union(FreqChoices).
+
+%% @equiv exactly(E)
+-spec return(term()) -> proper_types:type().
+return(E) -> exactly(E).
+
+%% @doc Adds a default value, `Default', to `Type'.
+%% The default serves as a primary shrinking target for instances, while it
+%% is also chosen by the random instance generation subsystem half the time.
+-spec default(raw_type(), raw_type()) -> proper_types:type().
+default(Default, Type) ->
+ union([Default, Type]).
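+%% For example, default(42, integer()) produces 42 about half of the time and
+%% a random integer otherwise.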
+
+%% @doc All sorted lists containing elements of type `ElemType'.
+%% Instances shrink towards the empty list, `[]'.
+-spec orderedlist(ElemType::raw_type()) -> proper_types:type().
+orderedlist(RawElemType) ->
+ ?LET(L, list(RawElemType), lists:sort(L)).
+
+%% @equiv function(0, RetType)
+-spec function0(raw_type()) -> proper_types:type().
+function0(RetType) ->
+ function(0, RetType).
+
+%% @equiv function(1, RetType)
+-spec function1(raw_type()) -> proper_types:type().
+function1(RetType) ->
+ function(1, RetType).
+
+%% @equiv function(2, RetType)
+-spec function2(raw_type()) -> proper_types:type().
+function2(RetType) ->
+ function(2, RetType).
+
+%% @equiv function(3, RetType)
+-spec function3(raw_type()) -> proper_types:type().
+function3(RetType) ->
+ function(3, RetType).
+
+%% @equiv function(4, RetType)
+-spec function4(raw_type()) -> proper_types:type().
+function4(RetType) ->
+ function(4, RetType).
+
+%% @doc A specialization of {@link default/2}, where `Default' and `Type' are
+%% assigned weights to be considered by the random instance generator. The
+%% shrinking subsystem will ignore the weights and try to shrink using the
+%% default value.
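+%% For instance, a type written as
+%% ```weighted_default({9, 'a'}, {1, atom()})'''
+%% generates the atom `a' roughly nine times out of ten and a random atom
+%% otherwise, while still shrinking towards `a'.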
+-spec weighted_default({frequency(),raw_type()}, {frequency(),raw_type()}) ->
+ proper_types:type().
+weighted_default(Default, Type) ->
+ weighted_union([Default, Type]).
+
+
+%%------------------------------------------------------------------------------
+%% Additional type specification functions
+%%------------------------------------------------------------------------------
+
+%% @doc Overrides the `size' parameter used when generating instances of
+%% `Type' with `NewSize'. Has no effect on size-less types, such as unions.
+%% Also, this will not affect the generation of any internal types contained in
+%% `Type', such as the elements of a list - those will still be generated
+%% using the test-wide value of `size'. One use of this function is to modify
+%% types to produce instances that grow faster or slower, like so:
+%% ```?SIZED(Size, resize(Size * 2, list(integer())))'''
+%% The above specifies a list type that grows twice as fast as normal lists.
+-spec resize(size(), Type::raw_type()) -> proper_types:type().
+resize(NewSize, RawType) ->
+ Type = cook_outer(RawType),
+ case find_prop(size_transform, Type) of
+ {ok,Transform} ->
+ add_prop(size_transform, fun(_S) -> Transform(NewSize) end, Type);
+ error ->
+ add_prop(size_transform, fun(_S) -> NewSize end, Type)
+ end.
+
+%% @doc This is a predefined constraint that can be applied to random-length
+%% list and binary types to ensure that the produced values are never empty.
+%%
+%% (e.g. {@link list/0}, {@link string/0}, {@link binary/0}).
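+%% In particular, `non_empty(list(integer()))' will never produce `[]', and
+%% `non_empty(binary())' will never produce `<<>>'.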
+-spec non_empty(ListType::raw_type()) -> proper_types:type().
+non_empty(RawListType) ->
+ ?SUCHTHAT(L, RawListType, L =/= [] andalso L =/= <<>>).
+
+%% @doc Creates a new type which is equivalent to `Type', but whose instances
+%% are never shrunk by the shrinking subsystem.
+-spec noshrink(Type::raw_type()) -> proper_types:type().
+noshrink(RawType) ->
+ add_prop(noshrink, true, cook_outer(RawType)).
+
+%% @doc Associates the atom key `Parameter' with the value `Value' while
+%% generating instances of `Type'.
+-spec with_parameter(atom(), value(), Type::raw_type()) -> proper_types:type().
+with_parameter(Parameter, Value, RawType) ->
+ with_parameters([{Parameter,Value}], RawType).
+
+%% @doc Similar to {@link with_parameter/3}, but accepts a list of
+%% `{Parameter, Value}' pairs.
+-spec with_parameters([{atom(),value()}], Type::raw_type()) ->
+ proper_types:type().
+with_parameters(PVlist, RawType) ->
+ Type = cook_outer(RawType),
+ case find_prop(parameters, Type) of
+ {ok,Params} when is_list(Params) ->
+ append_list_to_prop(parameters, PVlist, Type);
+ error ->
+ add_prop(parameters, PVlist, Type)
+ end.
+
+%% @doc Returns the value associated with `Parameter', or `Default' in case
+%% `Parameter' is not associated with any value.
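+%% As a sketch of how the two sides fit together (`my_vector/0' is a
+%% hypothetical generator, not part of this module):
+%% ```my_vector() -> ?LAZY(vector(parameter(len, 3), integer())).'''
+%% Instances of `with_parameter(len, 10, my_vector())' should then be lists
+%% of length 10, while `my_vector()' on its own falls back to the default
+%% length 3.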
+-spec parameter(atom(), value()) -> value().
+parameter(Parameter, Default) ->
+ Parameters =
+ case erlang:get('$parameters') of
+ undefined -> [];
+ List -> List
+ end,
+ proplists:get_value(Parameter, Parameters, Default).
+
+%% @equiv parameter(Parameter, undefined)
+-spec parameter(atom()) -> value().
+parameter(Parameter) ->
+ parameter(Parameter, undefined).
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_typeserver.erl b/lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_typeserver.erl
new file mode 100644
index 0000000000..b16075763f
--- /dev/null
+++ b/lib/dialyzer/test/behaviour_SUITE_data/src/proper/proper_typeserver.erl
@@ -0,0 +1,2411 @@
+%%% Copyright 2010-2016 Manolis Papadakis <[email protected]>,
+%%% Eirini Arvaniti <[email protected]>
+%%% and Kostis Sagonas <[email protected]>
+%%%
+%%% This file is part of PropEr.
+%%%
+%%% PropEr is free software: you can redistribute it and/or modify
+%%% it under the terms of the GNU General Public License as published by
+%%% the Free Software Foundation, either version 3 of the License, or
+%%% (at your option) any later version.
+%%%
+%%% PropEr is distributed in the hope that it will be useful,
+%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
+%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+%%% GNU General Public License for more details.
+%%%
+%%% You should have received a copy of the GNU General Public License
+%%% along with PropEr. If not, see <http://www.gnu.org/licenses/>.
+
+%%% @copyright 2010-2016 Manolis Papadakis, Eirini Arvaniti and Kostis Sagonas
+%%% @version {@version}
+%%% @author Manolis Papadakis
+
+%%% @doc Erlang type system - PropEr type system integration module.
+%%%
+%%% PropEr can parse types expressed in Erlang's type language and convert them
+%%% to its own type format. Such expressions can be used instead of regular type
+%%% constructors in the second argument of `?FORALL's. No extra notation is
+%%% required; PropEr will detect which calls correspond to native types by
+%%% applying a parse transform during compilation. This parse transform is
+%%% automatically applied to any module that includes the `proper.hrl' header
+%%% file. You can disable this feature by compiling your modules with
+%%% `-DPROPER_NO_TRANS'. Note that this will currently also disable the
+%%% automatic exporting of properties.
+%%%
+%%% The use of native types in properties is subject to the following usage
+%%% rules:
+%%% <ul>
+%%% <li>Native types cannot be used outside of `?FORALL's.</li>
+%%% <li>Inside `?FORALL's, native types can be combined with other native
+%%% types, and even with PropEr types, inside tuples and lists (the constructs
+%%% `[...]', `{...}' and `++' are all allowed).</li>
+%%% <li>All other constructs of Erlang's built-in type system (e.g. `|' for
+%%% union, `_' as an alias of `any()', `<<_:_>>' binary type syntax and
+%%% `fun((...) -> ...)' function type syntax) are not allowed in `?FORALL's,
+%%% because they are rejected by the Erlang parser.</li>
+%%% <li>Anything other than a tuple constructor, list constructor, `++'
+%%% application, local or remote call will automatically be considered a
+%%% PropEr type constructor and not be processed further by the parse
+%%% transform.</li>
+%%% <li>Parametric native types are fully supported; of course, they can only
+%%% appear instantiated in a `?FORALL'. The arguments of parametric native
+%%% types are always interpreted as native types.</li>
+%%% <li>Parametric PropEr types, on the other hand, can take any kind of
+%%% argument. You can even mix native and PropEr types in the arguments of a
+%%% PropEr type. For example, assuming that the following declarations are
+%%% present:
+%%% ``` my_proper_type() -> ?LET(...).
+%%% -type my_native_type() :: ... .'''
+%%% Then the following expressions are all legal:
+%%% ``` vector(2, my_native_type())
+%%% function(0, my_native_type())
+%%% union([my_proper_type(), my_native_type()])''' </li>
+%%% <li>Some type constructors can take native types as arguments (but only
+%%% inside `?FORALL's):
+%%% <ul>
+%%% <li>`?SUCHTHAT', `?SUCHTHATMAYBE', `non_empty', `noshrink': these work
+%%% with native types too</li>
+%%% <li>`?LAZY', `?SHRINK', `resize', `?SIZED': these don't work with native
+%%% types</li>
+%%% <li>`?LET', `?LETSHRINK': only the top-level base type can be a native
+%%% type</li>
+%%% </ul></li>
+%%% <li>Native type declarations in the `?FORALL's of a module can reference any
+%%% custom type declared in a `-type' or `-opaque' attribute of the same
+%%% module, as long as no module identifier is used.</li>
+%%% <li>Typed records cannot be referenced inside `?FORALL's using the
+%%% `#rec_name{}' syntax. To use a typed record in a `?FORALL', enclose the
+%%% record in a custom type like so:
+%%% ``` -type rec_name() :: #rec_name{}. '''
+%%% and use the custom type instead.</li>
+%%% <li>`?FORALL's may contain references to self-recursive or mutually
+%%% recursive native types, so long as each type in the hierarchy has a clear
+%%% base case.
+%%% Currently, PropEr requires that the toplevel of any recursive type
+%%% declaration is either a (maybe empty) list or a union containing at least
+%%% one choice that doesn't reference the type directly (it may, however,
+%%% reference any of the types that are mutually recursive with it). This
+%%% means, for example, that some valid recursive type declarations, such as
+%%% this one:
+%%% ``` ?FORALL(..., a(), ...) '''
+%%% where:
+%%% ``` -type a() :: {'a','none' | a()}. '''
+%%% are not accepted by PropEr. However, such types can be rewritten in a way
+%%% that allows PropEr to parse them:
+%%% ``` ?FORALL(..., a(), ...) '''
+%%% where:
+%%% ``` -type a() :: {'a','none'} | {'a',a()}. '''
+%%% This also means that recursive record declarations are not allowed:
+%%% ``` ?FORALL(..., rec(), ...) '''
+%%% where:
+%%% ``` -type rec() :: #rec{}.
+%%% -record(rec, {a = 0 :: integer(), b = 'nil' :: 'nil' | #rec{}}). '''
+%%% A little rewriting can usually remedy this problem as well:
+%%% ``` ?FORALL(..., rec(), ...) '''
+%%% where:
+%%% ``` -type rec() :: #rec{b :: 'nil'} | #rec{b :: rec()}.
+%%% -record(rec, {a = 0 :: integer(), b = 'nil' :: 'nil' | #rec{}}). '''
+%%% </li>
+%%% <li>Remote types may be referenced in a `?FORALL', so long as they are
+%%% exported from the remote module. Currently, PropEr requires that any
+%%% remote modules whose types are directly referenced from within properties
+%%% are present in the code path at compile time, either compiled with
+%%% `debug_info' enabled or in source form. If PropEr cannot find a remote
+%%% module at all, finds only a compiled object file with no debug
+%%% information or fails to compile the source file, all calls to that module
+%%% will automatically be considered calls to PropEr type constructors.</li>
+%%% <li>For native types to be translated correctly, both the module that
+%%% contains the `?FORALL' declaration as well as any module that contains
+%%% the declaration of a type referenced (directly or indirectly) from inside
+%%% a `?FORALL' must be present in the code path at runtime, either compiled
+%%% with `debug_info' enabled or in source form.</li>
+%%% <li>Local types with the same name as an auto-imported BIF are not accepted
+%%% by PropEr, unless the BIF in question has been declared in a
+%%% `no_auto_import' option.</li>
+%%% <li>When an expression can be interpreted both as a PropEr type and as a
+%%% native type, the former takes precedence. This means that a function
+%%% `foo()' will shadow a type `foo()' if they are both present in the module.
+%%% The same rule applies to remote functions and types as well.</li>
+%%% <li>The above may cause some confusion when list syntax is used:
+%%% <ul>
+%%% <li>The expression `[integer()]' can be interpreted both ways, so the
+%%% PropEr way applies. Therefore, instances of this type will always be
+%%% lists of length 1, not arbitrary integer lists, as would be expected
+%%% when interpreting the expression as a native type.</li>
+%%% <li>Assuming that a custom type foo/1 has been declared, the expression
+%%% `foo([integer()])' can only be interpreted as a native type declaration,
+%%% which means that the generic type of integer lists will be passed to
+%%% `foo/1'.</li>
+%%% </ul></li>
+%%% <li>Currently, PropEr does not detect the following mistakes:
+%%% <ul>
+%%% <li>inline record-field specializations that reference non-existent
+%%% fields</li>
+%%% <li>type parameters that are not present in the RHS of a `-type'
+%%% declaration</li>
+%%% <li>using `_' as a type variable in the LHS of a `-type' declaration</li>
+%%% <li>using the same variable in more than one position in the LHS of a
+%%% `-type' declaration</li>
+%%% </ul>
+%%% </li>
+%%% </ul>
+%%%
+%%% You can use <a href="#index">these</a> functions to try out the type
+%%% translation subsystem.
+%%%
+%%% CAUTION: These functions should never be used inside properties. They are
+%%% meant for demonstration purposes only.
+
+-module(proper_typeserver).
+-behaviour(gen_server).
+-export([demo_translate_type/2, demo_is_instance/3]).
+
+-export([start/0, restart/0, stop/0, create_spec_test/3, get_exp_specced/1,
+ is_instance/3, translate_type/1]).
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
+ code_change/3]).
+-export([get_exp_info/1, match/2]).
+
+-export_type([imm_type/0, mod_exp_types/0, mod_exp_funs/0]).
+
+-include("proper_internal.hrl").
+
+
+%%------------------------------------------------------------------------------
+%% Macros
+%%------------------------------------------------------------------------------
+
+-define(SRC_FILE_EXT, ".erl").
+
+-ifdef(AT_LEAST_19).
+-define(anno(L), erl_anno:new(L)).
+-else.
+-define(anno(L), L).
+-endif.
+
+%% Starting with 18.0 we need to handle both 'type' and 'user_type' tags;
+%% prior Erlang/OTP releases had only 'type' as a tag.
+-define(IS_TYPE_TAG(T), (T =:= type orelse T =:= user_type)).
+
+%% CAUTION: all these must be sorted
+-define(STD_TYPES_0,
+ [any,arity,atom,binary,bitstring,bool,boolean,byte,char,float,integer,
+ list,neg_integer,non_neg_integer,number,pos_integer,string,term,
+ timeout]).
+-define(HARD_ADTS,
+ %% gb_trees:iterator and gb_sets:iterator are NOT hardcoded
+ [{{array,0},array}, {{array,1},proper_array},
+ {{dict,0},dict}, {{dict,2},proper_dict},
+ {{gb_set,0},gb_sets}, {{gb_set,1},proper_gb_sets},
+ {{gb_tree,0},gb_trees}, {{gb_tree,2},proper_gb_trees},
+ {{orddict,2},proper_orddict},
+ {{ordset,1},proper_ordsets},
+ {{queue,0},queue}, {{queue,1},proper_queue},
+ {{set,0},sets}, {{set,1},proper_sets}]).
+-define(HARD_ADT_MODS,
+ [{array, [{{array,0},
+ {{type,0,record,[{atom,0,array}]},[]}}]},
+ {dict, [{{dict,0},
+ {{type,0,record,[{atom,0,dict}]},[]}}]},
+ {gb_sets, [{{gb_set,0},
+ {{type,0,tuple,[{type,0,non_neg_integer,[]},
+ {type,0,gb_set_node,[]}]},[]}}]},
+ {gb_trees, [{{gb_tree,0},
+ {{type,0,tuple,[{type,0,non_neg_integer,[]},
+ {type,0,gb_tree_node,[]}]},[]}}]},
+ %% Our parametric ADTs are already declared as normal types, we just
+ %% need to change them to opaques.
+ {proper_array, [{{array,1},already_declared}]},
+ {proper_dict, [{{dict,2},already_declared}]},
+ {proper_gb_sets, [{{gb_set,1},already_declared},
+ {{iterator,1},already_declared}]},
+ {proper_gb_trees, [{{gb_tree,2},already_declared},
+ {{iterator,2},already_declared}]},
+ {proper_orddict, [{{orddict,2},already_declared}]},
+ {proper_ordsets, [{{ordset,1},already_declared}]},
+ {proper_queue, [{{queue,1},already_declared}]},
+ {proper_sets, [{{set,1},already_declared}]},
+ {queue, [{{queue,0},
+ {{type,0,tuple,[{type,0,list,[]},{type,0,list,[]}]},[]}}]},
+ {sets, [{{set,0},
+ {{type,0,record,[{atom,0,set}]},[]}}]}]).
+
+
+%%------------------------------------------------------------------------------
+%% Types
+%%------------------------------------------------------------------------------
+
+-type type_name() :: atom().
+-type var_name() :: atom(). %% TODO: also integers?
+-type field_name() :: atom().
+
+-type type_kind() :: 'type' | 'record'.
+-type type_ref() :: {type_kind(),type_name(),arity()}.
+-ifdef(NO_MODULES_IN_OPAQUES).
+-type substs_dict() :: dict(). %% dict(field_name(),ret_type())
+-else.
+-type substs_dict() :: dict:dict(field_name(),ret_type()).
+-endif.
+-type full_type_ref() :: {mod_name(),type_kind(),type_name(),
+ [ret_type()] | substs_dict()}.
+-type symb_info() :: 'not_symb' | {'orig_abs',abs_type()}.
+-type type_repr() :: {'abs_type',abs_type(),[var_name()],symb_info()}
+ | {'cached',fin_type(),abs_type(),symb_info()}
+ | {'abs_record',[{field_name(),abs_type()}]}.
+-type gen_fun() :: fun((size()) -> fin_type()).
+-type rec_fun() :: fun(([gen_fun()],size()) -> fin_type()).
+-type rec_arg() :: {boolean() | {'list',boolean(),rec_fun()},full_type_ref()}.
+-type rec_args() :: [rec_arg()].
+-type ret_type() :: {'simple',fin_type()} | {'rec',rec_fun(),rec_args()}.
+-type rec_fun_info() :: {pos_integer(),pos_integer(),[arity(),...],
+ [rec_fun(),...]}.
+
+-type imm_type_ref() :: {type_name(),arity()}.
+-type hard_adt_repr() :: {abs_type(),[var_name()]} | 'already_declared'.
+-type fun_ref() :: {fun_name(),arity()}.
+-type fun_repr() :: fun_clause_repr().
+-type fun_clause_repr() :: {[abs_type()],abs_type()}.
+-type proc_fun_ref() :: {fun_name(),[abs_type()],abs_type()}.
+-type full_imm_type_ref() :: {mod_name(),type_name(),arity()}.
+-type imm_stack() :: [full_imm_type_ref()].
+-type pat_field() :: 0 | 1 | atom().
+-type pattern() :: loose_tuple(pat_field()).
+-type next_step() :: 'none' | 'take_head' | {'match_with',pattern()}.
+
+-ifdef(NO_MODULES_IN_OPAQUES).
+%% @private_type
+-type mod_exp_types() :: set(). %% set(imm_type_ref())
+-type mod_types() :: dict(). %% dict(type_ref(),type_repr())
+%% @private_type
+-type mod_exp_funs() :: set(). %% set(fun_ref())
+-type mod_specs() :: dict(). %% dict(fun_ref(),fun_repr())
+-else.
+%% @private_type
+-type mod_exp_types() :: sets:set(imm_type_ref()).
+-type mod_types() :: dict:dict(type_ref(),type_repr()).
+%% @private_type
+-type mod_exp_funs() :: sets:set(fun_ref()).
+-type mod_specs() :: dict:dict(fun_ref(),fun_repr()).
+-endif.
+
+-ifdef(NO_MODULES_IN_OPAQUES).
+-record(state,
+ {cached = dict:new() :: dict(), %% dict(imm_type(),fin_type())
+ exp_types = dict:new() :: dict(), %% dict(mod_name(),mod_exp_types())
+ types = dict:new() :: dict(), %% dict(mod_name(),mod_types())
+ exp_specs = dict:new() :: dict()}). %% dict(mod_name(),mod_specs())
+-else.
+-record(state,
+ {cached = dict:new() :: dict:dict(imm_type(),fin_type()),
+ exp_types = dict:new() :: dict:dict(mod_name(),mod_exp_types()),
+ types = dict:new() :: dict:dict(mod_name(),mod_types()),
+ exp_specs = dict:new() :: dict:dict(mod_name(),mod_specs())}).
+-endif.
+-type state() :: #state{}.
+
+-record(mod_info,
+ {mod_exp_types = sets:new() :: mod_exp_types(),
+ mod_types = dict:new() :: mod_types(),
+ mod_opaques = sets:new() :: mod_exp_types(),
+ mod_exp_funs = sets:new() :: mod_exp_funs(),
+ mod_specs = dict:new() :: mod_specs()}).
+-type mod_info() :: #mod_info{}.
+
+-type stack() :: [full_type_ref() | 'tuple' | 'list' | 'union' | 'fun'].
+-ifdef(NO_MODULES_IN_OPAQUES).
+-type var_dict() :: dict(). %% dict(var_name(),ret_type())
+-else.
+-type var_dict() :: dict:dict(var_name(),ret_type()).
+-endif.
+%% @private_type
+-type imm_type() :: {mod_name(),string()}.
+%% @alias
+-type fin_type() :: proper_types:type().
+-type tagged_result(T) :: {'ok',T} | 'error'.
+-type tagged_result2(T,S) :: {'ok',T,S} | 'error'.
+%% @alias
+-type rich_result(T) :: {'ok',T} | {'error',term()}.
+-type rich_result2(T,S) :: {'ok',T,S} | {'error',term()}.
+-type false_positive_mfas() :: proper:false_positive_mfas().
+
+-type server_call() :: {'create_spec_test',mfa(),timeout(),false_positive_mfas()}
+ | {'get_exp_specced',mod_name()}
+ | {'get_type_repr',mod_name(),type_ref(),boolean()}
+ | {'translate_type',imm_type()}.
+-type server_response() :: rich_result(proper:test())
+ | rich_result([mfa()])
+ | rich_result(type_repr())
+ | rich_result(fin_type()).
+
+
+%%------------------------------------------------------------------------------
+%% Server interface functions
+%%------------------------------------------------------------------------------
+
+%% @private
+-spec start() -> 'ok'.
+start() ->
+ {ok,TypeserverPid} = gen_server:start_link(?MODULE, dummy, []),
+ put('$typeserver_pid', TypeserverPid),
+ ok.
+
+%% @private
+-spec restart() -> 'ok'.
+restart() ->
+ TypeserverPid = get('$typeserver_pid'),
+ case (TypeserverPid =:= undefined orelse not is_process_alive(TypeserverPid)) of
+ true -> start();
+ false -> ok
+ end.
+
+%% @private
+-spec stop() -> 'ok'.
+stop() ->
+ TypeserverPid = get('$typeserver_pid'),
+ erase('$typeserver_pid'),
+ gen_server:cast(TypeserverPid, stop).
+
+%% @private
+-spec create_spec_test(mfa(), timeout(), false_positive_mfas()) -> rich_result(proper:test()).
+create_spec_test(MFA, SpecTimeout, FalsePositiveMFAs) ->
+ TypeserverPid = get('$typeserver_pid'),
+ gen_server:call(TypeserverPid, {create_spec_test,MFA,SpecTimeout,FalsePositiveMFAs}).
+
+%% @private
+-spec get_exp_specced(mod_name()) -> rich_result([mfa()]).
+get_exp_specced(Mod) ->
+ TypeserverPid = get('$typeserver_pid'),
+ gen_server:call(TypeserverPid, {get_exp_specced,Mod}).
+
+-spec get_type_repr(mod_name(), type_ref(), boolean()) ->
+ rich_result(type_repr()).
+get_type_repr(Mod, TypeRef, IsRemote) ->
+ TypeserverPid = get('$typeserver_pid'),
+ gen_server:call(TypeserverPid, {get_type_repr,Mod,TypeRef,IsRemote}).
+
+%% @private
+-spec translate_type(imm_type()) -> rich_result(fin_type()).
+translate_type(ImmType) ->
+ TypeserverPid = get('$typeserver_pid'),
+ gen_server:call(TypeserverPid, {translate_type,ImmType}).
+
+%% @doc Translates the native type expression `TypeExpr' (which should be
+%% provided inside a string) into a PropEr type, which can then be passed to any
+%% of the demo functions defined in the {@link proper_gen} module. PropEr acts
+%% as if it found this type expression inside the code of module `Mod'.
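+%% For instance, assuming a module `mymod' (a hypothetical name) is loadable,
+%% ```demo_translate_type(mymod, "[{atom(),integer()}]")'''
+%% should return `{ok,Type}' where `Type' is the PropEr type of lists of
+%% `{atom(),integer()}' pairs.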
+-spec demo_translate_type(mod_name(), string()) -> rich_result(fin_type()).
+demo_translate_type(Mod, TypeExpr) ->
+ start(),
+ Result = translate_type({Mod,TypeExpr}),
+ stop(),
+ Result.
+
+%% @doc Checks if `Term' is a valid instance of native type `TypeExpr' (which
+%% should be provided inside a string). PropEr acts as if it found this type
+%% expression inside the code of module `Mod'.
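+%% For instance, with the same hypothetical module `mymod' in the code path,
+%% ```demo_is_instance({foo,42}, mymod, "{atom(),integer()}")'''
+%% should return `true'.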
+-spec demo_is_instance(term(), mod_name(), string()) ->
+ boolean() | {'error',term()}.
+demo_is_instance(Term, Mod, TypeExpr) ->
+ case parse_type(TypeExpr) of
+ {ok,TypeForm} ->
+ start(),
+ Result =
+ %% Force the typeserver to load the module.
+ case translate_type({Mod,"integer()"}) of
+ {ok,_FinType} ->
+ try is_instance(Term, Mod, TypeForm)
+ catch
+ throw:{'$typeserver',Reason} -> {error, Reason}
+ end;
+ {error,_Reason} = Error ->
+ Error
+ end,
+ stop(),
+ Result;
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+
+%%------------------------------------------------------------------------------
+%% Implementation of gen_server interface
+%%------------------------------------------------------------------------------
+
+%% @private
+-spec init(_) -> {'ok',state()}.
+init(_) ->
+ {ok, #state{}}.
+
+%% @private
+-spec handle_call(server_call(), _, state()) ->
+ {'reply',server_response(),state()}.
+handle_call({create_spec_test,MFA,SpecTimeout,FalsePositiveMFAs}, _From, State) ->
+ case create_spec_test(MFA, SpecTimeout, FalsePositiveMFAs, State) of
+ {ok,Test,NewState} ->
+ {reply, {ok,Test}, NewState};
+ {error,_Reason} = Error ->
+ {reply, Error, State}
+ end;
+handle_call({get_exp_specced,Mod}, _From, State) ->
+ case get_exp_specced(Mod, State) of
+ {ok,MFAs,NewState} ->
+ {reply, {ok,MFAs}, NewState};
+ {error,_Reason} = Error ->
+ {reply, Error, State}
+ end;
+handle_call({get_type_repr,Mod,TypeRef,IsRemote}, _From, State) ->
+ case get_type_repr(Mod, TypeRef, IsRemote, State) of
+ {ok,TypeRepr,NewState} ->
+ {reply, {ok,TypeRepr}, NewState};
+ {error,_Reason} = Error ->
+ {reply, Error, State}
+ end;
+handle_call({translate_type,ImmType}, _From, State) ->
+ case translate_type(ImmType, State) of
+ {ok,FinType,NewState} ->
+ {reply, {ok,FinType}, NewState};
+ {error,_Reason} = Error ->
+ {reply, Error, State}
+ end.
+
+%% @private
+-spec handle_cast('stop', state()) -> {'stop','normal',state()}.
+handle_cast(stop, State) ->
+ {stop, normal, State}.
+
+%% @private
+-spec handle_info(term(), state()) -> {'stop',{'received_info',term()},state()}.
+handle_info(Info, State) ->
+ {stop, {received_info,Info}, State}.
+
+%% @private
+-spec terminate(term(), state()) -> 'ok'.
+terminate(_Reason, _State) ->
+ ok.
+
+%% @private
+-spec code_change(term(), state(), _) -> {'ok',state()}.
+code_change(_OldVsn, State, _) ->
+ {ok, State}.
+
+
+%%------------------------------------------------------------------------------
+%% Top-level interface
+%%------------------------------------------------------------------------------
+
+-spec create_spec_test(mfa(), timeout(), false_positive_mfas(), state()) ->
+ rich_result2(proper:test(),state()).
+create_spec_test(MFA, SpecTimeout, FalsePositiveMFAs, State) ->
+ case get_exp_spec(MFA, State) of
+ {ok,FunRepr,NewState} ->
+ make_spec_test(MFA, FunRepr, SpecTimeout, FalsePositiveMFAs, NewState);
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec get_exp_spec(mfa(), state()) -> rich_result2(fun_repr(),state()).
+get_exp_spec({Mod,Fun,Arity} = MFA, State) ->
+ case add_module(Mod, State) of
+ {ok,#state{exp_specs = ExpSpecs} = NewState} ->
+ ModExpSpecs = dict:fetch(Mod, ExpSpecs),
+ case dict:find({Fun,Arity}, ModExpSpecs) of
+ {ok,FunRepr} ->
+ {ok, FunRepr, NewState};
+ error ->
+ {error, {function_not_exported_or_specced,MFA}}
+ end;
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec make_spec_test(mfa(), fun_repr(), timeout(), false_positive_mfas(), state()) ->
+ rich_result2(proper:test(),state()).
+make_spec_test({Mod,_Fun,_Arity}=MFA, {Domain,_Range}=FunRepr, SpecTimeout, FalsePositiveMFAs, State) ->
+ case convert(Mod, {type,?anno(0),'$fixed_list',Domain}, State) of
+ {ok,FinType,NewState} ->
+ Test = ?FORALL(Args, FinType, apply_spec_test(MFA, FunRepr, SpecTimeout, FalsePositiveMFAs, Args)),
+ {ok, Test, NewState};
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec apply_spec_test(mfa(), fun_repr(), timeout(), false_positive_mfas(), term()) -> proper:test().
+apply_spec_test({Mod,Fun,_Arity}=MFA, {_Domain,Range}, SpecTimeout, FalsePositiveMFAs, Args) ->
+ ?TIMEOUT(SpecTimeout,
+ begin
+ %% NOTE: only call apply/3 inside try/catch (do not trust ?MODULE:is_instance/3)
+ Result =
+ try apply(Mod, Fun, Args) of
+ X -> {ok, X}
+ catch
+ X:Y -> {X, Y}
+ end,
+ case Result of
+ {ok, Z} ->
+ case ?MODULE:is_instance(Z, Mod, Range) of
+ true ->
+ true;
+ false when is_function(FalsePositiveMFAs) ->
+ FalsePositiveMFAs(MFA, Args, {fail, Z});
+ false ->
+ false
+ end;
+ Exception when is_function(FalsePositiveMFAs) ->
+ case FalsePositiveMFAs(MFA, Args, Exception) of
+ true ->
+ true;
+ false ->
+ error(Exception, erlang:get_stacktrace())
+ end;
+ Exception ->
+ error(Exception, erlang:get_stacktrace())
+ end
+ end).
+
+-spec get_exp_specced(mod_name(), state()) -> rich_result2([mfa()],state()).
+get_exp_specced(Mod, State) ->
+ case add_module(Mod, State) of
+ {ok,#state{exp_specs = ExpSpecs} = NewState} ->
+ ModExpSpecs = dict:fetch(Mod, ExpSpecs),
+ ExpSpecced = [{Mod,F,A} || {F,A} <- dict:fetch_keys(ModExpSpecs)],
+ {ok, ExpSpecced, NewState};
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec get_type_repr(mod_name(), type_ref(), boolean(), state()) ->
+ rich_result2(type_repr(),state()).
+get_type_repr(Mod, {type,Name,Arity} = TypeRef, true, State) ->
+ case prepare_for_remote(Mod, Name, Arity, State) of
+ {ok,NewState} ->
+ get_type_repr(Mod, TypeRef, false, NewState);
+ {error,_Reason} = Error ->
+ Error
+ end;
+get_type_repr(Mod, TypeRef, false, #state{types = Types} = State) ->
+ ModTypes = dict:fetch(Mod, Types),
+ case dict:find(TypeRef, ModTypes) of
+ {ok,TypeRepr} ->
+ {ok, TypeRepr, State};
+ error ->
+ {error, {missing_type,Mod,TypeRef}}
+ end.
+
+-spec prepare_for_remote(mod_name(), type_name(), arity(), state()) ->
+ rich_result(state()).
+prepare_for_remote(RemMod, Name, Arity, State) ->
+ case add_module(RemMod, State) of
+ {ok,#state{exp_types = ExpTypes} = NewState} ->
+ RemModExpTypes = dict:fetch(RemMod, ExpTypes),
+ case sets:is_element({Name,Arity}, RemModExpTypes) of
+ true -> {ok, NewState};
+ false -> {error, {type_not_exported,{RemMod,Name,Arity}}}
+ end;
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec translate_type(imm_type(), state()) -> rich_result2(fin_type(),state()).
+translate_type({Mod,Str} = ImmType, #state{cached = Cached} = State) ->
+ case dict:find(ImmType, Cached) of
+ {ok,Type} ->
+ {ok, Type, State};
+ error ->
+ case parse_type(Str) of
+ {ok,TypeForm} ->
+ case add_module(Mod, State) of
+ {ok,NewState} ->
+ case convert(Mod, TypeForm, NewState) of
+ {ok,FinType,
+ #state{cached = Cached} = FinalState} ->
+ NewCached = dict:store(ImmType, FinType,
+ Cached),
+ {ok, FinType,
+ FinalState#state{cached = NewCached}};
+ {error,_Reason} = Error ->
+ Error
+ end;
+ {error,_Reason} = Error ->
+ Error
+ end;
+ {error,Reason} ->
+ {error, {parse_error,Str,Reason}}
+ end
+ end.
+
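+%% Wraps the given type expression in a dummy `-type' attribute and parses it;
+%% e.g. parse_type("[integer()]") yields, roughly,
+%% {ok, {type,_,list,[{type,_,integer,[]}]}}.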
+-spec parse_type(string()) -> rich_result(abs_type()).
+parse_type(Str) ->
+ TypeStr = "-type mytype() :: " ++ Str ++ ".",
+ case erl_scan:string(TypeStr) of
+ {ok,Tokens,_EndLocation} ->
+ case erl_parse:parse_form(Tokens) of
+ {ok,{attribute,_Line,type,{mytype,TypeExpr,[]}}} ->
+ {ok, TypeExpr};
+ {error,_ErrorInfo} = Error ->
+ Error
+ end;
+ {error,ErrorInfo,_EndLocation} ->
+ {error, ErrorInfo}
+ end.
+
+-spec add_module(mod_name(), state()) -> rich_result(state()).
+add_module(Mod, #state{exp_types = ExpTypes} = State) ->
+ case dict:is_key(Mod, ExpTypes) of
+ true ->
+ {ok, State};
+ false ->
+ case get_code_and_exports(Mod) of
+ {ok,AbsCode,ModExpFuns} ->
+ RawModInfo = get_mod_info(Mod, AbsCode, ModExpFuns),
+ ModInfo = process_adts(Mod, RawModInfo),
+ {ok, store_mod_info(Mod, ModInfo, State)};
+ {error,Reason} ->
+ {error, {cant_load_code,Mod,Reason}}
+ end
+ end.
+
+%% @private
+-spec get_exp_info(mod_name()) -> rich_result2(mod_exp_types(),mod_exp_funs()).
+get_exp_info(Mod) ->
+ case get_code_and_exports(Mod) of
+ {ok,AbsCode,ModExpFuns} ->
+ RawModInfo = get_mod_info(Mod, AbsCode, ModExpFuns),
+ {ok, RawModInfo#mod_info.mod_exp_types, ModExpFuns};
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec get_code_and_exports(mod_name()) ->
+ rich_result2([abs_form()],mod_exp_funs()).
+get_code_and_exports(Mod) ->
+ case code:get_object_code(Mod) of
+ {Mod, ObjBin, _ObjFileName} ->
+ case get_chunks(ObjBin) of
+ {ok,_AbsCode,_ModExpFuns} = Result ->
+ Result;
+ {error,Reason} ->
+ get_code_and_exports_from_source(Mod, Reason)
+ end;
+ error ->
+ get_code_and_exports_from_source(Mod, cant_find_object_file)
+ end.
+
+-spec get_code_and_exports_from_source(mod_name(), term()) ->
+ rich_result2([abs_form()],mod_exp_funs()).
+get_code_and_exports_from_source(Mod, ObjError) ->
+ SrcFileName = atom_to_list(Mod) ++ ?SRC_FILE_EXT,
+ case code:where_is_file(SrcFileName) of
+ FullSrcFileName when is_list(FullSrcFileName) ->
+ Opts = [binary,debug_info,return_errors,{d,'PROPER_REMOVE_PROPS'}],
+ case compile:file(FullSrcFileName, Opts) of
+ {ok,Mod,Binary} ->
+ get_chunks(Binary);
+ {error,Errors,_Warnings} ->
+ {error, {ObjError,{cant_compile_source_file,Errors}}}
+ end;
+ non_existing ->
+ {error, {ObjError,cant_find_source_file}}
+ end.
+
+-spec get_chunks(string() | binary()) ->
+ rich_result2([abs_form()],mod_exp_funs()).
+get_chunks(ObjFile) ->
+ case beam_lib:chunks(ObjFile, [abstract_code,exports]) of
+ {ok,{_Mod,[{abstract_code,AbsCodeChunk},{exports,ExpFunsList}]}} ->
+ case AbsCodeChunk of
+ {raw_abstract_v1,AbsCode} ->
+ %% HACK: Add a declaration for iolist() to every module
+ {ok, add_iolist(AbsCode), sets:from_list(ExpFunsList)};
+ no_abstract_code ->
+ {error, no_abstract_code};
+ _ ->
+ {error, unsupported_abstract_code_format}
+ end;
+ {error,beam_lib,Reason} ->
+ {error, Reason}
+ end.
+
+-spec add_iolist([abs_form()]) -> [abs_form()].
+add_iolist(Forms) ->
+ IOListDef =
+ {type,0,maybe_improper_list,
+ [{type,0,union,[{type,0,byte,[]},{type,0,binary,[]},
+ {type,0,iolist,[]}]},
+ {type,0,binary,[]}]},
+ IOListDecl = {attribute,0,type,{iolist,IOListDef,[]}},
+ [IOListDecl | Forms].
+
+-spec get_mod_info(mod_name(), [abs_form()], mod_exp_funs()) -> mod_info().
+get_mod_info(Mod, AbsCode, ModExpFuns) ->
+ StartModInfo = #mod_info{mod_exp_funs = ModExpFuns},
+ ImmModInfo = lists:foldl(fun add_mod_info/2, StartModInfo, AbsCode),
+ #mod_info{mod_specs = AllModSpecs} = ImmModInfo,
+ IsExported = fun(FunRef,_FunRepr) -> sets:is_element(FunRef,ModExpFuns) end,
+ ModExpSpecs = dict:filter(IsExported, AllModSpecs),
+ ModInfo = ImmModInfo#mod_info{mod_specs = ModExpSpecs},
+ case orddict:find(Mod, ?HARD_ADT_MODS) of
+ {ok,ModADTs} ->
+ #mod_info{mod_exp_types = ModExpTypes, mod_types = ModTypes,
+ mod_opaques = ModOpaques} = ModInfo,
+ ModADTsSet =
+ sets:from_list([ImmTypeRef
+ || {ImmTypeRef,_HardADTRepr} <- ModADTs]),
+ NewModExpTypes = sets:union(ModExpTypes, ModADTsSet),
+ NewModTypes = lists:foldl(fun store_hard_adt/2, ModTypes, ModADTs),
+ NewModOpaques = sets:union(ModOpaques, ModADTsSet),
+ ModInfo#mod_info{mod_exp_types = NewModExpTypes,
+ mod_types = NewModTypes,
+ mod_opaques = NewModOpaques};
+ error ->
+ ModInfo
+ end.
+
+-spec store_hard_adt({imm_type_ref(),hard_adt_repr()}, mod_types()) ->
+ mod_types().
+store_hard_adt({_ImmTypeRef,already_declared}, ModTypes) ->
+ ModTypes;
+store_hard_adt({{Name,Arity},{TypeForm,VarNames}}, ModTypes) ->
+ TypeRef = {type,Name,Arity},
+ TypeRepr = {abs_type,TypeForm,VarNames,not_symb},
+ dict:store(TypeRef, TypeRepr, ModTypes).
+
+-spec add_mod_info(abs_form(), mod_info()) -> mod_info().
+add_mod_info({attribute,_Line,export_type,TypesList},
+ #mod_info{mod_exp_types = ModExpTypes} = ModInfo) ->
+ NewModExpTypes = sets:union(sets:from_list(TypesList), ModExpTypes),
+ ModInfo#mod_info{mod_exp_types = NewModExpTypes};
+add_mod_info({attribute,_Line,type,{{record,RecName},Fields,[]}},
+ #mod_info{mod_types = ModTypes} = ModInfo) ->
+ FieldInfo = [process_rec_field(F) || F <- Fields],
+ NewModTypes = dict:store({record,RecName,0}, {abs_record,FieldInfo},
+ ModTypes),
+ ModInfo#mod_info{mod_types = NewModTypes};
+add_mod_info({attribute,Line,record,{RecName,Fields}},
+ #mod_info{mod_types = ModTypes} = ModInfo) ->
+ case dict:is_key(RecName, ModTypes) of
+ true ->
+ ModInfo;
+	false -> % fake a typed record declaration, reusing the record's Line annotation
+ TypedRecord = {attribute,Line,type,{{record,RecName},Fields,[]}},
+ add_mod_info(TypedRecord, ModInfo)
+ end;
+add_mod_info({attribute,_Line,Kind,{Name,TypeForm,VarForms}},
+ #mod_info{mod_types = ModTypes,
+ mod_opaques = ModOpaques} = ModInfo)
+ when Kind =:= type; Kind =:= opaque ->
+ Arity = length(VarForms),
+ VarNames = [V || {var,_,V} <- VarForms],
+ %% TODO: No check whether variables are different, or non-'_'.
+ NewModTypes = dict:store({type,Name,Arity},
+ {abs_type,TypeForm,VarNames,not_symb}, ModTypes),
+ NewModOpaques =
+ case Kind of
+ type -> ModOpaques;
+ opaque -> sets:add_element({Name,Arity}, ModOpaques)
+ end,
+ ModInfo#mod_info{mod_types = NewModTypes, mod_opaques = NewModOpaques};
+add_mod_info({attribute,_Line,spec,{RawFunRef,[RawFirstClause | _Rest]}},
+ #mod_info{mod_specs = ModSpecs} = ModInfo) ->
+ FunRef = case RawFunRef of
+ {_Mod,Name,Arity} -> {Name,Arity};
+ {_Name,_Arity} = F -> F
+ end,
+ %% TODO: We just take the first function clause.
+ FirstClause = process_fun_clause(RawFirstClause),
+ NewModSpecs = dict:store(FunRef, FirstClause, ModSpecs),
+ ModInfo#mod_info{mod_specs = NewModSpecs};
+add_mod_info(_Form, ModInfo) ->
+ ModInfo.
+
+-spec process_rec_field(abs_rec_field()) -> {field_name(),abs_type()}.
+process_rec_field({record_field,_,{atom,_,FieldName}}) ->
+ {FieldName, {type,0,any,[]}};
+process_rec_field({record_field,_,{atom,_,FieldName},_Initialization}) ->
+ {FieldName, {type,0,any,[]}};
+process_rec_field({typed_record_field,RecField,FieldType}) ->
+ {FieldName,_} = process_rec_field(RecField),
+ {FieldName, FieldType}.
+
+-spec process_fun_clause(abs_type()) -> fun_clause_repr().
+process_fun_clause({type,_,'fun',[{type,_,product,Domain},Range]}) ->
+ {Domain, Range};
+process_fun_clause({type,_,bounded_fun,[MainClause,Constraints]}) ->
+ {RawDomain,RawRange} = process_fun_clause(MainClause),
+ VarSubsts = [{V,T} || {type,_,constraint,
+ [{atom,_,is_subtype},[{var,_,V},T]]} <- Constraints,
+ V =/= '_'],
+ VarSubstsDict = dict:from_list(VarSubsts),
+ Domain = [update_vars(A, VarSubstsDict, false) || A <- RawDomain],
+ Range = update_vars(RawRange, VarSubstsDict, false),
+ {Domain, Range}.
+
+-spec store_mod_info(mod_name(), mod_info(), state()) -> state().
+store_mod_info(Mod, #mod_info{mod_exp_types = ModExpTypes, mod_types = ModTypes,
+ mod_specs = ImmModExpSpecs},
+ #state{exp_types = ExpTypes, types = Types,
+ exp_specs = ExpSpecs} = State) ->
+ NewExpTypes = dict:store(Mod, ModExpTypes, ExpTypes),
+ NewTypes = dict:store(Mod, ModTypes, Types),
+ ModExpSpecs = dict:map(fun unbound_to_any/2, ImmModExpSpecs),
+ NewExpSpecs = dict:store(Mod, ModExpSpecs, ExpSpecs),
+ State#state{exp_types = NewExpTypes, types = NewTypes,
+ exp_specs = NewExpSpecs}.
+
+-spec unbound_to_any(fun_ref(), fun_repr()) -> fun_repr().
+unbound_to_any(_FunRef, {Domain,Range}) ->
+ EmptySubstsDict = dict:new(),
+ NewDomain = [update_vars(A,EmptySubstsDict,true) || A <- Domain],
+ NewRange = update_vars(Range, EmptySubstsDict, true),
+ {NewDomain, NewRange}.
+
+
+%%------------------------------------------------------------------------------
+%% ADT translation functions
+%%------------------------------------------------------------------------------
+
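+%% For each exported opaque type of a module, we look for exported specs whose
+%% return type yields that opaque and re-declare the opaque as a union of
+%% symbolic `$call' forms to those functions, so that instances can be built
+%% by calling the module's own API.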
+-spec process_adts(mod_name(), mod_info()) -> mod_info().
+process_adts(Mod,
+ #mod_info{mod_exp_types = ModExpTypes, mod_opaques = ModOpaques,
+ mod_specs = ModExpSpecs} = ModInfo) ->
+ %% TODO: No warning on unexported opaques.
+ case sets:to_list(sets:intersection(ModExpTypes,ModOpaques)) of
+ [] ->
+ ModInfo;
+ ModADTs ->
+ %% TODO: No warning on unexported API functions.
+ ModExpSpecsList = [{Name,Domain,Range}
+ || {{Name,_Arity},{Domain,Range}}
+ <- dict:to_list(ModExpSpecs)],
+ AddADT = fun(ADT,Acc) -> add_adt(Mod,ADT,Acc,ModExpSpecsList) end,
+ lists:foldl(AddADT, ModInfo, ModADTs)
+ end.
+
+-spec add_adt(mod_name(), imm_type_ref(), mod_info(), [proc_fun_ref()]) ->
+ mod_info().
+add_adt(Mod, {Name,Arity}, #mod_info{mod_types = ModTypes} = ModInfo,
+ ModExpFunSpecs) ->
+ ADTRef = {type,Name,Arity},
+ {abs_type,InternalRepr,VarNames,not_symb} = dict:fetch(ADTRef, ModTypes),
+ FullADTRef = {Mod,Name,Arity},
+ %% TODO: No warning on unsuitable range.
+ SymbCalls1 = [get_symb_call(FullADTRef,Spec) || Spec <- ModExpFunSpecs],
+ %% TODO: No warning on bad use of variables.
+ SymbCalls2 = [fix_vars(FullADTRef,Call,RangeVars,VarNames)
+ || {ok,Call,RangeVars} <- SymbCalls1],
+ case [Call || {ok,Call} <- SymbCalls2] of
+ [] ->
+ %% TODO: No warning on no acceptable spec.
+ ModInfo;
+ SymbCalls3 ->
+ NewADTRepr = {abs_type,{type,0,union,SymbCalls3},VarNames,
+ {orig_abs,InternalRepr}},
+ NewModTypes = dict:store(ADTRef, NewADTRepr, ModTypes),
+ ModInfo#mod_info{mod_types = NewModTypes}
+ end.
+
+-spec get_symb_call(full_imm_type_ref(), proc_fun_ref()) ->
+ tagged_result2(abs_type(),[var_name()]).
+get_symb_call({Mod,_TypeName,_Arity} = FullADTRef, {FunName,Domain,Range}) ->
+ A = ?anno(0),
+ BaseCall = {type,A,tuple,[{atom,A,'$call'},{atom,A,Mod},{atom,A,FunName},
+ {type,A,'$fixed_list',Domain}]},
+ unwrap_range(FullADTRef, BaseCall, Range, false).
+
+-spec unwrap_range(full_imm_type_ref(), abs_type() | next_step(), abs_type(),
+ boolean()) ->
+ tagged_result2(abs_type() | next_step(),[var_name()]).
+unwrap_range(FullADTRef, Call, {paren_type,_,[Type]}, TestRun) ->
+ unwrap_range(FullADTRef, Call, Type, TestRun);
+unwrap_range(FullADTRef, Call, {ann_type,_,[_Var,Type]}, TestRun) ->
+ unwrap_range(FullADTRef, Call, Type, TestRun);
+unwrap_range(FullADTRef, Call, {type,_,list,[ElemType]}, TestRun) ->
+ unwrap_list(FullADTRef, Call, ElemType, TestRun);
+unwrap_range(FullADTRef, Call, {type,_,maybe_improper_list,[Cont,_Term]},
+ TestRun) ->
+ unwrap_list(FullADTRef, Call, Cont, TestRun);
+unwrap_range(FullADTRef, Call, {type,_,nonempty_list,[ElemType]}, TestRun) ->
+ unwrap_list(FullADTRef, Call, ElemType, TestRun);
+unwrap_range(FullADTRef, Call, {type,_,nonempty_improper_list,[Cont,_Term]},
+ TestRun) ->
+ unwrap_list(FullADTRef, Call, Cont, TestRun);
+unwrap_range(FullADTRef, Call,
+ {type,_,nonempty_maybe_improper_list,[Cont,_Term]}, TestRun) ->
+ unwrap_list(FullADTRef, Call, Cont, TestRun);
+unwrap_range(_FullADTRef, _Call, {type,_,tuple,any}, _TestRun) ->
+ error;
+unwrap_range(FullADTRef, Call, {type,_,tuple,FieldForms}, TestRun) ->
+ Translates = fun(T) -> unwrap_range(FullADTRef,none,T,true) =/= error end,
+ case proper_arith:find_first(Translates, FieldForms) of
+ none ->
+ error;
+ {TargetPos,TargetElem} ->
+ Pattern = get_pattern(TargetPos, FieldForms),
+ case TestRun of
+ true ->
+ NewCall =
+ case Call of
+ none -> {match_with,Pattern};
+ _ -> Call
+ end,
+ {ok, NewCall, []};
+ false ->
+ AbsPattern = term_to_singleton_type(Pattern),
+ A = ?anno(0),
+ NewCall =
+ {type,A,tuple,
+ [{atom,A,'$call'},{atom,A,?MODULE},{atom,A,match},
+ {type,A,'$fixed_list',[AbsPattern,Call]}]},
+ unwrap_range(FullADTRef, NewCall, TargetElem, TestRun)
+ end
+ end;
+unwrap_range(FullADTRef, Call, {type,_,union,Choices}, TestRun) ->
+ TestedChoices = [unwrap_range(FullADTRef,none,C,true) || C <- Choices],
+ NotError = fun(error) -> false; (_) -> true end,
+ case proper_arith:find_first(NotError, TestedChoices) of
+ none ->
+ error;
+ {_ChoicePos,{ok,none,_RangeVars}} ->
+ error;
+ {ChoicePos,{ok,NextStep,_RangeVars}} ->
+ {A, [ChoiceElem|B]} = lists:split(ChoicePos-1, Choices),
+ OtherChoices = A ++ B,
+ DistinctChoice =
+ case NextStep of
+ take_head ->
+ fun cant_have_head/1;
+ {match_with,Pattern} ->
+ fun(C) -> cant_match(Pattern, C) end
+ end,
+ case {lists:all(DistinctChoice,OtherChoices), TestRun} of
+ {true,true} ->
+ {ok, NextStep, []};
+ {true,false} ->
+ unwrap_range(FullADTRef, Call, ChoiceElem, TestRun);
+ {false,_} ->
+ error
+ end
+ end;
+unwrap_range({_Mod,SameName,Arity}, Call, {type,_,SameName,ArgForms},
+ _TestRun) ->
+ RangeVars = [V || {var,_,V} <- ArgForms, V =/= '_'],
+ case length(ArgForms) =:= Arity andalso length(RangeVars) =:= Arity of
+ true -> {ok, Call, RangeVars};
+ false -> error
+ end;
+unwrap_range({SameMod,SameName,_Arity} = FullADTRef, Call,
+ {remote_type,_,[{atom,_,SameMod},{atom,_,SameName},ArgForms]},
+ TestRun) ->
+ unwrap_range(FullADTRef, Call, {type,?anno(0),SameName,ArgForms}, TestRun);
+unwrap_range(_FullADTRef, _Call, _Range, _TestRun) ->
+ error.
+
+-spec unwrap_list(full_imm_type_ref(), abs_type() | next_step(), abs_type(),
+ boolean()) ->
+ tagged_result2(abs_type() | next_step(),[var_name()]).
+unwrap_list(FullADTRef, Call, HeadType, TestRun) ->
+ NewCall =
+ case TestRun of
+ true ->
+ case Call of
+ none -> take_head;
+ _ -> Call
+ end;
+ false ->
+ {type,0,tuple,[{atom,0,'$call'},{atom,0,erlang},{atom,0,hd},
+ {type,0,'$fixed_list',[Call]}]}
+ end,
+ unwrap_range(FullADTRef, NewCall, HeadType, TestRun).
+
+-spec fix_vars(full_imm_type_ref(), abs_type(), [var_name()], [var_name()]) ->
+ tagged_result(abs_type()).
+fix_vars(FullADTRef, Call, RangeVars, VarNames) ->
+ NotAnyVar = fun(V) -> V =/= '_' end,
+ case no_duplicates(VarNames) andalso lists:all(NotAnyVar,VarNames) of
+ true ->
+ RawUsedVars =
+ collect_vars(FullADTRef, Call, [[V] || V <- RangeVars]),
+ UsedVars = [lists:usort(L) || L <- RawUsedVars],
+ case correct_var_use(UsedVars) of
+ true ->
+ PairAll = fun(L,Y) -> [{X,{var,0,Y}} || X <- L] end,
+ VarSubsts =
+ lists:flatten(lists:zipwith(PairAll,UsedVars,VarNames)),
+ VarSubstsDict = dict:from_list(VarSubsts),
+ {ok, update_vars(Call,VarSubstsDict,true)};
+ false ->
+ error
+ end;
+ false ->
+ error
+ end.
+
+-spec no_duplicates(list()) -> boolean().
+no_duplicates(L) ->
+ length(lists:usort(L)) =:= length(L).
+
+-spec correct_var_use([[var_name() | 0]]) -> boolean().
+correct_var_use(UsedVars) ->
+ NoNonVarArgs = fun([0|_]) -> false; (_) -> true end,
+ lists:all(NoNonVarArgs, UsedVars)
+ andalso no_duplicates(lists:flatten(UsedVars)).
+
+-spec collect_vars(full_imm_type_ref(), abs_type(), [[var_name() | 0]]) ->
+ [[var_name() | 0]].
+collect_vars(FullADTRef, {paren_type,_,[Type]}, UsedVars) ->
+ collect_vars(FullADTRef, Type, UsedVars);
+collect_vars(FullADTRef, {ann_type,_,[_Var,Type]}, UsedVars) ->
+ collect_vars(FullADTRef, Type, UsedVars);
+collect_vars(_FullADTRef, {type,_,tuple,any}, UsedVars) ->
+ UsedVars;
+collect_vars({_Mod,SameName,Arity} = FullADTRef, {type,_,SameName,ArgForms},
+ UsedVars) ->
+ case length(ArgForms) =:= Arity of
+ true ->
+ VarArgs = [V || {var,_,V} <- ArgForms, V =/= '_'],
+ case length(VarArgs) =:= Arity of
+ true ->
+ AddToList = fun(X,L) -> [X | L] end,
+ lists:zipwith(AddToList, VarArgs, UsedVars);
+ false ->
+ [[0|L] || L <- UsedVars]
+ end;
+ false ->
+ multi_collect_vars(FullADTRef, ArgForms, UsedVars)
+ end;
+collect_vars(FullADTRef, {type,_,_Name,ArgForms}, UsedVars) ->
+ multi_collect_vars(FullADTRef, ArgForms, UsedVars);
+collect_vars({SameMod,SameName,_Arity} = FullADTRef,
+ {remote_type,_,[{atom,_,SameMod},{atom,_,SameName},ArgForms]},
+ UsedVars) ->
+ collect_vars(FullADTRef, {type,?anno(0),SameName,ArgForms}, UsedVars);
+collect_vars(FullADTRef, {remote_type,_,[_RemModForm,_NameForm,ArgForms]},
+ UsedVars) ->
+ multi_collect_vars(FullADTRef, ArgForms, UsedVars);
+collect_vars(_FullADTRef, _Call, UsedVars) ->
+ UsedVars.
+
+-spec multi_collect_vars(full_imm_type_ref(), [abs_type()],
+ [[var_name() | 0]]) -> [[var_name() | 0]].
+multi_collect_vars({_Mod,_Name,Arity} = FullADTRef, Forms, UsedVars) ->
+ NoUsedVars = lists:duplicate(Arity, []),
+ MoreUsedVars = [collect_vars(FullADTRef,T,NoUsedVars) || T <- Forms],
+ CombineVars = fun(L1,L2) -> lists:zipwith(fun erlang:'++'/2, L1, L2) end,
+ lists:foldl(CombineVars, UsedVars, MoreUsedVars).
+
+-ifdef(NO_MODULES_IN_OPAQUES).
+-type var_substs_dict() :: dict().
+-else.
+-type var_substs_dict() :: dict:dict(var_name(),abs_type()).
+-endif.
+-spec update_vars(abs_type(), var_substs_dict(), boolean()) -> abs_type().
+update_vars({paren_type,Line,[Type]}, VarSubstsDict, UnboundToAny) ->
+ {paren_type, Line, [update_vars(Type,VarSubstsDict,UnboundToAny)]};
+update_vars({ann_type,Line,[Var,Type]}, VarSubstsDict, UnboundToAny) ->
+ {ann_type, Line, [Var,update_vars(Type,VarSubstsDict,UnboundToAny)]};
+update_vars({var,Line,VarName} = Call, VarSubstsDict, UnboundToAny) ->
+ case dict:find(VarName, VarSubstsDict) of
+ {ok,SubstType} ->
+ SubstType;
+ error when UnboundToAny =:= false ->
+ Call;
+ error when UnboundToAny =:= true ->
+ {type,Line,any,[]}
+ end;
+update_vars({remote_type,Line,[RemModForm,NameForm,ArgForms]}, VarSubstsDict,
+ UnboundToAny) ->
+ NewArgForms = [update_vars(A,VarSubstsDict,UnboundToAny) || A <- ArgForms],
+ {remote_type, Line, [RemModForm,NameForm,NewArgForms]};
+update_vars({T,_,tuple,any} = Call, _VarSubstsDict, _UnboundToAny) when ?IS_TYPE_TAG(T) ->
+ Call;
+update_vars({T,Line,Name,ArgForms}, VarSubstsDict, UnboundToAny) when ?IS_TYPE_TAG(T) ->
+ NewArgForms = [update_vars(A,VarSubstsDict,UnboundToAny) || A <- ArgForms],
+ {T, Line, Name, NewArgForms};
+update_vars(Call, _VarSubstsDict, _UnboundToAny) ->
+ Call.
+
+
+%%------------------------------------------------------------------------------
+%% Match-related functions
+%%------------------------------------------------------------------------------
+
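+%% A pattern is a tuple skeleton used to extract a single element from a tuple
+%% instance: atom positions must match literally, `1' marks the element to
+%% extract and `0' marks ignored positions. For example (with `ElemForm' and
+%% `OtherForm' standing for arbitrary field forms),
+%% get_pattern(2, [{atom,0,ok},ElemForm,OtherForm]) yields {ok,1,0}.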
+-spec get_pattern(position(), [abs_type()]) -> pattern().
+get_pattern(TargetPos, FieldForms) ->
+ {0,RevPattern} = lists:foldl(fun add_field/2, {TargetPos,[]}, FieldForms),
+ list_to_tuple(lists:reverse(RevPattern)).
+
+-spec add_field(abs_type(), {non_neg_integer(),[pat_field()]}) ->
+ {non_neg_integer(),[pat_field(),...]}.
+add_field(_Type, {1,Acc}) ->
+ {0, [1|Acc]};
+add_field({atom,_,Tag}, {Left,Acc}) ->
+ {erlang:max(0,Left-1), [Tag|Acc]};
+add_field(_Type, {Left,Acc}) ->
+ {erlang:max(0,Left-1), [0|Acc]}.
+
+%% @private
+-spec match(pattern(), tuple()) -> term().
+match(Pattern, Term) when tuple_size(Pattern) =:= tuple_size(Term) ->
+ match(tuple_to_list(Pattern), tuple_to_list(Term), none, false);
+match(_Pattern, _Term) ->
+ throw(no_match).
+
+-spec match([pat_field()], [term()], 'none' | {'ok',T}, boolean()) -> T.
+match([], [], {ok,Target}, _TypeMode) ->
+ Target;
+match([0|PatRest], [_|ToMatchRest], Acc, TypeMode) ->
+ match(PatRest, ToMatchRest, Acc, TypeMode);
+match([1|PatRest], [Target|ToMatchRest], none, TypeMode) ->
+ match(PatRest, ToMatchRest, {ok,Target}, TypeMode);
+match([Tag|PatRest], [X|ToMatchRest], Acc, TypeMode) when is_atom(Tag) ->
+ MatchesTag =
+ case TypeMode of
+ true -> can_be_tag(Tag, X);
+ false -> Tag =:= X
+ end,
+ case MatchesTag of
+ true -> match(PatRest, ToMatchRest, Acc, TypeMode);
+ false -> throw(no_match)
+ end.
+
+%% CAUTION: these must be sorted
+-define(NON_ATOM_TYPES,
+ [arity,binary,bitstring,byte,char,float,'fun',function,integer,iodata,
+ iolist,list,maybe_improper_list,mfa,neg_integer,nil,no_return,
+ non_neg_integer,none,nonempty_improper_list,nonempty_list,
+ nonempty_maybe_improper_list,nonempty_string,number,pid,port,
+ pos_integer,range,record,reference,string,tuple]).
+-define(NON_TUPLE_TYPES,
+ [arity,atom,binary,bitstring,bool,boolean,byte,char,float,'fun',
+ function,identifier,integer,iodata,iolist,list,maybe_improper_list,
+ neg_integer,nil,no_return,node,non_neg_integer,none,
+ nonempty_improper_list,nonempty_list,nonempty_maybe_improper_list,
+ nonempty_string,number,pid,port,pos_integer,range,reference,string,
+ timeout]).
+-define(NO_HEAD_TYPES,
+ [arity,atom,binary,bitstring,bool,boolean,byte,char,float,'fun',
+ function,identifier,integer,mfa,module,neg_integer,nil,no_return,node,
+ non_neg_integer,none,number,pid,port,pos_integer,range,record,
+ reference,timeout,tuple]).
+
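+%% Conservative predicates over abstract types, used when checking that a
+%% union choice is distinguishable from the others: whether the atom `Tag'
+%% could be an instance of the type, whether the given pattern can never match
+%% an instance, and whether an instance can never be a nonempty list.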
+-spec can_be_tag(atom(), abs_type()) -> boolean().
+can_be_tag(Tag, {ann_type,_,[_Var,Type]}) ->
+ can_be_tag(Tag, Type);
+can_be_tag(Tag, {paren_type,_,[Type]}) ->
+ can_be_tag(Tag, Type);
+can_be_tag(Tag, {atom,_,Atom}) ->
+ Tag =:= Atom;
+can_be_tag(_Tag, {integer,_,_Int}) ->
+ false;
+can_be_tag(_Tag, {op,_,_Op,_Arg}) ->
+ false;
+can_be_tag(_Tag, {op,_,_Op,_Arg1,_Arg2}) ->
+ false;
+can_be_tag(Tag, {type,_,BName,[]}) when BName =:= bool; BName =:= boolean ->
+ is_boolean(Tag);
+can_be_tag(Tag, {type,_,timeout,[]}) ->
+ Tag =:= infinity;
+can_be_tag(Tag, {type,_,union,Choices}) ->
+ lists:any(fun(C) -> can_be_tag(Tag,C) end, Choices);
+can_be_tag(_Tag, {type,_,Name,_Args}) ->
+ not ordsets:is_element(Name, ?NON_ATOM_TYPES);
+can_be_tag(_Tag, _Type) ->
+ true.
+
+-spec cant_match(pattern(), abs_type()) -> boolean().
+cant_match(Pattern, {ann_type,_,[_Var,Type]}) ->
+ cant_match(Pattern, Type);
+cant_match(Pattern, {paren_type,_,[Type]}) ->
+ cant_match(Pattern, Type);
+cant_match(_Pattern, {atom,_,_Atom}) ->
+ true;
+cant_match(_Pattern, {integer,_,_Int}) ->
+ true;
+cant_match(_Pattern, {op,_,_Op,_Arg}) ->
+ true;
+cant_match(_Pattern, {op,_,_Op,_Arg1,_Arg2}) ->
+ true;
+cant_match(Pattern, {type,Anno,mfa,[]}) ->
+ MFA_Ts = [{type,Anno,atom,[]}, {type,Anno,atom,[]}, {type,Anno,arity,[]}],
+ cant_match(Pattern, {type,Anno,tuple,MFA_Ts});
+cant_match(Pattern, {type,_,union,Choices}) ->
+ lists:all(fun(C) -> cant_match(Pattern,C) end, Choices);
+cant_match(_Pattern, {type,_,tuple,any}) ->
+ false;
+cant_match(Pattern, {type,_,tuple,Fields}) ->
+ tuple_size(Pattern) =/= length(Fields) orelse
+ try match(tuple_to_list(Pattern), Fields, none, true) of
+ _ -> false
+ catch
+ throw:no_match -> true
+ end;
+cant_match(_Pattern, {type,_,Name,_Args}) ->
+ ordsets:is_element(Name, ?NON_TUPLE_TYPES);
+cant_match(_Pattern, _Type) ->
+ false.
+
+-spec cant_have_head(abs_type()) -> boolean().
+cant_have_head({ann_type,_,[_Var,Type]}) ->
+ cant_have_head(Type);
+cant_have_head({paren_type,_,[Type]}) ->
+ cant_have_head(Type);
+cant_have_head({atom,_,_Atom}) ->
+ true;
+cant_have_head({integer,_,_Int}) ->
+ true;
+cant_have_head({op,_,_Op,_Arg}) ->
+ true;
+cant_have_head({op,_,_Op,_Arg1,_Arg2}) ->
+ true;
+cant_have_head({type,_,union,Choices}) ->
+ lists:all(fun cant_have_head/1, Choices);
+cant_have_head({type,_,Name,_Args}) ->
+ ordsets:is_element(Name, ?NO_HEAD_TYPES);
+cant_have_head(_Type) ->
+ false.
+
+%% Only covers atoms, integers and tuples, i.e. those that can be specified
+%% through singleton types.
+-spec term_to_singleton_type(atom() | integer()
+ | loose_tuple(atom() | integer())) -> abs_type().
+term_to_singleton_type(Atom) when is_atom(Atom) ->
+ {atom,?anno(0),Atom};
+term_to_singleton_type(Int) when is_integer(Int), Int >= 0 ->
+ {integer,?anno(0),Int};
+term_to_singleton_type(Int) when is_integer(Int), Int < 0 ->
+ A = ?anno(0),
+ {op,A,'-',{integer,A,-Int}};
+term_to_singleton_type(Tuple) when is_tuple(Tuple) ->
+ Fields = tuple_to_list(Tuple),
+ {type,?anno(0),tuple,[term_to_singleton_type(F) || F <- Fields]}.
+
+
+%%------------------------------------------------------------------------------
+%% Instance testing functions
+%%------------------------------------------------------------------------------
+
+%% CAUTION: this must be sorted
+-define(EQUIV_TYPES,
+ [{arity, {type,0,range,[{integer,0,0},{integer,0,255}]}},
+ {bool, {type,0,boolean,[]}},
+ {byte, {type,0,range,[{integer,0,0},{integer,0,255}]}},
+ {char, {type,0,range,[{integer,0,0},{integer,0,16#10ffff}]}},
+ {function, {type,0,'fun',[]}},
+ {identifier, {type,0,union,[{type,0,pid,[]},{type,0,port,[]},
+ {type,0,reference,[]}]}},
+ {iodata, {type,0,union,[{type,0,binary,[]},{type,0,iolist,[]}]}},
+ {iolist, {type,0,maybe_improper_list,
+ [{type,0,union,[{type,0,byte,[]},{type,0,binary,[]},
+ {type,0,iolist,[]}]},
+ {type,0,binary,[]}]}},
+ {list, {type,0,list,[{type,0,any,[]}]}},
+ {maybe_improper_list, {type,0,maybe_improper_list,[{type,0,any,[]},
+ {type,0,any,[]}]}},
+ {mfa, {type,0,tuple,[{type,0,atom,[]},{type,0,atom,[]},
+ {type,0,arity,[]}]}},
+ {node, {type,0,atom,[]}},
+ {nonempty_list, {type,0,nonempty_list,[{type,0,any,[]}]}},
+ {nonempty_maybe_improper_list, {type,0,nonempty_maybe_improper_list,
+ [{type,0,any,[]},{type,0,any,[]}]}},
+ {nonempty_string, {type,0,nonempty_list,[{type,0,char,[]}]}},
+ {string, {type,0,list,[{type,0,char,[]}]}},
+ {term, {type,0,any,[]}},
+ {timeout, {type,0,union,[{atom,0,infinity},
+ {type,0,non_neg_integer,[]}]}}]).
+
+%% @private
+%% TODO: Most of these functions accept an extended form of abs_type(), namely
+%% the addition of a custom wrapper: {'from_mod',mod_name(),...}
+-spec is_instance(term(), mod_name(), abs_type()) -> boolean().
+is_instance(X, Mod, TypeForm) ->
+ is_instance(X, Mod, TypeForm, []).
+
+-spec is_instance(term(), mod_name(), abs_type(), imm_stack()) -> boolean().
+is_instance(X, _Mod, {from_mod,OrigMod,Type}, Stack) ->
+ is_instance(X, OrigMod, Type, Stack);
+is_instance(_X, _Mod, {var,_,'_'}, _Stack) ->
+ true;
+is_instance(_X, _Mod, {var,_,Name}, _Stack) ->
+ %% All unconstrained spec vars have been replaced by 'any()' and we always
+ %% replace the variables on the RHS of types before recursing into them.
+ %% Provided that '-type' declarations contain no unbound variables, we
+ %% don't expect to find any non-'_' variables while recursing.
+ throw({'$typeserver',{unbound_var_in_type_declaration,Name}});
+is_instance(X, Mod, {ann_type,_,[_Var,Type]}, Stack) ->
+ is_instance(X, Mod, Type, Stack);
+is_instance(X, Mod, {paren_type,_,[Type]}, Stack) ->
+ is_instance(X, Mod, Type, Stack);
+is_instance(X, Mod, {remote_type,_,[{atom,_,RemMod},{atom,_,Name},ArgForms]},
+ Stack) ->
+ is_custom_instance(X, Mod, RemMod, Name, ArgForms, true, Stack);
+is_instance(SameAtom, _Mod, {atom,_,SameAtom}, _Stack) ->
+ true;
+is_instance(SameInt, _Mod, {integer,_,SameInt}, _Stack) ->
+ true;
+is_instance(X, _Mod, {op,_,_Op,_Arg} = Expr, _Stack) ->
+ is_int_const(X, Expr);
+is_instance(X, _Mod, {op,_,_Op,_Arg1,_Arg2} = Expr, _Stack) ->
+ is_int_const(X, Expr);
+is_instance(_X, _Mod, {type,_,any,[]}, _Stack) ->
+ true;
+is_instance(X, _Mod, {type,_,atom,[]}, _Stack) ->
+ is_atom(X);
+is_instance(X, _Mod, {type,_,binary,[]}, _Stack) ->
+ is_binary(X);
+is_instance(X, _Mod, {type,_,binary,[BaseExpr,UnitExpr]}, _Stack) ->
+ %% <<_:X,_:_*Y>> means "bitstrings of X + k*Y bits, k >= 0"
+ case eval_int(BaseExpr) of
+ {ok,Base} when Base >= 0 ->
+ case eval_int(UnitExpr) of
+ {ok,Unit} when Unit >= 0 ->
+ case is_bitstring(X) of
+ true ->
+ BitSizeX = bit_size(X),
+ case Unit =:= 0 of
+ true ->
+ BitSizeX =:= Base;
+ false ->
+ BitSizeX >= Base
+ andalso
+ (BitSizeX - Base) rem Unit =:= 0
+ end;
+ false -> false
+ end;
+ _ ->
+ abs_expr_error(invalid_unit, UnitExpr)
+ end;
+ _ ->
+ abs_expr_error(invalid_base, BaseExpr)
+ end;
+is_instance(X, _Mod, {type,_,bitstring,[]}, _Stack) ->
+ is_bitstring(X);
+is_instance(X, _Mod, {type,_,boolean,[]}, _Stack) ->
+ is_boolean(X);
+is_instance(X, _Mod, {type,_,float,[]}, _Stack) ->
+ is_float(X);
+is_instance(X, _Mod, {type,_,'fun',[]}, _Stack) ->
+ is_function(X);
+%% TODO: how to check range type? random inputs? special case for 0-arity?
+is_instance(X, _Mod, {type,_,'fun',[{type,_,any,[]},_Range]}, _Stack) ->
+ is_function(X);
+is_instance(X, _Mod, {type,_,'fun',[{type,_,product,Domain},_Range]}, _Stack) ->
+ is_function(X, length(Domain));
+is_instance(X, _Mod, {type,_,integer,[]}, _Stack) ->
+ is_integer(X);
+is_instance(X, Mod, {type,_,list,[Type]}, _Stack) ->
+ list_test(X, Mod, Type, dummy, true, true, false);
+is_instance(X, Mod, {type,_,maybe_improper_list,[Cont,Term]}, _Stack) ->
+ list_test(X, Mod, Cont, Term, true, true, true);
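+%% A module() is an atom or, in the case of (legacy) parameterized modules,
+%% a non-empty tuple whose first element is an atom.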
+is_instance(X, _Mod, {type,_,module,[]}, _Stack) ->
+ is_atom(X) orelse
+ is_tuple(X) andalso X =/= {} andalso is_atom(element(1,X));
+is_instance([], _Mod, {type,_,nil,[]}, _Stack) ->
+ true;
+is_instance(X, _Mod, {type,_,neg_integer,[]}, _Stack) ->
+ is_integer(X) andalso X < 0;
+is_instance(X, _Mod, {type,_,non_neg_integer,[]}, _Stack) ->
+ is_integer(X) andalso X >= 0;
+is_instance(X, Mod, {type,_,nonempty_list,[Type]}, _Stack) ->
+ list_test(X, Mod, Type, dummy, false, true, false);
+is_instance(X, Mod, {type,_,nonempty_improper_list,[Cont,Term]}, _Stack) ->
+ list_test(X, Mod, Cont, Term, false, false, true);
+is_instance(X, Mod, {type,_,nonempty_maybe_improper_list,[Cont,Term]},
+ _Stack) ->
+ list_test(X, Mod, Cont, Term, false, true, true);
+is_instance(X, _Mod, {type,_,number,[]}, _Stack) ->
+ is_number(X);
+is_instance(X, _Mod, {type,_,pid,[]}, _Stack) ->
+ is_pid(X);
+is_instance(X, _Mod, {type,_,port,[]}, _Stack) ->
+ is_port(X);
+is_instance(X, _Mod, {type,_,pos_integer,[]}, _Stack) ->
+ is_integer(X) andalso X > 0;
+is_instance(_X, _Mod, {type,_,product,_Elements}, _Stack) ->
+ throw({'$typeserver',{internal,product_in_is_instance}});
+is_instance(X, _Mod, {type,_,range,[LowExpr,HighExpr]}, _Stack) ->
+ case {eval_int(LowExpr),eval_int(HighExpr)} of
+ {{ok,Low},{ok,High}} when Low =< High ->
+ X >= Low andalso X =< High;
+ _ ->
+ abs_expr_error(invalid_range, LowExpr, HighExpr)
+ end;
+is_instance(X, Mod, {type,_,record,[{atom,_,Name} = NameForm | RawSubsts]},
+ Stack) ->
+ Substs = [{N,T} || {type,_,field_type,[{atom,_,N},T]} <- RawSubsts],
+ SubstsDict = dict:from_list(Substs),
+ case get_type_repr(Mod, {record,Name,0}, false) of
+ {ok,{abs_record,OrigFields}} ->
+ Fields = [case dict:find(FieldName, SubstsDict) of
+ {ok,NewFieldType} -> NewFieldType;
+ error -> OrigFieldType
+ end
+ || {FieldName,OrigFieldType} <- OrigFields],
+ is_instance(X, Mod, {type,?anno(0),tuple,[NameForm|Fields]}, Stack);
+ {error,Reason} ->
+ throw({'$typeserver',Reason})
+ end;
+is_instance(X, _Mod, {type,_,reference,[]}, _Stack) ->
+ is_reference(X);
+is_instance(X, _Mod, {type,_,tuple,any}, _Stack) ->
+ is_tuple(X);
+is_instance(X, Mod, {type,_,tuple,Fields}, _Stack) ->
+ is_tuple(X) andalso tuple_test(tuple_to_list(X), Mod, Fields);
+is_instance(X, Mod, {type,_,union,Choices}, Stack) ->
+ IsInstance = fun(Choice) -> is_instance(X,Mod,Choice,Stack) end,
+ lists:any(IsInstance, Choices);
+is_instance(X, Mod, {T,_,Name,[]}, Stack) when ?IS_TYPE_TAG(T) ->
+ case orddict:find(Name, ?EQUIV_TYPES) of
+ {ok,EquivType} ->
+ is_instance(X, Mod, EquivType, Stack);
+ error ->
+ is_maybe_hard_adt(X, Mod, Name, [], Stack)
+ end;
+is_instance(X, Mod, {T,_,Name,ArgForms}, Stack) when ?IS_TYPE_TAG(T) ->
+ is_maybe_hard_adt(X, Mod, Name, ArgForms, Stack);
+is_instance(_X, _Mod, _Type, _Stack) ->
+ false.
+
+-spec is_int_const(term(), abs_expr()) -> boolean().
+is_int_const(X, Expr) ->
+ case eval_int(Expr) of
+ {ok,Int} ->
+ X =:= Int;
+ error ->
+ abs_expr_error(invalid_int_const, Expr)
+ end.
+
+%% TODO: We implicitly add the '| []' at the termination of maybe_improper_list.
+%% TODO: We ignore a '[]' termination in improper_list.
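+%% The three boolean flags state whether the list may be empty, whether it
+%% may be properly terminated (with '[]') and whether it may be improperly
+%% terminated, respectively.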
+-spec list_test(term(), mod_name(), abs_type(), 'dummy' | abs_type(), boolean(),
+ boolean(), boolean()) -> boolean().
+list_test(X, Mod, Content, Termination, CanEmpty, CanProper, CanImproper) ->
+ is_list(X) andalso
+ list_rec(X, Mod, Content, Termination, CanEmpty, CanProper, CanImproper).
+
+-spec list_rec(term(), mod_name(), abs_type(), 'dummy' | abs_type(), boolean(),
+ boolean(), boolean()) -> boolean().
+list_rec([], _Mod, _Content, _Termination, CanEmpty, CanProper, _CanImproper) ->
+ CanEmpty andalso CanProper;
+list_rec([X | Rest], Mod, Content, Termination, _CanEmpty, CanProper,
+ CanImproper) ->
+ is_instance(X, Mod, Content, []) andalso
+ list_rec(Rest, Mod, Content, Termination, true, CanProper, CanImproper);
+list_rec(X, Mod, _Content, Termination, _CanEmpty, _CanProper, CanImproper) ->
+ CanImproper andalso is_instance(X, Mod, Termination, []).
+
+-spec tuple_test([term()], mod_name(), [abs_type()]) -> boolean().
+tuple_test([], _Mod, []) ->
+ true;
+tuple_test([X | XTail], Mod, [T | TTail]) ->
+ is_instance(X, Mod, T, []) andalso tuple_test(XTail, Mod, TTail);
+tuple_test(_, _Mod, _) ->
+ false.
+
+-spec is_maybe_hard_adt(term(), mod_name(), type_name(), [abs_type()],
+ imm_stack()) -> boolean().
+is_maybe_hard_adt(X, Mod, Name, ArgForms, Stack) ->
+ case orddict:find({Name,length(ArgForms)}, ?HARD_ADTS) of
+ {ok,ADTMod} ->
+ is_custom_instance(X, Mod, ADTMod, Name, ArgForms, true, Stack);
+ error ->
+ is_custom_instance(X, Mod, Mod, Name, ArgForms, false, Stack)
+ end.
+
+-spec is_custom_instance(term(), mod_name(), mod_name(), type_name(),
+ [abs_type()], boolean(), imm_stack()) -> boolean().
+is_custom_instance(X, Mod, RemMod, Name, RawArgForms, IsRemote, Stack) ->
+ ArgForms = case Mod =/= RemMod of
+ true -> [{from_mod,Mod,A} || A <- RawArgForms];
+ false -> RawArgForms
+ end,
+ Arity = length(ArgForms),
+ FullTypeRef = {RemMod,Name,Arity},
+ case lists:member(FullTypeRef, Stack) of
+ true ->
+ throw({'$typeserver',{self_reference,FullTypeRef}});
+ false ->
+ TypeRef = {type,Name,Arity},
+ AbsType = get_abs_type(RemMod, TypeRef, ArgForms, IsRemote),
+ is_instance(X, RemMod, AbsType, [FullTypeRef|Stack])
+ end.
+
+-spec get_abs_type(mod_name(), type_ref(), [abs_type()], boolean()) ->
+ abs_type().
+get_abs_type(RemMod, TypeRef, ArgForms, IsRemote) ->
+ case get_type_repr(RemMod, TypeRef, IsRemote) of
+ {ok,TypeRepr} ->
+ {FinalAbsType,SymbInfo,VarNames} =
+ case TypeRepr of
+ {cached,_FinType,FAT,SI} -> {FAT,SI,[]};
+ {abs_type,FAT,VN,SI} -> {FAT,SI,VN}
+ end,
+ AbsType =
+ case SymbInfo of
+ not_symb -> FinalAbsType;
+ {orig_abs,OrigAbsType} -> OrigAbsType
+ end,
+ VarSubstsDict = dict:from_list(lists:zip(VarNames,ArgForms)),
+ update_vars(AbsType, VarSubstsDict, false);
+ {error,Reason} ->
+ throw({'$typeserver',Reason})
+ end.
+
+-spec abs_expr_error(atom(), abs_expr()) -> no_return().
+abs_expr_error(ImmReason, Expr) ->
+ {error,Reason} = expr_error(ImmReason, Expr),
+ throw({'$typeserver',Reason}).
+
+-spec abs_expr_error(atom(), abs_expr(), abs_expr()) -> no_return().
+abs_expr_error(ImmReason, Expr1, Expr2) ->
+ {error,Reason} = expr_error(ImmReason, Expr1, Expr2),
+ throw({'$typeserver',Reason}).
+
+
+%%------------------------------------------------------------------------------
+%% Type translation functions
+%%------------------------------------------------------------------------------
+
+-spec convert(mod_name(), abs_type(), state()) ->
+ rich_result2(fin_type(),state()).
+convert(Mod, TypeForm, State) ->
+ case convert(Mod, TypeForm, State, [], dict:new()) of
+ {ok,{simple,Type},NewState} ->
+ {ok, Type, NewState};
+ {ok,{rec,_RecFun,_RecArgs},_NewState} ->
+ {error, {internal,rec_returned_to_toplevel}};
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec convert(mod_name(), abs_type(), state(), stack(), var_dict()) ->
+ rich_result2(ret_type(),state()).
+convert(Mod, {paren_type,_,[Type]}, State, Stack, VarDict) ->
+ convert(Mod, Type, State, Stack, VarDict);
+convert(Mod, {ann_type,_,[_Var,Type]}, State, Stack, VarDict) ->
+ convert(Mod, Type, State, Stack, VarDict);
+convert(_Mod, {var,_,'_'}, State, _Stack, _VarDict) ->
+ {ok, {simple,proper_types:any()}, State};
+convert(_Mod, {var,_,VarName}, State, _Stack, VarDict) ->
+ case dict:find(VarName, VarDict) of
+    %% TODO: do we need to check whether we are at the toplevel of a
+    %% recursive type?
+ {ok,RetType} -> {ok, RetType, State};
+ error -> {error, {unbound_var,VarName}}
+ end;
+convert(Mod, {remote_type,_,[{atom,_,RemMod},{atom,_,Name},ArgForms]}, State,
+ Stack, VarDict) ->
+ case prepare_for_remote(RemMod, Name, length(ArgForms), State) of
+ {ok,NewState} ->
+ convert_custom(Mod,RemMod,Name,ArgForms,NewState,Stack,VarDict);
+ {error,_Reason} = Error ->
+ Error
+ end;
+convert(_Mod, {atom,_,Atom}, State, _Stack, _VarDict) ->
+ {ok, {simple,proper_types:exactly(Atom)}, State};
+convert(_Mod, {integer,_,_Int} = IntExpr, State, _Stack, _VarDict) ->
+ convert_integer(IntExpr, State);
+convert(_Mod, {op,_,_Op,_Arg} = OpExpr, State, _Stack, _VarDict) ->
+ convert_integer(OpExpr, State);
+convert(_Mod, {op,_,_Op,_Arg1,_Arg2} = OpExpr, State, _Stack, _VarDict) ->
+ convert_integer(OpExpr, State);
+convert(_Mod, {type,_,binary,[BaseExpr,UnitExpr]}, State, _Stack, _VarDict) ->
+ %% <<_:X,_:_*Y>> means "bitstrings of X + k*Y bits, k >= 0"
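+  %% e.g. <<>> maps to exactly(<<>>), <<_:_*1>> to bitstring(),
+  %% <<_:_*8>> to binary() and <<_:4,_:_*8>> to a 4-bit head followed by
+  %% a binary() tail.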
+ case eval_int(BaseExpr) of
+ {ok,0} ->
+ case eval_int(UnitExpr) of
+ {ok,0} -> {ok, {simple,proper_types:exactly(<<>>)}, State};
+ {ok,1} -> {ok, {simple,proper_types:bitstring()}, State};
+ {ok,8} -> {ok, {simple,proper_types:binary()}, State};
+ {ok,N} when N > 0 ->
+ Gen = ?LET(L, proper_types:list(proper_types:bitstring(N)),
+ concat_bitstrings(L)),
+ {ok, {simple,Gen}, State};
+ _ -> expr_error(invalid_unit, UnitExpr)
+ end;
+ {ok,Base} when Base > 0 ->
+ Head = proper_types:bitstring(Base),
+ case eval_int(UnitExpr) of
+ {ok,0} -> {ok, {simple,Head}, State};
+ {ok,1} ->
+ Tail = proper_types:bitstring(),
+ {ok, {simple,concat_binary_gens(Head, Tail)}, State};
+ {ok,8} ->
+ Tail = proper_types:binary(),
+ {ok, {simple,concat_binary_gens(Head, Tail)}, State};
+ {ok,N} when N > 0 ->
+ Tail =
+ ?LET(L, proper_types:list(proper_types:bitstring(N)),
+ concat_bitstrings(L)),
+ {ok, {simple,concat_binary_gens(Head, Tail)}, State};
+ _ -> expr_error(invalid_unit, UnitExpr)
+ end;
+ _ ->
+ expr_error(invalid_base, BaseExpr)
+ end;
+convert(_Mod, {type,_,range,[LowExpr,HighExpr]}, State, _Stack, _VarDict) ->
+ case {eval_int(LowExpr),eval_int(HighExpr)} of
+ {{ok,Low},{ok,High}} when Low =< High ->
+ {ok, {simple,proper_types:integer(Low,High)}, State};
+ _ ->
+ expr_error(invalid_range, LowExpr, HighExpr)
+ end;
+convert(_Mod, {type,_,nil,[]}, State, _Stack, _VarDict) ->
+ {ok, {simple,proper_types:exactly([])}, State};
+convert(Mod, {type,_,list,[ElemForm]}, State, Stack, VarDict) ->
+ convert_list(Mod, false, ElemForm, State, Stack, VarDict);
+convert(Mod, {type,_,nonempty_list,[ElemForm]}, State, Stack, VarDict) ->
+ convert_list(Mod, true, ElemForm, State, Stack, VarDict);
+convert(_Mod, {type,_,nonempty_list,[]}, State, _Stack, _VarDict) ->
+ {ok, {simple,proper_types:non_empty(proper_types:list())}, State};
+convert(_Mod, {type,_,nonempty_string,[]}, State, _Stack, _VarDict) ->
+ {ok, {simple,proper_types:non_empty(proper_types:string())}, State};
+convert(_Mod, {type,_,tuple,any}, State, _Stack, _VarDict) ->
+ {ok, {simple,proper_types:tuple()}, State};
+convert(Mod, {type,_,tuple,ElemForms}, State, Stack, VarDict) ->
+ convert_tuple(Mod, ElemForms, false, State, Stack, VarDict);
+convert(Mod, {type,_,'$fixed_list',ElemForms}, State, Stack, VarDict) ->
+ convert_tuple(Mod, ElemForms, true, State, Stack, VarDict);
+convert(Mod, {type,_,record,[{atom,_,Name}|FieldForms]}, State, Stack,
+ VarDict) ->
+ convert_record(Mod, Name, FieldForms, State, Stack, VarDict);
+convert(Mod, {type,_,union,ChoiceForms}, State, Stack, VarDict) ->
+ convert_union(Mod, ChoiceForms, State, Stack, VarDict);
+convert(Mod, {type,_,'fun',[{type,_,product,Domain},Range]}, State, Stack,
+ VarDict) ->
+ convert_fun(Mod, length(Domain), Range, State, Stack, VarDict);
+%% TODO: These types should be replaced with accurate types.
+%% TODO: Add support for nonempty_improper_list/2.
+convert(Mod, {type,Anno,maybe_improper_list,[]}, State, Stack, VarDict) ->
+ convert(Mod, {type,Anno,list,[]}, State, Stack, VarDict);
+convert(Mod, {type,A,maybe_improper_list,[Cont,_Ter]}, State, Stack, VarDict) ->
+ convert(Mod, {type,A,list,[Cont]}, State, Stack, VarDict);
+convert(Mod, {type,A,nonempty_maybe_improper_list,[]}, State, Stack, VarDict) ->
+ convert(Mod, {type,A,nonempty_list,[]}, State, Stack, VarDict);
+convert(Mod, {type,A,nonempty_maybe_improper_list,[Cont,_Term]}, State, Stack,
+ VarDict) ->
+ convert(Mod, {type,A,nonempty_list,[Cont]}, State, Stack, VarDict);
+convert(Mod, {type,A,iodata,[]}, State, Stack, VarDict) ->
+ RealType = {type,A,union,[{type,A,binary,[]},{type,A,iolist,[]}]},
+ convert(Mod, RealType, State, Stack, VarDict);
+convert(Mod, {T,_,Name,[]}, State, Stack, VarDict) when ?IS_TYPE_TAG(T) ->
+ case ordsets:is_element(Name, ?STD_TYPES_0) of
+ true ->
+ {ok, {simple,proper_types:Name()}, State};
+ false ->
+ convert_maybe_hard_adt(Mod, Name, [], State, Stack, VarDict)
+ end;
+convert(Mod, {T,_,Name,ArgForms}, State, Stack, VarDict) when ?IS_TYPE_TAG(T) ->
+ convert_maybe_hard_adt(Mod, Name, ArgForms, State, Stack, VarDict);
+convert(_Mod, TypeForm, _State, _Stack, _VarDict) ->
+ {error, {unsupported_type,TypeForm}}.
+
+-spec concat_bitstrings([bitstring()]) -> bitstring().
+concat_bitstrings(BitStrings) ->
+ concat_bitstrings_tr(BitStrings, <<>>).
+
+-spec concat_bitstrings_tr([bitstring()], bitstring()) -> bitstring().
+concat_bitstrings_tr([], Acc) ->
+ Acc;
+concat_bitstrings_tr([BitString | Rest], Acc) ->
+ concat_bitstrings_tr(Rest, <<Acc/bits,BitString/bits>>).
+
+-spec concat_binary_gens(fin_type(), fin_type()) -> fin_type().
+concat_binary_gens(HeadType, TailType) ->
+ ?LET({H,T}, {HeadType,TailType}, <<H/bits,T/bits>>).
+
+-spec convert_fun(mod_name(), arity(), abs_type(), state(), stack(),
+ var_dict()) -> rich_result2(ret_type(),state()).
+convert_fun(Mod, Arity, Range, State, Stack, VarDict) ->
+ case convert(Mod, Range, State, ['fun' | Stack], VarDict) of
+ {ok,{simple,RangeType},NewState} ->
+ {ok, {simple,proper_types:function(Arity,RangeType)}, NewState};
+ {ok,{rec,RecFun,RecArgs},NewState} ->
+ case at_toplevel(RecArgs, Stack) of
+ true -> base_case_error(Stack);
+ false -> convert_rec_fun(Arity, RecFun, RecArgs, NewState)
+ end;
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec convert_rec_fun(arity(), rec_fun(), rec_args(), state()) ->
+ {'ok',ret_type(),state()}.
+convert_rec_fun(Arity, RecFun, RecArgs, State) ->
+ %% We bind the generated value by size.
+ NewRecFun =
+ fun(GenFuns,Size) ->
+ proper_types:function(Arity, RecFun(GenFuns,Size))
+ end,
+ NewRecArgs = clean_rec_args(RecArgs),
+ {ok, {rec,NewRecFun,NewRecArgs}, State}.
+
+-spec convert_list(mod_name(), boolean(), abs_type(), state(), stack(),
+ var_dict()) -> rich_result2(ret_type(),state()).
+convert_list(Mod, NonEmpty, ElemForm, State, Stack, VarDict) ->
+ case convert(Mod, ElemForm, State, [list | Stack], VarDict) of
+ {ok,{simple,ElemType},NewState} ->
+ InnerType = proper_types:list(ElemType),
+ FinType = case NonEmpty of
+ true -> proper_types:non_empty(InnerType);
+ false -> InnerType
+ end,
+ {ok, {simple,FinType}, NewState};
+ {ok,{rec,RecFun,RecArgs},NewState} ->
+ case {at_toplevel(RecArgs,Stack), NonEmpty} of
+ {true,true} ->
+ base_case_error(Stack);
+ {true,false} ->
+ NewRecFun =
+ fun(GenFuns,Size) ->
+ ElemGen = fun(S) -> ?LAZY(RecFun(GenFuns,S)) end,
+ proper_types:distlist(Size, ElemGen, false)
+ end,
+ NewRecArgs = clean_rec_args(RecArgs),
+ {ok, {rec,NewRecFun,NewRecArgs}, NewState};
+ {false,_} ->
+ {NewRecFun,NewRecArgs} =
+ convert_rec_list(RecFun, RecArgs, NonEmpty),
+ {ok, {rec,NewRecFun,NewRecArgs}, NewState}
+ end;
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec convert_rec_list(rec_fun(), rec_args(), boolean()) ->
+ {rec_fun(),rec_args()}.
+convert_rec_list(RecFun, [{true,FullTypeRef}] = RecArgs, NonEmpty) ->
+ {NewRecFun,_NormalRecArgs} =
+ convert_normal_rec_list(RecFun, RecArgs, NonEmpty),
+ AltRecFun =
+ fun([InstListGen],Size) ->
+ InstTypesList =
+ proper_types:get_prop(internal_types, InstListGen(Size)),
+ proper_types:fixed_list([RecFun([fun(_Size) -> I end],0)
+ || I <- InstTypesList])
+ end,
+ NewRecArgs = [{{list,NonEmpty,AltRecFun},FullTypeRef}],
+ {NewRecFun, NewRecArgs};
+convert_rec_list(RecFun, RecArgs, NonEmpty) ->
+ convert_normal_rec_list(RecFun, RecArgs, NonEmpty).
+
+-spec convert_normal_rec_list(rec_fun(), rec_args(), boolean()) ->
+ {rec_fun(),rec_args()}.
+convert_normal_rec_list(RecFun, RecArgs, NonEmpty) ->
+ NewRecFun = fun(GenFuns,Size) ->
+ ElemGen = fun(S) -> RecFun(GenFuns, S) end,
+ proper_types:distlist(Size, ElemGen, NonEmpty)
+ end,
+ NewRecArgs = clean_rec_args(RecArgs),
+ {NewRecFun, NewRecArgs}.
+
+-spec convert_tuple(mod_name(), [abs_type()], boolean(), state(), stack(),
+ var_dict()) -> rich_result2(ret_type(),state()).
+convert_tuple(Mod, ElemForms, ToList, State, Stack, VarDict) ->
+ case process_list(Mod, ElemForms, State, [tuple | Stack], VarDict) of
+ {ok,RetTypes,NewState} ->
+ case combine_ret_types(RetTypes, {tuple,ToList}) of
+ {simple,_FinType} = RetType ->
+ {ok, RetType, NewState};
+ {rec,_RecFun,RecArgs} = RetType ->
+ case at_toplevel(RecArgs, Stack) of
+ true -> base_case_error(Stack);
+ false -> {ok, RetType, NewState}
+ end
+ end;
+ {error,_Reason} = Error ->
+ Error
+ end.
+
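+%% Union choices are split into those that recurse through the type
+%% currently being defined, those that recurse only through other types and
+%% non-recursive ones; the latter two groups form the base case, which is
+%% the only alternative generated at size 0.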
+-spec convert_union(mod_name(), [abs_type()], state(), stack(), var_dict()) ->
+ rich_result2(ret_type(),state()).
+convert_union(Mod, ChoiceForms, State, Stack, VarDict) ->
+ case process_list(Mod, ChoiceForms, State, [union | Stack], VarDict) of
+ {ok,RawChoices,NewState} ->
+ ProcessChoice = fun(T,A) -> process_choice(T,A,Stack) end,
+ {RevSelfRecs,RevNonSelfRecs,RevNonRecs} =
+ lists:foldl(ProcessChoice, {[],[],[]}, RawChoices),
+ case {lists:reverse(RevSelfRecs),lists:reverse(RevNonSelfRecs),
+ lists:reverse(RevNonRecs)} of
+ {_SelfRecs,[],[]} ->
+ base_case_error(Stack);
+ {[],NonSelfRecs,NonRecs} ->
+ {ok, combine_ret_types(NonRecs ++ NonSelfRecs, union),
+ NewState};
+ {SelfRecs,NonSelfRecs,NonRecs} ->
+ {BCaseRecFun,BCaseRecArgs} =
+ case combine_ret_types(NonRecs ++ NonSelfRecs, union) of
+ {simple,BCaseType} ->
+ {fun([],_Size) -> BCaseType end,[]};
+ {rec,BCRecFun,BCRecArgs} ->
+ {BCRecFun,BCRecArgs}
+ end,
+ NumBCaseGens = length(BCaseRecArgs),
+ [ParentRef | _Upper] = Stack,
+ FallbackRecFun = fun([SelfGen],_Size) -> SelfGen(0) end,
+ FallbackRecArgs = [{false,ParentRef}],
+ FallbackRetType = {rec,FallbackRecFun,FallbackRecArgs},
+ {rec,RCaseRecFun,RCaseRecArgs} =
+ combine_ret_types([FallbackRetType] ++ SelfRecs
+ ++ NonSelfRecs, wunion),
+ NewRecFun =
+ fun(AllGens,Size) ->
+ {BCaseGens,RCaseGens} =
+ lists:split(NumBCaseGens, AllGens),
+ case Size of
+ 0 -> BCaseRecFun(BCaseGens,0);
+ _ -> RCaseRecFun(RCaseGens,Size)
+ end
+ end,
+ NewRecArgs = BCaseRecArgs ++ RCaseRecArgs,
+ {ok, {rec,NewRecFun,NewRecArgs}, NewState}
+ end;
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec process_choice(ret_type(), {[ret_type()],[ret_type()],[ret_type()]},
+ stack()) -> {[ret_type()],[ret_type()],[ret_type()]}.
+process_choice({simple,_} = RetType, {SelfRecs,NonSelfRecs,NonRecs}, _Stack) ->
+ {SelfRecs, NonSelfRecs, [RetType | NonRecs]};
+process_choice({rec,RecFun,RecArgs}, {SelfRecs,NonSelfRecs,NonRecs}, Stack) ->
+ case at_toplevel(RecArgs, Stack) of
+ true ->
+ case partition_by_toplevel(RecArgs, Stack, true) of
+ {[],[],_,_} ->
+ NewRecArgs = clean_rec_args(RecArgs),
+ {[{rec,RecFun,NewRecArgs} | SelfRecs], NonSelfRecs,
+ NonRecs};
+ {SelfRecArgs,SelfPos,OtherRecArgs,_OtherPos} ->
+ NumInstances = length(SelfRecArgs),
+ IsListInst = fun({true,_FTRef}) -> false
+ ; ({{list,_NE,_AltRecFun},_FTRef}) -> true
+ end,
+ NewRecFun =
+ case proper_arith:filter(IsListInst,SelfRecArgs) of
+ {[],[]} ->
+ no_list_inst_rec_fun(RecFun,NumInstances,
+ SelfPos);
+ {[{{list,NonEmpty,AltRecFun},_}],[ListInstPos]} ->
+ list_inst_rec_fun(AltRecFun,NumInstances,
+ SelfPos,NonEmpty,ListInstPos)
+ end,
+ [{_B,SelfRef} | _] = SelfRecArgs,
+ NewRecArgs =
+ [{false,SelfRef} | clean_rec_args(OtherRecArgs)],
+ {[{rec,NewRecFun,NewRecArgs} | SelfRecs], NonSelfRecs,
+ NonRecs}
+ end;
+ false ->
+ NewRecArgs = clean_rec_args(RecArgs),
+ {SelfRecs, [{rec,RecFun,NewRecArgs} | NonSelfRecs], NonRecs}
+ end.
+
+-spec no_list_inst_rec_fun(rec_fun(), pos_integer(), [position()]) -> rec_fun().
+no_list_inst_rec_fun(RecFun, NumInstances, SelfPos) ->
+ fun([SelfGen|OtherGens], Size) ->
+ ?LETSHRINK(
+ Instances,
+ %% Size distribution will be a little off if both normal and
+ %% instance-accepting generators are present.
+ lists:duplicate(NumInstances, SelfGen(Size div NumInstances)),
+ begin
+ InstGens = [fun(_Size) -> proper_types:exactly(I) end
+ || I <- Instances],
+ AllGens = proper_arith:insert(InstGens, SelfPos, OtherGens),
+ RecFun(AllGens, Size)
+ end)
+ end.
+
+-spec list_inst_rec_fun(rec_fun(), pos_integer(), [position()], boolean(),
+ position()) -> rec_fun().
+list_inst_rec_fun(AltRecFun, NumInstances, SelfPos, NonEmpty, ListInstPos) ->
+ fun([SelfGen|OtherGens], Size) ->
+ ?LETSHRINK(
+ AllInsts,
+ lists:duplicate(NumInstances - 1, SelfGen(Size div NumInstances))
+ ++ proper_types:distlist(Size div NumInstances, SelfGen, NonEmpty),
+ begin
+ {Instances,InstList} = lists:split(NumInstances - 1, AllInsts),
+ InstGens = [fun(_Size) -> proper_types:exactly(I) end
+ || I <- Instances],
+ InstTypesList = [proper_types:exactly(I) || I <- InstList],
+ InstListGen =
+ fun(_Size) -> proper_types:fixed_list(InstTypesList) end,
+ AllInstGens = proper_arith:list_insert(ListInstPos, InstListGen,
+ InstGens),
+ AllGens = proper_arith:insert(AllInstGens, SelfPos, OtherGens),
+ AltRecFun(AllGens, Size)
+ end)
+ end.
+
+-spec convert_maybe_hard_adt(mod_name(), type_name(), [abs_type()], state(),
+ stack(), var_dict()) ->
+ rich_result2(ret_type(),state()).
+convert_maybe_hard_adt(Mod, Name, ArgForms, State, Stack, VarDict) ->
+ Arity = length(ArgForms),
+ case orddict:find({Name,Arity}, ?HARD_ADTS) of
+ {ok,Mod} ->
+ convert_custom(Mod, Mod, Name, ArgForms, State, Stack, VarDict);
+ {ok,ADTMod} ->
+ A = ?anno(0),
+ ADT = {remote_type,A,[{atom,A,ADTMod},{atom,A,Name},ArgForms]},
+ convert(Mod, ADT, State, Stack, VarDict);
+ error ->
+ convert_custom(Mod, Mod, Name, ArgForms, State, Stack, VarDict)
+ end.
+
+-spec convert_custom(mod_name(), mod_name(), type_name(), [abs_type()], state(),
+ stack(), var_dict()) -> rich_result2(ret_type(),state()).
+convert_custom(Mod, RemMod, Name, ArgForms, State, Stack, VarDict) ->
+ case process_list(Mod, ArgForms, State, Stack, VarDict) of
+ {ok,Args,NewState} ->
+ Arity = length(Args),
+ TypeRef = {type,Name,Arity},
+ FullTypeRef = {RemMod,type,Name,Args},
+ convert_type(TypeRef, FullTypeRef, NewState, Stack);
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec convert_record(mod_name(), type_name(), [abs_type()], state(), stack(),
+ var_dict()) -> rich_result2(ret_type(),state()).
+convert_record(Mod, Name, RawSubsts, State, Stack, VarDict) ->
+ Substs = [{N,T} || {type,_,field_type,[{atom,_,N},T]} <- RawSubsts],
+ {SubstFields,SubstTypeForms} = lists:unzip(Substs),
+ case process_list(Mod, SubstTypeForms, State, Stack, VarDict) of
+ {ok,SubstTypes,NewState} ->
+ SubstsDict = dict:from_list(lists:zip(SubstFields, SubstTypes)),
+ TypeRef = {record,Name,0},
+ FullTypeRef = {Mod,record,Name,SubstsDict},
+ convert_type(TypeRef, FullTypeRef, NewState, Stack);
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec convert_type(type_ref(), full_type_ref(), state(), stack()) ->
+ rich_result2(ret_type(),state()).
+convert_type(TypeRef, {Mod,_Kind,_Name,_Spec} = FullTypeRef, State, Stack) ->
+ case stack_position(FullTypeRef, Stack) of
+ none ->
+ case get_type_repr(Mod, TypeRef, false, State) of
+ {ok,TypeRepr,NewState} ->
+ convert_new_type(TypeRef, FullTypeRef, TypeRepr, NewState,
+ Stack);
+ {error,_Reason} = Error ->
+ Error
+ end;
+ 1 ->
+ base_case_error(Stack);
+ _Pos ->
+ {ok, {rec,fun([Gen],Size) -> Gen(Size) end,[{true,FullTypeRef}]},
+ State}
+ end.
+
+-spec convert_new_type(type_ref(), full_type_ref(), type_repr(), state(),
+ stack()) -> rich_result2(ret_type(),state()).
+convert_new_type(_TypeRef, {_Mod,type,_Name,[]},
+ {cached,FinType,_TypeForm,_SymbInfo}, State, _Stack) ->
+ {ok, {simple,FinType}, State};
+convert_new_type(TypeRef, {Mod,type,_Name,Args} = FullTypeRef,
+ {abs_type,TypeForm,Vars,SymbInfo}, State, Stack) ->
+ VarDict = dict:from_list(lists:zip(Vars, Args)),
+ case convert(Mod, TypeForm, State, [FullTypeRef | Stack], VarDict) of
+ {ok, {simple,ImmFinType}, NewState} ->
+ FinType = case SymbInfo of
+ not_symb ->
+ ImmFinType;
+ {orig_abs,_OrigAbsType} ->
+ proper_symb:internal_well_defined(ImmFinType)
+ end,
+ FinalState = case Vars of
+ [] -> cache_type(Mod, TypeRef, FinType, TypeForm,
+ SymbInfo, NewState);
+ _ -> NewState
+ end,
+ {ok, {simple,FinType}, FinalState};
+ {ok, {rec,RecFun,RecArgs}, NewState} ->
+ convert_maybe_rec(FullTypeRef, SymbInfo, RecFun, RecArgs, NewState,
+ Stack);
+ {error,_Reason} = Error ->
+ Error
+ end;
+convert_new_type(_TypeRef, {Mod,record,Name,SubstsDict} = FullTypeRef,
+ {abs_record,OrigFields}, State, Stack) ->
+ Fields = [case dict:find(FieldName, SubstsDict) of
+ {ok,NewFieldType} -> NewFieldType;
+ error -> OrigFieldType
+ end
+ || {FieldName,OrigFieldType} <- OrigFields],
+ case convert_tuple(Mod, [{atom,0,Name} | Fields], false, State,
+ [FullTypeRef | Stack], dict:new()) of
+ {ok, {simple,_FinType}, _NewState} = Result ->
+ Result;
+ {ok, {rec,RecFun,RecArgs}, NewState} ->
+ convert_maybe_rec(FullTypeRef, not_symb, RecFun, RecArgs, NewState,
+ Stack);
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec cache_type(mod_name(), type_ref(), fin_type(), abs_type(), symb_info(),
+ state()) -> state().
+cache_type(Mod, TypeRef, FinType, TypeForm, SymbInfo,
+ #state{types = Types} = State) ->
+ TypeRepr = {cached,FinType,TypeForm,SymbInfo},
+ ModTypes = dict:fetch(Mod, Types),
+ NewModTypes = dict:store(TypeRef, TypeRepr, ModTypes),
+ NewTypes = dict:store(Mod, NewModTypes, Types),
+ State#state{types = NewTypes}.
+
+-spec convert_maybe_rec(full_type_ref(), symb_info(), rec_fun(), rec_args(),
+ state(), stack()) -> rich_result2(ret_type(),state()).
+convert_maybe_rec(FullTypeRef, SymbInfo, RecFun, RecArgs, State, Stack) ->
+ case at_toplevel(RecArgs, Stack) of
+ true -> base_case_error(Stack);
+ false -> safe_convert_maybe_rec(FullTypeRef, SymbInfo, RecFun, RecArgs,
+ State)
+ end.
+
+-spec safe_convert_maybe_rec(full_type_ref(),symb_info(),rec_fun(),rec_args(),
+ state()) -> rich_result2(ret_type(),state()).
+safe_convert_maybe_rec(FullTypeRef, SymbInfo, RecFun, RecArgs, State) ->
+ case partition_rec_args(FullTypeRef, RecArgs, false) of
+ {[],[],_,_} ->
+ {ok, {rec,RecFun,RecArgs}, State};
+ {MyRecArgs,MyPos,OtherRecArgs,_OtherPos} ->
+ case lists:all(fun({B,_T}) -> B =:= false end, MyRecArgs) of
+ true -> convert_rec_type(SymbInfo, RecFun, MyPos, OtherRecArgs,
+ State);
+ false -> {error, {internal,true_rec_arg_reached_type}}
+ end
+ end.
+
+-spec convert_rec_type(symb_info(), rec_fun(), [position()], rec_args(),
+ state()) -> {ok, ret_type(), state()}.
+convert_rec_type(SymbInfo, RecFun, MyPos, [], State) ->
+ NumRecArgs = length(MyPos),
+ M = fun(GenFun) ->
+ fun(Size) ->
+ GenFuns = lists:duplicate(NumRecArgs, GenFun),
+ RecFun(GenFuns, erlang:max(0,Size - 1))
+ end
+ end,
+ SizedGen = y(M),
+ ImmFinType = ?SIZED(Size,SizedGen(Size + 1)),
+ FinType = case SymbInfo of
+ not_symb ->
+ ImmFinType;
+ {orig_abs,_OrigAbsType} ->
+ proper_symb:internal_well_defined(ImmFinType)
+ end,
+ {ok, {simple,FinType}, State};
+convert_rec_type(_SymbInfo, RecFun, MyPos, OtherRecArgs, State) ->
+ NumRecArgs = length(MyPos),
+ NewRecFun =
+ fun(OtherGens,TopSize) ->
+ M = fun(GenFun) ->
+ fun(Size) ->
+ GenFuns = lists:duplicate(NumRecArgs, GenFun),
+ AllGens =
+ proper_arith:insert(GenFuns, MyPos, OtherGens),
+ RecFun(AllGens, erlang:max(0,Size - 1))
+ end
+ end,
+ (y(M))(TopSize)
+ end,
+ NewRecArgs = clean_rec_args(OtherRecArgs),
+ {ok, {rec,NewRecFun,NewRecArgs}, State}.
+
+%% Y Combinator: Read more at http://bc.tech.coop/blog/070611.html.
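+%% For example, y(fun(F) -> fun(0) -> 1; (N) -> N * F(N - 1) end end)
+%% yields a factorial function without writing the recursion explicitly.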
+-spec y(fun((fun((T) -> S)) -> fun((T) -> S))) -> fun((T) -> S).
+y(M) ->
+ G = fun(F) ->
+ M(fun(A) -> (F(F))(A) end)
+ end,
+ G(G).
+
+-spec process_list(mod_name(), [abs_type() | ret_type()], state(), stack(),
+ var_dict()) -> rich_result2([ret_type()],state()).
+process_list(Mod, RawTypes, State, Stack, VarDict) ->
+ Process = fun({simple,_FinType} = Type, {ok,Types,State1}) ->
+ {ok, [Type|Types], State1};
+ ({rec,_RecFun,_RecArgs} = Type, {ok,Types,State1}) ->
+ {ok, [Type|Types], State1};
+ (TypeForm, {ok,Types,State1}) ->
+ case convert(Mod, TypeForm, State1, Stack, VarDict) of
+ {ok,Type,State2} -> {ok,[Type|Types],State2};
+ {error,_} = Err -> Err
+ end;
+ (_RawType, {error,_} = Err) ->
+ Err
+ end,
+ case lists:foldl(Process, {ok,[],State}, RawTypes) of
+ {ok,RevTypes,NewState} ->
+ {ok, lists:reverse(RevTypes), NewState};
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec convert_integer(abs_expr(), state()) -> rich_result2(ret_type(),state()).
+convert_integer(Expr, State) ->
+ case eval_int(Expr) of
+ {ok,Int} -> {ok, {simple,proper_types:exactly(Int)}, State};
+ error -> expr_error(invalid_int_const, Expr)
+ end.
+
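+%% Evaluates a constant integer expression given in abstract form, e.g. the
+%% abstract code for '1 bsl 8' yields {ok,256}; anything non-constant or
+%% non-integer yields 'error'.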
+-spec eval_int(abs_expr()) -> tagged_result(integer()).
+eval_int(Expr) ->
+ NoBindings = erl_eval:new_bindings(),
+ try erl_eval:expr(Expr, NoBindings) of
+ {value,Value,_NewBindings} when is_integer(Value) ->
+ {ok, Value};
+ _ ->
+ error
+ catch
+ error:_ ->
+ error
+ end.
+
+-spec expr_error(atom(), abs_expr()) -> {'error',term()}.
+expr_error(Reason, Expr) ->
+ {error, {Reason,lists:flatten(erl_pp:expr(Expr))}}.
+
+-spec expr_error(atom(), abs_expr(), abs_expr()) -> {'error',term()}.
+expr_error(Reason, Expr1, Expr2) ->
+ Str1 = lists:flatten(erl_pp:expr(Expr1)),
+ Str2 = lists:flatten(erl_pp:expr(Expr2)),
+ {error, {Reason,Str1,Str2}}.
+
+-spec base_case_error(stack()) -> {'error',term()}.
+%% TODO: This might be confusing, since it doesn't record the arguments to
+%% parametric types or the type substitutions of a record.
+base_case_error([{Mod,type,Name,Args} | _Upper]) ->
+ Arity = length(Args),
+ {error, {no_base_case,{Mod,type,Name,Arity}}};
+base_case_error([{Mod,record,Name,_SubstsDict} | _Upper]) ->
+ {error, {no_base_case,{Mod,record,Name}}}.
+
+
+%%------------------------------------------------------------------------------
+%% Helper datatypes handling functions
+%%------------------------------------------------------------------------------
+
+-spec stack_position(full_type_ref(), stack()) -> 'none' | pos_integer().
+stack_position(FullTypeRef, Stack) ->
+ SameType = fun(A) -> same_full_type_ref(A,FullTypeRef) end,
+ case proper_arith:find_first(SameType, Stack) of
+ {Pos,_} -> Pos;
+ none -> none
+ end.
+
+-spec partition_by_toplevel(rec_args(), stack(), boolean()) ->
+ {rec_args(),[position()],rec_args(),[position()]}.
+partition_by_toplevel(RecArgs, [], _OnlyInstanceAccepting) ->
+ {[],[],RecArgs,lists:seq(1,length(RecArgs))};
+partition_by_toplevel(RecArgs, [_Parent | _Upper], _OnlyInstanceAccepting)
+ when is_atom(_Parent) ->
+ {[],[],RecArgs,lists:seq(1,length(RecArgs))};
+partition_by_toplevel(RecArgs, [Parent | _Upper], OnlyInstanceAccepting) ->
+ partition_rec_args(Parent, RecArgs, OnlyInstanceAccepting).
+
+-spec at_toplevel(rec_args(), stack()) -> boolean().
+at_toplevel(RecArgs, Stack) ->
+ case partition_by_toplevel(RecArgs, Stack, false) of
+ {[],[],_,_} -> false;
+ _ -> true
+ end.
+
+-spec partition_rec_args(full_type_ref(), rec_args(), boolean()) ->
+ {rec_args(),[position()],rec_args(),[position()]}.
+partition_rec_args(FullTypeRef, RecArgs, OnlyInstanceAccepting) ->
+ SameType =
+ case OnlyInstanceAccepting of
+ true -> fun({false,_T}) -> false
+ ; ({_B,T}) -> same_full_type_ref(T,FullTypeRef) end;
+ false -> fun({_B,T}) -> same_full_type_ref(T,FullTypeRef) end
+ end,
+ proper_arith:partition(SameType, RecArgs).
+
+%% Tuples can have as few as 0 elements, unions must have at least 1 choice
+%% and wunions at least 2.
+-spec combine_ret_types([ret_type()], {'tuple',boolean()} | 'union'
+ | 'wunion') -> ret_type().
+combine_ret_types(RetTypes, EnclosingType) ->
+ case lists:all(fun is_simple_ret_type/1, RetTypes) of
+ true ->
+ %% This should never happen for wunion.
+ Combine = case EnclosingType of
+ {tuple,false} -> fun proper_types:tuple/1;
+ {tuple,true} -> fun proper_types:fixed_list/1;
+ union -> fun proper_types:union/1
+ end,
+ FinTypes = [T || {simple,T} <- RetTypes],
+ {simple, Combine(FinTypes)};
+ false ->
+ NumTypes = length(RetTypes),
+ {RevRecFuns,RevRecArgsList,NumRecs} =
+ lists:foldl(fun add_ret_type/2, {[],[],0}, RetTypes),
+ RecFuns = lists:reverse(RevRecFuns),
+ RecArgsList = lists:reverse(RevRecArgsList),
+ RecArgLens = [length(RecArgs) || RecArgs <- RecArgsList],
+ RecFunInfo = {NumTypes,NumRecs,RecArgLens,RecFuns},
+ FlatRecArgs = lists:flatten(RecArgsList),
+ {NewRecFun,NewRecArgs} =
+ case EnclosingType of
+ {tuple,ToList} ->
+ {tuple_rec_fun(RecFunInfo,ToList),
+ soft_clean_rec_args(FlatRecArgs,RecFunInfo,ToList)};
+ union ->
+ {union_rec_fun(RecFunInfo),clean_rec_args(FlatRecArgs)};
+ wunion ->
+ {wunion_rec_fun(RecFunInfo),
+ clean_rec_args(FlatRecArgs)}
+ end,
+ {rec, NewRecFun, NewRecArgs}
+ end.
+
+-spec tuple_rec_fun(rec_fun_info(), boolean()) -> rec_fun().
+tuple_rec_fun({_NumTypes,NumRecs,RecArgLens,RecFuns}, ToList) ->
+ Combine = case ToList of
+ true -> fun proper_types:fixed_list/1;
+ false -> fun proper_types:tuple/1
+ end,
+ fun(AllGFs,TopSize) ->
+ Size = TopSize div NumRecs,
+ GFsList = proper_arith:unflatten(AllGFs, RecArgLens),
+ ArgsList = [[GenFuns,Size] || GenFuns <- GFsList],
+ ZipFun = fun erlang:apply/2,
+ Combine(lists:zipwith(ZipFun, RecFuns, ArgsList))
+ end.
+
+-spec union_rec_fun(rec_fun_info()) -> rec_fun().
+union_rec_fun({_NumTypes,_NumRecs,RecArgLens,RecFuns}) ->
+ fun(AllGFs,Size) ->
+ GFsList = proper_arith:unflatten(AllGFs, RecArgLens),
+ ArgsList = [[GenFuns,Size] || GenFuns <- GFsList],
+ ZipFun = fun(F,A) -> ?LAZY(apply(F,A)) end,
+ proper_types:union(lists:zipwith(ZipFun, RecFuns, ArgsList))
+ end.
+
+-spec wunion_rec_fun(rec_fun_info()) -> rec_fun().
+wunion_rec_fun({NumTypes,_NumRecs,RecArgLens,RecFuns}) ->
+ fun(AllGFs,Size) ->
+ GFsList = proper_arith:unflatten(AllGFs, RecArgLens),
+ ArgsList = [[GenFuns,Size] || GenFuns <- GFsList],
+ ZipFun = fun(W,F,A) -> {W,?LAZY(apply(F,A))} end,
+ RecWeight = erlang:max(1, Size div (NumTypes - 1)),
+ Weights = [1 | lists:duplicate(NumTypes - 1, RecWeight)],
+ WeightedChoices = lists:zipwith3(ZipFun, Weights, RecFuns, ArgsList),
+ proper_types:wunion(WeightedChoices)
+ end.
+
+-spec add_ret_type(ret_type(), {[rec_fun()],[rec_args()],non_neg_integer()}) ->
+ {[rec_fun()],[rec_args()],non_neg_integer()}.
+add_ret_type({simple,FinType}, {RecFuns,RecArgsList,NumRecs}) ->
+ {[fun([],_) -> FinType end | RecFuns], [[] | RecArgsList], NumRecs};
+add_ret_type({rec,RecFun,RecArgs}, {RecFuns,RecArgsList,NumRecs}) ->
+ {[RecFun | RecFuns], [RecArgs | RecArgsList], NumRecs + 1}.
+
+-spec is_simple_ret_type(ret_type()) -> boolean().
+is_simple_ret_type({simple,_FinType}) ->
+ true;
+is_simple_ret_type({rec,_RecFun,_RecArgs}) ->
+ false.
+
+-spec clean_rec_args(rec_args()) -> rec_args().
+clean_rec_args(RecArgs) ->
+ [{false,F} || {_B,F} <- RecArgs].
+
+-spec soft_clean_rec_args(rec_args(), rec_fun_info(), boolean()) -> rec_args().
+soft_clean_rec_args(RecArgs, RecFunInfo, ToList) ->
+ soft_clean_rec_args_tr(RecArgs, [], RecFunInfo, ToList, false, 1).
+
+-spec soft_clean_rec_args_tr(rec_args(), rec_args(), rec_fun_info(), boolean(),
+ boolean(), position()) -> rec_args().
+soft_clean_rec_args_tr([], Acc, _RecFunInfo, _ToList, _FoundListInst, _Pos) ->
+ lists:reverse(Acc);
+soft_clean_rec_args_tr([{{list,_NonEmpty,_AltRecFun},FTRef} | Rest], Acc,
+ RecFunInfo, ToList, true, Pos) ->
+ NewArg = {false,FTRef},
+ soft_clean_rec_args_tr(Rest, [NewArg|Acc], RecFunInfo, ToList, true, Pos+1);
+soft_clean_rec_args_tr([{{list,NonEmpty,AltRecFun},FTRef} | Rest], Acc,
+ RecFunInfo, ToList, false, Pos) ->
+ {NumTypes,NumRecs,RecArgLens,RecFuns} = RecFunInfo,
+ AltRecFunPos = get_group(Pos, RecArgLens),
+ AltRecFuns = proper_arith:list_update(AltRecFunPos, AltRecFun, RecFuns),
+ AltRecFunInfo = {NumTypes,NumRecs,RecArgLens,AltRecFuns},
+ NewArg = {{list,NonEmpty,tuple_rec_fun(AltRecFunInfo,ToList)},FTRef},
+ soft_clean_rec_args_tr(Rest, [NewArg|Acc], RecFunInfo, ToList, true, Pos+1);
+soft_clean_rec_args_tr([Arg | Rest], Acc, RecFunInfo, ToList, FoundListInst,
+ Pos) ->
+ soft_clean_rec_args_tr(Rest, [Arg | Acc], RecFunInfo, ToList, FoundListInst,
+ Pos+1).
+
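+%% Returns the index of the group that the Pos-th element falls into, given
+%% the sizes of all groups, e.g. get_group(5, [3,4]) =:= 2.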
+-spec get_group(pos_integer(), [non_neg_integer()]) -> pos_integer().
+get_group(Pos, AllMembers) ->
+ get_group_tr(Pos, AllMembers, 1).
+
+-spec get_group_tr(pos_integer(), [non_neg_integer()], pos_integer()) ->
+ pos_integer().
+get_group_tr(Pos, [Members | Rest], GroupNum) ->
+ case Pos =< Members of
+ true -> GroupNum;
+ false -> get_group_tr(Pos - Members, Rest, GroupNum + 1)
+ end.
+
+-spec same_full_type_ref(full_type_ref(), term()) -> boolean().
+same_full_type_ref({SameMod,type,SameName,Args1},
+ {SameMod,type,SameName,Args2}) ->
+ length(Args1) =:= length(Args2)
+ andalso lists:all(fun({A,B}) -> same_ret_type(A,B) end,
+ lists:zip(Args1, Args2));
+same_full_type_ref({SameMod,record,SameName,SubstsDict1},
+ {SameMod,record,SameName,SubstsDict2}) ->
+ same_substs_dict(SubstsDict1, SubstsDict2);
+same_full_type_ref(_, _) ->
+ false.
+
+-spec same_ret_type(ret_type(), ret_type()) -> boolean().
+same_ret_type({simple,FinType1}, {simple,FinType2}) ->
+ same_fin_type(FinType1, FinType2);
+same_ret_type({rec,RecFun1,RecArgs1}, {rec,RecFun2,RecArgs2}) ->
+ NumRecArgs = length(RecArgs1),
+ length(RecArgs2) =:= NumRecArgs
+ andalso lists:all(fun({A1,A2}) -> same_rec_arg(A1,A2,NumRecArgs) end,
+ lists:zip(RecArgs1,RecArgs2))
+ andalso same_rec_fun(RecFun1, RecFun2, NumRecArgs);
+same_ret_type(_, _) ->
+ false.
+
+%% TODO: Is this too strict?
+-spec same_rec_arg(rec_arg(), rec_arg(), arity()) -> boolean().
+same_rec_arg({{list,SameBool,AltRecFun1},FTRef1},
+ {{list,SameBool,AltRecFun2},FTRef2}, NumRecArgs) ->
+ same_rec_fun(AltRecFun1, AltRecFun2, NumRecArgs)
+ andalso same_full_type_ref(FTRef1, FTRef2);
+same_rec_arg({true,FTRef1}, {true,FTRef2}, _NumRecArgs) ->
+ same_full_type_ref(FTRef1, FTRef2);
+same_rec_arg({false,FTRef1}, {false,FTRef2}, _NumRecArgs) ->
+ same_full_type_ref(FTRef1, FTRef2);
+same_rec_arg(_, _, _NumRecArgs) ->
+ false.
+
+-spec same_substs_dict(substs_dict(), substs_dict()) -> boolean().
+same_substs_dict(SubstsDict1, SubstsDict2) ->
+ SameKVPair = fun({{_K,V1},{_K,V2}}) -> same_ret_type(V1,V2);
+ (_) -> false
+ end,
+ SubstsKVList1 = lists:sort(dict:to_list(SubstsDict1)),
+ SubstsKVList2 = lists:sort(dict:to_list(SubstsDict2)),
+ length(SubstsKVList1) =:= length(SubstsKVList2)
+ andalso lists:all(SameKVPair, lists:zip(SubstsKVList1,SubstsKVList2)).
+
+-spec same_fin_type(fin_type(), fin_type()) -> boolean().
+same_fin_type(Type1, Type2) ->
+ proper_types:equal_types(Type1, Type2).
+
+-spec same_rec_fun(rec_fun(), rec_fun(), arity()) -> boolean().
+same_rec_fun(RecFun1, RecFun2, NumRecArgs) ->
+  %% It's OK that we return a type here, even if there's a 'true' flag for
+  %% the use of an instance.
+ GenFun = fun(_Size) -> proper_types:exactly('$dummy') end,
+ GenFuns = lists:duplicate(NumRecArgs,GenFun),
+ same_fin_type(RecFun1(GenFuns,0), RecFun2(GenFuns,0)).
diff --git a/lib/dialyzer/test/dialyzer_SUITE.erl b/lib/dialyzer/test/dialyzer_SUITE.erl
index 80f4508ec6..d46181f66f 100644
--- a/lib/dialyzer/test/dialyzer_SUITE.erl
+++ b/lib/dialyzer/test/dialyzer_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2015. All Rights Reserved.
+%% Copyright Ericsson AB 2015-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -19,7 +19,7 @@
%%
-module(dialyzer_SUITE).
--include_lib("test_server/include/test_server.hrl").
+-include_lib("common_test/include/ct.hrl").
%% Default timetrap timeout (set in init_per_testcase).
-define(default_timeout, ?t:minutes(1)).
diff --git a/lib/dialyzer/test/dialyzer_common.erl b/lib/dialyzer/test/dialyzer_common.erl
index d2b1026c06..48083a2731 100644
--- a/lib/dialyzer/test/dialyzer_common.erl
+++ b/lib/dialyzer/test/dialyzer_common.erl
@@ -7,7 +7,7 @@
-module(dialyzer_common).
--export([check_plt/1, check/4, create_all_suites/0, new_tests/2]).
+-export([check_plt/1, check/4, create_all_suites/0, new_tests/2, plt_file/1]).
-include_lib("kernel/include/file.hrl").
@@ -39,7 +39,7 @@
check_plt(OutDir) ->
io:format("Checking plt:"),
- PltFilename = filename:join(OutDir, ?plt_filename),
+ PltFilename = plt_file(OutDir),
case file:read_file_info(PltFilename) of
{ok, _} -> dialyzer_check_plt(PltFilename);
{error, _ } ->
@@ -63,6 +63,11 @@ check_plt(OutDir) ->
end
end.
+-spec plt_file(string()) -> string().
+
+plt_file(OutDir) ->
+ filename:join(OutDir, ?plt_filename).
+
dialyzer_check_plt(PltFilename) ->
try dialyzer:run([{analysis_type, plt_check},
{init_plt, PltFilename}]) of
@@ -119,7 +124,7 @@ build_plt(PltFilename) ->
'same' | {differ, [term()]}.
check(TestCase, Opts, Dir, OutDir) ->
- PltFilename = filename:join(OutDir, ?plt_filename),
+ PltFilename = plt_file(OutDir),
SrcDir = filename:join(Dir, ?input_files_directory),
ResDir = filename:join(Dir, ?result_files_directory),
Filename = filename:join(SrcDir, atom_to_list(TestCase)),
diff --git a/lib/dialyzer/test/map_SUITE_data/dialyzer_options b/lib/dialyzer/test/map_SUITE_data/dialyzer_options
new file mode 100644
index 0000000000..02425c33f2
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/dialyzer_options
@@ -0,0 +1,2 @@
+{dialyzer_options, []}.
+{time_limit, 30}.
diff --git a/lib/dialyzer/test/map_SUITE_data/results/bad_argument b/lib/dialyzer/test/map_SUITE_data/results/bad_argument
new file mode 100644
index 0000000000..af61a89638
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/results/bad_argument
@@ -0,0 +1,5 @@
+
+bad_argument.erl:14: Function t3/0 has no local return
+bad_argument.erl:15: Guard test is_map('not_a_map') can never succeed
+bad_argument.erl:5: Function t/0 has no local return
+bad_argument.erl:6: A key of type 'b' cannot exist in a map of type #{'a':='q'}
diff --git a/lib/dialyzer/test/map_SUITE_data/results/contract b/lib/dialyzer/test/map_SUITE_data/results/contract
new file mode 100644
index 0000000000..0f6e1d0c65
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/results/contract
@@ -0,0 +1,7 @@
+
+contract.erl:10: Function t2/0 has no local return
+contract.erl:10: The call missing:f(#{'a':=1, 'c':=4}) breaks the contract (#{'a':=1,'b'=>2,'c'=>3}) -> 'ok'
+contract.erl:12: Function t3/0 has no local return
+contract.erl:12: The call missing:f(#{'a':=1, 'b':=2, 'e':=3}) breaks the contract (#{'a':=1,'b'=>2,'c'=>3}) -> 'ok'
+contract.erl:8: Function t1/0 has no local return
+contract.erl:8: The call missing:f(#{'b':=2}) breaks the contract (#{'a':=1,'b'=>2,'c'=>3}) -> 'ok'
diff --git a/lib/dialyzer/test/map_SUITE_data/results/contract_violation b/lib/dialyzer/test/map_SUITE_data/results/contract_violation
new file mode 100644
index 0000000000..958321618f
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/results/contract_violation
@@ -0,0 +1,3 @@
+
+contract_violation.erl:12: The pattern #{I:=Loc} can never match the type #{}
+contract_violation.erl:16: Invalid type specification for function contract_violation:beam_disasm_lines/2. The success typing is ('none' | <<_:32,_:_*8>>,_) -> #{pos_integer()=>{'location',_,_}}
diff --git a/lib/dialyzer/test/map_SUITE_data/results/exact b/lib/dialyzer/test/map_SUITE_data/results/exact
new file mode 100644
index 0000000000..ea00e61330
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/results/exact
@@ -0,0 +1,3 @@
+
+exact.erl:15: Function t2/1 has no local return
+exact.erl:19: The variable _ can never match since previous clauses completely covered the type #{'a':=_, _=>_}
diff --git a/lib/dialyzer/test/map_SUITE_data/results/guard_update b/lib/dialyzer/test/map_SUITE_data/results/guard_update
new file mode 100644
index 0000000000..98df23907f
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/results/guard_update
@@ -0,0 +1,5 @@
+
+guard_update.erl:5: Function t/0 has no local return
+guard_update.erl:6: The call guard_update:f(#{'a':=2}) will never return since it differs in the 1st argument from the success typing arguments: (#{'b':=_, _=>_})
+guard_update.erl:8: Clause guard cannot succeed. The variable M was matched against the type #{'a':=2}
+guard_update.erl:8: Function f/1 has no local return
diff --git a/lib/dialyzer/test/map_SUITE_data/results/initial_dataflow b/lib/dialyzer/test/map_SUITE_data/results/initial_dataflow
new file mode 100644
index 0000000000..69144f9208
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/results/initial_dataflow
@@ -0,0 +1,4 @@
+
+initial_dataflow.erl:11: The variable Q can never match since previous clauses completely covered the type #{}
+initial_dataflow.erl:5: Function test/0 has no local return
+initial_dataflow.erl:6: The pattern 'false' can never match the type 'true'
diff --git a/lib/dialyzer/test/map_SUITE_data/results/is_map_guard b/lib/dialyzer/test/map_SUITE_data/results/is_map_guard
new file mode 100644
index 0000000000..6ab464d865
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/results/is_map_guard
@@ -0,0 +1,5 @@
+
+is_map_guard.erl:13: Function t2/0 has no local return
+is_map_guard.erl:15: The call is_map_guard:explicit('still_not_map') will never return since it differs in the 1st argument from the success typing arguments: (map())
+is_map_guard.erl:6: Function t1/0 has no local return
+is_map_guard.erl:8: The call is_map_guard:implicit('not_a_map') will never return since it differs in the 1st argument from the success typing arguments: (map())
diff --git a/lib/dialyzer/test/map_SUITE_data/results/map_galore b/lib/dialyzer/test/map_SUITE_data/results/map_galore
new file mode 100644
index 0000000000..c34ba5cf30
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/results/map_galore
@@ -0,0 +1,28 @@
+
+map_galore.erl:1000: A key of type 42 cannot exist in a map of type #{1:='a', 2:='b', 4:='d', 5:='e', float()=>'c' | 'v'}
+map_galore.erl:1080: A key of type 'nonexisting' cannot exist in a map of type #{#{'map':='key', 'one':='small'}:=[32 | 49 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'second':='small'}:=[32 | 50 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'third':='small'}:=[32 | 51 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], 10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 16:='a6', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 26:='b6', 27:='b7', 28:='b8', 29:='b9', 30:=[48 | 99,...], 31:=[49 | 99,...], 32:=[50 | 99,...], 33:=[51 | 99,...], 34:=[52 | 99,...], 35:=[53 | 99,...], 36:=[54 | 99,...], 37:=[55 | 99,...], 38:=[56 | 99,...], 39:=[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...],...]} | #{10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 27:='b7', 28:='b8', 29:='b9', 30:=[48 | 99,...], 31:=[49 | 99,...], 32:=[50 | 99,...], 33:=[51 | 99,...], 34:=[52 | 99,...], 35:=[53 | 99,...], 37:=[55 | 99,...], 38:=[56 | 99,...], 39:=[57 | 99,...], 'k16'=>'a6', 'k26'=>'b6', 'k36'=>[54 | 99,...], 16=>'a6', 26=>'b6', 36=>[54 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...],...]}=>[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 100 | 101,...]}=>atom() | [1..255,...]}
+map_galore.erl:1082: A key of type 42 cannot exist in a map of type #{#{'map':='key', 'one':='small'}:=[32 | 49 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'second':='small'}:=[32 | 50 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'third':='small'}:=[32 | 51 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], 10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 16:='a6', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 26:='b6', 27:='b7', 28:='b8', 29:='b9', 30:=[48 | 99,...], 31:=[49 | 99,...], 32:=[50 | 99,...], 33:=[51 | 99,...], 34:=[52 | 99,...], 35:=[53 | 99,...], 36:=[54 | 99,...], 37:=[55 | 99,...], 38:=[56 | 99,...], 39:=[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...],...]} | #{10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 27:='b7', 28:='b8', 29:='b9', 30:=[48 | 99,...], 31:=[49 | 99,...], 32:=[50 | 99,...], 33:=[51 | 99,...], 34:=[52 | 99,...], 35:=[53 | 99,...], 37:=[55 | 99,...], 38:=[56 | 99,...], 39:=[57 | 99,...], 'k16'=>'a6', 'k26'=>'b6', 'k36'=>[54 | 99,...], 16=>'a6', 26=>'b6', 36=>[54 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...],...]}=>[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 100 | 101,...]}=>atom() | [1..255,...]}
+map_galore.erl:1140: The call map_galore:map_guard_sequence_1(#{'seq':=6, 'val':=[101,...]}) will never return since it differs in the 1st argument from the success typing arguments: (#{'seq':=1 | 2 | 3 | 4 | 5, 'val':=[97 | 98 | 99 | 100 | 101,...], #{'map':='key', 'one':='small'}=>[32 | 49 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'second':='small'}=>[32 | 50 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'third':='small'}=>[32 | 51 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], 10=>'a0', 11=>'a1', 12=>'a2', 13=>'a3', 14=>'a4', 15=>'a5', 16=>'a6', 17=>'a7', 18=>'a8', 19=>'a9', 20=>'b0', 21=>'b1', 22=>'b2', 23=>'b3', 24=>'b4', 25=>'b5', 26=>'b6', 27=>'b7', 28=>'b8', 29=>'b9', 30=>[48 | 99,...], 31=>[49 | 99,...], 32=>[50 | 99,...], 33=>[51 | 99,...], 34=>[52 | 99,...], 35=>[53 | 99,...], 36=>[54 | 99,...], 37=>[55 | 99,...], 38=>[56 | 99,...], 39=>[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[any(),...]} | #{10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 27:='b7', 28:='b8', 29:='b9', 30:=[any(),...], 31:=[any(),...], 32:=[any(),...], 33:=[any(),...], 34:=[any(),...], 35:=[any(),...], 37:=[any(),...], 38:=[any(),...], 39:=[any(),...], 'k16'=>'a6', 'k26'=>'b6', 'k36'=>[any(),...], 16=>'a6', 26=>'b6', 36=>[any(),...], <<_:16>> | [any(),...] | {_}=>[any(),...]}=>atom() | [1..255,...]})
+map_galore.erl:1141: The call map_galore:map_guard_sequence_2(#{'b':=5}) will never return since it differs in the 1st argument from the success typing arguments: (#{'a':='gg' | 'kk' | 'sc' | 3 | 4, 'b'=>'other' | 3 | 4 | 5, 'c'=>'sc2', #{'map':='key', 'one':='small'}=>[32 | 49 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'second':='small'}=>[32 | 50 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'third':='small'}=>[32 | 51 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], 10=>'a0', 11=>'a1', 12=>'a2', 13=>'a3', 14=>'a4', 15=>'a5', 16=>'a6', 17=>'a7', 18=>'a8', 19=>'a9', 20=>'b0', 21=>'b1', 22=>'b2', 23=>'b3', 24=>'b4', 25=>'b5', 26=>'b6', 27=>'b7', 28=>'b8', 29=>'b9', 30=>[48 | 99,...], 31=>[49 | 99,...], 32=>[50 | 99,...], 33=>[51 | 99,...], 34=>[52 | 99,...], 35=>[53 | 99,...], 36=>[54 | 99,...], 37=>[55 | 99,...], 38=>[56 | 99,...], 39=>[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[any(),...]} | #{10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 27:='b7', 28:='b8', 29:='b9', 30:=[any(),...], 31:=[any(),...], 32:=[any(),...], 33:=[any(),...], 34:=[any(),...], 35:=[any(),...], 37:=[any(),...], 38:=[any(),...], 39:=[any(),...], 'k16'=>'a6', 'k26'=>'b6', 'k36'=>[any(),...], 16=>'a6', 26=>'b6', 36=>[any(),...], <<_:16>> | [any(),...] | {_}=>[any(),...]}=>atom() | [1..255,...]})
+map_galore.erl:1209: The call map_galore:map_guard_sequence_1(#{'seq':=6, 'val':=[101,...], #{'map':='key', 'one':='small'}:=[32 | 49 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'second':='small'}:=[32 | 50 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'third':='small'}:=[32 | 51 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], 10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 16:='a6', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 26:='b6', 27:='b7', 28:='b8', 29:='b9', 30:=[48 | 99,...], 31:=[49 | 99,...], 32:=[50 | 99,...], 33:=[51 | 99,...], 34:=[52 | 99,...], 35:=[53 | 99,...], 36:=[54 | 99,...], 37:=[55 | 99,...], 38:=[56 | 99,...], 39:=[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | 3,...]} | #{10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 27:='b7', 28:='b8', 29:='b9', 30:=[48 | 99,...], 31:=[49 | 99,...], 32:=[50 | 99,...], 33:=[51 | 99,...], 34:=[52 | 99,...], 35:=[53 | 99,...], 37:=[55 | 99,...], 38:=[56 | 99,...], 39:=[57 | 99,...], 'k16'=>'a6', 'k26'=>'b6', 'k36'=>[54 | 99,...], 16=>'a6', 26=>'b6', 36=>[54 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...],...]}=>[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 100 | 101,...]}=>atom() | [1..255,...]}) will never return since it differs in the 1st argument from the success typing arguments: (#{'seq':=1 | 2 | 3 | 4 | 5, 'val':=[97 | 98 | 99 | 100 | 101,...], #{'map':='key', 'one':='small'}=>[32 | 49 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'second':='small'}=>[32 | 50 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'third':='small'}=>[32 | 51 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], 10=>'a0', 11=>'a1', 12=>'a2', 13=>'a3', 14=>'a4', 15=>'a5', 16=>'a6', 17=>'a7', 18=>'a8', 19=>'a9', 20=>'b0', 21=>'b1', 22=>'b2', 23=>'b3', 24=>'b4', 25=>'b5', 26=>'b6', 27=>'b7', 28=>'b8', 29=>'b9', 30=>[48 | 99,...], 31=>[49 | 99,...], 32=>[50 | 99,...], 33=>[51 | 99,...], 34=>[52 | 99,...], 35=>[53 | 99,...], 36=>[54 | 99,...], 37=>[55 | 99,...], 38=>[56 | 99,...], 39=>[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[any(),...]} | #{10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 27:='b7', 28:='b8', 29:='b9', 30:=[any(),...], 31:=[any(),...], 32:=[any(),...], 33:=[any(),...], 34:=[any(),...], 35:=[any(),...], 37:=[any(),...], 38:=[any(),...], 39:=[any(),...], 'k16'=>'a6', 'k26'=>'b6', 'k36'=>[any(),...], 16=>'a6', 26=>'b6', 36=>[any(),...], <<_:16>> | [any(),...] | {_}=>[any(),...]}=>atom() | [1..255,...]})
+map_galore.erl:1210: The call map_galore:map_guard_sequence_2(#{'b':=5, #{'map':='key', 'one':='small'}:=[32 | 49 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'second':='small'}:=[32 | 50 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'third':='small'}:=[32 | 51 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], 10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 16:='a6', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 26:='b6', 27:='b7', 28:='b8', 29:='b9', 30:=[48 | 99,...], 31:=[49 | 99,...], 32:=[50 | 99,...], 33:=[51 | 99,...], 34:=[52 | 99,...], 35:=[53 | 99,...], 36:=[54 | 99,...], 37:=[55 | 99,...], 38:=[56 | 99,...], 39:=[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | 3,...]} | #{10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 27:='b7', 28:='b8', 29:='b9', 30:=[48 | 99,...], 31:=[49 | 99,...], 32:=[50 | 99,...], 33:=[51 | 99,...], 34:=[52 | 99,...], 35:=[53 | 99,...], 37:=[55 | 99,...], 38:=[56 | 99,...], 39:=[57 | 99,...], 'k16'=>'a6', 'k26'=>'b6', 'k36'=>[54 | 99,...], 16=>'a6', 26=>'b6', 36=>[54 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...],...]}=>[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 100 | 101,...]}=>atom() | [1..255,...]}) will never return since it differs in the 1st argument from the success typing arguments: (#{'a':='gg' | 'kk' | 'sc' | 3 | 4, 'b'=>'other' | 3 | 4 | 5, 'c'=>'sc2', #{'map':='key', 'one':='small'}=>[32 | 49 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'second':='small'}=>[32 | 50 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'third':='small'}=>[32 | 51 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], 10=>'a0', 11=>'a1', 12=>'a2', 13=>'a3', 14=>'a4', 15=>'a5', 16=>'a6', 17=>'a7', 18=>'a8', 19=>'a9', 20=>'b0', 21=>'b1', 22=>'b2', 23=>'b3', 24=>'b4', 25=>'b5', 26=>'b6', 27=>'b7', 28=>'b8', 29=>'b9', 30=>[48 | 99,...], 31=>[49 | 99,...], 32=>[50 | 99,...], 33=>[51 | 99,...], 34=>[52 | 99,...], 35=>[53 | 99,...], 36=>[54 | 99,...], 37=>[55 | 99,...], 38=>[56 | 99,...], 39=>[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[any(),...]} | #{10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 27:='b7', 28:='b8', 29:='b9', 30:=[any(),...], 31:=[any(),...], 32:=[any(),...], 33:=[any(),...], 34:=[any(),...], 35:=[any(),...], 37:=[any(),...], 38:=[any(),...], 39:=[any(),...], 'k16'=>'a6', 'k26'=>'b6', 'k36'=>[any(),...], 16=>'a6', 26=>'b6', 36=>[any(),...], <<_:16>> | [any(),...] | {_}=>[any(),...]}=>atom() | [1..255,...]})
+map_galore.erl:1418: Fun application with arguments (#{'s':='none', 'v':='none'}) will never return since it differs in the 1st argument from the success typing arguments: (#{'s':='l' | 't' | 'v', 'v':='none' | <<_:16>> | [<<_:16>>,...] | {<<_:16>>,<<_:16>>}})
+map_galore.erl:1491: The test #{} =:= #{'a':=1} can never evaluate to 'true'
+map_galore.erl:1492: The test #{'a':=1} =:= #{} can never evaluate to 'true'
+map_galore.erl:1495: The test #{'a':=1} =:= #{'a':=2} can never evaluate to 'true'
+map_galore.erl:1496: The test #{'a':=2} =:= #{'a':=1} can never evaluate to 'true'
+map_galore.erl:1497: The test #{'a':=2, 'b':=1} =:= #{'a':=1, 'b':=3} can never evaluate to 'true'
+map_galore.erl:1498: The test #{'a':=1, 'b':=1} =:= #{'a':=1, 'b':=3} can never evaluate to 'true'
+map_galore.erl:1762: The call maps:get({1, 1},#{{1,float()}=>[101 | 108 | 112 | 116 | 117,...]}) will never return since the success typing arguments are (any(),map())
+map_galore.erl:1763: The call maps:get('a',#{}) will never return since the success typing arguments are (any(),map())
+map_galore.erl:1765: The call maps:get('a',#{'b':=1, 'c':=2}) will never return since the success typing arguments are (any(),map())
+map_galore.erl:186: The pattern #{'x':=2} can never match the type #{'x':=3}
+map_galore.erl:187: The pattern #{'x':=3} can never match the type {'a','b','c'}
+map_galore.erl:188: The pattern #{'x':=3} can never match the type #{'y':=3}
+map_galore.erl:189: The pattern #{'x':=3} can never match the type #{'x':=[101 | 104 | 114 | 116,...]}
+map_galore.erl:2280: Cons will produce an improper list since its 2nd argument is {'b','a'}
+map_galore.erl:2280: The call maps:from_list(nonempty_improper_list({'a','b'},{'b','a'})) will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}])
+map_galore.erl:2281: The call maps:from_list('a') will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}])
+map_galore.erl:2282: The call maps:from_list(42) will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}])
+map_galore.erl:997: A key of type 'nonexisting' cannot exist in a map of type #{}
+map_galore.erl:998: A key of type 'nonexisting' cannot exist in a map of type #{1:='a', 2:='b', 4:='d', 5:='e', float()=>'c'}
diff --git a/lib/dialyzer/test/map_SUITE_data/results/map_in_guard b/lib/dialyzer/test/map_SUITE_data/results/map_in_guard
new file mode 100644
index 0000000000..1015f76128
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/results/map_in_guard
@@ -0,0 +1,4 @@
+
+map_in_guard.erl:10: The call map_in_guard:assoc_update('not_a_map') will never return since it differs in the 1st argument from the success typing arguments: (#{})
+map_in_guard.erl:13: The call map_in_guard:assoc_guard_clause('not_a_map') will never return since it differs in the 1st argument from the success typing arguments: (#{})
+map_in_guard.erl:20: The call map_in_guard:exact_guard_clause(#{}) will never return since it differs in the 1st argument from the success typing arguments: (#{'a':='q'})
diff --git a/lib/dialyzer/test/map_SUITE_data/results/map_in_guard2 b/lib/dialyzer/test/map_SUITE_data/results/map_in_guard2
new file mode 100644
index 0000000000..46e2e8d36c
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/results/map_in_guard2
@@ -0,0 +1,13 @@
+
+map_in_guard2.erl:10: The call map_in_guard2:assoc_guard_clause('not_a_map') will never return since it differs in the 1st argument from the success typing arguments: (map())
+map_in_guard2.erl:12: The pattern 'true' can never match the type 'false'
+map_in_guard2.erl:14: The call map_in_guard2:exact_guard_clause(#{}) will never return since it differs in the 1st argument from the success typing arguments: (#{'a':=_, _=>_})
+map_in_guard2.erl:17: Guard test is_map(M::'not_a_map') can never succeed
+map_in_guard2.erl:20: Function assoc_update/1 has no local return
+map_in_guard2.erl:20: Guard test is_map(M::'not_a_map') can never succeed
+map_in_guard2.erl:22: Function assoc_guard_clause/1 has no local return
+map_in_guard2.erl:22: Guard test is_map(M::'not_a_map') can never succeed
+map_in_guard2.erl:24: Clause guard cannot succeed. The variable M was matched against the type #{}
+map_in_guard2.erl:27: Clause guard cannot succeed. The variable M was matched against the type #{}
+map_in_guard2.erl:27: Function exact_guard_clause/1 has no local return
+map_in_guard2.erl:8: The call map_in_guard2:assoc_update('not_a_map') will never return since it differs in the 1st argument from the success typing arguments: (map())
diff --git a/lib/dialyzer/test/map_SUITE_data/results/map_size b/lib/dialyzer/test/map_SUITE_data/results/map_size
new file mode 100644
index 0000000000..fc6c1f028c
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/results/map_size
@@ -0,0 +1,13 @@
+
+map_size.erl:11: The pattern 1 can never match the type 0
+map_size.erl:13: Function t2/0 has no local return
+map_size.erl:15: Function p/1 has no local return
+map_size.erl:15: Guard test 1 =:= 0 can never succeed
+map_size.erl:17: Function t3/0 has no local return
+map_size.erl:21: The pattern 4 can never match the type 1 | 2 | 3
+map_size.erl:23: Function t4/0 has no local return
+map_size.erl:24: The pattern 0 can never match the type 1 | 2 | 3
+map_size.erl:26: Function t5/1 has no local return
+map_size.erl:5: Function t1/0 has no local return
+map_size.erl:7: The pattern 1 can never match the type 0
+map_size.erl:9: Function e1/0 has no local return
diff --git a/lib/dialyzer/test/map_SUITE_data/results/maps_merge b/lib/dialyzer/test/map_SUITE_data/results/maps_merge
new file mode 100644
index 0000000000..0c347b4cdb
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/results/maps_merge
@@ -0,0 +1,11 @@
+
+maps_merge.erl:10: The pattern #{_:=_} can never match the type #{'a':=1, 3:='ok', 'q'=>none(), 7=>none(), atom() | integer()=>_}
+maps_merge.erl:12: Function t3/0 has no local return
+maps_merge.erl:14: The pattern #{7:='q'} can never match the type #{'a':=1, 3:='ok', 'q'=>none(), 7=>none(), atom() | integer()=>_}
+maps_merge.erl:16: Function t4/0 has no local return
+maps_merge.erl:18: The pattern #{7:='q'} can never match the type #{'a':=1, 3:='ok', 'q'=>none(), 7=>none(), atom() | integer()=>_}
+maps_merge.erl:20: Function t5/0 has no local return
+maps_merge.erl:21: The pattern #{'a':=2} can never match the type #{'a':=1, 'q'=>none(), 11=>_, atom()=>_}
+maps_merge.erl:5: Function t1/0 has no local return
+maps_merge.erl:6: The pattern #{'a':=1} can never match the type #{}
+maps_merge.erl:8: Function t2/0 has no local return
diff --git a/lib/dialyzer/test/map_SUITE_data/results/opaque_key b/lib/dialyzer/test/map_SUITE_data/results/opaque_key
new file mode 100644
index 0000000000..2ae0e0c5c6
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/results/opaque_key
@@ -0,0 +1,15 @@
+
+opaque_key_adt.erl:41: Invalid type specification for function opaque_key_adt:s4/0. The success typing is () -> #{1:='a'}
+opaque_key_adt.erl:44: Invalid type specification for function opaque_key_adt:s5/0. The success typing is () -> #{2:=3}
+opaque_key_adt.erl:56: Invalid type specification for function opaque_key_adt:smt1/0. The success typing is () -> #{3:='a'}
+opaque_key_adt.erl:59: Invalid type specification for function opaque_key_adt:smt2/0. The success typing is () -> #{1:='a'}
+opaque_key_use.erl:13: The test opaque_key_use:t() =:= opaque_key_use:t(integer()) can never evaluate to 'true'
+opaque_key_use.erl:24: Attempt to test for equality between a term of type opaque_key_adt:t(integer()) and a term of opaque type opaque_key_adt:t()
+opaque_key_use.erl:37: Function adt_mm1/0 has no local return
+opaque_key_use.erl:40: The attempt to match a term of type opaque_key_adt:m() against the pattern #{A:=R} breaks the opacity of the term
+opaque_key_use.erl:48: Function adt_mu1/0 has no local return
+opaque_key_use.erl:51: Guard test is_map(M::opaque_key_adt:m()) breaks the opacity of its argument
+opaque_key_use.erl:53: Function adt_mu2/0 has no local return
+opaque_key_use.erl:56: Guard test is_map(M::opaque_key_adt:m()) breaks the opacity of its argument
+opaque_key_use.erl:58: Function adt_mu3/0 has no local return
+opaque_key_use.erl:60: Guard test is_map(M::opaque_key_adt:m()) breaks the opacity of its argument
diff --git a/lib/dialyzer/test/map_SUITE_data/results/order b/lib/dialyzer/test/map_SUITE_data/results/order
new file mode 100644
index 0000000000..7be789a11a
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/results/order
@@ -0,0 +1,17 @@
+
+order.erl:12: Function t2/0 has no local return
+order.erl:14: Guard test is_integer(Int::'b') can never succeed
+order.erl:16: The variable _Else can never match since previous clauses completely covered the type 'b'
+order.erl:19: Function t3/0 has no local return
+order.erl:21: Guard test is_integer(Int::'b') can never succeed
+order.erl:23: The variable _Else can never match since previous clauses completely covered the type 'b'
+order.erl:30: The variable _Else can never match since previous clauses completely covered the type 'b' | 1
+order.erl:33: Function t5/0 has no local return
+order.erl:36: The variable Atom can never match since previous clauses completely covered the type 1
+order.erl:37: The variable _Else can never match since previous clauses completely covered the type 1
+order.erl:40: Function t6/0 has no local return
+order.erl:42: Guard test is_integer(Int::'b') can never succeed
+order.erl:44: The variable _Else can never match since previous clauses completely covered the type 'b'
+order.erl:5: Function t1/0 has no local return
+order.erl:7: Guard test is_integer(Int::'b') can never succeed
+order.erl:9: The variable _Else can never match since previous clauses completely covered the type 'b'
diff --git a/lib/dialyzer/test/map_SUITE_data/results/subtract_value_flip b/lib/dialyzer/test/map_SUITE_data/results/subtract_value_flip
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/results/subtract_value_flip
diff --git a/lib/dialyzer/test/map_SUITE_data/results/typeflow b/lib/dialyzer/test/map_SUITE_data/results/typeflow
new file mode 100644
index 0000000000..acfb7f551e
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/results/typeflow
@@ -0,0 +1,4 @@
+
+typeflow.erl:14: Function t2/1 has no local return
+typeflow.erl:16: The call lists:sort(integer()) will never return since it differs in the 1st argument from the success typing arguments: ([any()])
+typeflow.erl:9: The variable _ can never match since previous clauses completely covered the type #{'a':=integer(), _=>_}
diff --git a/lib/dialyzer/test/map_SUITE_data/results/typeflow2 b/lib/dialyzer/test/map_SUITE_data/results/typeflow2
new file mode 100644
index 0000000000..3adf638978
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/results/typeflow2
@@ -0,0 +1,13 @@
+
+typeflow2.erl:10: The pattern #{'a':=X} can never match the type #{'a'=>none(), _=>maybe_improper_list() | integer()}
+typeflow2.erl:11: The pattern #{'a':=X} can never match the type #{'a'=>none(), _=>maybe_improper_list() | integer()}
+typeflow2.erl:26: Function t2/1 has no local return
+typeflow2.erl:29: The call lists:sort(integer()) will never return since it differs in the 1st argument from the success typing arguments: ([any()])
+typeflow2.erl:42: The pattern #{'a':=X} can never match since previous clauses completely covered the type #{'a':=integer()}
+typeflow2.erl:43: The variable _ can never match since previous clauses completely covered the type #{'a':=integer()}
+typeflow2.erl:48: The pattern #{} can never match since previous clauses completely covered the type #{'a':=atom() | maybe_improper_list() | integer()}
+typeflow2.erl:58: The pattern #{'a':=X} can never match the type #{'a'=>none(), _=>maybe_improper_list() | integer()}
+typeflow2.erl:59: The pattern #{'a':=X} can never match the type #{'a'=>none(), _=>maybe_improper_list() | integer()}
+typeflow2.erl:60: The pattern #{'a':=X} can never match the type #{'a'=>none(), _=>maybe_improper_list() | integer()}
+typeflow2.erl:82: The pattern #{'a':=X} can never match the type #{}
+typeflow2.erl:83: The pattern #{'a':=X} can never match the type #{}
diff --git a/lib/dialyzer/test/map_SUITE_data/results/typesig b/lib/dialyzer/test/map_SUITE_data/results/typesig
new file mode 100644
index 0000000000..fb2f851a7d
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/results/typesig
@@ -0,0 +1,5 @@
+
+typesig.erl:5: Function t1/0 has no local return
+typesig.erl:5: The call typesig:test(#{'a':=1}) will never return since it differs in the 1st argument from the success typing arguments: (#{'a':={number()}, _=>_})
+typesig.erl:6: Function t2/0 has no local return
+typesig.erl:6: The call typesig:test(#{'a':={'b'}}) will never return since it differs in the 1st argument from the success typing arguments: (#{'a':={number()}, _=>_})
diff --git a/lib/dialyzer/test/map_SUITE_data/src/bad_argument.erl b/lib/dialyzer/test/map_SUITE_data/src/bad_argument.erl
new file mode 100644
index 0000000000..95e2b32ddc
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/bad_argument.erl
@@ -0,0 +1,19 @@
+-module(bad_argument).
+
+-export([t/0, t2/0, t3/0]).
+
+t() ->
+ _=(id1(#{a=>q}))#{b:=9}.
+
+t2() ->
+ _ = id2(4),
+ X = id2(3),
+ _ = (#{ X => q})#{3 := p},
+ X.
+
+t3() ->
+ (id3(not_a_map))#{a => b}.
+
+id1(X) -> X.
+id2(X) -> X.
+id3(X) -> X.
diff --git a/lib/dialyzer/test/map_SUITE_data/src/bug.erl b/lib/dialyzer/test/map_SUITE_data/src/bug.erl
new file mode 100644
index 0000000000..fc32f5641a
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/bug.erl
@@ -0,0 +1,63 @@
+-module(bug).
+
+-export([t1/0, f1/1
+ ,t2/0, f2/1
+ ,t3/0, f3/1
+ ,t4/0, f4/1
+ ,t5/0, f5/1
+ ]).
+
+t1() ->
+ V = f1(#{a=>b}),
+ case V of
+ #{a := Q} -> Q; %% Must not warn here
+ _ -> ok
+ end,
+ ok.
+
+f1(M) -> %% Should get map() succ typing
+ #{} = M.
+
+t2() ->
+ V = f2([#{a=>b}]),
+ case V of
+ [#{a := P}] -> P; %% Must not warn here
+ _ -> ok
+ end,
+ ok.
+
+f2(M) -> %% Should get [map(),...] succ typing
+ [#{}] = M.
+
+t3() ->
+ V = f3([#{a=>b},a]),
+ case V of
+ [#{a := P}, _Q] -> P; %% Must not warn here
+ _ -> ok
+ end,
+ ok.
+
+f3(M) -> %% Should get [map()|a,...] succ typing
+ [#{},a] = M.
+
+t4() ->
+ V = f4({#{a=>b},{}}),
+ case V of
+ {#{a := P},{}} -> P; %% Must not warn here
+ _ -> ok
+ end,
+ ok.
+
+f4(M) -> %% Should get {map(),{}} succ typing
+ {#{},{}} = M.
+
+t5() ->
+ V = f5(#{k=>q,a=>b}),
+ case V of
+ #{k := q, a := P} -> P; %% Must not warn here
+ _ -> ok
+ end,
+ ok.
+
+f5(M) -> %% Should get #{k:=q, ...} succ typing
+ #{k:=q} = M.
diff --git a/lib/dialyzer/test/map_SUITE_data/src/contract.erl b/lib/dialyzer/test/map_SUITE_data/src/contract.erl
new file mode 100644
index 0000000000..2b31be0d58
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/contract.erl
@@ -0,0 +1,14 @@
+-module(contract).
+
+-export([t1/0, t2/0, t3/0, t4/0]).
+
+-spec f(#{a := 1, b => 2, c => 3}) -> ok.
+f(_) -> ok.
+
+t1() -> f(#{b => 2}).
+
+t2() -> f(#{a => 1, c => 4}).
+
+t3() -> f(#{a => 1, b => 2, e => 3}).
+
+t4() -> f(#{a => 1, b => 2}).
diff --git a/lib/dialyzer/test/map_SUITE_data/src/contract_violation.erl b/lib/dialyzer/test/map_SUITE_data/src/contract_violation.erl
new file mode 100644
index 0000000000..850f2cad34
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/contract_violation.erl
@@ -0,0 +1,29 @@
+-module(contract_violation).
+
+-export([entry/1, beam_disasm_lines/2]).
+
+%%-----------------------------------------------------------------------
+
+-type lines() :: #{non_neg_integer() => {string(), non_neg_integer()}}.
+
+entry(Bin) ->
+ I = 42,
+ case beam_disasm_lines(Bin, ':-)') of
+ #{I := Loc} -> {good, Loc};
+ _ -> bad
+ end.
+
+-spec beam_disasm_lines(binary() | none, module()) -> lines().
+
+beam_disasm_lines(none, _) -> #{};
+beam_disasm_lines(<<NumLines:32, LineBin:NumLines/binary, FileBin/binary>>,
+ _Module) ->
+ Lines = binary_to_term(LineBin),
+ Files = binary_to_term(FileBin),
+ lines_collect_items(Lines, Files, #{}).
+
+lines_collect_items([], _, Acc) -> Acc;
+lines_collect_items([{FileNo, LineNo}|Rest], Files, Acc) ->
+ #{FileNo := File} = Files,
+ lines_collect_items(
+ Rest, Files, Acc#{map_size(Acc)+1 => {location, File, LineNo}}).
diff --git a/lib/dialyzer/test/map_SUITE_data/src/exact.erl b/lib/dialyzer/test/map_SUITE_data/src/exact.erl
new file mode 100644
index 0000000000..e5ad02ec54
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/exact.erl
@@ -0,0 +1,23 @@
+-module(exact).
+
+-export([t1/1, t2/1]).
+
+t1(M = #{}) ->
+ any_map(M),
+ case M of
+ #{a := _} -> error(fail);
+ _ -> ok
+ end.
+
+any_map(X) ->
+ X#{a => 1, a := 2}.
+
+t2(M = #{}) ->
+ has_a(M),
+ case M of
+ #{a := _} -> error(ok);
+ _ -> unreachable
+ end.
+
+has_a(M) ->
+ M#{a := 1, a => 2}.
diff --git a/lib/dialyzer/test/map_SUITE_data/src/guard_update.erl b/lib/dialyzer/test/map_SUITE_data/src/guard_update.erl
new file mode 100644
index 0000000000..19d0089401
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/guard_update.erl
@@ -0,0 +1,18 @@
+-module(guard_update).
+
+-export([t/0, t2/0]).
+
+t() ->
+ f(#{a=>2}). %% Illegal
+
+f(M)
+ when M#{b := 7} =/= q
+ -> ok.
+
+t2() ->
+ f2(#{a=>2}). %% Legal!
+
+f2(M)
+ when M#{b := 7} =/= q;
+ M =/= p
+ -> ok.
diff --git a/lib/dialyzer/test/map_SUITE_data/src/initial_dataflow.erl b/lib/dialyzer/test/map_SUITE_data/src/initial_dataflow.erl
new file mode 100644
index 0000000000..bbc0d5682a
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/initial_dataflow.erl
@@ -0,0 +1,11 @@
+-module(initial_dataflow).
+
+-export([test/0]).
+
+test() ->
+ false = assoc_guard(#{}),
+ true = assoc_guard(not_a_map),
+ ok.
+
+assoc_guard(#{}) -> true;
+assoc_guard(Q) -> false.
diff --git a/lib/dialyzer/test/map_SUITE_data/src/is_map_guard.erl b/lib/dialyzer/test/map_SUITE_data/src/is_map_guard.erl
new file mode 100644
index 0000000000..ceb4a8763a
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/is_map_guard.erl
@@ -0,0 +1,17 @@
+-module(is_map_guard).
+
+-export([t1/0, t2/0, implicit/1, explicit/1
+ ]).
+
+t1() ->
+ _ = implicit(#{}),
+ implicit(not_a_map).
+
+implicit(M) ->
+ M#{}.
+
+t2() ->
+ explicit(#{q=>d}),
+ explicit(still_not_map).
+
+explicit(M) when is_map(M) -> ok.
diff --git a/lib/dialyzer/test/map_SUITE_data/src/mand_remote_val/a.erl b/lib/dialyzer/test/map_SUITE_data/src/mand_remote_val/a.erl
new file mode 100644
index 0000000000..827984b20b
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/mand_remote_val/a.erl
@@ -0,0 +1,9 @@
+-module(a).
+-export([to_map/1, to_map/2]).
+-type t() :: #{type := b:t()}.
+
+-spec to_map(t()) -> map().
+to_map(Resource) -> to_map(Resource, #{}).
+
+-spec to_map(t(), map()) -> map().
+to_map(_, Map) when is_map(Map) -> #{}.
diff --git a/lib/dialyzer/test/map_SUITE_data/src/mand_remote_val/b.erl b/lib/dialyzer/test/map_SUITE_data/src/mand_remote_val/b.erl
new file mode 100644
index 0000000000..31f9bb6b3e
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/mand_remote_val/b.erl
@@ -0,0 +1,3 @@
+-module(b).
+-export_type([t/0]).
+-type t() :: binary().
diff --git a/lib/dialyzer/test/map_SUITE_data/src/map_galore.erl b/lib/dialyzer/test/map_SUITE_data/src/map_galore.erl
new file mode 100644
index 0000000000..99eb73a5f6
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/map_galore.erl
@@ -0,0 +1,2800 @@
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2013. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(map_galore).
+-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
+ init_per_group/2,end_per_group/2
+ ]).
+
+-export([
+ t_build_and_match_literals/1, t_build_and_match_literals_large/1,
+ t_update_literals/1, t_update_literals_large/1,
+ t_match_and_update_literals/1, t_match_and_update_literals_large/1,
+ t_update_map_expressions/1,
+ t_update_assoc/1, t_update_assoc_large/1,
+ t_update_exact/1, t_update_exact_large/1,
+ t_guard_bifs/1,
+ t_guard_sequence/1, t_guard_sequence_large/1,
+ t_guard_update/1, t_guard_update_large/1,
+ t_guard_receive/1, t_guard_receive_large/1,
+ t_guard_fun/1,
+ t_update_deep/1,
+ t_list_comprehension/1,
+ t_map_sort_literals/1,
+ t_map_equal/1,
+ t_map_compare/1,
+ t_map_size/1,
+ t_is_map/1,
+
+ %% Specific Map BIFs
+ t_bif_map_get/1,
+ t_bif_map_find/1,
+ t_bif_map_is_key/1,
+ t_bif_map_keys/1,
+ t_bif_map_merge/1,
+ t_bif_map_new/1,
+ t_bif_map_put/1,
+ t_bif_map_remove/1,
+ t_bif_map_update/1,
+ t_bif_map_values/1,
+ t_bif_map_to_list/1,
+ t_bif_map_from_list/1,
+
+ %% erlang
+ t_erlang_hash/1,
+ t_map_encode_decode/1,
+
+ %% non specific BIF related
+ t_bif_build_and_check/1,
+ t_bif_merge_and_check/1,
+
+ %% maps module not bifs
+ t_maps_fold/1,
+ t_maps_map/1,
+ t_maps_size/1,
+ t_maps_without/1,
+
+ %% misc
+ t_erts_internal_order/1,
+ t_erts_internal_hash/1,
+ t_pdict/1,
+ t_ets/1,
+ t_dets/1,
+ t_tracing/1,
+
+ %% instruction-level tests
+ t_has_map_fields/1,
+ y_regs/1
+ ]).
+
+-include_lib("stdlib/include/ms_transform.hrl").
+
+-define(CHECK(Cond,Term),
+ case (catch (Cond)) of
+ true -> true;
+ _ -> io:format("###### CHECK FAILED ######~nINPUT: ~p~n", [Term]),
+ exit(Term)
+ end).
+
+suite() -> [].
+
+all() -> [
+ t_build_and_match_literals, t_build_and_match_literals_large,
+ t_update_literals, t_update_literals_large,
+ t_match_and_update_literals, t_match_and_update_literals_large,
+ t_update_map_expressions,
+ t_update_assoc, t_update_assoc_large,
+ t_update_exact, t_update_exact_large,
+ t_guard_bifs,
+ t_guard_sequence, t_guard_sequence_large,
+ t_guard_update, t_guard_update_large,
+ t_guard_receive, t_guard_receive_large,
+ t_guard_fun, t_list_comprehension,
+ t_update_deep,
+ t_map_equal, t_map_compare,
+ t_map_sort_literals,
+
+ %% Specific Map BIFs
+ t_bif_map_get,t_bif_map_find,t_bif_map_is_key,
+ t_bif_map_keys, t_bif_map_merge, t_bif_map_new,
+ t_bif_map_put,
+ t_bif_map_remove, t_bif_map_update,
+ t_bif_map_values,
+ t_bif_map_to_list, t_bif_map_from_list,
+
+ %% erlang
+ t_erlang_hash, t_map_encode_decode,
+ t_map_size, t_is_map,
+
+ %% non specific BIF related
+ t_bif_build_and_check,
+ t_bif_merge_and_check,
+
+ %% maps module
+ t_maps_fold, t_maps_map,
+ t_maps_size, t_maps_without,
+
+
+ %% Other functions
+ t_erts_internal_order,
+ t_erts_internal_hash,
+ t_pdict,
+ t_ets,
+ t_tracing,
+
+ %% instruction-level tests
+ t_has_map_fields,
+ y_regs
+ ].
+
+groups() -> [].
+
+init_per_suite(Config) -> Config.
+end_per_suite(_Config) -> ok.
+
+init_per_group(_GroupName, Config) -> Config.
+end_per_group(_GroupName, Config) -> Config.
+
+%% tests
+
+t_build_and_match_literals(Config) when is_list(Config) ->
+ #{} = #{},
+ #{1:=a} = #{1=>a},
+ #{1:=a,2:=b} = #{1=>a,2=>b},
+ #{1:=a,2:=b,3:="c"} = #{1=>a,2=>b,3=>"c"},
+ #{1:=a,2:=b,3:="c","4":="d"} = #{1=>a,2=>b,3=>"c","4"=>"d"},
+ #{1:=a,2:=b,3:="c","4":="d",<<"5">>:=<<"e">>} =
+ #{1=>a,2=>b,3=>"c","4"=>"d",<<"5">>=><<"e">>},
+ #{1:=a,2:=b,3:="c","4":="d",<<"5">>:=<<"e">>,{"6",7}:="f"} =
+ #{1=>a,2=>b,3=>"c","4"=>"d",<<"5">>=><<"e">>,{"6",7}=>"f"},
+ #{1:=a,2:=b,3:="c","4":="d",<<"5">>:=<<"e">>,{"6",7}:="f",8:=g} =
+ #{1=>a,2=>b,3=>"c","4"=>"d",<<"5">>=><<"e">>,{"6",7}=>"f",8=>g},
+
+ #{[]:=a,42.0:=b,x:={x,y},[a,b]:=list} =
+ #{[]=>a,42.0=>b,x=>{x,y},[a,b]=>list},
+
+ #{<<"hi all">> := 1} = #{<<"hi",32,"all">> => 1},
+
+ #{a:=X,a:=X=3,b:=4} = #{a=>3,b=>4}, % weird but ok =)
+
+ #{ a:=#{ b:=#{c := third, b:=second}}, b:=first} =
+ #{ b=>first, a=>#{ b=>#{c => third, b=> second}}},
+
+ M = #{ map_1=>#{ map_2=>#{value_3 => third}, value_2=> second}, value_1=>first},
+ M = #{ map_1:=#{ map_2:=#{value_3 := third}, value_2:= second}, value_1:=first} =
+ #{ map_1=>#{ map_2=>#{value_3 => third}, value_2=> second}, value_1=>first},
+
+ %% error case
+ %V = 32,
+ %{'EXIT',{{badmatch,_},_}} = (catch (#{<<"hi all">> => 1} = #{<<"hi",V,"all">> => 1})),
+ {'EXIT',{{badmatch,_},_}} = (catch (#{x:=3,x:=2} = #{x=>3})),
+ {'EXIT',{{badmatch,_},_}} = (catch (#{x:=2} = #{x=>3})),
+ {'EXIT',{{badmatch,_},_}} = (catch (#{x:=3} = {a,b,c})),
+ {'EXIT',{{badmatch,_},_}} = (catch (#{x:=3} = #{y=>3})),
+ {'EXIT',{{badmatch,_},_}} = (catch (#{x:=3} = #{x=>"three"})),
+ ok.
+
+t_build_and_match_literals_large(Config) when is_list(Config) ->
+ % normal non-repeating
+ M0 = #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" },
+
+ #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M0,
+ #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M0,
+ #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M0,
+ #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M0,
+ #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M0,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M0,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M0,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M0,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M0,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M0,
+
+ 60 = map_size(M0),
+ 60 = maps:size(M0),
+
+ % with repeating
+ M1 = #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 10=>na0,20=>nb0,30=>"nc0","40"=>"nd0",<<"50">>=>"ne0",{["00"]}=>"n10",
+ 11=>na1,21=>nb1,31=>"nc1","41"=>"nd1",<<"51">>=>"ne1",{["01"]}=>"n11",
+ 12=>na2,22=>nb2,32=>"nc2","42"=>"nd2",<<"52">>=>"ne2",{["02"]}=>"n12",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+
+ 13=>na3,23=>nb3,33=>"nc3","43"=>"nd3",<<"53">>=>"ne3",{["03"]}=>"n13",
+ 14=>na4,24=>nb4,34=>"nc4","44"=>"nd4",<<"54">>=>"ne4",{["04"]}=>"n14",
+
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" },
+
+ #{10:=na0,20:=nb0,30:="nc0","40":="nd0",<<"50">>:="ne0",{["00"]}:="n10"} = M1,
+ #{11:=na1,21:=nb1,31:="nc1","41":="nd1",<<"51">>:="ne1",{["01"]}:="n11"} = M1,
+ #{12:=na2,22:=nb2,32:="nc2","42":="nd2",<<"52">>:="ne2",{["02"]}:="n12"} = M1,
+ #{13:=na3,23:=nb3,33:="nc3","43":="nd3",<<"53">>:="ne3",{["03"]}:="n13"} = M1,
+ #{14:=na4,24:=nb4,34:="nc4","44":="nd4",<<"54">>:="ne4",{["04"]}:="n14"} = M1,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M1,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M1,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M1,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M1,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M1,
+
+ 60 = map_size(M1),
+ 60 = maps:size(M1),
+
+ % with floats
+
+ M2 = #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9"},
+
+ #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M2,
+ #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M2,
+ #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M2,
+ #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M2,
+ #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M2,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M2,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M2,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M2,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M2,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M2,
+
+ #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M2,
+ #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M2,
+ #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M2,
+ #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M2,
+ #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M2,
+
+ #{15.0:=fa5,25.0:=fb5,35.0:="fc5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M2,
+ #{16.0:=fa6,26.0:=fb6,36.0:="fc6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M2,
+ #{17.0:=fa7,27.0:=fb7,37.0:="fc7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M2,
+ #{18.0:=fa8,28.0:=fb8,38.0:="fc8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M2,
+ #{19.0:=fa9,29.0:=fb9,39.0:="fc9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M2,
+
+ 90 = map_size(M2),
+ 90 = maps:size(M2),
+
+ % with bignums
+ M3 = #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9",
+
+ 36893488147419103232=>big1, 73786976294838206464=>big2,
+ 147573952589676412928=>big3, 18446744073709551616=>big4,
+ 4294967296=>big5, 8589934592=>big6,
+ 4294967295=>big7, 67108863=>big8
+ },
+
+ #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M3,
+ #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M3,
+ #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M3,
+ #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M3,
+ #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M3,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3,
+
+ #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M3,
+ #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M3,
+ #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M3,
+ #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M3,
+ #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M3,
+
+ #{15.0:=fa5,25.0:=fb5,35.0:="fc5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3,
+ #{16.0:=fa6,26.0:=fb6,36.0:="fc6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3,
+ #{17.0:=fa7,27.0:=fb7,37.0:="fc7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3,
+ #{18.0:=fa8,28.0:=fb8,38.0:="fc8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3,
+ #{19.0:=fa9,29.0:=fb9,39.0:="fc9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3,
+
+ #{36893488147419103232:=big1,67108863:=big8,"45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3,
+ #{147573952589676412928:=big3,8589934592:=big6,"46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3,
+ #{4294967296:=big5,18446744073709551616:=big4,"47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3,
+ #{4294967295:=big7,73786976294838206464:=big2,"48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3,
+
+ 98 = map_size(M3),
+ 98 = maps:size(M3),
+
+ %% with maps
+
+ M4 = #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9",
+
+ #{ one => small, map => key } => "small map key 1",
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" },
+
+ #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M4,
+ #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M4,
+ #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M4,
+ #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M4,
+ #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M4,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M4,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M4,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M4,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M4,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M4,
+
+ #{ #{ one => small, map => key } := "small map key 1",
+ #{ second => small, map => key } := "small map key 2",
+ #{ third => small, map => key } := "small map key 3" } = M4,
+
+ #{ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 2" } = M4,
+
+
+ #{ 15:=V1,25:=b5,35:=V2,"45":="d5",<<"55">>:=V3,{["05"]}:="15",
+ #{ one => small, map => key } := "small map key 1",
+ #{ second => small, map => key } := V4,
+ #{ third => small, map => key } := "small map key 3",
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := V5 } = M4,
+
+ a5 = V1,
+ "c5" = V2,
+ "e5" = V3,
+ "small map key 2" = V4,
+ "large map key 1" = V5,
+
+ 95 = map_size(M4),
+ 95 = maps:size(M4),
+
+ % call for value
+
+ M5 = #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9",
+
+ #{ one => small, map => key } => "small map key 1",
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" },
+
+ #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M5,
+ #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M5,
+ #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M5,
+ #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M5,
+ #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M5,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M5,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M5,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M5,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M5,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M5,
+
+ #{ #{ one => small, map => key } := "small map key 1",
+ #{ second => small, map => key } := "small map key 2",
+ #{ third => small, map => key } := "small map key 3" } = M5,
+
+ #{ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 2" } = M5,
+
+ 95 = map_size(M5),
+ 95 = maps:size(M5),
+
+ %% remember
+
+ #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M0,
+ #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M0,
+ #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M0,
+ #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M0,
+ #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M0,
+
+ #{10:=na0,20:=nb0,30:="nc0","40":="nd0",<<"50">>:="ne0",{["00"]}:="n10"} = M1,
+ #{11:=na1,21:=nb1,31:="nc1","41":="nd1",<<"51">>:="ne1",{["01"]}:="n11"} = M1,
+ #{12:=na2,22:=nb2,32:="nc2","42":="nd2",<<"52">>:="ne2",{["02"]}:="n12"} = M1,
+ #{13:=na3,23:=nb3,33:="nc3","43":="nd3",<<"53">>:="ne3",{["03"]}:="n13"} = M1,
+ #{14:=na4,24:=nb4,34:="nc4","44":="nd4",<<"54">>:="ne4",{["04"]}:="n14"} = M1,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M1,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M1,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M1,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M1,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M1,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M2,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M2,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M2,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M2,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M2,
+
+ #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M2,
+ #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M2,
+ #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M2,
+ #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M2,
+ #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M2,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3,
+
+ #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M3,
+ #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M3,
+ #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M3,
+ #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M3,
+ #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M3,
+
+ #{15.0:=fa5,25.0:=fb5,35.0:="fc5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3,
+ #{16.0:=fa6,26.0:=fb6,36.0:="fc6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3,
+ #{17.0:=fa7,27.0:=fb7,37.0:="fc7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3,
+ #{18.0:=fa8,28.0:=fb8,38.0:="fc8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3,
+ #{19.0:=fa9,29.0:=fb9,39.0:="fc9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3,
+
+ #{36893488147419103232:=big1,67108863:=big8,"45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3,
+ #{147573952589676412928:=big3,8589934592:=big6,"46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3,
+ #{4294967296:=big5,18446744073709551616:=big4,"47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3,
+ #{4294967295:=big7,73786976294838206464:=big2,"48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3,
+
+ ok.
+
+
+t_map_size(Config) when is_list(Config) ->
+ 0 = map_size(#{}),
+ 1 = map_size(#{a=>1}),
+ 1 = map_size(#{a=>"wat"}),
+ 2 = map_size(#{a=>1, b=>2}),
+ 3 = map_size(#{a=>1, b=>2, b=>"3","33"=><<"n">>}),
+
+ true = map_is_size(#{a=>1}, 1),
+ true = map_is_size(#{a=>1, a=>2}, 1),
+ M = #{ "a" => 1, "b" => 2},
+ true = map_is_size(M, 2),
+ false = map_is_size(M, 3),
+ true = map_is_size(M#{ "a" => 2}, 2),
+ false = map_is_size(M#{ "c" => 2}, 2),
+
+ Ks = [build_key(fun(K) -> <<1,K:32,1>> end,I)||I<-lists:seq(1,100)],
+ ok = build_and_check_size(Ks,0,#{}),
+
+ %% try deep collisions
+ %% statistically we get another subtree at 50k -> 100k elements
+ %% Try to be nice and don't use too much memory in the testcase,
+
+ N = 500000,
+ Is = lists:seq(1,N),
+ N = map_size(maps:from_list([{I,I}||I<-Is])),
+ N = map_size(maps:from_list([{<<I:32>>,I}||I<-Is])),
+ N = map_size(maps:from_list([{integer_to_list(I),I}||I<-Is])),
+ N = map_size(maps:from_list([{float(I),I}||I<-Is])),
+
+ %% Error cases.
+ do_badmap(fun(T) ->
+ {'EXIT',{{badmap,T},_}} =
+ (catch map_size(T))
+ end),
+ ok.
+
+build_and_check_size([K|Ks],N,M0) ->
+ N = map_size(M0),
+ M1 = M0#{ K => K },
+ build_and_check_size(Ks,N + 1,M1);
+build_and_check_size([],N,M) ->
+ N = map_size(M),
+ ok.
+
+map_is_size(M,N) when map_size(M) =:= N -> true;
+map_is_size(_,_) -> false.
+
+t_is_map(Config) when is_list(Config) ->
+ true = is_map(#{}),
+ true = is_map(#{a=>1}),
+ false = is_map({a,b}),
+ false = is_map(x),
+ if is_map(#{}) -> ok end,
+ if is_map(#{b=>1}) -> ok end,
+ if not is_map([1,2,3]) -> ok end,
+ if not is_map(x) -> ok end,
+ ok.
+
+% test map updates without matching
+t_update_literals_large(Config) when is_list(Config) ->
+ Map = #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9",
+
+ #{ one => small, map => key } => "small map key 1",
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" },
+
+ #{x:="d",q:="4"} = loop_update_literals_x_q(Map, [
+ {"a","1"},{"b","2"},{"c","3"},{"d","4"}
+ ]),
+ ok.
+
+t_update_literals(Config) when is_list(Config) ->
+ Map = #{x=>1,y=>2,z=>3,q=>4},
+ #{x:="d",q:="4"} = loop_update_literals_x_q(Map, [
+ {"a","1"},{"b","2"},{"c","3"},{"d","4"}
+ ]),
+ ok.
+
+
+loop_update_literals_x_q(Map, []) -> Map;
+loop_update_literals_x_q(Map, [{X,Q}|Vs]) ->
+ loop_update_literals_x_q(Map#{q=>Q,x=>X},Vs).
+
+% test map updates with matching
+t_match_and_update_literals(Config) when is_list(Config) ->
+ Map = #{ x=>0,y=>"untouched",z=>"also untouched",q=>1,
+ #{ "one" => small, map => key } => "small map key 1" },
+ #{x:=16,q:=21,y:="untouched",z:="also untouched"} = loop_match_and_update_literals_x_q(Map, [
+ {1,2},{3,4},{5,6},{7,8}
+ ]),
+ M0 = #{ "hi" => "hello", int => 3, <<"key">> => <<"value">>,
+ 4 => number, 18446744073709551629 => wat},
+ M1 = #{},
+ M2 = M1#{ "hi" => "hello", int => 3, <<"key">> => <<"value">>,
+ 4 => number, 18446744073709551629 => wat},
+ M0 = M2,
+
+ #{ 4 := another_number, int := 3 } = M2#{ 4 => another_number },
+ ok.
+
+t_match_and_update_literals_large(Config) when is_list(Config) ->
+ Map = #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9",
+
+ x=>0,y=>"untouched",z=>"also untouched",q=>1,
+
+ #{ "one" => small, map => key } => "small map key 1",
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" },
+
+ #{x:=16,q:=21,y:="untouched",z:="also untouched"} = loop_match_and_update_literals_x_q(Map, [
+ {1,2},{3,4},{5,6},{7,8}
+ ]),
+ M0 = Map#{ "hi" => "hello", int => 3, <<"key">> => <<"value">>,
+ 4 => number, 18446744073709551629 => wat},
+ M1 = Map#{},
+ M2 = M1#{ "hi" => "hello", int => 3, <<"key">> => <<"value">>,
+ 4 => number, 18446744073709551629 => wat},
+ M0 = M2,
+
+ #{ 4 := another_number, int := 3 } = M2#{ 4 => another_number },
+ ok.
+
+
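+%% Helper: matches the keys q, x and a literal small-map key in the function
+%% head while accumulating X and Q into the map with the => operator.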
+loop_match_and_update_literals_x_q(Map, []) -> Map;
+loop_match_and_update_literals_x_q(#{ q:=Q0, x:=X0,
+ #{ "one" => small, map => key } := "small map key 1" } = Map, [{X,Q}|Vs]) ->
+ loop_match_and_update_literals_x_q(Map#{q=>Q0+Q,x=>X0+X},Vs).
+
+
+t_update_map_expressions(Config) when is_list(Config) ->
+ M = maps:new(),
+ #{ a := 1 } = M#{a => 1},
+
+ #{ b := 2 } = (maps:new())#{ b => 2 },
+
+ #{ a :=42, b:=42, c:=42 } = (maps:from_list([{a,1},{b,2},{c,3}]))#{ a := 42, b := 42, c := 42 },
+ #{ "a" :=1, "b":=42, "c":=42 } = (maps:from_list([{"a",1},{"b",2}]))#{ "b" := 42, "c" => 42 },
+ Ks = lists:seq($a,$z),
+ #{ "aa" := {$a,$a}, "ac":=41, "dc":=42 } =
+ (maps:from_list([{[K1,K2],{K1,K2}}|| K1 <- Ks, K2 <- Ks]))#{ "ac" := 41, "dc" => 42 },
+
+ %% Error cases.
+ do_badmap(fun(T) ->
+ {'EXIT',{{badmap,T},_}} =
+ (catch (T)#{a:=42,b=>2})
+ end),
+ ok.
+
+t_update_assoc(Config) when is_list(Config) ->
+ M0 = #{1=>a,2=>b,3.0=>c,4=>d,5=>e},
+
+ M1 = M0#{1=>42,2=>100,4=>[a,b,c]},
+ #{1:=42,2:=100,3.0:=c,4:=[a,b,c],5:=e} = M1,
+ #{1:=42,2:=b,4:=d,5:=e,2.0:=100,3.0:=c,4.0:=[a,b,c]} = M0#{1.0=>float,1:=42,2.0=>wrong,2.0=>100,4.0=>[a,b,c]},
+
+ M2 = M0#{3.0=>new},
+ #{1:=a,2:=b,3.0:=new,4:=d,5:=e} = M2,
+ M2 = M0#{3.0:=wrong,3.0=>new},
+
+    %% Error cases.
+ do_badmap(fun(T) ->
+ {'EXIT',{{badmap,T},_}} =
+ (catch T#{nonexisting=>val})
+ end),
+ ok.
+
+
+t_update_assoc_large(Config) when is_list(Config) ->
+ M0 = #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9",
+
+ #{ one => small, map => key } => "small map key 1",
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" },
+
+
+ M1 = M0#{1=>42,2=>100,4=>[a,b,c]},
+ #{1:=42,2:=100,10.0:=fa0,4:=[a,b,c],25:=b5} = M1,
+ #{ 10:=43, 24:=b4, 15:=a5, 35:="c5", 2.0:=100, 13.0:=fa3, 4.0:=[a,b,c]} =
+ M0#{1.0=>float,10:=43,2.0=>wrong,2.0=>100,4.0=>[a,b,c]},
+
+ M2 = M0#{13.0=>new},
+ #{10:=a0,20:=b0,13.0:=new,"40":="d0",<<"50">>:="e0"} = M2,
+ M2 = M0#{13.0:=wrong,13.0=>new},
+
+ ok.
+
+t_update_exact(Config) when is_list(Config) ->
+ M0 = #{1=>a,2=>b,3.0=>c,4=>d,5=>e},
+
+ M1 = M0#{1:=42,2:=100,4:=[a,b,c]},
+ #{1:=42,2:=100,3.0:=c,4:=[a,b,c],5:=e} = M1,
+ M1 = M0#{1:=wrong,1=>42,2=>wrong,2:=100,4:=[a,b,c]},
+
+ M2 = M0#{3.0:=new},
+ #{1:=a,2:=b,3.0:=new,4:=d,5:=e} = M2,
+ M2 = M0#{3.0=>wrong,3.0:=new},
+ true = M2 =/= M0#{3=>right,3.0:=new},
+ #{ 3 := right, 3.0 := new } = M0#{3=>right,3.0:=new},
+
+ M3 = #{ 1 => val},
+ #{1 := update2,1.0 := new_val4} = M3#{
+ 1.0 => new_val1, 1 := update, 1=> update3,
+ 1 := update2, 1.0 := new_val2, 1.0 => new_val3,
+ 1.0 => new_val4 },
+
+    %% Error cases.
+ do_badmap(fun(T) ->
+ {'EXIT',{{badmap,T},_}} =
+ (catch T#{nonexisting=>val})
+ end),
+ Empty = #{},
+ {'EXIT',{{badkey,nonexisting},_}} = (catch Empty#{nonexisting:=val}),
+ {'EXIT',{{badkey,nonexisting},_}} = (catch M0#{nonexisting:=val}),
+ {'EXIT',{{badkey,1.0},_}} = (catch M0#{1.0:=v,1.0=>v2}),
+ {'EXIT',{{badkey,42},_}} = (catch M0#{42.0:=v,42:=v2}),
+ {'EXIT',{{badkey,42.0},_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}),
+
+ ok.
+
+t_update_exact_large(Config) when is_list(Config) ->
+ M0 = #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9",
+
+ #{ one => small, map => key } => "small map key 1",
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" },
+
+
+ M1 = M0#{10:=42,<<"55">>:=100,10.0:=[a,b,c]},
+ #{ 10:=42,<<"55">>:=100,{["05"]}:="15",10.0:=[a,b,c],
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 1" } = M1,
+
+ M1 = M0#{10:=wrong,10=>42,<<"55">>=>wrong,<<"55">>:=100,10.0:=[a,b,c]},
+
+ M2 = M0#{13.0:=new},
+ #{10:=a0,20:=b0,13.0:=new} = M2,
+ M2 = M0#{13.0=>wrong,13.0:=new},
+
+    %% Error cases.
+ {'EXIT',{{badkey,nonexisting},_}} = (catch M0#{nonexisting:=val}),
+ {'EXIT',{{badkey,1.0},_}} = (catch M0#{1.0:=v,1.0=>v2}),
+ {'EXIT',{{badkey,42},_}} = (catch M0#{42.0:=v,42:=v2}),
+ {'EXIT',{{badkey,42.0},_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}),
+
+ ok.
+
+t_update_deep(Config) when is_list(Config) ->
+ N = 250000,
+ M0 = maps:from_list([{integer_to_list(I),a}||I<-lists:seq(1,N)]),
+ #{ "1" := a, "10" := a, "100" := a, "1000" := a, "10000" := a } = M0,
+
+ M1 = M0#{ "1" := b, "10" := b, "100" := b, "1000" := b, "10000" := b },
+ #{ "1" := a, "10" := a, "100" := a, "1000" := a, "10000" := a } = M0,
+ #{ "1" := b, "10" := b, "100" := b, "1000" := b, "10000" := b } = M1,
+
+ M2 = M0#{ "1" => c, "10" => c, "100" => c, "1000" => c, "10000" => c },
+ #{ "1" := a, "10" := a, "100" := a, "1000" := a, "10000" := a } = M0,
+ #{ "1" := b, "10" := b, "100" := b, "1000" := b, "10000" := b } = M1,
+ #{ "1" := c, "10" := c, "100" := c, "1000" := c, "10000" := c } = M2,
+
+ M3 = M2#{ "n1" => d, "n10" => d, "n100" => d, "n1000" => d, "n10000" => d },
+ #{ "1" := a, "10" := a, "100" := a, "1000" := a, "10000" := a } = M0,
+ #{ "1" := b, "10" := b, "100" := b, "1000" := b, "10000" := b } = M1,
+ #{ "1" := c, "10" := c, "100" := c, "1000" := c, "10000" := c } = M2,
+ #{ "1" := c, "10" := c, "100" := c, "1000" := c, "10000" := c } = M3,
+ #{ "n1" := d, "n10" := d, "n100" := d, "n1000" := d, "n10000" := d } = M3,
+ ok.
+
+t_guard_bifs(Config) when is_list(Config) ->
+ true = map_guard_head(#{a=>1}),
+ false = map_guard_head([]),
+ true = map_guard_body(#{a=>1}),
+ false = map_guard_body({}),
+ true = map_guard_pattern(#{a=>1, <<"hi">> => "hi" }),
+ false = map_guard_pattern("list"),
+ ok.
+
+map_guard_head(M) when is_map(M) -> true;
+map_guard_head(_) -> false.
+
+map_guard_body(M) -> is_map(M).
+
+map_guard_pattern(#{}) -> true;
+map_guard_pattern(_) -> false.
+
+t_guard_sequence(Config) when is_list(Config) ->
+ {1, "a"} = map_guard_sequence_1(#{seq=>1,val=>"a"}),
+ {2, "b"} = map_guard_sequence_1(#{seq=>2,val=>"b"}),
+ {3, "c"} = map_guard_sequence_1(#{seq=>3,val=>"c"}),
+ {4, "d"} = map_guard_sequence_1(#{seq=>4,val=>"d"}),
+ {5, "e"} = map_guard_sequence_1(#{seq=>5,val=>"e"}),
+
+ {1,M1} = map_guard_sequence_2(M1 = #{a=>3}),
+ {2,M2} = map_guard_sequence_2(M2 = #{a=>4, b=>4}),
+ {3,gg,M3} = map_guard_sequence_2(M3 = #{a=>gg, b=>4}),
+ {4,sc,sc,M4} = map_guard_sequence_2(M4 = #{a=>sc, b=>3, c=>sc2}),
+ {5,kk,kk,M5} = map_guard_sequence_2(M5 = #{a=>kk, b=>other, c=>sc2}),
+
+ %% error case
+ {'EXIT',{function_clause,_}} = (catch map_guard_sequence_1(#{seq=>6,val=>"e"})),
+ {'EXIT',{function_clause,_}} = (catch map_guard_sequence_2(#{b=>5})),
+ ok.
+
+t_guard_sequence_large(Config) when is_list(Config) ->
+ M0 = #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00",03]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01",03]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02",03]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03",03]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04",03]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05",03]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06",03]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07",03]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08",03]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09",03]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9",
+
+ #{ one => small, map => key } => "small map key 1",
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" },
+
+ {1, "a"} = map_guard_sequence_1(M0#{seq=>1,val=>"a"}),
+ {2, "b"} = map_guard_sequence_1(M0#{seq=>2,val=>"b"}),
+ {3, "c"} = map_guard_sequence_1(M0#{seq=>3,val=>"c"}),
+ {4, "d"} = map_guard_sequence_1(M0#{seq=>4,val=>"d"}),
+ {5, "e"} = map_guard_sequence_1(M0#{seq=>5,val=>"e"}),
+
+ {1,M1} = map_guard_sequence_2(M1 = M0#{a=>3}),
+ {2,M2} = map_guard_sequence_2(M2 = M0#{a=>4, b=>4}),
+ {3,gg,M3} = map_guard_sequence_2(M3 = M0#{a=>gg, b=>4}),
+ {4,sc,sc,M4} = map_guard_sequence_2(M4 = M0#{a=>sc, b=>3, c=>sc2}),
+ {5,kk,kk,M5} = map_guard_sequence_2(M5 = M0#{a=>kk, b=>other, c=>sc2}),
+
+ {'EXIT',{function_clause,_}} = (catch map_guard_sequence_1(M0#{seq=>6,val=>"e"})),
+ {'EXIT',{function_clause,_}} = (catch map_guard_sequence_2(M0#{b=>5})),
+ ok.
+
+
+map_guard_sequence_1(#{seq:=1=Seq, val:=Val}) -> {Seq,Val};
+map_guard_sequence_1(#{seq:=2=Seq, val:=Val}) -> {Seq,Val};
+map_guard_sequence_1(#{seq:=3=Seq, val:=Val}) -> {Seq,Val};
+map_guard_sequence_1(#{seq:=4=Seq, val:=Val}) -> {Seq,Val};
+map_guard_sequence_1(#{seq:=5=Seq, val:=Val}) -> {Seq,Val}.
+
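+%% Clauses 3-5 match the key a more than once in the same pattern; all
+%% occurrences must bind to equal values for the clause to match.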
+map_guard_sequence_2(#{ a:=3 }=M) -> {1, M};
+map_guard_sequence_2(#{ a:=4 }=M) -> {2, M};
+map_guard_sequence_2(#{ a:=X, a:=X, b:=4 }=M) -> {3,X,M};
+map_guard_sequence_2(#{ a:=X, a:=Y, b:=3 }=M) when X =:= Y -> {4,X,Y,M};
+map_guard_sequence_2(#{ a:=X, a:=Y }=M) when X =:= Y -> {5,X,Y,M}.
+
+
+t_guard_update(Config) when is_list(Config) ->
+ error = map_guard_update(#{},#{}),
+ first = map_guard_update(#{}, #{x=>first}),
+ second = map_guard_update(#{y=>old}, #{x=>second,y=>old}),
+ ok.
+
+t_guard_update_large(Config) when is_list(Config) ->
+ M0 = #{ 70=>a0,80=>b0,90=>"c0","40"=>"d0",<<"50">>=>"e0",{["00",03]}=>"10",
+ 71=>a1,81=>b1,91=>"c1","41"=>"d1",<<"51">>=>"e1",{["01",03]}=>"11",
+ 72=>a2,82=>b2,92=>"c2","42"=>"d2",<<"52">>=>"e2",{["02",03]}=>"12",
+ 73=>a3,83=>b3,93=>"c3","43"=>"d3",<<"53">>=>"e3",{["03",03]}=>"13",
+ 74=>a4,84=>b4,94=>"c4","44"=>"d4",<<"54">>=>"e4",{["04",03]}=>"14",
+
+ 75=>a5,85=>b5,95=>"c5","45"=>"d5",<<"55">>=>"e5",{["05",03]}=>"15",
+ 76=>a6,86=>b6,96=>"c6","46"=>"d6",<<"56">>=>"e6",{["06",03]}=>"16",
+ 77=>a7,87=>b7,97=>"c7","47"=>"d7",<<"57">>=>"e7",{["07",03]}=>"17",
+ 78=>a8,88=>b8,98=>"c8","48"=>"d8",<<"58">>=>"e8",{["08",03]}=>"18",
+ 79=>a9,89=>b9,99=>"c9","49"=>"d9",<<"59">>=>"e9",{["09",03]}=>"19",
+
+ 70.0=>fa0,80.0=>fb0,90.0=>"fc0",
+ 71.0=>fa1,81.0=>fb1,91.0=>"fc1",
+ 72.0=>fa2,82.0=>fb2,92.0=>"fc2",
+ 73.0=>fa3,83.0=>fb3,93.0=>"fc3",
+ 74.0=>fa4,84.0=>fb4,94.0=>"fc4",
+
+ 75.0=>fa5,85.0=>fb5,95.0=>"fc5",
+ 76.0=>fa6,86.0=>fb6,96.0=>"fc6",
+ 77.0=>fa7,87.0=>fb7,97.0=>"fc7",
+ 78.0=>fa8,88.0=>fb8,98.0=>"fc8",
+ 79.0=>fa9,89.0=>fb9,99.0=>"fc9",
+
+ #{ one => small, map => key } => "small map key 1",
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" },
+
+
+ error = map_guard_update(M0#{},M0#{}),
+ first = map_guard_update(M0#{},M0#{x=>first}),
+ second = map_guard_update(M0#{y=>old}, M0#{x=>second,y=>old}),
+ ok.
+
+
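+%% Map update with => is allowed in guards: each clause tests whether
+%% extending M1 with a fixed value for x yields exactly M2.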
+map_guard_update(M1, M2) when M1#{x=>first} =:= M2 -> first;
+map_guard_update(M1, M2) when M1#{x=>second} =:= M2 -> second;
+map_guard_update(_, _) -> error.
+
+t_guard_receive(Config) when is_list(Config) ->
+ M0 = #{ id => 0 },
+ Pid = spawn_link(fun() -> guard_receive_loop() end),
+ Big = 36893488147419103229,
+ B1 = <<"some text">>,
+ B2 = <<"was appended">>,
+ B3 = <<B1/binary, B2/binary>>,
+
+ #{id:=1, res:=Big} = M1 = call(Pid, M0#{op=>sub,in=>{1 bsl 65, 3}}),
+ #{id:=2, res:=26} = M2 = call(Pid, M1#{op=>idiv,in=>{53,2}}),
+ #{id:=3, res:=832} = M3 = call(Pid, M2#{op=>imul,in=>{26,32}}),
+ #{id:=4, res:=4} = M4 = call(Pid, M3#{op=>add,in=>{1,3}}),
+ #{id:=5, res:=Big} = M5 = call(Pid, M4#{op=>sub,in=>{1 bsl 65, 3}}),
+ #{id:=6, res:=B3} = M6 = call(Pid, M5#{op=>"append",in=>{B1,B2}}),
+ #{id:=7, res:=4} = _ = call(Pid, M6#{op=>add,in=>{1,3}}),
+
+
+ %% update old maps and check id update
+ #{id:=2, res:=B3} = call(Pid, M1#{op=>"append",in=>{B1,B2}}),
+ #{id:=5, res:=99} = call(Pid, M4#{op=>add,in=>{33, 66}}),
+
+ %% cleanup
+ done = call(Pid, done),
+ ok.
+
+-define(t_guard_receive_large_procs, 1500).
+
+t_guard_receive_large(Config) when is_list(Config) ->
+ M = lists:foldl(fun(_,#{procs := Ps } = M) ->
+ M#{ procs := Ps#{ spawn_link(fun() -> grecv_loop() end) => 0 }}
+ end, #{procs => #{}, done => 0}, lists:seq(1,?t_guard_receive_large_procs)),
+ lists:foreach(fun(Pid) ->
+ Pid ! {self(), hello}
+ end, maps:keys(maps:get(procs,M))),
+ ok = guard_receive_large_loop(M),
+ ok.
+
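+%% Matches incoming messages as maps directly in the receive clause; each
+%% worker is pinged until its counter reaches 150, then told to stop and
+%% counted towards done.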
+guard_receive_large_loop(#{done := ?t_guard_receive_large_procs}) ->
+ ok;
+guard_receive_large_loop(M) ->
+ receive
+ #{pid := Pid, msg := hello} ->
+ case M of
+ #{done := Count, procs := #{Pid := 150}} ->
+ Pid ! {self(), done},
+ guard_receive_large_loop(M#{done := Count + 1});
+ #{procs := #{Pid := Count} = Ps} ->
+ Pid ! {self(), hello},
+ guard_receive_large_loop(M#{procs := Ps#{Pid := Count + 1}})
+ end
+ end.
+
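+%% Echo worker: replies to hello with a map message and terminates on done.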
+grecv_loop() ->
+ receive
+ {_, done} ->
+ ok;
+ {Pid, hello} ->
+ Pid ! #{pid=>self(), msg=>hello},
+ grecv_loop()
+ end.
+
+call(Pid, M) ->
+ Pid ! {self(), M}, receive {Pid, Res} -> Res end.
+
+guard_receive_loop() ->
+ receive
+ {Pid, #{ id:=Id, op:="append", in:={X,Y}}=M} when is_binary(X), is_binary(Y) ->
+ Pid ! {self(), M#{ id=>Id+1, res=><<X/binary,Y/binary>>}},
+ guard_receive_loop();
+ {Pid, #{ id:=Id, op:=add, in:={X,Y}}} ->
+ Pid ! {self(), #{ id=>Id+1, res=>X+Y}},
+ guard_receive_loop();
+ {Pid, #{ id:=Id, op:=sub, in:={X,Y}}=M} ->
+ Pid ! {self(), M#{ id=>Id+1, res=>X-Y}},
+ guard_receive_loop();
+ {Pid, #{ id:=Id, op:=idiv, in:={X,Y}}=M} ->
+ Pid ! {self(), M#{ id=>Id+1, res=>X div Y}},
+ guard_receive_loop();
+ {Pid, #{ id:=Id, op:=imul, in:={X,Y}}=M} ->
+ Pid ! {self(), M#{ id=>Id+1, res=>X * Y}},
+ guard_receive_loop();
+ {Pid, done} ->
+ Pid ! {self(), done};
+ {Pid, Other} ->
+ Pid ! {error, Other},
+ guard_receive_loop()
+ end.
+
+
+t_list_comprehension(Config) when is_list(Config) ->
+ [#{k:=1},#{k:=2},#{k:=3}] = [#{k=>I} || I <- [1,2,3]],
+
+ Ks = lists:seq($a,$z),
+ Ms = [#{[K1,K2]=>{K1,K2}} || K1 <- Ks, K2 <- Ks],
+ [#{"aa" := {$a,$a}},#{"ab":={$a,$b}}|_] = Ms,
+ [#{"zz" := {$z,$z}},#{"zy":={$z,$y}}|_] = lists:reverse(Ms),
+ ok.
+
+t_guard_fun(Config) when is_list(Config) ->
+ F1 = fun
+ (#{s:=v,v:=V}) -> {v,V};
+ (#{s:=t,v:={V,V}}) -> {t,V};
+ (#{s:=l,v:=[V,V]}) -> {l,V}
+ end,
+
+ F2 = fun
+ (#{s:=T,v:={V,V}}) -> {T,V};
+ (#{s:=T,v:=[V,V]}) -> {T,V};
+ (#{s:=T,v:=V}) -> {T,V}
+ end,
+ V = <<"hi">>,
+
+ {v,V} = F1(#{s=>v,v=>V}),
+ {t,V} = F1(#{s=>t,v=>{V,V}}),
+ {l,V} = F1(#{s=>l,v=>[V,V]}),
+
+ {v,V} = F2(#{s=>v,v=>V}),
+ {t,V} = F2(#{s=>t,v=>{V,V}}),
+ {l,V} = F2(#{s=>l,v=>[V,V]}),
+
+ %% error case
+ {'EXIT', {function_clause,[{?MODULE,_,[#{s:=none,v:=none}],_}|_]}} = (catch F1(#{s=>none,v=>none})),
+ ok.
+
+
+t_map_sort_literals(Config) when is_list(Config) ->
+    %% test the term ordering relations on maps
+
+ %% size order
+ true = #{ a => 1, b => 2} < #{ a => 1, b => 1, c => 1},
+ true = #{ b => 1, a => 1} < #{ c => 1, a => 1, b => 1},
+ false = #{ c => 1, b => 1, a => 1} < #{ c => 1, a => 1},
+
+ %% key order
+ true = #{ a => 1 } < #{ b => 1},
+ false = #{ b => 1 } < #{ a => 1},
+ true = #{ a => 1, b => 1, c => 1 } < #{ b => 1, c => 1, d => 1},
+ true = #{ b => 1, c => 1, d => 1 } > #{ a => 1, b => 1, c => 1},
+ true = #{ c => 1, b => 1, a => 1 } < #{ b => 1, c => 1, d => 1},
+ true = #{ "a" => 1 } < #{ <<"a">> => 1},
+ false = #{ <<"a">> => 1 } < #{ "a" => 1},
+ true = #{ 1 => 1 } < #{ 1.0 => 1},
+ false = #{ 1.0 => 1 } < #{ 1 => 1},
+
+ %% value order
+ true = #{ a => 1 } < #{ a => 2},
+ false = #{ a => 2 } < #{ a => 1},
+ false = #{ a => 2, b => 1 } < #{ a => 1, b => 3},
+ true = #{ a => 1, b => 1 } < #{ a => 1, b => 3},
+ false = #{ a => 1 } < #{ a => 1.0},
+ false = #{ a => 1.0 } < #{ a => 1},
+
+ true = #{ "a" => "hi", b => 134 } == #{ b => 134,"a" => "hi"},
+
+ %% large maps
+
+ M = maps:from_list([{I,I}||I <- lists:seq(1,500)]),
+
+ %% size order
+ true = M#{ a => 1, b => 2} < M#{ a => 1, b => 1, c => 1},
+ true = M#{ b => 1, a => 1} < M#{ c => 1, a => 1, b => 1},
+ false = M#{ c => 1, b => 1, a => 1} < M#{ c => 1, a => 1},
+
+ %% key order
+ true = M#{ a => 1 } < M#{ b => 1},
+ false = M#{ b => 1 } < M#{ a => 1},
+ true = M#{ a => 1, b => 1, c => 1 } < M#{ b => 1, c => 1, d => 1},
+ true = M#{ b => 1, c => 1, d => 1 } > M#{ a => 1, b => 1, c => 1},
+ true = M#{ c => 1, b => 1, a => 1 } < M#{ b => 1, c => 1, d => 1},
+ true = M#{ "a" => 1 } < M#{ <<"a">> => 1},
+ false = M#{ <<"a">> => 1 } < #{ "a" => 1},
+ true = M#{ 1 => 1 } < maps:remove(1,M#{ 1.0 => 1}),
+ false = M#{ 1.0 => 1 } < M#{ 1 => 1},
+
+ %% value order
+ true = M#{ a => 1 } < M#{ a => 2},
+ false = M#{ a => 2 } < M#{ a => 1},
+ false = M#{ a => 2, b => 1 } < M#{ a => 1, b => 3},
+ true = M#{ a => 1, b => 1 } < M#{ a => 1, b => 3},
+ false = M#{ a => 1 } < M#{ a => 1.0},
+ false = M#{ a => 1.0 } < M#{ a => 1},
+
+ true = M#{ "a" => "hi", b => 134 } == M#{ b => 134,"a" => "hi"},
+
+ %% lists:sort
+
+ SortVs = [#{"a"=>1},#{a=>2},#{1=>3},#{<<"a">>=>4}],
+ [#{1:=ok},#{a:=ok},#{"a":=ok},#{<<"a">>:=ok}] = lists:sort([#{"a"=>ok},#{a=>ok},#{1=>ok},#{<<"a">>=>ok}]),
+ [#{1:=3},#{a:=2},#{"a":=1},#{<<"a">>:=4}] = lists:sort(SortVs),
+ [#{1:=3},#{a:=2},#{"a":=1},#{<<"a">>:=4}] = lists:sort(lists:reverse(SortVs)),
+ ok.
+
+t_map_equal(Config) when is_list(Config) ->
+ true = #{} =:= #{},
+ false = #{} =:= #{a=>1},
+ false = #{a=>1} =:= #{},
+ true = #{ "a" => "hi", b => 134 } =:= #{ b => 134,"a" => "hi"},
+
+ false = #{ a => 1 } =:= #{ a => 2},
+ false = #{ a => 2 } =:= #{ a => 1},
+ false = #{ a => 2, b => 1 } =:= #{ a => 1, b => 3},
+ false = #{ a => 1, b => 1 } =:= #{ a => 1, b => 3},
+
+ true = #{ a => 1 } =:= #{ a => 1},
+ true = #{ "a" => 2 } =:= #{ "a" => 2},
+ true = #{ "a" => 2, b => 3 } =:= #{ "a" => 2, b => 3},
+ true = #{ a => 1, b => 3, c => <<"wat">> } =:= #{ a => 1, b => 3, c=><<"wat">>},
+ ok.
+
+
+t_map_compare(Config) when is_list(Config) ->
+ Seed = {erlang:monotonic_time(),
+ erlang:time_offset(),
+ erlang:unique_integer()},
+ io:format("seed = ~p\n", [Seed]),
+ random:seed(Seed),
+ repeat(100, fun(_) -> float_int_compare() end, []),
+ repeat(100, fun(_) -> recursive_compare() end, []),
+ ok.
+
+float_int_compare() ->
+ Terms = numeric_keys(3),
+ %%io:format("Keys to use: ~p\n", [Terms]),
+ Pairs = lists:map(fun(K) -> list_to_tuple([{K,V} || V <- Terms]) end, Terms),
+ lists:foreach(fun(Size) ->
+ MapGen = fun() -> map_gen(list_to_tuple(Pairs), Size) end,
+ repeat(100, fun do_compare/1, [MapGen, MapGen])
+ end,
+ lists:seq(1,length(Terms))),
+ ok.
+
+numeric_keys(N) ->
+ lists:foldl(fun(_,Acc) ->
+ Int = random:uniform(N*4) - N*2,
+ Float = float(Int),
+ [Int, Float, Float * 0.99, Float * 1.01 | Acc]
+ end,
+ [],
+ lists:seq(1,N)).
+
+
+repeat(0, _, _) ->
+ ok;
+repeat(N, Fun, Arg) ->
+ Fun(Arg),
+ repeat(N-1, Fun, Arg).
+
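+%% Round-trips a term through another process to force a heap copy.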
+copy_term(T) ->
+ Papa = self(),
+ P = spawn_link(fun() -> receive Msg -> Papa ! Msg end end),
+ P ! T,
+ receive R -> R end.
+
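+%% Checks that the emulator's < agrees with maps_lessthan/2 for two generated
+%% maps, then swaps one numeric key between integer and float and checks the
+%% expected ordering against the original map.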
+do_compare([Gen1, Gen2]) ->
+ M1 = Gen1(),
+ M2 = Gen2(),
+ %%io:format("Maps to compare: ~p AND ~p\n", [M1, M2]),
+ C = (M1 < M2),
+ Erlang = maps_lessthan(M1, M2),
+ C = Erlang,
+ ?CHECK(M1==M1, M1),
+
+ %% Change one key from int to float (or vice versa) and check compare
+ ML1 = maps:to_list(M1),
+ {K1,V1} = lists:nth(random:uniform(length(ML1)), ML1),
+ case K1 of
+ I when is_integer(I) ->
+ case maps:find(float(I),M1) of
+ error ->
+ M1f = maps:remove(I, maps:put(float(I), V1, M1)),
+ ?CHECK(M1f > M1, [M1f, M1]);
+ _ -> ok
+ end;
+
+ F when is_float(F), round(F) == F ->
+ case maps:find(round(F),M1) of
+ error ->
+ M1i = maps:remove(F, maps:put(round(F), V1, M1)),
+ ?CHECK(M1i < M1, [M1i, M1]);
+ _ -> ok
+ end;
+
+ _ -> ok % skip floats with decimals
+ end,
+
+ ?CHECK(M2 == M2, [M2]).
+
+
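+%% Reference implementation of map ordering: compare sizes first, then the
+%% sorted key lists using erts_internal:cmp_term/2, then the value lists.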
+maps_lessthan(M1, M2) ->
+ case {maps:size(M1),maps:size(M2)} of
+ {_S,_S} ->
+ {K1,V1} = lists:unzip(term_sort(maps:to_list(M1))),
+ {K2,V2} = lists:unzip(term_sort(maps:to_list(M2))),
+
+ case erts_internal:cmp_term(K1,K2) of
+ -1 -> true;
+ 0 -> (V1 < V2);
+ 1 -> false
+ end;
+
+ {S1, S2} ->
+ S1 < S2
+ end.
+
+term_sort(L) ->
+ lists:sort(fun(A,B) -> erts_internal:cmp_term(A,B) =< 0 end,
+ L).
+
+
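+%% Term comparison helper: tuples by size then elements, lists element by
+%% element, maps via cmp_maps/3; with Exact=true an integer compares before
+%% a float, otherwise numbers compare arithmetically.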
+cmp(T1, T2, Exact) when is_tuple(T1) and is_tuple(T2) ->
+ case {size(T1),size(T2)} of
+ {_S,_S} -> cmp(tuple_to_list(T1), tuple_to_list(T2), Exact);
+ {S1,S2} when S1 < S2 -> -1;
+ {S1,S2} when S1 > S2 -> 1
+ end;
+
+cmp([H1|T1], [H2|T2], Exact) ->
+ case cmp(H1,H2, Exact) of
+ 0 -> cmp(T1,T2, Exact);
+ C -> C
+ end;
+
+cmp(M1, M2, Exact) when is_map(M1) andalso is_map(M2) ->
+ cmp_maps(M1,M2,Exact);
+cmp(M1, M2, Exact) ->
+ cmp_others(M1, M2, Exact).
+
+cmp_maps(M1, M2, Exact) ->
+ case {maps:size(M1),maps:size(M2)} of
+ {_S,_S} ->
+ {K1,V1} = lists:unzip(term_sort(maps:to_list(M1))),
+ {K2,V2} = lists:unzip(term_sort(maps:to_list(M2))),
+
+ case cmp(K1, K2, true) of
+ 0 -> cmp(V1, V2, Exact);
+ C -> C
+ end;
+
+ {S1,S2} when S1 < S2 -> -1;
+ {S1,S2} when S1 > S2 -> 1
+ end.
+
+cmp_others(I, F, true) when is_integer(I), is_float(F) ->
+ -1;
+cmp_others(F, I, true) when is_float(F), is_integer(I) ->
+ 1;
+cmp_others(T1, T2, _) ->
+ case {T1<T2, T1==T2} of
+ {true,false} -> -1;
+ {false,true} -> 0;
+ {false,false} -> 1
+ end.
+
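+%% Builds a map with Size entries, each taking a distinct key from Pairs
+%% together with a randomly chosen value.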
+map_gen(Pairs, Size) ->
+ {_,L} = lists:foldl(fun(_, {Keys, Acc}) ->
+ KI = random:uniform(size(Keys)),
+ K = element(KI,Keys),
+ KV = element(random:uniform(size(K)), K),
+ {erlang:delete_element(KI,Keys), [KV | Acc]}
+ end,
+ {Pairs, []},
+ lists:seq(1,Size)),
+
+ maps:from_list(L).
+
+
+recursive_compare() ->
+ Leafs = {atom, 17, 16.9, 17.1, [], self(), spawn(fun() -> ok end), make_ref(), make_ref()},
+ {A, B} = term_gen_recursive(Leafs, 0, 0),
+ %%io:format("Recursive term A = ~p\n", [A]),
+ %%io:format("Recursive term B = ~p\n", [B]),
+
+ ?CHECK({true,false} =:= case do_cmp(A, B, false) of
+ -1 -> {A<B, A>=B};
+ 0 -> {A==B, A/=B};
+ 1 -> {A>B, A=<B}
+ end,
+ {A,B}),
+ A2 = copy_term(A),
+ ?CHECK(A == A2, {A,A2}),
+ ?CHECK(0 =:= cmp(A, A2, false), {A,A2}),
+
+ B2 = copy_term(B),
+ ?CHECK(B == B2, {B,B2}),
+ ?CHECK(0 =:= cmp(B, B2, false), {B,B2}),
+ ok.
+
+do_cmp(A, B, Exact) ->
+ C = cmp(A, B, Exact),
+ C.
+
+%% Generate two terms {A,B} that may only differ
+%% at float vs integer types.
+term_gen_recursive(Leafs, Flags, Depth) ->
+ MaxDepth = 10,
+ Rnd = case {Flags, Depth} of
+ {_, MaxDepth} -> % Only leafs
+ random:uniform(size(Leafs)) + 3;
+ {0, 0} -> % Only containers
+ random:uniform(3);
+ {0,_} -> % Anything
+ random:uniform(size(Leafs)+3)
+ end,
+ case Rnd of
+ 1 -> % Make map
+ Size = random:uniform(size(Leafs)),
+ lists:foldl(fun(_, {Acc1,Acc2}) ->
+ {K1,K2} = term_gen_recursive(Leafs, Flags,
+ Depth+1),
+ {V1,V2} = term_gen_recursive(Leafs, Flags, Depth+1),
+ {maps:put(K1,V1, Acc1), maps:put(K2,V2, Acc2)}
+ end,
+ {maps:new(), maps:new()},
+ lists:seq(1,Size));
+ 2 -> % Make cons
+ {Car1,Car2} = term_gen_recursive(Leafs, Flags, Depth+1),
+ {Cdr1,Cdr2} = term_gen_recursive(Leafs, Flags, Depth+1),
+ {[Car1 | Cdr1], [Car2 | Cdr2]};
+ 3 -> % Make tuple
+ Size = random:uniform(size(Leafs)),
+ L = lists:map(fun(_) -> term_gen_recursive(Leafs, Flags, Depth+1) end,
+ lists:seq(1,Size)),
+ {L1, L2} = lists:unzip(L),
+ {list_to_tuple(L1), list_to_tuple(L2)};
+
+ N -> % Make leaf
+ case element(N-3, Leafs) of
+ I when is_integer(I) ->
+ case random:uniform(4) of
+ 1 -> {I, float(I)};
+ 2 -> {float(I), I};
+ _ -> {I,I}
+ end;
+ T -> {T,T}
+ end
+ end.
+
+%% BIFs
+t_bif_map_get(Config) when is_list(Config) ->
+ %% small map
+ 1 = maps:get(a, #{ a=> 1}),
+ 2 = maps:get(b, #{ a=> 1, b => 2}),
+ "hi" = maps:get("hello", #{ a=>1, "hello" => "hi"}),
+ "tuple hi" = maps:get({1,1.0}, #{ a=>a, {1,1.0} => "tuple hi"}),
+
+ M0 = #{ k1=>"v1", <<"k2">> => <<"v3">> },
+ "v4" = maps:get(<<"k2">>, M0#{<<"k2">> => "v4"}),
+
+ %% large map
+ M1 = maps:from_list([{I,I}||I<-lists:seq(1,100)] ++
+ [{a,1},{b,2},{"hello","hi"},{{1,1.0},"tuple hi"},
+ {k1,"v1"},{<<"k2">>,"v3"}]),
+ 1 = maps:get(a, M1),
+ 2 = maps:get(b, M1),
+ "hi" = maps:get("hello", M1),
+ "tuple hi" = maps:get({1,1.0}, M1),
+ "v3" = maps:get(<<"k2">>, M1),
+
+ %% error cases
+ do_badmap(fun(T) ->
+ {'EXIT',{{badmap,T},[{maps,get,_,_}|_]}} =
+ (catch maps:get(a, T))
+ end),
+
+ {'EXIT',{{badkey,{1,1}},[{maps,get,_,_}|_]}} =
+ (catch maps:get({1,1}, #{{1,1.0} => "tuple"})),
+ {'EXIT',{{badkey,a},[{maps,get,_,_}|_]}} = (catch maps:get(a, #{})),
+ {'EXIT',{{badkey,a},[{maps,get,_,_}|_]}} =
+ (catch maps:get(a, #{b=>1, c=>2})),
+ ok.
+
+t_bif_map_find(Config) when is_list(Config) ->
+ %% small map
+ {ok, 1} = maps:find(a, #{ a=> 1}),
+ {ok, 2} = maps:find(b, #{ a=> 1, b => 2}),
+ {ok, "int"} = maps:find(1, #{ 1 => "int"}),
+ {ok, "float"} = maps:find(1.0, #{ 1.0=> "float"}),
+
+ {ok, "hi"} = maps:find("hello", #{ a=>1, "hello" => "hi"}),
+ {ok, "tuple hi"} = maps:find({1,1.0}, #{ a=>a, {1,1.0} => "tuple hi"}),
+
+ M0 = #{ k1=>"v1", <<"k2">> => <<"v3">> },
+ {ok, "v4"} = maps:find(<<"k2">>, M0#{ <<"k2">> => "v4" }),
+
+ %% large map
+ M1 = maps:from_list([{I,I}||I<-lists:seq(1,100)] ++
+ [{a,1},{b,2},{"hello","hi"},{{1,1.0},"tuple hi"},
+ {k1,"v1"},{<<"k2">>,"v3"}]),
+ {ok, 1} = maps:find(a, M1),
+ {ok, 2} = maps:find(b, M1),
+ {ok, "hi"} = maps:find("hello", M1),
+ {ok, "tuple hi"} = maps:find({1,1.0}, M1),
+ {ok, "v3"} = maps:find(<<"k2">>, M1),
+
+ %% error case
+ error = maps:find(a,#{}),
+ error = maps:find(a,#{b=>1, c=>2}),
+ error = maps:find(1.0, #{ 1 => "int"}),
+ error = maps:find(1, #{ 1.0 => "float"}),
+ error = maps:find({1.0,1}, #{ a=>a, {1,1.0} => "tuple hi"}), % reverse types in tuple key
+
+ do_badmap(fun(T) ->
+ {'EXIT',{{badmap,T},[{maps,find,_,_}|_]}} =
+ (catch maps:find(a, T))
+ end),
+ ok.
+
+
+t_bif_map_is_key(Config) when is_list(Config) ->
+ M1 = #{ "hi" => "hello", int => 3, <<"key">> => <<"value">>, 4 => number},
+
+ true = maps:is_key("hi", M1),
+ true = maps:is_key(int, M1),
+ true = maps:is_key(<<"key">>, M1),
+ true = maps:is_key(4, M1),
+
+ false = maps:is_key(5, M1),
+ false = maps:is_key(<<"key2">>, M1),
+ false = maps:is_key("h", M1),
+ false = maps:is_key("hello", M1),
+ false = maps:is_key(atom, M1),
+ false = maps:is_key(any, #{}),
+
+ false = maps:is_key("hi", maps:remove("hi", M1)),
+ true = maps:is_key("hi", M1),
+ true = maps:is_key(1, maps:put(1, "number", M1)),
+ false = maps:is_key(1.0, maps:put(1, "number", M1)),
+
+ %% error case
+ do_badmap(fun(T) ->
+ {'EXIT',{{badmap,T},[{maps,is_key,_,_}|_]}} =
+ (catch maps:is_key(a, T))
+ end),
+ ok.
+
+t_bif_map_keys(Config) when is_list(Config) ->
+ [] = maps:keys(#{}),
+
+ [1,2,3,4,5] = lists:sort(maps:keys(#{ 1 => a, 2 => b, 3 => c, 4 => d, 5 => e})),
+ [1,2,3,4,5] = lists:sort(maps:keys(#{ 4 => d, 5 => e, 1 => a, 2 => b, 3 => c})),
+
+    %% keys in term order: [4,int,"hi",<<"key">>]
+ M1 = #{ "hi" => "hello", int => 3, <<"key">> => <<"value">>, 4 => number},
+ [4,int,"hi",<<"key">>] = lists:sort(maps:keys(M1)),
+
+ %% error case
+ do_badmap(fun(T) ->
+ {'EXIT',{{badmap,T},[{maps,keys,_,_}|_]}} =
+ (catch maps:keys(T))
+ end),
+ ok.
+
+t_bif_map_new(Config) when is_list(Config) ->
+ #{} = maps:new(),
+ 0 = erlang:map_size(maps:new()),
+ ok.
+
+t_bif_map_merge(Config) when is_list(Config) ->
+ 0 = erlang:map_size(maps:merge(#{},#{})),
+
+ M0 = #{ "hi" => "hello", int => 3, <<"key">> => <<"value">>,
+ 4 => number, 18446744073709551629 => wat},
+
+ #{ "hi" := "hello", int := 3, <<"key">> := <<"value">>,
+ 4 := number, 18446744073709551629 := wat} = maps:merge(#{}, M0),
+
+ #{ "hi" := "hello", int := 3, <<"key">> := <<"value">>,
+ 4 := number, 18446744073709551629 := wat} = maps:merge(M0, #{}),
+
+ M1 = #{ "hi" => "hello again", float => 3.3, {1,2} => "tuple", 4 => integer },
+
+ #{4 := number, 18446744073709551629 := wat, float := 3.3, int := 3,
+ {1,2} := "tuple", "hi" := "hello", <<"key">> := <<"value">>} = maps:merge(M1,M0),
+
+ #{4 := integer, 18446744073709551629 := wat, float := 3.3, int := 3,
+ {1,2} := "tuple", "hi" := "hello again", <<"key">> := <<"value">>} = maps:merge(M0,M1),
+
+ %% try deep collisions
+ N = 150000,
+ Is = lists:seq(1,N),
+ M2 = maps:from_list([{I,I}||I<-Is]),
+ 150000 = maps:size(M2),
+ M3 = maps:from_list([{<<I:32>>,I}||I<-Is]),
+ 150000 = maps:size(M3),
+ M4 = maps:merge(M2,M3),
+ 300000 = maps:size(M4),
+ M5 = maps:from_list([{integer_to_list(I),I}||I<-Is]),
+ 150000 = maps:size(M5),
+ M6 = maps:merge(M4,M5),
+ 450000 = maps:size(M6),
+ M7 = maps:from_list([{float(I),I}||I<-Is]),
+ 150000 = maps:size(M7),
+ M8 = maps:merge(M7,M6),
+ 600000 = maps:size(M8),
+
+ #{ 1 := 1, "1" := 1, <<1:32>> := 1 } = M8,
+ #{ 10 := 10, "10" := 10, <<10:32>> := 10 } = M8,
+ #{ 100 := 100, "100" := 100, <<100:32>> := 100 } = M8,
+ #{ 1000 := 1000, "1000" := 1000, <<1000:32>> := 1000 } = M8,
+ #{ 10000 := 10000, "10000" := 10000, <<10000:32>> := 10000 } = M8,
+ #{ 100000 := 100000, "100000" := 100000, <<100000:32>> := 100000 } = M8,
+
+ %% overlapping
+ M8 = maps:merge(M2,M8),
+ M8 = maps:merge(M3,M8),
+ M8 = maps:merge(M4,M8),
+ M8 = maps:merge(M5,M8),
+ M8 = maps:merge(M6,M8),
+ M8 = maps:merge(M7,M8),
+ M8 = maps:merge(M8,M8),
+
+ %% maps:merge/2 and mixed
+
+ Ks1 = [764492191,2361333849], %% deep collision
+ Ks2 = lists:seq(1,33),
+ M9 = maps:from_list([{K,K}||K <- Ks1]),
+ M10 = maps:from_list([{K,K}||K <- Ks2]),
+ M11 = maps:merge(M9,M10),
+ ok = check_keys_exist(Ks1 ++ Ks2, M11),
+
+ %% error case
+ do_badmap(fun(T) ->
+ {'EXIT',{{badmap,T},[{maps,merge,_,_}|_]}} =
+ (catch maps:merge(#{}, T)),
+ {'EXIT',{{badmap,T},[{maps,merge,_,_}|_]}} =
+ (catch maps:merge(T, #{})),
+ {'EXIT',{{badmap,T},[{maps,merge,_,_}|_]}} =
+ (catch maps:merge(T, T))
+ end),
+ ok.
+
+
+t_bif_map_put(Config) when is_list(Config) ->
+ M0 = #{ "hi" => "hello", int => 3, <<"key">> => <<"value">>,
+ 4 => number, 18446744073709551629 => wat},
+
+ M1 = #{ "hi" := "hello"} = maps:put("hi", "hello", #{}),
+
+ true = is_members(["hi"],maps:keys(M1)),
+ true = is_members(["hello"],maps:values(M1)),
+
+ M2 = #{ int := 3 } = maps:put(int, 3, M1),
+
+ true = is_members([int,"hi"],maps:keys(M2)),
+ true = is_members([3,"hello"],maps:values(M2)),
+
+ M3 = #{ <<"key">> := <<"value">> } = maps:put(<<"key">>, <<"value">>, M2),
+
+ true = is_members([int,"hi",<<"key">>],maps:keys(M3)),
+ true = is_members([3,"hello",<<"value">>],maps:values(M3)),
+
+ M4 = #{ 18446744073709551629 := wat } = maps:put(18446744073709551629, wat, M3),
+
+ true = is_members([18446744073709551629,int,"hi",<<"key">>],maps:keys(M4)),
+ true = is_members([wat,3,"hello",<<"value">>],maps:values(M4)),
+
+ M0 = #{ 4 := number } = M5 = maps:put(4, number, M4),
+
+ true = is_members([4,18446744073709551629,int,"hi",<<"key">>],maps:keys(M5)),
+ true = is_members([number,wat,3,"hello",<<"value">>],maps:values(M5)),
+
+ M6 = #{ <<"key">> := <<"other value">> } = maps:put(<<"key">>, <<"other value">>, M5),
+
+ true = is_members([4,18446744073709551629,int,"hi",<<"key">>],maps:keys(M6)),
+ true = is_members([number,wat,3,"hello",<<"other value">>],maps:values(M6)),
+
+ %% error case
+ do_badmap(fun(T) ->
+ {'EXIT',{{badmap,T},[{maps,put,_,_}|_]}} =
+ (catch maps:put(1, a, T))
+ end),
+ ok.
+
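+%% True when Ls contains exactly the elements of Ks, in any order.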
+is_members(Ks,Ls) when length(Ks) =/= length(Ls) -> false;
+is_members(Ks,Ls) -> is_members_do(Ks,Ls).
+
+is_members_do([],[]) -> true;
+is_members_do([],_) -> false;
+is_members_do([K|Ks],Ls) ->
+ is_members_do(Ks, lists:delete(K,Ls)).
+
+t_bif_map_remove(Config) when is_list(Config) ->
+ 0 = erlang:map_size(maps:remove(some_key, #{})),
+
+ M0 = #{ "hi" => "hello", int => 3, <<"key">> => <<"value">>,
+ 4 => number, 18446744073709551629 => wat},
+
+ M1 = maps:remove("hi", M0),
+ true = is_members([4,18446744073709551629,int,<<"key">>],maps:keys(M1)),
+ true = is_members([number,wat,3,<<"value">>],maps:values(M1)),
+
+ M2 = maps:remove(int, M1),
+ true = is_members([4,18446744073709551629,<<"key">>],maps:keys(M2)),
+ true = is_members([number,wat,<<"value">>],maps:values(M2)),
+
+ M3 = maps:remove(<<"key">>, M2),
+ true = is_members([4,18446744073709551629],maps:keys(M3)),
+ true = is_members([number,wat],maps:values(M3)),
+
+ M4 = maps:remove(18446744073709551629, M3),
+ true = is_members([4],maps:keys(M4)),
+ true = is_members([number],maps:values(M4)),
+
+ M5 = maps:remove(4, M4),
+ [] = maps:keys(M5),
+ [] = maps:values(M5),
+
+ M0 = maps:remove(5,M0),
+ M0 = maps:remove("hi there",M0),
+
+ #{ "hi" := "hello", int := 3, 4 := number} = maps:remove(18446744073709551629,maps:remove(<<"key">>,M0)),
+
+ %% error case
+ do_badmap(fun(T) ->
+ {'EXIT',{{badmap,T},[{maps,remove,_,_}|_]}} =
+ (catch maps:remove(a, T))
+ end),
+ ok.
+
+t_bif_map_update(Config) when is_list(Config) ->
+ M0 = #{ "hi" => "hello", int => 3, <<"key">> => <<"value">>,
+ 4 => number, 18446744073709551629 => wat},
+
+ #{ "hi" := "hello again", int := 3, <<"key">> := <<"value">>,
+ 4 := number, 18446744073709551629 := wat} = maps:update("hi", "hello again", M0),
+
+ #{ "hi" := "hello", int := 1337, <<"key">> := <<"value">>,
+ 4 := number, 18446744073709551629 := wat} = maps:update(int, 1337, M0),
+
+ #{ "hi" := "hello", int := 3, <<"key">> := <<"new value">>,
+ 4 := number, 18446744073709551629 := wat} = maps:update(<<"key">>, <<"new value">>, M0),
+
+ #{ "hi" := "hello", int := 3, <<"key">> := <<"value">>,
+ 4 := integer, 18446744073709551629 := wat} = maps:update(4, integer, M0),
+
+ #{ "hi" := "hello", int := 3, <<"key">> := <<"value">>,
+ 4 := number, 18446744073709551629 := wazzup} = maps:update(18446744073709551629, wazzup, M0),
+
+ %% error case
+ do_badmap(fun(T) ->
+ {'EXIT',{{badmap,T},[{maps,update,_,_}|_]}} =
+ (catch maps:update(1, none, T))
+ end),
+ ok.
+
+
+
+t_bif_map_values(Config) when is_list(Config) ->
+
+ [] = maps:values(#{}),
+ [1] = maps:values(#{a=>1}),
+
+ true = is_members([a,b,c,d,e],maps:values(#{ 1 => a, 2 => b, 3 => c, 4 => d, 5 => e})),
+ true = is_members([a,b,c,d,e],maps:values(#{ 4 => d, 5 => e, 1 => a, 2 => b, 3 => c})),
+
+ M1 = #{ "hi" => "hello", int => 3, <<"key">> => <<"value">>, 4 => number},
+ M2 = M1#{ "hi" => "hello2", <<"key">> => <<"value2">> },
+ true = is_members([number,3,"hello2",<<"value2">>],maps:values(M2)),
+ true = is_members([number,3,"hello",<<"value">>],maps:values(M1)),
+
+ Vs = lists:seq(1000,20000),
+ M3 = maps:from_list([{K,K}||K<-Vs]),
+ M4 = maps:merge(M1,M3),
+ M5 = maps:merge(M2,M3),
+ true = is_members(Vs,maps:values(M3)),
+ true = is_members([number,3,"hello",<<"value">>]++Vs,maps:values(M4)),
+ true = is_members([number,3,"hello2",<<"value2">>]++Vs,maps:values(M5)),
+
+ %% error case
+ do_badmap(fun(T) ->
+ {'EXIT',{{badmap,T},[{maps,values,_,_}|_]}} =
+ (catch maps:values(T))
+ end),
+ ok.
+
+t_erlang_hash(Config) when is_list(Config) ->
+ ok = t_bif_erlang_phash2(),
+ ok = t_bif_erlang_phash(),
+ ok.
+
+t_bif_erlang_phash2() ->
+
+ 39679005 = erlang:phash2(#{}),
+ 33667975 = erlang:phash2(#{ a => 1, "a" => 2, <<"a">> => 3, {a,b} => 4 }), % 78942764
+ 95332690 = erlang:phash2(#{ 1 => a, 2 => "a", 3 => <<"a">>, 4 => {a,b} }), % 37338230
+ 108954384 = erlang:phash2(#{ 1 => a }), % 14363616
+ 59617982 = erlang:phash2(#{ a => 1 }), % 51612236
+
+ 42770201 = erlang:phash2(#{{} => <<>>}), % 37468437
+ 71687700 = erlang:phash2(#{<<>> => {}}), % 44049159
+
+ M0 = #{ a => 1, "key" => <<"value">> },
+ M1 = maps:remove("key",M0),
+ M2 = M1#{ "key" => <<"value">> },
+
+ 70249457 = erlang:phash2(M0), % 118679416
+ 59617982 = erlang:phash2(M1), % 51612236
+ 70249457 = erlang:phash2(M2), % 118679416
+ ok.
+
+t_bif_erlang_phash() ->
+ Sz = 1 bsl 32,
+ 1113425985 = erlang:phash(#{},Sz), % 268440612
+ 1510068139 = erlang:phash(#{ a => 1, "a" => 2, <<"a">> => 3, {a,b} => 4 },Sz), % 1196461908
+ 3182345590 = erlang:phash(#{ 1 => a, 2 => "a", 3 => <<"a">>, 4 => {a,b} },Sz), % 3944426064
+ 2927531828 = erlang:phash(#{ 1 => a },Sz), % 1394238263
+ 1670235874 = erlang:phash(#{ a => 1 },Sz), % 4066388227
+
+ 3935089469 = erlang:phash(#{{} => <<>>},Sz), % 1578050717
+ 71692856 = erlang:phash(#{<<>> => {}},Sz), % 1578050717
+
+ M0 = #{ a => 1, "key" => <<"value">> },
+ M1 = maps:remove("key",M0),
+ M2 = M1#{ "key" => <<"value">> },
+
+ 2620391445 = erlang:phash(M0,Sz), % 3590546636
+ 1670235874 = erlang:phash(M1,Sz), % 4066388227
+ 2620391445 = erlang:phash(M2,Sz), % 3590546636
+ ok.
+
+t_map_encode_decode(Config) when is_list(Config) ->
+ <<131,116,0,0,0,0>> = erlang:term_to_binary(#{}),
+ Pairs = [
+ {a,b},{"key","values"},{<<"key">>,<<"value">>},
+ {1,b},{[atom,1],{<<"wat">>,1,2,3}},
+ {aa,"values"},
+ {1 bsl 64 + (1 bsl 50 - 1), sc1},
+ {99, sc2},
+ {1 bsl 65 + (1 bsl 51 - 1), sc3},
+ {88, sc4},
+ {1 bsl 66 + (1 bsl 52 - 1), sc5},
+ {77, sc6},
+ {1 bsl 67 + (1 bsl 53 - 1), sc3},
+ {75, sc6}, {-10,sc8},
+ {<<>>, sc9}, {3.14158, sc10},
+ {[3.14158], sc11}, {more_atoms, sc12},
+ {{more_tuples}, sc13}, {self(), sc14},
+ {{},{}},{[],[]}
+ ],
+ ok = map_encode_decode_and_match(Pairs,[],#{}),
+
+ %% check sorting
+
+ %% literally #{ b=>2, a=>1 } in the internal order
+ #{ a:=1, b:=2 } =
+ erlang:binary_to_term(<<131,116,0,0,0,2,100,0,1,98,97,2,100,0,1,97,97,1>>),
+
+
+ %% literally #{ "hi" => "value", a=>33, b=>55 } in the internal order
+ #{ a:=33, b:=55, "hi" := "value"} = erlang:binary_to_term(<<131,116,0,0,0,3,
+ 107,0,2,104,105, % "hi" :: list()
+ 107,0,5,118,97,108,117,101, % "value" :: list()
+ 100,0,1,97, % a :: atom()
+ 97,33, % 33 :: integer()
+ 100,0,1,98, % b :: atom()
+ 97,55 % 55 :: integer()
+ >>),
+
+ %% Maps of different sizes
+ lists:foldl(fun(Key, M0) ->
+ M1 = M0#{Key => Key},
+ case Key rem 17 of
+ 0 ->
+ M1 = binary_to_term(term_to_binary(M1));
+ _ ->
+ ok
+ end,
+ M1
+ end,
+ #{},
+ lists:seq(1,10000)),
+
+ %% many maps in same binary
+ MapList = lists:foldl(fun(K, [M|_]=Acc) -> [M#{K => K} | Acc] end,
+ [#{}],
+ lists:seq(1,100)),
+ MapList = binary_to_term(term_to_binary(MapList)),
+ MapListR = lists:reverse(MapList),
+ MapListR = binary_to_term(term_to_binary(MapListR)),
+
+ %% error cases
+ %% template: <<131,116,0,0,0,2,100,0,1,97,100,0,1,98,97,1,97,1>>
+ %% which is: #{ a=>1, b=>1 }
+
+ %% uniqueness violation
+ %% literally #{ a=>1, "hi"=>"value", a=>2 }
+ {'EXIT',{badarg,[{_,_,_,_}|_]}} = (catch
+ erlang:binary_to_term(<<131,116,0,0,0,3,
+ 100,0,1,97,
+ 97,1,
+ 107,0,2,104,105,
+ 107,0,5,118,97,108,117,101,
+ 100,0,1,97,
+ 97,2>>)),
+
+ %% bad size (too large)
+ {'EXIT',{badarg,[{_,_,_,_}|_]}} = (catch
+ erlang:binary_to_term(<<131,116,0,0,0,12,100,0,1,97,97,1,100,0,1,98,97,1>>)),
+
+    %% bad size (too small) .. should arguably fail, but instead the term is
+    %% just truncated .. weird.
+    %% possibly change the external format so the truncated result would be #{a:=1}
+ #{ a:=b } =
+ erlang:binary_to_term(<<131,116,0,0,0,1,100,0,1,97,100,0,1,98,97,1,97,1>>),
+
+ ok.
+
+map_encode_decode_and_match([{K,V}|Pairs], EncodedPairs, M0) ->
+ M1 = maps:put(K,V,M0),
+ B0 = erlang:term_to_binary(M1),
+ Ls = [{erlang:term_to_binary(K), erlang:term_to_binary(V)}|EncodedPairs],
+ ok = match_encoded_map(B0, length(Ls), Ls),
+ %% decode and match it
+ M1 = erlang:binary_to_term(B0),
+ map_encode_decode_and_match(Pairs,Ls,M1);
+map_encode_decode_and_match([],_,_) -> ok.
+
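+%% Checks the external format header <<131,116,Size:32>> and that the body is
+%% made up of the individually encoded key/value pairs, in any order.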
+match_encoded_map(<<131,116,Size:32,Encoded/binary>>,Size,Items) ->
+ match_encoded_map_stripped_size(Encoded,Items,Items);
+match_encoded_map(_,_,_) -> no_match_size.
+
+match_encoded_map_stripped_size(<<>>,_,_) -> ok;
+match_encoded_map_stripped_size(B0,[{<<131,K/binary>>,<<131,V/binary>>}|Items],Ls) ->
+ Ksz = byte_size(K),
+ Vsz = byte_size(V),
+ case B0 of
+ <<K:Ksz/binary,V:Vsz/binary,B1/binary>> ->
+ match_encoded_map_stripped_size(B1,Ls,Ls);
+ _ ->
+ match_encoded_map_stripped_size(B0,Items,Ls)
+ end;
+match_encoded_map_stripped_size(_,[],_) -> fail.
+
+
+t_bif_map_to_list(Config) when is_list(Config) ->
+ [] = maps:to_list(#{}),
+ [{a,1},{b,2}] = lists:sort(maps:to_list(#{a=>1,b=>2})),
+ [{a,1},{b,2},{c,3}] = lists:sort(maps:to_list(#{c=>3,a=>1,b=>2})),
+ [{a,1},{b,2},{g,3}] = lists:sort(maps:to_list(#{g=>3,a=>1,b=>2})),
+ [{a,1},{b,2},{g,3},{"c",4}] = lists:sort(maps:to_list(#{g=>3,a=>1,b=>2,"c"=>4})),
+ [{3,v2},{hi,v4},{{hi,3},v5},{"hi",v3},{<<"hi">>,v1}] =
+ lists:sort(maps:to_list(#{<<"hi">>=>v1,3=>v2,"hi"=>v3,hi=>v4,{hi,3}=>v5})),
+
+ [{3,v7},{hi,v9},{{hi,3},v10},{"hi",v8},{<<"hi">>,v6}] =
+ lists:sort(maps:to_list(#{<<"hi">>=>v1,3=>v2,"hi"=>v3,hi=>v4,{hi,3}=>v5,
+ <<"hi">>=>v6,3=>v7,"hi"=>v8,hi=>v9,{hi,3}=>v10})),
+
+ %% error cases
+ do_badmap(fun(T) ->
+ {'EXIT', {{badmap,T},_}} =
+ (catch maps:to_list(T))
+ end),
+ ok.
+
+
+t_bif_map_from_list(Config) when is_list(Config) ->
+ #{} = maps:from_list([]),
+ A = maps:from_list([]),
+ 0 = erlang:map_size(A),
+
+ #{a:=1,b:=2} = maps:from_list([{a,1},{b,2}]),
+ #{c:=3,a:=1,b:=2} = maps:from_list([{a,1},{b,2},{c,3}]),
+ #{g:=3,a:=1,b:=2} = maps:from_list([{a,1},{b,2},{g,3}]),
+
+ #{a:=2} = maps:from_list([{a,1},{a,3},{a,2}]),
+
+ #{ <<"hi">>:=v1,3:=v3,"hi":=v6,hi:=v4,{hi,3}:=v5} =
+ maps:from_list([{3,v3},{"hi",v6},{hi,v4},{{hi,3},v5},{<<"hi">>,v1}]),
+
+ #{<<"hi">>:=v6,3:=v8,"hi":=v11,hi:=v9,{hi,3}:=v10} =
+ maps:from_list([ {{hi,3},v3}, {"hi",v0},{3,v1}, {<<"hi">>,v4}, {hi,v2},
+ {<<"hi">>,v6}, {{hi,3},v10},{"hi",v11}, {hi,v9}, {3,v8}]),
+
+ %% repeated keys (large -> small)
+ Ps1 = [{a,I}|| I <- lists:seq(1,32)],
+ Ps2 = [{a,I}|| I <- lists:seq(33,64)],
+
+ M = maps:from_list(Ps1 ++ [{b,1},{c,1}] ++ Ps2),
+ #{ a := 64, b := 1, c := 1 } = M,
+
+ %% error cases
+ {'EXIT', {badarg,_}} = (catch maps:from_list([{a,b},b])),
+ {'EXIT', {badarg,_}} = (catch maps:from_list([{a,b},{b,b,3}])),
+ {'EXIT', {badarg,_}} = (catch maps:from_list([{a,b},<<>>])),
+ {'EXIT', {badarg,_}} = (catch maps:from_list([{a,b}|{b,a}])),
+ {'EXIT', {badarg,_}} = (catch maps:from_list(a)),
+ {'EXIT', {badarg,_}} = (catch maps:from_list(42)),
+ ok.
+
+t_bif_build_and_check(Config) when is_list(Config) ->
+ ok = check_build_and_remove(750,[
+ fun(K) -> [K,K] end,
+ fun(K) -> [float(K),K] end,
+ fun(K) -> K end,
+ fun(K) -> {1,K} end,
+ fun(K) -> {K} end,
+ fun(K) -> [K|K] end,
+ fun(K) -> [K,1,2,3,4] end,
+ fun(K) -> {K,atom} end,
+ fun(K) -> float(K) end,
+ fun(K) -> integer_to_list(K) end,
+ fun(K) -> list_to_atom(integer_to_list(K)) end,
+ fun(K) -> [K,{K,[K,{K,[K]}]}] end,
+ fun(K) -> <<K:32>> end
+ ]),
+
+ ok.
+
+check_build_and_remove(_,[]) -> ok;
+check_build_and_remove(N,[F|Fs]) ->
+ {M,Ks} = build_and_check(N, maps:new(), F, []),
+ ok = remove_and_check(Ks,M),
+ check_build_and_remove(N,Fs).
+
+build_and_check(0, M0, _, Ks) -> {M0, Ks};
+build_and_check(N, M0, F, Ks) ->
+ K = build_key(F,N),
+ M1 = maps:put(K,K,M0),
+ ok = check_keys_exist([I||{I,_} <- [{K,M1}|Ks]], M1),
+ M2 = maps:update(K,v,M1),
+ v = maps:get(K,M2),
+ build_and_check(N-1,M1,F,[{K,M1}|Ks]).
+
+remove_and_check([],_) -> ok;
+remove_and_check([{K,Mc}|Ks], M0) ->
+ K = maps:get(K,M0),
+ true = maps:is_key(K,M0),
+ true = Mc =:= M0,
+ true = M0 == Mc,
+ M1 = maps:remove(K,M0),
+ false = M1 =:= Mc,
+ false = Mc == M1,
+ false = maps:is_key(K,M1),
+ true = maps:is_key(K,M0),
+ ok = check_keys_exist([I||{I,_} <- Ks],M1),
+ error = maps:find(K,M1),
+ remove_and_check(Ks, M1).
+
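+%% Varies the shape of the generated key depending on N rem 3: as is, wrapped
+%% in a tuple, or wrapped in a list.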
+build_key(F,N) when N rem 3 =:= 0 -> F(N);
+build_key(F,N) when N rem 3 =:= 1 -> K = F(N), {K,K};
+build_key(F,N) when N rem 3 =:= 2 -> K = F(N), [K,K].
+
+check_keys_exist([], _) -> ok;
+check_keys_exist([K|Ks],M) ->
+ true = maps:is_key(K,M),
+ check_keys_exist(Ks,M).
+
+t_bif_merge_and_check(Config) when is_list(Config) ->
+
+ io:format("rand:export_seed() -> ~p\n",[rand:export_seed()]),
+
+ %% simple disjunct ones
+ %% make sure all keys are unique
+ Kss = [[a,b,c,d],
+ [1,2,3,4],
+ [],
+ ["hi"],
+ [e],
+ [build_key(fun(K) -> {small,K} end, I) || I <- lists:seq(1,32)],
+ lists:seq(5, 28),
+ lists:seq(29, 59),
+ [build_key(fun(K) -> integer_to_list(K) end, I) || I <- lists:seq(2000,10000)],
+ [build_key(fun(K) -> <<K:32>> end, I) || I <- lists:seq(1,80)],
+ [build_key(fun(K) -> {<<K:32>>} end, I) || I <- lists:seq(100,1000)]],
+
+
+ KsMs = build_keys_map_pairs(Kss),
+ Cs = [{CKs1,CM1,CKs2,CM2} || {CKs1,CM1} <- KsMs, {CKs2,CM2} <- KsMs],
+ ok = merge_and_check_combo(Cs),
+
+ %% overlapping ones
+
+ KVs1 = [{a,1},{b,2},{c,3}],
+ KVs2 = [{b,3},{c,4},{d,5}],
+ KVs = [{I,I} || I <- lists:seq(1,32)],
+ KVs3 = KVs1 ++ KVs,
+ KVs4 = KVs2 ++ KVs,
+
+ M1 = maps:from_list(KVs1),
+ M2 = maps:from_list(KVs2),
+ M3 = maps:from_list(KVs3),
+ M4 = maps:from_list(KVs4),
+
+ M12 = maps:merge(M1,M2),
+ ok = check_key_values(KVs2 ++ [{a,1}], M12),
+ M21 = maps:merge(M2,M1),
+ ok = check_key_values(KVs1 ++ [{d,5}], M21),
+
+ M34 = maps:merge(M3,M4),
+ ok = check_key_values(KVs4 ++ [{a,1}], M34),
+ M43 = maps:merge(M4,M3),
+ ok = check_key_values(KVs3 ++ [{d,5}], M43),
+
+ M14 = maps:merge(M1,M4),
+ ok = check_key_values(KVs4 ++ [{a,1}], M14),
+ M41 = maps:merge(M4,M1),
+ ok = check_key_values(KVs1 ++ [{d,5}] ++ KVs, M41),
+
+ [begin Ma = random_map(SzA, a),
+ Mb = random_map(SzB, b),
+ ok = merge_maps(Ma, Mb)
+ end || SzA <- [3,10,20,100,200,1000], SzB <- [3,10,20,100,200,1000]],
+
+ ok.
+
+%% Generate a random map with on average Sz pairs of the form K => {V,K}.
+random_map(Sz, V) ->
+ random_map_insert(#{}, 0, V, Sz*2).
+
+random_map_insert(M0, K0, _, Sz) when K0 > Sz ->
+ M0;
+random_map_insert(M0, K0, V, Sz) ->
+ Key = K0 + rand:uniform(3),
+ random_map_insert(M0#{Key => {V,Key}}, Key, V, Sz).
+
+
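+%% Cross-checks maps:merge/2: keys present in B keep B's value, keys only in
+%% A keep A's value, and every key of the result comes from A or B.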
+merge_maps(A, B) ->
+ AB = maps:merge(A, B),
+ %%io:format("A=~p\nB=~p\n",[A,B]),
+ maps_foreach(fun(K,VB) -> VB = maps:get(K, AB)
+ end, B),
+ maps_foreach(fun(K,VA) ->
+ case {maps:get(K, AB),maps:find(K, B)} of
+ {VA, error} -> ok;
+ {VB, {ok, VB}} -> ok
+ end
+ end, A),
+
+ maps_foreach(fun(K,V) ->
+ case {maps:find(K, A),maps:find(K, B)} of
+ {{ok, V}, error} -> ok;
+ {error, {ok, V}} -> ok;
+ {{ok,_}, {ok, V}} -> ok
+ end
+ end, AB),
+ ok.
+
+maps_foreach(Fun, Map) ->
+ maps:fold(fun(K,V,_) -> Fun(K,V) end, void, Map).
+
+
+check_key_values([],_) -> ok;
+check_key_values([{K,V}|KVs],M) ->
+ V = maps:get(K,M),
+ check_key_values(KVs,M).
+
+merge_and_check_combo([]) -> ok;
+merge_and_check_combo([{Ks1,M1,Ks2,M2}|Cs]) ->
+ M12 = maps:merge(M1,M2),
+ ok = check_keys_exist(Ks1 ++ Ks2, M12),
+ M21 = maps:merge(M2,M1),
+ ok = check_keys_exist(Ks1 ++ Ks2, M21),
+
+ true = M12 =:= M21,
+ M12 = M21,
+
+ merge_and_check_combo(Cs).
+
+build_keys_map_pairs([]) -> [];
+build_keys_map_pairs([Ks|Kss]) ->
+ M = maps:from_list(keys_to_pairs(Ks)),
+ ok = check_keys_exist(Ks, M),
+ [{Ks,M}|build_keys_map_pairs(Kss)].
+
+keys_to_pairs(Ks) -> [{K,K} || K <- Ks].
+
+
+%% Maps module, not BIFs
+t_maps_fold(_Config) ->
+ Vs = lists:seq(1,100),
+ M = maps:from_list([{{k,I},{v,I}}||I<-Vs]),
+
+ %% fold
+ 5050 = maps:fold(fun({k,_},{v,V},A) -> V + A end, 0, M),
+
+ ok.
+
+t_maps_map(_Config) ->
+ Vs = lists:seq(1,100),
+ M1 = maps:from_list([{I,I}||I<-Vs]),
+ M2 = maps:from_list([{I,{token,I}}||I<-Vs]),
+
+ M2 = maps:map(fun(_K,V) -> {token,V} end, M1),
+ ok.
+
+t_maps_size(_Config) ->
+ Vs = lists:seq(1,100),
+ lists:foldl(fun(I,M) ->
+ M1 = maps:put(I,I,M),
+ I = maps:size(M1),
+ M1
+ end, #{}, Vs),
+ ok.
+
+
+t_maps_without(_Config) ->
+ Ki = [11,22,33,44,55,66,77,88,99],
+ M0 = maps:from_list([{{k,I},{v,I}}||I<-lists:seq(1,100)]),
+ M1 = maps:from_list([{{k,I},{v,I}}||I<-lists:seq(1,100) -- Ki]),
+ M1 = maps:without([{k,I}||I <- Ki],M0),
+ ok.
+
+t_erts_internal_order(_Config) when is_list(_Config) ->
+ M = #{0 => 0,2147483648 => 0},
+ true = M =:= binary_to_term(term_to_binary(M)),
+
+ F1 = fun(_, _) -> 0 end,
+ F2 = fun(_, _) -> 1 end,
+ M0 = maps:from_list( [{-2147483649, 0}, {0,0}, {97, 0}, {false, 0}, {flower, 0}, {F1, 0}, {F2, 0}, {<<>>, 0}]),
+ M1 = maps:merge(M0, #{0 => 1}),
+ 8 = maps:size(M1),
+ 1 = maps:get(0,M1),
+ ok.
+
+t_erts_internal_hash(_Config) when is_list(_Config) ->
+ K1 = 0.0,
+ K2 = 0.0/-1,
+ M = maps:from_list([{I,I}||I<-lists:seq(1,32)]),
+
+ M1 = M#{ K1 => a, K2 => b },
+ b = maps:get(K2,M1),
+
+ M2 = M#{ K2 => a, K1 => b },
+ b = maps:get(K1,M2),
+
+ %% test previously faulty hash list optimization
+
+ M3 = M#{[0] => a, [0,0] => b, [0,0,0] => c, [0,0,0,0] => d},
+ a = maps:get([0],M3),
+ b = maps:get([0,0],M3),
+ c = maps:get([0,0,0],M3),
+ d = maps:get([0,0,0,0],M3),
+
+ M4 = M#{{[0]} => a, {[0,0]} => b, {[0,0,0]} => c, {[0,0,0,0]} => d},
+ a = maps:get({[0]},M4),
+ b = maps:get({[0,0]},M4),
+ c = maps:get({[0,0,0]},M4),
+ d = maps:get({[0,0,0,0]},M4),
+
+ M5 = M3#{[0,0,0] => e, [0,0,0,0] => f, [0,0,0,0,0] => g,
+ [0,0,0,0,0,0] => h, [0,0,0,0,0,0,0] => i,
+ [0,0,0,0,0,0,0,0] => j, [0,0,0,0,0,0,0,0,0] => k},
+
+ a = maps:get([0],M5),
+ b = maps:get([0,0],M5),
+ e = maps:get([0,0,0],M5),
+ f = maps:get([0,0,0,0],M5),
+ g = maps:get([0,0,0,0,0],M5),
+ h = maps:get([0,0,0,0,0,0],M5),
+ i = maps:get([0,0,0,0,0,0,0],M5),
+ j = maps:get([0,0,0,0,0,0,0,0],M5),
+ k = maps:get([0,0,0,0,0,0,0,0,0],M5),
+
+ M6 = M4#{{[0,0,0]} => e, {[0,0,0,0]} => f, {[0,0,0,0,0]} => g,
+ {[0,0,0,0,0,0]} => h, {[0,0,0,0,0,0,0]} => i,
+ {[0,0,0,0,0,0,0,0]} => j, {[0,0,0,0,0,0,0,0,0]} => k},
+
+ a = maps:get({[0]},M6),
+ b = maps:get({[0,0]},M6),
+ e = maps:get({[0,0,0]},M6),
+ f = maps:get({[0,0,0,0]},M6),
+ g = maps:get({[0,0,0,0,0]},M6),
+ h = maps:get({[0,0,0,0,0,0]},M6),
+ i = maps:get({[0,0,0,0,0,0,0]},M6),
+ j = maps:get({[0,0,0,0,0,0,0,0]},M6),
+ k = maps:get({[0,0,0,0,0,0,0,0,0]},M6),
+
+ M7 = maps:merge(M5,M6),
+
+ a = maps:get([0],M7),
+ b = maps:get([0,0],M7),
+ e = maps:get([0,0,0],M7),
+ f = maps:get([0,0,0,0],M7),
+ g = maps:get([0,0,0,0,0],M7),
+ h = maps:get([0,0,0,0,0,0],M7),
+ i = maps:get([0,0,0,0,0,0,0],M7),
+ j = maps:get([0,0,0,0,0,0,0,0],M7),
+ k = maps:get([0,0,0,0,0,0,0,0,0],M7),
+ a = maps:get({[0]},M7),
+ b = maps:get({[0,0]},M7),
+ e = maps:get({[0,0,0]},M7),
+ f = maps:get({[0,0,0,0]},M7),
+ g = maps:get({[0,0,0,0,0]},M7),
+ h = maps:get({[0,0,0,0,0,0]},M7),
+ i = maps:get({[0,0,0,0,0,0,0]},M7),
+ j = maps:get({[0,0,0,0,0,0,0,0]},M7),
+ k = maps:get({[0,0,0,0,0,0,0,0,0]},M7),
+ ok.
+
+t_pdict(_Config) ->
+
+ put(#{ a => b, b => a},#{ c => d}),
+ put(get(#{ a => b, b => a}),1),
+ 1 = get(#{ c => d}),
+ #{ c := d } = get(#{ a => b, b => a}).
+
+t_ets(_Config) ->
+
+ Tid = ets:new(map_table,[]),
+
+ [ets:insert(Tid,{maps:from_list([{I,-I}]),I}) || I <- lists:seq(1,100)],
+
+
+ [{#{ 2 := -2},2}] = ets:lookup(Tid,#{ 2 => -2 }),
+
+ %% Test equal
+ [3,4] = lists:sort(
+ ets:select(Tid,[{{'$1','$2'},
+ [{'or',{'==','$1',#{ 3 => -3 }},
+ {'==','$1',#{ 4 => -4 }}}],
+ ['$2']}])),
+ %% Test match
+ [30,50] = lists:sort(
+ ets:select(Tid,
+ [{{#{ 30 => -30}, '$1'},[],['$1']},
+ {{#{ 50 => -50}, '$1'},[],['$1']}]
+ )),
+
+ ets:insert(Tid,{#{ a => b, b => c, c => a},transitivity}),
+
+ %% Test equal with map of different size
+ [] = ets:select(Tid,[{{'$1','_'},[{'==','$1',#{ b => c }}],['$_']}]),
+
+ %% Test match with map of different size
+ %[{#{ a := b },_}] = ets:select(Tid,[{{#{ b => c },'_'},[],['$_']}]),
+
+ %% Test match with don't care value
+ %[{#{ a := b },_}] = ets:select(Tid,[{{#{ b => '_' },'_'},[],['$_']}]),
+
+ %% Test is_map bif
+ 101 = length(ets:select(Tid,[{'$1',[{is_map,{element,1,'$1'}}],['$1']}])),
+ ets:insert(Tid,{not_a_map,2}),
+ 101 = length(ets:select(Tid,[{'$1',[{is_map,{element,1,'$1'}}],['$1']}])),
+ ets:insert(Tid,{{nope,a,tuple},2}),
+ 101 = length(ets:select(Tid,[{'$1',[{is_map,{element,1,'$1'}}],['$1']}])),
+
+ %% Test map_size bif
+ [3] = ets:select(Tid,[{{'$1','_'},[{'==',{map_size,'$1'},3}],
+ [{map_size,'$1'}]}]),
+
+ true = ets:delete(Tid,#{50 => -50}),
+ [] = ets:lookup(Tid,#{50 => -50}),
+
+ ets:delete(Tid),
+ ok.
+
+t_dets(_Config) ->
+ ok.
+
+t_tracing(_Config) ->
+
+ dbg:stop_clear(),
+ {ok,Tracer} = dbg:tracer(process,{fun trace_collector/2, self()}),
+ dbg:p(self(),c),
+
+ %% Test basic map call
+ {ok,_} = dbg:tpl(?MODULE,id,x),
+ id(#{ a => b }),
+ {trace,_,call,{?MODULE,id,[#{ a := b }]}} = getmsg(Tracer),
+ {trace,_,return_from,{?MODULE,id,1},#{ a := b }} = getmsg(Tracer),
+ dbg:ctpl(),
+
+ %% Test equals in argument list
+ {ok,_} = dbg:tpl(?MODULE,id,[{['$1'],[{'==','$1',#{ b => c}}],
+ [{return_trace}]}]),
+ id(#{ a => b }),
+ id(#{ b => c }),
+ {trace,_,call,{?MODULE,id,[#{ b := c }]}} = getmsg(Tracer),
+ {trace,_,return_from,{?MODULE,id,1},#{ b := c }} = getmsg(Tracer),
+ dbg:ctpl(),
+
+ %% Test match in head
+ {ok,_} = dbg:tpl(?MODULE,id,[{[#{b => c}],[],[]}]),
+ id(#{ a => b }),
+ id(#{ b => c }),
+ {trace,_,call,{?MODULE,id,[#{ b := c }]}} = getmsg(Tracer),
+ dbg:ctpl(),
+
+ %% Test map guard bifs
+ {ok,_} = dbg:tpl(?MODULE,id,[{['$1'],[{is_map,{element,1,'$1'}}],[]}]),
+ id(#{ a => b }),
+ id({1,2}),
+ id({#{ a => b},2}),
+ {trace,_,call,{?MODULE,id,[{#{ a := b },2}]}} = getmsg(Tracer),
+ dbg:ctpl(),
+
+ {ok,_} = dbg:tpl(?MODULE,id,[{['$1'],[{'==',{map_size,{element,1,'$1'}},2}],[]}]),
+ id(#{ a => b }),
+ id({1,2}),
+ id({#{ a => b},2}),
+ id({#{ a => b, b => c},atom}),
+ {trace,_,call,{?MODULE,id,[{#{ a := b, b := c },atom}]}} = getmsg(Tracer),
+ dbg:ctpl(),
+
+ %MS = dbg:fun2ms(fun([A]) when A == #{ a => b} -> ok end),
+ %dbg:tpl(?MODULE,id,MS),
+ %id(#{ a => b }),
+ %id(#{ b => c }),
+ %{trace,_,call,{?MODULE,id,[#{ a := b }]}} = getmsg(Tracer),
+ %dbg:ctpl(),
+
+ %% Check that there are no extra messages
+ timeout = getmsg(Tracer),
+
+ dbg:stop_clear(),
+ ok.
+
+getmsg(_Tracer) ->
+ receive V -> V after 100 -> timeout end.
+
+trace_collector(Msg,Parent) ->
+ io:format("~p~n",[Msg]),
+ Parent ! Msg,
+ Parent.
+
+t_has_map_fields(Config) when is_list(Config) ->
+ true = has_map_fields_1(#{one=>1}),
+ true = has_map_fields_1(#{one=>1,two=>2}),
+ false = has_map_fields_1(#{two=>2}),
+ false = has_map_fields_1(#{}),
+
+ true = has_map_fields_2(#{c=>1,b=>2,a=>3}),
+ true = has_map_fields_2(#{c=>1,b=>2,a=>3,x=>42}),
+ false = has_map_fields_2(#{b=>2,c=>1}),
+ false = has_map_fields_2(#{x=>y}),
+ false = has_map_fields_2(#{}),
+
+ true = has_map_fields_3(#{c=>1,b=>2,a=>3}),
+ true = has_map_fields_3(#{c=>1,b=>2,a=>3,[]=>42}),
+ true = has_map_fields_3(#{b=>2,a=>3,[]=>42,42.0=>43}),
+ true = has_map_fields_3(#{a=>3,[]=>42,42.0=>43}),
+ true = has_map_fields_3(#{[]=>42,42.0=>43}),
+ false = has_map_fields_3(#{b=>2,c=>1}),
+ false = has_map_fields_3(#{[]=>y}),
+ false = has_map_fields_3(#{42.0=>x,a=>99}),
+ false = has_map_fields_3(#{}),
+
+ ok.
+
+has_map_fields_1(#{one:=_}) -> true;
+has_map_fields_1(#{}) -> false.
+
+has_map_fields_2(#{a:=_,b:=_,c:=_}) -> true;
+has_map_fields_2(#{}) -> false.
+
+has_map_fields_3(#{a:=_,b:=_}) -> true;
+has_map_fields_3(#{[]:=_,42.0:=_}) -> true;
+has_map_fields_3(#{}) -> false.
+
+y_regs(Config) when is_list(Config) ->
+ Val = [length(Config)],
+ Map0 = y_regs_update(#{}, Val),
+ Map2 = y_regs_update(Map0, Val),
+
+ Map3 = maps:from_list([{I,I*I} || I <- lists:seq(1, 100)]),
+ Map4 = y_regs_update(Map3, Val),
+
+ true = is_map(Map2) andalso is_map(Map4),
+
+ ok.
+
+y_regs_update(Map0, Val0) ->
+ Val1 = {t,Val0},
+ K1 = {key,1},
+ K2 = {key,2},
+ Map1 = Map0#{K1=>K1,
+ a=>Val0,b=>Val0,c=>Val0,d=>Val0,e=>Val0,
+ f=>Val0,g=>Val0,h=>Val0,i=>Val0,j=>Val0,
+ k=>Val0,l=>Val0,m=>Val0,n=>Val0,o=>Val0,
+ p=>Val0,q=>Val0,r=>Val0,s=>Val0,t=>Val0,
+ u=>Val0,v=>Val0,w=>Val0,x=>Val0,y=>Val0,
+ z=>Val0,
+ aa=>Val0,ab=>Val0,ac=>Val0,ad=>Val0,ae=>Val0,
+ af=>Val0,ag=>Val0,ah=>Val0,ai=>Val0,aj=>Val0,
+ ak=>Val0,al=>Val0,am=>Val0,an=>Val0,ao=>Val0,
+ ap=>Val0,aq=>Val0,ar=>Val0,as=>Val0,at=>Val0,
+ au=>Val0,av=>Val0,aw=>Val0,ax=>Val0,ay=>Val0,
+ az=>Val0,
+ K2=>[a,b,c]},
+ Map2 = Map1#{K1=>K1,
+ a:=Val1,b:=Val1,c:=Val1,d:=Val1,e:=Val1,
+ f:=Val1,g:=Val1,h:=Val1,i:=Val1,j:=Val1,
+ k:=Val1,l:=Val1,m:=Val1,n:=Val1,o:=Val1,
+ p:=Val1,q:=Val1,r:=Val1,s:=Val1,t:=Val1,
+ u:=Val1,v:=Val1,w:=Val1,x:=Val1,y:=Val1,
+ z:=Val1,
+ aa:=Val1,ab:=Val1,ac:=Val1,ad:=Val1,ae:=Val1,
+ af:=Val1,ag:=Val1,ah:=Val1,ai:=Val1,aj:=Val1,
+ ak:=Val1,al:=Val1,am:=Val1,an:=Val1,ao:=Val1,
+ ap:=Val1,aq:=Val1,ar:=Val1,as:=Val1,at:=Val1,
+ au:=Val1,av:=Val1,aw:=Val1,ax:=Val1,ay:=Val1,
+ az:=Val1,
+ K2=>[a,b,c]},
+
+ %% Traverse the maps to validate them.
+ _ = erlang:phash2({Map1,Map2}, 100000),
+
+ _ = {K1,K2,Val0,Val1}, %Force use of Y registers.
+ Map2.
+
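+%% Apply Test/1 to a selection of non-map terms (including the fun itself) for the badmap tests.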
+do_badmap(Test) ->
+ Terms = [Test,fun erlang:abs/1,make_ref(),self(),0.0/-1,
+ <<0:1024>>,<<1:1>>,<<>>,<<1,2,3>>,
+ [],{a,b,c},[a,b],atom,10.0,42,(1 bsl 65) + 3],
+ [Test(T) || T <- Terms].
diff --git a/lib/dialyzer/test/map_SUITE_data/src/map_in_guard.erl b/lib/dialyzer/test/map_SUITE_data/src/map_in_guard.erl
new file mode 100644
index 0000000000..6176ef1fdf
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/map_in_guard.erl
@@ -0,0 +1,35 @@
+-module(map_in_guard).
+
+-export([test/0, raw_expr/0]).
+
+test() ->
+ false = assoc_guard(#{}),
+ true = assoc_guard(not_a_map),
+ #{a := true} = assoc_update(#{}),
+ {'EXIT', {{badmap, not_a_map}, [{?MODULE, assoc_update, 1, _}|_]}}
+ = (catch assoc_update(not_a_map)),
+ ok = assoc_guard_clause(#{}),
+ {'EXIT', {function_clause, [{?MODULE, assoc_guard_clause, _, _}|_]}}
+ = (catch assoc_guard_clause(not_a_map)),
+ true = exact_guard(#{a=>1}),
+ {'EXIT', {function_clause, [{?MODULE, assoc_guard_clause, _, _}|_]}}
+ %% There's nothing we can do to find the error here, is there?
+ = (catch (begin true = exact_guard(#{}) end)),
+ ok = exact_guard_clause(#{a => q}),
+ {'EXIT', {function_clause, [{?MODULE, exact_guard_clause, _, _}|_]}}
+ = (catch exact_guard_clause(#{})),
+ ok.
+
+assoc_guard(M) when is_map(M#{a => b}) -> true;
+assoc_guard(Q) -> false.
+
+assoc_update(M) -> M#{a => true}.
+
+assoc_guard_clause(M) when is_map(M#{a => 3}) -> ok.
+
+exact_guard(M) when (false =/= M#{a := b}) -> true;
+exact_guard(_) -> false.
+
+exact_guard_clause(M) when (false =/= M#{a := b}) -> ok.
+
+raw_expr() when #{}; true -> ok. %% Must not warn here!
diff --git a/lib/dialyzer/test/map_SUITE_data/src/map_in_guard2.erl b/lib/dialyzer/test/map_SUITE_data/src/map_in_guard2.erl
new file mode 100644
index 0000000000..ac2205e8fa
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/map_in_guard2.erl
@@ -0,0 +1,27 @@
+-module(map_in_guard2).
+
+-export([test/0]).
+
+test() ->
+ false = assoc_guard(not_a_map),
+ {'EXIT', {{badmap, not_a_map}, [{?MODULE, assoc_update, 1, _}|_]}}
+ = (catch assoc_update(not_a_map)),
+ {'EXIT', {function_clause, [{?MODULE, assoc_guard_clause, _, _}|_]}}
+ = (catch assoc_guard_clause(not_a_map)),
+ {'EXIT', {function_clause, [{?MODULE, assoc_guard_clause, _, _}|_]}}
+ = (catch (begin true = exact_guard(#{}) end)),
+ {'EXIT', {function_clause, [{?MODULE, exact_guard_clause, _, _}|_]}}
+ = (catch exact_guard_clause(#{})),
+ ok.
+
+assoc_guard(M) when is_map(M#{a => b}) -> true;
+assoc_guard(_) -> false.
+
+assoc_update(M) -> M#{a => true}.
+
+assoc_guard_clause(M) when is_map(M#{a => 3}) -> ok.
+
+exact_guard(M) when (false =/= M#{a := b}) -> true;
+exact_guard(_) -> false.
+
+exact_guard_clause(M) when (false =/= M#{a := b}) -> ok.
diff --git a/lib/dialyzer/test/map_SUITE_data/src/map_size.erl b/lib/dialyzer/test/map_SUITE_data/src/map_size.erl
new file mode 100644
index 0000000000..2da4f6904e
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/map_size.erl
@@ -0,0 +1,36 @@
+-module(map_size).
+
+-export([t1/0, e1/0, t2/0, t3/0, t4/0, t5/1, t6/1, t7/1]).
+
+t1() ->
+ 0 = maps:size(#{}),
+ 1 = maps:size(#{}).
+
+e1() ->
+ 0 = map_size(#{}),
+ 1 = map_size(#{}).
+
+t2() -> p(#{a=>x}).
+
+p(M) when map_size(M) =:= 0 -> ok.
+
+t3() ->
+ 1 = map_size(cio()),
+ 2 = map_size(cio()),
+ 3 = map_size(cio()),
+ 4 = map_size(cio()).
+
+t4() ->
+ 0 = map_size(cio()).
+
+t5(M) when map_size(M) =:= 0 ->
+ #{a := _} = M. %% Only t5 has no local return; want better message
+
+t6(M) when map_size(M) =:= 0 ->
+ #{} = M.
+
+t7(M=#{a := _}) when map_size(M) =:= 1 ->
+ #{b := _} = M. %% We should warn here too
+
+-spec cio() -> #{3 := ok, 9 => _, 11 => x}.
+cio() -> binary_to_term(<<131,116,0,0,0,2,97,3,100,0,2,111,107,97,9,97,6>>).
diff --git a/lib/dialyzer/test/map_SUITE_data/src/maps_merge.erl b/lib/dialyzer/test/map_SUITE_data/src/maps_merge.erl
new file mode 100644
index 0000000000..d4f3c6887a
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/maps_merge.erl
@@ -0,0 +1,29 @@
+-module(maps_merge).
+
+-export([t1/0, t2/0, t3/0, t4/0, t5/0]).
+
+t1() ->
+ #{a:=1} = maps:merge(#{}, #{}).
+
+t2() ->
+ #{hej := _} = maps:merge(cao(), cio()),
+ #{{} := _} = maps:merge(cao(), cio()).
+
+t3() ->
+ #{a:=1} = maps:merge(cao(), cio()),
+ #{7:=q} = maps:merge(cao(), cio()).
+
+t4() ->
+ #{a:=1} = maps:merge(cio(), cao()),
+ #{7:=q} = maps:merge(cio(), cao()).
+
+t5() ->
+ #{a:=2} = maps:merge(cao(), #{}).
+
+-spec cao() -> #{a := 1, q => none(), 11 => _, atom() => _}.
+cao() ->
+ binary_to_term(<<131,116,0,0,0,3,100,0,1,97,97,1,100,0,1,98,97,9,100,0,1,
+ 102,104,0>>).
+
+-spec cio() -> #{3 := ok, 7 => none(), z => _, integer() => _}.
+cio() -> binary_to_term(<<131,116,0,0,0,2,97,3,100,0,2,111,107,97,9,97,6>>).
diff --git a/lib/dialyzer/test/map_SUITE_data/src/opaque_bif.erl b/lib/dialyzer/test/map_SUITE_data/src/opaque_bif.erl
new file mode 100644
index 0000000000..40214a1887
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/opaque_bif.erl
@@ -0,0 +1,13 @@
+-module(opaque_bif).
+-export([o1/1]).
+-export_type([opaque_any_map/0]).
+-opaque opaque_any_map() :: map().
+
+%% ERL-249: A bug with opaque arguments to maps:merge/2
+%% Reported by Felipe Ripoll on 6/9/2016
+-spec o1(opaque_any_map()) -> opaque_any_map().
+o1(Map) ->
+ maps:merge(o1_c(), Map).
+
+-spec o1_c() -> opaque_any_map().
+o1_c() -> #{}.
diff --git a/lib/dialyzer/test/map_SUITE_data/src/opaque_key/opaque_key_adt.erl b/lib/dialyzer/test/map_SUITE_data/src/opaque_key/opaque_key_adt.erl
new file mode 100644
index 0000000000..b98c713c6b
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/opaque_key/opaque_key_adt.erl
@@ -0,0 +1,69 @@
+-module(opaque_key_adt).
+
+-compile(export_all).
+
+-export_type([t/0, t/1, m/0, s/1, sm/1]).
+
+-opaque t() :: #{atom() => integer()}.
+-opaque t(A) :: #{A => integer()}.
+
+-opaque m() :: #{t() => integer()}.
+-type mt() :: #{t() => integer()}.
+
+-opaque s(K) :: #{K => integer(), integer() => atom()}.
+-opaque sm(K) :: #{K := integer(), integer() := atom()}.
+-type smt(K) :: #{K := integer(), integer() := atom()}.
+
+-spec t0() -> t().
+t0() -> #{}.
+
+-spec t1() -> t(integer()).
+t1() -> #{3 => 1}.
+
+-spec m0() -> m().
+m0() -> #{#{} => 3}.
+
+-spec mt0() -> mt().
+mt0() -> #{#{} => 3}.
+
+-spec s0() -> s(atom()).
+s0() -> #{}.
+
+-spec s1() -> s(atom()).
+s1() -> #{3 => a}.
+
+-spec s2() -> s(atom() | 3).
+s2() -> #{3 => a}. %% Contract breakage (not found)
+
+-spec s3() -> s(atom() | 3).
+s3() -> #{3 => 5, a => 6, 7 => 8}.
+
+-spec s4() -> s(integer()).
+s4() -> #{1 => a}. %% Contract breakage
+
+-spec s5() -> s(1).
+s5() -> #{2 => 3}. %% Contract breakage
+
+-spec s6() -> s(1).
+s6() -> #{1 => 3}.
+
+-spec s7() -> s(integer()).
+s7() -> #{1 => 3}.
+
+-spec sm1() -> sm(1).
+sm1() -> #{1 => 2, 3 => a}.
+
+-spec smt1() -> smt(1).
+smt1() -> #{3 => a}. %% Contract breakage
+
+-spec smt2() -> smt(1).
+smt2() -> #{1 => a}. %% Contract breakage
+
+-spec smt3() -> smt(q).
+smt3() -> #{q => 1}. %% Slight contract breakage (probably requires better map type)
+
+-spec smt4() -> smt(q).
+smt4() -> #{q => 2, 3 => a}.
+
+-spec smt5() -> smt(1).
+smt5() -> #{1 => 2, 3 => a}.
diff --git a/lib/dialyzer/test/map_SUITE_data/src/opaque_key/opaque_key_use.erl b/lib/dialyzer/test/map_SUITE_data/src/opaque_key/opaque_key_use.erl
new file mode 100644
index 0000000000..917413fdd2
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/opaque_key/opaque_key_use.erl
@@ -0,0 +1,97 @@
+-module(opaque_key_use).
+
+-compile(export_all).
+
+-export_type([t/0, t/1]).
+
+-opaque t() :: #{atom() => integer()}.
+-opaque t(A) :: #{A => integer()}.
+
+tt1() ->
+ A = t0(),
+ B = t1(),
+ A =:= B. % never 'true'
+
+-spec t0() -> t().
+t0() -> #{a => 1}.
+
+-spec t1() -> t(integer()).
+t1() -> #{3 => 1}.
+
+adt_tt1() ->
+ A = adt_t0(),
+ B = adt_t1(),
+ A =:= B. % opaque attempt
+
+adt_tt2() ->
+ A = adt_t0(),
+ B = adt_t1(),
+ #{A => 1 % opaque key
+ ,B => 2 % opaque key
+ }.
+
+adt_tt3() ->
+ A = map_adt:t0(),
+ #{A => 1}. % opaque key
+
+adt_mm1() ->
+ A = adt_t0(),
+ M = adt_m0(),
+ #{A := R} = M, % opaque attempt
+ R.
+
+%% adt_ms1() ->
+%% A = adt_t0(),
+%% M = adt_m0(),
+%% M#{A}. % opaque arg
+
+adt_mu1() ->
+ A = adt_t0(),
+ M = adt_m0(),
+ M#{A := 4}. % opaque arg
+
+adt_mu2() ->
+ A = adt_t0(),
+ M = adt_m0(),
+ M#{A => 4}. % opaque arg
+
+adt_mu3() ->
+ M = adt_m0(),
+ M#{}. % opaque arg
+
+adt_mtm1() ->
+ A = adt_t0(),
+ M = adt_mt0(),
+ #{A := R} = M, % opaque key
+ R.
+
+%% adt_mts1() ->
+%% A = adt_t0(),
+%% M = adt_mt0(),
+%% M#{A}. % opaque key
+
+adt_mtu1() ->
+ A = adt_t0(),
+ M = adt_mt0(),
+ M#{A := 4}. % opaque key
+
+adt_mtu2() ->
+ A = adt_t0(),
+ M = adt_mt0(),
+ M#{A => 4}. % opaque key
+
+adt_mtu3() ->
+ M = adt_mt0(),
+ M#{}. % Ok to not warn
+
+adt_t0() ->
+ opaque_key_adt:t0().
+
+adt_t1() ->
+ opaque_key_adt:t1().
+
+adt_m0() ->
+ opaque_key_adt:m0().
+
+adt_mt0() ->
+ opaque_key_adt:mt0().
diff --git a/lib/dialyzer/test/map_SUITE_data/src/order.erl b/lib/dialyzer/test/map_SUITE_data/src/order.erl
new file mode 100644
index 0000000000..51868d7e94
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/order.erl
@@ -0,0 +1,56 @@
+-module(order).
+
+-export([t1/0, t2/0, t3/0, t4/0, t5/0, t6/0]).
+
+t1() ->
+ case maps:get(a, #{a=>1, a=>b}) of
+ Int when is_integer(Int) -> fail;
+ Atom when is_atom(Atom) -> error(ok);
+ _Else -> fail
+ end.
+
+t2() ->
+ case maps:get(a, #{a=>id_1(1), a=>id_b(b)}) of
+ Int when is_integer(Int) -> fail;
+ Atom when is_atom(Atom) -> error(ok);
+ _Else -> fail
+ end.
+
+t3() ->
+ case maps:get(a, #{a=>id_1(1), id_a(a)=>id_b(b)}) of
+ Int when is_integer(Int) -> fail;
+ Atom when is_atom(Atom) -> error(ok);
+ _Else -> fail
+ end.
+
+t4() ->
+ case maps:get(a, #{a=>id_1(1), a_or_b()=>id_b(b)}) of
+ Int when is_integer(Int) -> ok;
+ Atom when is_atom(Atom) -> ok;
+ _Else -> fail
+ end.
+
+t5() ->
+ case maps:get(c, #{c=>id_1(1), a_or_b()=>id_b(b)}) of
+ Int when is_integer(Int) -> error(ok);
+ Atom when is_atom(Atom) -> fail;
+ _Else -> fail
+ end.
+
+t6() ->
+ case maps:get(a, #{a_or_b()=>id_1(1), id_a(a)=>id_b(b)}) of
+ Int when is_integer(Int) -> fail;
+ Atom when is_atom(Atom) -> error(ok);
+ _Else -> fail
+ end.
+
+id_1(X) -> X.
+
+id_a(X) -> X.
+
+id_b(X) -> X.
+
+any() -> binary_to_term(<<>>).
+
+-spec a_or_b() -> a | b.
+a_or_b() -> any().
diff --git a/lib/dialyzer/test/map_SUITE_data/src/subtract_value_flip.erl b/lib/dialyzer/test/map_SUITE_data/src/subtract_value_flip.erl
new file mode 100644
index 0000000000..97e6b54e3c
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/subtract_value_flip.erl
@@ -0,0 +1,9 @@
+-module(subtract_value_flip).
+
+-export([t1/1]).
+
+t1(#{type := _Smth} = Map) ->
+ case Map of
+ #{type := a} -> ok;
+ #{type := b} -> error
+ end.
diff --git a/lib/dialyzer/test/map_SUITE_data/src/typeflow.erl b/lib/dialyzer/test/map_SUITE_data/src/typeflow.erl
new file mode 100644
index 0000000000..b43fd6897b
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/typeflow.erl
@@ -0,0 +1,25 @@
+-module(typeflow).
+
+-export([t1/1, t2/1, t3/1, t4/1]).
+
+t1(M = #{}) ->
+ a_is_integer(M),
+ case M of
+ #{a := X} when is_integer(X) -> ok;
+ _ -> fail
+ end.
+
+a_is_integer(#{a := X}) when is_integer(X) -> ok.
+
+t2(M = #{}) ->
+ a_is_integer(M),
+ lists:sort(maps:get(a, M)),
+ ok.
+
+t3(M = #{}) ->
+ lists:sort(maps:get(a, M)),
+ ok.
+
+t4(M) ->
+ lists:sort(maps:get(a, M)),
+ ok.
diff --git a/lib/dialyzer/test/map_SUITE_data/src/typeflow2.erl b/lib/dialyzer/test/map_SUITE_data/src/typeflow2.erl
new file mode 100644
index 0000000000..71a9657a60
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/typeflow2.erl
@@ -0,0 +1,88 @@
+-module(typeflow2).
+
+-export([t1/1, t2/1, t3/1, t4/1, t5/1, t6/1, t7/1, optional3/1]).
+
+t1(L) ->
+ M = only_integers_and_lists(L),
+ optional(M),
+ case M of
+ #{a := X} when is_integer(X) -> ok;
+ #{a := X} when is_list(X) -> ok; %% Must warn here
+ #{a := X} when is_pid(X) -> ok;
+ _ -> fail
+ end.
+
+optional(#{a:=X}) ->
+ true = is_integer(X);
+optional(#{}) ->
+ true.
+
+only_integers_and_lists(L) -> only_integers_and_lists(L, #{}).
+
+only_integers_and_lists([], M) -> M;
+only_integers_and_lists([{K,V}|T], M) when is_integer(V); is_list(V)->
+ only_integers_and_lists(T, M#{K => V}).
+
+t2(L) ->
+ M = only_integers_and_lists(L),
+ optional(M),
+ lists:sort(maps:get(a, M)),
+ ok.
+
+t3(L) ->
+ M = only_integers_and_lists(L),
+ lists:sort(maps:get(a, M)),
+ ok.
+
+t4(V) ->
+ M=map_with(a,V),
+ optional2(M),
+ case M of
+ #{a := X} when is_integer(X) -> ok;
+ #{a := X} when is_list(X) -> ok; %% Must warn here
+ _ -> fail
+ end.
+
+optional2(#{a:=X}) ->
+ true = is_integer(X);
+optional2(#{}) ->
+ true.
+
+map_with(K, V) when is_integer(V); is_list(V); is_atom(V) -> #{K => V}.
+
+t5(L) ->
+ M = only_integers_and_lists(L),
+ optional3(M),
+ case M of
+ #{a := X} when is_integer(X) -> ok;
+ #{a := X} when is_list(X) -> ok; %% Must warn here
+ #{a := X} when is_pid(X) -> ok; %% Must warn here
+ #{a := X} when is_atom(X) -> ok;
+ _ -> fail
+ end.
+
+t6(L) ->
+ M = only_integers_and_lists(L),
+ case M of
+ #{a := X} when is_integer(X) -> ok;
+ #{a := X} when is_list(X) -> ok; %% Must not warn here
+ _ -> fail
+ end.
+
+optional3(#{a:=X}) ->
+ true = is_integer(X);
+optional3(#{}) ->
+ true.
+
+t7(M) ->
+ optional4(M),
+ case M of
+ #{a := X} when is_integer(X) -> ok;
+ #{a := X} when is_list(X) -> ok;
+ #{a := X} when is_pid(X) -> ok; %% Must warn here
+ #{a := X} when is_atom(X) -> ok; %% Must warn here
+ _ -> fail %% Must not warn here (requires parsing)
+ end.
+
+-spec optional4(#{a=>integer()|list()}) -> true.
+optional4(#{}) -> true.
diff --git a/lib/dialyzer/test/map_SUITE_data/src/typesig.erl b/lib/dialyzer/test/map_SUITE_data/src/typesig.erl
new file mode 100644
index 0000000000..b50511af41
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/typesig.erl
@@ -0,0 +1,9 @@
+-module(typesig).
+
+-export([t1/0, t2/0, t3/0, test/1]).
+
+t1() -> test(#{a=>1}).
+t2() -> test(#{a=>{b}}).
+t3() -> test(#{a=>{3}}).
+
+test(#{a:={X}}) -> X+1.
diff --git a/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options b/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options
index 3ff26b87db..cb301ff6a1 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options
+++ b/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options
@@ -1 +1,2 @@
{dialyzer_options, [{warnings, [no_unused, no_return]}]}.
+{time_limit, 40}.
diff --git a/lib/dialyzer/test/opaque_SUITE_data/results/array b/lib/dialyzer/test/opaque_SUITE_data/results/array
index 9921b61669..6f1aa1ce3d 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/results/array
+++ b/lib/dialyzer/test/opaque_SUITE_data/results/array
@@ -1,3 +1,3 @@
-array_use.erl:12: The type test is_tuple(array:array(_)) breaks the opaqueness of the term array:array(_)
-array_use.erl:9: The attempt to match a term of type array:array(_) against the pattern {'array', _, _, 'undefined', _} breaks the opaqueness of the term
+array_use.erl:12: The type test is_tuple(array:array(_)) breaks the opacity of the term array:array(_)
+array_use.erl:9: The attempt to match a term of type array:array(_) against the pattern {'array', _, _, 'undefined', _} breaks the opacity of the term
diff --git a/lib/dialyzer/test/opaque_SUITE_data/results/crash b/lib/dialyzer/test/opaque_SUITE_data/results/crash
index 69bdc00257..d63389f79c 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/results/crash
+++ b/lib/dialyzer/test/opaque_SUITE_data/results/crash
@@ -1,6 +1,6 @@
-crash_1.erl:45: Record construction #targetlist{list::[]} violates the declared type of field list::'undefined' | crash_1:target()
-crash_1.erl:48: The call crash_1:get_using_branch2(Branch::maybe_improper_list(),L::'undefined' | crash_1:target()) will never return since it differs in the 2nd argument from the success typing arguments: (any(),maybe_improper_list())
-crash_1.erl:50: The pattern <_Branch, []> can never match the type <maybe_improper_list(),'undefined' | crash_1:target()>
-crash_1.erl:52: The pattern <Branch, [H = {'target', _, _} | _T]> can never match the type <maybe_improper_list(),'undefined' | crash_1:target()>
-crash_1.erl:54: The pattern <Branch, [{'target', _, _} | T]> can never match the type <maybe_improper_list(),'undefined' | crash_1:target()>
+crash_1.erl:45: Record construction #targetlist{list::[]} violates the declared type of field list::crash_1:target()
+crash_1.erl:48: The call crash_1:get_using_branch2(Branch::maybe_improper_list(),L::crash_1:target()) will never return since it differs in the 2nd argument from the success typing arguments: (any(),maybe_improper_list())
+crash_1.erl:50: The pattern <_Branch, []> can never match the type <maybe_improper_list(),crash_1:target()>
+crash_1.erl:52: The pattern <Branch, [H = {'target', _, _} | _T]> can never match the type <maybe_improper_list(),crash_1:target()>
+crash_1.erl:54: The pattern <Branch, [{'target', _, _} | T]> can never match the type <maybe_improper_list(),crash_1:target()>
diff --git a/lib/dialyzer/test/opaque_SUITE_data/results/dict b/lib/dialyzer/test/opaque_SUITE_data/results/dict
index 42f6663191..3f8242c72d 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/results/dict
+++ b/lib/dialyzer/test/opaque_SUITE_data/results/dict
@@ -1,15 +1,15 @@
-dict_use.erl:41: The attempt to match a term of type dict:dict(_,_) against the pattern 'gazonk' breaks the opaqueness of the term
-dict_use.erl:45: The attempt to match a term of type dict:dict(_,_) against the pattern [] breaks the opaqueness of the term
-dict_use.erl:46: The attempt to match a term of type dict:dict(_,_) against the pattern 42 breaks the opaqueness of the term
-dict_use.erl:51: The attempt to match a term of type dict:dict(_,_) against the pattern [] breaks the opaqueness of the term
-dict_use.erl:52: The attempt to match a term of type dict:dict(_,_) against the pattern 42 breaks the opaqueness of the term
+dict_use.erl:41: The attempt to match a term of type dict:dict(_,_) against the pattern 'gazonk' breaks the opacity of the term
+dict_use.erl:45: The attempt to match a term of type dict:dict(_,_) against the pattern [] breaks the opacity of the term
+dict_use.erl:46: The attempt to match a term of type dict:dict(_,_) against the pattern 42 breaks the opacity of the term
+dict_use.erl:51: The attempt to match a term of type dict:dict(_,_) against the pattern [] breaks the opacity of the term
+dict_use.erl:52: The attempt to match a term of type dict:dict(_,_) against the pattern 42 breaks the opacity of the term
dict_use.erl:58: Attempt to test for equality between a term of type maybe_improper_list() and a term of opaque type dict:dict(_,_)
dict_use.erl:60: Attempt to test for inequality between a term of type atom() and a term of opaque type dict:dict(_,_)
-dict_use.erl:64: Guard test length(D::dict:dict(_,_)) breaks the opaqueness of its argument
-dict_use.erl:65: Guard test is_atom(D::dict:dict(_,_)) breaks the opaqueness of its argument
-dict_use.erl:66: Guard test is_list(D::dict:dict(_,_)) breaks the opaqueness of its argument
-dict_use.erl:70: The type test is_list(dict:dict(_,_)) breaks the opaqueness of the term dict:dict(_,_)
+dict_use.erl:64: Guard test length(D::dict:dict(_,_)) breaks the opacity of its argument
+dict_use.erl:65: Guard test is_atom(D::dict:dict(_,_)) breaks the opacity of its argument
+dict_use.erl:66: Guard test is_list(D::dict:dict(_,_)) breaks the opacity of its argument
+dict_use.erl:70: The type test is_list(dict:dict(_,_)) breaks the opacity of the term dict:dict(_,_)
dict_use.erl:73: The call dict:fetch('foo',[1 | 2 | 3,...]) does not have an opaque term of type dict:dict(_,_) as 2nd argument
dict_use.erl:76: The call dict:merge(Fun::any(),42,[1 | 2,...]) does not have opaque terms as 2nd and 3rd arguments
dict_use.erl:79: The call dict:store(42,'elli',{'dict',0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}}) does not have an opaque term of type dict:dict(_,_) as 3rd argument
diff --git a/lib/dialyzer/test/opaque_SUITE_data/results/ets b/lib/dialyzer/test/opaque_SUITE_data/results/ets
index e11c7a8352..5dde23fb15 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/results/ets
+++ b/lib/dialyzer/test/opaque_SUITE_data/results/ets
@@ -1,4 +1,4 @@
-ets_use.erl:12: Guard test is_integer(T::atom() | ets:tid()) breaks the opaqueness of its argument
-ets_use.erl:20: The type test is_integer(atom() | ets:tid()) breaks the opaqueness of the term atom() | ets:tid()
-ets_use.erl:7: Guard test is_integer(T::ets:tid()) breaks the opaqueness of its argument
+ets_use.erl:12: Guard test is_integer(T::atom() | ets:tid()) breaks the opacity of its argument
+ets_use.erl:20: The type test is_integer(atom() | ets:tid()) breaks the opacity of the term atom() | ets:tid()
+ets_use.erl:7: Guard test is_integer(T::ets:tid()) breaks the opacity of its argument
diff --git a/lib/dialyzer/test/opaque_SUITE_data/results/my_queue b/lib/dialyzer/test/opaque_SUITE_data/results/my_queue
index 1f25a6f9c3..67999b0e20 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/results/my_queue
+++ b/lib/dialyzer/test/opaque_SUITE_data/results/my_queue
@@ -1,7 +1,7 @@
my_queue_use.erl:15: The call my_queue_adt:is_empty([]) does not have an opaque term of type my_queue_adt:my_queue() as 1st argument
my_queue_use.erl:19: The call my_queue_adt:add(42,Q0::[]) does not have an opaque term of type my_queue_adt:my_queue() as 2nd argument
-my_queue_use.erl:24: The attempt to match a term of type my_queue_adt:my_queue() against the pattern [42 | Q2] breaks the opaqueness of the term
+my_queue_use.erl:24: The attempt to match a term of type my_queue_adt:my_queue() against the pattern [42 | Q2] breaks the opacity of the term
my_queue_use.erl:30: Attempt to test for equality between a term of type [] and a term of opaque type my_queue_adt:my_queue()
my_queue_use.erl:34: Cons will produce an improper list since its 2nd argument is my_queue_adt:my_queue()
my_queue_use.erl:34: The call my_queue_adt:dequeue(nonempty_maybe_improper_list(42,my_queue_adt:my_queue())) does not have an opaque term of type my_queue_adt:my_queue() as 1st argument
diff --git a/lib/dialyzer/test/opaque_SUITE_data/results/opaque b/lib/dialyzer/test/opaque_SUITE_data/results/opaque
index 5747f9061f..864e0d853c 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/results/opaque
+++ b/lib/dialyzer/test/opaque_SUITE_data/results/opaque
@@ -1,3 +1,3 @@
opaque_bug3.erl:19: The pattern 'a' can never match the type #c{}
-opaque_bug4.erl:20: The attempt to match a term of type opaque_adt:abc() against the pattern 'a' breaks the opaqueness of the term
+opaque_bug4.erl:20: The attempt to match a term of type opaque_adt:abc() against the pattern 'a' breaks the opacity of the term
diff --git a/lib/dialyzer/test/opaque_SUITE_data/results/para b/lib/dialyzer/test/opaque_SUITE_data/results/para
index 8fe67e39ad..37b5b7b44e 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/results/para
+++ b/lib/dialyzer/test/opaque_SUITE_data/results/para
@@ -16,12 +16,12 @@ para2.erl:88: The test para2:circ(integer()) =:= para2:circ(integer(),integer())
para3.erl:28: Invalid type specification for function para3:ot2/0. The success typing is () -> 'foo'
para3.erl:36: The pattern {{{17}}} can never match the type {{{{{{_,_,_,_,_}}}}}}
para3.erl:55: Invalid type specification for function para3:t2/0. The success typing is () -> 'foo'
-para3.erl:65: The attempt to match a term of type {{{{{para3_adt:ot1(_,_,_,_,_)}}}}} against the pattern {{{{{17}}}}} breaks the opaqueness of para3_adt:ot1(_,_,_,_,_)
+para3.erl:65: The attempt to match a term of type {{{{{para3_adt:ot1(_,_,_,_,_)}}}}} against the pattern {{{{{17}}}}} breaks the opacity of para3_adt:ot1(_,_,_,_,_)
para3.erl:68: The pattern {{{{17}}}} can never match the type {{{{{para3_adt:ot1(_,_,_,_,_)}}}}}
-para3.erl:74: Invalid type specification for function para3:exp_adt/0. The success typing is () -> 3
-para4.erl:21: Invalid type specification for function para4:a/1. The success typing is (dict:dict(atom() | integer(),atom() | integer()) | para4:d_all()) -> [{atom() | integer(),atom() | integer()}]
-para4.erl:26: Invalid type specification for function para4:i/1. The success typing is (dict:dict(atom() | integer(),atom() | integer()) | para4:d_all()) -> [{atom() | integer(),atom() | integer()}]
-para4.erl:31: Invalid type specification for function para4:t/1. The success typing is (dict:dict(atom() | integer(),atom() | integer()) | para4:d_all()) -> [{atom() | integer(),atom() | integer()}]
+para3.erl:74: The specification for para3:exp_adt/0 has an opaque subtype para3_adt:exp1(para3_adt:exp2()) which is violated by the success typing () -> 3
+para4.erl:21: Invalid type specification for function para4:a/1. The success typing is (para4:d_all() | para4:d_atom()) -> [{atom() | integer(),atom() | integer()}]
+para4.erl:26: Invalid type specification for function para4:i/1. The success typing is (para4:d_all() | para4:d_integer()) -> [{atom() | integer(),atom() | integer()}]
+para4.erl:31: Invalid type specification for function para4:t/1. The success typing is (para4:d_all() | para4:d_tuple()) -> [{atom() | integer(),atom() | integer()}]
para4.erl:59: Attempt to test for equality between a term of type para4_adt:t(atom() | integer()) and a term of opaque type para4_adt:t(integer())
para4.erl:64: Attempt to test for equality between a term of type para4_adt:t(atom() | integer()) and a term of opaque type para4_adt:t(atom())
para4.erl:69: Attempt to test for equality between a term of type para4_adt:int(1 | 2 | 3 | 4) and a term of opaque type para4_adt:int(1 | 2)
diff --git a/lib/dialyzer/test/opaque_SUITE_data/results/queue b/lib/dialyzer/test/opaque_SUITE_data/results/queue
index 5b3813c418..9822b7168f 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/results/queue
+++ b/lib/dialyzer/test/opaque_SUITE_data/results/queue
@@ -1,11 +1,11 @@
queue_use.erl:18: The call queue:is_empty({[],[]}) does not have an opaque term of type queue:queue(_) as 1st argument
queue_use.erl:22: The call queue:in(42,Q0::{[],[]}) does not have an opaque term of type queue:queue(_) as 2nd argument
-queue_use.erl:27: The attempt to match a term of type queue:queue(_) against the pattern {"*", Q2} breaks the opaqueness of the term
+queue_use.erl:27: The attempt to match a term of type queue:queue(_) against the pattern {"*", Q2} breaks the opacity of the term
queue_use.erl:33: Attempt to test for equality between a term of type {[42,...],[]} and a term of opaque type queue:queue(_)
-queue_use.erl:36: The attempt to match a term of type queue:queue(_) against the pattern {F, _R} breaks the opaqueness of the term
+queue_use.erl:36: The attempt to match a term of type queue:queue(_) against the pattern {F, _R} breaks the opacity of the term
queue_use.erl:40: The call queue:out({[42,...],[]}) does not have an opaque term of type queue:queue(_) as 1st argument
queue_use.erl:51: The call queue_use:is_in_queue(E::42,DB::#db{p::[],q::queue:queue(_)}) contains an opaque term as 2nd argument when terms of different types are expected in these positions
-queue_use.erl:56: The attempt to match a term of type #db{p::[],q::queue:queue(_)} against the pattern {'db', _, {L1, L2}} breaks the opaqueness of queue:queue(_)
+queue_use.erl:56: The attempt to match a term of type #db{p::[],q::queue:queue(_)} against the pattern {'db', _, {L1, L2}} breaks the opacity of queue:queue(_)
queue_use.erl:62: The call queue_use:tuple_queue({42,'gazonk'}) does not have a term of type {_,queue:queue(_)} (with opaque subterms) as 1st argument
queue_use.erl:65: The call queue:in(F::42,Q::'gazonk') does not have an opaque term of type queue:queue(_) as 2nd argument
diff --git a/lib/dialyzer/test/opaque_SUITE_data/results/rec b/lib/dialyzer/test/opaque_SUITE_data/results/rec
index 72736b3b3c..e9b217a93f 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/results/rec
+++ b/lib/dialyzer/test/opaque_SUITE_data/results/rec
@@ -1,6 +1,6 @@
-rec_use.erl:17: The attempt to match a term of type rec_adt:rec() against the pattern {'rec', _, 42} breaks the opaqueness of the term
-rec_use.erl:18: Guard test tuple_size(R::rec_adt:rec()) breaks the opaqueness of its argument
+rec_use.erl:17: The attempt to match a term of type rec_adt:rec() against the pattern {'rec', _, 42} breaks the opacity of the term
+rec_use.erl:18: Guard test tuple_size(R::rec_adt:rec()) breaks the opacity of its argument
rec_use.erl:23: The call rec_adt:get_a(R::tuple()) does not have an opaque term of type rec_adt:rec() as 1st argument
rec_use.erl:27: Attempt to test for equality between a term of type {'rec','gazonk',42} and a term of opaque type rec_adt:rec()
rec_use.erl:30: The call erlang:tuple_size(rec_adt:rec()) contains an opaque term as 1st argument when a structured term of type tuple() is expected
diff --git a/lib/dialyzer/test/opaque_SUITE_data/results/simple b/lib/dialyzer/test/opaque_SUITE_data/results/simple
index 1a7a139d6e..5cd8916aee 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/results/simple
+++ b/lib/dialyzer/test/opaque_SUITE_data/results/simple
@@ -1,29 +1,29 @@
exact_api.erl:17: The call exact_api:set_type(A::#digraph{vtab::'notable',etab::'notable',ntab::'notable',cyclic::'true'}) does not have an opaque term of type digraph:graph() as 1st argument
exact_api.erl:23: The call digraph:delete(G::#digraph{vtab::'notable',etab::'notable',ntab::'notable',cyclic::'true'}) does not have an opaque term of type digraph:graph() as 1st argument
-exact_api.erl:55: The attempt to match a term of type exact_adt:exact_adt() against the pattern {'exact_adt'} breaks the opaqueness of the term
+exact_api.erl:55: The attempt to match a term of type exact_adt:exact_adt() against the pattern {'exact_adt'} breaks the opacity of the term
exact_api.erl:59: The call exact_adt:exact_adt_set_type2(A::#exact_adt{}) does not have an opaque term of type exact_adt:exact_adt() as 1st argument
is_rec.erl:10: The call erlang:is_record(simple1_adt:d1(),'r',2) contains an opaque term as 1st argument when terms of different types are expected in these positions
is_rec.erl:15: The call erlang:is_record(A::simple1_adt:d1(),'r',I::1 | 2 | 3) contains an opaque term as 1st argument when terms of different types are expected in these positions
-is_rec.erl:19: Guard test is_record(A::simple1_adt:d1(),'r',2) breaks the opaqueness of its argument
-is_rec.erl:23: Guard test is_record({simple1_adt:d1(),1},'r',2) breaks the opaqueness of its argument
+is_rec.erl:19: Guard test is_record(A::simple1_adt:d1(),'r',2) breaks the opacity of its argument
+is_rec.erl:23: Guard test is_record({simple1_adt:d1(),1},'r',2) breaks the opacity of its argument
is_rec.erl:41: The call erlang:is_record(A::simple1_adt:d1(),R::'a') contains an opaque term as 1st argument when terms of different types are expected in these positions
is_rec.erl:45: The call erlang:is_record(A::simple1_adt:d1(),A::simple1_adt:d1(),1) contains an opaque term as 2nd argument when terms of different types are expected in these positions
is_rec.erl:49: The call erlang:is_record(A::simple1_adt:d1(),any(),1) contains an opaque term as 1st argument when terms of different types are expected in these positions
is_rec.erl:53: The call erlang:is_record(A::simple1_adt:d1(),A::simple1_adt:d1(),any()) contains an opaque term as 2nd argument when terms of different types are expected in these positions
-is_rec.erl:57: Guard test is_record(A::simple1_adt:d1(),'r',2) breaks the opaqueness of its argument
+is_rec.erl:57: Guard test is_record(A::simple1_adt:d1(),'r',2) breaks the opacity of its argument
is_rec.erl:61: The record #r{f1::simple1_adt:d1()} violates the declared type for #r{}
is_rec.erl:65: The call erlang:is_record({simple1_adt:d1(),1},'r',2) contains an opaque term as 1st argument when terms of different types are expected in these positions
rec_api.erl:104: Matching of pattern {'r2', 10} tagged with a record name violates the declared type of #r2{f1::10}
-rec_api.erl:113: The attempt to match a term of type #r3{f1::queue:queue(_)} against the pattern {'r3', 'a'} breaks the opaqueness of queue:queue(_)
+rec_api.erl:113: The attempt to match a term of type #r3{f1::queue:queue(_)} against the pattern {'r3', 'a'} breaks the opacity of queue:queue(_)
rec_api.erl:118: Record construction #r3{f1::10} violates the declared type of field f1::queue:queue(_)
-rec_api.erl:123: The attempt to match a term of type #r3{f1::10} against the pattern {'r3', 10} breaks the opaqueness of queue:queue(_)
-rec_api.erl:24: Record construction #r1{f1::10} violates the declared type of field f1::'undefined' | rec_api:a()
+rec_api.erl:123: The attempt to match a term of type #r3{f1::10} against the pattern {'r3', 10} breaks the opacity of queue:queue(_)
+rec_api.erl:24: Record construction #r1{f1::10} violates the declared type of field f1::rec_api:a()
rec_api.erl:29: Matching of pattern {'r1', 10} tagged with a record name violates the declared type of #r1{f1::10}
-rec_api.erl:33: The attempt to match a term of type rec_adt:r1() against the pattern {'r1', 'a'} breaks the opaqueness of the term
+rec_api.erl:33: The attempt to match a term of type rec_adt:r1() against the pattern {'r1', 'a'} breaks the opacity of the term
rec_api.erl:35: Invalid type specification for function rec_api:adt_t1/1. The success typing is (#r1{f1::'a'}) -> #r1{f1::'a'}
-rec_api.erl:40: Invalid type specification for function rec_api:adt_r1/0. The success typing is () -> #r1{f1::'a'}
-rec_api.erl:85: The attempt to match a term of type rec_api:f() against the variable _ breaks the opaqueness of rec_adt:f()
+rec_api.erl:40: The specification for rec_api:adt_r1/0 has an opaque subtype rec_adt:r1() which is violated by the success typing () -> #r1{f1::'a'}
+rec_api.erl:85: The attempt to match a term of type rec_adt:f() against the record field 'f' declared to be of type rec_api:f() breaks the opacity of the term
rec_api.erl:99: Record construction #r2{f1::10} violates the declared type of field f1::rec_api:a()
simple1_api.erl:113: The test simple1_api:d1() =:= simple1_api:d2() can never evaluate to 'true'
simple1_api.erl:118: Guard test simple1_api:d2() =:= A::simple1_api:d1() can never succeed
@@ -35,20 +35,20 @@ simple1_api.erl:165: Attempt to test for equality between a term of type simple1
simple1_api.erl:181: Guard test A::simple1_adt:d1() =< B::simple1_adt:d2() contains opaque terms as 1st and 2nd arguments
simple1_api.erl:185: Guard test 'a' =< B::simple1_adt:d2() contains an opaque term as 2nd argument
simple1_api.erl:189: Guard test A::simple1_adt:d1() =< 'd' contains an opaque term as 1st argument
-simple1_api.erl:197: The type test is_integer(A::simple1_adt:d1()) breaks the opaqueness of the term A::simple1_adt:d1()
+simple1_api.erl:197: The type test is_integer(A::simple1_adt:d1()) breaks the opacity of the term A::simple1_adt:d1()
simple1_api.erl:221: Guard test A::simple1_api:i1() > 3 can never succeed
simple1_api.erl:225: Guard test A::simple1_adt:i1() > 3 contains an opaque term as 1st argument
simple1_api.erl:233: Guard test A::simple1_adt:i1() < 3 contains an opaque term as 1st argument
simple1_api.erl:239: Guard test A::1 > 3 can never succeed
simple1_api.erl:243: Guard test A::1 > 3 can never succeed
simple1_api.erl:257: Guard test is_function(T::simple1_api:o1()) can never succeed
-simple1_api.erl:265: Guard test is_function(T::simple1_adt:o1()) breaks the opaqueness of its argument
-simple1_api.erl:269: The type test is_function(T::simple1_adt:o1()) breaks the opaqueness of the term T::simple1_adt:o1()
+simple1_api.erl:265: Guard test is_function(T::simple1_adt:o1()) breaks the opacity of its argument
+simple1_api.erl:269: The type test is_function(T::simple1_adt:o1()) breaks the opacity of the term T::simple1_adt:o1()
simple1_api.erl:274: Guard test is_function(T::simple1_api:o1(),A::simple1_api:i1()) can never succeed
-simple1_api.erl:284: Guard test is_function(T::simple1_adt:o1(),A::simple1_adt:i1()) breaks the opaqueness of its argument
-simple1_api.erl:289: The type test is_function(T::simple1_adt:o1(),A::simple1_adt:i1()) breaks the opaqueness of the term T::simple1_adt:o1()
+simple1_api.erl:284: Guard test is_function(T::simple1_adt:o1(),A::simple1_adt:i1()) breaks the opacity of its argument
+simple1_api.erl:289: The type test is_function(T::simple1_adt:o1(),A::simple1_adt:i1()) breaks the opacity of the term T::simple1_adt:o1()
simple1_api.erl:294: The call erlang:is_function(T::simple1_api:o1(),A::simple1_adt:i1()) contains an opaque term as 2nd argument when terms of different types are expected in these positions
-simple1_api.erl:300: The type test is_function(T::simple1_adt:o1(),A::simple1_api:i1()) breaks the opaqueness of the term T::simple1_adt:o1()
+simple1_api.erl:300: The type test is_function(T::simple1_adt:o1(),A::simple1_api:i1()) breaks the opacity of the term T::simple1_adt:o1()
simple1_api.erl:306: Guard test B::simple1_api:b2() =:= 'true' can never succeed
simple1_api.erl:315: Guard test A::simple1_api:b1() =:= 'false' can never succeed
simple1_api.erl:319: Guard test not('and'('true','true')) can never succeed
@@ -60,18 +60,18 @@ simple1_api.erl:365: Clause guard cannot succeed.
simple1_api.erl:368: Invalid type specification for function simple1_api:bool_adt_t8/2. The success typing is (boolean(),boolean()) -> 1
simple1_api.erl:378: Clause guard cannot succeed.
simple1_api.erl:381: Invalid type specification for function simple1_api:bool_adt_t9/2. The success typing is ('false','false') -> 1
-simple1_api.erl:407: The size simple1_adt:i1() breaks the opaqueness of A
-simple1_api.erl:418: The attempt to match a term of type non_neg_integer() against the variable A breaks the opaqueness of simple1_adt:i1()
-simple1_api.erl:425: The attempt to match a term of type non_neg_integer() against the variable B breaks the opaqueness of simple1_adt:i1()
+simple1_api.erl:407: The size simple1_adt:i1() breaks the opacity of A
+simple1_api.erl:418: The attempt to match a term of type non_neg_integer() against the variable A breaks the opacity of simple1_adt:i1()
+simple1_api.erl:425: The attempt to match a term of type non_neg_integer() against the variable B breaks the opacity of simple1_adt:i1()
simple1_api.erl:432: The pattern <<_:B/integer-unit:1>> can never match the type any()
-simple1_api.erl:448: The attempt to match a term of type non_neg_integer() against the variable Sz breaks the opaqueness of simple1_adt:i1()
-simple1_api.erl:460: The attempt to match a term of type simple1_adt:bit1() against the pattern <<_/binary-unit:8>> breaks the opaqueness of the term
-simple1_api.erl:478: The call 'foo':A(A::simple1_adt:a()) breaks the opaqueness of the term A :: simple1_adt:a()
-simple1_api.erl:486: The call A:'foo'(A::simple1_adt:a()) breaks the opaqueness of the term A :: simple1_adt:a()
+simple1_api.erl:448: The attempt to match a term of type non_neg_integer() against the variable Sz breaks the opacity of simple1_adt:i1()
+simple1_api.erl:460: The attempt to match a term of type simple1_adt:bit1() against the pattern <<_/binary-unit:8>> breaks the opacity of the term
+simple1_api.erl:478: The call 'foo':A(A::simple1_adt:a()) breaks the opacity of the term A :: simple1_adt:a()
+simple1_api.erl:486: The call A:'foo'(A::simple1_adt:a()) breaks the opacity of the term A :: simple1_adt:a()
simple1_api.erl:499: The call 'foo':A(A::simple1_api:i()) requires that A is of type atom() not simple1_api:i()
simple1_api.erl:503: The call 'foo':A(A::simple1_adt:i()) requires that A is of type atom() not simple1_adt:i()
-simple1_api.erl:507: The call A:'foo'(A::simple1_api:i()) requires that A is of type atom() | tuple() not simple1_api:i()
-simple1_api.erl:511: The call A:'foo'(A::simple1_adt:i()) requires that A is of type atom() | tuple() not simple1_adt:i()
+simple1_api.erl:507: The call A:'foo'(A::simple1_api:i()) requires that A is of type atom() not simple1_api:i()
+simple1_api.erl:511: The call A:'foo'(A::simple1_adt:i()) requires that A is of type atom() not simple1_adt:i()
simple1_api.erl:519: Guard test A::simple1_adt:d2() == B::simple1_adt:d1() contains opaque terms as 1st and 2nd arguments
simple1_api.erl:534: Guard test A::simple1_adt:d1() >= 3 contains an opaque term as 1st argument
simple1_api.erl:536: Guard test A::simple1_adt:d1() == 3 contains an opaque term as 1st argument
@@ -79,7 +79,7 @@ simple1_api.erl:538: Guard test A::simple1_adt:d1() =:= 3 contains an opaque ter
simple1_api.erl:548: The call erlang:'<'(A::simple1_adt:d1(),3) contains an opaque term as 1st argument when terms of different types are expected in these positions
simple1_api.erl:558: The call erlang:'=<'(A::simple1_adt:d1(),B::simple1_adt:d2()) contains opaque terms as 1st and 2nd arguments when terms of different types are expected in these positions
simple1_api.erl:565: Guard test {digraph:graph(),3} > {digraph:graph(),atom() | ets:tid()} contains an opaque term as 2nd argument
-simple1_api.erl:91: Invalid type specification for function simple1_api:tup/0. The success typing is () -> {'a','b'}
+simple1_api.erl:91: The specification for simple1_api:tup/0 has an opaque subtype simple1_adt:tuple1() which is violated by the success typing () -> {'a','b'}
simple2_api.erl:100: The call lists:flatten(A::simple1_adt:tuple1()) contains an opaque term as 1st argument when a structured term of type [any()] is expected
simple2_api.erl:116: The call lists:flatten({simple1_adt:tuple1()}) will never return since it differs in the 1st argument from the success typing arguments: ([any()])
simple2_api.erl:121: Guard test {simple1_adt:d1(),3} > {simple1_adt:d1(),simple1_adt:tuple1()} contains an opaque term as 2nd argument
diff --git a/lib/dialyzer/test/opaque_SUITE_data/results/timer b/lib/dialyzer/test/opaque_SUITE_data/results/timer
index b1cfcd4e9f..46c5a86307 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/results/timer
+++ b/lib/dialyzer/test/opaque_SUITE_data/results/timer
@@ -1,4 +1,4 @@
timer_use.erl:16: The pattern 'gazonk' can never match the type {'error',_} | {'ok',timer:tref()}
-timer_use.erl:17: The attempt to match a term of type {'error',_} | {'ok',timer:tref()} against the pattern {'ok', 42} breaks the opaqueness of timer:tref()
-timer_use.erl:18: The attempt to match a term of type {'error',_} | {'ok',timer:tref()} against the pattern {Tag, 'gazonk'} breaks the opaqueness of timer:tref()
+timer_use.erl:17: The attempt to match a term of type {'error',_} | {'ok',timer:tref()} against the pattern {'ok', 42} breaks the opacity of timer:tref()
+timer_use.erl:18: The attempt to match a term of type {'error',_} | {'ok',timer:tref()} against the pattern {Tag, 'gazonk'} breaks the opacity of timer:tref()
diff --git a/lib/dialyzer/test/opaque_SUITE_data/results/union b/lib/dialyzer/test/opaque_SUITE_data/results/union
index 98829b424a..8763088bf0 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/results/union
+++ b/lib/dialyzer/test/opaque_SUITE_data/results/union
@@ -1,5 +1,5 @@
-union_use.erl:12: The attempt to match a term of type union_adt:u() against the pattern 'aaa' breaks the opaqueness of the term
-union_use.erl:16: The type test is_tuple(union_adt:u()) breaks the opaqueness of the term union_adt:u()
-union_use.erl:7: Guard test is_atom(A::union_adt:u()) breaks the opaqueness of its argument
-union_use.erl:8: Guard test is_tuple(T::union_adt:u()) breaks the opaqueness of its argument
+union_use.erl:12: The attempt to match a term of type union_adt:u() against the pattern 'aaa' breaks the opacity of the term
+union_use.erl:16: The type test is_tuple(union_adt:u()) breaks the opacity of the term union_adt:u()
+union_use.erl:7: Guard test is_atom(A::union_adt:u()) breaks the opacity of its argument
+union_use.erl:8: Guard test is_tuple(T::union_adt:u()) breaks the opacity of its argument
diff --git a/lib/dialyzer/test/opaque_SUITE_data/results/weird b/lib/dialyzer/test/opaque_SUITE_data/results/weird
new file mode 100644
index 0000000000..d7f57cd152
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/results/weird
@@ -0,0 +1,6 @@
+
+weird_warning1.erl:15: Matching of pattern {'a', Dict} tagged with a record name violates the declared type of #b{q::queue:queue(_)}
+weird_warning2.erl:13: Matching of pattern <{'b', Queue}, Key, Value> tagged with a record name violates the declared type of <#a{d::dict:dict(_,_)},'my_key','my_value'>
+weird_warning3.erl:14: The call weird_warning3:add_element(#a{d::queue:queue(_)},'my_key','my_value') does not have a term of type #a{d::dict:dict(_,_)} | #b{q::queue:queue(_)} (with opaque subterms) as 1st argument
+weird_warning3.erl:16: The attempt to match a term of type #a{d::queue:queue(_)} against the pattern {'a', Dict} breaks the opacity of queue:queue(_)
+weird_warning3.erl:18: Matching of pattern {'b', Queue} tagged with a record name violates the declared type of #a{d::queue:queue(_)}
diff --git a/lib/dialyzer/test/opaque_SUITE_data/results/wings b/lib/dialyzer/test/opaque_SUITE_data/results/wings
index 511263b70a..391501d86f 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/results/wings
+++ b/lib/dialyzer/test/opaque_SUITE_data/results/wings
@@ -1,11 +1,11 @@
-wings_dissolve.erl:103: Guard test is_list(List::gb_sets:set(_)) breaks the opaqueness of its argument
-wings_dissolve.erl:19: Guard test is_list(Faces::gb_sets:set(_)) breaks the opaqueness of its argument
-wings_dissolve.erl:272: Guard test is_list(Faces::gb_sets:set(_)) breaks the opaqueness of its argument
+wings_dissolve.erl:103: Guard test is_list(List::gb_sets:set(_)) breaks the opacity of its argument
+wings_dissolve.erl:19: Guard test is_list(Faces::gb_sets:set(_)) breaks the opacity of its argument
+wings_dissolve.erl:272: Guard test is_list(Faces::gb_sets:set(_)) breaks the opacity of its argument
wings_dissolve.erl:31: The call gb_sets:is_empty(Faces::[any(),...]) does not have an opaque term of type gb_sets:set(_) as 1st argument
wings_edge.erl:205: The pattern <Edge, 'hard', Htab> can never match the type <_,'soft',_>
wings_edge_cmd.erl:30: The call gb_trees:size(P::gb_sets:set(_)) does not have an opaque term of type gb_trees:tree(_,_) as 1st argument
wings_edge_cmd.erl:32: The pattern [_ | Parts] can never match the type []
wings_edge_cmd.erl:32: The pattern [{_, P} | _] can never match the type []
-wings_io.erl:30: The attempt to match a term of type {'empty',queue:queue(_)} against the pattern {'empty', {In, Out}} breaks the opaqueness of queue:queue(_)
+wings_io.erl:30: The attempt to match a term of type {'empty',queue:queue(_)} against the pattern {'empty', {In, Out}} breaks the opacity of queue:queue(_)
wings_we.erl:155: The call wings_util:gb_trees_largest_key(Etab::gb_trees:tree(_,_)) contains an opaque term as 1st argument when a structured term of type {_,{_,_,_,'nil' | {_,_,_,'nil' | {_,_,_,_}}}} is expected
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/big_external_type.erl b/lib/dialyzer/test/opaque_SUITE_data/src/big_external_type.erl
new file mode 100644
index 0000000000..d286a378ed
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/big_external_type.erl
@@ -0,0 +1,526 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%% A copy of small_SUITE_data/src/big_external_type.erl, where
+%%% abstract_expr() is opaque. The transformation of forms to types is
+%%% now much faster than it used to be, for this module.
+
+-module(big_external_type).
+
+-export([parse_form/1,parse_exprs/1,parse_term/1]).
+-export([normalise/1,tokens/1,tokens/2]).
+-export([inop_prec/1,preop_prec/1,func_prec/0,max_prec/0]).
+
+-export_type([abstract_clause/0, abstract_expr/0, abstract_form/0,
+ error_info/0]).
+
+%% Start of Abstract Format
+
+-type line() :: erl_anno:line().
+
+-export_type([af_record_index/0, af_record_field/1, af_record_name/0,
+ af_field_name/0, af_function_decl/0]).
+
+-export_type([af_module/0, af_export/0, af_import/0, af_fa_list/0,
+ af_compile/0, af_file/0, af_record_decl/0,
+ af_field_decl/0, af_wild_attribute/0,
+ af_record_update/1, af_catch/0, af_local_call/0,
+ af_remote_call/0, af_args/0, af_local_function/0,
+ af_remote_function/0, af_list_comprehension/0,
+ af_binary_comprehension/0, af_template/0,
+ af_qualifier_seq/0, af_qualifier/0, af_generator/0,
+ af_filter/0, af_block/0, af_if/0, af_case/0, af_try/0,
+ af_clause_seq/0, af_catch_clause_seq/0, af_receive/0,
+ af_local_fun/0, af_remote_fun/0, af_fun/0, af_query/0,
+ af_query_access/0, af_clause/0,
+ af_catch_clause/0, af_catch_pattern/0, af_catch_class/0,
+ af_body/0, af_guard_seq/0, af_guard/0, af_guard_test/0,
+ af_record_access/1, af_guard_call/0,
+ af_remote_guard_call/0, af_pattern/0, af_literal/0,
+ af_atom/0, af_lit_atom/1, af_integer/0, af_float/0,
+ af_string/0, af_match/1, af_variable/0,
+ af_anon_variable/0, af_tuple/1, af_nil/0, af_cons/1,
+ af_bin/1, af_binelement/1, af_binelement_size/0,
+ af_binary_op/1, af_binop/0, af_unary_op/1, af_unop/0]).
+
+-type abstract_form() :: ?MODULE:af_module()
+ | ?MODULE:af_export()
+ | ?MODULE:af_import()
+ | ?MODULE:af_compile()
+ | ?MODULE:af_file()
+ | ?MODULE:af_record_decl()
+ | ?MODULE:af_wild_attribute()
+ | ?MODULE:af_function_decl().
+
+-type af_module() :: {attribute, line(), module, module()}.
+
+-type af_export() :: {attribute, line(), export, ?MODULE:af_fa_list()}.
+
+-type af_import() :: {attribute, line(), import, ?MODULE:af_fa_list()}.
+
+-type af_fa_list() :: [{function(), arity()}].
+
+-type af_compile() :: {attribute, line(), compile, any()}.
+
+-type af_file() :: {attribute, line(), file, {string(), line()}}.
+
+-type af_record_decl() ::
+ {attribute, line(), record, ?MODULE:af_record_name(), [?MODULE:af_field_decl()]}.
+
+-type af_field_decl() :: {record_field, line(), ?MODULE:af_atom()}
+ | {record_field, line(), ?MODULE:af_atom(), ?MODULE:abstract_expr()}.
+
+%% Types and specs, among other things...
+-type af_wild_attribute() :: {attribute, line(), ?MODULE:af_atom(), any()}.
+
+-type af_function_decl() ::
+ {function, line(), function(), arity(), ?MODULE:af_clause_seq()}.
+
+-opaque abstract_expr() :: ?MODULE:af_literal()
+ | ?MODULE:af_match(?MODULE:abstract_expr())
+ | ?MODULE:af_variable()
+ | ?MODULE:af_tuple(?MODULE:abstract_expr())
+ | ?MODULE:af_nil()
+ | ?MODULE:af_cons(?MODULE:abstract_expr())
+ | ?MODULE:af_bin(?MODULE:abstract_expr())
+ | ?MODULE:af_binary_op(?MODULE:abstract_expr())
+ | ?MODULE:af_unary_op(?MODULE:abstract_expr())
+ | ?MODULE:af_record_access(?MODULE:abstract_expr())
+ | ?MODULE:af_record_update(?MODULE:abstract_expr())
+ | ?MODULE:af_record_index()
+ | ?MODULE:af_record_field(?MODULE:abstract_expr())
+ | ?MODULE:af_catch()
+ | ?MODULE:af_local_call()
+ | ?MODULE:af_remote_call()
+ | ?MODULE:af_list_comprehension()
+ | ?MODULE:af_binary_comprehension()
+ | ?MODULE:af_block()
+ | ?MODULE:af_if()
+ | ?MODULE:af_case()
+ | ?MODULE:af_try()
+ | ?MODULE:af_receive()
+ | ?MODULE:af_local_fun()
+ | ?MODULE:af_remote_fun()
+ | ?MODULE:af_fun()
+ | ?MODULE:af_query()
+ | ?MODULE:af_query_access().
+
+-type af_record_update(T) :: {record,
+ line(),
+ ?MODULE:abstract_expr(),
+ ?MODULE:af_record_name(),
+ [?MODULE:af_record_field(T)]}.
+
+-type af_catch() :: {'catch', line(), ?MODULE:abstract_expr()}.
+
+-type af_local_call() :: {call, line(), ?MODULE:af_local_function(), ?MODULE:af_args()}.
+
+-type af_remote_call() :: {call, line(), ?MODULE:af_remote_function(), ?MODULE:af_args()}.
+
+-type af_args() :: [?MODULE:abstract_expr()].
+
+-type af_local_function() :: ?MODULE:abstract_expr().
+
+-type af_remote_function() ::
+ {remote, line(), ?MODULE:abstract_expr(), ?MODULE:abstract_expr()}.
+
+-type af_list_comprehension() ::
+ {lc, line(), ?MODULE:af_template(), ?MODULE:af_qualifier_seq()}.
+
+-type af_binary_comprehension() ::
+ {bc, line(), ?MODULE:af_template(), ?MODULE:af_qualifier_seq()}.
+
+-type af_template() :: ?MODULE:abstract_expr().
+
+-type af_qualifier_seq() :: [?MODULE:af_qualifier()].
+
+-type af_qualifier() :: ?MODULE:af_generator() | ?MODULE:af_filter().
+
+-type af_generator() :: {generate, line(), ?MODULE:af_pattern(), ?MODULE:abstract_expr()}
+ | {b_generate, line(), ?MODULE:af_pattern(), ?MODULE:abstract_expr()}.
+
+-type af_filter() :: ?MODULE:abstract_expr().
+
+-type af_block() :: {block, line(), ?MODULE:af_body()}.
+
+-type af_if() :: {'if', line(), ?MODULE:af_clause_seq()}.
+
+-type af_case() :: {'case', line(), ?MODULE:abstract_expr(), ?MODULE:af_clause_seq()}.
+
+-type af_try() :: {'try',
+ line(),
+ ?MODULE:af_body(),
+ ?MODULE:af_clause_seq(),
+ ?MODULE:af_catch_clause_seq(),
+ ?MODULE:af_body()}.
+
+-type af_clause_seq() :: [?MODULE:af_clause(), ...].
+
+-type af_catch_clause_seq() :: [?MODULE:af_clause(), ...].
+
+-type af_receive() ::
+ {'receive', line(), ?MODULE:af_clause_seq()}
+ | {'receive', line(), ?MODULE:af_clause_seq(), ?MODULE:abstract_expr(), ?MODULE:af_body()}.
+
+-type af_local_fun() :: {'fun', line(), {function, function(), arity()}}.
+
+-type af_remote_fun() ::
+ {'fun', line(), {function, module(), function(), arity()}}
+ | {'fun', line(), {function, ?MODULE:af_atom(), ?MODULE:af_atom(), ?MODULE:af_integer()}}.
+
+-type af_fun() :: {'fun', line(), {clauses, ?MODULE:af_clause_seq()}}.
+
+-type af_query() :: {'query', line(), ?MODULE:af_list_comprehension()}.
+
+-type af_query_access() ::
+ {record_field, line(), ?MODULE:abstract_expr(), ?MODULE:af_field_name()}.
+
+-type abstract_clause() :: ?MODULE:af_clause() | ?MODULE:af_catch_clause().
+
+-type af_clause() ::
+ {clause, line(), [?MODULE:af_pattern()], ?MODULE:af_guard_seq(), ?MODULE:af_body()}.
+
+-type af_catch_clause() ::
+ {clause, line(), [?MODULE:af_catch_pattern()], ?MODULE:af_guard_seq(), ?MODULE:af_body()}.
+
+-type af_catch_pattern() ::
+ {?MODULE:af_catch_class(), ?MODULE:af_pattern(), ?MODULE:af_anon_variable()}.
+
+-type af_catch_class() ::
+ ?MODULE:af_variable()
+ | ?MODULE:af_lit_atom(throw) | ?MODULE:af_lit_atom(error) | ?MODULE:af_lit_atom(exit).
+
+-type af_body() :: [?MODULE:abstract_expr(), ...].
+
+-type af_guard_seq() :: [?MODULE:af_guard()].
+
+-type af_guard() :: [?MODULE:af_guard_test(), ...].
+
+-type af_guard_test() :: ?MODULE:af_literal()
+ | ?MODULE:af_variable()
+ | ?MODULE:af_tuple(?MODULE:af_guard_test())
+ | ?MODULE:af_nil()
+ | ?MODULE:af_cons(?MODULE:af_guard_test())
+ | ?MODULE:af_bin(?MODULE:af_guard_test())
+ | ?MODULE:af_binary_op(?MODULE:af_guard_test())
+ | ?MODULE:af_unary_op(?MODULE:af_guard_test())
+ | ?MODULE:af_record_access(?MODULE:af_guard_test())
+ | ?MODULE:af_record_index()
+ | ?MODULE:af_record_field(?MODULE:af_guard_test())
+ | ?MODULE:af_guard_call()
+ | ?MODULE:af_remote_guard_call().
+
+-type af_record_access(T) ::
+ {record, line(), ?MODULE:af_record_name(), [?MODULE:af_record_field(T)]}.
+
+-type af_guard_call() :: {call, line(), function(), [?MODULE:af_guard_test()]}.
+
+-type af_remote_guard_call() ::
+ {call, line(), atom(), ?MODULE:af_lit_atom(erlang), [?MODULE:af_guard_test()]}.
+
+-type af_pattern() :: ?MODULE:af_literal()
+ | ?MODULE:af_match(?MODULE:af_pattern())
+ | ?MODULE:af_variable()
+ | ?MODULE:af_anon_variable()
+ | ?MODULE:af_tuple(?MODULE:af_pattern())
+ | ?MODULE:af_nil()
+ | ?MODULE:af_cons(?MODULE:af_pattern())
+ | ?MODULE:af_bin(?MODULE:af_pattern())
+ | ?MODULE:af_binary_op(?MODULE:af_pattern())
+ | ?MODULE:af_unary_op(?MODULE:af_pattern())
+ | ?MODULE:af_record_index()
+ | ?MODULE:af_record_field(?MODULE:af_pattern()).
+
+-type af_literal() :: ?MODULE:af_atom() | ?MODULE:af_integer() | ?MODULE:af_float() | ?MODULE:af_string().
+
+-type af_atom() :: ?MODULE:af_lit_atom(atom()).
+
+-type af_lit_atom(A) :: {atom, line(), A}.
+
+-type af_integer() :: {integer, line(), non_neg_integer()}.
+
+-type af_float() :: {float, line(), float()}.
+
+-type af_string() :: {string, line(), [byte()]}.
+
+-type af_match(T) :: {match, line(), T, T}.
+
+-type af_variable() :: {var, line(), atom()}.
+
+-type af_anon_variable() :: {var, line(), '_'}.
+
+-type af_tuple(T) :: {tuple, line(), [T]}.
+
+-type af_nil() :: {nil, line()}.
+
+-type af_cons(T) :: {cons, line(), T, T}.
+
+-type af_bin(T) :: {bin, line(), [?MODULE:af_binelement(T)]}.
+
+-type af_binelement(T) :: {bin_element,
+ line(),
+ T,
+ ?MODULE:af_binelement_size(),
+ type_specifier_list()}.
+
+-type af_binelement_size() :: default | ?MODULE:abstract_expr().
+
+-type af_binary_op(T) :: {op, line(), T, ?MODULE:af_binop(), T}.
+
+-type af_binop() :: '/' | '*' | 'div' | 'rem' | 'band' | 'and' | '+' | '-'
+ | 'bor' | 'bxor' | 'bsl' | 'bsr' | 'or' | 'xor' | '++'
+ | '--' | '==' | '/=' | '=<' | '<' | '>=' | '>' | '=:='
+ | '=/='.
+
+-type af_unary_op(T) :: {op, line(), ?MODULE:af_unop(), T}.
+
+-type af_unop() :: '+' | '*' | 'bnot' | 'not'.
+
+%% See also lib/stdlib/{src/erl_bits.erl,include/erl_bits.hrl}.
+-type type_specifier_list() :: default | [type_specifier(), ...].
+
+-type type_specifier() :: af_type()
+ | af_signedness()
+ | af_endianness()
+ | af_unit().
+
+-type af_type() :: integer
+ | float
+ | binary
+ | bytes
+ | bitstring
+ | bits
+ | utf8
+ | utf16
+ | utf32.
+
+-type af_signedness() :: signed | unsigned.
+
+-type af_endianness() :: big | little | native.
+
+-type af_unit() :: {unit, 1..256}.
+
+-type af_record_index() ::
+ {record_index, line(), af_record_name(), af_field_name()}.
+
+-type af_record_field(T) :: {record_field, line(), af_field_name(), T}.
+
+-type af_record_name() :: atom().
+
+-type af_field_name() :: atom().
+
+%% End of Abstract Format
+
+-type error_description() :: term().
+-type error_info() :: {erl_anno:line(), module(), error_description()}.
+-type token() :: {Tag :: atom(), Line :: erl_anno:line()}.
+
+%% mkop(Op, Arg) -> {op,Line,Op,Arg}.
+%% mkop(Left, Op, Right) -> {op,Line,Op,Left,Right}.
+
+-define(mkop2(L, OpPos, R),
+ begin
+ {Op,Pos} = OpPos,
+ {op,Pos,Op,L,R}
+ end).
+
+-define(mkop1(OpPos, A),
+ begin
+ {Op,Pos} = OpPos,
+ {op,Pos,Op,A}
+ end).
+
+%% keep track of line info in tokens
+-define(line(Tup), element(2, Tup)).
+
+%% Entry points compatible to old erl_parse.
+%% These really suck and are only here until Calle gets multiple
+%% entry points working.
+
+-spec parse_form(Tokens) -> {ok, AbsForm} | {error, ErrorInfo} when
+ Tokens :: [token()],
+ AbsForm :: abstract_form(),
+ ErrorInfo :: error_info().
+parse_form([{'-',L1},{atom,L2,spec}|Tokens]) ->
+ parse([{'-',L1},{'spec',L2}|Tokens]);
+parse_form([{'-',L1},{atom,L2,callback}|Tokens]) ->
+ parse([{'-',L1},{'callback',L2}|Tokens]);
+parse_form(Tokens) ->
+ parse(Tokens).
+
+-spec parse_exprs(Tokens) -> {ok, ExprList} | {error, ErrorInfo} when
+ Tokens :: [token()],
+ ExprList :: [abstract_expr()],
+ ErrorInfo :: error_info().
+parse_exprs(Tokens) ->
+ case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of
+ {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],Exprs}]}} ->
+ {ok,Exprs};
+ {error,_} = Err -> Err
+ end.
+
+-spec parse_term(Tokens) -> {ok, Term} | {error, ErrorInfo} when
+ Tokens :: [token()],
+ Term :: term(),
+ ErrorInfo :: error_info().
+parse_term(Tokens) ->
+ case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of
+ {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[Expr]}]}} ->
+ try normalise(Expr) of
+ Term -> {ok,Term}
+ catch
+ _:_R -> {error,{?line(Expr),?MODULE,"bad term"}}
+ end;
+ {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[_E1,E2|_Es]}]}} ->
+ {error,{?line(E2),?MODULE,"bad term"}};
+ {error,_} = Err -> Err
+ end.
+
+%% Convert between the abstract form of a term and a term.
+
+-spec normalise(AbsTerm) -> Data when
+ AbsTerm :: abstract_expr(),
+ Data :: term().
+normalise({char,_,C}) -> C;
+normalise({integer,_,I}) -> I;
+normalise({float,_,F}) -> F;
+normalise({atom,_,A}) -> A;
+normalise({string,_,S}) -> S;
+normalise({nil,_}) -> [];
+normalise({bin,_,Fs}) ->
+ {value, B, _} =
+ eval_bits:expr_grp(Fs, [],
+ fun(E, _) ->
+ {value, normalise(E), []}
+ end, [], true),
+ B;
+normalise({cons,_,Head,Tail}) ->
+ [normalise(Head)|normalise(Tail)];
+normalise({tuple,_,Args}) ->
+ list_to_tuple(normalise_list(Args));
+%% Atom dot-notation, as in 'foo.bar.baz'
+%% Special case for unary +/-.
+normalise({op,_,'+',{char,_,I}}) -> I;
+normalise({op,_,'+',{integer,_,I}}) -> I;
+normalise({op,_,'+',{float,_,F}}) -> F;
+normalise({op,_,'-',{char,_,I}}) -> -I; %Weird, but compatible!
+normalise({op,_,'-',{integer,_,I}}) -> -I;
+normalise({op,_,'-',{float,_,F}}) -> -F;
+normalise(X) -> erlang:error({badarg, X}).
+
+normalise_list([H|T]) ->
+ [normalise(H)|normalise_list(T)];
+normalise_list([]) ->
+ [].
+
+%% Generate a list of tokens representing the abstract term.
+
+-spec tokens(AbsTerm) -> Tokens when
+ AbsTerm :: abstract_expr(),
+ Tokens :: [token()].
+tokens(Abs) ->
+ tokens(Abs, []).
+
+-spec tokens(AbsTerm, MoreTokens) -> Tokens when
+ AbsTerm :: abstract_expr(),
+ MoreTokens :: [token()],
+ Tokens :: [token()].
+tokens({char,L,C}, More) -> [{char,L,C}|More];
+tokens({integer,L,N}, More) -> [{integer,L,N}|More];
+tokens({float,L,F}, More) -> [{float,L,F}|More];
+tokens({atom,L,A}, More) -> [{atom,L,A}|More];
+tokens({var,L,V}, More) -> [{var,L,V}|More];
+tokens({string,L,S}, More) -> [{string,L,S}|More];
+tokens({nil,L}, More) -> [{'[',L},{']',L}|More];
+tokens({cons,L,Head,Tail}, More) ->
+ [{'[',L}|tokens(Head, tokens_tail(Tail, More))];
+tokens({tuple,L,[]}, More) ->
+ [{'{',L},{'}',L}|More];
+tokens({tuple,L,[E|Es]}, More) ->
+ [{'{',L}|tokens(E, tokens_tuple(Es, ?line(E), More))].
+
+tokens_tail({cons,L,Head,Tail}, More) ->
+ [{',',L}|tokens(Head, tokens_tail(Tail, More))];
+tokens_tail({nil,L}, More) ->
+ [{']',L}|More];
+tokens_tail(Other, More) ->
+ L = ?line(Other),
+ [{'|',L}|tokens(Other, [{']',L}|More])].
+
+tokens_tuple([E|Es], Line, More) ->
+ [{',',Line}|tokens(E, tokens_tuple(Es, ?line(E), More))];
+tokens_tuple([], Line, More) ->
+ [{'}',Line}|More].
+
+%% Give the relative precedences of operators.
+
+inop_prec('=') -> {150,100,100};
+inop_prec('!') -> {150,100,100};
+inop_prec('orelse') -> {160,150,150};
+inop_prec('andalso') -> {200,160,160};
+inop_prec('==') -> {300,200,300};
+inop_prec('/=') -> {300,200,300};
+inop_prec('=<') -> {300,200,300};
+inop_prec('<') -> {300,200,300};
+inop_prec('>=') -> {300,200,300};
+inop_prec('>') -> {300,200,300};
+inop_prec('=:=') -> {300,200,300};
+inop_prec('=/=') -> {300,200,300};
+inop_prec('++') -> {400,300,300};
+inop_prec('--') -> {400,300,300};
+inop_prec('+') -> {400,400,500};
+inop_prec('-') -> {400,400,500};
+inop_prec('bor') -> {400,400,500};
+inop_prec('bxor') -> {400,400,500};
+inop_prec('bsl') -> {400,400,500};
+inop_prec('bsr') -> {400,400,500};
+inop_prec('or') -> {400,400,500};
+inop_prec('xor') -> {400,400,500};
+inop_prec('*') -> {500,500,600};
+inop_prec('/') -> {500,500,600};
+inop_prec('div') -> {500,500,600};
+inop_prec('rem') -> {500,500,600};
+inop_prec('band') -> {500,500,600};
+inop_prec('and') -> {500,500,600};
+inop_prec('#') -> {800,700,800};
+inop_prec(':') -> {900,800,900};
+inop_prec('.') -> {900,900,1000}.
+
+-type pre_op() :: 'catch' | '+' | '-' | 'bnot' | 'not' | '#'.
+
+-spec preop_prec(pre_op()) -> {0 | 600 | 700, 100 | 700 | 800}.
+
+preop_prec('catch') -> {0,100};
+preop_prec('+') -> {600,700};
+preop_prec('-') -> {600,700};
+preop_prec('bnot') -> {600,700};
+preop_prec('not') -> {600,700};
+preop_prec('#') -> {700,800}.
+
+-spec func_prec() -> {800,700}.
+
+func_prec() -> {800,700}.
+
+-spec max_prec() -> 1000.
+
+max_prec() -> 1000.
+
+parse(T) ->
+ bar:foo(T).
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/big_local_type.erl b/lib/dialyzer/test/opaque_SUITE_data/src/big_local_type.erl
new file mode 100644
index 0000000000..7daceb5260
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/big_local_type.erl
@@ -0,0 +1,523 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%% A copy of small_SUITE_data/src/big_local_type.erl, where
+%%% abstract_expr() is opaque. The transformation of forms to types is
+%%% now much faster than it used to be, for this module.
+
+-module(big_local_type).
+
+-export([parse_form/1,parse_exprs/1,parse_term/1]).
+-export([normalise/1,tokens/1,tokens/2]).
+-export([inop_prec/1,preop_prec/1,func_prec/0,max_prec/0]).
+
+-export_type([abstract_clause/0, abstract_expr/0, abstract_form/0,
+ error_info/0]).
+
+%% Start of Abstract Format
+
+-type line() :: erl_anno:line().
+
+-export_type([af_module/0, af_export/0, af_import/0, af_fa_list/0,
+ af_compile/0, af_file/0, af_record_decl/0,
+ af_field_decl/0, af_wild_attribute/0,
+ af_record_update/1, af_catch/0, af_local_call/0,
+ af_remote_call/0, af_args/0, af_local_function/0,
+ af_remote_function/0, af_list_comprehension/0,
+ af_binary_comprehension/0, af_template/0,
+ af_qualifier_seq/0, af_qualifier/0, af_generator/0,
+ af_filter/0, af_block/0, af_if/0, af_case/0, af_try/0,
+ af_clause_seq/0, af_catch_clause_seq/0, af_receive/0,
+ af_local_fun/0, af_remote_fun/0, af_fun/0, af_query/0,
+ af_query_access/0, af_clause/0,
+ af_catch_clause/0, af_catch_pattern/0, af_catch_class/0,
+ af_body/0, af_guard_seq/0, af_guard/0, af_guard_test/0,
+ af_record_access/1, af_guard_call/0,
+ af_remote_guard_call/0, af_pattern/0, af_literal/0,
+ af_atom/0, af_lit_atom/1, af_integer/0, af_float/0,
+ af_string/0, af_match/1, af_variable/0,
+ af_anon_variable/0, af_tuple/1, af_nil/0, af_cons/1,
+ af_bin/1, af_binelement/1, af_binelement_size/0,
+ af_binary_op/1, af_binop/0, af_unary_op/1, af_unop/0]).
+
+-type abstract_form() :: af_module()
+ | af_export()
+ | af_import()
+ | af_compile()
+ | af_file()
+ | af_record_decl()
+ | af_wild_attribute()
+ | af_function_decl().
+
+-type af_module() :: {attribute, line(), module, module()}.
+
+-type af_export() :: {attribute, line(), export, af_fa_list()}.
+
+-type af_import() :: {attribute, line(), import, af_fa_list()}.
+
+-type af_fa_list() :: [{function(), arity()}].
+
+-type af_compile() :: {attribute, line(), compile, any()}.
+
+-type af_file() :: {attribute, line(), file, {string(), line()}}.
+
+-type af_record_decl() ::
+ {attribute, line(), record, af_record_name(), [af_field_decl()]}.
+
+-type af_field_decl() :: {record_field, line(), af_atom()}
+ | {record_field, line(), af_atom(), abstract_expr()}.
+
+%% Types and specs, among other things...
+-type af_wild_attribute() :: {attribute, line(), af_atom(), any()}.
+
+-type af_function_decl() ::
+ {function, line(), function(), arity(), af_clause_seq()}.
+
+-opaque abstract_expr() :: af_literal()
+ | af_match(abstract_expr())
+ | af_variable()
+ | af_tuple(abstract_expr())
+ | af_nil()
+ | af_cons(abstract_expr())
+ | af_bin(abstract_expr())
+ | af_binary_op(abstract_expr())
+ | af_unary_op(abstract_expr())
+ | af_record_access(abstract_expr())
+ | af_record_update(abstract_expr())
+ | af_record_index()
+ | af_record_field(abstract_expr())
+ | af_catch()
+ | af_local_call()
+ | af_remote_call()
+ | af_list_comprehension()
+ | af_binary_comprehension()
+ | af_block()
+ | af_if()
+ | af_case()
+ | af_try()
+ | af_receive()
+ | af_local_fun()
+ | af_remote_fun()
+ | af_fun()
+ | af_query()
+ | af_query_access().
+
+-type af_record_update(T) :: {record,
+ line(),
+ abstract_expr(),
+ af_record_name(),
+ [af_record_field(T)]}.
+
+-type af_catch() :: {'catch', line(), abstract_expr()}.
+
+-type af_local_call() :: {call, line(), af_local_function(), af_args()}.
+
+-type af_remote_call() :: {call, line(), af_remote_function(), af_args()}.
+
+-type af_args() :: [abstract_expr()].
+
+-type af_local_function() :: abstract_expr().
+
+-type af_remote_function() ::
+ {remote, line(), abstract_expr(), abstract_expr()}.
+
+-type af_list_comprehension() ::
+ {lc, line(), af_template(), af_qualifier_seq()}.
+
+-type af_binary_comprehension() ::
+ {bc, line(), af_template(), af_qualifier_seq()}.
+
+-type af_template() :: abstract_expr().
+
+-type af_qualifier_seq() :: [af_qualifier()].
+
+-type af_qualifier() :: af_generator() | af_filter().
+
+-type af_generator() :: {generate, line(), af_pattern(), abstract_expr()}
+ | {b_generate, line(), af_pattern(), abstract_expr()}.
+
+-type af_filter() :: abstract_expr().
+
+-type af_block() :: {block, line(), af_body()}.
+
+-type af_if() :: {'if', line(), af_clause_seq()}.
+
+-type af_case() :: {'case', line(), abstract_expr(), af_clause_seq()}.
+
+-type af_try() :: {'try',
+ line(),
+ af_body(),
+ af_clause_seq(),
+ af_catch_clause_seq(),
+ af_body()}.
+
+-type af_clause_seq() :: [af_clause(), ...].
+
+-type af_catch_clause_seq() :: [af_clause(), ...].
+
+-type af_receive() ::
+ {'receive', line(), af_clause_seq()}
+ | {'receive', line(), af_clause_seq(), abstract_expr(), af_body()}.
+
+-type af_local_fun() :: {'fun', line(), {function, function(), arity()}}.
+
+-type af_remote_fun() ::
+ {'fun', line(), {function, module(), function(), arity()}}
+ | {'fun', line(), {function, af_atom(), af_atom(), af_integer()}}.
+
+-type af_fun() :: {'fun', line(), {clauses, af_clause_seq()}}.
+
+-type af_query() :: {'query', line(), af_list_comprehension()}.
+
+-type af_query_access() ::
+ {record_field, line(), abstract_expr(), af_field_name()}.
+
+-type abstract_clause() :: af_clause() | af_catch_clause().
+
+-type af_clause() ::
+ {clause, line(), [af_pattern()], af_guard_seq(), af_body()}.
+
+-type af_catch_clause() ::
+ {clause, line(), [af_catch_pattern()], af_guard_seq(), af_body()}.
+
+-type af_catch_pattern() ::
+ {af_catch_class(), af_pattern(), af_anon_variable()}.
+
+-type af_catch_class() ::
+ af_variable()
+ | af_lit_atom(throw) | af_lit_atom(error) | af_lit_atom(exit).
+
+-type af_body() :: [abstract_expr(), ...].
+
+-type af_guard_seq() :: [af_guard()].
+
+-type af_guard() :: [af_guard_test(), ...].
+
+-type af_guard_test() :: af_literal()
+ | af_variable()
+ | af_tuple(af_guard_test())
+ | af_nil()
+ | af_cons(af_guard_test())
+ | af_bin(af_guard_test())
+ | af_binary_op(af_guard_test())
+ | af_unary_op(af_guard_test())
+ | af_record_access(af_guard_test())
+ | af_record_index()
+ | af_record_field(af_guard_test())
+ | af_guard_call()
+ | af_remote_guard_call().
+
+-type af_record_access(T) ::
+ {record, line(), af_record_name(), [af_record_field(T)]}.
+
+-type af_guard_call() :: {call, line(), function(), [af_guard_test()]}.
+
+-type af_remote_guard_call() ::
+ {call, line(), atom(), af_lit_atom(erlang), [af_guard_test()]}.
+
+-type af_pattern() :: af_literal()
+ | af_match(af_pattern())
+ | af_variable()
+ | af_anon_variable()
+ | af_tuple(af_pattern())
+ | af_nil()
+ | af_cons(af_pattern())
+ | af_bin(af_pattern())
+ | af_binary_op(af_pattern())
+ | af_unary_op(af_pattern())
+ | af_record_index()
+ | af_record_field(af_pattern()).
+
+-type af_literal() :: af_atom() | af_integer() | af_float() | af_string().
+
+-type af_atom() :: af_lit_atom(atom()).
+
+-type af_lit_atom(A) :: {atom, line(), A}.
+
+-type af_integer() :: {integer, line(), non_neg_integer()}.
+
+-type af_float() :: {float, line(), float()}.
+
+-type af_string() :: {string, line(), [byte()]}.
+
+-type af_match(T) :: {match, line(), T, T}.
+
+-type af_variable() :: {var, line(), atom()}.
+
+-type af_anon_variable() :: {var, line(), '_'}.
+
+-type af_tuple(T) :: {tuple, line(), [T]}.
+
+-type af_nil() :: {nil, line()}.
+
+-type af_cons(T) :: {cons, line(), T, T}.
+
+-type af_bin(T) :: {bin, line(), [af_binelement(T)]}.
+
+-type af_binelement(T) :: {bin_element,
+ line(),
+ T,
+ af_binelement_size(),
+ type_specifier_list()}.
+
+-type af_binelement_size() :: default | abstract_expr().
+
+-type af_binary_op(T) :: {op, line(), T, af_binop(), T}.
+
+-type af_binop() :: '/' | '*' | 'div' | 'rem' | 'band' | 'and' | '+' | '-'
+ | 'bor' | 'bxor' | 'bsl' | 'bsr' | 'or' | 'xor' | '++'
+ | '--' | '==' | '/=' | '=<' | '<' | '>=' | '>' | '=:='
+ | '=/='.
+
+-type af_unary_op(T) :: {op, line(), af_unop(), T}.
+
+-type af_unop() :: '+' | '*' | 'bnot' | 'not'.
+
+%% See also lib/stdlib/{src/erl_bits.erl,include/erl_bits.hrl}.
+-type type_specifier_list() :: default | [type_specifier(), ...].
+
+-type type_specifier() :: af_type()
+ | af_signedness()
+ | af_endianness()
+ | af_unit().
+
+-type af_type() :: integer
+ | float
+ | binary
+ | bytes
+ | bitstring
+ | bits
+ | utf8
+ | utf16
+ | utf32.
+
+-type af_signedness() :: signed | unsigned.
+
+-type af_endianness() :: big | little | native.
+
+-type af_unit() :: {unit, 1..256}.
+
+-type af_record_index() ::
+ {record_index, line(), af_record_name(), af_field_name()}.
+
+-type af_record_field(T) :: {record_field, line(), af_field_name(), T}.
+
+-type af_record_name() :: atom().
+
+-type af_field_name() :: atom().
+
+%% End of Abstract Format
+
+-type error_description() :: term().
+-type error_info() :: {erl_anno:line(), module(), error_description()}.
+-type token() :: {Tag :: atom(), Line :: erl_anno:line()}.
+
+%% mkop(Op, Arg) -> {op,Line,Op,Arg}.
+%% mkop(Left, Op, Right) -> {op,Line,Op,Left,Right}.
+
+-define(mkop2(L, OpPos, R),
+ begin
+ {Op,Pos} = OpPos,
+ {op,Pos,Op,L,R}
+ end).
+
+-define(mkop1(OpPos, A),
+ begin
+ {Op,Pos} = OpPos,
+ {op,Pos,Op,A}
+ end).
+
+%% keep track of line info in tokens
+-define(line(Tup), element(2, Tup)).
+
+%% Entry points compatible to old erl_parse.
+%% These really suck and are only here until Calle gets multiple
+%% entry points working.
+
+-spec parse_form(Tokens) -> {ok, AbsForm} | {error, ErrorInfo} when
+ Tokens :: [token()],
+ AbsForm :: abstract_form(),
+ ErrorInfo :: error_info().
+parse_form([{'-',L1},{atom,L2,spec}|Tokens]) ->
+ parse([{'-',L1},{'spec',L2}|Tokens]);
+parse_form([{'-',L1},{atom,L2,callback}|Tokens]) ->
+ parse([{'-',L1},{'callback',L2}|Tokens]);
+parse_form(Tokens) ->
+ parse(Tokens).
+
+-spec parse_exprs(Tokens) -> {ok, ExprList} | {error, ErrorInfo} when
+ Tokens :: [token()],
+ ExprList :: [abstract_expr()],
+ ErrorInfo :: error_info().
+parse_exprs(Tokens) ->
+ case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of
+ {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],Exprs}]}} ->
+ {ok,Exprs};
+ {error,_} = Err -> Err
+ end.
+
+-spec parse_term(Tokens) -> {ok, Term} | {error, ErrorInfo} when
+ Tokens :: [token()],
+ Term :: term(),
+ ErrorInfo :: error_info().
+parse_term(Tokens) ->
+ case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of
+ {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[Expr]}]}} ->
+ try normalise(Expr) of
+ Term -> {ok,Term}
+ catch
+ _:_R -> {error,{?line(Expr),?MODULE,"bad term"}}
+ end;
+ {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[_E1,E2|_Es]}]}} ->
+ {error,{?line(E2),?MODULE,"bad term"}};
+ {error,_} = Err -> Err
+ end.
+
+%% Convert between the abstract form of a term and a term.
+
+-spec normalise(AbsTerm) -> Data when
+ AbsTerm :: abstract_expr(),
+ Data :: term().
+normalise({char,_,C}) -> C;
+normalise({integer,_,I}) -> I;
+normalise({float,_,F}) -> F;
+normalise({atom,_,A}) -> A;
+normalise({string,_,S}) -> S;
+normalise({nil,_}) -> [];
+normalise({bin,_,Fs}) ->
+ {value, B, _} =
+ eval_bits:expr_grp(Fs, [],
+ fun(E, _) ->
+ {value, normalise(E), []}
+ end, [], true),
+ B;
+normalise({cons,_,Head,Tail}) ->
+ [normalise(Head)|normalise(Tail)];
+normalise({tuple,_,Args}) ->
+ list_to_tuple(normalise_list(Args));
+%% Atom dot-notation, as in 'foo.bar.baz'
+%% Special case for unary +/-.
+normalise({op,_,'+',{char,_,I}}) -> I;
+normalise({op,_,'+',{integer,_,I}}) -> I;
+normalise({op,_,'+',{float,_,F}}) -> F;
+normalise({op,_,'-',{char,_,I}}) -> -I; %Weird, but compatible!
+normalise({op,_,'-',{integer,_,I}}) -> -I;
+normalise({op,_,'-',{float,_,F}}) -> -F;
+normalise(X) -> erlang:error({badarg, X}).
+
+normalise_list([H|T]) ->
+ [normalise(H)|normalise_list(T)];
+normalise_list([]) ->
+ [].
+
+%% Generate a list of tokens representing the abstract term.
+
+-spec tokens(AbsTerm) -> Tokens when
+ AbsTerm :: abstract_expr(),
+ Tokens :: [token()].
+tokens(Abs) ->
+ tokens(Abs, []).
+
+-spec tokens(AbsTerm, MoreTokens) -> Tokens when
+ AbsTerm :: abstract_expr(),
+ MoreTokens :: [token()],
+ Tokens :: [token()].
+tokens({char,L,C}, More) -> [{char,L,C}|More];
+tokens({integer,L,N}, More) -> [{integer,L,N}|More];
+tokens({float,L,F}, More) -> [{float,L,F}|More];
+tokens({atom,L,A}, More) -> [{atom,L,A}|More];
+tokens({var,L,V}, More) -> [{var,L,V}|More];
+tokens({string,L,S}, More) -> [{string,L,S}|More];
+tokens({nil,L}, More) -> [{'[',L},{']',L}|More];
+tokens({cons,L,Head,Tail}, More) ->
+ [{'[',L}|tokens(Head, tokens_tail(Tail, More))];
+tokens({tuple,L,[]}, More) ->
+ [{'{',L},{'}',L}|More];
+tokens({tuple,L,[E|Es]}, More) ->
+ [{'{',L}|tokens(E, tokens_tuple(Es, ?line(E), More))].
+
+tokens_tail({cons,L,Head,Tail}, More) ->
+ [{',',L}|tokens(Head, tokens_tail(Tail, More))];
+tokens_tail({nil,L}, More) ->
+ [{']',L}|More];
+tokens_tail(Other, More) ->
+ L = ?line(Other),
+ [{'|',L}|tokens(Other, [{']',L}|More])].
+
+tokens_tuple([E|Es], Line, More) ->
+ [{',',Line}|tokens(E, tokens_tuple(Es, ?line(E), More))];
+tokens_tuple([], Line, More) ->
+ [{'}',Line}|More].
+
+%% Give the relative precedences of operators.
+
+inop_prec('=') -> {150,100,100};
+inop_prec('!') -> {150,100,100};
+inop_prec('orelse') -> {160,150,150};
+inop_prec('andalso') -> {200,160,160};
+inop_prec('==') -> {300,200,300};
+inop_prec('/=') -> {300,200,300};
+inop_prec('=<') -> {300,200,300};
+inop_prec('<') -> {300,200,300};
+inop_prec('>=') -> {300,200,300};
+inop_prec('>') -> {300,200,300};
+inop_prec('=:=') -> {300,200,300};
+inop_prec('=/=') -> {300,200,300};
+inop_prec('++') -> {400,300,300};
+inop_prec('--') -> {400,300,300};
+inop_prec('+') -> {400,400,500};
+inop_prec('-') -> {400,400,500};
+inop_prec('bor') -> {400,400,500};
+inop_prec('bxor') -> {400,400,500};
+inop_prec('bsl') -> {400,400,500};
+inop_prec('bsr') -> {400,400,500};
+inop_prec('or') -> {400,400,500};
+inop_prec('xor') -> {400,400,500};
+inop_prec('*') -> {500,500,600};
+inop_prec('/') -> {500,500,600};
+inop_prec('div') -> {500,500,600};
+inop_prec('rem') -> {500,500,600};
+inop_prec('band') -> {500,500,600};
+inop_prec('and') -> {500,500,600};
+inop_prec('#') -> {800,700,800};
+inop_prec(':') -> {900,800,900};
+inop_prec('.') -> {900,900,1000}.
+
+-type pre_op() :: 'catch' | '+' | '-' | 'bnot' | 'not' | '#'.
+
+-spec preop_prec(pre_op()) -> {0 | 600 | 700, 100 | 700 | 800}.
+
+preop_prec('catch') -> {0,100};
+preop_prec('+') -> {600,700};
+preop_prec('-') -> {600,700};
+preop_prec('bnot') -> {600,700};
+preop_prec('not') -> {600,700};
+preop_prec('#') -> {700,800}.
+
+-spec func_prec() -> {800,700}.
+
+func_prec() -> {800,700}.
+
+-spec max_prec() -> 1000.
+
+max_prec() -> 1000.
+
+parse(T) ->
+ bar:foo(T).
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/dict/dict_use.erl b/lib/dialyzer/test/opaque_SUITE_data/src/dict/dict_use.erl
index a4cec065ab..2527f166f2 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/src/dict/dict_use.erl
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/dict/dict_use.erl
@@ -34,7 +34,7 @@ middle() ->
{w1(), w2()}.
%%---------------------------------------------------------------------
-%% Cases that are problematic w.r.t. opaqueness of types
+%% Cases that are problematic w.r.t. opacity of types
%%---------------------------------------------------------------------
w1() ->
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/hipe_vectors/hipe_ig_moves.erl b/lib/dialyzer/test/opaque_SUITE_data/src/hipe_vectors/hipe_ig_moves.erl
new file mode 100644
index 0000000000..94768e77e7
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/hipe_vectors/hipe_ig_moves.erl
@@ -0,0 +1,75 @@
+%% -*- erlang-indent-level: 2 -*-
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+
+-module(hipe_ig_moves).
+-export([new/1,
+ new_move/3,
+ get_moves/1]).
+
+%%-----------------------------------------------------------------------------
+%% The main data structure; its fields are:
+%% - movelist : mapping from temp to set of associated move numbers
+%% - nrmoves : number of distinct move instructions seen so far
+%% - moveinsns : list of move instructions, in descending move number order
+%% - moveset : set of move instructions
+
+-record(ig_moves, {movelist :: movelist(),
+ nrmoves = 0 :: non_neg_integer(),
+ moveinsns = [] :: [{_,_}],
+ moveset = gb_sets:empty() :: gb_sets:set()}).
+
+-type movelist() :: hipe_vectors:vector(ordsets:ordset(non_neg_integer())).
+
+%%-----------------------------------------------------------------------------
+
+-spec new(non_neg_integer()) -> #ig_moves{}.
+
+new(NrTemps) ->
+ MoveList = hipe_vectors:new(NrTemps, ordsets:new()),
+ #ig_moves{movelist = MoveList}.
+
+-spec new_move(_, _, #ig_moves{}) -> #ig_moves{}.
+
+new_move(Dst, Src, IG_moves) ->
+ MoveSet = IG_moves#ig_moves.moveset,
+ MoveInsn = {Dst, Src},
+ case gb_sets:is_member(MoveInsn, MoveSet) of
+ true ->
+ IG_moves;
+ false ->
+ MoveNr = IG_moves#ig_moves.nrmoves,
+ Movelist0 = IG_moves#ig_moves.movelist,
+ Movelist1 = add_movelist(MoveNr, Dst,
+ add_movelist(MoveNr, Src, Movelist0)),
+ IG_moves#ig_moves{nrmoves = MoveNr+1,
+ movelist = Movelist1,
+ moveinsns = [MoveInsn|IG_moves#ig_moves.moveinsns],
+ moveset = gb_sets:insert(MoveInsn, MoveSet)}
+ end.
+
+-spec add_movelist(non_neg_integer(), non_neg_integer(), movelist())
+ -> movelist().
+
+add_movelist(MoveNr, Temp, MoveList) ->
+ AssocMoves = hipe_vectors:get(MoveList, Temp),
+ %% XXX: MoveNr does not occur in moveList[Temp], but the new list must be an
+ %% ordset due to the ordsets:union in hipe_coalescing_regalloc:combine().
+ hipe_vectors:set(MoveList, Temp, ordsets:add_element(MoveNr, AssocMoves)).
+
+-spec get_moves(#ig_moves{}) -> {movelist(), non_neg_integer(), tuple()}.
+
+get_moves(IG_moves) -> % -> {MoveList, NrMoves, MoveInsns}
+ {IG_moves#ig_moves.movelist,
+ IG_moves#ig_moves.nrmoves,
+ list_to_tuple(lists:reverse(IG_moves#ig_moves.moveinsns))}.
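A brief usage sketch of the interface added above (illustrative only, not part of this patch; the function name is hypothetical):

%% Record a move between two temporaries and read the accumulated state
%% back; a duplicate move instruction is ignored via the gb_sets check.
ig_moves_example() ->
    IG0 = hipe_ig_moves:new(8),               % room for temps 0..7
    IG1 = hipe_ig_moves:new_move(1, 2, IG0),  % move temp 2 -> temp 1
    IG2 = hipe_ig_moves:new_move(1, 2, IG1),  % duplicate, returns IG1 unchanged
    {_MoveList, 1, {{1,2}}} = hipe_ig_moves:get_moves(IG2).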
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/hipe_vectors/hipe_vectors.erl b/lib/dialyzer/test/opaque_SUITE_data/src/hipe_vectors/hipe_vectors.erl
new file mode 100644
index 0000000000..1890b86c8d
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/hipe_vectors/hipe_vectors.erl
@@ -0,0 +1,129 @@
+%% -*- erlang-indent-level: 2 -*-
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% VECTORS IN ERLANG
+%%
+%% Abstract interface to vectors, indexed from 0 to size-1.
+
+-module(hipe_vectors).
+-export([new/2,
+ set/3,
+ get/2,
+ size/1,
+ vector_to_list/1,
+ %% list_to_vector/1,
+ list/1]).
+
+%%-define(USE_TUPLES, true).
+%%-define(USE_GBTREES, true).
+-define(USE_ARRAYS, true).
+
+-type vector() :: vector(_).
+-export_type([vector/0, vector/1]).
+
+-spec new(non_neg_integer(), V) -> vector(E) when V :: E.
+-spec set(vector(E), non_neg_integer(), V :: E) -> vector(E).
+-spec get(vector(E), non_neg_integer()) -> E.
+-spec size(vector(_)) -> non_neg_integer().
+-spec vector_to_list(vector(E)) -> [E].
+%% -spec list_to_vector([E]) -> vector(E).
+-spec list(vector(E)) -> [{non_neg_integer(), E}].
+
+%% ---------------------------------------------------------------------
+
+-ifdef(USE_TUPLES).
+-opaque vector(_) :: tuple().
+
+new(N, V) ->
+ erlang:make_tuple(N, V).
+
+size(V) -> erlang:tuple_size(V).
+
+list(Vec) ->
+ index(tuple_to_list(Vec), 0).
+
+index([X|Xs],N) ->
+ [{N,X} | index(Xs,N+1)];
+index([],_) ->
+ [].
+
+%% list_to_vector(Xs) ->
+%% list_to_tuple(Xs).
+
+vector_to_list(V) ->
+ tuple_to_list(V).
+
+set(Vec, Ix, V) ->
+ setelement(Ix+1, Vec, V).
+
+get(Vec, Ix) -> element(Ix+1, Vec).
+
+-endif. %% ifdef USE_TUPLES
+
+%% ---------------------------------------------------------------------
+
+-ifdef(USE_GBTREES).
+-opaque vector(E) :: gb_trees:tree(non_neg_integer(), E).
+
+new(N, V) when is_integer(N), N >= 0 ->
+ gb_trees:from_orddict(mklist(N, V)).
+
+mklist(N, V) ->
+ mklist(0, N, V).
+
+mklist(M, N, V) when M < N ->
+ [{M, V} | mklist(M+1, N, V)];
+mklist(_, _, _) ->
+ [].
+
+size(V) -> gb_trees:size(V).
+
+list(Vec) ->
+ gb_trees:to_list(Vec).
+
+%% list_to_vector(Xs) ->
+%% gb_trees:from_orddict(index(Xs, 0)).
+%%
+%% index([X|Xs], N) ->
+%% [{N, X} | index(Xs, N+1)];
+%% index([],_) ->
+%% [].
+
+vector_to_list(V) ->
+ gb_trees:values(V).
+
+set(Vec, Ix, V) ->
+ gb_trees:update(Ix, V, Vec).
+
+get(Vec, Ix) ->
+ gb_trees:get(Ix, Vec).
+
+-endif. %% ifdef USE_GBTREES
+
+%% ---------------------------------------------------------------------
+
+-ifdef(USE_ARRAYS).
+-opaque vector(E) :: array:array(E).
+%%-type vector(E) :: array:array(E). % Work around dialyzer bug
+
+new(N, V) -> array:new(N, {default, V}).
+size(V) -> array:size(V).
+list(Vec) -> array:to_orddict(Vec).
+%% list_to_vector(Xs) -> array:from_list(Xs).
+vector_to_list(V) -> array:to_list(V).
+set(Vec, Ix, V) -> array:set(Ix, V, Vec).
+get(Vec, Ix) -> array:get(Ix, Vec).
+
+-endif. %% ifdef USE_ARRAYS
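For reference, a short usage sketch of the vector interface above (illustrative only, not part of this patch; with USE_ARRAYS defined, vector(E) is backed by array:array(E)):

%% Indices run from 0 to size-1, as documented in the module header.
vectors_example() ->
    V0 = hipe_vectors:new(3, 0),              % three slots, all defaulting to 0
    V1 = hipe_vectors:set(V0, 1, 42),
    42 = hipe_vectors:get(V1, 1),
    3  = hipe_vectors:size(V1),
    [{0,0}, {1,42}, {2,0}] = hipe_vectors:list(V1).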
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/modules/opaque_digraph.erl b/lib/dialyzer/test/opaque_SUITE_data/src/modules/opaque_digraph.erl
index 47185dc99d..27d937277e 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/src/modules/opaque_digraph.erl
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/modules/opaque_digraph.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/modules/opaque_erl_scan.erl b/lib/dialyzer/test/opaque_SUITE_data/src/modules/opaque_erl_scan.erl
index d6b3a730bb..12506f5b4c 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/src/modules/opaque_erl_scan.erl
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/modules/opaque_erl_scan.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/para/myqueue.erl b/lib/dialyzer/test/opaque_SUITE_data/src/para/myqueue.erl
new file mode 100644
index 0000000000..0f76680464
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/para/myqueue.erl
@@ -0,0 +1,17 @@
+-module(myqueue).
+
+-export([new/0, in/2]).
+
+-record(myqueue, {queue = queue:new() :: queue:queue({integer(), _})}).
+
+-opaque myqueue(Item) :: #myqueue{queue :: queue:queue({integer(), Item})}.
+
+-export_type([myqueue/1]).
+
+-spec new() -> myqueue(_).
+new() ->
+ #myqueue{queue=queue:new()}.
+
+-spec in(Item, myqueue(Item)) -> myqueue(Item).
+in(Item, #myqueue{queue=Q}) ->
+ #myqueue{queue=queue:in({1, Item}, Q)}.
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/para/myqueue_params.erl b/lib/dialyzer/test/opaque_SUITE_data/src/para/myqueue_params.erl
new file mode 100644
index 0000000000..8d766b7804
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/para/myqueue_params.erl
@@ -0,0 +1,15 @@
+-module(myqueue_params).
+
+-export([new/0, in/2]).
+
+-record(myqueue_params, {myqueue = myqueue:new() :: myqueue:myqueue(integer())}).
+
+-type myqueue_params() :: #myqueue_params{myqueue ::
+ myqueue:myqueue(integer())}.
+-spec new() -> myqueue_params().
+new() ->
+ #myqueue_params{myqueue=myqueue:new()}.
+
+-spec in(integer(), myqueue_params()) -> myqueue_params().
+in(Item, #myqueue_params{myqueue=Q} = P) when is_integer(Item) ->
+ P#myqueue_params{myqueue=myqueue:in(Item, Q)}.
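myqueue_params above is the well-typed consumer of the parameterized opaque type; the kind of misuse the opaque_SUITE is meant to catch would look like the following, called from any module other than myqueue (illustrative only, not part of this patch):

bad_in() ->
    %% Supplies a plain queue:queue() where the opaque myqueue:myqueue(integer())
    %% is expected; dialyzer is expected to flag this call as an opacity violation.
    myqueue:in(1, queue:new()).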
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/para/para3.erl b/lib/dialyzer/test/opaque_SUITE_data/src/para/para3.erl
index 102215b28d..d8c1f561f7 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/src/para/para3.erl
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/para/para3.erl
@@ -62,7 +62,7 @@ t2() ->
%% Shows that the list TypeNames in t_from_form must include ArgsLen.
t1_adt() ->
- {{{{{17}}}}} = para3_adt:t1(3). % breaks the opaqueness
+ {{{{{17}}}}} = para3_adt:t1(3). % breaks the opacity
t2_adt() ->
{{{{17}}}} = para3_adt:t1(3). % can never match
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_common.hrl b/lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_common.hrl
new file mode 100644
index 0000000000..c10626c5cc
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_common.hrl
@@ -0,0 +1,55 @@
+%%% Copyright 2010-2013 Manolis Papadakis <[email protected]>,
+%%% Eirini Arvaniti <[email protected]>
+%%% and Kostis Sagonas <[email protected]>
+%%%
+%%% This file is part of PropEr.
+%%%
+%%% PropEr is free software: you can redistribute it and/or modify
+%%% it under the terms of the GNU General Public License as published by
+%%% the Free Software Foundation, either version 3 of the License, or
+%%% (at your option) any later version.
+%%%
+%%% PropEr is distributed in the hope that it will be useful,
+%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
+%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+%%% GNU General Public License for more details.
+%%%
+%%% You should have received a copy of the GNU General Public License
+%%% along with PropEr. If not, see <http://www.gnu.org/licenses/>.
+
+%%% @copyright 2010-2013 Manolis Papadakis, Eirini Arvaniti and Kostis Sagonas
+%%% @version {@version}
+%%% @author Manolis Papadakis
+%%% @doc Common parts of user and internal header files
+
+
+%%------------------------------------------------------------------------------
+%% Test generation macros
+%%------------------------------------------------------------------------------
+
+-define(FORALL(X,RawType,Prop), proper:forall(RawType,fun(X) -> Prop end)).
+-define(IMPLIES(Pre,Prop), proper:implies(Pre,?DELAY(Prop))).
+-define(WHENFAIL(Action,Prop), proper:whenfail(?DELAY(Action),?DELAY(Prop))).
+-define(TRAPEXIT(Prop), proper:trapexit(?DELAY(Prop))).
+-define(TIMEOUT(Limit,Prop), proper:timeout(Limit,?DELAY(Prop))).
+%% TODO: -define(ALWAYS(Tests,Prop), proper:always(Tests,?DELAY(Prop))).
+%% TODO: -define(SOMETIMES(Tests,Prop), proper:sometimes(Tests,?DELAY(Prop))).
+
+
+%%------------------------------------------------------------------------------
+%% Generator macros
+%%------------------------------------------------------------------------------
+
+-define(FORCE(X), (X)()).
+-define(DELAY(X), fun() -> X end).
+-define(LAZY(X), proper_types:lazy(?DELAY(X))).
+-define(SIZED(SizeArg,Gen), proper_types:sized(fun(SizeArg) -> Gen end)).
+-define(LET(X,RawType,Gen), proper_types:bind(RawType,fun(X) -> Gen end,false)).
+-define(SHRINK(Gen,AltGens),
+ proper_types:shrinkwith(?DELAY(Gen),?DELAY(AltGens))).
+-define(LETSHRINK(Xs,RawType,Gen),
+ proper_types:bind(RawType,fun(Xs) -> Gen end,true)).
+-define(SUCHTHAT(X,RawType,Condition),
+ proper_types:add_constraint(RawType,fun(X) -> Condition end,true)).
+-define(SUCHTHATMAYBE(X,RawType,Condition),
+ proper_types:add_constraint(RawType,fun(X) -> Condition end,false)).
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_gen.erl b/lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_gen.erl
new file mode 100644
index 0000000000..bf627d1373
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_gen.erl
@@ -0,0 +1,624 @@
+%%% Copyright 2010-2013 Manolis Papadakis <[email protected]>,
+%%% Eirini Arvaniti <[email protected]>
+%%% and Kostis Sagonas <[email protected]>
+%%%
+%%% This file is part of PropEr.
+%%%
+%%% PropEr is free software: you can redistribute it and/or modify
+%%% it under the terms of the GNU General Public License as published by
+%%% the Free Software Foundation, either version 3 of the License, or
+%%% (at your option) any later version.
+%%%
+%%% PropEr is distributed in the hope that it will be useful,
+%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
+%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+%%% GNU General Public License for more details.
+%%%
+%%% You should have received a copy of the GNU General Public License
+%%% along with PropEr. If not, see <http://www.gnu.org/licenses/>.
+
+%%% @copyright 2010-2013 Manolis Papadakis, Eirini Arvaniti and Kostis Sagonas
+%%% @version {@version}
+%%% @author Manolis Papadakis
+
+%%% @doc Generator subsystem and generators for basic types.
+%%%
+%%% You can use <a href="#index">these</a> functions to try out the random
+%%% instance generation and shrinking subsystems.
+%%%
+%%% CAUTION: These functions should never be used inside properties. They are
+%%% meant for demonstration purposes only.
+
+-module(proper_gen).
+-export([pick/1, pick/2, pick/3, sample/1, sample/3, sampleshrink/1, sampleshrink/2]).
+
+-export([safe_generate/1]).
+-export([generate/1, normal_gen/1, alt_gens/1, clean_instance/1,
+ get_ret_type/1]).
+-export([integer_gen/3, float_gen/3, atom_gen/1, atom_rev/1, binary_gen/1,
+ binary_rev/1, binary_len_gen/1, bitstring_gen/1, bitstring_rev/1,
+ bitstring_len_gen/1, list_gen/2, distlist_gen/3, vector_gen/2,
+ union_gen/1, weighted_union_gen/1, tuple_gen/1, loose_tuple_gen/2,
+ loose_tuple_rev/2, exactly_gen/1, fixed_list_gen/1, function_gen/2,
+ any_gen/1, native_type_gen/2, safe_weighted_union_gen/1,
+ safe_union_gen/1]).
+
+-export_type([instance/0, imm_instance/0, sized_generator/0, nosize_generator/0,
+ generator/0, reverse_gen/0, combine_fun/0, alt_gens/0]).
+
+-include("proper_internal.hrl").
+
+
+%%-----------------------------------------------------------------------------
+%% Types
+%%-----------------------------------------------------------------------------
+
+%% TODO: update imm_instance() when adding more types: be careful when reading
+%% anything that returns it
+%% @private_type
+-type imm_instance() :: proper_types:raw_type()
+ | instance()
+ | {'$used', imm_instance(), imm_instance()}
+ | {'$to_part', imm_instance()}.
+-type instance() :: term().
+%% A value produced by the random instance generator.
+-type error_reason() :: 'arity_limit' | 'cant_generate' | {'typeserver',term()}.
+
+%% @private_type
+-type sized_generator() :: fun((size()) -> imm_instance()).
+%% @private_type
+-type typed_sized_generator() :: {'typed',
+ fun((proper_types:type(),size()) ->
+ imm_instance())}.
+%% @private_type
+-type nosize_generator() :: fun(() -> imm_instance()).
+%% @private_type
+-type typed_nosize_generator() :: {'typed',
+ fun((proper_types:type()) ->
+ imm_instance())}.
+%% @private_type
+-type generator() :: sized_generator()
+ | typed_sized_generator()
+ | nosize_generator()
+ | typed_nosize_generator().
+%% @private_type
+-type plain_reverse_gen() :: fun((instance()) -> imm_instance()).
+%% @private_type
+-type typed_reverse_gen() :: {'typed',
+ fun((proper_types:type(),instance()) ->
+ imm_instance())}.
+%% @private_type
+-type reverse_gen() :: plain_reverse_gen() | typed_reverse_gen().
+%% @private_type
+-type combine_fun() :: fun((instance()) -> imm_instance()).
+%% @private_type
+-type alt_gens() :: fun(() -> [imm_instance()]).
+%% @private_type
+-type fun_seed() :: {non_neg_integer(),non_neg_integer()}.
+
+
+%%-----------------------------------------------------------------------------
+%% Instance generation functions
+%%-----------------------------------------------------------------------------
+
+%% @private
+-spec safe_generate(proper_types:raw_type()) ->
+ {'ok',imm_instance()} | {'error',error_reason()}.
+safe_generate(RawType) ->
+ try generate(RawType) of
+ ImmInstance -> {ok, ImmInstance}
+ catch
+ throw:'$arity_limit' -> {error, arity_limit};
+ throw:'$cant_generate' -> {error, cant_generate};
+ throw:{'$typeserver',SubReason} -> {error, {typeserver,SubReason}}
+ end.
+
+%% @private
+-spec generate(proper_types:raw_type()) -> imm_instance().
+generate(RawType) ->
+ Type = proper_types:cook_outer(RawType),
+ ok = add_parameters(Type),
+ Instance = generate(Type, get('$constraint_tries'), none),
+ ok = remove_parameters(Type),
+ Instance.
+
+-spec add_parameters(proper_types:type()) -> 'ok'.
+add_parameters(Type) ->
+ case proper_types:find_prop(parameters, Type) of
+ {ok, Params} ->
+ OldParams = erlang:get('$parameters'),
+ case OldParams of
+ undefined ->
+ erlang:put('$parameters', Params);
+ _ ->
+ erlang:put('$parameters', Params ++ OldParams)
+ end,
+ ok;
+ _ ->
+ ok
+ end.
+
+-spec remove_parameters(proper_types:type()) -> 'ok'.
+remove_parameters(Type) ->
+ case proper_types:find_prop(parameters, Type) of
+ {ok, Params} ->
+ AllParams = erlang:get('$parameters'),
+ case AllParams of
+ Params->
+ erlang:erase('$parameters');
+ _ ->
+ erlang:put('$parameters', AllParams -- Params)
+ end,
+ ok;
+ _ ->
+ ok
+ end.
+
+-spec generate(proper_types:type(), non_neg_integer(),
+ 'none' | {'ok',imm_instance()}) -> imm_instance().
+generate(_Type, 0, none) ->
+ throw('$cant_generate');
+generate(_Type, 0, {ok,Fallback}) ->
+ Fallback;
+generate(Type, TriesLeft, Fallback) ->
+ ImmInstance =
+ case proper_types:get_prop(kind, Type) of
+ constructed ->
+ PartsType = proper_types:get_prop(parts_type, Type),
+ Combine = proper_types:get_prop(combine, Type),
+ ImmParts = generate(PartsType),
+ Parts = clean_instance(ImmParts),
+ ImmInstance1 = Combine(Parts),
+ %% TODO: We can just generate the internal type: if it's not
+ %% a type, it will turn into an exactly.
+ ImmInstance2 =
+ case proper_types:is_raw_type(ImmInstance1) of
+ true -> generate(ImmInstance1);
+ false -> ImmInstance1
+ end,
+ {'$used',ImmParts,ImmInstance2};
+ _ ->
+ ImmInstance1 = normal_gen(Type),
+ case proper_types:is_raw_type(ImmInstance1) of
+ true -> generate(ImmInstance1);
+ false -> ImmInstance1
+ end
+ end,
+ case proper_types:satisfies_all(clean_instance(ImmInstance), Type) of
+ {_,true} -> ImmInstance;
+ {true,false} -> generate(Type, TriesLeft - 1, {ok,ImmInstance});
+ {false,false} -> generate(Type, TriesLeft - 1, Fallback)
+ end.
+
+%% @equiv pick(Type, 10)
+-spec pick(Type::proper_types:raw_type()) -> {'ok',instance()} | 'error'.
+pick(RawType) ->
+ pick(RawType, 10).
+
+%% @equiv pick(Type, Size, now())
+-spec pick(Type::proper_types:raw_type(), size()) -> {'ok', instance()} | 'error'.
+pick(RawType, Size) ->
+ pick(RawType, Size, now()).
+
+%% @doc Generates a random instance of `Type', of size `Size' with seed `Seed'.
+-spec pick(Type::proper_types:raw_type(), size(), seed()) ->
+ {'ok',instance()} | 'error'.
+pick(RawType, Size, Seed) ->
+ proper:global_state_init_size_seed(Size, Seed),
+ case clean_instance(safe_generate(RawType)) of
+ {ok,Instance} = Result ->
+ Msg = "WARNING: Some garbage has been left in the process registry "
+ "and the code server~n"
+ "to allow for the returned function(s) to run normally.~n"
+ "Please run proper:global_state_erase() when done.~n",
+ case contains_fun(Instance) of
+ true -> io:format(Msg, []);
+ false -> proper:global_state_erase()
+ end,
+ Result;
+ {error,Reason} ->
+ proper:report_error(Reason, fun io:format/2),
+ proper:global_state_erase(),
+ error
+ end.
+
+%% @equiv sample(Type, 10, 20)
+-spec sample(Type::proper_types:raw_type()) -> 'ok'.
+sample(RawType) ->
+ sample(RawType, 10, 20).
+
+%% @doc Generates and prints one random instance of `Type' for each size from
+%% `StartSize' up to `EndSize'.
+-spec sample(Type::proper_types:raw_type(), size(), size()) -> 'ok'.
+sample(RawType, StartSize, EndSize) when StartSize =< EndSize ->
+ Tests = EndSize - StartSize + 1,
+ Prop = ?FORALL(X, RawType, begin io:format("~p~n",[X]), true end),
+ Opts = [quiet,{start_size,StartSize},{max_size,EndSize},{numtests,Tests}],
+ _ = proper:quickcheck(Prop, Opts),
+ ok.
+
+%% @equiv sampleshrink(Type, 10)
+-spec sampleshrink(Type::proper_types:raw_type()) -> 'ok'.
+sampleshrink(RawType) ->
+ sampleshrink(RawType, 10).
+
+%% @doc Generates a random instance of `Type', of size `Size', then shrinks it
+%% as far as it goes. The value produced on each step of the shrinking process
+%% is printed on the screen.
+-spec sampleshrink(Type::proper_types:raw_type(), size()) -> 'ok'.
+sampleshrink(RawType, Size) ->
+ proper:global_state_init_size(Size),
+ Type = proper_types:cook_outer(RawType),
+ case safe_generate(Type) of
+ {ok,ImmInstance} ->
+ Shrunk = keep_shrinking(ImmInstance, [], Type),
+ PrintInst = fun(I) -> io:format("~p~n",[clean_instance(I)]) end,
+ lists:foreach(PrintInst, Shrunk);
+ {error,Reason} ->
+ proper:report_error(Reason, fun io:format/2)
+ end,
+ proper:global_state_erase(),
+ ok.
+
+-spec keep_shrinking(imm_instance(), [imm_instance()], proper_types:type()) ->
+ [imm_instance(),...].
+keep_shrinking(ImmInstance, Acc, Type) ->
+ case proper_shrink:shrink(ImmInstance, Type, init) of
+ {[], _NewState} ->
+ lists:reverse([ImmInstance|Acc]);
+ {[Shrunk|_Rest], _NewState} ->
+ keep_shrinking(Shrunk, [ImmInstance|Acc], Type)
+ end.
+
+-spec contains_fun(term()) -> boolean().
+contains_fun(List) when is_list(List) ->
+ proper_arith:safe_any(fun contains_fun/1, List);
+contains_fun(Tuple) when is_tuple(Tuple) ->
+ contains_fun(tuple_to_list(Tuple));
+contains_fun(Fun) when is_function(Fun) ->
+ true;
+contains_fun(_Term) ->
+ false.
+
+
+%%-----------------------------------------------------------------------------
+%% Utility functions
+%%-----------------------------------------------------------------------------
+
+%% @private
+-spec normal_gen(proper_types:type()) -> imm_instance().
+normal_gen(Type) ->
+ case proper_types:get_prop(generator, Type) of
+ {typed, Gen} ->
+ if
+ is_function(Gen, 1) -> Gen(Type);
+ is_function(Gen, 2) -> Gen(Type, proper:get_size(Type))
+ end;
+ Gen ->
+ if
+ is_function(Gen, 0) -> Gen();
+ is_function(Gen, 1) -> Gen(proper:get_size(Type))
+ end
+ end.
+
+%% @private
+-spec alt_gens(proper_types:type()) -> [imm_instance()].
+alt_gens(Type) ->
+ case proper_types:find_prop(alt_gens, Type) of
+ {ok, AltGens} -> ?FORCE(AltGens);
+ error -> []
+ end.
+
+%% @private
+-spec clean_instance(imm_instance()) -> instance().
+clean_instance({'$used',_ImmParts,ImmInstance}) ->
+ clean_instance(ImmInstance);
+clean_instance({'$to_part',ImmInstance}) ->
+ clean_instance(ImmInstance);
+clean_instance(ImmInstance) ->
+ if
+ is_list(ImmInstance) ->
+ %% CAUTION: this must handle improper lists
+ proper_arith:safe_map(fun clean_instance/1, ImmInstance);
+ is_tuple(ImmInstance) ->
+ proper_arith:tuple_map(fun clean_instance/1, ImmInstance);
+ true ->
+ ImmInstance
+ end.
+
+
+%%-----------------------------------------------------------------------------
+%% Basic type generators
+%%-----------------------------------------------------------------------------
+
+%% @private
+-spec integer_gen(size(), proper_types:extint(), proper_types:extint()) ->
+ integer().
+integer_gen(Size, inf, inf) ->
+ proper_arith:rand_int(Size);
+integer_gen(Size, inf, High) ->
+ High - proper_arith:rand_non_neg_int(Size);
+integer_gen(Size, Low, inf) ->
+ Low + proper_arith:rand_non_neg_int(Size);
+integer_gen(Size, Low, High) ->
+ proper_arith:smart_rand_int(Size, Low, High).
+
+%% @private
+-spec float_gen(size(), proper_types:extnum(), proper_types:extnum()) ->
+ float().
+float_gen(Size, inf, inf) ->
+ proper_arith:rand_float(Size);
+float_gen(Size, inf, High) ->
+ High - proper_arith:rand_non_neg_float(Size);
+float_gen(Size, Low, inf) ->
+ Low + proper_arith:rand_non_neg_float(Size);
+float_gen(_Size, Low, High) ->
+ proper_arith:rand_float(Low, High).
+
+%% @private
+-spec atom_gen(size()) -> proper_types:type().
+%% We make sure we never clash with internal atoms by checking that the first
+%% character is not '$'.
+atom_gen(Size) ->
+ ?LET(Str,
+ ?SUCHTHAT(X,
+ proper_types:resize(Size,
+ proper_types:list(proper_types:byte())),
+ X =:= [] orelse hd(X) =/= $$),
+ list_to_atom(Str)).
+
+%% @private
+-spec atom_rev(atom()) -> imm_instance().
+atom_rev(Atom) ->
+ {'$used', atom_to_list(Atom), Atom}.
+
+%% @private
+-spec binary_gen(size()) -> proper_types:type().
+binary_gen(Size) ->
+ ?LET(Bytes,
+ proper_types:resize(Size,
+ proper_types:list(proper_types:byte())),
+ list_to_binary(Bytes)).
+
+%% @private
+-spec binary_rev(binary()) -> imm_instance().
+binary_rev(Binary) ->
+ {'$used', binary_to_list(Binary), Binary}.
+
+%% @private
+-spec binary_len_gen(length()) -> proper_types:type().
+binary_len_gen(Len) ->
+ ?LET(Bytes,
+ proper_types:vector(Len, proper_types:byte()),
+ list_to_binary(Bytes)).
+
+%% @private
+-spec bitstring_gen(size()) -> proper_types:type().
+bitstring_gen(Size) ->
+ ?LET({BytesHead, NumBits, TailByte},
+ {proper_types:resize(Size,proper_types:binary()),
+ proper_types:range(0,7), proper_types:range(0,127)},
+ <<BytesHead/binary, TailByte:NumBits>>).
+
+%% @private
+-spec bitstring_rev(bitstring()) -> imm_instance().
+bitstring_rev(BitString) ->
+ List = bitstring_to_list(BitString),
+ {BytesList, BitsTail} = lists:splitwith(fun erlang:is_integer/1, List),
+ {NumBits, TailByte} = case BitsTail of
+ [] -> {0, 0};
+ [Bits] -> N = bit_size(Bits),
+ <<Byte:N>> = Bits,
+ {N, Byte}
+ end,
+ {'$used',
+ {{'$used',BytesList,list_to_binary(BytesList)}, NumBits, TailByte},
+ BitString}.
+
+%% @private
+-spec bitstring_len_gen(length()) -> proper_types:type().
+bitstring_len_gen(Len) ->
+ BytesLen = Len div 8,
+ BitsLen = Len rem 8,
+ ?LET({BytesHead, NumBits, TailByte},
+ {proper_types:binary(BytesLen), BitsLen,
+ proper_types:range(0, 1 bsl BitsLen - 1)},
+ <<BytesHead/binary, TailByte:NumBits>>).
+
+%% @private
+-spec list_gen(size(), proper_types:type()) -> [imm_instance()].
+list_gen(Size, ElemType) ->
+ Len = proper_arith:rand_int(0, Size),
+ vector_gen(Len, ElemType).
+
+%% @private
+-spec distlist_gen(size(), sized_generator(), boolean()) -> [imm_instance()].
+distlist_gen(RawSize, Gen, NonEmpty) ->
+ Len = case NonEmpty of
+ true -> proper_arith:rand_int(1, erlang:max(1,RawSize));
+ false -> proper_arith:rand_int(0, RawSize)
+ end,
+ Size = case Len of
+ 1 -> RawSize - 1;
+ _ -> RawSize
+ end,
+ %% TODO: this produces a lot of types: maybe a simple 'div' is sufficient?
+ Sizes = proper_arith:distribute(Size, Len),
+ InnerTypes = [Gen(S) || S <- Sizes],
+ fixed_list_gen(InnerTypes).
+
+%% @private
+-spec vector_gen(length(), proper_types:type()) -> [imm_instance()].
+vector_gen(Len, ElemType) ->
+ vector_gen_tr(Len, ElemType, []).
+
+-spec vector_gen_tr(length(), proper_types:type(), [imm_instance()]) ->
+ [imm_instance()].
+vector_gen_tr(0, _ElemType, AccList) ->
+ AccList;
+vector_gen_tr(Left, ElemType, AccList) ->
+ vector_gen_tr(Left - 1, ElemType, [generate(ElemType) | AccList]).
+
+%% @private
+-spec union_gen([proper_types:type(),...]) -> imm_instance().
+union_gen(Choices) ->
+ {_Choice,Type} = proper_arith:rand_choose(Choices),
+ generate(Type).
+
+%% @private
+-spec weighted_union_gen([{frequency(),proper_types:type()},...]) ->
+ imm_instance().
+weighted_union_gen(FreqChoices) ->
+ {_Choice,Type} = proper_arith:freq_choose(FreqChoices),
+ generate(Type).
+
+%% @private
+-spec safe_union_gen([proper_types:type(),...]) -> imm_instance().
+safe_union_gen(Choices) ->
+ {Choice,Type} = proper_arith:rand_choose(Choices),
+ try generate(Type)
+ catch
+ error:_ ->
+ safe_union_gen(proper_arith:list_remove(Choice, Choices))
+ end.
+
+%% @private
+-spec safe_weighted_union_gen([{frequency(),proper_types:type()},...]) ->
+ imm_instance().
+safe_weighted_union_gen(FreqChoices) ->
+ {Choice,Type} = proper_arith:freq_choose(FreqChoices),
+ try generate(Type)
+ catch
+ error:_ ->
+ safe_weighted_union_gen(proper_arith:list_remove(Choice,
+ FreqChoices))
+ end.
+
+%% @private
+-spec tuple_gen([proper_types:type()]) -> tuple().
+tuple_gen(Fields) ->
+ list_to_tuple(fixed_list_gen(Fields)).
+
+%% @private
+-spec loose_tuple_gen(size(), proper_types:type()) -> proper_types:type().
+loose_tuple_gen(Size, ElemType) ->
+ ?LET(L,
+ proper_types:resize(Size, proper_types:list(ElemType)),
+ list_to_tuple(L)).
+
+%% @private
+-spec loose_tuple_rev(tuple(), proper_types:type()) -> imm_instance().
+loose_tuple_rev(Tuple, ElemType) ->
+ CleanList = tuple_to_list(Tuple),
+ List = case proper_types:find_prop(reverse_gen, ElemType) of
+ {ok,{typed, ReverseGen}} ->
+ [ReverseGen(ElemType,X) || X <- CleanList];
+ {ok,ReverseGen} -> [ReverseGen(X) || X <- CleanList];
+ error -> CleanList
+ end,
+ {'$used', List, Tuple}.
+
+%% @private
+-spec exactly_gen(T) -> T.
+exactly_gen(X) ->
+ X.
+
+%% @private
+-spec fixed_list_gen([proper_types:type()]) -> imm_instance()
+ ; ({[proper_types:type()],proper_types:type()}) ->
+ maybe_improper_list(imm_instance(), imm_instance() | []).
+fixed_list_gen({ProperHead,ImproperTail}) ->
+ [generate(F) || F <- ProperHead] ++ generate(ImproperTail);
+fixed_list_gen(ProperFields) ->
+ [generate(F) || F <- ProperFields].
+
+%% @private
+-spec function_gen(arity(), proper_types:type()) -> function().
+function_gen(Arity, RetType) ->
+ FunSeed = {proper_arith:rand_int(0, ?SEED_RANGE - 1),
+ proper_arith:rand_int(0, ?SEED_RANGE - 1)},
+ create_fun(Arity, RetType, FunSeed).
+
+%% @private
+-spec any_gen(size()) -> imm_instance().
+any_gen(Size) ->
+ case get('$any_type') of
+ undefined -> real_any_gen(Size);
+ {type,AnyType} -> generate(proper_types:resize(Size, AnyType))
+ end.
+
+-spec real_any_gen(size()) -> imm_instance().
+real_any_gen(0) ->
+ SimpleTypes = [proper_types:integer(), proper_types:float(),
+ proper_types:atom()],
+ union_gen(SimpleTypes);
+real_any_gen(Size) ->
+ FreqChoices = [{?ANY_SIMPLE_PROB,simple}, {?ANY_BINARY_PROB,binary},
+ {?ANY_EXPAND_PROB,expand}],
+ case proper_arith:freq_choose(FreqChoices) of
+ {_,simple} ->
+ real_any_gen(0);
+ {_,binary} ->
+ generate(proper_types:resize(Size, proper_types:bitstring()));
+ {_,expand} ->
+ %% TODO: statistics of produced terms?
+ NumElems = proper_arith:rand_int(0, Size - 1),
+ ElemSizes = proper_arith:distribute(Size - 1, NumElems),
+ ElemTypes = [?LAZY(real_any_gen(S)) || S <- ElemSizes],
+ case proper_arith:rand_int(1,2) of
+ 1 -> fixed_list_gen(ElemTypes);
+ 2 -> tuple_gen(ElemTypes)
+ end
+ end.
+
+%% @private
+-spec native_type_gen(mod_name(), string()) -> proper_types:type().
+native_type_gen(Mod, TypeStr) ->
+ case proper_typeserver:translate_type({Mod,TypeStr}) of
+ {ok,Type} -> Type;
+ {error,Reason} -> throw({'$typeserver',Reason})
+ end.
+
+
+%%------------------------------------------------------------------------------
+%% Function-generation functions
+%%------------------------------------------------------------------------------
+
+-spec create_fun(arity(), proper_types:type(), fun_seed()) -> function().
+create_fun(Arity, RetType, FunSeed) ->
+ Handler = fun(Args) -> function_body(Args, RetType, FunSeed) end,
+ Err = fun() -> throw('$arity_limit') end,
+ case Arity of
+ 0 -> fun() -> Handler([]) end;
+ _ -> Err()
+ end.
+
+%% @private
+-spec get_ret_type(function()) -> proper_types:type().
+get_ret_type(Fun) ->
+ {arity,Arity} = erlang:fun_info(Fun, arity),
+ put('$get_ret_type', true),
+ RetType = apply(Fun, lists:duplicate(Arity,dummy)),
+ erase('$get_ret_type'),
+ RetType.
+
+-spec function_body([term()], proper_types:type(), fun_seed()) ->
+ proper_types:type() | instance().
+function_body(Args, RetType, {Seed1,Seed2}) ->
+ case get('$get_ret_type') of
+ true ->
+ RetType;
+ _ ->
+ SavedSeed = get(?SEED_NAME),
+ update_seed({Seed1,Seed2,erlang:phash2(Args,?SEED_RANGE)}),
+ Ret = clean_instance(generate(RetType)),
+ put(?SEED_NAME, SavedSeed),
+ proper_symb:internal_eval(Ret)
+ end.
+
+-ifdef(USE_SFMT).
+update_seed(Seed) ->
+ sfmt:seed(Seed).
+-else.
+update_seed(Seed) ->
+ put(random_seed, Seed).
+-endif.
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_internal.hrl b/lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_internal.hrl
new file mode 100644
index 0000000000..c790d7d4db
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_internal.hrl
@@ -0,0 +1,92 @@
+%%% Copyright 2010-2013 Manolis Papadakis <[email protected]>,
+%%% Eirini Arvaniti <[email protected]>
+%%% and Kostis Sagonas <[email protected]>
+%%%
+%%% This file is part of PropEr.
+%%%
+%%% PropEr is free software: you can redistribute it and/or modify
+%%% it under the terms of the GNU General Public License as published by
+%%% the Free Software Foundation, either version 3 of the License, or
+%%% (at your option) any later version.
+%%%
+%%% PropEr is distributed in the hope that it will be useful,
+%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
+%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+%%% GNU General Public License for more details.
+%%%
+%%% You should have received a copy of the GNU General Public License
+%%% along with PropEr. If not, see <http://www.gnu.org/licenses/>.
+
+%%% @copyright 2010-2013 Manolis Papadakis, Eirini Arvaniti and Kostis Sagonas
+%%% @version {@version}
+%%% @author Manolis Papadakis
+%%% @doc Internal header file: This header is included in all PropEr source
+%%% files.
+
+-include("proper_common.hrl").
+
+
+%%------------------------------------------------------------------------------
+%% Activate strip_types parse transform
+%%------------------------------------------------------------------------------
+
+-ifdef(NO_TYPES).
+-compile({parse_transform, strip_types}).
+-endif.
+
+%%------------------------------------------------------------------------------
+%% Random generator selection
+%%------------------------------------------------------------------------------
+
+-ifdef(USE_SFMT).
+-define(RANDOM_MOD, sfmt).
+-define(SEED_NAME, sfmt_seed).
+-else.
+-define(RANDOM_MOD, random).
+-define(SEED_NAME, random_seed).
+-endif.
+
+%%------------------------------------------------------------------------------
+%% Macros
+%%------------------------------------------------------------------------------
+
+-define(PROPERTY_PREFIX, "prop_").
+
+
+%%------------------------------------------------------------------------------
+%% Constants
+%%------------------------------------------------------------------------------
+
+-define(SEED_RANGE, 4294967296).
+-define(MAX_ARITY, 20).
+-define(MAX_TRIES_FACTOR, 5).
+-define(ANY_SIMPLE_PROB, 3).
+-define(ANY_BINARY_PROB, 1).
+-define(ANY_EXPAND_PROB, 8).
+-define(SMALL_RANGE_THRESHOLD, 16#FFFF).
+
+
+%%------------------------------------------------------------------------------
+%% Common type aliases
+%%------------------------------------------------------------------------------
+
+%% TODO: Perhaps these should be moved inside modules.
+-type mod_name() :: atom().
+-type fun_name() :: atom().
+-type size() :: non_neg_integer().
+-type length() :: non_neg_integer().
+-type position() :: pos_integer().
+-type frequency() :: pos_integer().
+-type seed() :: {non_neg_integer(), non_neg_integer(), non_neg_integer()}.
+
+-type abs_form() :: erl_parse:abstract_form().
+-type abs_expr() :: erl_parse:abstract_expr().
+-type abs_clause() :: erl_parse:abstract_clause().
+
+%% TODO: Replace these with the appropriate types from stdlib.
+-type abs_type() :: term().
+-type abs_rec_field() :: term().
+
+-type loose_tuple(T) :: {} | {T} | {T,T} | {T,T,T} | {T,T,T,T} | {T,T,T,T,T}
+ | {T,T,T,T,T,T} | {T,T,T,T,T,T,T} | {T,T,T,T,T,T,T,T}
+ | {T,T,T,T,T,T,T,T,T} | {T,T,T,T,T,T,T,T,T,T} | tuple().
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_types.erl b/lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_types.erl
new file mode 100644
index 0000000000..fe83a0ba11
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_types.erl
@@ -0,0 +1,1349 @@
+%%% Copyright 2010-2013 Manolis Papadakis <[email protected]>,
+%%% Eirini Arvaniti <[email protected]>
+%%% and Kostis Sagonas <[email protected]>
+%%%
+%%% This file is part of PropEr.
+%%%
+%%% PropEr is free software: you can redistribute it and/or modify
+%%% it under the terms of the GNU General Public License as published by
+%%% the Free Software Foundation, either version 3 of the License, or
+%%% (at your option) any later version.
+%%%
+%%% PropEr is distributed in the hope that it will be useful,
+%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
+%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+%%% GNU General Public License for more details.
+%%%
+%%% You should have received a copy of the GNU General Public License
+%%% along with PropEr. If not, see <http://www.gnu.org/licenses/>.
+
+%%% @copyright 2010-2013 Manolis Papadakis, Eirini Arvaniti and Kostis Sagonas
+%%% @version {@version}
+%%% @author Manolis Papadakis
+
+%%% @doc Type manipulation functions and predefined types.
+%%%
+%%% == Basic types ==
+%%% This module defines all the basic types of the PropEr type system as
+%%% functions. See the <a href="#index">function index</a> for an overview.
+%%%
+%%% Types can be combined in tuples or lists to produce other types. Exact
+%%% values (such as exact numbers, atoms, binaries and strings) can be combined
+%%% with types inside such structures, like in this example of the type of a
+%%% tagged tuple: ``{'result', integer()}''.
+%%%
+%%% When including the PropEr header file, all
+%%% <a href="#index">API functions</a> of this module are automatically
+%%% imported, unless `PROPER_NO_IMPORTS' is defined.
+%%%
+%%% == Customized types ==
+%%% The following operators can be applied to basic types in order to produce
+%%% new ones:
+%%%
+%%% <dl>
+%%% <dt>`?LET(<Xs>, <Xs_type>, <In>)'</dt>
+%%% <dd>To produce an instance of this type, all appearances of the variables
+%%% in `<Xs>' are replaced inside `<In>' by their corresponding values in a
+%%% randomly generated instance of `<Xs_type>'. It's OK for the `<In>' part to
+%%% evaluate to a type - in that case, an instance of the inner type is
+%%% generated recursively.</dd>
+%%% <dt>`?SUCHTHAT(<X>, <Type>, <Condition>)'</dt>
+%%% <dd>This produces a specialization of `<Type>', which only includes those
+%%% members of `<Type>' that satisfy the constraint `<Condition>' - that is,
+%%% those members for which the function `fun(<X>) -> <Condition> end' returns
+%%% `true'. If the constraint is very strict - that is, only a small
+%%% percentage of instances of `<Type>' pass the test - it will take a lot of
+%%% tries for the instance generation subsystem to randomly produce a valid
+%%% instance. This will result in slower testing, and testing may even be
+%%% stopped short, in case the `constraint_tries' limit is reached (see the
+%%% "Options" section in the documentation of the {@link proper} module). If
+%%% this is the case, it would be more appropriate to generate valid instances
+%%% of the specialized type using the `?LET' macro. Also make sure that even
+%%% small instances can satisfy the constraint, since PropEr will only try
+%%% small instances at the start of testing. If this is not possible, you can
+%%% instruct PropEr to start at a larger size, by supplying a suitable value
+%%% for the `start_size' option (see the "Options" section in the
+%%% documentation of the {@link proper} module).</dd>
+%%% <dt>`?SUCHTHATMAYBE(<X>, <Type>, <Condition>)'</dt>
+%%% <dd>Equivalent to the `?SUCHTHAT' macro, but the constraint `<Condition>'
+%%% is considered non-strict: if the `constraint_tries' limit is reached, the
+%%% generator will just return an instance of `<Type>' instead of failing,
+%%% even if that instance doesn't satisfy the constraint.</dd>
+%%% <dt>`?SHRINK(<Generator>, <List_of_alt_gens>)'</dt>
+%%% <dd>This creates a type whose instances are generated by evaluating the
+%%% statement block `<Generator>' (this may evaluate to a type, which will
+%%% then be generated recursively). If an instance of such a type is to be
+%%% shrunk, the generators in `<List_of_alt_gens>' are first run to produce
+%%% hopefully simpler instances of the type. Thus, the generators in the
+%%% second argument should be simpler than the default. The simplest ones
+%%% should be at the front of the list, since those are the generators
+%%% preferred by the shrinking subsystem. Like the main `<Generator>', the
+%%% alternatives may also evaluate to a type, which is generated recursively.
+%%% </dd>
+%%% <dt>`?LETSHRINK(<List_of_variables>, <List_of_types>, <Generator>)'</dt>
+%%% <dd>This is created by combining a `?LET' and a `?SHRINK' macro. Instances
+%%% are generated by applying a randomly generated list of values inside
+%%% `<Generator>' (just like a `?LET', with the added constraint that the
+%%% variables and types must be provided in a list - alternatively,
+%%% `<List_of_types>' may be a list or vector type). When shrinking instances
+%%% of such a type, the sub-instances that were combined to produce it are
+%%% first tried in place of the failing instance.</dd>
+%%% <dt>`?LAZY(<Generator>)'</dt>
+%%% <dd>This construct returns a type whose only purpose is to delay the
+%%% evaluation of `<Generator>' (`<Generator>' can return a type, which will
+%%% be generated recursively). Using this, you can simulate the lazy
+%%% generation of instances:
+%%% ``` stream() -> ?LAZY(frequency([ {1,[]}, {3,[0|stream()]} ])). '''
+%%% The above type produces lists of zeroes with an average length of 3. Note
+%%% that, had we not enclosed the generator in a `?LAZY' macro, the
+%%% evaluation would continue indefinitely, due to Erlang's eager evaluation
+%%% semantics.</dd>
+%%% <dt>`non_empty(<List_or_binary_type>)'</dt>
+%%% <dd>See the documentation for {@link non_empty/1}.</dd>
+%%% <dt>`noshrink(<Type>)'</dt>
+%%% <dd>See the documentation for {@link noshrink/1}.</dd>
+%%% <dt>`default(<Default_value>, <Type>)'</dt>
+%%% <dd>See the documentation for {@link default/2}.</dd>
+%%% <dt>`with_parameter(<Parameter>, <Value>, <Type>)'</dt>
+%%% <dd>See the documentation for {@link with_parameter/3}.</dd>
+%%% <dt>`with_parameters(<Param_value_pairs>, <Type>)'</dt>
+%%% <dd>See the documentation for {@link with_parameters/2}.</dd>
+%%% </dl>
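+%%%
+%%% As a brief illustration (a sketch only), these operators compose freely;
+%%% for example, a hypothetical `even_list()' type for non-empty lists of
+%%% even integers could be written as:
+%%% ``` even_list() -> non_empty(list(?SUCHTHAT(N, integer(), N rem 2 =:= 0))). '''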
+%%%
+%%% == Size manipulation ==
+%%% The following operators are related to the `size' parameter, which controls
+%%% the maximum size of produced instances. The actual size of a produced
+%%% instance is chosen randomly, but can never exceed the value of the `size'
+%%% parameter at the moment of generation. A more accurate definition is the
+%%% following: the maximum instance of `size S' can never be smaller than the
+%%% maximum instance of `size S-1'. The actual size of an instance is measured
+%%% differently for each type: the actual size of a list is its length, while
+%%% the actual size of a tree may be the number of its internal nodes. Some
+%%% types, e.g. unions, have no notion of size, thus their generation is not
+%%% influenced by the value of `size'. The `size' parameter starts at 1 and
+%%% grows automatically during testing.
+%%%
+%%% <dl>
+%%% <dt>`?SIZED(<S>, <Generator>)'</dt>
+%%% <dd>Creates a new type, whose instances are produced by replacing all
+%%% appearances of the `<S>' parameter inside the statement block
+%%% `<Generator>' with the value of the `size' parameter. It's OK for the
+%%% `<Generator>' to return a type - in that case, an instance of the inner
+%%% type is generated recursively.</dd>
+%%% <dt>`resize(<New_size>, <Type>)'</dt>
+%%% <dd>See the documentation for {@link resize/2}.</dd>
+%%% </dl>
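+%%%
+%%% For instance (a minimal sketch), a hypothetical `sized_list()' type whose
+%%% length always equals the current value of `size' could be written as:
+%%% ``` sized_list() -> ?SIZED(S, vector(S, integer())). '''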
+
+-module(proper_types).
+-export([is_inst/2, is_inst/3]).
+
+-export([integer/2, float/2, atom/0, binary/0, binary/1, bitstring/0,
+ bitstring/1, list/1, vector/2, union/1, weighted_union/1, tuple/1,
+ loose_tuple/1, exactly/1, fixed_list/1, function/2, any/0,
+ shrink_list/1, safe_union/1, safe_weighted_union/1]).
+-export([integer/0, non_neg_integer/0, pos_integer/0, neg_integer/0, range/2,
+ float/0, non_neg_float/0, number/0, boolean/0, byte/0, char/0,
+ list/0, tuple/0, string/0, wunion/1, term/0, timeout/0, arity/0]).
+-export([int/0, nat/0, largeint/0, real/0, bool/0, choose/2, elements/1,
+ oneof/1, frequency/1, return/1, default/2, orderedlist/1, function0/1,
+ function1/1, function2/1, function3/1, function4/1,
+ weighted_default/2]).
+-export([resize/2, non_empty/1, noshrink/1]).
+
+-export([cook_outer/1, is_type/1, equal_types/2, is_raw_type/1, to_binary/1,
+ from_binary/1, get_prop/2, find_prop/2, safe_is_instance/2,
+ is_instance/2, unwrap/1, weakly/1, strongly/1, satisfies_all/2,
+ new_type/2, subtype/2]).
+-export([lazy/1, sized/1, bind/3, shrinkwith/2, add_constraint/3,
+ native_type/2, distlist/3, with_parameter/3, with_parameters/2,
+ parameter/1, parameter/2]).
+-export([le/2]).
+
+-export_type([type/0, raw_type/0, extint/0, extnum/0]).
+
+-include("proper_internal.hrl").
+
+
+%%------------------------------------------------------------------------------
+%% Comparison with erl_types
+%%------------------------------------------------------------------------------
+
+%% Missing types
+%% -------------------
+%% will do:
+%% records, maybe_improper_list(T,S), nonempty_improper_list(T,S)
+%% maybe_improper_list(), maybe_improper_list(T), iolist, iodata
+%% don't need:
+%% nonempty_{list,string,maybe_improper_list}
+%% won't do:
+%% pid, port, ref, identifier, none, no_return, module, mfa, node
+%% array, dict, digraph, set, gb_tree, gb_set, queue, tid
+
+%% Missing type information
+%% ------------------------
+%% bin types:
+%% other unit sizes? what about size info?
+%% functions:
+%% generally some fun, unspecified number of arguments but specified
+%% return type
+%% any:
+%% doesn't cover functions and improper lists
+
+
+%%------------------------------------------------------------------------------
+%% Type declaration macros
+%%------------------------------------------------------------------------------
+
+-define(BASIC(PropList), new_type(PropList,basic)).
+-define(WRAPPER(PropList), new_type(PropList,wrapper)).
+-define(CONSTRUCTED(PropList), new_type(PropList,constructed)).
+-define(CONTAINER(PropList), new_type(PropList,container)).
+-define(SUBTYPE(Type,PropList), subtype(PropList,Type)).
+
+
+%%------------------------------------------------------------------------------
+%% Types
+%%------------------------------------------------------------------------------
+
+-type type_kind() :: 'basic' | 'wrapper' | 'constructed' | 'container' | atom().
+-type instance_test() :: fun((proper_gen:imm_instance()) -> boolean())
+ | {'typed',
+ fun((proper_types:type(),
+ proper_gen:imm_instance()) -> boolean())}.
+-type index() :: pos_integer().
+%% @alias
+-type value() :: term().
+%% @private_type
+%% @alias
+-type extint() :: integer() | 'inf'.
+%% @private_type
+%% @alias
+-type extnum() :: number() | 'inf'.
+-type constraint_fun() :: fun((proper_gen:instance()) -> boolean()).
+
+-opaque type() :: {'$type', [type_prop()]}.
+%% A type of the PropEr type system.
+%% @type raw_type(). You can consider this as an equivalent of {@type type()}.
+-type raw_type() :: type() | [raw_type()] | loose_tuple(raw_type()) | term().
+-type type_prop_name() :: 'kind' | 'generator' | 'reverse_gen' | 'parts_type'
+ | 'combine' | 'alt_gens' | 'shrink_to_parts'
+ | 'size_transform' | 'is_instance' | 'shrinkers'
+ | 'noshrink' | 'internal_type' | 'internal_types'
+ | 'get_length' | 'split' | 'join' | 'get_indices'
+ | 'remove' | 'retrieve' | 'update' | 'constraints'
+ | 'parameters' | 'env' | 'subenv'.
+
+-type type_prop_value() :: term().
+-type type_prop() ::
+ {'kind', type_kind()}
+ | {'generator', proper_gen:generator()}
+ | {'reverse_gen', proper_gen:reverse_gen()}
+ | {'parts_type', type()}
+ | {'combine', proper_gen:combine_fun()}
+ | {'alt_gens', proper_gen:alt_gens()}
+ | {'shrink_to_parts', boolean()}
+ | {'size_transform', fun((size()) -> size())}
+ | {'is_instance', instance_test()}
+ | {'shrinkers', [proper_shrink:shrinker()]}
+ | {'noshrink', boolean()}
+ | {'internal_type', raw_type()}
+ | {'internal_types', tuple() | maybe_improper_list(type(),type() | [])}
+ %% The items returned by 'remove' must be of this type.
+ | {'get_length', fun((proper_gen:imm_instance()) -> length())}
+ %% If this is a container type, this should return the number of elements
+ %% it contains.
+ | {'split', fun((proper_gen:imm_instance()) -> [proper_gen:imm_instance()])
+ | fun((length(),proper_gen:imm_instance()) ->
+ {proper_gen:imm_instance(),proper_gen:imm_instance()})}
+ %% If present, the appropriate form depends on whether get_length is
+ %% defined: if get_length is undefined, this must be in the one-argument
+ %% form (e.g. a tree should be split into its subtrees), else it must be
+ %% in the two-argument form (e.g. a list should be split in two at the
+ %% index provided).
+ | {'join', fun((proper_gen:imm_instance(),proper_gen:imm_instance()) ->
+ proper_gen:imm_instance())}
+ | {'get_indices', fun((proper_types:type(),
+ proper_gen:imm_instance()) -> [index()])}
+ %% If this is a container type, this should return a list of indices we
+ %% can use to remove or insert elements from the given instance.
+ | {'remove', fun((index(),proper_gen:imm_instance()) ->
+ proper_gen:imm_instance())}
+ | {'retrieve', fun((index(), proper_gen:imm_instance() | tuple()
+ | maybe_improper_list(type(),type() | [])) ->
+ value() | type())}
+ | {'update', fun((index(),value(),proper_gen:imm_instance()) ->
+ proper_gen:imm_instance())}
+ | {'constraints', [{constraint_fun(), boolean()}]}
+ %% A list of constraints on instances of this type: each constraint is a
+ %% tuple of a fun that must return 'true' for each valid instance and a
+ %% boolean field that specifies whether the condition is strict.
+ | {'parameters', [{atom(),value()}]}
+ | {'env', term()}
+ | {'subenv', term()}.
+
+
+%%------------------------------------------------------------------------------
+%% Type manipulation functions
+%%------------------------------------------------------------------------------
+
+%% TODO: We shouldn't need the fully qualified type name in the range of these
+%% functions.
+
+%% @private
+%% TODO: just cook/1 ?
+-spec cook_outer(raw_type()) -> proper_types:type().
+cook_outer(Type = {'$type',_Props}) ->
+ Type;
+cook_outer(RawType) ->
+ if
+ is_tuple(RawType) -> tuple(tuple_to_list(RawType));
+ %% CAUTION: this must handle improper lists
+ is_list(RawType) -> fixed_list(RawType);
+ %% default case (covers integers, floats, atoms, binaries, ...):
+ true -> exactly(RawType)
+ end.
+
+%% @private
+-spec is_type(term()) -> boolean().
+is_type({'$type',_Props}) ->
+ true;
+is_type(_) ->
+ false.
+
+%% @private
+-spec equal_types(proper_types:type(), proper_types:type()) -> boolean().
+equal_types(SameType, SameType) ->
+ true;
+equal_types(_, _) ->
+ false.
+
+%% @private
+-spec is_raw_type(term()) -> boolean().
+is_raw_type({'$type',_TypeProps}) ->
+ true;
+is_raw_type(X) ->
+ if
+ is_tuple(X) -> is_raw_type_list(tuple_to_list(X));
+ is_list(X) -> is_raw_type_list(X);
+ true -> false
+ end.
+
+-spec is_raw_type_list(maybe_improper_list()) -> boolean().
+%% CAUTION: this must handle improper lists
+is_raw_type_list(List) ->
+ proper_arith:safe_any(fun is_raw_type/1, List).
+
+%% @private
+-spec to_binary(proper_types:type()) -> binary().
+to_binary(Type) ->
+ term_to_binary(Type).
+
+%% @private
+%% TODO: restore: -spec from_binary(binary()) -> proper_types:type().
+from_binary(Binary) ->
+ binary_to_term(Binary).
+
+-spec type_from_list([type_prop()]) -> proper_types:type().
+type_from_list(KeyValueList) ->
+ {'$type',KeyValueList}.
+
+-spec add_prop(type_prop_name(), type_prop_value(), proper_types:type()) ->
+ proper_types:type().
+add_prop(PropName, Value, {'$type',Props}) ->
+ {'$type',lists:keystore(PropName, 1, Props, {PropName, Value})}.
+
+-spec add_props([type_prop()], proper_types:type()) -> proper_types:type().
+add_props(PropList, {'$type',OldProps}) ->
+ {'$type', lists:foldl(fun({N,_}=NV,Acc) ->
+ lists:keystore(N, 1, Acc, NV)
+ end, OldProps, PropList)}.
+
+-spec append_to_prop(type_prop_name(), type_prop_value(),
+ proper_types:type()) -> proper_types:type().
+append_to_prop(PropName, Value, {'$type',Props}) ->
+ Val = case lists:keyfind(PropName, 1, Props) of
+ {PropName, V} ->
+ V;
+ _ ->
+ []
+ end,
+ {'$type', lists:keystore(PropName, 1, Props,
+ {PropName, lists:reverse([Value|Val])})}.
+
+-spec append_list_to_prop(type_prop_name(), [type_prop_value()],
+ proper_types:type()) -> proper_types:type().
+append_list_to_prop(PropName, List, {'$type',Props}) ->
+ {PropName, Val} = lists:keyfind(PropName, 1, Props),
+ {'$type', lists:keystore(PropName, 1, Props, {PropName, Val++List})}.
+
+%% @private
+-spec get_prop(type_prop_name(), proper_types:type()) -> type_prop_value().
+get_prop(PropName, {'$type',Props}) ->
+ {_PropName, Val} = lists:keyfind(PropName, 1, Props),
+ Val.
+
+%% @private
+-spec find_prop(type_prop_name(), proper_types:type()) ->
+ {'ok',type_prop_value()} | 'error'.
+find_prop(PropName, {'$type',Props}) ->
+ case lists:keyfind(PropName, 1, Props) of
+ {PropName, Value} ->
+ {ok, Value};
+ _ ->
+ error
+ end.
+
+%% @private
+-spec new_type([type_prop()], type_kind()) -> proper_types:type().
+new_type(PropList, Kind) ->
+ Type = type_from_list(PropList),
+ add_prop(kind, Kind, Type).
+
+%% @private
+-spec subtype([type_prop()], proper_types:type()) -> proper_types:type().
+%% TODO: should the 'is_instance' function etc. be reset for subtypes?
+subtype(PropList, Type) ->
+ add_props(PropList, Type).
+
+%% @private
+-spec is_inst(proper_gen:instance(), raw_type()) ->
+ boolean() | {'error',{'typeserver',term()}}.
+is_inst(Instance, RawType) ->
+ is_inst(Instance, RawType, 10).
+
+%% @private
+-spec is_inst(proper_gen:instance(), raw_type(), size()) ->
+ boolean() | {'error',{'typeserver',term()}}.
+is_inst(Instance, RawType, Size) ->
+ proper:global_state_init_size(Size),
+ Result = safe_is_instance(Instance, RawType),
+ proper:global_state_erase(),
+ Result.
+
+%% @private
+-spec safe_is_instance(proper_gen:imm_instance(), raw_type()) ->
+ boolean() | {'error',{'typeserver',term()}}.
+safe_is_instance(ImmInstance, RawType) ->
+ try is_instance(ImmInstance, RawType) catch
+ throw:{'$typeserver',SubReason} -> {error, {typeserver,SubReason}}
+ end.
+
+%% @private
+-spec is_instance(proper_gen:imm_instance(), raw_type()) -> boolean().
+%% TODO: If the second argument is not a type, let it pass (don't even check for
+%% term equality?) - if it's a raw type, don't cook it, instead recurse
+%% into it.
+is_instance(ImmInstance, RawType) ->
+ CleanInstance = proper_gen:clean_instance(ImmInstance),
+ Type = cook_outer(RawType),
+ (case get_prop(kind, Type) of
+ wrapper -> wrapper_test(ImmInstance, Type);
+ constructed -> constructed_test(ImmInstance, Type);
+ _ -> false
+ end
+ orelse
+ case find_prop(is_instance, Type) of
+ {ok,{typed, IsInstance}} -> IsInstance(Type, ImmInstance);
+ {ok,IsInstance} -> IsInstance(ImmInstance);
+ error -> false
+ end)
+ andalso weakly(satisfies_all(CleanInstance, Type)).
+
+-spec wrapper_test(proper_gen:imm_instance(), proper_types:type()) -> boolean().
+wrapper_test(ImmInstance, Type) ->
+ %% TODO: check if it's actually a raw type that's returned?
+ lists:any(fun(T) -> is_instance(ImmInstance, T) end, unwrap(Type)).
+
+%% @private
+%% TODO: restore:-spec unwrap(proper_types:type()) -> [proper_types:type(),...].
+%% TODO: check if it's actually a raw type that's returned?
+unwrap(Type) ->
+ RawInnerTypes = proper_gen:alt_gens(Type) ++ [proper_gen:normal_gen(Type)],
+ [cook_outer(T) || T <- RawInnerTypes].
+
+-spec constructed_test(proper_gen:imm_instance(), proper_types:type()) ->
+ boolean().
+constructed_test({'$used',ImmParts,ImmInstance}, Type) ->
+ PartsType = get_prop(parts_type, Type),
+ Combine = get_prop(combine, Type),
+ is_instance(ImmParts, PartsType) andalso
+ begin
+ %% TODO: check if it's actually a raw type that's returned?
+ %% TODO: move construction code to proper_gen
+ %% TODO: non-type => should we check for strict term equality?
+ RawInnerType = Combine(proper_gen:clean_instance(ImmParts)),
+ is_instance(ImmInstance, RawInnerType)
+ end;
+constructed_test({'$to_part',ImmInstance}, Type) ->
+ PartsType = get_prop(parts_type, Type),
+ get_prop(shrink_to_parts, Type) =:= true andalso
+ %% TODO: we reject non-container types
+ get_prop(kind, PartsType) =:= container andalso
+ case {find_prop(internal_type,PartsType),
+ find_prop(internal_types,PartsType)} of
+ {{ok,EachPartType},error} ->
+ %% The parts are in a list or a vector.
+ is_instance(ImmInstance, EachPartType);
+ {error,{ok,PartTypesList}} ->
+ %% The parts are in a fixed list.
+ %% TODO: It should always be a proper list.
+ lists:any(fun(T) -> is_instance(ImmInstance,T) end, PartTypesList)
+ end;
+constructed_test(_CleanInstance, _Type) ->
+ %% TODO: can we do anything better?
+ false.
+
+%% @private
+-spec weakly({boolean(),boolean()}) -> boolean().
+weakly({B1,_B2}) -> B1.
+
+%% @private
+-spec strongly({boolean(),boolean()}) -> boolean().
+strongly({_B1,B2}) -> B2.
+
+-spec satisfies(proper_gen:instance(), {constraint_fun(),boolean()})
+ -> {boolean(),boolean()}.
+satisfies(Instance, {Test,false}) ->
+ {true,Test(Instance)};
+satisfies(Instance, {Test,true}) ->
+ Result = Test(Instance),
+ {Result,Result}.
+
+%% @private
+-spec satisfies_all(proper_gen:instance(), proper_types:type()) ->
+ {boolean(),boolean()}.
+satisfies_all(Instance, Type) ->
+ case find_prop(constraints, Type) of
+ {ok, Constraints} ->
+ L = [satisfies(Instance, C) || C <- Constraints],
+ {L1,L2} = lists:unzip(L),
+ {lists:all(fun(B) -> B end, L1), lists:all(fun(B) -> B end, L2)};
+ error ->
+ {true,true}
+ end.
+
+
+%%------------------------------------------------------------------------------
+%% Type definition functions
+%%------------------------------------------------------------------------------
+
+%% @private
+-spec lazy(proper_gen:nosize_generator()) -> proper_types:type().
+lazy(Gen) ->
+ ?WRAPPER([
+ {generator, Gen}
+ ]).
+
+%% @private
+-spec sized(proper_gen:sized_generator()) -> proper_types:type().
+sized(Gen) ->
+ ?WRAPPER([
+ {generator, Gen}
+ ]).
+
+%% @private
+-spec bind(raw_type(), proper_gen:combine_fun(), boolean()) ->
+ proper_types:type().
+bind(RawPartsType, Combine, ShrinkToParts) ->
+ PartsType = cook_outer(RawPartsType),
+ ?CONSTRUCTED([
+ {parts_type, PartsType},
+ {combine, Combine},
+ {shrink_to_parts, ShrinkToParts}
+ ]).
+
+%% @private
+-spec shrinkwith(proper_gen:nosize_generator(), proper_gen:alt_gens()) ->
+ proper_types:type().
+shrinkwith(Gen, DelayedAltGens) ->
+ ?WRAPPER([
+ {generator, Gen},
+ {alt_gens, DelayedAltGens}
+ ]).
+
+%% @private
+-spec add_constraint(raw_type(), constraint_fun(), boolean()) ->
+ proper_types:type().
+add_constraint(RawType, Condition, IsStrict) ->
+ Type = cook_outer(RawType),
+ append_to_prop(constraints, {Condition,IsStrict}, Type).
+
+%% @private
+-spec native_type(mod_name(), string()) -> proper_types:type().
+native_type(Mod, TypeStr) ->
+ ?WRAPPER([
+ {generator, fun() -> proper_gen:native_type_gen(Mod,TypeStr) end}
+ ]).
+
+
+%%------------------------------------------------------------------------------
+%% Basic types
+%%------------------------------------------------------------------------------
+
+%% @doc All integers between `Low' and `High', bounds included.
+%% `Low' and `High' must be Erlang expressions that evaluate to integers, with
+%% `Low =< High'. Additionally, `Low' and `High' may have the value `inf', in
+%% which case they represent minus infinity and plus infinity respectively.
+%% Instances shrink towards 0 if `Low =< 0 =< High', or towards the bound with
+%% the smallest absolute value otherwise.
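+%% For example (a sketch), a hypothetical `percent()' type could be written as
+%% ``` percent() -> integer(0, 100). '''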
+-spec integer(extint(), extint()) -> proper_types:type().
+integer(Low, High) ->
+ ?BASIC([
+ {env, {Low, High}},
+ {generator, {typed, fun integer_gen/2}},
+ {is_instance, {typed, fun integer_is_instance/2}},
+ {shrinkers, [fun number_shrinker/3]}
+ ]).
+
+integer_gen(Type, Size) ->
+ {Low, High} = get_prop(env, Type),
+ proper_gen:integer_gen(Size, Low, High).
+
+integer_is_instance(Type, X) ->
+ {Low, High} = get_prop(env, Type),
+ is_integer(X) andalso le(Low, X) andalso le(X, High).
+
+number_shrinker(X, Type, S) ->
+ {Low, High} = get_prop(env, Type),
+ proper_shrink:number_shrinker(X, Low, High, S).
+
+%% @doc All floats between `Low' and `High', bounds included.
+%% `Low' and `High' must be Erlang expressions that evaluate to floats, with
+%% `Low =< High'. Additionally, `Low' and `High' may have the value `inf', in
+%% which case they represent minus infinity and plus infinity respectively.
+%% Instances shrink towards 0.0 if `Low =< 0.0 =< High', or towards the bound
+%% with the smallest absolute value otherwise.
+-spec float(extnum(), extnum()) -> proper_types:type().
+float(Low, High) ->
+ ?BASIC([
+ {env, {Low, High}},
+ {generator, {typed, fun float_gen/2}},
+ {is_instance, {typed, fun float_is_instance/2}},
+ {shrinkers, [fun number_shrinker/3]}
+ ]).
+
+float_gen(Type, Size) ->
+ {Low, High} = get_prop(env, Type),
+ proper_gen:float_gen(Size, Low, High).
+
+float_is_instance(Type, X) ->
+ {Low, High} = get_prop(env, Type),
+ is_float(X) andalso le(Low, X) andalso le(X, High).
+
+%% @private
+-spec le(extnum(), extnum()) -> boolean().
+le(inf, _B) -> true;
+le(_A, inf) -> true;
+le(A, B) -> A =< B.
+
+%% @doc All atoms. All atoms used internally by PropEr start with a '`$'', so
+%% such atoms will never be produced as instances of this type. You should also
+%% refrain from using such atoms in your code, to avoid a potential clash.
+%% Instances shrink towards the empty atom, ''.
+-spec atom() -> proper_types:type().
+atom() ->
+ ?WRAPPER([
+ {generator, fun proper_gen:atom_gen/1},
+ {reverse_gen, fun proper_gen:atom_rev/1},
+ {size_transform, fun(Size) -> erlang:min(Size,255) end},
+ {is_instance, fun atom_is_instance/1}
+ ]).
+
+atom_is_instance(X) ->
+ is_atom(X)
+ %% We return false for atoms starting with '$', since these are
+ %% atoms used internally and never produced by the atom generator.
+ andalso (X =:= '' orelse hd(atom_to_list(X)) =/= $$).
+
+%% @doc All binaries. Instances shrink towards the empty binary, `<<>>'.
+-spec binary() -> proper_types:type().
+binary() ->
+ ?WRAPPER([
+ {generator, fun proper_gen:binary_gen/1},
+ {reverse_gen, fun proper_gen:binary_rev/1},
+ {is_instance, fun erlang:is_binary/1}
+ ]).
+
+%% @doc All binaries with a byte size of `Len'.
+%% `Len' must be an Erlang expression that evaluates to a non-negative integer.
+%% Instances shrink towards binaries of zeroes.
+-spec binary(length()) -> proper_types:type().
+binary(Len) ->
+ ?WRAPPER([
+ {env, Len},
+ {generator, {typed, fun binary_len_gen/1}},
+ {reverse_gen, fun proper_gen:binary_rev/1},
+ {is_instance, {typed, fun binary_len_is_instance/2}}
+ ]).
+
+binary_len_gen(Type) ->
+ Len = get_prop(env, Type),
+ proper_gen:binary_len_gen(Len).
+
+binary_len_is_instance(Type, X) ->
+ Len = get_prop(env, Type),
+ is_binary(X) andalso byte_size(X) =:= Len.
+
+%% @doc All bitstrings. Instances shrink towards the empty bitstring, `<<>>'.
+-spec bitstring() -> proper_types:type().
+bitstring() ->
+ ?WRAPPER([
+ {generator, fun proper_gen:bitstring_gen/1},
+ {reverse_gen, fun proper_gen:bitstring_rev/1},
+ {is_instance, fun erlang:is_bitstring/1}
+ ]).
+
+%% @doc All bitstrings with a bit size of `Len'.
+%% `Len' must be an Erlang expression that evaluates to a non-negative integer.
+%% Instances shrink towards bitstrings of zeroes.
+-spec bitstring(length()) -> proper_types:type().
+bitstring(Len) ->
+ ?WRAPPER([
+ {env, Len},
+ {generator, {typed, fun bitstring_len_gen/1}},
+ {reverse_gen, fun proper_gen:bitstring_rev/1},
+ {is_instance, {typed, fun bitstring_len_is_instance/2}}
+ ]).
+
+bitstring_len_gen(Type) ->
+ Len = get_prop(env, Type),
+ proper_gen:bitstring_len_gen(Len).
+
+bitstring_len_is_instance(Type, X) ->
+ Len = get_prop(env, Type),
+ is_bitstring(X) andalso bit_size(X) =:= Len.
+
+%% @doc All lists containing elements of type `ElemType'.
+%% Instances shrink towards the empty list, `[]'.
+-spec list(ElemType::raw_type()) -> proper_types:type().
+% TODO: subtyping would be useful here (list, vector, fixed_list)
+list(RawElemType) ->
+ ElemType = cook_outer(RawElemType),
+ ?CONTAINER([
+ {generator, {typed, fun list_gen/2}},
+ {is_instance, {typed, fun list_is_instance/2}},
+ {internal_type, ElemType},
+ {get_length, fun erlang:length/1},
+ {split, fun lists:split/2},
+ {join, fun lists:append/2},
+ {get_indices, fun list_get_indices/2},
+ {remove, fun proper_arith:list_remove/2},
+ {retrieve, fun lists:nth/2},
+ {update, fun proper_arith:list_update/3}
+ ]).
+
+list_gen(Type, Size) ->
+ ElemType = get_prop(internal_type, Type),
+ proper_gen:list_gen(Size, ElemType).
+
+list_is_instance(Type, X) ->
+ ElemType = get_prop(internal_type, Type),
+ list_test(X, ElemType).
+
+%% @doc A type that generates exactly the list `List'. Instances shrink towards
+%% shorter sublists of the original list.
+-spec shrink_list([term()]) -> proper_types:type().
+shrink_list(List) ->
+ ?CONTAINER([
+ {env, List},
+ {generator, {typed, fun shrink_list_gen/1}},
+ {is_instance, {typed, fun shrink_list_is_instance/2}},
+ {get_length, fun erlang:length/1},
+ {split, fun lists:split/2},
+ {join, fun lists:append/2},
+ {get_indices, fun list_get_indices/2},
+ {remove, fun proper_arith:list_remove/2}
+ ]).
+
+shrink_list_gen(Type) ->
+ get_prop(env, Type).
+
+shrink_list_is_instance(Type, X) ->
+ List = get_prop(env, Type),
+ is_sublist(X, List).
+
+-spec is_sublist([term()], [term()]) -> boolean().
+is_sublist([], _) -> true;
+is_sublist(_, []) -> false;
+is_sublist([H|T1], [H|T2]) -> is_sublist(T1, T2);
+is_sublist(Slice, [_|T2]) -> is_sublist(Slice, T2).
+
+-spec list_test(proper_gen:imm_instance(), proper_types:type()) -> boolean().
+list_test(X, ElemType) ->
+ is_list(X) andalso lists:all(fun(E) -> is_instance(E, ElemType) end, X).
+
+%% @private
+-spec list_get_indices(proper_gen:generator(), list()) -> [position()].
+list_get_indices(_, List) ->
+ lists:seq(1, length(List)).
+
+%% @private
+%% This assumes that:
+%% - instances of size S are always valid instances of size >S
+%% - any recursive calls inside Gen are lazy
+-spec distlist(size(), proper_gen:sized_generator(), boolean()) ->
+ proper_types:type().
+distlist(Size, Gen, NonEmpty) ->
+ ParentType = case NonEmpty of
+ true -> non_empty(list(Gen(Size)));
+ false -> list(Gen(Size))
+ end,
+ ?SUBTYPE(ParentType, [
+ {subenv, {Size, Gen, NonEmpty}},
+ {generator, {typed, fun distlist_gen/1}}
+ ]).
+
+distlist_gen(Type) ->
+ {Size, Gen, NonEmpty} = get_prop(subenv, Type),
+ proper_gen:distlist_gen(Size, Gen, NonEmpty).
+
+%% @doc All lists of length `Len' containing elements of type `ElemType'.
+%% `Len' must be an Erlang expression that evaluates to a non-negative integer.
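+%% For example (a sketch), a hypothetical `rgb()' triple could be written as
+%% ``` rgb() -> vector(3, integer(0, 255)). '''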
+-spec vector(length(), ElemType::raw_type()) -> proper_types:type().
+vector(Len, RawElemType) ->
+ ElemType = cook_outer(RawElemType),
+ ?CONTAINER([
+ {env, Len},
+ {generator, {typed, fun vector_gen/1}},
+ {is_instance, {typed, fun vector_is_instance/2}},
+ {internal_type, ElemType},
+ {get_indices, fun vector_get_indices/2},
+ {retrieve, fun lists:nth/2},
+ {update, fun proper_arith:list_update/3}
+ ]).
+
+vector_gen(Type) ->
+ Len = get_prop(env, Type),
+ ElemType = get_prop(internal_type, Type),
+ proper_gen:vector_gen(Len, ElemType).
+
+vector_is_instance(Type, X) ->
+ Len = get_prop(env, Type),
+ ElemType = get_prop(internal_type, Type),
+ is_list(X)
+ andalso length(X) =:= Len
+ andalso lists:all(fun(E) -> is_instance(E, ElemType) end, X).
+
+vector_get_indices(Type, _X) ->
+ lists:seq(1, get_prop(env, Type)).
+
+%% @doc The union of all types in `ListOfTypes'. `ListOfTypes' can't be empty.
+%% The random instance generator is equally likely to choose any one of the
+%% types in `ListOfTypes'. The shrinking subsystem will always try to shrink an
+%% instance of a type union to an instance of the first type in `ListOfTypes',
+%% thus you should write the simplest case first.
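+%% For example (a sketch), a hypothetical `level()' type could be written as
+%% ``` level() -> union(['debug', 'info', 'warning', 'error']). '''
+%% Instances of such a type shrink towards `debug', the first listed choice.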
+-spec union(ListOfTypes::[raw_type(),...]) -> proper_types:type().
+union(RawChoices) ->
+ Choices = [cook_outer(C) || C <- RawChoices],
+ ?BASIC([
+ {env, Choices},
+ {generator, {typed, fun union_gen/1}},
+ {is_instance, {typed, fun union_is_instance/2}},
+ {shrinkers, [fun union_shrinker_1/3, fun union_shrinker_2/3]}
+ ]).
+
+union_gen(Type) ->
+ Choices = get_prop(env,Type),
+ proper_gen:union_gen(Choices).
+
+union_is_instance(Type, X) ->
+ Choices = get_prop(env, Type),
+ lists:any(fun(C) -> is_instance(X, C) end, Choices).
+
+union_shrinker_1(X, Type, S) ->
+ Choices = get_prop(env, Type),
+ proper_shrink:union_first_choice_shrinker(X, Choices, S).
+
+union_shrinker_2(X, Type, S) ->
+ Choices = get_prop(env, Type),
+ proper_shrink:union_recursive_shrinker(X, Choices, S).
+
+%% @doc A specialization of {@link union/1}, where each type in `ListOfTypes' is
+%% assigned a frequency. Frequencies must be Erlang expressions that evaluate to
+%% positive integers. Types with larger frequencies are more likely to be chosen
+%% by the random instance generator. The shrinking subsystem will ignore the
+%% frequencies and try to shrink towards the first type in the list.
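+%% For example (a sketch), the following hypothetical type makes `ok' three
+%% times as likely as `error':
+%% ``` result() -> weighted_union([{3, 'ok'}, {1, 'error'}]). '''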
+-spec weighted_union(ListOfTypes::[{frequency(),raw_type()},...]) ->
+ proper_types:type().
+weighted_union(RawFreqChoices) ->
+ CookFreqType = fun({Freq,RawType}) -> {Freq,cook_outer(RawType)} end,
+ FreqChoices = lists:map(CookFreqType, RawFreqChoices),
+ Choices = [T || {_F,T} <- FreqChoices],
+ ?SUBTYPE(union(Choices), [
+ {subenv, FreqChoices},
+ {generator, {typed, fun weighted_union_gen/1}}
+ ]).
+
+weighted_union_gen(Gen) ->
+ FreqChoices = get_prop(subenv, Gen),
+ proper_gen:weighted_union_gen(FreqChoices).
+
+%% @private
+-spec safe_union([raw_type(),...]) -> proper_types:type().
+safe_union(RawChoices) ->
+ Choices = [cook_outer(C) || C <- RawChoices],
+ subtype(
+ [{subenv, Choices},
+ {generator, {typed, fun safe_union_gen/1}}],
+ union(Choices)).
+
+safe_union_gen(Type) ->
+ Choices = get_prop(subenv, Type),
+ proper_gen:safe_union_gen(Choices).
+
+%% @private
+-spec safe_weighted_union([{frequency(),raw_type()},...]) ->
+ proper_types:type().
+safe_weighted_union(RawFreqChoices) ->
+ CookFreqType = fun({Freq,RawType}) ->
+ {Freq,cook_outer(RawType)} end,
+ FreqChoices = lists:map(CookFreqType, RawFreqChoices),
+ Choices = [T || {_F,T} <- FreqChoices],
+ subtype([{subenv, FreqChoices},
+ {generator, {typed, fun safe_weighted_union_gen/1}}],
+ union(Choices)).
+
+safe_weighted_union_gen(Type) ->
+ FreqChoices = get_prop(subenv, Type),
+ proper_gen:safe_weighted_union_gen(FreqChoices).
+
+%% @doc All tuples whose i-th element is an instance of the type at index i of
+%% `ListOfTypes'. Also written simply as a tuple of types.
+-spec tuple(ListOfTypes::[raw_type()]) -> proper_types:type().
+tuple(RawFields) ->
+ Fields = [cook_outer(F) || F <- RawFields],
+ ?CONTAINER([
+ {env, Fields},
+ {generator, {typed, fun tuple_gen/1}},
+ {is_instance, {typed, fun tuple_is_instance/2}},
+ {internal_types, list_to_tuple(Fields)},
+ {get_indices, fun tuple_get_indices/2},
+ {retrieve, fun erlang:element/2},
+ {update, fun tuple_update/3}
+ ]).
+
+tuple_gen(Type) ->
+ Fields = get_prop(env, Type),
+ proper_gen:tuple_gen(Fields).
+
+tuple_is_instance(Type, X) ->
+ Fields = get_prop(env, Type),
+ is_tuple(X) andalso fixed_list_test(tuple_to_list(X), Fields).
+
+tuple_get_indices(Type, _X) ->
+ lists:seq(1, length(get_prop(env, Type))).
+
+-spec tuple_update(index(), value(), tuple()) -> tuple().
+tuple_update(Index, NewElem, Tuple) ->
+ setelement(Index, Tuple, NewElem).
+
+%% @doc Tuples whose elements are all of type `ElemType'.
+%% Instances shrink towards the 0-size tuple, `{}'.
+-spec loose_tuple(ElemType::raw_type()) -> proper_types:type().
+loose_tuple(RawElemType) ->
+ ElemType = cook_outer(RawElemType),
+ ?WRAPPER([
+ {env, ElemType},
+ {generator, {typed, fun loose_tuple_gen/2}},
+ {reverse_gen, {typed, fun loose_tuple_rev/2}},
+ {is_instance, {typed, fun loose_tuple_is_instance/2}}
+ ]).
+
+loose_tuple_gen(Type, Size) ->
+ ElemType = get_prop(env, Type),
+ proper_gen:loose_tuple_gen(Size, ElemType).
+
+loose_tuple_rev(Type, X) ->
+ ElemType = get_prop(env, Type),
+ proper_gen:loose_tuple_rev(X, ElemType).
+
+loose_tuple_is_instance(Type, X) ->
+ ElemType = get_prop(env, Type),
+ is_tuple(X) andalso list_test(tuple_to_list(X), ElemType).
+
+%% @doc Singleton type consisting only of `E'. `E' must be an evaluated term.
+%% Also written simply as `E'.
+-spec exactly(term()) -> proper_types:type().
+exactly(E) ->
+ ?BASIC([
+ {env, E},
+ {generator, {typed, fun exactly_gen/1}},
+ {is_instance, {typed, fun exactly_is_instance/2}}
+ ]).
+
+exactly_gen(Type) ->
+ E = get_prop(env, Type),
+ proper_gen:exactly_gen(E).
+
+exactly_is_instance(Type, X) ->
+ E = get_prop(env, Type),
+ X =:= E.
+
+%% @doc All lists whose i-th element is an instance of the type at index i of
+%% `ListOfTypes'. Also written simply as a list of types.
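+%% For example (a sketch), `fixed_list([atom(), integer()])' describes
+%% two-element lists whose first element is an atom and whose second is an
+%% integer; it can also be written simply as `[atom(), integer()]'.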
+-spec fixed_list(ListOfTypes::maybe_improper_list(raw_type(),raw_type()|[])) ->
+ proper_types:type().
+fixed_list(MaybeImproperRawFields) ->
+ %% CAUTION: must handle improper lists
+ {Fields, Internal, Len, Retrieve, Update} =
+ case proper_arith:cut_improper_tail(MaybeImproperRawFields) of
+ % TODO: have cut_improper_tail return the length and use it in test?
+ {ProperRawHead, ImproperRawTail} ->
+ HeadLen = length(ProperRawHead),
+ CookedHead = [cook_outer(F) || F <- ProperRawHead],
+ CookedTail = cook_outer(ImproperRawTail),
+ {{CookedHead,CookedTail},
+ CookedHead ++ CookedTail,
+ HeadLen + 1,
+ fun(I,L) -> improper_list_retrieve(I, L, HeadLen) end,
+ fun(I,V,L) -> improper_list_update(I, V, L, HeadLen) end};
+ ProperRawFields ->
+ LocalFields = [cook_outer(F) || F <- ProperRawFields],
+ {LocalFields,
+ LocalFields,
+ length(ProperRawFields),
+ fun lists:nth/2,
+ fun proper_arith:list_update/3}
+ end,
+ ?CONTAINER([
+ {env, {Fields, Len}},
+ {generator, {typed, fun fixed_list_gen/1}},
+ {is_instance, {typed, fun fixed_list_is_instance/2}},
+ {internal_types, Internal},
+ {get_indices, fun fixed_list_get_indices/2},
+ {retrieve, Retrieve},
+ {update, Update}
+ ]).
+
+fixed_list_gen(Type) ->
+ {Fields, _} = get_prop(env, Type),
+ proper_gen:fixed_list_gen(Fields).
+
+fixed_list_is_instance(Type, X) ->
+ {Fields, _} = get_prop(env, Type),
+ fixed_list_test(X, Fields).
+
+fixed_list_get_indices(Type, _X) ->
+ {_, Len} = get_prop(env, Type),
+ lists:seq(1, Len).
+
+-spec fixed_list_test(proper_gen:imm_instance(),
+ [proper_types:type()] | {[proper_types:type()],
+ proper_types:type()}) ->
+ boolean().
+fixed_list_test(X, {ProperHead,ImproperTail}) ->
+ is_list(X) andalso
+ begin
+ ProperHeadLen = length(ProperHead),
+ proper_arith:head_length(X) >= ProperHeadLen andalso
+ begin
+ {XHead,XTail} = lists:split(ProperHeadLen, X),
+ fixed_list_test(XHead, ProperHead)
+ andalso is_instance(XTail, ImproperTail)
+ end
+ end;
+fixed_list_test(X, ProperFields) ->
+ is_list(X)
+ andalso length(X) =:= length(ProperFields)
+ andalso lists:all(fun({E,T}) -> is_instance(E, T) end,
+ lists:zip(X, ProperFields)).
+
+%% TODO: Move these 2 functions to proper_arith?
+-spec improper_list_retrieve(index(), nonempty_improper_list(value(),value()),
+ pos_integer()) -> value().
+improper_list_retrieve(Index, List, HeadLen) ->
+ case Index =< HeadLen of
+ true -> lists:nth(Index, List);
+ false -> lists:nthtail(HeadLen, List)
+ end.
+
+-spec improper_list_update(index(), value(),
+ nonempty_improper_list(value(),value()),
+ pos_integer()) ->
+ nonempty_improper_list(value(),value()).
+improper_list_update(Index, Value, List, HeadLen) ->
+ case Index =< HeadLen of
+ %% TODO: This happens to work, but is not implied by list_update's spec.
+ true -> proper_arith:list_update(Index, Value, List);
+ false -> lists:sublist(List, HeadLen) ++ Value
+ end.
+
+%% @doc All pure functions that map instances of `ArgTypes' to instances of
+%% `RetType'. The syntax `function(Arity, RetType)' is also acceptable.
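+%% For example, `function([integer(), atom()], boolean())' is equivalent to
+%% `function(2, boolean())': only the length of the argument-type list is
+%% taken into account.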
+-spec function(ArgTypes::[raw_type()] | arity(), RetType::raw_type()) ->
+ proper_types:type().
+function(Arity, RawRetType) when is_integer(Arity), Arity >= 0, Arity =< 255 ->
+ RetType = cook_outer(RawRetType),
+ ?BASIC([
+ {env, {Arity, RetType}},
+ {generator, {typed, fun function_gen/1}},
+ {is_instance, {typed, fun function_is_instance/2}}
+ ]);
+function(RawArgTypes, RawRetType) ->
+ function(length(RawArgTypes), RawRetType).
+
+function_gen(Type) ->
+ {Arity, RetType} = get_prop(env, Type),
+ proper_gen:function_gen(Arity, RetType).
+
+function_is_instance(Type, X) ->
+ {Arity, RetType} = get_prop(env, Type),
+ is_function(X, Arity)
+ %% TODO: what if it's not a function we produced?
+ andalso equal_types(RetType, proper_gen:get_ret_type(X)).
+
+%% @doc All Erlang terms (that PropEr can produce). For reasons of efficiency,
+%% functions are never produced as instances of this type.<br />
+%% CAUTION: Instances of this type are expensive to produce, shrink and instance-
+%% check, both in terms of processing time and consumed memory. Only use this
+%% type if you are certain that you need it.
+-spec any() -> proper_types:type().
+any() ->
+ AllTypes = [integer(),float(),atom(),bitstring(),?LAZY(loose_tuple(any())),
+ ?LAZY(list(any()))],
+ ?SUBTYPE(union(AllTypes), [
+ {generator, fun proper_gen:any_gen/1}
+ ]).
+
+
+%%------------------------------------------------------------------------------
+%% Type aliases
+%%------------------------------------------------------------------------------
+
+%% @equiv integer(inf, inf)
+-spec integer() -> proper_types:type().
+integer() -> integer(inf, inf).
+
+%% @equiv integer(0, inf)
+-spec non_neg_integer() -> proper_types:type().
+non_neg_integer() -> integer(0, inf).
+
+%% @equiv integer(1, inf)
+-spec pos_integer() -> proper_types:type().
+pos_integer() -> integer(1, inf).
+
+%% @equiv integer(inf, -1)
+-spec neg_integer() -> proper_types:type().
+neg_integer() -> integer(inf, -1).
+
+%% @equiv integer(Low, High)
+-spec range(extint(), extint()) -> proper_types:type().
+range(Low, High) -> integer(Low, High).
+
+%% @equiv float(inf, inf)
+-spec float() -> proper_types:type().
+float() -> float(inf, inf).
+
+%% @equiv float(0.0, inf)
+-spec non_neg_float() -> proper_types:type().
+non_neg_float() -> float(0.0, inf).
+
+%% @equiv union([integer(), float()])
+-spec number() -> proper_types:type().
+number() -> union([integer(), float()]).
+
+%% @doc The atoms `true' and `false'. Instances shrink towards `false'.
+-spec boolean() -> proper_types:type().
+boolean() -> union(['false', 'true']).
+
+%% @equiv integer(0, 255)
+-spec byte() -> proper_types:type().
+byte() -> integer(0, 255).
+
+%% @equiv integer(0, 16#10ffff)
+-spec char() -> proper_types:type().
+char() -> integer(0, 16#10ffff).
+
+%% @equiv list(any())
+-spec list() -> proper_types:type().
+list() -> list(any()).
+
+%% @equiv loose_tuple(any())
+-spec tuple() -> proper_types:type().
+tuple() -> loose_tuple(any()).
+
+%% @equiv list(char())
+-spec string() -> proper_types:type().
+string() -> list(char()).
+
+%% @equiv weighted_union(FreqChoices)
+-spec wunion([{frequency(),raw_type()},...]) -> proper_types:type().
+wunion(FreqChoices) -> weighted_union(FreqChoices).
+
+%% @equiv any()
+-spec term() -> proper_types:type().
+term() -> any().
+
+%% @equiv union([non_neg_integer(), 'infinity'])
+-spec timeout() -> proper_types:type().
+timeout() -> union([non_neg_integer(), 'infinity']).
+
+%% @equiv integer(0, 255)
+-spec arity() -> proper_types:type().
+arity() -> integer(0, 255).
+
+
+%%------------------------------------------------------------------------------
+%% QuickCheck compatibility types
+%%------------------------------------------------------------------------------
+
+%% @doc Small integers (bound by the current value of the `size' parameter).
+%% Instances shrink towards `0'.
+-spec int() -> proper_types:type().
+int() -> ?SIZED(Size, integer(-Size,Size)).
+
+%% @doc Small non-negative integers (bound by the current value of the `size'
+%% parameter). Instances shrink towards `0'.
+-spec nat() -> proper_types:type().
+nat() -> ?SIZED(Size, integer(0,Size)).
+
+%% @equiv integer()
+-spec largeint() -> proper_types:type().
+largeint() -> integer().
+
+%% @equiv float()
+-spec real() -> proper_types:type().
+real() -> float().
+
+%% @equiv boolean()
+-spec bool() -> proper_types:type().
+bool() -> boolean().
+
+%% @equiv integer(Low, High)
+-spec choose(extint(), extint()) -> proper_types:type().
+choose(Low, High) -> integer(Low, High).
+
+%% @equiv union(Choices)
+-spec elements([raw_type(),...]) -> proper_types:type().
+elements(Choices) -> union(Choices).
+
+%% @equiv union(Choices)
+-spec oneof([raw_type(),...]) -> proper_types:type().
+oneof(Choices) -> union(Choices).
+
+%% @equiv weighted_union(Choices)
+-spec frequency([{frequency(),raw_type()},...]) -> proper_types:type().
+frequency(FreqChoices) -> weighted_union(FreqChoices).
+
+%% @equiv exactly(E)
+-spec return(term()) -> proper_types:type().
+return(E) -> exactly(E).
+
+%% @doc Adds a default value, `Default', to `Type'.
+%% The default serves as a primary shrinking target for instances, while it
+%% is also chosen by the random instance generation subsystem half the time.
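+%% For example (a sketch), `default(42, integer())' yields `42' about half the
+%% time, a random integer otherwise, and shrinks towards `42'.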
+-spec default(raw_type(), raw_type()) -> proper_types:type().
+default(Default, Type) ->
+ union([Default, Type]).
+
+%% @doc All sorted lists containing elements of type `ElemType'.
+%% Instances shrink towards the empty list, `[]'.
+-spec orderedlist(ElemType::raw_type()) -> proper_types:type().
+orderedlist(RawElemType) ->
+ ?LET(L, list(RawElemType), lists:sort(L)).
+
+%% @equiv function(0, RetType)
+-spec function0(raw_type()) -> proper_types:type().
+function0(RetType) ->
+ function(0, RetType).
+
+%% @equiv function(1, RetType)
+-spec function1(raw_type()) -> proper_types:type().
+function1(RetType) ->
+ function(1, RetType).
+
+%% @equiv function(2, RetType)
+-spec function2(raw_type()) -> proper_types:type().
+function2(RetType) ->
+ function(2, RetType).
+
+%% @equiv function(3, RetType)
+-spec function3(raw_type()) -> proper_types:type().
+function3(RetType) ->
+ function(3, RetType).
+
+%% @equiv function(4, RetType)
+-spec function4(raw_type()) -> proper_types:type().
+function4(RetType) ->
+ function(4, RetType).
+
+%% @doc A specialization of {@link default/2}, where `Default' and `Type' are
+%% assigned weights to be considered by the random instance generator. The
+%% shrinking subsystem will ignore the weights and try to shrink using the
+%% default value.
+-spec weighted_default({frequency(),raw_type()}, {frequency(),raw_type()}) ->
+ proper_types:type().
+weighted_default(Default, Type) ->
+ weighted_union([Default, Type]).
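+
+%% A hypothetical usage sketch (the weights 9 and 1 are chosen only for
+%% illustration): the call
+%% ```weighted_default({9, 0}, {1, integer()})'''
+%% should generate `0' roughly nine times out of ten and an arbitrary integer
+%% otherwise, while shrinking still heads for the default `0'.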
+
+
+%%------------------------------------------------------------------------------
+%% Additional type specification functions
+%%------------------------------------------------------------------------------
+
+%% @doc Overrides the `size' parameter used when generating instances of
+%% `Type' with `NewSize'. Has no effect on size-less types, such as unions.
+%% Also, this will not affect the generation of any internal types contained in
+%% `Type', such as the elements of a list - those will still be generated
+%% using the test-wide value of `size'. One use of this function is to modify
+%% types to produce instances that grow faster or slower, like so:
+%% ```?SIZED(Size, resize(Size * 2, list(integer())))'''
+%% The above specifies a list type that grows twice as fast as normal lists.
+-spec resize(size(), Type::raw_type()) -> proper_types:type().
+resize(NewSize, RawType) ->
+ Type = cook_outer(RawType),
+ case find_prop(size_transform, Type) of
+ {ok,Transform} ->
+ add_prop(size_transform, fun(_S) -> Transform(NewSize) end, Type);
+ error ->
+ add_prop(size_transform, fun(_S) -> NewSize end, Type)
+ end.
+
+%% @doc This is a predefined constraint that can be applied to random-length
+%% list and binary types to ensure that the produced values are never empty.
+%%
+%% (e.g. {@link list/0}, {@link string/0}, {@link binary/0}).
+-spec non_empty(ListType::raw_type()) -> proper_types:type().
+non_empty(RawListType) ->
+ ?SUCHTHAT(L, RawListType, L =/= [] andalso L =/= <<>>).
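+
+%% For illustration, given the definition above:
+%% ```non_empty(list(integer()))'''
+%% produces only integer lists of length at least one, and
+%% ```non_empty(binary())'''
+%% produces only binaries containing at least one byte.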
+
+%% @doc Creates a new type which is equivalent to `Type', but whose instances
+%% are never shrunk by the shrinking subsystem.
+-spec noshrink(Type::raw_type()) -> proper_types:type().
+noshrink(RawType) ->
+ add_prop(noshrink, true, cook_outer(RawType)).
+
+%% @doc Associates the atom key `Parameter' with the value `Value' while
+%% generating instances of `Type'.
+-spec with_parameter(atom(), value(), Type::raw_type()) -> proper_types:type().
+with_parameter(Parameter, Value, RawType) ->
+ with_parameters([{Parameter,Value}], RawType).
+
+%% @doc Similar to {@link with_parameter/3}, but accepts a list of
+%% `{Parameter, Value}' pairs.
+-spec with_parameters([{atom(),value()}], Type::raw_type()) ->
+ proper_types:type().
+with_parameters(PVlist, RawType) ->
+ Type = cook_outer(RawType),
+ case find_prop(parameters, Type) of
+ {ok,Params} when is_list(Params) ->
+ append_list_to_prop(parameters, PVlist, Type);
+ error ->
+ add_prop(parameters, PVlist, Type)
+ end.
+
+%% @doc Returns the value associated with `Parameter', or `Default' in case
+%% `Parameter' is not associated with any value.
+-spec parameter(atom(), value()) -> value().
+parameter(Parameter, Default) ->
+ Parameters =
+ case erlang:get('$parameters') of
+ undefined -> [];
+ List -> List
+ end,
+ proplists:get_value(Parameter, Parameters, Default).
+
+%% @equiv parameter(Parameter, undefined)
+-spec parameter(atom()) -> value().
+parameter(Parameter) ->
+ parameter(Parameter, undefined).
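+
+%% A hypothetical sketch of how the parameter functions fit together (the
+%% parameter name `width' and the function `sized_row/0' are assumed for
+%% illustration): a generator may read a caller-supplied parameter, e.g.
+%% ```sized_row() -> ?LAZY(vector(parameter(width, 2), byte())).'''
+%% and a caller may override the default width of 2 by wrapping the type:
+%% ```with_parameter(width, 4, sized_row())'''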
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_typeserver.erl b/lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_typeserver.erl
new file mode 100644
index 0000000000..1677b4efb8
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/proper/proper_typeserver.erl
@@ -0,0 +1,2402 @@
+%%% Copyright 2010-2015 Manolis Papadakis <[email protected]>,
+%%% Eirini Arvaniti <[email protected]>
+%%% and Kostis Sagonas <[email protected]>
+%%%
+%%% This file is part of PropEr.
+%%%
+%%% PropEr is free software: you can redistribute it and/or modify
+%%% it under the terms of the GNU General Public License as published by
+%%% the Free Software Foundation, either version 3 of the License, or
+%%% (at your option) any later version.
+%%%
+%%% PropEr is distributed in the hope that it will be useful,
+%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
+%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+%%% GNU General Public License for more details.
+%%%
+%%% You should have received a copy of the GNU General Public License
+%%% along with PropEr. If not, see <http://www.gnu.org/licenses/>.
+
+%%% @copyright 2010-2015 Manolis Papadakis, Eirini Arvaniti and Kostis Sagonas
+%%% @version {@version}
+%%% @author Manolis Papadakis
+
+%%% @doc Erlang type system - PropEr type system integration module.
+%%%
+%%% PropEr can parse types expressed in Erlang's type language and convert them
+%%% to its own type format. Such expressions can be used instead of regular type
+%%% constructors in the second argument of `?FORALL's. No extra notation is
+%%% required; PropEr will detect which calls correspond to native types by
+%%% applying a parse transform during compilation. This parse transform is
+%%% automatically applied to any module that includes the `proper.hrl' header
+%%% file. You can disable this feature by compiling your modules with
+%%% `-DPROPER_NO_TRANS'. Note that this will currently also disable the
+%%% automatic exporting of properties.
+%%%
+%%% The use of native types in properties is subject to the following usage
+%%% rules:
+%%% <ul>
+%%% <li>Native types cannot be used outside of `?FORALL's.</li>
+%%% <li>Inside `?FORALL's, native types can be combined with other native
+%%% types, and even with PropEr types, inside tuples and lists (the constructs
+%%% `[...]', `{...}' and `++' are all allowed).</li>
+%%% <li>All other constructs of Erlang's built-in type system (e.g. `|' for
+%%% union, `_' as an alias of `any()', `<<_:_>>' binary type syntax and
+%%% `fun((...) -> ...)' function type syntax) are not allowed in `?FORALL's,
+%%% because they are rejected by the Erlang parser.</li>
+%%% <li>Anything other than a tuple constructor, list constructor, `++'
+%%% application, local or remote call will automatically be considered a
+%%% PropEr type constructor and not be processed further by the parse
+%%% transform.</li>
+%%% <li>Parametric native types are fully supported; of course, they can only
+%%% appear instantiated in a `?FORALL'. The arguments of parametric native
+%%% types are always interpreted as native types.</li>
+%%% <li>Parametric PropEr types, on the other hand, can take any kind of
+%%% argument. You can even mix native and PropEr types in the arguments of a
+%%% PropEr type. For example, assuming that the following declarations are
+%%% present:
+%%% ``` my_proper_type() -> ?LET(...).
+%%% -type my_native_type() :: ... .'''
+%%% Then the following expressions are all legal:
+%%% ``` vector(2, my_native_type())
+%%% function(0, my_native_type())
+%%% union([my_proper_type(), my_native_type()])''' </li>
+%%% <li>Some type constructors can take native types as arguments (but only
+%%% inside `?FORALL's):
+%%% <ul>
+%%% <li>`?SUCHTHAT', `?SUCHTHATMAYBE', `non_empty', `noshrink': these work
+%%% with native types too</li>
+%%% <li>`?LAZY', `?SHRINK', `resize', `?SIZED': these don't work with native
+%%% types</li>
+%%% <li>`?LET', `?LETSHRINK': only the top-level base type can be a native
+%%% type</li>
+%%% </ul></li>
+%%% <li>Native type declarations in the `?FORALL's of a module can reference any
+%%% custom type declared in a `-type' or `-opaque' attribute of the same
+%%% module, as long as no module identifier is used.</li>
+%%% <li>Typed records cannot be referenced inside `?FORALL's using the
+%%% `#rec_name{}' syntax. To use a typed record in a `?FORALL', enclose the
+%%% record in a custom type like so:
+%%% ``` -type rec_name() :: #rec_name{}. '''
+%%% and use the custom type instead.</li>
+%%% <li>`?FORALL's may contain references to self-recursive or mutually
+%%% recursive native types, so long as each type in the hierarchy has a clear
+%%% base case.
+%%% Currently, PropEr requires that the top level of any recursive type
+%%% declaration is either a (maybe empty) list or a union containing at least
+%%% one choice that doesn't reference the type directly (it may, however,
+%%% reference any of the types that are mutually recursive with it). This
+%%% means, for example, that some valid recursive type declarations, such as
+%%% this one:
+%%% ``` ?FORALL(..., a(), ...) '''
+%%% where:
+%%% ``` -type a() :: {'a','none' | a()}. '''
+%%% are not accepted by PropEr. However, such types can be rewritten in a way
+%%% that allows PropEr to parse them:
+%%% ``` ?FORALL(..., a(), ...) '''
+%%% where:
+%%% ``` -type a() :: {'a','none'} | {'a',a()}. '''
+%%% This also means that recursive record declarations are not allowed:
+%%% ``` ?FORALL(..., rec(), ...) '''
+%%% where:
+%%% ``` -type rec() :: #rec{}.
+%%% -record(rec, {a = 0 :: integer(), b = 'nil' :: 'nil' | #rec{}}). '''
+%%% A little rewriting can usually remedy this problem as well:
+%%% ``` ?FORALL(..., rec(), ...) '''
+%%% where:
+%%% ``` -type rec() :: #rec{b :: 'nil'} | #rec{b :: rec()}.
+%%% -record(rec, {a = 0 :: integer(), b = 'nil' :: 'nil' | #rec{}}). '''
+%%% </li>
+%%% <li>Remote types may be referenced in a `?FORALL', so long as they are
+%%% exported from the remote module. Currently, PropEr requires that any
+%%% remote modules whose types are directly referenced from within properties
+%%% are present in the code path at compile time, either compiled with
+%%% `debug_info' enabled or in source form. If PropEr cannot find a remote
+%%% module at all, finds only a compiled object file with no debug
+%%% information or fails to compile the source file, all calls to that module
+%%% will automatically be considered calls to PropEr type constructors.</li>
+%%% <li>For native types to be translated correctly, both the module that
+%%% contains the `?FORALL' declaration as well as any module that contains
+%%% the declaration of a type referenced (directly or indirectly) from inside
+%%% a `?FORALL' must be present in the code path at runtime, either compiled
+%%% with `debug_info' enabled or in source form.</li>
+%%% <li>Local types with the same name as an auto-imported BIF are not accepted
+%%% by PropEr, unless the BIF in question has been declared in a
+%%% `no_auto_import' option.</li>
+%%% <li>When an expression can be interpreted both as a PropEr type and as a
+%%% native type, the former takes precedence. This means that a function
+%%% `foo()' will shadow a type `foo()' if they are both present in the module.
+%%% The same rule applies to remote functions and types as well.</li>
+%%% <li>The above may cause some confusion when list syntax is used:
+%%% <ul>
+%%% <li>The expression `[integer()]' can be interpreted both ways, so the
+%%% PropEr way applies. Therefore, instances of this type will always be
+%%% lists of length 1, not arbitrary integer lists, as would be expected
+%%% when interpreting the expression as a native type.</li>
+%%% <li>Assuming that a custom type `foo/1' has been declared, the expression
+%%% `foo([integer()])' can only be interpreted as a native type declaration,
+%%% which means that the generic type of integer lists will be passed to
+%%% `foo/1'.</li>
+%%% </ul></li>
+%%% <li>Currently, PropEr does not detect the following mistakes:
+%%% <ul>
+%%% <li>inline record-field specializations that reference non-existent
+%%% fields</li>
+%%% <li>type parameters that are not present in the RHS of a `-type'
+%%% declaration</li>
+%%% <li>using `_' as a type variable in the LHS of a `-type' declaration</li>
+%%% <li>using the same variable in more than one position in the LHS of a
+%%% `-type' declaration</li>
+%%% </ul>
+%%% </li>
+%%% </ul>
+%%%
+%%% You can use <a href="#index">these</a> functions to try out the type
+%%% translation subsystem.
+%%%
+%%% CAUTION: These functions should never be used inside properties. They are
+%%% meant for demonstration purposes only.
+
+-module(proper_typeserver).
+-behaviour(gen_server).
+-export([demo_translate_type/2, demo_is_instance/3]).
+
+-export([start/0, restart/0, stop/0, create_spec_test/3, get_exp_specced/1,
+ is_instance/3, translate_type/1]).
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
+ code_change/3]).
+-export([get_exp_info/1, match/2]).
+
+-export_type([imm_type/0, mod_exp_types/0, mod_exp_funs/0]).
+
+-include("proper_internal.hrl").
+
+
+%%------------------------------------------------------------------------------
+%% Macros
+%%------------------------------------------------------------------------------
+
+-define(SRC_FILE_EXT, ".erl").
+
+%% CAUTION: all these must be sorted
+-define(STD_TYPES_0,
+ [any,arity,atom,binary,bitstring,bool,boolean,byte,char,float,integer,
+ list,neg_integer,non_neg_integer,number,pos_integer,string,term,
+ timeout]).
+-define(HARD_ADTS,
+ %% gb_trees:iterator and gb_sets:iterator are NOT hardcoded
+ [{{array,0},array}, {{array,1},proper_array},
+ {{dict,0},dict}, {{dict,2},proper_dict},
+ {{gb_set,0},gb_sets}, {{gb_set,1},proper_gb_sets},
+ {{gb_tree,0},gb_trees}, {{gb_tree,2},proper_gb_trees},
+ {{orddict,2},proper_orddict},
+ {{ordset,1},proper_ordsets},
+ {{queue,0},queue}, {{queue,1},proper_queue},
+ {{set,0},sets}, {{set,1},proper_sets}]).
+-define(HARD_ADT_MODS,
+ [{array, [{{array,0},
+ {{type,0,record,[{atom,0,array}]},[]}}]},
+ {dict, [{{dict,0},
+ {{type,0,record,[{atom,0,dict}]},[]}}]},
+ {gb_sets, [{{gb_set,0},
+ {{type,0,tuple,[{type,0,non_neg_integer,[]},
+ {type,0,gb_set_node,[]}]},[]}}]},
+ {gb_trees, [{{gb_tree,0},
+ {{type,0,tuple,[{type,0,non_neg_integer,[]},
+ {type,0,gb_tree_node,[]}]},[]}}]},
+ %% Our parametric ADTs are already declared as normal types, we just
+ %% need to change them to opaques.
+ {proper_array, [{{array,1},already_declared}]},
+ {proper_dict, [{{dict,2},already_declared}]},
+ {proper_gb_sets, [{{gb_set,1},already_declared},
+ {{iterator,1},already_declared}]},
+ {proper_gb_trees, [{{gb_tree,2},already_declared},
+ {{iterator,2},already_declared}]},
+ {proper_orddict, [{{orddict,2},already_declared}]},
+ {proper_ordsets, [{{ordset,1},already_declared}]},
+ {proper_queue, [{{queue,1},already_declared}]},
+ {proper_sets, [{{set,1},already_declared}]},
+ {queue, [{{queue,0},
+ {{type,0,tuple,[{type,0,list,[]},{type,0,list,[]}]},[]}}]},
+ {sets, [{{set,0},
+ {{type,0,record,[{atom,0,set}]},[]}}]}]).
+
+
+%%------------------------------------------------------------------------------
+%% Types
+%%------------------------------------------------------------------------------
+
+-type type_name() :: atom().
+-type var_name() :: atom(). %% TODO: also integers?
+-type field_name() :: atom().
+
+-type type_kind() :: 'type' | 'record'.
+-type type_ref() :: {type_kind(),type_name(),arity()}.
+-ifdef(NO_MODULES_IN_OPAQUES).
+-type substs_dict() :: dict(). %% dict(field_name(),ret_type())
+-else.
+-type substs_dict() :: dict:dict(field_name(),ret_type()).
+-endif.
+-type full_type_ref() :: {mod_name(),type_kind(),type_name(),
+ [ret_type()] | substs_dict()}.
+-type symb_info() :: 'not_symb' | {'orig_abs',abs_type()}.
+-type type_repr() :: {'abs_type',abs_type(),[var_name()],symb_info()}
+ | {'cached',fin_type(),abs_type(),symb_info()}
+ | {'abs_record',[{field_name(),abs_type()}]}.
+-type gen_fun() :: fun((size()) -> fin_type()).
+-type rec_fun() :: fun(([gen_fun()],size()) -> fin_type()).
+-type rec_arg() :: {boolean() | {'list',boolean(),rec_fun()},full_type_ref()}.
+-type rec_args() :: [rec_arg()].
+-type ret_type() :: {'simple',fin_type()} | {'rec',rec_fun(),rec_args()}.
+-type rec_fun_info() :: {pos_integer(),pos_integer(),[arity(),...],
+ [rec_fun(),...]}.
+
+-type imm_type_ref() :: {type_name(),arity()}.
+-type hard_adt_repr() :: {abs_type(),[var_name()]} | 'already_declared'.
+-type fun_ref() :: {fun_name(),arity()}.
+-type fun_repr() :: fun_clause_repr().
+-type fun_clause_repr() :: {[abs_type()],abs_type()}.
+-type proc_fun_ref() :: {fun_name(),[abs_type()],abs_type()}.
+-type full_imm_type_ref() :: {mod_name(),type_name(),arity()}.
+-type imm_stack() :: [full_imm_type_ref()].
+-type pat_field() :: 0 | 1 | atom().
+-type pattern() :: loose_tuple(pat_field()).
+-type next_step() :: 'none' | 'take_head' | {'match_with',pattern()}.
+
+-ifdef(NO_MODULES_IN_OPAQUES).
+%% @private_type
+-type mod_exp_types() :: set(). %% set(imm_type_ref())
+-type mod_types() :: dict(). %% dict(type_ref(),type_repr())
+%% @private_type
+-type mod_exp_funs() :: set(). %% set(fun_ref())
+-type mod_specs() :: dict(). %% dict(fun_ref(),fun_repr())
+-else.
+%% @private_type
+-type mod_exp_types() :: sets:set(imm_type_ref()).
+-type mod_types() :: dict:dict(type_ref(),type_repr()).
+%% @private_type
+-type mod_exp_funs() :: sets:set(fun_ref()).
+-type mod_specs() :: dict:dict(fun_ref(),fun_repr()).
+-endif.
+
+-ifdef(NO_MODULES_IN_OPAQUES).
+-record(state,
+ {cached = dict:new() :: dict(), %% dict(imm_type(),fin_type())
+ exp_types = dict:new() :: dict(), %% dict(mod_name(),mod_exp_types())
+ types = dict:new() :: dict(), %% dict(mod_name(),mod_types())
+ exp_specs = dict:new() :: dict()}). %% dict(mod_name(),mod_specs())
+-else.
+-record(state,
+ {cached = dict:new() :: dict:dict(), %% dict(imm_type(),fin_type())
+ exp_types = dict:new() :: dict:dict(), %% dict(mod_name(),mod_exp_types())
+ types = dict:new() :: dict:dict(), %% dict(mod_name(),mod_types())
+ exp_specs = dict:new() :: dict:dict()}). %% dict(mod_name(),mod_specs())
+%% {cached = dict:new() :: dict:dict(imm_type(),fin_type()),
+%% exp_types = dict:new() :: dict:dict(mod_name(),mod_exp_types()),
+%% types = dict:new() :: dict:dict(mod_name(),mod_types()),
+%% exp_specs = dict:new() :: dict:dict(mod_name(),mod_specs())}).
+-endif.
+-type state() :: #state{}.
+
+-record(mod_info,
+ {mod_exp_types = sets:new() :: mod_exp_types(),
+ mod_types = dict:new() :: mod_types(),
+ mod_opaques = sets:new() :: mod_exp_types(),
+ mod_exp_funs = sets:new() :: mod_exp_funs(),
+ mod_specs = dict:new() :: mod_specs()}).
+-type mod_info() :: #mod_info{}.
+
+-type stack() :: [full_type_ref() | 'tuple' | 'list' | 'union' | 'fun'].
+-ifdef(NO_MODULES_IN_OPAQUES).
+-type var_dict() :: dict(). %% dict(var_name(),ret_type())
+-else.
+-type var_dict() :: dict:dict(var_name(),ret_type()).
+-endif.
+%% @private_type
+-type imm_type() :: {mod_name(),string()}.
+%% @alias
+-type fin_type() :: proper_types:type().
+-type tagged_result(T) :: {'ok',T} | 'error'.
+-type tagged_result2(T,S) :: {'ok',T,S} | 'error'.
+%% @alias
+-type rich_result(T) :: {'ok',T} | {'error',term()}.
+-type rich_result2(T,S) :: {'ok',T,S} | {'error',term()}.
+-type false_positive_mfas() :: proper:false_positive_mfas().
+
+-type server_call() :: {'create_spec_test',mfa(),timeout(),false_positive_mfas()}
+ | {'get_exp_specced',mod_name()}
+ | {'get_type_repr',mod_name(),type_ref(),boolean()}
+ | {'translate_type',imm_type()}.
+-type server_response() :: rich_result(proper:test())
+ | rich_result([mfa()])
+ | rich_result(type_repr())
+ | rich_result(fin_type()).
+
+
+%%------------------------------------------------------------------------------
+%% Server interface functions
+%%------------------------------------------------------------------------------
+
+%% @private
+-spec start() -> 'ok'.
+start() ->
+ {ok,TypeserverPid} = gen_server:start_link(?MODULE, dummy, []),
+ put('$typeserver_pid', TypeserverPid),
+ ok.
+
+%% @private
+-spec restart() -> 'ok'.
+restart() ->
+ TypeserverPid = get('$typeserver_pid'),
+ case (TypeserverPid =:= undefined orelse not is_process_alive(TypeserverPid)) of
+ true -> start();
+ false -> ok
+ end.
+
+%% @private
+-spec stop() -> 'ok'.
+stop() ->
+ TypeserverPid = get('$typeserver_pid'),
+ erase('$typeserver_pid'),
+ gen_server:cast(TypeserverPid, stop).
+
+%% @private
+-spec create_spec_test(mfa(), timeout(), false_positive_mfas()) -> rich_result(proper:test()).
+create_spec_test(MFA, SpecTimeout, FalsePositiveMFAs) ->
+ TypeserverPid = get('$typeserver_pid'),
+ gen_server:call(TypeserverPid, {create_spec_test,MFA,SpecTimeout,FalsePositiveMFAs}).
+
+%% @private
+-spec get_exp_specced(mod_name()) -> rich_result([mfa()]).
+get_exp_specced(Mod) ->
+ TypeserverPid = get('$typeserver_pid'),
+ gen_server:call(TypeserverPid, {get_exp_specced,Mod}).
+
+-spec get_type_repr(mod_name(), type_ref(), boolean()) ->
+ rich_result(type_repr()).
+get_type_repr(Mod, TypeRef, IsRemote) ->
+ TypeserverPid = get('$typeserver_pid'),
+ gen_server:call(TypeserverPid, {get_type_repr,Mod,TypeRef,IsRemote}).
+
+%% @private
+-spec translate_type(imm_type()) -> rich_result(fin_type()).
+translate_type(ImmType) ->
+ TypeserverPid = get('$typeserver_pid'),
+ gen_server:call(TypeserverPid, {translate_type,ImmType}).
+
+%% @doc Translates the native type expression `TypeExpr' (which should be
+%% provided inside a string) into a PropEr type, which can then be passed to any
+%% of the demo functions defined in the {@link proper_gen} module. PropEr acts
+%% as if it found this type expression inside the code of module `Mod'.
+-spec demo_translate_type(mod_name(), string()) -> rich_result(fin_type()).
+demo_translate_type(Mod, TypeExpr) ->
+ start(),
+ Result = translate_type({Mod,TypeExpr}),
+ stop(),
+ Result.
+
+%% @doc Checks if `Term' is a valid instance of native type `TypeExpr' (which
+%% should be provided inside a string). PropEr acts as if it found this type
+%% expression inside the code of module `Mod'.
+-spec demo_is_instance(term(), mod_name(), string()) ->
+ boolean() | {'error',term()}.
+demo_is_instance(Term, Mod, TypeExpr) ->
+ case parse_type(TypeExpr) of
+ {ok,TypeForm} ->
+ start(),
+ Result =
+ %% Force the typeserver to load the module.
+ case translate_type({Mod,"integer()"}) of
+ {ok,_FinType} ->
+ try is_instance(Term, Mod, TypeForm)
+ catch
+ throw:{'$typeserver',Reason} -> {error, Reason}
+ end;
+ {error,_Reason} = Error ->
+ Error
+ end,
+ stop(),
+ Result;
+ {error,_Reason} = Error ->
+ Error
+ end.
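+
+%% A hypothetical shell session, assuming a module `mymod' that is in the
+%% code path and compiled with `debug_info':
+%% ```1> proper_typeserver:demo_is_instance([1,2,3], mymod, "[integer()]").
+%%    true'''
+%% Similarly, `demo_translate_type(mymod, "non_neg_integer()")' should return
+%% `{ok,Type}', with `Type' being the corresponding PropEr type.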
+
+
+%%------------------------------------------------------------------------------
+%% Implementation of gen_server interface
+%%------------------------------------------------------------------------------
+
+%% @private
+-spec init(_) -> {'ok',state()}.
+init(_) ->
+ {ok, #state{}}.
+
+%% @private
+-spec handle_call(server_call(), _, state()) ->
+ {'reply',server_response(),state()}.
+handle_call({create_spec_test,MFA,SpecTimeout,FalsePositiveMFAs}, _From, State) ->
+ case create_spec_test(MFA, SpecTimeout, FalsePositiveMFAs, State) of
+ {ok,Test,NewState} ->
+ {reply, {ok,Test}, NewState};
+ {error,_Reason} = Error ->
+ {reply, Error, State}
+ end;
+handle_call({get_exp_specced,Mod}, _From, State) ->
+ case get_exp_specced(Mod, State) of
+ {ok,MFAs,NewState} ->
+ {reply, {ok,MFAs}, NewState};
+ {error,_Reason} = Error ->
+ {reply, Error, State}
+ end;
+handle_call({get_type_repr,Mod,TypeRef,IsRemote}, _From, State) ->
+ case get_type_repr(Mod, TypeRef, IsRemote, State) of
+ {ok,TypeRepr,NewState} ->
+ {reply, {ok,TypeRepr}, NewState};
+ {error,_Reason} = Error ->
+ {reply, Error, State}
+ end;
+handle_call({translate_type,ImmType}, _From, State) ->
+ case translate_type(ImmType, State) of
+ {ok,FinType,NewState} ->
+ {reply, {ok,FinType}, NewState};
+ {error,_Reason} = Error ->
+ {reply, Error, State}
+ end.
+
+%% @private
+-spec handle_cast('stop', state()) -> {'stop','normal',state()}.
+handle_cast(stop, State) ->
+ {stop, normal, State}.
+
+%% @private
+-spec handle_info(term(), state()) -> {'stop',{'received_info',term()},state()}.
+handle_info(Info, State) ->
+ {stop, {received_info,Info}, State}.
+
+%% @private
+-spec terminate(term(), state()) -> 'ok'.
+terminate(_Reason, _State) ->
+ ok.
+
+%% @private
+-spec code_change(term(), state(), _) -> {'ok',state()}.
+code_change(_OldVsn, State, _) ->
+ {ok, State}.
+
+
+%%------------------------------------------------------------------------------
+%% Top-level interface
+%%------------------------------------------------------------------------------
+
+-spec create_spec_test(mfa(), timeout(), false_positive_mfas(), state()) ->
+ rich_result2(proper:test(),state()).
+create_spec_test(MFA, SpecTimeout, FalsePositiveMFAs, State) ->
+ case get_exp_spec(MFA, State) of
+ {ok,FunRepr,NewState} ->
+ make_spec_test(MFA, FunRepr, SpecTimeout, FalsePositiveMFAs, NewState);
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec get_exp_spec(mfa(), state()) -> rich_result2(fun_repr(),state()).
+get_exp_spec({Mod,Fun,Arity} = MFA, State) ->
+ case add_module(Mod, State) of
+ {ok,#state{exp_specs = ExpSpecs} = NewState} ->
+ ModExpSpecs = dict:fetch(Mod, ExpSpecs),
+ case dict:find({Fun,Arity}, ModExpSpecs) of
+ {ok,FunRepr} ->
+ {ok, FunRepr, NewState};
+ error ->
+ {error, {function_not_exported_or_specced,MFA}}
+ end;
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec make_spec_test(mfa(), fun_repr(), timeout(), false_positive_mfas(), state()) ->
+ rich_result2(proper:test(),state()).
+make_spec_test({Mod,_Fun,_Arity}=MFA, {Domain,_Range}=FunRepr, SpecTimeout, FalsePositiveMFAs, State) ->
+ case convert(Mod, {type,0,'$fixed_list',Domain}, State) of
+ {ok,FinType,NewState} ->
+ Test = ?FORALL(Args, FinType, apply_spec_test(MFA, FunRepr, SpecTimeout, FalsePositiveMFAs, Args)),
+ {ok, Test, NewState};
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec apply_spec_test(mfa(), fun_repr(), timeout(), false_positive_mfas(), term()) -> proper:test().
+apply_spec_test({Mod,Fun,_Arity}=MFA, {_Domain,Range}, SpecTimeout, FalsePositiveMFAs, Args) ->
+ ?TIMEOUT(SpecTimeout,
+ begin
+ %% NOTE: only call apply/3 inside try/catch (do not trust ?MODULE:is_instance/3)
+ Result =
+ try apply(Mod,Fun,Args) of
+ X -> {ok, X}
+ catch
+ X:Y -> {X, Y}
+ end,
+ case Result of
+ {ok, Z} ->
+ case ?MODULE:is_instance(Z,Mod,Range) of
+ true ->
+ true;
+ false when is_function(FalsePositiveMFAs) ->
+ FalsePositiveMFAs(MFA, Args, {fail, Z});
+ false ->
+ false
+ end;
+ Exception when is_function(FalsePositiveMFAs) ->
+ case FalsePositiveMFAs(MFA, Args, Exception) of
+ true ->
+ true;
+ false ->
+ error(Exception, erlang:get_stacktrace())
+ end;
+ Exception ->
+ error(Exception, erlang:get_stacktrace())
+ end
+ end).
+
+-spec get_exp_specced(mod_name(), state()) -> rich_result2([mfa()],state()).
+get_exp_specced(Mod, State) ->
+ case add_module(Mod, State) of
+ {ok,#state{exp_specs = ExpSpecs} = NewState} ->
+ ModExpSpecs = dict:fetch(Mod, ExpSpecs),
+ ExpSpecced = [{Mod,F,A} || {F,A} <- dict:fetch_keys(ModExpSpecs)],
+ {ok, ExpSpecced, NewState};
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec get_type_repr(mod_name(), type_ref(), boolean(), state()) ->
+ rich_result2(type_repr(),state()).
+get_type_repr(Mod, {type,Name,Arity} = TypeRef, true, State) ->
+ case prepare_for_remote(Mod, Name, Arity, State) of
+ {ok,NewState} ->
+ get_type_repr(Mod, TypeRef, false, NewState);
+ {error,_Reason} = Error ->
+ Error
+ end;
+get_type_repr(Mod, TypeRef, false, #state{types = Types} = State) ->
+ ModTypes = dict:fetch(Mod, Types),
+ case dict:find(TypeRef, ModTypes) of
+ {ok,TypeRepr} ->
+ {ok, TypeRepr, State};
+ error ->
+ {error, {missing_type,Mod,TypeRef}}
+ end.
+
+-spec prepare_for_remote(mod_name(), type_name(), arity(), state()) ->
+ rich_result(state()).
+prepare_for_remote(RemMod, Name, Arity, State) ->
+ case add_module(RemMod, State) of
+ {ok,#state{exp_types = ExpTypes} = NewState} ->
+ RemModExpTypes = dict:fetch(RemMod, ExpTypes),
+ case sets:is_element({Name,Arity}, RemModExpTypes) of
+ true -> {ok, NewState};
+ false -> {error, {type_not_exported,{RemMod,Name,Arity}}}
+ end;
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec translate_type(imm_type(), state()) -> rich_result2(fin_type(),state()).
+translate_type({Mod,Str} = ImmType, #state{cached = Cached} = State) ->
+ case dict:find(ImmType, Cached) of
+ {ok,Type} ->
+ {ok, Type, State};
+ error ->
+ case parse_type(Str) of
+ {ok,TypeForm} ->
+ case add_module(Mod, State) of
+ {ok,NewState} ->
+ case convert(Mod, TypeForm, NewState) of
+ {ok,FinType,
+ #state{cached = Cached} = FinalState} ->
+ NewCached = dict:store(ImmType, FinType,
+ Cached),
+ {ok, FinType,
+ FinalState#state{cached = NewCached}};
+ {error,_Reason} = Error ->
+ Error
+ end;
+ {error,_Reason} = Error ->
+ Error
+ end;
+ {error,Reason} ->
+ {error, {parse_error,Str,Reason}}
+ end
+ end.
+
+-spec parse_type(string()) -> rich_result(abs_type()).
+parse_type(Str) ->
+ TypeStr = "-type mytype() :: " ++ Str ++ ".",
+ case erl_scan:string(TypeStr) of
+ {ok,Tokens,_EndLocation} ->
+ case erl_parse:parse_form(Tokens) of
+ {ok,{attribute,_Line,type,{mytype,TypeExpr,[]}}} ->
+ {ok, TypeExpr};
+ {error,_ErrorInfo} = Error ->
+ Error
+ end;
+ {error,ErrorInfo,_EndLocation} ->
+ {error, ErrorInfo}
+ end.
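+
+%% For instance, `parse_type("list(atom())")' would be expected to return
+%% `{ok,{type,_,list,[{type,_,atom,[]}]}}', with concrete line numbers in
+%% place of the `_' placeholders.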
+
+-spec add_module(mod_name(), state()) -> rich_result(state()).
+add_module(Mod, #state{exp_types = ExpTypes} = State) ->
+ case dict:is_key(Mod, ExpTypes) of
+ true ->
+ {ok, State};
+ false ->
+ case get_code_and_exports(Mod) of
+ {ok,AbsCode,ModExpFuns} ->
+ RawModInfo = get_mod_info(Mod, AbsCode, ModExpFuns),
+ ModInfo = process_adts(Mod, RawModInfo),
+ {ok, store_mod_info(Mod,ModInfo,State)};
+ {error,Reason} ->
+ {error, {cant_load_code,Mod,Reason}}
+ end
+ end.
+
+%% @private
+-spec get_exp_info(mod_name()) -> rich_result2(mod_exp_types(),mod_exp_funs()).
+get_exp_info(Mod) ->
+ case get_code_and_exports(Mod) of
+ {ok,AbsCode,ModExpFuns} ->
+ RawModInfo = get_mod_info(Mod, AbsCode, ModExpFuns),
+ {ok, RawModInfo#mod_info.mod_exp_types, ModExpFuns};
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec get_code_and_exports(mod_name()) ->
+ rich_result2([abs_form()],mod_exp_funs()).
+get_code_and_exports(Mod) ->
+ case code:get_object_code(Mod) of
+ {Mod, ObjBin, _ObjFileName} ->
+ case get_chunks(ObjBin) of
+ {ok,_AbsCode,_ModExpFuns} = Result ->
+ Result;
+ {error,Reason} ->
+ get_code_and_exports_from_source(Mod, Reason)
+ end;
+ error ->
+ get_code_and_exports_from_source(Mod, cant_find_object_file)
+ end.
+
+-spec get_code_and_exports_from_source(mod_name(), term()) ->
+ rich_result2([abs_form()],mod_exp_funs()).
+get_code_and_exports_from_source(Mod, ObjError) ->
+ SrcFileName = atom_to_list(Mod) ++ ?SRC_FILE_EXT,
+ case code:where_is_file(SrcFileName) of
+ FullSrcFileName when is_list(FullSrcFileName) ->
+ Opts = [binary,debug_info,return_errors,{d,'PROPER_REMOVE_PROPS'}],
+ case compile:file(FullSrcFileName, Opts) of
+ {ok,Mod,Binary} ->
+ get_chunks(Binary);
+ {error,Errors,_Warnings} ->
+ {error, {ObjError,{cant_compile_source_file,Errors}}}
+ end;
+ non_existing ->
+ {error, {ObjError,cant_find_source_file}}
+ end.
+
+-spec get_chunks(string() | binary()) ->
+ rich_result2([abs_form()],mod_exp_funs()).
+get_chunks(ObjFile) ->
+ case beam_lib:chunks(ObjFile, [abstract_code,exports]) of
+ {ok,{_Mod,[{abstract_code,AbsCodeChunk},{exports,ExpFunsList}]}} ->
+ case AbsCodeChunk of
+ {raw_abstract_v1,AbsCode} ->
+ %% HACK: Add a declaration for iolist() to every module
+ {ok, add_iolist(AbsCode), sets:from_list(ExpFunsList)};
+ no_abstract_code ->
+ {error, no_abstract_code};
+ _ ->
+ {error, unsupported_abstract_code_format}
+ end;
+ {error,beam_lib,Reason} ->
+ {error, Reason}
+ end.
+
+-spec add_iolist([abs_form()]) -> [abs_form()].
+add_iolist(Forms) ->
+ IOListDef =
+ {type,0,maybe_improper_list,
+ [{type,0,union,[{type,0,byte,[]},{type,0,binary,[]},
+ {type,0,iolist,[]}]},
+ {type,0,binary,[]}]},
+ IOListDecl = {attribute,0,type,{iolist,IOListDef,[]}},
+ [IOListDecl | Forms].
+
+-spec get_mod_info(mod_name(), [abs_form()], mod_exp_funs()) -> mod_info().
+get_mod_info(Mod, AbsCode, ModExpFuns) ->
+ StartModInfo = #mod_info{mod_exp_funs = ModExpFuns},
+ ImmModInfo = lists:foldl(fun add_mod_info/2, StartModInfo, AbsCode),
+ #mod_info{mod_specs = AllModSpecs} = ImmModInfo,
+ IsExported = fun(FunRef,_FunRepr) -> sets:is_element(FunRef,ModExpFuns) end,
+ ModExpSpecs = dict:filter(IsExported, AllModSpecs),
+ ModInfo = ImmModInfo#mod_info{mod_specs = ModExpSpecs},
+ case orddict:find(Mod, ?HARD_ADT_MODS) of
+ {ok,ModADTs} ->
+ #mod_info{mod_exp_types = ModExpTypes, mod_types = ModTypes,
+ mod_opaques = ModOpaques} = ModInfo,
+ ModADTsSet =
+ sets:from_list([ImmTypeRef
+ || {ImmTypeRef,_HardADTRepr} <- ModADTs]),
+ NewModExpTypes = sets:union(ModExpTypes, ModADTsSet),
+ NewModTypes = lists:foldl(fun store_hard_adt/2, ModTypes, ModADTs),
+ NewModOpaques = sets:union(ModOpaques, ModADTsSet),
+ ModInfo#mod_info{mod_exp_types = NewModExpTypes,
+ mod_types = NewModTypes,
+ mod_opaques = NewModOpaques};
+ error ->
+ ModInfo
+ end.
+
+-spec store_hard_adt({imm_type_ref(),hard_adt_repr()}, mod_types()) ->
+ mod_types().
+store_hard_adt({_ImmTypeRef,already_declared}, ModTypes) ->
+ ModTypes;
+store_hard_adt({{Name,Arity},{TypeForm,VarNames}}, ModTypes) ->
+ TypeRef = {type,Name,Arity},
+ TypeRepr = {abs_type,TypeForm,VarNames,not_symb},
+ dict:store(TypeRef, TypeRepr, ModTypes).
+
+-spec add_mod_info(abs_form(), mod_info()) -> mod_info().
+add_mod_info({attribute,_Line,export_type,TypesList},
+ #mod_info{mod_exp_types = ModExpTypes} = ModInfo) ->
+ NewModExpTypes = sets:union(sets:from_list(TypesList), ModExpTypes),
+ ModInfo#mod_info{mod_exp_types = NewModExpTypes};
+add_mod_info({attribute,_Line,type,{{record,RecName},Fields,[]}},
+ #mod_info{mod_types = ModTypes} = ModInfo) ->
+ FieldInfo = [process_rec_field(F) || F <- Fields],
+ NewModTypes = dict:store({record,RecName,0}, {abs_record,FieldInfo},
+ ModTypes),
+ ModInfo#mod_info{mod_types = NewModTypes};
+add_mod_info({attribute,_Line,record,{RecName,Fields}},
+ #mod_info{mod_types = ModTypes} = ModInfo) ->
+ case dict:is_key(RecName, ModTypes) of
+ true ->
+ ModInfo;
+ false ->
+ A = erl_anno:new(0),
+ TypedRecord = {attribute,A,type,{{record,RecName},Fields,[]}},
+ add_mod_info(TypedRecord, ModInfo)
+ end;
+add_mod_info({attribute,_Line,Kind,{Name,TypeForm,VarForms}},
+ #mod_info{mod_types = ModTypes,
+ mod_opaques = ModOpaques} = ModInfo)
+ when Kind =:= type; Kind =:= opaque ->
+ Arity = length(VarForms),
+ VarNames = [V || {var,_,V} <- VarForms],
+ %% TODO: No check whether variables are different, or non-'_'.
+ NewModTypes = dict:store({type,Name,Arity},
+ {abs_type,TypeForm,VarNames,not_symb}, ModTypes),
+ NewModOpaques =
+ case Kind of
+ type -> ModOpaques;
+ opaque -> sets:add_element({Name,Arity}, ModOpaques)
+ end,
+ ModInfo#mod_info{mod_types = NewModTypes, mod_opaques = NewModOpaques};
+add_mod_info({attribute,_Line,spec,{RawFunRef,[RawFirstClause | _Rest]}},
+ #mod_info{mod_specs = ModSpecs} = ModInfo) ->
+ FunRef = case RawFunRef of
+ {_Mod,Name,Arity} -> {Name,Arity};
+ {_Name,_Arity} = F -> F
+ end,
+ %% TODO: We just take the first function clause.
+ FirstClause = process_fun_clause(RawFirstClause),
+ NewModSpecs = dict:store(FunRef, FirstClause, ModSpecs),
+ ModInfo#mod_info{mod_specs = NewModSpecs};
+add_mod_info(_Form, ModInfo) ->
+ ModInfo.
+
+-spec process_rec_field(abs_rec_field()) -> {field_name(),abs_type()}.
+process_rec_field({record_field,_,{atom,_,FieldName}}) ->
+ {FieldName, {type,0,any,[]}};
+process_rec_field({record_field,_,{atom,_,FieldName},_Initialization}) ->
+ {FieldName, {type,0,any,[]}};
+process_rec_field({typed_record_field,RecField,FieldType}) ->
+ {FieldName,_} = process_rec_field(RecField),
+ {FieldName, FieldType}.
+
+-spec process_fun_clause(abs_type()) -> fun_clause_repr().
+process_fun_clause({type,_,'fun',[{type,_,product,Domain},Range]}) ->
+ {Domain, Range};
+process_fun_clause({type,_,bounded_fun,[MainClause,Constraints]}) ->
+ {RawDomain,RawRange} = process_fun_clause(MainClause),
+ VarSubsts = [{V,T} || {type,_,constraint,
+ [{atom,_,is_subtype},[{var,_,V},T]]} <- Constraints,
+ V =/= '_'],
+ VarSubstsDict = dict:from_list(VarSubsts),
+ Domain = [update_vars(A, VarSubstsDict, false) || A <- RawDomain],
+ Range = update_vars(RawRange, VarSubstsDict, false),
+ {Domain, Range}.
+
+-spec store_mod_info(mod_name(), mod_info(), state()) -> state().
+store_mod_info(Mod, #mod_info{mod_exp_types = ModExpTypes, mod_types = ModTypes,
+ mod_specs = ImmModExpSpecs},
+ #state{exp_types = ExpTypes, types = Types,
+ exp_specs = ExpSpecs} = State) ->
+ NewExpTypes = dict:store(Mod, ModExpTypes, ExpTypes),
+ NewTypes = dict:store(Mod, ModTypes, Types),
+ ModExpSpecs = dict:map(fun unbound_to_any/2, ImmModExpSpecs),
+ NewExpSpecs = dict:store(Mod, ModExpSpecs, ExpSpecs),
+ State#state{exp_types = NewExpTypes, types = NewTypes,
+ exp_specs = NewExpSpecs}.
+
+-spec unbound_to_any(fun_ref(), fun_repr()) -> fun_repr().
+unbound_to_any(_FunRef, {Domain,Range}) ->
+ EmptySubstsDict = dict:new(),
+ NewDomain = [update_vars(A,EmptySubstsDict,true) || A <- Domain],
+ NewRange = update_vars(Range, EmptySubstsDict, true),
+ {NewDomain, NewRange}.
+
+
+%%------------------------------------------------------------------------------
+%% ADT translation functions
+%%------------------------------------------------------------------------------
+
+-spec process_adts(mod_name(), mod_info()) -> mod_info().
+process_adts(Mod,
+ #mod_info{mod_exp_types = ModExpTypes, mod_opaques = ModOpaques,
+ mod_specs = ModExpSpecs} = ModInfo) ->
+ %% TODO: No warning on unexported opaques.
+ case sets:to_list(sets:intersection(ModExpTypes,ModOpaques)) of
+ [] ->
+ ModInfo;
+ ModADTs ->
+ %% TODO: No warning on unexported API functions.
+ ModExpSpecsList = [{Name,Domain,Range}
+ || {{Name,_Arity},{Domain,Range}}
+ <- dict:to_list(ModExpSpecs)],
+ AddADT = fun(ADT,Acc) -> add_adt(Mod,ADT,Acc,ModExpSpecsList) end,
+ lists:foldl(AddADT, ModInfo, ModADTs)
+ end.
+
+-spec add_adt(mod_name(), imm_type_ref(), mod_info(), [proc_fun_ref()]) ->
+ mod_info().
+add_adt(Mod, {Name,Arity}, #mod_info{mod_types = ModTypes} = ModInfo,
+ ModExpFunSpecs) ->
+ ADTRef = {type,Name,Arity},
+ {abs_type,InternalRepr,VarNames,not_symb} = dict:fetch(ADTRef, ModTypes),
+ FullADTRef = {Mod,Name,Arity},
+ %% TODO: No warning on unsuitable range.
+ SymbCalls1 = [get_symb_call(FullADTRef,Spec) || Spec <- ModExpFunSpecs],
+ %% TODO: No warning on bad use of variables.
+ SymbCalls2 = [fix_vars(FullADTRef,Call,RangeVars,VarNames)
+ || {ok,Call,RangeVars} <- SymbCalls1],
+ case [Call || {ok,Call} <- SymbCalls2] of
+ [] ->
+ %% TODO: No warning on no acceptable spec.
+ ModInfo;
+ SymbCalls3 ->
+ NewADTRepr = {abs_type,{type,0,union,SymbCalls3},VarNames,
+ {orig_abs,InternalRepr}},
+ NewModTypes = dict:store(ADTRef, NewADTRepr, ModTypes),
+ ModInfo#mod_info{mod_types = NewModTypes}
+ end.
+
+-spec get_symb_call(full_imm_type_ref(), proc_fun_ref()) ->
+ tagged_result2(abs_type(),[var_name()]).
+get_symb_call({Mod,_TypeName,_Arity} = FullADTRef, {FunName,Domain,Range}) ->
+ BaseCall = {type,0,tuple,[{atom,0,'$call'},{atom,0,Mod},{atom,0,FunName},
+ {type,0,'$fixed_list',Domain}]},
+ unwrap_range(FullADTRef, BaseCall, Range, false).
+
+-spec unwrap_range(full_imm_type_ref(), abs_type() | next_step(), abs_type(),
+ boolean()) ->
+ tagged_result2(abs_type() | next_step(),[var_name()]).
+unwrap_range(FullADTRef, Call, {paren_type,_,[Type]}, TestRun) ->
+ unwrap_range(FullADTRef, Call, Type, TestRun);
+unwrap_range(FullADTRef, Call, {ann_type,_,[_Var,Type]}, TestRun) ->
+ unwrap_range(FullADTRef, Call, Type, TestRun);
+unwrap_range(FullADTRef, Call, {type,_,list,[ElemType]}, TestRun) ->
+ unwrap_list(FullADTRef, Call, ElemType, TestRun);
+unwrap_range(FullADTRef, Call, {type,_,maybe_improper_list,[Cont,_Term]},
+ TestRun) ->
+ unwrap_list(FullADTRef, Call, Cont, TestRun);
+unwrap_range(FullADTRef, Call, {type,_,nonempty_list,[ElemType]}, TestRun) ->
+ unwrap_list(FullADTRef, Call, ElemType, TestRun);
+unwrap_range(FullADTRef, Call, {type,_,nonempty_improper_list,[Cont,_Term]},
+ TestRun) ->
+ unwrap_list(FullADTRef, Call, Cont, TestRun);
+unwrap_range(FullADTRef, Call,
+ {type,_,nonempty_maybe_improper_list,[Cont,_Term]}, TestRun) ->
+ unwrap_list(FullADTRef, Call, Cont, TestRun);
+unwrap_range(_FullADTRef, _Call, {type,_,tuple,any}, _TestRun) ->
+ error;
+unwrap_range(FullADTRef, Call, {type,_,tuple,FieldForms}, TestRun) ->
+ Translates = fun(T) -> unwrap_range(FullADTRef,none,T,true) =/= error end,
+ case proper_arith:find_first(Translates, FieldForms) of
+ none ->
+ error;
+ {TargetPos,TargetElem} ->
+ Pattern = get_pattern(TargetPos, FieldForms),
+ case TestRun of
+ true ->
+ NewCall =
+ case Call of
+ none -> {match_with,Pattern};
+ _ -> Call
+ end,
+ {ok, NewCall, []};
+ false ->
+ AbsPattern = term_to_singleton_type(Pattern),
+ NewCall =
+ {type,0,tuple,
+ [{atom,0,'$call'},{atom,0,?MODULE},{atom,0,match},
+ {type,0,'$fixed_list',[AbsPattern,Call]}]},
+ unwrap_range(FullADTRef, NewCall, TargetElem, TestRun)
+ end
+ end;
+unwrap_range(FullADTRef, Call, {type,_,union,Choices}, TestRun) ->
+ TestedChoices = [unwrap_range(FullADTRef,none,C,true) || C <- Choices],
+ NotError = fun(error) -> false; (_) -> true end,
+ case proper_arith:find_first(NotError, TestedChoices) of
+ none ->
+ error;
+ {_ChoicePos,{ok,none,_RangeVars}} ->
+ error;
+ {ChoicePos,{ok,NextStep,_RangeVars}} ->
+ {A, [ChoiceElem|B]} = lists:split(ChoicePos-1, Choices),
+ OtherChoices = A ++ B,
+ DistinctChoice =
+ case NextStep of
+ take_head ->
+ fun cant_have_head/1;
+ {match_with,Pattern} ->
+ fun(C) -> cant_match(Pattern, C) end
+ end,
+ case {lists:all(DistinctChoice,OtherChoices), TestRun} of
+ {true,true} ->
+ {ok, NextStep, []};
+ {true,false} ->
+ unwrap_range(FullADTRef, Call, ChoiceElem, TestRun);
+ {false,_} ->
+ error
+ end
+ end;
+unwrap_range({_Mod,SameName,Arity}, Call, {type,_,SameName,ArgForms},
+ _TestRun) ->
+ RangeVars = [V || {var,_,V} <- ArgForms, V =/= '_'],
+ case length(ArgForms) =:= Arity andalso length(RangeVars) =:= Arity of
+ true -> {ok, Call, RangeVars};
+ false -> error
+ end;
+unwrap_range({SameMod,SameName,_Arity} = FullADTRef, Call,
+ {remote_type,_,[{atom,_,SameMod},{atom,_,SameName},ArgForms]},
+ TestRun) ->
+ unwrap_range(FullADTRef, Call, {type,0,SameName,ArgForms}, TestRun);
+unwrap_range(_FullADTRef, _Call, _Range, _TestRun) ->
+ error.
+
+-spec unwrap_list(full_imm_type_ref(), abs_type() | next_step(), abs_type(),
+ boolean()) ->
+ tagged_result2(abs_type() | next_step(),[var_name()]).
+unwrap_list(FullADTRef, Call, HeadType, TestRun) ->
+ NewCall =
+ case TestRun of
+ true ->
+ case Call of
+ none -> take_head;
+ _ -> Call
+ end;
+ false ->
+ {type,0,tuple,[{atom,0,'$call'},{atom,0,erlang},{atom,0,hd},
+ {type,0,'$fixed_list',[Call]}]}
+ end,
+ unwrap_range(FullADTRef, NewCall, HeadType, TestRun).
+
+-spec fix_vars(full_imm_type_ref(), abs_type(), [var_name()], [var_name()]) ->
+ tagged_result(abs_type()).
+fix_vars(FullADTRef, Call, RangeVars, VarNames) ->
+ NotAnyVar = fun(V) -> V =/= '_' end,
+ case no_duplicates(VarNames) andalso lists:all(NotAnyVar,VarNames) of
+ true ->
+ RawUsedVars =
+ collect_vars(FullADTRef, Call, [[V] || V <- RangeVars]),
+ UsedVars = [lists:usort(L) || L <- RawUsedVars],
+ case correct_var_use(UsedVars) of
+ true ->
+ PairAll = fun(L,Y) -> [{X,{var,0,Y}} || X <- L] end,
+ VarSubsts =
+ lists:flatten(lists:zipwith(PairAll,UsedVars,VarNames)),
+ VarSubstsDict = dict:from_list(VarSubsts),
+ {ok, update_vars(Call,VarSubstsDict,true)};
+ false ->
+ error
+ end;
+ false ->
+ error
+ end.
+
+-spec no_duplicates(list()) -> boolean().
+no_duplicates(L) ->
+ length(lists:usort(L)) =:= length(L).
+
+-spec correct_var_use([[var_name() | 0]]) -> boolean().
+correct_var_use(UsedVars) ->
+ NoNonVarArgs = fun([0|_]) -> false; (_) -> true end,
+ lists:all(NoNonVarArgs, UsedVars)
+ andalso no_duplicates(lists:flatten(UsedVars)).
+
+-spec collect_vars(full_imm_type_ref(), abs_type(), [[var_name() | 0]]) ->
+ [[var_name() | 0]].
+collect_vars(FullADTRef, {paren_type,_,[Type]}, UsedVars) ->
+ collect_vars(FullADTRef, Type, UsedVars);
+collect_vars(FullADTRef, {ann_type,_,[_Var,Type]}, UsedVars) ->
+ collect_vars(FullADTRef, Type, UsedVars);
+collect_vars(_FullADTRef, {type,_,tuple,any}, UsedVars) ->
+ UsedVars;
+collect_vars({_Mod,SameName,Arity} = FullADTRef, {type,_,SameName,ArgForms},
+ UsedVars) ->
+ case length(ArgForms) =:= Arity of
+ true ->
+ VarArgs = [V || {var,_,V} <- ArgForms, V =/= '_'],
+ case length(VarArgs) =:= Arity of
+ true ->
+ AddToList = fun(X,L) -> [X | L] end,
+ lists:zipwith(AddToList, VarArgs, UsedVars);
+ false ->
+ [[0|L] || L <- UsedVars]
+ end;
+ false ->
+ multi_collect_vars(FullADTRef, ArgForms, UsedVars)
+ end;
+collect_vars(FullADTRef, {type,_,_Name,ArgForms}, UsedVars) ->
+ multi_collect_vars(FullADTRef, ArgForms, UsedVars);
+collect_vars({SameMod,SameName,_Arity} = FullADTRef,
+ {remote_type,_,[{atom,_,SameMod},{atom,_,SameName},ArgForms]},
+ UsedVars) ->
+ collect_vars(FullADTRef, {type,0,SameName,ArgForms}, UsedVars);
+collect_vars(FullADTRef, {remote_type,_,[_RemModForm,_NameForm,ArgForms]},
+ UsedVars) ->
+ multi_collect_vars(FullADTRef, ArgForms, UsedVars);
+collect_vars(_FullADTRef, _Call, UsedVars) ->
+ UsedVars.
+
+-spec multi_collect_vars(full_imm_type_ref(), [abs_type()],
+ [[var_name() | 0]]) -> [[var_name() | 0]].
+multi_collect_vars({_Mod,_Name,Arity} = FullADTRef, Forms, UsedVars) ->
+ NoUsedVars = lists:duplicate(Arity, []),
+ MoreUsedVars = [collect_vars(FullADTRef,T,NoUsedVars) || T <- Forms],
+ CombineVars = fun(L1,L2) -> lists:zipwith(fun erlang:'++'/2, L1, L2) end,
+ lists:foldl(CombineVars, UsedVars, MoreUsedVars).
+
+-ifdef(NO_MODULES_IN_OPAQUES).
+-type var_substs_dict() :: dict().
+-else.
+-type var_substs_dict() :: dict:dict(var_name(),abs_type()).
+-endif.
+-spec update_vars(abs_type(), var_substs_dict(), boolean()) -> abs_type().
+update_vars({paren_type,Line,[Type]}, VarSubstsDict, UnboundToAny) ->
+ {paren_type, Line, [update_vars(Type,VarSubstsDict,UnboundToAny)]};
+update_vars({ann_type,Line,[Var,Type]}, VarSubstsDict, UnboundToAny) ->
+ {ann_type, Line, [Var,update_vars(Type,VarSubstsDict,UnboundToAny)]};
+update_vars({var,Line,VarName} = Call, VarSubstsDict, UnboundToAny) ->
+ case dict:find(VarName, VarSubstsDict) of
+ {ok,SubstType} ->
+ SubstType;
+ error when UnboundToAny =:= false ->
+ Call;
+ error when UnboundToAny =:= true ->
+ {type,Line,any,[]}
+ end;
+update_vars({remote_type,Line,[RemModForm,NameForm,ArgForms]}, VarSubstsDict,
+ UnboundToAny) ->
+ NewArgForms = [update_vars(A,VarSubstsDict,UnboundToAny) || A <- ArgForms],
+ {remote_type, Line, [RemModForm,NameForm,NewArgForms]};
+update_vars({type,_,tuple,any} = Call, _VarSubstsDict, _UnboundToAny) ->
+ Call;
+update_vars({type,Line,Name,ArgForms}, VarSubstsDict, UnboundToAny) ->
+ {type, Line, Name, [update_vars(A,VarSubstsDict,UnboundToAny)
+ || A <- ArgForms]};
+update_vars(Call, _VarSubstsDict, _UnboundToAny) ->
+ Call.
+
+
+%%------------------------------------------------------------------------------
+%% Match-related functions
+%%------------------------------------------------------------------------------
+
+-spec get_pattern(position(), [abs_type()]) -> pattern().
+get_pattern(TargetPos, FieldForms) ->
+ {0,RevPattern} = lists:foldl(fun add_field/2, {TargetPos,[]}, FieldForms),
+ list_to_tuple(lists:reverse(RevPattern)).
+
+-spec add_field(abs_type(), {non_neg_integer(),[pat_field()]}) ->
+ {non_neg_integer(),[pat_field(),...]}.
+add_field(_Type, {1,Acc}) ->
+ {0, [1|Acc]};
+add_field({atom,_,Tag}, {Left,Acc}) ->
+ {erlang:max(0,Left-1), [Tag|Acc]};
+add_field(_Type, {Left,Acc}) ->
+ {erlang:max(0,Left-1), [0|Acc]}.
+
+%% @private
+-spec match(pattern(), tuple()) -> term().
+match(Pattern, Term) when tuple_size(Pattern) =:= tuple_size(Term) ->
+ match(tuple_to_list(Pattern), tuple_to_list(Term), none, false);
+match(_Pattern, _Term) ->
+ throw(no_match).
+
+-spec match([pat_field()], [term()], 'none' | {'ok',T}, boolean()) -> T.
+match([], [], {ok,Target}, _TypeMode) ->
+ Target;
+match([0|PatRest], [_|ToMatchRest], Acc, TypeMode) ->
+ match(PatRest, ToMatchRest, Acc, TypeMode);
+match([1|PatRest], [Target|ToMatchRest], none, TypeMode) ->
+ match(PatRest, ToMatchRest, {ok,Target}, TypeMode);
+match([Tag|PatRest], [X|ToMatchRest], Acc, TypeMode) when is_atom(Tag) ->
+ MatchesTag =
+ case TypeMode of
+ true -> can_be_tag(Tag, X);
+ false -> Tag =:= X
+ end,
+ case MatchesTag of
+ true -> match(PatRest, ToMatchRest, Acc, TypeMode);
+ false -> throw(no_match)
+ end.
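+
+%% For instance, with the pattern `{foo,0,1}' (as built by get_pattern/2 when
+%% the target is the third field of a tuple type tagged with the atom `foo'),
+%% `match({foo,0,1}, {foo,bar,42})' returns `42', whereas a term whose first
+%% element is not `foo' makes the call throw `no_match'.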
+
+%% CAUTION: these must be sorted
+-define(NON_ATOM_TYPES,
+ [arity,binary,bitstring,byte,char,float,'fun',function,integer,iodata,
+ iolist,list,maybe_improper_list,mfa,neg_integer,nil,no_return,
+ non_neg_integer,none,nonempty_improper_list,nonempty_list,
+ nonempty_maybe_improper_list,nonempty_string,number,pid,port,
+ pos_integer,range,record,reference,string,tuple]).
+-define(NON_TUPLE_TYPES,
+ [arity,atom,binary,bitstring,bool,boolean,byte,char,float,'fun',
+ function,identifier,integer,iodata,iolist,list,maybe_improper_list,
+ neg_integer,nil,no_return,node,non_neg_integer,none,
+ nonempty_improper_list,nonempty_list,nonempty_maybe_improper_list,
+ nonempty_string,number,pid,port,pos_integer,range,reference,string,
+ timeout]).
+-define(NO_HEAD_TYPES,
+ [arity,atom,binary,bitstring,bool,boolean,byte,char,float,'fun',
+ function,identifier,integer,mfa,module,neg_integer,nil,no_return,node,
+ non_neg_integer,none,number,pid,port,pos_integer,range,record,
+ reference,timeout,tuple]).
+
+-spec can_be_tag(atom(), abs_type()) -> boolean().
+can_be_tag(Tag, {ann_type,_,[_Var,Type]}) ->
+ can_be_tag(Tag, Type);
+can_be_tag(Tag, {paren_type,_,[Type]}) ->
+ can_be_tag(Tag, Type);
+can_be_tag(Tag, {atom,_,Atom}) ->
+ Tag =:= Atom;
+can_be_tag(_Tag, {integer,_,_Int}) ->
+ false;
+can_be_tag(_Tag, {op,_,_Op,_Arg}) ->
+ false;
+can_be_tag(_Tag, {op,_,_Op,_Arg1,_Arg2}) ->
+ false;
+can_be_tag(Tag, {type,_,BName,[]}) when BName =:= bool; BName =:= boolean ->
+ is_boolean(Tag);
+can_be_tag(Tag, {type,_,timeout,[]}) ->
+ Tag =:= infinity;
+can_be_tag(Tag, {type,_,union,Choices}) ->
+ lists:any(fun(C) -> can_be_tag(Tag,C) end, Choices);
+can_be_tag(_Tag, {type,_,Name,_Args}) ->
+ not ordsets:is_element(Name, ?NON_ATOM_TYPES);
+can_be_tag(_Tag, _Type) ->
+ true.
+
+-spec cant_match(pattern(), abs_type()) -> boolean().
+cant_match(Pattern, {ann_type,_,[_Var,Type]}) ->
+ cant_match(Pattern, Type);
+cant_match(Pattern, {paren_type,_,[Type]}) ->
+ cant_match(Pattern, Type);
+cant_match(_Pattern, {atom,_,_Atom}) ->
+ true;
+cant_match(_Pattern, {integer,_,_Int}) ->
+ true;
+cant_match(_Pattern, {op,_,_Op,_Arg}) ->
+ true;
+cant_match(_Pattern, {op,_,_Op,_Arg1,_Arg2}) ->
+ true;
+cant_match(Pattern, {type,_,mfa,[]}) ->
+ cant_match(Pattern, {type,0,tuple,[{type,0,atom,[]},{type,0,atom,[]},
+ {type,0,arity,[]}]});
+cant_match(Pattern, {type,_,union,Choices}) ->
+ lists:all(fun(C) -> cant_match(Pattern,C) end, Choices);
+cant_match(_Pattern, {type,_,tuple,any}) ->
+ false;
+cant_match(Pattern, {type,_,tuple,Fields}) ->
+ tuple_size(Pattern) =/= length(Fields) orelse
+ try match(tuple_to_list(Pattern), Fields, none, true) of
+ _ -> false
+ catch
+ throw:no_match -> true
+ end;
+cant_match(_Pattern, {type,_,Name,_Args}) ->
+ ordsets:is_element(Name, ?NON_TUPLE_TYPES);
+cant_match(_Pattern, _Type) ->
+ false.
+
+-spec cant_have_head(abs_type()) -> boolean().
+cant_have_head({ann_type,_,[_Var,Type]}) ->
+ cant_have_head(Type);
+cant_have_head({paren_type,_,[Type]}) ->
+ cant_have_head(Type);
+cant_have_head({atom,_,_Atom}) ->
+ true;
+cant_have_head({integer,_,_Int}) ->
+ true;
+cant_have_head({op,_,_Op,_Arg}) ->
+ true;
+cant_have_head({op,_,_Op,_Arg1,_Arg2}) ->
+ true;
+cant_have_head({type,_,union,Choices}) ->
+ lists:all(fun cant_have_head/1, Choices);
+cant_have_head({type,_,Name,_Args}) ->
+ ordsets:is_element(Name, ?NO_HEAD_TYPES);
+cant_have_head(_Type) ->
+ false.
+
+%% Only covers atoms, integers and tuples, i.e. those that can be specified
+%% through singleton types.
+-spec term_to_singleton_type(atom() | integer()
+ | loose_tuple(atom() | integer())) -> abs_type().
+term_to_singleton_type(Atom) when is_atom(Atom) ->
+ {atom,0,Atom};
+term_to_singleton_type(Int) when is_integer(Int), Int >= 0 ->
+ {integer,0,Int};
+term_to_singleton_type(Int) when is_integer(Int), Int < 0 ->
+ {op,0,'-',{integer,0,-Int}};
+term_to_singleton_type(Tuple) when is_tuple(Tuple) ->
+ Fields = tuple_to_list(Tuple),
+ {type,0,tuple,[term_to_singleton_type(F) || F <- Fields]}.
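+
+%% For instance, `term_to_singleton_type({foo,-1})' yields
+%% `{type,0,tuple,[{atom,0,foo},{op,0,'-',{integer,0,1}}]}'.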
+
+
+%%------------------------------------------------------------------------------
+%% Instance testing functions
+%%------------------------------------------------------------------------------
+
+%% CAUTION: this must be sorted
+-define(EQUIV_TYPES,
+ [{arity, {type,0,range,[{integer,0,0},{integer,0,255}]}},
+ {bool, {type,0,boolean,[]}},
+ {byte, {type,0,range,[{integer,0,0},{integer,0,255}]}},
+ {char, {type,0,range,[{integer,0,0},{integer,0,16#10ffff}]}},
+ {function, {type,0,'fun',[]}},
+ {identifier, {type,0,union,[{type,0,pid,[]},{type,0,port,[]},
+ {type,0,reference,[]}]}},
+ {iodata, {type,0,union,[{type,0,binary,[]},{type,0,iolist,[]}]}},
+ {iolist, {type,0,maybe_improper_list,
+ [{type,0,union,[{type,0,byte,[]},{type,0,binary,[]},
+ {type,0,iolist,[]}]},
+ {type,0,binary,[]}]}},
+ {list, {type,0,list,[{type,0,any,[]}]}},
+ {maybe_improper_list, {type,0,maybe_improper_list,[{type,0,any,[]},
+ {type,0,any,[]}]}},
+ {mfa, {type,0,tuple,[{type,0,atom,[]},{type,0,atom,[]},
+ {type,0,arity,[]}]}},
+ {node, {type,0,atom,[]}},
+ {nonempty_list, {type,0,nonempty_list,[{type,0,any,[]}]}},
+ {nonempty_maybe_improper_list, {type,0,nonempty_maybe_improper_list,
+ [{type,0,any,[]},{type,0,any,[]}]}},
+ {nonempty_string, {type,0,nonempty_list,[{type,0,char,[]}]}},
+ {string, {type,0,list,[{type,0,char,[]}]}},
+ {term, {type,0,any,[]}},
+ {timeout, {type,0,union,[{atom,0,infinity},
+ {type,0,non_neg_integer,[]}]}}]).
+
+%% @private
+%% TODO: Most of these functions accept an extended form of abs_type(), namely
+%% the addition of a custom wrapper: {'from_mod',mod_name(),...}
+-spec is_instance(term(), mod_name(), abs_type()) -> boolean().
+is_instance(X, Mod, TypeForm) ->
+ is_instance(X, Mod, TypeForm, []).
+
+-spec is_instance(term(), mod_name(), abs_type(), imm_stack()) -> boolean().
+is_instance(X, _Mod, {from_mod,OrigMod,Type}, Stack) ->
+ is_instance(X, OrigMod, Type, Stack);
+is_instance(_X, _Mod, {var,_,'_'}, _Stack) ->
+ true;
+is_instance(_X, _Mod, {var,_,Name}, _Stack) ->
+ %% All unconstrained spec vars have been replaced by 'any()' and we always
+ %% replace the variables on the RHS of types before recursing into them.
+ %% Provided that '-type' declarations contain no unbound variables, we
+ %% don't expect to find any non-'_' variables while recursing.
+ throw({'$typeserver',{unbound_var_in_type_declaration,Name}});
+is_instance(X, Mod, {ann_type,_,[_Var,Type]}, Stack) ->
+ is_instance(X, Mod, Type, Stack);
+is_instance(X, Mod, {paren_type,_,[Type]}, Stack) ->
+ is_instance(X, Mod, Type, Stack);
+is_instance(X, Mod, {remote_type,_,[{atom,_,RemMod},{atom,_,Name},ArgForms]},
+ Stack) ->
+ is_custom_instance(X, Mod, RemMod, Name, ArgForms, true, Stack);
+is_instance(SameAtom, _Mod, {atom,_,SameAtom}, _Stack) ->
+ true;
+is_instance(SameInt, _Mod, {integer,_,SameInt}, _Stack) ->
+ true;
+is_instance(X, _Mod, {op,_,_Op,_Arg} = Expr, _Stack) ->
+ is_int_const(X, Expr);
+is_instance(X, _Mod, {op,_,_Op,_Arg1,_Arg2} = Expr, _Stack) ->
+ is_int_const(X, Expr);
+is_instance(_X, _Mod, {type,_,any,[]}, _Stack) ->
+ true;
+is_instance(X, _Mod, {type,_,atom,[]}, _Stack) ->
+ is_atom(X);
+is_instance(X, _Mod, {type,_,binary,[]}, _Stack) ->
+ is_binary(X);
+is_instance(X, _Mod, {type,_,binary,[BaseExpr,UnitExpr]}, _Stack) ->
+ %% <<_:X,_:_*Y>> means "bitstrings of X + k*Y bits, k >= 0"
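+    %% e.g. <<_:4,_:_*8>> accepts bitstrings of 4, 12, 20, ... bits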
+ case eval_int(BaseExpr) of
+ {ok,Base} when Base >= 0 ->
+ case eval_int(UnitExpr) of
+ {ok,Unit} when Unit >= 0 ->
+ case is_bitstring(X) of
+ true ->
+ BitSizeX = bit_size(X),
+ case Unit =:= 0 of
+ true ->
+ BitSizeX =:= Base;
+ false ->
+ BitSizeX >= Base
+ andalso
+ (BitSizeX - Base) rem Unit =:= 0
+ end;
+ false -> false
+ end;
+ _ ->
+ abs_expr_error(invalid_unit, UnitExpr)
+ end;
+ _ ->
+ abs_expr_error(invalid_base, BaseExpr)
+ end;
+is_instance(X, _Mod, {type,_,bitstring,[]}, _Stack) ->
+ is_bitstring(X);
+is_instance(X, _Mod, {type,_,boolean,[]}, _Stack) ->
+ is_boolean(X);
+is_instance(X, _Mod, {type,_,float,[]}, _Stack) ->
+ is_float(X);
+is_instance(X, _Mod, {type,_,'fun',[]}, _Stack) ->
+ is_function(X);
+%% TODO: how to check range type? random inputs? special case for 0-arity?
+is_instance(X, _Mod, {type,_,'fun',[{type,_,any,[]},_Range]}, _Stack) ->
+ is_function(X);
+is_instance(X, _Mod, {type,_,'fun',[{type,_,product,Domain},_Range]}, _Stack) ->
+ is_function(X, length(Domain));
+is_instance(X, _Mod, {type,_,integer,[]}, _Stack) ->
+ is_integer(X);
+is_instance(X, Mod, {type,_,list,[Type]}, _Stack) ->
+ list_test(X, Mod, Type, dummy, true, true, false);
+is_instance(X, Mod, {type,_,maybe_improper_list,[Cont,Term]}, _Stack) ->
+ list_test(X, Mod, Cont, Term, true, true, true);
+is_instance(X, _Mod, {type,_,module,[]}, _Stack) ->
+ is_atom(X) orelse
+ is_tuple(X) andalso X =/= {} andalso is_atom(element(1,X));
+is_instance([], _Mod, {type,_,nil,[]}, _Stack) ->
+ true;
+is_instance(X, _Mod, {type,_,neg_integer,[]}, _Stack) ->
+ is_integer(X) andalso X < 0;
+is_instance(X, _Mod, {type,_,non_neg_integer,[]}, _Stack) ->
+ is_integer(X) andalso X >= 0;
+is_instance(X, Mod, {type,_,nonempty_list,[Type]}, _Stack) ->
+ list_test(X, Mod, Type, dummy, false, true, false);
+is_instance(X, Mod, {type,_,nonempty_improper_list,[Cont,Term]}, _Stack) ->
+ list_test(X, Mod, Cont, Term, false, false, true);
+is_instance(X, Mod, {type,_,nonempty_maybe_improper_list,[Cont,Term]},
+ _Stack) ->
+ list_test(X, Mod, Cont, Term, false, true, true);
+is_instance(X, _Mod, {type,_,number,[]}, _Stack) ->
+ is_number(X);
+is_instance(X, _Mod, {type,_,pid,[]}, _Stack) ->
+ is_pid(X);
+is_instance(X, _Mod, {type,_,port,[]}, _Stack) ->
+ is_port(X);
+is_instance(X, _Mod, {type,_,pos_integer,[]}, _Stack) ->
+ is_integer(X) andalso X > 0;
+is_instance(_X, _Mod, {type,_,product,_Elements}, _Stack) ->
+ throw({'$typeserver',{internal,product_in_is_instance}});
+is_instance(X, _Mod, {type,_,range,[LowExpr,HighExpr]}, _Stack) ->
+ case {eval_int(LowExpr),eval_int(HighExpr)} of
+ {{ok,Low},{ok,High}} when Low =< High ->
+ X >= Low andalso X =< High;
+ _ ->
+ abs_expr_error(invalid_range, LowExpr, HighExpr)
+ end;
+is_instance(X, Mod, {type,_,record,[{atom,_,Name} = NameForm | RawSubsts]},
+ Stack) ->
+ Substs = [{N,T} || {type,_,field_type,[{atom,_,N},T]} <- RawSubsts],
+ SubstsDict = dict:from_list(Substs),
+ case get_type_repr(Mod, {record,Name,0}, false) of
+ {ok,{abs_record,OrigFields}} ->
+ Fields = [case dict:find(FieldName, SubstsDict) of
+ {ok,NewFieldType} -> NewFieldType;
+ error -> OrigFieldType
+ end
+ || {FieldName,OrigFieldType} <- OrigFields],
+ is_instance(X, Mod, {type,0,tuple,[NameForm|Fields]}, Stack);
+ {error,Reason} ->
+ throw({'$typeserver',Reason})
+ end;
+is_instance(X, _Mod, {type,_,reference,[]}, _Stack) ->
+ is_reference(X);
+is_instance(X, _Mod, {type,_,tuple,any}, _Stack) ->
+ is_tuple(X);
+is_instance(X, Mod, {type,_,tuple,Fields}, _Stack) ->
+ is_tuple(X) andalso tuple_test(tuple_to_list(X), Mod, Fields);
+is_instance(X, Mod, {type,_,union,Choices}, Stack) ->
+ IsInstance = fun(Choice) -> is_instance(X,Mod,Choice,Stack) end,
+ lists:any(IsInstance, Choices);
+is_instance(X, Mod, {type,_,Name,[]}, Stack) ->
+ case orddict:find(Name, ?EQUIV_TYPES) of
+ {ok,EquivType} ->
+ is_instance(X, Mod, EquivType, Stack);
+ error ->
+ is_maybe_hard_adt(X, Mod, Name, [], Stack)
+ end;
+is_instance(X, Mod, {type,_,Name,ArgForms}, Stack) ->
+ is_maybe_hard_adt(X, Mod, Name, ArgForms, Stack);
+is_instance(_X, _Mod, _Type, _Stack) ->
+ false.
+
+-spec is_int_const(term(), abs_expr()) -> boolean().
+is_int_const(X, Expr) ->
+ case eval_int(Expr) of
+ {ok,Int} ->
+ X =:= Int;
+ error ->
+ abs_expr_error(invalid_int_const, Expr)
+ end.
+
+%% TODO: We implicitly add the '| []' at the termination of maybe_improper_list.
+%% TODO: We ignore a '[]' termination in improper_list.
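+%% At the call sites above the three boolean flags are, for example,
+%% {CanEmpty,CanProper,CanImproper} = {true,true,false} for list/1 and
+%% {false,true,true} for nonempty_maybe_improper_list/2.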
+-spec list_test(term(), mod_name(), abs_type(), 'dummy' | abs_type(), boolean(),
+ boolean(), boolean()) -> boolean().
+list_test(X, Mod, Content, Termination, CanEmpty, CanProper, CanImproper) ->
+ is_list(X) andalso
+ list_rec(X, Mod, Content, Termination, CanEmpty, CanProper, CanImproper).
+
+-spec list_rec(term(), mod_name(), abs_type(), 'dummy' | abs_type(), boolean(),
+ boolean(), boolean()) -> boolean().
+list_rec([], _Mod, _Content, _Termination, CanEmpty, CanProper, _CanImproper) ->
+ CanEmpty andalso CanProper;
+list_rec([X | Rest], Mod, Content, Termination, _CanEmpty, CanProper,
+ CanImproper) ->
+ is_instance(X, Mod, Content, []) andalso
+ list_rec(Rest, Mod, Content, Termination, true, CanProper, CanImproper);
+list_rec(X, Mod, _Content, Termination, _CanEmpty, _CanProper, CanImproper) ->
+ CanImproper andalso is_instance(X, Mod, Termination, []).
+
+-spec tuple_test([term()], mod_name(), [abs_type()]) -> boolean().
+tuple_test([], _Mod, []) ->
+ true;
+tuple_test([X | XTail], Mod, [T | TTail]) ->
+ is_instance(X, Mod, T, []) andalso tuple_test(XTail, Mod, TTail);
+tuple_test(_, _Mod, _) ->
+ false.
+
+-spec is_maybe_hard_adt(term(), mod_name(), type_name(), [abs_type()],
+ imm_stack()) -> boolean().
+is_maybe_hard_adt(X, Mod, Name, ArgForms, Stack) ->
+ case orddict:find({Name,length(ArgForms)}, ?HARD_ADTS) of
+ {ok,ADTMod} ->
+ is_custom_instance(X, Mod, ADTMod, Name, ArgForms, true, Stack);
+ error ->
+ is_custom_instance(X, Mod, Mod, Name, ArgForms, false, Stack)
+ end.
+
+-spec is_custom_instance(term(), mod_name(), mod_name(), type_name(),
+ [abs_type()], boolean(), imm_stack()) -> boolean().
+is_custom_instance(X, Mod, RemMod, Name, RawArgForms, IsRemote, Stack) ->
+ ArgForms = case Mod =/= RemMod of
+ true -> [{from_mod,Mod,A} || A <- RawArgForms];
+ false -> RawArgForms
+ end,
+ Arity = length(ArgForms),
+ FullTypeRef = {RemMod,Name,Arity},
+ case lists:member(FullTypeRef, Stack) of
+ true ->
+ throw({'$typeserver',{self_reference,FullTypeRef}});
+ false ->
+ TypeRef = {type,Name,Arity},
+ AbsType = get_abs_type(RemMod, TypeRef, ArgForms, IsRemote),
+ is_instance(X, RemMod, AbsType, [FullTypeRef|Stack])
+ end.
+
+-spec get_abs_type(mod_name(), type_ref(), [abs_type()], boolean()) ->
+ abs_type().
+get_abs_type(RemMod, TypeRef, ArgForms, IsRemote) ->
+ case get_type_repr(RemMod, TypeRef, IsRemote) of
+ {ok,TypeRepr} ->
+ {FinalAbsType,SymbInfo,VarNames} =
+ case TypeRepr of
+ {cached,_FinType,FAT,SI} -> {FAT,SI,[]};
+ {abs_type,FAT,VN,SI} -> {FAT,SI,VN}
+ end,
+ AbsType =
+ case SymbInfo of
+ not_symb -> FinalAbsType;
+ {orig_abs,OrigAbsType} -> OrigAbsType
+ end,
+ VarSubstsDict = dict:from_list(lists:zip(VarNames,ArgForms)),
+ update_vars(AbsType, VarSubstsDict, false);
+ {error,Reason} ->
+ throw({'$typeserver',Reason})
+ end.
+
+-spec abs_expr_error(atom(), abs_expr()) -> no_return().
+abs_expr_error(ImmReason, Expr) ->
+ {error,Reason} = expr_error(ImmReason, Expr),
+ throw({'$typeserver',Reason}).
+
+-spec abs_expr_error(atom(), abs_expr(), abs_expr()) -> no_return().
+abs_expr_error(ImmReason, Expr1, Expr2) ->
+ {error,Reason} = expr_error(ImmReason, Expr1, Expr2),
+ throw({'$typeserver',Reason}).
+
+
+%%------------------------------------------------------------------------------
+%% Type translation functions
+%%------------------------------------------------------------------------------
+
+-spec convert(mod_name(), abs_type(), state()) ->
+ rich_result2(fin_type(),state()).
+convert(Mod, TypeForm, State) ->
+ case convert(Mod, TypeForm, State, [], dict:new()) of
+ {ok,{simple,Type},NewState} ->
+ {ok, Type, NewState};
+ {ok,{rec,_RecFun,_RecArgs},_NewState} ->
+ {error, {internal,rec_returned_to_toplevel}};
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec convert(mod_name(), abs_type(), state(), stack(), var_dict()) ->
+ rich_result2(ret_type(),state()).
+convert(Mod, {paren_type,_,[Type]}, State, Stack, VarDict) ->
+ convert(Mod, Type, State, Stack, VarDict);
+convert(Mod, {ann_type,_,[_Var,Type]}, State, Stack, VarDict) ->
+ convert(Mod, Type, State, Stack, VarDict);
+convert(_Mod, {var,_,'_'}, State, _Stack, _VarDict) ->
+ {ok, {simple,proper_types:any()}, State};
+convert(_Mod, {var,_,VarName}, State, _Stack, VarDict) ->
+ case dict:find(VarName, VarDict) of
+ %% TODO: do we need to check if we are at toplevel of a recursive?
+ {ok,RetType} -> {ok, RetType, State};
+ error -> {error, {unbound_var,VarName}}
+ end;
+convert(Mod, {remote_type,_,[{atom,_,RemMod},{atom,_,Name},ArgForms]}, State,
+ Stack, VarDict) ->
+ case prepare_for_remote(RemMod, Name, length(ArgForms), State) of
+ {ok,NewState} ->
+ convert_custom(Mod,RemMod,Name,ArgForms,NewState,Stack,VarDict);
+ {error,_Reason} = Error ->
+ Error
+ end;
+convert(_Mod, {atom,_,Atom}, State, _Stack, _VarDict) ->
+ {ok, {simple,proper_types:exactly(Atom)}, State};
+convert(_Mod, {integer,_,_Int} = IntExpr, State, _Stack, _VarDict) ->
+ convert_integer(IntExpr, State);
+convert(_Mod, {op,_,_Op,_Arg} = OpExpr, State, _Stack, _VarDict) ->
+ convert_integer(OpExpr, State);
+convert(_Mod, {op,_,_Op,_Arg1,_Arg2} = OpExpr, State, _Stack, _VarDict) ->
+ convert_integer(OpExpr, State);
+convert(_Mod, {type,_,binary,[BaseExpr,UnitExpr]}, State, _Stack, _VarDict) ->
+ %% <<_:X,_:_*Y>> means "bitstrings of X + k*Y bits, k >= 0"
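+  %% e.g. <<>> (Base 0, Unit 0) maps to exactly(<<>>), <<_:_*1>> to
+  %% bitstring() and <<_:_*8>> to binary(), as handled below.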
+ case eval_int(BaseExpr) of
+ {ok,0} ->
+ case eval_int(UnitExpr) of
+ {ok,0} -> {ok, {simple,proper_types:exactly(<<>>)}, State};
+ {ok,1} -> {ok, {simple,proper_types:bitstring()}, State};
+ {ok,8} -> {ok, {simple,proper_types:binary()}, State};
+ {ok,N} when N > 0 ->
+ Gen = ?LET(L, proper_types:list(proper_types:bitstring(N)),
+ concat_bitstrings(L)),
+ {ok, {simple,Gen}, State};
+ _ -> expr_error(invalid_unit, UnitExpr)
+ end;
+ {ok,Base} when Base > 0 ->
+ Head = proper_types:bitstring(Base),
+ case eval_int(UnitExpr) of
+ {ok,0} -> {ok, {simple,Head}, State};
+ {ok,1} ->
+ Tail = proper_types:bitstring(),
+ {ok, {simple,concat_binary_gens(Head, Tail)}, State};
+ {ok,8} ->
+ Tail = proper_types:binary(),
+ {ok, {simple,concat_binary_gens(Head, Tail)}, State};
+ {ok,N} when N > 0 ->
+ Tail =
+ ?LET(L, proper_types:list(proper_types:bitstring(N)),
+ concat_bitstrings(L)),
+ {ok, {simple,concat_binary_gens(Head, Tail)}, State};
+ _ -> expr_error(invalid_unit, UnitExpr)
+ end;
+ _ ->
+ expr_error(invalid_base, BaseExpr)
+ end;
+convert(_Mod, {type,_,range,[LowExpr,HighExpr]}, State, _Stack, _VarDict) ->
+ case {eval_int(LowExpr),eval_int(HighExpr)} of
+ {{ok,Low},{ok,High}} when Low =< High ->
+ {ok, {simple,proper_types:integer(Low,High)}, State};
+ _ ->
+ expr_error(invalid_range, LowExpr, HighExpr)
+ end;
+convert(_Mod, {type,_,nil,[]}, State, _Stack, _VarDict) ->
+ {ok, {simple,proper_types:exactly([])}, State};
+convert(Mod, {type,_,list,[ElemForm]}, State, Stack, VarDict) ->
+ convert_list(Mod, false, ElemForm, State, Stack, VarDict);
+convert(Mod, {type,_,nonempty_list,[ElemForm]}, State, Stack, VarDict) ->
+ convert_list(Mod, true, ElemForm, State, Stack, VarDict);
+convert(_Mod, {type,_,nonempty_list,[]}, State, _Stack, _VarDict) ->
+ {ok, {simple,proper_types:non_empty(proper_types:list())}, State};
+convert(_Mod, {type,_,nonempty_string,[]}, State, _Stack, _VarDict) ->
+ {ok, {simple,proper_types:non_empty(proper_types:string())}, State};
+convert(_Mod, {type,_,tuple,any}, State, _Stack, _VarDict) ->
+ {ok, {simple,proper_types:tuple()}, State};
+convert(Mod, {type,_,tuple,ElemForms}, State, Stack, VarDict) ->
+ convert_tuple(Mod, ElemForms, false, State, Stack, VarDict);
+convert(Mod, {type,_,'$fixed_list',ElemForms}, State, Stack, VarDict) ->
+ convert_tuple(Mod, ElemForms, true, State, Stack, VarDict);
+convert(Mod, {type,_,record,[{atom,_,Name}|FieldForms]}, State, Stack,
+ VarDict) ->
+ convert_record(Mod, Name, FieldForms, State, Stack, VarDict);
+convert(Mod, {type,_,union,ChoiceForms}, State, Stack, VarDict) ->
+ convert_union(Mod, ChoiceForms, State, Stack, VarDict);
+convert(Mod, {type,_,'fun',[{type,_,product,Domain},Range]}, State, Stack,
+ VarDict) ->
+ convert_fun(Mod, length(Domain), Range, State, Stack, VarDict);
+%% TODO: These types should be replaced with accurate types.
+%% TODO: Add support for nonempty_improper_list/2.
+convert(Mod, {type,_,maybe_improper_list,[]}, State, Stack, VarDict) ->
+ convert(Mod, {type,0,list,[]}, State, Stack, VarDict);
+convert(Mod, {type,_,maybe_improper_list,[Cont,_Ter]}, State, Stack, VarDict) ->
+ convert(Mod, {type,0,list,[Cont]}, State, Stack, VarDict);
+convert(Mod, {type,_,nonempty_maybe_improper_list,[]}, State, Stack, VarDict) ->
+ convert(Mod, {type,0,nonempty_list,[]}, State, Stack, VarDict);
+convert(Mod, {type,_,nonempty_maybe_improper_list,[Cont,_Term]}, State, Stack,
+ VarDict) ->
+ convert(Mod, {type,0,nonempty_list,[Cont]}, State, Stack, VarDict);
+convert(Mod, {type,_,iodata,[]}, State, Stack, VarDict) ->
+ RealType = {type,0,union,[{type,0,binary,[]},{type,0,iolist,[]}]},
+ convert(Mod, RealType, State, Stack, VarDict);
+convert(Mod, {type,_,Name,[]}, State, Stack, VarDict) ->
+ case ordsets:is_element(Name, ?STD_TYPES_0) of
+ true ->
+ {ok, {simple,proper_types:Name()}, State};
+ false ->
+ convert_maybe_hard_adt(Mod, Name, [], State, Stack, VarDict)
+ end;
+convert(Mod, {type,_,Name,ArgForms}, State, Stack, VarDict) ->
+ convert_maybe_hard_adt(Mod, Name, ArgForms, State, Stack, VarDict);
+convert(_Mod, TypeForm, _State, _Stack, _VarDict) ->
+ {error, {unsupported_type,TypeForm}}.
+
+-spec concat_bitstrings([bitstring()]) -> bitstring().
+concat_bitstrings(BitStrings) ->
+ concat_bitstrings_tr(BitStrings, <<>>).
+
+-spec concat_bitstrings_tr([bitstring()], bitstring()) -> bitstring().
+concat_bitstrings_tr([], Acc) ->
+ Acc;
+concat_bitstrings_tr([BitString | Rest], Acc) ->
+ concat_bitstrings_tr(Rest, <<Acc/bits,BitString/bits>>).
+
+-spec concat_binary_gens(fin_type(), fin_type()) -> fin_type().
+concat_binary_gens(HeadType, TailType) ->
+ ?LET({H,T}, {HeadType,TailType}, <<H/bits,T/bits>>).
+
+-spec convert_fun(mod_name(), arity(), abs_type(), state(), stack(),
+ var_dict()) -> rich_result2(ret_type(),state()).
+convert_fun(Mod, Arity, Range, State, Stack, VarDict) ->
+ case convert(Mod, Range, State, ['fun' | Stack], VarDict) of
+ {ok,{simple,RangeType},NewState} ->
+ {ok, {simple,proper_types:function(Arity,RangeType)}, NewState};
+ {ok,{rec,RecFun,RecArgs},NewState} ->
+ case at_toplevel(RecArgs, Stack) of
+ true -> base_case_error(Stack);
+ false -> convert_rec_fun(Arity, RecFun, RecArgs, NewState)
+ end;
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec convert_rec_fun(arity(), rec_fun(), rec_args(), state()) ->
+ {'ok',ret_type(),state()}.
+convert_rec_fun(Arity, RecFun, RecArgs, State) ->
+ %% We bind the generated value by size.
+ NewRecFun =
+ fun(GenFuns,Size) ->
+ proper_types:function(Arity, RecFun(GenFuns,Size))
+ end,
+ NewRecArgs = clean_rec_args(RecArgs),
+ {ok, {rec,NewRecFun,NewRecArgs}, State}.
+
+-spec convert_list(mod_name(), boolean(), abs_type(), state(), stack(),
+ var_dict()) -> rich_result2(ret_type(),state()).
+convert_list(Mod, NonEmpty, ElemForm, State, Stack, VarDict) ->
+ case convert(Mod, ElemForm, State, [list | Stack], VarDict) of
+ {ok,{simple,ElemType},NewState} ->
+ InnerType = proper_types:list(ElemType),
+ FinType = case NonEmpty of
+ true -> proper_types:non_empty(InnerType);
+ false -> InnerType
+ end,
+ {ok, {simple,FinType}, NewState};
+ {ok,{rec,RecFun,RecArgs},NewState} ->
+ case {at_toplevel(RecArgs,Stack), NonEmpty} of
+ {true,true} ->
+ base_case_error(Stack);
+ {true,false} ->
+ NewRecFun =
+ fun(GenFuns,Size) ->
+ ElemGen = fun(S) -> ?LAZY(RecFun(GenFuns,S)) end,
+ proper_types:distlist(Size, ElemGen, false)
+ end,
+ NewRecArgs = clean_rec_args(RecArgs),
+ {ok, {rec,NewRecFun,NewRecArgs}, NewState};
+ {false,_} ->
+ {NewRecFun,NewRecArgs} =
+ convert_rec_list(RecFun, RecArgs, NonEmpty),
+ {ok, {rec,NewRecFun,NewRecArgs}, NewState}
+ end;
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec convert_rec_list(rec_fun(), rec_args(), boolean()) ->
+ {rec_fun(),rec_args()}.
+convert_rec_list(RecFun, [{true,FullTypeRef}] = RecArgs, NonEmpty) ->
+ {NewRecFun,_NormalRecArgs} =
+ convert_normal_rec_list(RecFun, RecArgs, NonEmpty),
+ AltRecFun =
+ fun([InstListGen],Size) ->
+ InstTypesList =
+ proper_types:get_prop(internal_types, InstListGen(Size)),
+ proper_types:fixed_list([RecFun([fun(_Size) -> I end],0)
+ || I <- InstTypesList])
+ end,
+ NewRecArgs = [{{list,NonEmpty,AltRecFun},FullTypeRef}],
+ {NewRecFun, NewRecArgs};
+convert_rec_list(RecFun, RecArgs, NonEmpty) ->
+ convert_normal_rec_list(RecFun, RecArgs, NonEmpty).
+
+-spec convert_normal_rec_list(rec_fun(), rec_args(), boolean()) ->
+ {rec_fun(),rec_args()}.
+convert_normal_rec_list(RecFun, RecArgs, NonEmpty) ->
+ NewRecFun = fun(GenFuns,Size) ->
+ ElemGen = fun(S) -> RecFun(GenFuns, S) end,
+ proper_types:distlist(Size, ElemGen, NonEmpty)
+ end,
+ NewRecArgs = clean_rec_args(RecArgs),
+ {NewRecFun, NewRecArgs}.
+
+-spec convert_tuple(mod_name(), [abs_type()], boolean(), state(), stack(),
+ var_dict()) -> rich_result2(ret_type(),state()).
+convert_tuple(Mod, ElemForms, ToList, State, Stack, VarDict) ->
+ case process_list(Mod, ElemForms, State, [tuple | Stack], VarDict) of
+ {ok,RetTypes,NewState} ->
+ case combine_ret_types(RetTypes, {tuple,ToList}) of
+ {simple,_FinType} = RetType ->
+ {ok, RetType, NewState};
+ {rec,_RecFun,RecArgs} = RetType ->
+ case at_toplevel(RecArgs, Stack) of
+ true -> base_case_error(Stack);
+ false -> {ok, RetType, NewState}
+ end
+ end;
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec convert_union(mod_name(), [abs_type()], state(), stack(), var_dict()) ->
+ rich_result2(ret_type(),state()).
+convert_union(Mod, ChoiceForms, State, Stack, VarDict) ->
+ case process_list(Mod, ChoiceForms, State, [union | Stack], VarDict) of
+ {ok,RawChoices,NewState} ->
+ ProcessChoice = fun(T,A) -> process_choice(T,A,Stack) end,
+ {RevSelfRecs,RevNonSelfRecs,RevNonRecs} =
+ lists:foldl(ProcessChoice, {[],[],[]}, RawChoices),
+ case {lists:reverse(RevSelfRecs),lists:reverse(RevNonSelfRecs),
+ lists:reverse(RevNonRecs)} of
+ {_SelfRecs,[],[]} ->
+ base_case_error(Stack);
+ {[],NonSelfRecs,NonRecs} ->
+ {ok, combine_ret_types(NonRecs ++ NonSelfRecs, union),
+ NewState};
+ {SelfRecs,NonSelfRecs,NonRecs} ->
+ {BCaseRecFun,BCaseRecArgs} =
+ case combine_ret_types(NonRecs ++ NonSelfRecs, union) of
+ {simple,BCaseType} ->
+ {fun([],_Size) -> BCaseType end,[]};
+ {rec,BCRecFun,BCRecArgs} ->
+ {BCRecFun,BCRecArgs}
+ end,
+ NumBCaseGens = length(BCaseRecArgs),
+ [ParentRef | _Upper] = Stack,
+ FallbackRecFun = fun([SelfGen],_Size) -> SelfGen(0) end,
+ FallbackRecArgs = [{false,ParentRef}],
+ FallbackRetType = {rec,FallbackRecFun,FallbackRecArgs},
+ {rec,RCaseRecFun,RCaseRecArgs} =
+ combine_ret_types([FallbackRetType] ++ SelfRecs
+ ++ NonSelfRecs, wunion),
+ NewRecFun =
+ fun(AllGens,Size) ->
+ {BCaseGens,RCaseGens} =
+ lists:split(NumBCaseGens, AllGens),
+ case Size of
+ 0 -> BCaseRecFun(BCaseGens,0);
+ _ -> RCaseRecFun(RCaseGens,Size)
+ end
+ end,
+ NewRecArgs = BCaseRecArgs ++ RCaseRecArgs,
+ {ok, {rec,NewRecFun,NewRecArgs}, NewState}
+ end;
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec process_choice(ret_type(), {[ret_type()],[ret_type()],[ret_type()]},
+ stack()) -> {[ret_type()],[ret_type()],[ret_type()]}.
+process_choice({simple,_} = RetType, {SelfRecs,NonSelfRecs,NonRecs}, _Stack) ->
+ {SelfRecs, NonSelfRecs, [RetType | NonRecs]};
+process_choice({rec,RecFun,RecArgs}, {SelfRecs,NonSelfRecs,NonRecs}, Stack) ->
+ case at_toplevel(RecArgs, Stack) of
+ true ->
+ case partition_by_toplevel(RecArgs, Stack, true) of
+ {[],[],_,_} ->
+ NewRecArgs = clean_rec_args(RecArgs),
+ {[{rec,RecFun,NewRecArgs} | SelfRecs], NonSelfRecs,
+ NonRecs};
+ {SelfRecArgs,SelfPos,OtherRecArgs,_OtherPos} ->
+ NumInstances = length(SelfRecArgs),
+ IsListInst = fun({true,_FTRef}) -> false
+ ; ({{list,_NE,_AltRecFun},_FTRef}) -> true
+ end,
+ NewRecFun =
+ case proper_arith:filter(IsListInst,SelfRecArgs) of
+ {[],[]} ->
+ no_list_inst_rec_fun(RecFun,NumInstances,
+ SelfPos);
+ {[{{list,NonEmpty,AltRecFun},_}],[ListInstPos]} ->
+ list_inst_rec_fun(AltRecFun,NumInstances,
+ SelfPos,NonEmpty,ListInstPos)
+ end,
+ [{_B,SelfRef} | _] = SelfRecArgs,
+ NewRecArgs =
+ [{false,SelfRef} | clean_rec_args(OtherRecArgs)],
+ {[{rec,NewRecFun,NewRecArgs} | SelfRecs], NonSelfRecs,
+ NonRecs}
+ end;
+ false ->
+ NewRecArgs = clean_rec_args(RecArgs),
+ {SelfRecs, [{rec,RecFun,NewRecArgs} | NonSelfRecs], NonRecs}
+ end.
+
+-spec no_list_inst_rec_fun(rec_fun(), pos_integer(), [position()]) -> rec_fun().
+no_list_inst_rec_fun(RecFun, NumInstances, SelfPos) ->
+ fun([SelfGen|OtherGens], Size) ->
+ ?LETSHRINK(
+ Instances,
+ %% Size distribution will be a little off if both normal and
+ %% instance-accepting generators are present.
+ lists:duplicate(NumInstances, SelfGen(Size div NumInstances)),
+ begin
+ InstGens = [fun(_Size) -> proper_types:exactly(I) end
+ || I <- Instances],
+ AllGens = proper_arith:insert(InstGens, SelfPos, OtherGens),
+ RecFun(AllGens, Size)
+ end)
+ end.
+
+-spec list_inst_rec_fun(rec_fun(), pos_integer(), [position()], boolean(),
+ position()) -> rec_fun().
+list_inst_rec_fun(AltRecFun, NumInstances, SelfPos, NonEmpty, ListInstPos) ->
+ fun([SelfGen|OtherGens], Size) ->
+ ?LETSHRINK(
+ AllInsts,
+ lists:duplicate(NumInstances - 1, SelfGen(Size div NumInstances))
+ ++ proper_types:distlist(Size div NumInstances, SelfGen, NonEmpty),
+ begin
+ {Instances,InstList} = lists:split(NumInstances - 1, AllInsts),
+ InstGens = [fun(_Size) -> proper_types:exactly(I) end
+ || I <- Instances],
+ InstTypesList = [proper_types:exactly(I) || I <- InstList],
+ InstListGen =
+ fun(_Size) -> proper_types:fixed_list(InstTypesList) end,
+ AllInstGens = proper_arith:list_insert(ListInstPos, InstListGen,
+ InstGens),
+ AllGens = proper_arith:insert(AllInstGens, SelfPos, OtherGens),
+ AltRecFun(AllGens, Size)
+ end)
+ end.
+
+-spec convert_maybe_hard_adt(mod_name(), type_name(), [abs_type()], state(),
+ stack(), var_dict()) ->
+ rich_result2(ret_type(),state()).
+convert_maybe_hard_adt(Mod, Name, ArgForms, State, Stack, VarDict) ->
+ Arity = length(ArgForms),
+ case orddict:find({Name,Arity}, ?HARD_ADTS) of
+ {ok,Mod} ->
+ convert_custom(Mod, Mod, Name, ArgForms, State, Stack, VarDict);
+ {ok,ADTMod} ->
+ ADT = {remote_type,0,[{atom,0,ADTMod},{atom,0,Name},ArgForms]},
+ convert(Mod, ADT, State, Stack, VarDict);
+ error ->
+ convert_custom(Mod, Mod, Name, ArgForms, State, Stack, VarDict)
+ end.
+
+-spec convert_custom(mod_name(), mod_name(), type_name(), [abs_type()], state(),
+ stack(), var_dict()) -> rich_result2(ret_type(),state()).
+convert_custom(Mod, RemMod, Name, ArgForms, State, Stack, VarDict) ->
+ case process_list(Mod, ArgForms, State, Stack, VarDict) of
+ {ok,Args,NewState} ->
+ Arity = length(Args),
+ TypeRef = {type,Name,Arity},
+ FullTypeRef = {RemMod,type,Name,Args},
+ convert_type(TypeRef, FullTypeRef, NewState, Stack);
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec convert_record(mod_name(), type_name(), [abs_type()], state(), stack(),
+ var_dict()) -> rich_result2(ret_type(),state()).
+convert_record(Mod, Name, RawSubsts, State, Stack, VarDict) ->
+ Substs = [{N,T} || {type,_,field_type,[{atom,_,N},T]} <- RawSubsts],
+ {SubstFields,SubstTypeForms} = lists:unzip(Substs),
+ case process_list(Mod, SubstTypeForms, State, Stack, VarDict) of
+ {ok,SubstTypes,NewState} ->
+ SubstsDict = dict:from_list(lists:zip(SubstFields, SubstTypes)),
+ TypeRef = {record,Name,0},
+ FullTypeRef = {Mod,record,Name,SubstsDict},
+ convert_type(TypeRef, FullTypeRef, NewState, Stack);
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec convert_type(type_ref(), full_type_ref(), state(), stack()) ->
+ rich_result2(ret_type(),state()).
+convert_type(TypeRef, {Mod,_Kind,_Name,_Spec} = FullTypeRef, State, Stack) ->
+ case stack_position(FullTypeRef, Stack) of
+ none ->
+ case get_type_repr(Mod, TypeRef, false, State) of
+ {ok,TypeRepr,NewState} ->
+ convert_new_type(TypeRef, FullTypeRef, TypeRepr, NewState,
+ Stack);
+ {error,_Reason} = Error ->
+ Error
+ end;
+ 1 ->
+ base_case_error(Stack);
+ _Pos ->
+ {ok, {rec,fun([Gen],Size) -> Gen(Size) end,[{true,FullTypeRef}]},
+ State}
+ end.
+
+-spec convert_new_type(type_ref(), full_type_ref(), type_repr(), state(),
+ stack()) -> rich_result2(ret_type(),state()).
+convert_new_type(_TypeRef, {_Mod,type,_Name,[]},
+ {cached,FinType,_TypeForm,_SymbInfo}, State, _Stack) ->
+ {ok, {simple,FinType}, State};
+convert_new_type(TypeRef, {Mod,type,_Name,Args} = FullTypeRef,
+ {abs_type,TypeForm,Vars,SymbInfo}, State, Stack) ->
+ VarDict = dict:from_list(lists:zip(Vars, Args)),
+ case convert(Mod, TypeForm, State, [FullTypeRef | Stack], VarDict) of
+ {ok, {simple,ImmFinType}, NewState} ->
+ FinType = case SymbInfo of
+ not_symb ->
+ ImmFinType;
+ {orig_abs,_OrigAbsType} ->
+ proper_symb:internal_well_defined(ImmFinType)
+ end,
+ FinalState = case Vars of
+ [] -> cache_type(Mod, TypeRef, FinType, TypeForm,
+ SymbInfo, NewState);
+ _ -> NewState
+ end,
+ {ok, {simple,FinType}, FinalState};
+ {ok, {rec,RecFun,RecArgs}, NewState} ->
+ convert_maybe_rec(FullTypeRef, SymbInfo, RecFun, RecArgs, NewState,
+ Stack);
+ {error,_Reason} = Error ->
+ Error
+ end;
+convert_new_type(_TypeRef, {Mod,record,Name,SubstsDict} = FullTypeRef,
+ {abs_record,OrigFields}, State, Stack) ->
+ Fields = [case dict:find(FieldName, SubstsDict) of
+ {ok,NewFieldType} -> NewFieldType;
+ error -> OrigFieldType
+ end
+ || {FieldName,OrigFieldType} <- OrigFields],
+ case convert_tuple(Mod, [{atom,0,Name} | Fields], false, State,
+ [FullTypeRef | Stack], dict:new()) of
+ {ok, {simple,_FinType}, _NewState} = Result ->
+ Result;
+ {ok, {rec,RecFun,RecArgs}, NewState} ->
+ convert_maybe_rec(FullTypeRef, not_symb, RecFun, RecArgs, NewState,
+ Stack);
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec cache_type(mod_name(), type_ref(), fin_type(), abs_type(), symb_info(),
+ state()) -> state().
+cache_type(Mod, TypeRef, FinType, TypeForm, SymbInfo,
+ #state{types = Types} = State) ->
+ TypeRepr = {cached,FinType,TypeForm,SymbInfo},
+ ModTypes = dict:fetch(Mod, Types),
+ NewModTypes = dict:store(TypeRef, TypeRepr, ModTypes),
+ NewTypes = dict:store(Mod, NewModTypes, Types),
+ State#state{types = NewTypes}.
+
+-spec convert_maybe_rec(full_type_ref(), symb_info(), rec_fun(), rec_args(),
+ state(), stack()) -> rich_result2(ret_type(),state()).
+convert_maybe_rec(FullTypeRef, SymbInfo, RecFun, RecArgs, State, Stack) ->
+ case at_toplevel(RecArgs, Stack) of
+ true -> base_case_error(Stack);
+ false -> safe_convert_maybe_rec(FullTypeRef, SymbInfo, RecFun, RecArgs,
+ State)
+ end.
+
+-spec safe_convert_maybe_rec(full_type_ref(),symb_info(),rec_fun(),rec_args(),
+ state()) -> rich_result2(ret_type(),state()).
+safe_convert_maybe_rec(FullTypeRef, SymbInfo, RecFun, RecArgs, State) ->
+ case partition_rec_args(FullTypeRef, RecArgs, false) of
+ {[],[],_,_} ->
+ {ok, {rec,RecFun,RecArgs}, State};
+ {MyRecArgs,MyPos,OtherRecArgs,_OtherPos} ->
+ case lists:all(fun({B,_T}) -> B =:= false end, MyRecArgs) of
+ true -> convert_rec_type(SymbInfo, RecFun, MyPos, OtherRecArgs,
+ State);
+ false -> {error, {internal,true_rec_arg_reached_type}}
+ end
+ end.
+
+-spec convert_rec_type(symb_info(), rec_fun(), [position()], rec_args(),
+ state()) -> {ok, ret_type(), state()}.
+convert_rec_type(SymbInfo, RecFun, MyPos, [], State) ->
+ NumRecArgs = length(MyPos),
+ M = fun(GenFun) ->
+ fun(Size) ->
+ GenFuns = lists:duplicate(NumRecArgs, GenFun),
+ RecFun(GenFuns, erlang:max(0,Size - 1))
+ end
+ end,
+ SizedGen = y(M),
+ ImmFinType = ?SIZED(Size,SizedGen(Size + 1)),
+ FinType = case SymbInfo of
+ not_symb ->
+ ImmFinType;
+ {orig_abs,_OrigAbsType} ->
+ proper_symb:internal_well_defined(ImmFinType)
+ end,
+ {ok, {simple,FinType}, State};
+convert_rec_type(_SymbInfo, RecFun, MyPos, OtherRecArgs, State) ->
+ NumRecArgs = length(MyPos),
+ NewRecFun =
+ fun(OtherGens,TopSize) ->
+ M = fun(GenFun) ->
+ fun(Size) ->
+ GenFuns = lists:duplicate(NumRecArgs, GenFun),
+ AllGens =
+ proper_arith:insert(GenFuns, MyPos, OtherGens),
+ RecFun(AllGens, erlang:max(0,Size - 1))
+ end
+ end,
+ (y(M))(TopSize)
+ end,
+ NewRecArgs = clean_rec_args(OtherRecArgs),
+ {ok, {rec,NewRecFun,NewRecArgs}, State}.
+
+%% Y Combinator: Read more at http://bc.tech.coop/blog/070611.html.
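+%% A minimal illustration (not used elsewhere in this module):
+%%   Fact = y(fun(F) -> fun(0) -> 1; (N) -> N * F(N - 1) end end),
+%%   Fact(5) =:= 120.
+%% Here the same knot-tying is used to build size-parameterized generators.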
+-spec y(fun((fun((T) -> S)) -> fun((T) -> S))) -> fun((T) -> S).
+y(M) ->
+ G = fun(F) ->
+ M(fun(A) -> (F(F))(A) end)
+ end,
+ G(G).
+
+-spec process_list(mod_name(), [abs_type() | ret_type()], state(), stack(),
+ var_dict()) -> rich_result2([ret_type()],state()).
+process_list(Mod, RawTypes, State, Stack, VarDict) ->
+ Process = fun({simple,_FinType} = Type, {ok,Types,State1}) ->
+ {ok, [Type|Types], State1};
+ ({rec,_RecFun,_RecArgs} = Type, {ok,Types,State1}) ->
+ {ok, [Type|Types], State1};
+ (TypeForm, {ok,Types,State1}) ->
+ case convert(Mod, TypeForm, State1, Stack, VarDict) of
+ {ok,Type,State2} -> {ok,[Type|Types],State2};
+ {error,_} = Err -> Err
+ end;
+ (_RawType, {error,_} = Err) ->
+ Err
+ end,
+ case lists:foldl(Process, {ok,[],State}, RawTypes) of
+ {ok,RevTypes,NewState} ->
+ {ok, lists:reverse(RevTypes), NewState};
+ {error,_Reason} = Error ->
+ Error
+ end.
+
+-spec convert_integer(abs_expr(), state()) -> rich_result2(ret_type(),state()).
+convert_integer(Expr, State) ->
+ case eval_int(Expr) of
+ {ok,Int} -> {ok, {simple,proper_types:exactly(Int)}, State};
+ error -> expr_error(invalid_int_const, Expr)
+ end.
+
+-spec eval_int(abs_expr()) -> tagged_result(integer()).
+eval_int(Expr) ->
+ NoBindings = erl_eval:new_bindings(),
+ try erl_eval:expr(Expr, NoBindings) of
+ {value,Value,_NewBindings} when is_integer(Value) ->
+ {ok, Value};
+ _ ->
+ error
+ catch
+ error:_ ->
+ error
+ end.
+
+-spec expr_error(atom(), abs_expr()) -> {'error',term()}.
+expr_error(Reason, Expr) ->
+ {error, {Reason,lists:flatten(erl_pp:expr(Expr))}}.
+
+-spec expr_error(atom(), abs_expr(), abs_expr()) -> {'error',term()}.
+expr_error(Reason, Expr1, Expr2) ->
+ Str1 = lists:flatten(erl_pp:expr(Expr1)),
+ Str2 = lists:flatten(erl_pp:expr(Expr2)),
+ {error, {Reason,Str1,Str2}}.
+
+-spec base_case_error(stack()) -> {'error',term()}.
+%% TODO: This might be confusing, since it doesn't record the arguments to
+%% parametric types or the type substitutions of a record.
+base_case_error([{Mod,type,Name,Args} | _Upper]) ->
+ Arity = length(Args),
+ {error, {no_base_case,{Mod,type,Name,Arity}}};
+base_case_error([{Mod,record,Name,_SubstsDict} | _Upper]) ->
+ {error, {no_base_case,{Mod,record,Name}}}.
+
+
+%%------------------------------------------------------------------------------
+%% Helper datatypes handling functions
+%%------------------------------------------------------------------------------
+
+-spec stack_position(full_type_ref(), stack()) -> 'none' | pos_integer().
+stack_position(FullTypeRef, Stack) ->
+ SameType = fun(A) -> same_full_type_ref(A,FullTypeRef) end,
+ case proper_arith:find_first(SameType, Stack) of
+ {Pos,_} -> Pos;
+ none -> none
+ end.
+
+-spec partition_by_toplevel(rec_args(), stack(), boolean()) ->
+ {rec_args(),[position()],rec_args(),[position()]}.
+partition_by_toplevel(RecArgs, [], _OnlyInstanceAccepting) ->
+ {[],[],RecArgs,lists:seq(1,length(RecArgs))};
+partition_by_toplevel(RecArgs, [_Parent | _Upper], _OnlyInstanceAccepting)
+ when is_atom(_Parent) ->
+ {[],[],RecArgs,lists:seq(1,length(RecArgs))};
+partition_by_toplevel(RecArgs, [Parent | _Upper], OnlyInstanceAccepting) ->
+ partition_rec_args(Parent, RecArgs, OnlyInstanceAccepting).
+
+-spec at_toplevel(rec_args(), stack()) -> boolean().
+at_toplevel(RecArgs, Stack) ->
+ case partition_by_toplevel(RecArgs, Stack, false) of
+ {[],[],_,_} -> false;
+ _ -> true
+ end.
+
+-spec partition_rec_args(full_type_ref(), rec_args(), boolean()) ->
+ {rec_args(),[position()],rec_args(),[position()]}.
+partition_rec_args(FullTypeRef, RecArgs, OnlyInstanceAccepting) ->
+ SameType =
+ case OnlyInstanceAccepting of
+ true -> fun({false,_T}) -> false
+ ; ({_B,T}) -> same_full_type_ref(T,FullTypeRef) end;
+ false -> fun({_B,T}) -> same_full_type_ref(T,FullTypeRef) end
+ end,
+ proper_arith:partition(SameType, RecArgs).
+
+%% Tuples may have arity 0, unions must have at least 1 choice, and wunions
+%% at least 2.
+-spec combine_ret_types([ret_type()], {'tuple',boolean()} | 'union'
+ | 'wunion') -> ret_type().
+combine_ret_types(RetTypes, EnclosingType) ->
+ case lists:all(fun is_simple_ret_type/1, RetTypes) of
+ true ->
+ %% This should never happen for wunion.
+ Combine = case EnclosingType of
+ {tuple,false} -> fun proper_types:tuple/1;
+ {tuple,true} -> fun proper_types:fixed_list/1;
+ union -> fun proper_types:union/1
+ end,
+ FinTypes = [T || {simple,T} <- RetTypes],
+ {simple, Combine(FinTypes)};
+ false ->
+ NumTypes = length(RetTypes),
+ {RevRecFuns,RevRecArgsList,NumRecs} =
+ lists:foldl(fun add_ret_type/2, {[],[],0}, RetTypes),
+ RecFuns = lists:reverse(RevRecFuns),
+ RecArgsList = lists:reverse(RevRecArgsList),
+ RecArgLens = [length(RecArgs) || RecArgs <- RecArgsList],
+ RecFunInfo = {NumTypes,NumRecs,RecArgLens,RecFuns},
+ FlatRecArgs = lists:flatten(RecArgsList),
+ {NewRecFun,NewRecArgs} =
+ case EnclosingType of
+ {tuple,ToList} ->
+ {tuple_rec_fun(RecFunInfo,ToList),
+ soft_clean_rec_args(FlatRecArgs,RecFunInfo,ToList)};
+ union ->
+ {union_rec_fun(RecFunInfo),clean_rec_args(FlatRecArgs)};
+ wunion ->
+ {wunion_rec_fun(RecFunInfo),
+ clean_rec_args(FlatRecArgs)}
+ end,
+ {rec, NewRecFun, NewRecArgs}
+ end.
+
+-spec tuple_rec_fun(rec_fun_info(), boolean()) -> rec_fun().
+tuple_rec_fun({_NumTypes,NumRecs,RecArgLens,RecFuns}, ToList) ->
+ Combine = case ToList of
+ true -> fun proper_types:fixed_list/1;
+ false -> fun proper_types:tuple/1
+ end,
+ fun(AllGFs,TopSize) ->
+ Size = TopSize div NumRecs,
+ GFsList = proper_arith:unflatten(AllGFs, RecArgLens),
+ ArgsList = [[GenFuns,Size] || GenFuns <- GFsList],
+ ZipFun = fun erlang:apply/2,
+ Combine(lists:zipwith(ZipFun, RecFuns, ArgsList))
+ end.
+
+-spec union_rec_fun(rec_fun_info()) -> rec_fun().
+union_rec_fun({_NumTypes,_NumRecs,RecArgLens,RecFuns}) ->
+ fun(AllGFs,Size) ->
+ GFsList = proper_arith:unflatten(AllGFs, RecArgLens),
+ ArgsList = [[GenFuns,Size] || GenFuns <- GFsList],
+ ZipFun = fun(F,A) -> ?LAZY(apply(F,A)) end,
+ proper_types:union(lists:zipwith(ZipFun, RecFuns, ArgsList))
+ end.
+
+-spec wunion_rec_fun(rec_fun_info()) -> rec_fun().
+wunion_rec_fun({NumTypes,_NumRecs,RecArgLens,RecFuns}) ->
+ fun(AllGFs,Size) ->
+ GFsList = proper_arith:unflatten(AllGFs, RecArgLens),
+ ArgsList = [[GenFuns,Size] || GenFuns <- GFsList],
+ ZipFun = fun(W,F,A) -> {W,?LAZY(apply(F,A))} end,
+ RecWeight = erlang:max(1, Size div (NumTypes - 1)),
+ Weights = [1 | lists:duplicate(NumTypes - 1, RecWeight)],
+ WeightedChoices = lists:zipwith3(ZipFun, Weights, RecFuns, ArgsList),
+ proper_types:wunion(WeightedChoices)
+ end.
+
+-spec add_ret_type(ret_type(), {[rec_fun()],[rec_args()],non_neg_integer()}) ->
+ {[rec_fun()],[rec_args()],non_neg_integer()}.
+add_ret_type({simple,FinType}, {RecFuns,RecArgsList,NumRecs}) ->
+ {[fun([],_) -> FinType end | RecFuns], [[] | RecArgsList], NumRecs};
+add_ret_type({rec,RecFun,RecArgs}, {RecFuns,RecArgsList,NumRecs}) ->
+ {[RecFun | RecFuns], [RecArgs | RecArgsList], NumRecs + 1}.
+
+-spec is_simple_ret_type(ret_type()) -> boolean().
+is_simple_ret_type({simple,_FinType}) ->
+ true;
+is_simple_ret_type({rec,_RecFun,_RecArgs}) ->
+ false.
+
+-spec clean_rec_args(rec_args()) -> rec_args().
+clean_rec_args(RecArgs) ->
+ [{false,F} || {_B,F} <- RecArgs].
+
+-spec soft_clean_rec_args(rec_args(), rec_fun_info(), boolean()) -> rec_args().
+soft_clean_rec_args(RecArgs, RecFunInfo, ToList) ->
+ soft_clean_rec_args_tr(RecArgs, [], RecFunInfo, ToList, false, 1).
+
+-spec soft_clean_rec_args_tr(rec_args(), rec_args(), rec_fun_info(), boolean(),
+ boolean(), position()) -> rec_args().
+soft_clean_rec_args_tr([], Acc, _RecFunInfo, _ToList, _FoundListInst, _Pos) ->
+ lists:reverse(Acc);
+soft_clean_rec_args_tr([{{list,_NonEmpty,_AltRecFun},FTRef} | Rest], Acc,
+ RecFunInfo, ToList, true, Pos) ->
+ NewArg = {false,FTRef},
+ soft_clean_rec_args_tr(Rest, [NewArg|Acc], RecFunInfo, ToList, true, Pos+1);
+soft_clean_rec_args_tr([{{list,NonEmpty,AltRecFun},FTRef} | Rest], Acc,
+ RecFunInfo, ToList, false, Pos) ->
+ {NumTypes,NumRecs,RecArgLens,RecFuns} = RecFunInfo,
+ AltRecFunPos = get_group(Pos, RecArgLens),
+ AltRecFuns = proper_arith:list_update(AltRecFunPos, AltRecFun, RecFuns),
+ AltRecFunInfo = {NumTypes,NumRecs,RecArgLens,AltRecFuns},
+ NewArg = {{list,NonEmpty,tuple_rec_fun(AltRecFunInfo,ToList)},FTRef},
+ soft_clean_rec_args_tr(Rest, [NewArg|Acc], RecFunInfo, ToList, true, Pos+1);
+soft_clean_rec_args_tr([Arg | Rest], Acc, RecFunInfo, ToList, FoundListInst,
+ Pos) ->
+ soft_clean_rec_args_tr(Rest, [Arg | Acc], RecFunInfo, ToList, FoundListInst,
+ Pos+1).
+
+-spec get_group(pos_integer(), [non_neg_integer()]) -> pos_integer().
+get_group(Pos, AllMembers) ->
+ get_group_tr(Pos, AllMembers, 1).
+
+-spec get_group_tr(pos_integer(), [non_neg_integer()], pos_integer()) ->
+ pos_integer().
+get_group_tr(Pos, [Members | Rest], GroupNum) ->
+ case Pos =< Members of
+ true -> GroupNum;
+ false -> get_group_tr(Pos - Members, Rest, GroupNum + 1)
+ end.
+
+-spec same_full_type_ref(full_type_ref(), term()) -> boolean().
+same_full_type_ref({SameMod,type,SameName,Args1},
+ {SameMod,type,SameName,Args2}) ->
+ length(Args1) =:= length(Args2)
+ andalso lists:all(fun({A,B}) -> same_ret_type(A,B) end,
+ lists:zip(Args1, Args2));
+same_full_type_ref({SameMod,record,SameName,SubstsDict1},
+ {SameMod,record,SameName,SubstsDict2}) ->
+ same_substs_dict(SubstsDict1, SubstsDict2);
+same_full_type_ref(_, _) ->
+ false.
+
+-spec same_ret_type(ret_type(), ret_type()) -> boolean().
+same_ret_type({simple,FinType1}, {simple,FinType2}) ->
+ same_fin_type(FinType1, FinType2);
+same_ret_type({rec,RecFun1,RecArgs1}, {rec,RecFun2,RecArgs2}) ->
+ NumRecArgs = length(RecArgs1),
+ length(RecArgs2) =:= NumRecArgs
+ andalso lists:all(fun({A1,A2}) -> same_rec_arg(A1,A2,NumRecArgs) end,
+ lists:zip(RecArgs1,RecArgs2))
+ andalso same_rec_fun(RecFun1, RecFun2, NumRecArgs);
+same_ret_type(_, _) ->
+ false.
+
+%% TODO: Is this too strict?
+-spec same_rec_arg(rec_arg(), rec_arg(), arity()) -> boolean().
+same_rec_arg({{list,SameBool,AltRecFun1},FTRef1},
+ {{list,SameBool,AltRecFun2},FTRef2}, NumRecArgs) ->
+ same_rec_fun(AltRecFun1, AltRecFun2, NumRecArgs)
+ andalso same_full_type_ref(FTRef1, FTRef2);
+same_rec_arg({true,FTRef1}, {true,FTRef2}, _NumRecArgs) ->
+ same_full_type_ref(FTRef1, FTRef2);
+same_rec_arg({false,FTRef1}, {false,FTRef2}, _NumRecArgs) ->
+ same_full_type_ref(FTRef1, FTRef2);
+same_rec_arg(_, _, _NumRecArgs) ->
+ false.
+
+-spec same_substs_dict(substs_dict(), substs_dict()) -> boolean().
+same_substs_dict(SubstsDict1, SubstsDict2) ->
+ SameKVPair = fun({{_K,V1},{_K,V2}}) -> same_ret_type(V1,V2);
+ (_) -> false
+ end,
+ SubstsKVList1 = lists:sort(dict:to_list(SubstsDict1)),
+ SubstsKVList2 = lists:sort(dict:to_list(SubstsDict2)),
+ length(SubstsKVList1) =:= length(SubstsKVList2)
+ andalso lists:all(SameKVPair, lists:zip(SubstsKVList1,SubstsKVList2)).
+
+-spec same_fin_type(fin_type(), fin_type()) -> boolean().
+same_fin_type(Type1, Type2) ->
+ proper_types:equal_types(Type1, Type2).
+
+-spec same_rec_fun(rec_fun(), rec_fun(), arity()) -> boolean().
+same_rec_fun(RecFun1, RecFun2, NumRecArgs) ->
+ %% It's ok that we return a type, even if there's a 'true' for use of
+ %% an instance.
+ GenFun = fun(_Size) -> proper_types:exactly('$dummy') end,
+ GenFuns = lists:duplicate(NumRecArgs,GenFun),
+ same_fin_type(RecFun1(GenFuns,0), RecFun2(GenFuns,0)).
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/recrec/cerl.erl b/lib/dialyzer/test/opaque_SUITE_data/src/recrec/cerl.erl
new file mode 100644
index 0000000000..299ebe60d6
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/recrec/cerl.erl
@@ -0,0 +1,4595 @@
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+
+%% =====================================================================
+%% @doc Core Erlang abstract syntax trees.
+%%
+%% <p> This module defines an abstract data type for representing Core
+%% Erlang source code as syntax trees.</p>
+%%
+%% <p>A recommended starting point for the first-time user is the
+%% documentation of the function <a
+%% href="#type-1"><code>type/1</code></a>.</p>
+%%
+%% <h3><b>NOTES:</b></h3>
+%%
+%% <p>This module deals with the composition and decomposition of
+%% <em>syntactic</em> entities (as opposed to semantic ones); its
+%% purpose is to hide all direct references to the data structures
+%% used to represent these entities. With few exceptions, the
+%% functions in this module perform no semantic interpretation of
+%% their inputs, and in general, the user is assumed to pass
+%% type-correct arguments - if this is not done, the effects are not
+%% defined.</p>
+%%
+%% <p>Currently, the internal data structure used is the same as
+%% the record-based data structures used traditionally in the Beam
+%% compiler.</p>
+%%
+%% <p>The internal representations of abstract syntax trees are
+%% subject to change without notice, and should not be documented
+%% outside this module. Furthermore, we do not give any guarantees on
+%% how an abstract syntax tree may or may not be represented, <em>with
+%% the following exceptions</em>: no syntax tree is represented by a
+%% single atom, such as <code>none</code>, by a list constructor
+%% <code>[X | Y]</code>, or by the empty list <code>[]</code>. This
+%% can be relied on when writing functions that operate on syntax
+%% trees.</p>
+%%
+%% @type cerl(). An abstract Core Erlang syntax tree.
+%%
+%% <p>Every abstract syntax tree has a <em>type</em>, given by the
+%% function <a href="#type-1"><code>type/1</code></a>. In addition,
+%% each syntax tree has a list of <em>user annotations</em> (cf. <a
+%% href="#get_ann-1"><code>get_ann/1</code></a>), which are included
+%% in the Core Erlang syntax.</p>
+
+-module(cerl).
+
+-export([abstract/1, add_ann/2, alias_pat/1, alias_var/1,
+ ann_abstract/2, ann_c_alias/3, ann_c_apply/3, ann_c_atom/2,
+ ann_c_call/4, ann_c_case/3, ann_c_catch/2, ann_c_char/2,
+ ann_c_clause/3, ann_c_clause/4, ann_c_cons/3, ann_c_float/2,
+ ann_c_fname/3, ann_c_fun/3, ann_c_int/2, ann_c_let/4,
+ ann_c_letrec/3, ann_c_module/4, ann_c_module/5, ann_c_nil/1,
+ ann_c_cons_skel/3, ann_c_tuple_skel/2, ann_c_primop/3,
+ ann_c_receive/2, ann_c_receive/4, ann_c_seq/3, ann_c_string/2,
+ ann_c_try/6, ann_c_tuple/2, ann_c_values/2, ann_c_var/2,
+ ann_make_data/3, ann_make_list/2, ann_make_list/3,
+ ann_make_data_skel/3, ann_make_tree/3, apply_args/1,
+ apply_arity/1, apply_op/1, atom_lit/1, atom_name/1, atom_val/1,
+ c_alias/2, c_apply/2, c_atom/1, c_call/3, c_case/2, c_catch/1,
+ c_char/1, c_clause/2, c_clause/3, c_cons/2, c_float/1,
+ c_fname/2, c_fun/2, c_int/1, c_let/3, c_letrec/2, c_module/3,
+ c_module/4, c_nil/0, c_cons_skel/2, c_tuple_skel/1, c_primop/2,
+ c_receive/1, c_receive/3, c_seq/2, c_string/1, c_try/5,
+ c_tuple/1, c_values/1, c_var/1, call_args/1, call_arity/1,
+ call_module/1, call_name/1, case_arg/1, case_arity/1,
+ case_clauses/1, catch_body/1, char_lit/1, char_val/1,
+ clause_arity/1, clause_body/1, clause_guard/1, clause_pats/1,
+ clause_vars/1, concrete/1, cons_hd/1, cons_tl/1, copy_ann/2,
+ data_arity/1, data_es/1, data_type/1, float_lit/1, float_val/1,
+ fname_arity/1, fname_id/1, fold_literal/1, from_records/1,
+ fun_arity/1, fun_body/1, fun_vars/1, get_ann/1, int_lit/1,
+ int_val/1, is_c_alias/1, is_c_apply/1, is_c_atom/1,
+ is_c_call/1, is_c_case/1, is_c_catch/1, is_c_char/1,
+ is_c_clause/1, is_c_cons/1, is_c_float/1, is_c_fname/1,
+ is_c_fun/1, is_c_int/1, is_c_let/1, is_c_letrec/1, is_c_list/1,
+ is_c_module/1, is_c_nil/1, is_c_primop/1, is_c_receive/1,
+ is_c_seq/1, is_c_string/1, is_c_try/1, is_c_tuple/1,
+ is_c_values/1, is_c_var/1, is_data/1, is_leaf/1, is_literal/1,
+ is_literal_term/1, is_print_char/1, is_print_string/1,
+ let_arg/1, let_arity/1, let_body/1, let_vars/1, letrec_body/1,
+ letrec_defs/1, letrec_vars/1, list_elements/1, list_length/1,
+ make_data/2, make_list/1, make_list/2, make_data_skel/2,
+ make_tree/2, meta/1, module_attrs/1, module_defs/1,
+ module_exports/1, module_name/1, module_vars/1,
+ pat_list_vars/1, pat_vars/1, primop_args/1, primop_arity/1,
+ primop_name/1, receive_action/1, receive_clauses/1,
+ receive_timeout/1, seq_arg/1, seq_body/1, set_ann/2,
+ string_lit/1, string_val/1, subtrees/1, to_records/1,
+ try_arg/1, try_body/1, try_vars/1, try_evars/1, try_handler/1,
+ tuple_arity/1, tuple_es/1, type/1, unfold_literal/1,
+ update_c_alias/3, update_c_apply/3, update_c_call/4,
+ update_c_case/3, update_c_catch/2, update_c_clause/4,
+ update_c_cons/3, update_c_cons_skel/3, update_c_fname/2,
+ update_c_fname/3, update_c_fun/3, update_c_let/4,
+ update_c_letrec/3, update_c_module/5, update_c_primop/3,
+ update_c_receive/4, update_c_seq/3, update_c_try/6,
+ update_c_tuple/2, update_c_tuple_skel/2, update_c_values/2,
+ update_c_var/2, update_data/3, update_list/2, update_list/3,
+ update_data_skel/3, update_tree/2, update_tree/3,
+ values_arity/1, values_es/1, var_name/1, c_binary/1,
+ update_c_binary/2, ann_c_binary/2, is_c_binary/1,
+ binary_segments/1, c_bitstr/3, c_bitstr/4, c_bitstr/5,
+ update_c_bitstr/5, update_c_bitstr/6, ann_c_bitstr/5,
+ ann_c_bitstr/6, is_c_bitstr/1, bitstr_val/1, bitstr_size/1,
+ bitstr_bitsize/1, bitstr_unit/1, bitstr_type/1, bitstr_flags/1,
+
+ %% keep map exports here for now
+ c_map_pattern/1,
+ is_c_map/1,
+ is_c_map_pattern/1,
+ map_es/1,
+ map_arg/1,
+ update_c_map/3,
+ c_map/1, is_c_map_empty/1,
+ ann_c_map/2, ann_c_map/3,
+ ann_c_map_pattern/2,
+ map_pair_op/1,map_pair_key/1,map_pair_val/1,
+ update_c_map_pair/4,
+ c_map_pair/2, c_map_pair_exact/2,
+ ann_c_map_pair/4
+ ]).
+
+-export_type([c_binary/0, c_bitstr/0, c_call/0, c_clause/0, c_cons/0, c_fun/0,
+ c_let/0, c_literal/0, c_map/0, c_map_pair/0,
+ c_module/0, c_tuple/0,
+ c_values/0, c_var/0, cerl/0,
+ anns/0, attrs/0, defs/0, litval/0, var_name/0]).
+
+-include("core_parse.hrl").
+
+-type c_alias() :: #c_alias{}.
+-type c_apply() :: #c_apply{}.
+-type c_binary() :: #c_binary{}.
+-type c_bitstr() :: #c_bitstr{}.
+-type c_call() :: #c_call{}.
+-type c_case() :: #c_case{}.
+-type c_catch() :: #c_catch{}.
+-type c_clause() :: #c_clause{}.
+-type c_cons() :: #c_cons{}.
+-type c_fun() :: #c_fun{}.
+-type c_let() :: #c_let{}.
+-type c_letrec() :: #c_letrec{}.
+-type c_literal() :: #c_literal{}.
+-type c_map() :: #c_map{}.
+-type c_map_pair() :: #c_map_pair{}.
+-type c_module() :: #c_module{}.
+-type c_primop() :: #c_primop{}.
+-type c_receive() :: #c_receive{}.
+-type c_seq() :: #c_seq{}.
+-type c_try() :: #c_try{}.
+-type c_tuple() :: #c_tuple{}.
+-type c_values() :: #c_values{}.
+-type c_var() :: #c_var{}.
+
+-type cerl() :: c_alias() | c_apply() | c_binary() | c_bitstr()
+ | c_call() | c_case() | c_catch() | c_clause() | c_cons()
+ | c_fun() | c_let() | c_letrec() | c_literal()
+ | c_map() | c_map_pair()
+ | c_module() | c_primop() | c_receive() | c_seq()
+ | c_try() | c_tuple() | c_values() | c_var().
+
+-type anns() :: [term()].
+-type attr() :: {c_literal(), c_literal()}.
+-type attrs() :: [attr()].
+-type def() :: {c_var(), c_fun()}.
+-type defs() :: [def()].
+
+-type litval() :: atom() | bitstring() | map() | number()
+ | string() | tuple() | [litval()].
+
+-type var_name() :: integer() | atom() | {atom(), arity()}.
+
+
+%% =====================================================================
+%% Representation (general)
+%%
+%% All nodes are represented by tuples of arity 2 or (generally)
+%% greater, whose first element is an atom which uniquely identifies the
+%% type of the node, and whose second element is a (proper) list of
+%% annotation terms associated with the node - this is by default empty.
+%%
+%% For most node constructor functions, there are analogous functions
+%% named 'ann_...', taking one extra argument 'As' (always the first
+%% argument), specifying an annotation list at node creation time.
+%% Similarly, there are also functions named 'update_...', taking one
+%% extra argument 'Old', specifying a node from which all fields not
+%% explicitly given as arguments should be copied (generally, this is
+%% the annotation field only).
+%% =====================================================================
+
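+%% An illustrative sketch of the 'ann_...'/'update_...' convention (the
+%% annotation term 'my_anno' is made up):
+%%
+%%     V0 = ann_c_var([my_anno], 'X'),
+%%     V1 = update_c_var(V0, 'Y'),
+%%     get_ann(V1) =:= [my_anno]   %% annotations copied over from V0
+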
+%% @spec type(Node::cerl()) -> atom()
+%%
+%% @doc Returns the type tag of <code>Node</code>. Current node types
+%% are:
+%%
+%% <p><center><table border="1">
+%% <tr>
+%% <td>alias</td>
+%% <td>apply</td>
+%% <td>binary</td>
+%% <td>bitstr</td>
+%% <td>call</td>
+%% <td>case</td>
+%% <td>catch</td>
+%% <td>clause</td>
+%% </tr><tr>
+%% <td>cons</td>
+%% <td>fun</td>
+%% <td>let</td>
+%% <td>letrec</td>
+%% <td>literal</td>
+%% <td>map</td>
+%% <td>map_pair</td>
+%% <td>module</td>
+%% </tr><tr>
+%% <td>primop</td>
+%% <td>receive</td>
+%% <td>seq</td>
+%% <td>try</td>
+%% <td>tuple</td>
+%% <td>values</td>
+%% <td>var</td>
+%% </tr>
+%% </table></center></p>
+%%
+%% <p>Note: The name of the primary constructor function for a node
+%% type is always the name of the type itself, prefixed by
+%% "<code>c_</code>"; recognizer predicates are correspondingly
+%% prefixed by "<code>is_c_</code>". Furthermore, to simplify
+%% preservation of annotations (cf. <code>get_ann/1</code>), there are
+%% analogous constructor functions prefixed by "<code>ann_c_</code>"
+%% and "<code>update_c_</code>", for setting the annotation list of
+%% the new node to either a specific value or to the annotations of an
+%% existing node, respectively.</p>
+%%
+%% @see abstract/1
+%% @see c_alias/2
+%% @see c_apply/2
+%% @see c_binary/1
+%% @see c_bitstr/5
+%% @see c_call/3
+%% @see c_case/2
+%% @see c_catch/1
+%% @see c_clause/3
+%% @see c_cons/2
+%% @see c_fun/2
+%% @see c_let/3
+%% @see c_letrec/2
+%% @see c_module/3
+%% @see c_primop/2
+%% @see c_receive/1
+%% @see c_seq/2
+%% @see c_try/5
+%% @see c_tuple/1
+%% @see c_values/1
+%% @see c_var/1
+%% @see get_ann/1
+%% @see to_records/1
+%% @see from_records/1
+%% @see data_type/1
+%% @see subtrees/1
+%% @see meta/1
+
+-type ctype() :: 'alias' | 'apply' | 'binary' | 'bitstr' | 'call' | 'case'
+ | 'catch' | 'clause' | 'cons' | 'fun' | 'let' | 'letrec'
+ | 'literal' | 'map' | 'map_pair' | 'module' | 'primop'
+ | 'receive' | 'seq' | 'try' | 'tuple' | 'values' | 'var'.
+
+-spec type(cerl()) -> ctype().
+
+type(#c_alias{}) -> alias;
+type(#c_apply{}) -> apply;
+type(#c_binary{}) -> binary;
+type(#c_bitstr{}) -> bitstr;
+type(#c_call{}) -> call;
+type(#c_case{}) -> 'case';
+type(#c_catch{}) -> 'catch';
+type(#c_clause{}) -> clause;
+type(#c_cons{}) -> cons;
+type(#c_fun{}) -> 'fun';
+type(#c_let{}) -> 'let';
+type(#c_letrec{}) -> letrec;
+type(#c_literal{}) -> literal;
+type(#c_map{}) -> map;
+type(#c_map_pair{}) -> map_pair;
+type(#c_module{}) -> module;
+type(#c_primop{}) -> primop;
+type(#c_receive{}) -> 'receive';
+type(#c_seq{}) -> seq;
+type(#c_try{}) -> 'try';
+type(#c_tuple{}) -> tuple;
+type(#c_values{}) -> values;
+type(#c_var{}) -> var.
+
+
+%% @spec is_leaf(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is a leaf node,
+%% otherwise <code>false</code>. The current leaf node types are
+%% <code>literal</code> and <code>var</code>.
+%%
+%% <p>Note: all literals (cf. <code>is_literal/1</code>) are leaf
+%% nodes, even if they represent structured (constant) values such as
+%% <code>{foo, [bar, baz]}</code>. Also note that variables are leaf
+%% nodes but not literals.</p>
+%%
+%% @see type/1
+%% @see is_literal/1
+
+-spec is_leaf(cerl()) -> boolean().
+
+is_leaf(Node) ->
+ case type(Node) of
+ literal -> true;
+ var -> true;
+ _ -> false
+ end.
+
+
+%% @spec get_ann(cerl()) -> anns()
+%%
+%% @doc Returns the list of user annotations associated with a syntax
+%% tree node. For a newly created node, this is the empty list. The
+%% annotations may be any terms.
+%%
+%% @see set_ann/2
+
+-spec get_ann(cerl()) -> anns().
+
+get_ann(Node) ->
+ element(2, Node).
+
+
+%% @spec set_ann(Node::cerl(), Annotations::anns()) -> cerl()
+%%
+%% @doc Sets the list of user annotations of <code>Node</code> to
+%% <code>Annotations</code>.
+%%
+%% @see get_ann/1
+%% @see add_ann/2
+%% @see copy_ann/2
+
+-spec set_ann(cerl(), anns()) -> cerl().
+
+set_ann(Node, List) ->
+ setelement(2, Node, List).
+
+
+%% @spec add_ann(Annotations::anns(), Node::cerl()) -> cerl()
+%%
+%% @doc Appends <code>Annotations</code> to the list of user
+%% annotations of <code>Node</code>.
+%%
+%% <p>Note: this is equivalent to <code>set_ann(Node, Annotations ++
+%% get_ann(Node))</code>, but potentially more efficient.</p>
+%%
+%% @see get_ann/1
+%% @see set_ann/2
+
+-spec add_ann(anns(), cerl()) -> cerl().
+
+add_ann(Terms, Node) ->
+ set_ann(Node, Terms ++ get_ann(Node)).
+
+
+%% @spec copy_ann(Source::cerl(), Target::cerl()) -> cerl()
+%%
+%% @doc Copies the list of user annotations from <code>Source</code>
+%% to <code>Target</code>.
+%%
+%% <p>Note: this is equivalent to <code>set_ann(Target,
+%% get_ann(Source))</code>, but potentially more efficient.</p>
+%%
+%% @see get_ann/1
+%% @see set_ann/2
+
+-spec copy_ann(cerl(), cerl()) -> cerl().
+
+copy_ann(Source, Target) ->
+ set_ann(Target, get_ann(Source)).
+
+
+%% @spec abstract(Term::litval()) -> cerl()
+%%
+%% @doc Creates a syntax tree corresponding to an Erlang term.
+%% <code>Term</code> must be a literal term, i.e., one that can be
+%% represented as a source code literal. Thus, it may not contain a
+%% process identifier, port, reference or function value as a subterm.
+%%
+%% <p>Note: This is a constant time operation.</p>
+%%
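+%% <p>For example, <code>concrete(abstract({foo, bar})) =:= {foo, bar}</code>.</p>
+%%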
+%% @see ann_abstract/2
+%% @see concrete/1
+%% @see is_literal/1
+%% @see is_literal_term/1
+
+-spec abstract(litval()) -> c_literal().
+
+abstract(T) ->
+ #c_literal{val = T}.
+
+
+%% @spec ann_abstract(Annotations::anns(), Term::litval()) -> cerl()
+%% @see abstract/1
+
+-spec ann_abstract(anns(), litval()) -> c_literal().
+
+ann_abstract(As, T) ->
+ #c_literal{val = T, anno = As}.
+
+
+%% @spec is_literal_term(Term::term()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Term</code> can be
+%% represented as a literal, otherwise <code>false</code>. This
+%% function takes time proportional to the size of <code>Term</code>.
+%%
+%% @see abstract/1
+
+-spec is_literal_term(term()) -> boolean().
+
+is_literal_term(T) when is_integer(T) -> true;
+is_literal_term(T) when is_float(T) -> true;
+is_literal_term(T) when is_atom(T) -> true;
+is_literal_term([]) -> true;
+is_literal_term([H | T]) ->
+ is_literal_term(H) andalso is_literal_term(T);
+is_literal_term(T) when is_tuple(T) ->
+ is_literal_term_list(tuple_to_list(T));
+is_literal_term(B) when is_bitstring(B) -> true;
+is_literal_term(M) when is_map(M) ->
+ is_literal_term_list(maps:to_list(M));
+is_literal_term(_) ->
+ false.
+
+-spec is_literal_term_list([term()]) -> boolean().
+
+is_literal_term_list([T | Ts]) ->
+ case is_literal_term(T) of
+ true ->
+ is_literal_term_list(Ts);
+ false ->
+ false
+ end;
+is_literal_term_list([]) ->
+ true.
+
+
+%% @spec concrete(Node::c_literal()) -> litval()
+%%
+%% @doc Returns the Erlang term represented by a syntax tree. An
+%% exception is thrown if <code>Node</code> does not represent a
+%% literal term.
+%%
+%% <p>Note: This is a constant time operation.</p>
+%%
+%% @see abstract/1
+%% @see is_literal/1
+
+%% Because the normal tuple and list constructor operations always
+%% return a literal if the arguments are literals, 'concrete' and
+%% 'is_literal' never need to traverse the structure.
+
+-spec concrete(c_literal()) -> litval().
+
+concrete(#c_literal{val = V}) ->
+ V.
+
+
+%% @spec is_literal(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> represents a
+%% literal term, otherwise <code>false</code>. This function returns
+%% <code>true</code> if and only if the value of
+%% <code>concrete(Node)</code> is defined.
+%%
+%% <p>Note: This is a constant time operation.</p>
+%%
+%% @see abstract/1
+%% @see concrete/1
+%% @see fold_literal/1
+
+-spec is_literal(cerl()) -> boolean().
+
+is_literal(#c_literal{}) ->
+ true;
+is_literal(_) ->
+ false.
+
+
+%% @spec fold_literal(Node::cerl()) -> cerl()
+%%
+%% @doc Ensures that literals have a compact representation. This is
+%% occasionally useful if <code>c_cons_skel/2</code>,
+%% <code>c_tuple_skel/1</code> or <code>unfold_literal/1</code> were
+%% used in the construction of <code>Node</code>, and you want to revert
+%% to the normal "folded" representation of literals. If
+%% <code>Node</code> represents a tuple or list constructor, its
+%% elements are rewritten recursively, and the node is reconstructed
+%% using <code>c_cons/2</code> or <code>c_tuple/1</code>, respectively;
+%% otherwise, <code>Node</code> is not changed.
+%%
+%% @see is_literal/1
+%% @see c_cons_skel/2
+%% @see c_tuple_skel/1
+%% @see c_cons/2
+%% @see c_tuple/1
+%% @see unfold_literal/1
+
+-spec fold_literal(cerl()) -> cerl().
+
+fold_literal(Node) ->
+ case type(Node) of
+ tuple ->
+ update_c_tuple(Node, fold_literal_list(tuple_es(Node)));
+ cons ->
+ update_c_cons(Node, fold_literal(cons_hd(Node)),
+ fold_literal(cons_tl(Node)));
+ _ ->
+ Node
+ end.
+
+fold_literal_list([E | Es]) ->
+ [fold_literal(E) | fold_literal_list(Es)];
+fold_literal_list([]) ->
+ [].
+
+
+%% @spec unfold_literal(Node::cerl()) -> cerl()
+%%
+%% @doc Ensures that literals have a fully expanded representation. If
+%% <code>Node</code> represents a literal tuple or list constructor, its
+%% elements are rewritten recursively, and the node is reconstructed
+%% using <code>c_cons_skel/2</code> or <code>c_tuple_skel/1</code>,
+%% respectively; otherwise, <code>Node</code> is not changed. The function
+%% {@link fold_literal/1} can be used to revert to the normal compact
+%% representation.
+%%
+%% @see is_literal/1
+%% @see c_cons_skel/2
+%% @see c_tuple_skel/1
+%% @see c_cons/2
+%% @see c_tuple/1
+%% @see fold_literal/1
+
+-spec unfold_literal(cerl()) -> cerl().
+
+unfold_literal(Node) ->
+ case type(Node) of
+ literal ->
+ copy_ann(Node, unfold_concrete(concrete(Node)));
+ _ ->
+ Node
+ end.
+
+unfold_concrete(Val) ->
+ case Val of
+ _ when is_tuple(Val) ->
+ c_tuple_skel(unfold_concrete_list(tuple_to_list(Val)));
+ [H|T] ->
+ c_cons_skel(unfold_concrete(H), unfold_concrete(T));
+ _ ->
+ abstract(Val)
+ end.
+
+unfold_concrete_list([E | Es]) ->
+ [unfold_concrete(E) | unfold_concrete_list(Es)];
+unfold_concrete_list([]) ->
+ [].
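+
+%% Usage sketch (informal): unfolding a literal yields a cons/tuple
+%% skeleton, and folding restores the compact literal form:
+%%
+%%   L = cerl:abstract([1, 2]),
+%%   U = cerl:unfold_literal(L),
+%%   false = cerl:is_literal(U),
+%%   true = cerl:is_literal(cerl:fold_literal(U))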
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_module(Name::c_literal(), Exports, Definitions) -> c_module()
+%%
+%% Exports = [c_var()]
+%% Definitions = defs()
+%%
+%% @equiv c_module(Name, Exports, [], Definitions)
+
+-spec c_module(c_literal(), [c_var()], defs()) -> c_module().
+
+c_module(Name, Exports, Defs) ->
+ #c_module{name = Name, exports = Exports, attrs = [], defs = Defs}.
+
+
+%% @spec c_module(Name::c_literal(), Exports, Attributes, Definitions) ->
+%% c_module()
+%%
+%% Exports = [c_var()]
+%% Attributes = attrs()
+%% Definitions = defs()
+%%
+%% @doc Creates an abstract module definition. The result represents
+%% <pre>
+%% module <em>Name</em> [<em>E1</em>, ..., <em>Ek</em>]
+%% attributes [<em>K1</em> = <em>T1</em>, ...,
+%% <em>Km</em> = <em>Tm</em>]
+%% <em>V1</em> = <em>F1</em>
+%% ...
+%% <em>Vn</em> = <em>Fn</em>
+%% end</pre>
+%%
+%% if <code>Exports</code> = <code>[E1, ..., Ek]</code>,
+%% <code>Attributes</code> = <code>[{K1, T1}, ..., {Km, Tm}]</code>,
+%% and <code>Definitions</code> = <code>[{V1, F1}, ..., {Vn,
+%% Fn}]</code>.
+%%
+%% <p><code>Name</code> and all the <code>Ki</code> must be atom
+%% literals, and all the <code>Ti</code> must be constant literals. All
+%% the <code>Vi</code> and <code>Ei</code> must have type
+%% <code>var</code> and represent function names. All the
+%% <code>Fi</code> must have type <code>'fun'</code>.</p>
+%%
+%% @see c_module/3
+%% @see module_name/1
+%% @see module_exports/1
+%% @see module_attrs/1
+%% @see module_defs/1
+%% @see module_vars/1
+%% @see ann_c_module/4
+%% @see ann_c_module/5
+%% @see update_c_module/5
+%% @see c_atom/1
+%% @see c_var/1
+%% @see c_fun/2
+%% @see is_literal/1
+
+-spec c_module(c_literal(), [c_var()], attrs(), defs()) -> c_module().
+
+c_module(Name, Exports, Attrs, Defs) ->
+ #c_module{name = Name, exports = Exports, attrs = Attrs, defs = Defs}.
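+
+%% Construction sketch (informal; the names `m', `id' and `X' are
+%% arbitrary):
+%%
+%%   Name = cerl:c_atom(m),
+%%   F    = cerl:c_fname(id, 1),
+%%   X    = cerl:c_var('X'),
+%%   Mod  = cerl:c_module(Name, [F], [{F, cerl:c_fun([X], X)}]),
+%%   [F]  = cerl:module_vars(Mod)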
+
+
+%% @spec ann_c_module(As::anns(), Name::c_literal(), Exports,
+%% Definitions) -> c_module()
+%%
+%% Exports = [c_var()]
+%% Definitions = defs()
+%%
+%% @see c_module/3
+%% @see ann_c_module/5
+
+-spec ann_c_module(anns(), c_literal(), [c_var()], defs()) -> c_module().
+
+ann_c_module(As, Name, Exports, Defs) ->
+ #c_module{name = Name, exports = Exports, attrs = [], defs = Defs,
+ anno = As}.
+
+
+%% @spec ann_c_module(As::anns(), Name::c_literal(), Exports,
+%% Attributes, Definitions) -> c_module()
+%%
+%% Exports = [c_var()]
+%% Attributes = attrs()
+%% Definitions = defs()
+%%
+%% @see c_module/4
+%% @see ann_c_module/4
+
+-spec ann_c_module(anns(), c_literal(), [c_var()], attrs(), defs()) ->
+ c_module().
+
+ann_c_module(As, Name, Exports, Attrs, Defs) ->
+ #c_module{name = Name, exports = Exports, attrs = Attrs, defs = Defs,
+ anno = As}.
+
+
+%% @spec update_c_module(Old::cerl(), Name::c_literal(), Exports,
+%% Attributes, Definitions) -> c_module()
+%%
+%% Exports = [c_var()]
+%% Attributes = attrs()
+%% Definitions = defs()
+%%
+%% @see c_module/4
+
+-spec update_c_module(c_module(), c_literal(), [c_var()], attrs(), defs()) ->
+ c_module().
+
+update_c_module(Node, Name, Exports, Attrs, Defs) ->
+ #c_module{name = Name, exports = Exports, attrs = Attrs, defs = Defs,
+ anno = get_ann(Node)}.
+
+
+%% @spec is_c_module(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% module definition, otherwise <code>false</code>.
+%%
+%% @see type/1
+
+-spec is_c_module(cerl()) -> boolean().
+
+is_c_module(#c_module{}) ->
+ true;
+is_c_module(_) ->
+ false.
+
+
+%% @spec module_name(Node::c_module()) -> c_literal()
+%%
+%% @doc Returns the name subtree of an abstract module definition.
+%%
+%% @see c_module/4
+
+-spec module_name(c_module()) -> c_literal().
+
+module_name(Node) ->
+ Node#c_module.name.
+
+
+%% @spec module_exports(Node::c_module()) -> [c_var()]
+%%
+%% @doc Returns the list of exports subtrees of an abstract module
+%% definition.
+%%
+%% @see c_module/4
+
+-spec module_exports(c_module()) -> [c_var()].
+
+module_exports(Node) ->
+ Node#c_module.exports.
+
+
+%% @spec module_attrs(Node::c_module()) -> [{cerl(), cerl()}]
+%%
+%% @doc Returns the list of pairs of attribute key/value subtrees of
+%% an abstract module definition.
+%%
+%% @see c_module/4
+
+-spec module_attrs(c_module()) -> attrs().
+
+module_attrs(Node) ->
+ Node#c_module.attrs.
+
+
+%% @spec module_defs(Node::c_module()) -> defs()
+%%
+%% @doc Returns the list of function definitions of an abstract module
+%% definition.
+%%
+%% @see c_module/4
+
+-spec module_defs(c_module()) -> defs().
+
+module_defs(Node) ->
+ Node#c_module.defs.
+
+
+%% @spec module_vars(Node::c_module()) -> [c_var()]
+%%
+%% @doc Returns the list of left-hand side function variable subtrees
+%% of an abstract module definition.
+%%
+%% @see c_module/4
+
+-spec module_vars(c_module()) -> [c_var()].
+
+module_vars(Node) ->
+ [F || {F, _} <- module_defs(Node)].
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_int(Value::integer()) -> c_literal()
+%%
+%% @doc Creates an abstract integer literal. The lexical
+%% representation is the canonical decimal numeral of
+%% <code>Value</code>.
+%%
+%% @see ann_c_int/2
+%% @see is_c_int/1
+%% @see int_val/1
+%% @see int_lit/1
+%% @see c_char/1
+
+-spec c_int(integer()) -> c_literal().
+
+c_int(Value) ->
+ #c_literal{val = Value}.
+
+
+%% @spec ann_c_int(As::anns(), Value::integer()) -> c_literal()
+%% @see c_int/1
+
+-spec ann_c_int(anns(), integer()) -> c_literal().
+
+ann_c_int(As, Value) ->
+ #c_literal{val = Value, anno = As}.
+
+
+%% @spec is_c_int(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> represents an
+%% integer literal, otherwise <code>false</code>.
+%% @see c_int/1
+
+-spec is_c_int(cerl()) -> boolean().
+
+is_c_int(#c_literal{val = V}) when is_integer(V) ->
+ true;
+is_c_int(_) ->
+ false.
+
+
+%% @spec int_val(c_literal()) -> integer()
+%%
+%% @doc Returns the value represented by an integer literal node.
+%% @see c_int/1
+
+-spec int_val(c_literal()) -> integer().
+
+int_val(Node) ->
+ Node#c_literal.val.
+
+
+%% @spec int_lit(c_literal()) -> string()
+%%
+%% @doc Returns the numeral string represented by an integer literal
+%% node.
+%% @see c_int/1
+
+-spec int_lit(c_literal()) -> string().
+
+int_lit(Node) ->
+ integer_to_list(int_val(Node)).
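+
+%% Usage sketch (informal):
+%%
+%%   N = cerl:c_int(42),
+%%   true = cerl:is_c_int(N),
+%%   42   = cerl:int_val(N),
+%%   "42" = cerl:int_lit(N)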
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_float(Value::float()) -> c_literal()
+%%
+%% @doc Creates an abstract floating-point literal. The lexical
+%% representation is the decimal floating-point numeral of
+%% <code>Value</code>.
+%%
+%% @see ann_c_float/2
+%% @see is_c_float/1
+%% @see float_val/1
+%% @see float_lit/1
+
+%% Note that not all floating-point numerals can be represented with
+%% full precision.
+
+-spec c_float(float()) -> c_literal().
+
+c_float(Value) ->
+ #c_literal{val = Value}.
+
+
+%% @spec ann_c_float(As::anns(), Value::float()) -> c_literal()
+%% @see c_float/1
+
+-spec ann_c_float(anns(), float()) -> c_literal().
+
+ann_c_float(As, Value) ->
+ #c_literal{val = Value, anno = As}.
+
+
+%% @spec is_c_float(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> represents a
+%% floating-point literal, otherwise <code>false</code>.
+%% @see c_float/1
+
+-spec is_c_float(cerl()) -> boolean().
+
+is_c_float(#c_literal{val = V}) when is_float(V) ->
+ true;
+is_c_float(_) ->
+ false.
+
+
+%% @spec float_val(c_literal()) -> float()
+%%
+%% @doc Returns the value represented by a floating-point literal
+%% node.
+%% @see c_float/1
+
+-spec float_val(c_literal()) -> float().
+
+float_val(Node) ->
+ Node#c_literal.val.
+
+
+%% @spec float_lit(c_literal()) -> string()
+%%
+%% @doc Returns the numeral string represented by a floating-point
+%% literal node.
+%% @see c_float/1
+
+-spec float_lit(c_literal()) -> string().
+
+float_lit(Node) ->
+ float_to_list(float_val(Node)).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_atom(Name) -> c_literal()
+%% Name = atom() | string()
+%%
+%% @doc Creates an abstract atom literal. The print name of the atom
+%% is the character sequence represented by <code>Name</code>.
+%%
+%% <p>Note: passing a string as argument to this function causes a
+%% corresponding atom to be created for the internal representation.</p>
+%%
+%% @see ann_c_atom/2
+%% @see is_c_atom/1
+%% @see atom_val/1
+%% @see atom_name/1
+%% @see atom_lit/1
+
+-spec c_atom(atom() | string()) -> c_literal().
+
+c_atom(Name) when is_atom(Name) ->
+ #c_literal{val = Name};
+c_atom(Name) ->
+ #c_literal{val = list_to_atom(Name)}.
+
+
+%% @spec ann_c_atom(As::anns(), Name) -> cerl()
+%% Name = atom() | string()
+%% @see c_atom/1
+
+-spec ann_c_atom(anns(), atom() | string()) -> c_literal().
+
+ann_c_atom(As, Name) when is_atom(Name) ->
+ #c_literal{val = Name, anno = As};
+ann_c_atom(As, Name) ->
+ #c_literal{val = list_to_atom(Name), anno = As}.
+
+
+%% @spec is_c_atom(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> represents an
+%% atom literal, otherwise <code>false</code>.
+%%
+%% @see c_atom/1
+
+-spec is_c_atom(cerl()) -> boolean().
+
+is_c_atom(#c_literal{val = V}) when is_atom(V) ->
+ true;
+is_c_atom(_) ->
+ false.
+
+%% @spec atom_val(c_literal()) -> atom()
+%%
+%% @doc Returns the value represented by an abstract atom.
+%%
+%% @see c_atom/1
+
+-spec atom_val(c_literal()) -> atom().
+
+atom_val(Node) ->
+ Node#c_literal.val.
+
+
+%% @spec atom_name(c_literal()) -> string()
+%%
+%% @doc Returns the print name of an abstract atom.
+%%
+%% @see c_atom/1
+
+-spec atom_name(c_literal()) -> string().
+
+atom_name(Node) ->
+ atom_to_list(atom_val(Node)).
+
+
+%% @spec atom_lit(cerl()) -> string()
+%%
+%% @doc Returns the literal string represented by an abstract
+%% atom. This always includes surrounding single-quote characters.
+%%
+%% <p>Note that an abstract atom may have several literal
+%% representations, and that the representation yielded by this
+%% function is not fixed; e.g.,
+%% <code>atom_lit(c_atom("a\012b"))</code> could yield the string
+%% <code>"\'a\\nb\'"</code>.</p>
+%%
+%% @see c_atom/1
+
+%% TODO: replace the use of the unofficial 'write_string/2'.
+
+-spec atom_lit(cerl()) -> nonempty_string().
+
+atom_lit(Node) ->
+ io_lib:write_string(atom_name(Node), $'). %' stupid Emacs.
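+
+%% Usage sketch (informal; as noted above, the exact quoting and escaping
+%% of the literal string may vary):
+%%
+%%   A = cerl:c_atom("hello"),
+%%   hello     = cerl:atom_val(A),
+%%   "hello"   = cerl:atom_name(A),
+%%   "'hello'" = lists:flatten(cerl:atom_lit(A))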
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_char(Value) -> c_literal()
+%%
+%% Value = char() | integer()
+%%
+%% @doc Creates an abstract character literal. If the local
+%% implementation of Erlang defines <code>char()</code> as a subset of
+%% <code>integer()</code>, this function is equivalent to
+%% <code>c_int/1</code>. Otherwise, if the given value is an integer,
+%% it will be converted to the character with the corresponding
+%% code. The lexical representation of a character is
+%% "<code>$<em>Char</em></code>", where <code>Char</code> is a single
+%% printing character or an escape sequence.
+%%
+%% @see c_int/1
+%% @see c_string/1
+%% @see ann_c_char/2
+%% @see is_c_char/1
+%% @see char_val/1
+%% @see char_lit/1
+%% @see is_print_char/1
+
+-spec c_char(non_neg_integer()) -> c_literal().
+
+c_char(Value) when is_integer(Value), Value >= 0 ->
+ #c_literal{val = Value}.
+
+
+%% @spec ann_c_char(As::anns(), Value::char()) -> c_literal()
+%% @see c_char/1
+
+-spec ann_c_char(anns(), char()) -> c_literal().
+
+ann_c_char(As, Value) ->
+ #c_literal{val = Value, anno = As}.
+
+
+%% @spec is_c_char(Node::c_literal()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> may represent a
+%% character literal, otherwise <code>false</code>.
+%%
+%% <p>If the local implementation of Erlang defines
+%% <code>char()</code> as a subset of <code>integer()</code>, then
+%% <code>is_c_int(<em>Node</em>)</code> will also yield
+%% <code>true</code>.</p>
+%%
+%% @see c_char/1
+%% @see is_print_char/1
+
+-spec is_c_char(c_literal()) -> boolean().
+
+is_c_char(#c_literal{val = V}) when is_integer(V), V >= 0 ->
+ is_char_value(V);
+is_c_char(_) ->
+ false.
+
+
+%% @spec is_print_char(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> may represent a
+%% "printing" character, otherwise <code>false</code>. (Cf.
+%% <code>is_c_char/1</code>.) A "printing" character has either a
+%% given graphical representation, or a "named" escape sequence such
+%% as "<code>\n</code>". Currently, only ISO 8859-1 (Latin-1)
+%% character values are recognized.
+%%
+%% @see c_char/1
+%% @see is_c_char/1
+
+-spec is_print_char(cerl()) -> boolean().
+
+is_print_char(#c_literal{val = V}) when is_integer(V), V >= 0 ->
+ is_print_char_value(V);
+is_print_char(_) ->
+ false.
+
+
+%% @spec char_val(c_literal()) -> char()
+%%
+%% @doc Returns the value represented by an abstract character literal.
+%%
+%% @see c_char/1
+
+-spec char_val(c_literal()) -> char().
+
+char_val(Node) ->
+ Node#c_literal.val.
+
+
+%% @spec char_lit(c_literal()) -> string()
+%%
+%% @doc Returns the literal string represented by an abstract
+%% character. This includes a leading <code>$</code>
+%% character. Currently, all characters that are not in the set of ISO
+%% 8859-1 (Latin-1) "printing" characters will be escaped.
+%%
+%% @see c_char/1
+
+-spec char_lit(c_literal()) -> nonempty_string().
+
+char_lit(Node) ->
+ io_lib:write_char(char_val(Node)).
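+
+%% Usage sketch (informal; the escaping shown is indicative only):
+%%
+%%   C = cerl:c_char($a),
+%%   true = cerl:is_c_char(C),
+%%   $a   = cerl:char_val(C),
+%%   "$a" = lists:flatten(cerl:char_lit(C))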
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_string(Value::string()) -> c_literal()
+%%
+%% @doc Creates an abstract string literal. Equivalent to creating an
+%% abstract list of the corresponding character literals
+%% (cf. <code>is_c_string/1</code>), but is typically more
+%% efficient. The lexical representation of a string is
+%% "<code>"<em>Chars</em>"</code>", where <code>Chars</code> is a
+%% sequence of printing characters or spaces.
+%%
+%% @see c_char/1
+%% @see ann_c_string/2
+%% @see is_c_string/1
+%% @see string_val/1
+%% @see string_lit/1
+%% @see is_print_string/1
+
+-spec c_string(string()) -> c_literal().
+
+c_string(Value) ->
+ #c_literal{val = Value}.
+
+
+%% @spec ann_c_string(As::anns(), Value::string()) -> c_literal()
+%% @see c_string/1
+
+-spec ann_c_string(anns(), string()) -> c_literal().
+
+ann_c_string(As, Value) ->
+ #c_literal{val = Value, anno = As}.
+
+
+%% @spec is_c_string(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> may represent a
+%% string literal, otherwise <code>false</code>. Strings are defined
+%% as lists of characters; see <code>is_c_char/1</code> for details.
+%%
+%% @see c_string/1
+%% @see is_c_char/1
+%% @see is_print_string/1
+
+-spec is_c_string(cerl()) -> boolean().
+
+is_c_string(#c_literal{val = V}) ->
+ is_char_list(V);
+is_c_string(_) ->
+ false.
+
+
+%% @spec is_print_string(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> may represent a
+%% string literal containing only "printing" characters, otherwise
+%% <code>false</code>. See <code>is_c_string/1</code> and
+%% <code>is_print_char/1</code> for details. Currently, only ISO
+%% 8859-1 (Latin-1) character values are recognized.
+%%
+%% @see c_string/1
+%% @see is_c_string/1
+%% @see is_print_char/1
+
+-spec is_print_string(cerl()) -> boolean().
+
+is_print_string(#c_literal{val = V}) ->
+ is_print_char_list(V);
+is_print_string(_) ->
+ false.
+
+
+%% @spec string_val(cerl()) -> string()
+%%
+%% @doc Returns the value represented by an abstract string literal.
+%%
+%% @see c_string/1
+
+-spec string_val(c_literal()) -> string().
+
+string_val(Node) ->
+ Node#c_literal.val.
+
+
+%% @spec string_lit(cerl()) -> string()
+%%
+%% @doc Returns the literal string represented by an abstract string.
+%% This includes surrounding double-quote characters
+%% <code>"..."</code>. Currently, characters that are not in the set
+%% of ISO 8859-1 (Latin-1) "printing" characters will be escaped,
+%% except for spaces.
+%%
+%% @see c_string/1
+
+-spec string_lit(c_literal()) -> nonempty_string().
+
+string_lit(Node) ->
+ io_lib:write_string(string_val(Node)).
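+
+%% Usage sketch (informal; the escaping shown is indicative only):
+%%
+%%   S = cerl:c_string("abc"),
+%%   true  = cerl:is_c_string(S),
+%%   "abc" = cerl:string_val(S),
+%%   "\"abc\"" = lists:flatten(cerl:string_lit(S))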
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_nil() -> cerl()
+%%
+%% @doc Creates an abstract empty list. The result represents
+%% "<code>[]</code>". The empty list is traditionally called "nil".
+%%
+%% @see ann_c_nil/1
+%% @see is_c_list/1
+%% @see c_cons/2
+
+-spec c_nil() -> c_literal().
+
+c_nil() ->
+ #c_literal{val = []}.
+
+
+%% @spec ann_c_nil(As::anns()) -> cerl()
+%% @see c_nil/0
+
+-spec ann_c_nil(anns()) -> c_literal().
+
+ann_c_nil(As) ->
+ #c_literal{val = [], anno = As}.
+
+
+%% @spec is_c_nil(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% empty list, otherwise <code>false</code>.
+
+-spec is_c_nil(cerl()) -> boolean().
+
+is_c_nil(#c_literal{val = []}) ->
+ true;
+is_c_nil(_) ->
+ false.
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_cons(Head::cerl(), Tail::cerl()) -> cerl()
+%%
+%% @doc Creates an abstract list constructor. The result represents
+%% "<code>[<em>Head</em> | <em>Tail</em>]</code>". Note that if both
+%% <code>Head</code> and <code>Tail</code> have type
+%% <code>literal</code>, then the result will also have type
+%% <code>literal</code>, and annotations on <code>Head</code> and
+%% <code>Tail</code> are lost.
+%%
+%% <p>Recall that in Erlang, the tail element of a list constructor is
+%% not necessarily a list.</p>
+%%
+%% @see ann_c_cons/3
+%% @see update_c_cons/3
+%% @see c_cons_skel/2
+%% @see is_c_cons/1
+%% @see cons_hd/1
+%% @see cons_tl/1
+%% @see is_c_list/1
+%% @see c_nil/0
+%% @see list_elements/1
+%% @see list_length/1
+%% @see make_list/2
+
+%% *Always* collapse literals.
+
+-spec c_cons(cerl(), cerl()) -> c_literal() | c_cons().
+
+c_cons(#c_literal{val = Head}, #c_literal{val = Tail}) ->
+ #c_literal{val = [Head | Tail]};
+c_cons(Head, Tail) ->
+ #c_cons{hd = Head, tl = Tail}.
+
+
+%% @spec ann_c_cons(As::anns(), Head::cerl(), Tail::cerl()) -> cerl()
+%% @see c_cons/2
+
+-spec ann_c_cons(anns(), cerl(), cerl()) -> c_literal() | c_cons().
+
+ann_c_cons(As, #c_literal{val = Head}, #c_literal{val = Tail}) ->
+ #c_literal{val = [Head | Tail], anno = As};
+ann_c_cons(As, Head, Tail) ->
+ #c_cons{hd = Head, tl = Tail, anno = As}.
+
+
+%% @spec update_c_cons(Old::cerl(), Head::cerl(), Tail::cerl()) ->
+%% cerl()
+%% @see c_cons/2
+
+-spec update_c_cons(c_literal() | c_cons(), cerl(), cerl()) ->
+ c_literal() | c_cons().
+
+update_c_cons(Node, #c_literal{val = Head}, #c_literal{val = Tail}) ->
+ #c_literal{val = [Head | Tail], anno = get_ann(Node)};
+update_c_cons(Node, Head, Tail) ->
+ #c_cons{hd = Head, tl = Tail, anno = get_ann(Node)}.
+
+
+%% @spec c_cons_skel(Head::cerl(), Tail::cerl()) -> c_cons()
+%%
+%% @doc Creates an abstract list constructor skeleton. Does not fold
+%% constant literals, i.e., the result always has type
+%% <code>cons</code>, representing "<code>[<em>Head</em> |
+%% <em>Tail</em>]</code>".
+%%
+%% <p>This function is occasionally useful when it is necessary to have
+%% annotations on the subnodes of a list constructor node, even when the
+%% subnodes are constant literals. Note however that
+%% <code>is_literal/1</code> will yield <code>false</code> and
+%% <code>concrete/1</code> will fail if passed the result from this
+%% function.</p>
+%%
+%% <p><code>fold_literal/1</code> can be used to revert a node to the
+%% normal-form representation.</p>
+%%
+%% @see ann_c_cons_skel/3
+%% @see update_c_cons_skel/3
+%% @see c_cons/2
+%% @see is_c_cons/1
+%% @see is_c_list/1
+%% @see c_nil/0
+%% @see is_literal/1
+%% @see fold_literal/1
+%% @see concrete/1
+
+%% *Never* collapse literals.
+
+-spec c_cons_skel(cerl(), cerl()) -> c_cons().
+
+c_cons_skel(Head, Tail) ->
+ #c_cons{hd = Head, tl = Tail}.
+
+
+%% @spec ann_c_cons_skel(As::anns(), Head::cerl(), Tail::cerl()) ->
+%% c_cons()
+%% @see c_cons_skel/2
+
+-spec ann_c_cons_skel(anns(), cerl(), cerl()) -> c_cons().
+
+ann_c_cons_skel(As, Head, Tail) ->
+ #c_cons{hd = Head, tl = Tail, anno = As}.
+
+
+%% @spec update_c_cons_skel(Old::cerl(), Head::cerl(), Tail::cerl()) ->
+%% c_cons()
+%% @see c_cons_skel/2
+
+-spec update_c_cons_skel(c_cons() | c_literal(), cerl(), cerl()) -> c_cons().
+
+update_c_cons_skel(Node, Head, Tail) ->
+ #c_cons{hd = Head, tl = Tail, anno = get_ann(Node)}.
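+
+%% Usage sketch (informal): `c_cons/2' folds two literal arguments into a
+%% single literal, while `c_cons_skel/2' always builds a cons node:
+%%
+%%   H = cerl:abstract(1),
+%%   T = cerl:abstract([]),
+%%   true  = cerl:is_literal(cerl:c_cons(H, T)),
+%%   false = cerl:is_literal(cerl:c_cons_skel(H, T)),
+%%   true  = cerl:is_literal(cerl:fold_literal(cerl:c_cons_skel(H, T)))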
+
+
+%% @spec is_c_cons(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% list constructor, otherwise <code>false</code>.
+
+-spec is_c_cons(cerl()) -> boolean().
+
+is_c_cons(#c_cons{}) ->
+ true;
+is_c_cons(#c_literal{val = [_ | _]}) ->
+ true;
+is_c_cons(_) ->
+ false.
+
+
+%% @spec cons_hd(c_cons() | c_literal()) -> cerl()
+%%
+%% @doc Returns the head subtree of an abstract list constructor.
+%%
+%% @see c_cons/2
+
+-spec cons_hd(c_cons() | c_literal()) -> cerl().
+
+cons_hd(#c_cons{hd = Head}) ->
+ Head;
+cons_hd(#c_literal{val = [Head | _]}) ->
+ #c_literal{val = Head}.
+
+
+%% @spec cons_tl(c_cons() | c_literal()) -> cerl()
+%%
+%% @doc Returns the tail subtree of an abstract list constructor.
+%%
+%% <p>Recall that the tail does not necessarily represent a proper
+%% list.</p>
+%%
+%% @see c_cons/2
+
+-spec cons_tl(c_cons() | c_literal()) -> cerl().
+
+cons_tl(#c_cons{tl = Tail}) ->
+ Tail;
+cons_tl(#c_literal{val = [_ | Tail]}) ->
+ #c_literal{val = Tail}.
+
+
+%% @spec is_c_list(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> represents a
+%% proper list, otherwise <code>false</code>. A proper list is either
+%% the empty list <code>[]</code>, or a cons cell <code>[<em>Head</em> |
+%% <em>Tail</em>]</code>, where recursively <code>Tail</code> is a
+%% proper list.
+%%
+%% <p>Note: Because <code>Node</code> is a syntax tree, the actual
+%% run-time values corresponding to its subtrees may often be partially
+%% or completely unknown. Thus, if <code>Node</code> represents e.g.
+%% "<code>[... | Ns]</code>" (where <code>Ns</code> is a variable), then
+%% the function will return <code>false</code>, because it is not known
+%% whether <code>Ns</code> will be bound to a list at run-time. If
+%% <code>Node</code> instead represents e.g. "<code>[1, 2, 3]</code>" or
+%% "<code>[A | []]</code>", then the function will return
+%% <code>true</code>.</p>
+%%
+%% @see c_cons/2
+%% @see c_nil/0
+%% @see list_elements/1
+%% @see list_length/1
+
+-spec is_c_list(cerl()) -> boolean().
+
+is_c_list(#c_cons{tl = Tail}) ->
+ is_c_list(Tail);
+is_c_list(#c_literal{val = V}) ->
+ is_proper_list(V);
+is_c_list(_) ->
+ false.
+
+is_proper_list([_ | Tail]) ->
+ is_proper_list(Tail);
+is_proper_list([]) ->
+ true;
+is_proper_list(_) ->
+ false.
+
+%% @spec list_elements(c_cons() | c_literal()) -> [cerl()]
+%%
+%% @doc Returns the list of element subtrees of an abstract list.
+%% <code>Node</code> must represent a proper list. E.g., if
+%% <code>Node</code> represents "<code>[<em>X1</em>, <em>X2</em> |
+%% [<em>X3</em>, <em>X4</em> | []]</code>", then
+%% <code>list_elements(Node)</code> yields the list <code>[X1, X2, X3,
+%% X4]</code>.
+%%
+%% @see c_cons/2
+%% @see c_nil/0
+%% @see is_c_list/1
+%% @see list_length/1
+%% @see make_list/2
+
+-spec list_elements(c_cons() | c_literal()) -> [cerl()].
+
+list_elements(#c_cons{hd = Head, tl = Tail}) ->
+ [Head | list_elements(Tail)];
+list_elements(#c_literal{val = V}) ->
+ abstract_list(V).
+
+abstract_list([X | Xs]) ->
+ [abstract(X) | abstract_list(Xs)];
+abstract_list([]) ->
+ [].
+
+
+%% @spec list_length(Node::c_cons() | c_literal()) -> integer()
+%%
+%% @doc Returns the number of element subtrees of an abstract list.
+%% <code>Node</code> must represent a proper list. E.g., if
+%% <code>Node</code> represents "<code>[X1 | [X2, X3 | [X4, X5,
+%% X6]]]</code>", then <code>list_length(Node)</code> returns the
+%% integer 6.
+%%
+%% <p>Note: this is equivalent to
+%% <code>length(list_elements(Node))</code>, but potentially more
+%% efficient.</p>
+%%
+%% @see c_cons/2
+%% @see c_nil/0
+%% @see is_c_list/1
+%% @see list_elements/1
+
+-spec list_length(c_cons() | c_literal()) -> non_neg_integer().
+
+list_length(L) ->
+ list_length(L, 0).
+
+list_length(#c_cons{tl = Tail}, A) ->
+ list_length(Tail, A + 1);
+list_length(#c_literal{val = V}, A) ->
+ A + length(V).
+
+
+%% @spec make_list(List) -> Node
+%% @equiv make_list(List, none)
+
+-spec make_list([cerl()]) -> cerl().
+
+make_list(List) ->
+ ann_make_list([], List).
+
+
+%% @spec make_list(List::[cerl()], Tail) -> cerl()
+%%
+%% Tail = cerl() | none
+%%
+%% @doc Creates an abstract list from the elements in <code>List</code>
+%% and the optional <code>Tail</code>. If <code>Tail</code> is
+%% <code>none</code>, the result will represent a nil-terminated list,
+%% otherwise it represents "<code>[... | <em>Tail</em>]</code>".
+%%
+%% @see c_cons/2
+%% @see c_nil/0
+%% @see ann_make_list/3
+%% @see update_list/3
+%% @see list_elements/1
+
+-spec make_list([cerl()], cerl() | 'none') -> cerl().
+
+make_list(List, Tail) ->
+ ann_make_list([], List, Tail).
+
+
+%% @spec update_list(Old::cerl(), List::[cerl()]) -> cerl()
+%% @equiv update_list(Old, List, none)
+
+-spec update_list(cerl(), [cerl()]) -> cerl().
+
+update_list(Node, List) ->
+ ann_make_list(get_ann(Node), List).
+
+
+%% @spec update_list(Old::cerl(), List::[cerl()], Tail) -> cerl()
+%%
+%% Tail = cerl() | none
+%%
+%% @see make_list/2
+%% @see update_list/2
+
+-spec update_list(cerl(), [cerl()], cerl() | 'none') -> cerl().
+
+update_list(Node, List, Tail) ->
+ ann_make_list(get_ann(Node), List, Tail).
+
+
+%% @spec ann_make_list(As::anns(), List::[cerl()]) -> cerl()
+%% @equiv ann_make_list(As, List, none)
+
+-spec ann_make_list(anns(), [cerl()]) -> cerl().
+
+ann_make_list(As, List) ->
+ ann_make_list(As, List, none).
+
+
+%% @spec ann_make_list(As::anns(), List::[cerl()], Tail) -> cerl()
+%%
+%% Tail = cerl() | none
+%%
+%% @see make_list/2
+%% @see ann_make_list/2
+
+-spec ann_make_list(anns(), [cerl()], cerl() | 'none') -> cerl().
+
+ann_make_list(As, [H | T], Tail) ->
+ ann_c_cons(As, H, make_list(T, Tail)); % `c_cons' folds literals
+ann_make_list(As, [], none) ->
+ ann_c_nil(As);
+ann_make_list(_, [], Node) ->
+ Node.
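+
+%% Usage sketch (informal): building a list node from element trees and
+%% taking it apart again:
+%%
+%%   L = cerl:make_list([cerl:c_int(1), cerl:c_int(2)]),
+%%   2 = cerl:list_length(L),
+%%   [1, 2] = [cerl:concrete(E) || E <- cerl:list_elements(L)]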
+
+
+%% ---------------------------------------------------------------------
+%% maps
+
+%% @spec is_c_map(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% map constructor, otherwise <code>false</code>.
+
+-spec is_c_map(cerl()) -> boolean().
+
+is_c_map(#c_map{}) ->
+ true;
+is_c_map(#c_literal{val = V}) when is_map(V) ->
+ true;
+is_c_map(_) ->
+ false.
+
+-spec map_es(c_map() | c_literal()) -> [c_map_pair()].
+
+map_es(#c_literal{anno=As,val=M}) when is_map(M) ->
+ [ann_c_map_pair(As,
+ #c_literal{anno=As,val='assoc'},
+ #c_literal{anno=As,val=K},
+ #c_literal{anno=As,val=V}) || {K,V} <- maps:to_list(M)];
+map_es(#c_map{es = Es}) ->
+ Es.
+
+-spec map_arg(c_map() | c_literal()) -> c_map() | c_literal().
+
+map_arg(#c_literal{anno=As,val=M}) when is_map(M) ->
+ #c_literal{anno=As,val=#{}};
+map_arg(#c_map{arg=M}) ->
+ M.
+
+-spec c_map([c_map_pair()]) -> c_map().
+
+c_map(Pairs) ->
+ ann_c_map([], Pairs).
+
+-spec c_map_pattern([c_map_pair()]) -> c_map().
+
+c_map_pattern(Pairs) ->
+ #c_map{es=Pairs, is_pat=true}.
+
+-spec ann_c_map_pattern([term()], [c_map_pair()]) -> c_map().
+
+ann_c_map_pattern(As, Pairs) ->
+ #c_map{anno=As, es=Pairs, is_pat=true}.
+
+-spec is_c_map_empty(c_map() | c_literal()) -> boolean().
+
+is_c_map_empty(#c_map{ es=[] }) -> true;
+is_c_map_empty(#c_literal{val=M}) when is_map(M),map_size(M) =:= 0 -> true;
+is_c_map_empty(_) -> false.
+
+-spec is_c_map_pattern(c_map()) -> boolean().
+
+is_c_map_pattern(#c_map{is_pat=IsPat}) ->
+ IsPat.
+
+-spec ann_c_map([term()], [c_map_pair()]) -> c_map() | c_literal().
+
+ann_c_map(As, Es) ->
+ ann_c_map(As, #c_literal{val=#{}}, Es).
+
+-spec ann_c_map(anns(), c_map() | c_literal(), [c_map_pair()]) -> c_map() | c_literal().
+
+ann_c_map(As, #c_literal{val=M}, Es) when is_map(M) ->
+ fold_map_pairs(As,Es,M);
+ann_c_map(As, M, Es) ->
+ #c_map{arg=M, es=Es, anno=As}.
+
+%% Fold literal key/value pairs into the literal map `M' for as long as
+%% possible; stop at the first pair that cannot be folded (not fully
+%% literal, or an exact pair whose key is not present) and keep the
+%% remaining pairs as explicit map elements.
+fold_map_pairs(As,[],M) -> #c_literal{anno=As,val=M};
+%% M#{ K => V}
+fold_map_pairs(As,[#c_map_pair{op=#c_literal{val=assoc},key=Ck,val=Cv}=E|Es],M) ->
+ case is_lit_list([Ck,Cv]) of
+ true ->
+ [K,V] = lit_list_vals([Ck,Cv]),
+ fold_map_pairs(As,Es,maps:put(K,V,M));
+ false ->
+ #c_map{arg=#c_literal{val=M,anno=As}, es=[E|Es], anno=As}
+ end;
+%% M#{ K := V}
+fold_map_pairs(As,[#c_map_pair{op=#c_literal{val=exact},key=Ck,val=Cv}=E|Es],M) ->
+ case is_lit_list([Ck,Cv]) of
+ true ->
+ [K,V] = lit_list_vals([Ck,Cv]),
+ case maps:is_key(K,M) of
+ true -> fold_map_pairs(As,Es,maps:put(K,V,M));
+ false ->
+ #c_map{arg=#c_literal{val=M,anno=As}, es=[E|Es], anno=As }
+ end;
+ false ->
+ #c_map{arg=#c_literal{val=M,anno=As}, es=[E|Es], anno=As }
+ end.
+
+-spec update_c_map(c_map(), cerl(), [cerl()]) -> c_map() | c_literal().
+
+update_c_map(#c_map{is_pat=true}=Old, M, Es) ->
+ Old#c_map{arg=M, es=Es};
+update_c_map(#c_map{is_pat=false}=Old, M, Es) ->
+ ann_c_map(get_ann(Old), M, Es).
+
+%% Accessors for the subtrees of an abstract map pair; cf. c_map_pair/2.
+
+map_pair_key(#c_map_pair{key=K}) -> K.
+map_pair_val(#c_map_pair{val=V}) -> V.
+map_pair_op(#c_map_pair{op=Op}) -> Op.
+
+-spec c_map_pair(cerl(), cerl()) -> c_map_pair().
+
+c_map_pair(Key,Val) ->
+ #c_map_pair{op=#c_literal{val=assoc},key=Key,val=Val}.
+
+-spec c_map_pair_exact(cerl(), cerl()) -> c_map_pair().
+
+c_map_pair_exact(Key,Val) ->
+ #c_map_pair{op=#c_literal{val=exact},key=Key,val=Val}.
+
+-spec ann_c_map_pair(anns(), cerl(), cerl(), cerl()) ->
+ c_map_pair().
+
+ann_c_map_pair(As,Op,K,V) ->
+ #c_map_pair{op = Op, key = K, val = V, anno = As}.
+
+%% Like ann_c_map_pair/4, but keeps the annotations of `Old'.
+update_c_map_pair(Old,Op,K,V) ->
+ #c_map_pair{op = Op, key = K, val = V, anno = get_ann(Old)}.
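+
+%% Usage sketch (informal): `c_map/1' folds fully literal association
+%% pairs into a map literal, much like `c_cons/2' and `c_tuple/1' do for
+%% their node types:
+%%
+%%   P = cerl:c_map_pair(cerl:abstract(key), cerl:abstract(1)),
+%%   M = cerl:c_map([P]),
+%%   true = cerl:is_literal(M),
+%%   #{key := 1} = cerl:concrete(M)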
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_tuple(Elements::[cerl()]) -> cerl()
+%%
+%% @doc Creates an abstract tuple. If <code>Elements</code> is
+%% <code>[E1, ..., En]</code>, the result represents
+%% "<code>{<em>E1</em>, ..., <em>En</em>}</code>". Note that if all
+%% nodes in <code>Elements</code> have type <code>literal</code>, or if
+%% <code>Elements</code> is empty, then the result will also have type
+%% <code>literal</code> and annotations on nodes in
+%% <code>Elements</code> are lost.
+%%
+%% <p>Recall that Erlang has distinct 1-tuples, i.e., <code>{X}</code>
+%% is always distinct from <code>X</code> itself.</p>
+%%
+%% @see ann_c_tuple/2
+%% @see update_c_tuple/2
+%% @see is_c_tuple/1
+%% @see tuple_es/1
+%% @see tuple_arity/1
+%% @see c_tuple_skel/1
+
+%% *Always* collapse literals.
+
+-spec c_tuple([cerl()]) -> c_tuple() | c_literal().
+
+c_tuple(Es) ->
+ case is_lit_list(Es) of
+ false ->
+ #c_tuple{es = Es};
+ true ->
+ #c_literal{val = list_to_tuple(lit_list_vals(Es))}
+ end.
+
+
+%% @spec ann_c_tuple(As::anns(), Elements::[cerl()]) -> cerl()
+%% @see c_tuple/1
+
+-spec ann_c_tuple(anns(), [cerl()]) -> c_tuple() | c_literal().
+
+ann_c_tuple(As, Es) ->
+ case is_lit_list(Es) of
+ false ->
+ #c_tuple{es = Es, anno = As};
+ true ->
+ #c_literal{val = list_to_tuple(lit_list_vals(Es)), anno = As}
+ end.
+
+
+%% @spec update_c_tuple(Old::cerl(), Elements::[cerl()]) -> cerl()
+%% @see c_tuple/1
+
+-spec update_c_tuple(c_tuple() | c_literal(), [cerl()]) -> c_tuple() | c_literal().
+
+update_c_tuple(Node, Es) ->
+ case is_lit_list(Es) of
+ false ->
+ #c_tuple{es = Es, anno = get_ann(Node)};
+ true ->
+ #c_literal{val = list_to_tuple(lit_list_vals(Es)),
+ anno = get_ann(Node)}
+ end.
+
+
+%% @spec c_tuple_skel(Elements::[cerl()]) -> cerl()
+%%
+%% @doc Creates an abstract tuple skeleton. Does not fold constant
+%% literals, i.e., the result always has type <code>tuple</code>,
+%% representing "<code>{<em>E1</em>, ..., <em>En</em>}</code>", if
+%% <code>Elements</code> is <code>[E1, ..., En]</code>.
+%%
+%% <p>This function is occasionally useful when it is necessary to have
+%% annotations on the subnodes of a tuple node, even when all the
+%% subnodes are constant literals. Note however that
+%% <code>is_literal/1</code> will yield <code>false</code> and
+%% <code>concrete/1</code> will fail if passed the result from this
+%% function.</p>
+%%
+%% <p><code>fold_literal/1</code> can be used to revert a node to the
+%% normal-form representation.</p>
+%%
+%% @see ann_c_tuple_skel/2
+%% @see update_c_tuple_skel/2
+%% @see c_tuple/1
+%% @see tuple_es/1
+%% @see is_c_tuple/1
+%% @see is_literal/1
+%% @see fold_literal/1
+%% @see concrete/1
+
+%% *Never* collapse literals.
+
+-spec c_tuple_skel([cerl()]) -> c_tuple().
+
+c_tuple_skel(Es) ->
+ #c_tuple{es = Es}.
+
+
+%% @spec ann_c_tuple_skel(As::anns(), Elements::[cerl()]) -> cerl()
+%% @see c_tuple_skel/1
+
+-spec ann_c_tuple_skel(anns(), [cerl()]) -> c_tuple().
+
+ann_c_tuple_skel(As, Es) ->
+ #c_tuple{es = Es, anno = As}.
+
+
+%% @spec update_c_tuple_skel(Old::cerl(), Elements::[cerl()]) -> cerl()
+%% @see c_tuple_skel/1
+
+-spec update_c_tuple_skel(c_tuple(), [cerl()]) -> c_tuple().
+
+update_c_tuple_skel(Old, Es) ->
+ #c_tuple{es = Es, anno = get_ann(Old)}.
+
+
+%% @spec is_c_tuple(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% tuple, otherwise <code>false</code>.
+%%
+%% @see c_tuple/1
+
+-spec is_c_tuple(cerl()) -> boolean().
+
+is_c_tuple(#c_tuple{}) ->
+ true;
+is_c_tuple(#c_literal{val = V}) when is_tuple(V) ->
+ true;
+is_c_tuple(_) ->
+ false.
+
+
+%% @spec tuple_es(cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of element subtrees of an abstract tuple.
+%%
+%% @see c_tuple/1
+
+-spec tuple_es(c_tuple() | c_literal()) -> [cerl()].
+
+tuple_es(#c_tuple{es = Es}) ->
+ Es;
+tuple_es(#c_literal{val = V}) ->
+ make_lit_list(tuple_to_list(V)).
+
+
+%% @spec tuple_arity(Node::cerl()) -> integer()
+%%
+%% @doc Returns the number of element subtrees of an abstract tuple.
+%%
+%% <p>Note: this is equivalent to <code>length(tuple_es(Node))</code>,
+%% but potentially more efficient.</p>
+%%
+%% @see tuple_es/1
+%% @see c_tuple/1
+
+-spec tuple_arity(c_tuple() | c_literal()) -> non_neg_integer().
+
+tuple_arity(#c_tuple{es = Es}) ->
+ length(Es);
+tuple_arity(#c_literal{val = V}) when is_tuple(V) ->
+ tuple_size(V).
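+
+%% Usage sketch (informal): literal folding and the tuple accessors:
+%%
+%%   T = cerl:c_tuple([cerl:c_atom(ok), cerl:c_int(1)]),
+%%   true = cerl:is_literal(T),
+%%   2 = cerl:tuple_arity(T),
+%%   {ok, 1} = cerl:concrete(T),
+%%   false = cerl:is_literal(cerl:c_tuple_skel([cerl:c_atom(ok)]))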
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_var(Name::var_name()) -> cerl()
+%%
+%% var_name() = integer() | atom() | {atom(), arity()}
+%%
+%% @doc Creates an abstract variable. A variable is identified by its
+%% name, given by the <code>Name</code> parameter.
+%%
+%% <p>If a name is given by a single atom, it should either be a
+%% "simple" atom which does not need to be single-quoted in Erlang, or
+%% otherwise its print name should correspond to a proper Erlang
+%% variable, i.e., begin with an uppercase character or an
+%% underscore. Names of the form <code>{A, N}</code> represent
+%% function name variables "<code><em>A</em>/<em>N</em></code>"; these
+%% are special variables which may be bound only in the function
+%% definitions of a module or a <code>letrec</code>. They may not be
+%% bound in <code>let</code> expressions and cannot occur in clause
+%% patterns. The atom <code>A</code> in a function name may be any
+%% atom; the integer <code>N</code> must be nonnegative. The functions
+%% <code>c_fname/2</code> etc. are utilities for handling function
+%% name variables.</p>
+%%
+%% <p>When printing variable names, they must have the form of proper
+%% Core Erlang variables and function names. E.g., a name represented
+%% by an integer such as <code>42</code> could be formatted as
+%% "<code>_42</code>", an atom <code>'Xxx'</code> simply as
+%% "<code>Xxx</code>", and an atom <code>foo</code> as
+%% "<code>_foo</code>". However, one must assure that any two valid
+%% distinct names are never mapped to the same strings. Tuples such
+%% as <code>{foo, 2}</code> representing function names can simply by
+%% formatted as "<code>'foo'/2</code>", with no risk of conflicts.</p>
+%%
+%% @see ann_c_var/2
+%% @see update_c_var/2
+%% @see is_c_var/1
+%% @see var_name/1
+%% @see c_fname/2
+%% @see c_module/4
+%% @see c_letrec/2
+
+-spec c_var(var_name()) -> c_var().
+
+c_var(Name) ->
+ #c_var{name = Name}.
+
+
+%% @spec ann_c_var(As::anns(), Name::var_name()) -> c_var()
+%%
+%% @see c_var/1
+
+-spec ann_c_var(anns(), var_name()) -> c_var().
+
+ann_c_var(As, Name) ->
+ #c_var{name = Name, anno = As}.
+
+%% @spec update_c_var(Old::cerl(), Name::var_name()) -> c_var()
+%%
+%% @see c_var/1
+
+-spec update_c_var(c_var(), var_name()) -> c_var().
+
+update_c_var(Node, Name) ->
+ #c_var{name = Name, anno = get_ann(Node)}.
+
+
+%% @spec is_c_var(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% variable, otherwise <code>false</code>.
+%%
+%% @see c_var/1
+
+-spec is_c_var(cerl()) -> boolean().
+
+is_c_var(#c_var{}) ->
+ true;
+is_c_var(_) ->
+ false.
+
+
+%% @spec c_fname(Name::atom(), Arity::arity()) -> c_var()
+%% @equiv c_var({Name, Arity})
+%% @see fname_id/1
+%% @see fname_arity/1
+%% @see is_c_fname/1
+%% @see ann_c_fname/3
+%% @see update_c_fname/3
+
+-spec c_fname(atom(), arity()) -> c_var().
+
+c_fname(Atom, Arity) ->
+ c_var({Atom, Arity}).
+
+
+%% @spec ann_c_fname(As::anns(), Name::atom(), Arity::arity()) -> c_var()
+%%
+%% @equiv ann_c_var(As, {Atom, Arity})
+%% @see c_fname/2
+
+-spec ann_c_fname(anns(), atom(), arity()) -> c_var().
+
+ann_c_fname(As, Atom, Arity) ->
+ ann_c_var(As, {Atom, Arity}).
+
+
+%% @spec update_c_fname(Old::c_var(), Name::atom()) -> c_var()
+%% @doc Like <code>update_c_fname/3</code>, but takes the arity from
+%% <code>Old</code>.
+%% @see update_c_fname/3
+%% @see c_fname/2
+
+-spec update_c_fname(c_var(), atom()) -> c_var().
+
+update_c_fname(#c_var{name = {_, Arity}, anno = As}, Atom) ->
+ #c_var{name = {Atom, Arity}, anno = As}.
+
+
+%% @spec update_c_fname(Old::c_var(), Name::atom(), Arity::arity()) -> c_var()
+%%
+%% @equiv update_c_var(Old, {Atom, Arity})
+%% @see update_c_fname/2
+%% @see c_fname/2
+
+-spec update_c_fname(c_var(), atom(), arity()) -> c_var().
+
+update_c_fname(Node, Atom, Arity) ->
+ update_c_var(Node, {Atom, Arity}).
+
+
+%% @spec is_c_fname(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% function name variable, otherwise <code>false</code>.
+%%
+%% @see c_fname/2
+%% @see c_var/1
+%% @see var_name/1
+
+-spec is_c_fname(cerl()) -> boolean().
+
+is_c_fname(#c_var{name = {A, N}}) when is_atom(A), is_integer(N), N >= 0 ->
+ true;
+is_c_fname(_) ->
+ false.
+
+
+%% @spec var_name(c_var()) -> var_name()
+%%
+%% @doc Returns the name of an abstract variable.
+%%
+%% @see c_var/1
+
+-spec var_name(c_var()) -> var_name().
+
+var_name(Node) ->
+ Node#c_var.name.
+
+
+%% @spec fname_id(c_var()) -> atom()
+%%
+%% @doc Returns the identifier part of an abstract function name
+%% variable.
+%%
+%% @see fname_arity/1
+%% @see c_fname/2
+
+-spec fname_id(c_var()) -> atom().
+
+fname_id(#c_var{name={A,_}}) ->
+ A.
+
+
+%% @spec fname_arity(c_var()) -> arity()
+%%
+%% @doc Returns the arity part of an abstract function name variable.
+%%
+%% @see fname_id/1
+%% @see c_fname/2
+
+-spec fname_arity(c_var()) -> arity().
+
+fname_arity(#c_var{name={_,N}}) ->
+ N.
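+
+%% Usage sketch (informal): function name variables and their accessors:
+%%
+%%   F = cerl:c_fname(foo, 2),
+%%   true = cerl:is_c_fname(F),
+%%   {foo, 2} = cerl:var_name(F),
+%%   foo = cerl:fname_id(F),
+%%   2   = cerl:fname_arity(F)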
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_values(Elements::[cerl()]) -> c_values()
+%%
+%% @doc Creates an abstract value list. If <code>Elements</code> is
+%% <code>[E1, ..., En]</code>, the result represents
+%% "<code>&lt;<em>E1</em>, ..., <em>En</em>&gt;</code>".
+%%
+%% @see ann_c_values/2
+%% @see update_c_values/2
+%% @see is_c_values/1
+%% @see values_es/1
+%% @see values_arity/1
+
+-spec c_values([cerl()]) -> c_values().
+
+c_values(Es) ->
+ #c_values{es = Es}.
+
+
+%% @spec ann_c_values(As::anns(), Elements::[cerl()]) -> c_values()
+%% @see c_values/1
+
+-spec ann_c_values(anns(), [cerl()]) -> c_values().
+
+ann_c_values(As, Es) ->
+ #c_values{es = Es, anno = As}.
+
+
+%% @spec update_c_values(Old::cerl(), Elements::[cerl()]) -> c_values()
+%% @see c_values/1
+
+-spec update_c_values(c_values(), [cerl()]) -> c_values().
+
+update_c_values(Node, Es) ->
+ #c_values{es = Es, anno = get_ann(Node)}.
+
+
+%% @spec is_c_values(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% value list; otherwise <code>false</code>.
+%%
+%% @see c_values/1
+
+-spec is_c_values(cerl()) -> boolean().
+
+is_c_values(#c_values{}) ->
+ true;
+is_c_values(_) ->
+ false.
+
+
+%% @spec values_es(c_values()) -> [cerl()]
+%%
+%% @doc Returns the list of element subtrees of an abstract value
+%% list.
+%%
+%% @see c_values/1
+%% @see values_arity/1
+
+-spec values_es(c_values()) -> [cerl()].
+
+values_es(Node) ->
+ Node#c_values.es.
+
+
+%% @spec values_arity(Node::c_values()) -> non_neg_integer()
+%%
+%% @doc Returns the number of element subtrees of an abstract value
+%% list.
+%%
+%% <p>Note: This is equivalent to
+%% <code>length(values_es(Node))</code>, but potentially more
+%% efficient.</p>
+%%
+%% @see c_values/1
+%% @see values_es/1
+
+-spec values_arity(c_values()) -> non_neg_integer().
+
+values_arity(Node) ->
+ length(values_es(Node)).
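+
+%% Usage sketch (informal):
+%%
+%%   V = cerl:c_values([cerl:c_var('X'), cerl:c_var('Y')]),
+%%   true = cerl:is_c_values(V),
+%%   2 = cerl:values_arity(V)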
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_binary(Segments::[c_bitstr()]) -> c_binary()
+%%
+%% @doc Creates an abstract binary-template. A binary object is a
+%% sequence of 8-bit bytes. It is specified by zero or more bit-string
+%% template <em>segments</em> of arbitrary lengths (in number of bits),
+%% such that the sum of the lengths is evenly divisible by 8. If
+%% <code>Segments</code> is <code>[S1, ..., Sn]</code>, the result
+%% represents "<code>#{<em>S1</em>, ..., <em>Sn</em>}#</code>". All the
+%% <code>Si</code> must have type <code>bitstr</code>.
+%%
+%% @see ann_c_binary/2
+%% @see update_c_binary/2
+%% @see is_c_binary/1
+%% @see binary_segments/1
+%% @see c_bitstr/5
+
+-spec c_binary([c_bitstr()]) -> c_binary().
+
+c_binary(Segments) ->
+ #c_binary{segments = Segments}.
+
+
+%% @spec ann_c_binary(As::anns(), Segments::[c_bitstr()]) -> c_binary()
+%% @see c_binary/1
+
+-spec ann_c_binary(anns(), [c_bitstr()]) -> c_binary().
+
+ann_c_binary(As, Segments) ->
+ #c_binary{segments = Segments, anno = As}.
+
+
+%% @spec update_c_binary(Old::cerl(), Segments::[c_bitstr()]) -> cerl()
+%% @see c_binary/1
+
+-spec update_c_binary(c_binary(), [c_bitstr()]) -> c_binary().
+
+update_c_binary(Node, Segments) ->
+ #c_binary{segments = Segments, anno = get_ann(Node)}.
+
+
+%% @spec is_c_binary(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% binary-template; otherwise <code>false</code>.
+%%
+%% @see c_binary/1
+
+-spec is_c_binary(cerl()) -> boolean().
+
+is_c_binary(#c_binary{}) ->
+ true;
+is_c_binary(_) ->
+ false.
+
+
+%% @spec binary_segments(cerl()) -> [c_bitstr()]
+%%
+%% @doc Returns the list of segment subtrees of an abstract
+%% binary-template.
+%%
+%% @see c_binary/1
+%% @see c_bitstr/5
+
+-spec binary_segments(c_binary()) -> [c_bitstr()].
+
+binary_segments(Node) ->
+ Node#c_binary.segments.
+
+
+%% @spec c_bitstr(Value::cerl(), Size::cerl(), Unit::cerl(),
+%% Type::cerl(), Flags::cerl()) -> c_bitstr()
+%%
+%% @doc Creates an abstract bit-string template. These can only occur as
+%% components of an abstract binary-template (see {@link c_binary/1}).
+%% The result represents "<code>#&lt;<em>Value</em>&gt;(<em>Size</em>,
+%% <em>Unit</em>, <em>Type</em>, <em>Flags</em>)</code>", where
+%% <code>Unit</code> must represent a positive integer constant,
+%% <code>Type</code> must represent a constant atom (one of
+%% <code>'integer'</code>, <code>'float'</code>, or
+%% <code>'binary'</code>), and <code>Flags</code> must represent a
+%% constant list <code>"[<em>F1</em>, ..., <em>Fn</em>]"</code> where
+%% all the <code>Fi</code> are atoms.
+%%
+%% @see c_binary/1
+%% @see ann_c_bitstr/6
+%% @see update_c_bitstr/6
+%% @see is_c_bitstr/1
+%% @see bitstr_val/1
+%% @see bitstr_size/1
+%% @see bitstr_unit/1
+%% @see bitstr_type/1
+%% @see bitstr_flags/1
+
+-spec c_bitstr(cerl(), cerl(), cerl(), cerl(), cerl()) -> c_bitstr().
+
+c_bitstr(Val, Size, Unit, Type, Flags) ->
+ #c_bitstr{val = Val, size = Size, unit = Unit, type = Type,
+ flags = Flags}.
+
+
+%% @spec c_bitstr(Value::cerl(), Size::cerl(), Type::cerl(),
+%% Flags::cerl()) -> c_bitstr()
+%% @equiv c_bitstr(Value, Size, abstract(1), Type, Flags)
+
+-spec c_bitstr(cerl(), cerl(), cerl(), cerl()) -> c_bitstr().
+
+c_bitstr(Val, Size, Type, Flags) ->
+ c_bitstr(Val, Size, abstract(1), Type, Flags).
+
+
+%% @spec c_bitstr(Value::cerl(), Type::cerl(),
+%% Flags::cerl()) -> c_bitstr()
+%% @equiv c_bitstr(Value, abstract(all), abstract(1), Type, Flags)
+
+-spec c_bitstr(cerl(), cerl(), cerl()) -> c_bitstr().
+
+c_bitstr(Val, Type, Flags) ->
+ c_bitstr(Val, abstract(all), abstract(1), Type, Flags).
+
+
+%% @spec ann_c_bitstr(As::anns(), Value::cerl(), Size::cerl(),
+%% Unit::cerl(), Type::cerl(), Flags::cerl()) -> cerl()
+%% @see c_bitstr/5
+%% @see ann_c_bitstr/5
+
+-spec ann_c_bitstr(anns(), cerl(), cerl(), cerl(), cerl(), cerl()) ->
+ c_bitstr().
+
+ann_c_bitstr(As, Val, Size, Unit, Type, Flags) ->
+ #c_bitstr{val = Val, size = Size, unit = Unit, type = Type,
+ flags = Flags, anno = As}.
+
+%% @spec ann_c_bitstr(As::anns(), Value::cerl(), Size::cerl(),
+%% Type::cerl(), Flags::cerl()) -> c_bitstr()
+%% @equiv ann_c_bitstr(As, Value, Size, abstract(1), Type, Flags)
+
+-spec ann_c_bitstr(anns(), cerl(), cerl(), cerl(), cerl()) -> c_bitstr().
+
+ann_c_bitstr(As, Value, Size, Type, Flags) ->
+ ann_c_bitstr(As, Value, Size, abstract(1), Type, Flags).
+
+
+%% @spec update_c_bitstr(Old::c_bitstr(), Value::cerl(), Size::cerl(),
+%% Unit::cerl(), Type::cerl(), Flags::cerl()) -> c_bitstr()
+%% @see c_bitstr/5
+%% @see update_c_bitstr/5
+
+-spec update_c_bitstr(c_bitstr(), cerl(), cerl(), cerl(), cerl(), cerl()) ->
+ c_bitstr().
+
+update_c_bitstr(Node, Val, Size, Unit, Type, Flags) ->
+ #c_bitstr{val = Val, size = Size, unit = Unit, type = Type,
+ flags = Flags, anno = get_ann(Node)}.
+
+
+%% @spec update_c_bitstr(Old::c_bitstr(), Value::cerl(), Size::cerl(),
+%% Type::cerl(), Flags::cerl()) -> c_bitstr()
+%% @equiv update_c_bitstr(Node, Value, Size, abstract(1), Type, Flags)
+
+-spec update_c_bitstr(c_bitstr(), cerl(), cerl(), cerl(), cerl()) -> c_bitstr().
+
+update_c_bitstr(Node, Value, Size, Type, Flags) ->
+ update_c_bitstr(Node, Value, Size, abstract(1), Type, Flags).
+
+%% @spec is_c_bitstr(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% bit-string template; otherwise <code>false</code>.
+%%
+%% @see c_bitstr/5
+
+-spec is_c_bitstr(cerl()) -> boolean().
+
+is_c_bitstr(#c_bitstr{}) ->
+ true;
+is_c_bitstr(_) ->
+ false.
+
+
+%% @spec bitstr_val(c_bitstr()) -> cerl()
+%%
+%% @doc Returns the value subtree of an abstract bit-string template.
+%%
+%% @see c_bitstr/5
+
+-spec bitstr_val(c_bitstr()) -> cerl().
+
+bitstr_val(Node) ->
+ Node#c_bitstr.val.
+
+
+%% @spec bitstr_size(c_bitstr()) -> cerl()
+%%
+%% @doc Returns the size subtree of an abstract bit-string template.
+%%
+%% @see c_bitstr/5
+
+-spec bitstr_size(c_bitstr()) -> cerl().
+
+bitstr_size(Node) ->
+ Node#c_bitstr.size.
+
+
+%% @spec bitstr_bitsize(c_bitstr()) -> any | all | utf | integer()
+%%
+%% @doc Returns the total size in bits of an abstract bit-string
+%% template. If the size field is an integer literal, the result is the
+%% product of the size and unit values; if the size field is the atom
+%% literal <code>all</code>, the atom <code>all</code> is returned. If the
+%% size field is the literal <code>undefined</code> (as for the utf
+%% types), the atom <code>utf</code> is returned. If the size is not a
+%% literal, the atom <code>any</code> is returned.
+%%
+%% @see c_bitstr/5
+
+-spec bitstr_bitsize(c_bitstr()) -> 'all' | 'any' | 'utf' | non_neg_integer().
+
+bitstr_bitsize(Node) ->
+ Size = Node#c_bitstr.size,
+ case is_literal(Size) of
+ true ->
+ case concrete(Size) of
+ all ->
+ all;
+ undefined ->
+ %% just an assertion below
+ "utf" ++ _ = atom_to_list(concrete(Node#c_bitstr.type)),
+ utf;
+ S when is_integer(S) ->
+ S * concrete(Node#c_bitstr.unit)
+ end;
+ false ->
+ any
+ end.
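+
+%% Usage sketch (informal): a 16-bit integer segment and an `all'-sized
+%% tail segment:
+%%
+%%   Flags = cerl:abstract([unsigned, big]),
+%%   S1 = cerl:c_bitstr(cerl:c_var('X'), cerl:c_int(16), cerl:c_int(1),
+%%                      cerl:c_atom(integer), Flags),
+%%   16  = cerl:bitstr_bitsize(S1),
+%%   S2 = cerl:c_bitstr(cerl:c_var('Y'), cerl:c_atom(binary), Flags),
+%%   all = cerl:bitstr_bitsize(S2)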
+
+
+%% @spec bitstr_unit(c_bitstr()) -> cerl()
+%%
+%% @doc Returns the unit subtree of an abstract bit-string template.
+%%
+%% @see c_bitstr/5
+
+-spec bitstr_unit(c_bitstr()) -> cerl().
+
+bitstr_unit(Node) ->
+ Node#c_bitstr.unit.
+
+
+%% @spec bitstr_type(c_bitstr()) -> cerl()
+%%
+%% @doc Returns the type subtree of an abstract bit-string template.
+%%
+%% @see c_bitstr/5
+
+-spec bitstr_type(c_bitstr()) -> cerl().
+
+bitstr_type(Node) ->
+ Node#c_bitstr.type.
+
+
+%% @spec bitstr_flags(c_bitstr()) -> cerl()
+%%
+%% @doc Returns the flags subtree of an abstract bit-string template.
+%%
+%% @see c_bitstr/5
+
+-spec bitstr_flags(c_bitstr()) -> cerl().
+
+bitstr_flags(Node) ->
+ Node#c_bitstr.flags.
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_fun(Variables::[c_var()], Body::cerl()) -> c_fun()
+%%
+%% @doc Creates an abstract fun-expression. If <code>Variables</code>
+%% is <code>[V1, ..., Vn]</code>, the result represents "<code>fun
+%% (<em>V1</em>, ..., <em>Vn</em>) -> <em>Body</em></code>". All the
+%% <code>Vi</code> must have type <code>var</code>.
+%%
+%% @see ann_c_fun/3
+%% @see update_c_fun/3
+%% @see is_c_fun/1
+%% @see fun_vars/1
+%% @see fun_body/1
+%% @see fun_arity/1
+
+-spec c_fun([c_var()], cerl()) -> c_fun().
+
+c_fun(Variables, Body) ->
+ #c_fun{vars = Variables, body = Body}.
+
+
+%% @spec ann_c_fun(As::anns(), Variables::[c_var()], Body::cerl()) ->
+%% c_fun()
+%% @see c_fun/2
+
+-spec ann_c_fun(anns(), [c_var()], cerl()) -> c_fun().
+
+ann_c_fun(As, Variables, Body) ->
+ #c_fun{vars = Variables, body = Body, anno = As}.
+
+
+%% @spec update_c_fun(Old::c_fun(), Variables::[c_var()],
+%% Body::cerl()) -> c_fun()
+%% @see c_fun/2
+
+-spec update_c_fun(c_fun(), [c_var()], cerl()) -> c_fun().
+
+update_c_fun(Node, Variables, Body) ->
+ #c_fun{vars = Variables, body = Body, anno = get_ann(Node)}.
+
+
+%% @spec is_c_fun(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% fun-expression, otherwise <code>false</code>.
+%%
+%% @see c_fun/2
+
+-spec is_c_fun(cerl()) -> boolean().
+
+is_c_fun(#c_fun{}) ->
+ true; % Now this is fun!
+is_c_fun(_) ->
+ false.
+
+
+%% @spec fun_vars(c_fun()) -> [c_var()]
+%%
+%% @doc Returns the list of parameter subtrees of an abstract
+%% fun-expression.
+%%
+%% @see c_fun/2
+%% @see fun_arity/1
+
+-spec fun_vars(c_fun()) -> [c_var()].
+
+fun_vars(Node) ->
+ Node#c_fun.vars.
+
+
+%% @spec fun_body(c_fun()) -> cerl()
+%%
+%% @doc Returns the body subtree of an abstract fun-expression.
+%%
+%% @see c_fun/2
+
+-spec fun_body(c_fun()) -> cerl().
+
+fun_body(Node) ->
+ Node#c_fun.body.
+
+
+%% @spec fun_arity(Node::c_fun()) -> arity()
+%%
+%% @doc Returns the number of parameter subtrees of an abstract
+%% fun-expression.
+%%
+%% <p>Note: this is equivalent to <code>length(fun_vars(Node))</code>,
+%% but potentially more efficient.</p>
+%%
+%% @see c_fun/2
+%% @see fun_vars/1
+
+-spec fun_arity(c_fun()) -> arity().
+
+fun_arity(Node) ->
+ length(fun_vars(Node)).
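+
+%% Usage sketch (informal): the identity fun "fun (X) -> X":
+%%
+%%   X = cerl:c_var('X'),
+%%   F = cerl:c_fun([X], X),
+%%   true = cerl:is_c_fun(F),
+%%   1 = cerl:fun_arity(F),
+%%   [X] = cerl:fun_vars(F)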
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_seq(Argument::cerl(), Body::cerl()) -> c_seq()
+%%
+%% @doc Creates an abstract sequencing expression. The result
+%% represents "<code>do <em>Argument</em> <em>Body</em></code>".
+%%
+%% @see ann_c_seq/3
+%% @see update_c_seq/3
+%% @see is_c_seq/1
+%% @see seq_arg/1
+%% @see seq_body/1
+
+-spec c_seq(cerl(), cerl()) -> c_seq().
+
+c_seq(Argument, Body) ->
+ #c_seq{arg = Argument, body = Body}.
+
+
+%% @spec ann_c_seq(As::anns(), Argument::cerl(), Body::cerl()) -> c_seq()
+%%
+%% @see c_seq/2
+
+-spec ann_c_seq(anns(), cerl(), cerl()) -> c_seq().
+
+ann_c_seq(As, Argument, Body) ->
+ #c_seq{arg = Argument, body = Body, anno = As}.
+
+
+%% @spec update_c_seq(Old::c_seq(), Argument::cerl(), Body::cerl()) ->
+%% c_seq()
+%% @see c_seq/2
+
+-spec update_c_seq(c_seq(), cerl(), cerl()) -> c_seq().
+
+update_c_seq(Node, Argument, Body) ->
+ #c_seq{arg = Argument, body = Body, anno = get_ann(Node)}.
+
+
+%% @spec is_c_seq(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% sequencing expression, otherwise <code>false</code>.
+%%
+%% @see c_seq/2
+
+-spec is_c_seq(cerl()) -> boolean().
+
+is_c_seq(#c_seq{}) ->
+ true;
+is_c_seq(_) ->
+ false.
+
+
+%% @spec seq_arg(c_seq()) -> cerl()
+%%
+%% @doc Returns the argument subtree of an abstract sequencing
+%% expression.
+%%
+%% @see c_seq/2
+
+-spec seq_arg(c_seq()) -> cerl().
+
+seq_arg(Node) ->
+ Node#c_seq.arg.
+
+
+%% @spec seq_body(c_seq()) -> cerl()
+%%
+%% @doc Returns the body subtree of an abstract sequencing expression.
+%%
+%% @see c_seq/2
+
+-spec seq_body(c_seq()) -> cerl().
+
+seq_body(Node) ->
+ Node#c_seq.body.
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_let(Variables::[c_var()], Argument::cerl(), Body::cerl()) ->
+%% c_let()
+%%
+%% @doc Creates an abstract let-expression. If <code>Variables</code>
+%% is <code>[V1, ..., Vn]</code>, the result represents "<code>let
+%% &lt;<em>V1</em>, ..., <em>Vn</em>&gt; = <em>Argument</em> in
+%% <em>Body</em></code>". All the <code>Vi</code> must have type
+%% <code>var</code>.
+%%
+%% @see ann_c_let/4
+%% @see update_c_let/4
+%% @see is_c_let/1
+%% @see let_vars/1
+%% @see let_arg/1
+%% @see let_body/1
+%% @see let_arity/1
+
+-spec c_let([c_var()], cerl(), cerl()) -> c_let().
+
+c_let(Variables, Argument, Body) ->
+ #c_let{vars = Variables, arg = Argument, body = Body}.
+
+
+%% @spec ann_c_let(As::anns(), Variables::[c_var()], Argument::cerl(),
+%%       Body::cerl()) -> c_let()
+%% @see c_let/3
+
+-spec ann_c_let(anns(), [c_var()], cerl(), cerl()) -> c_let().
+
+ann_c_let(As, Variables, Argument, Body) ->
+ #c_let{vars = Variables, arg = Argument, body = Body, anno = As}.
+
+
+%% @spec update_c_let(Old::c_let(), Variables::[c_var()], Argument::cerl(),
+%%       Body::cerl()) -> c_let()
+%% @see c_let/3
+
+-spec update_c_let(c_let(), [c_var()], cerl(), cerl()) -> c_let().
+
+update_c_let(Node, Variables, Argument, Body) ->
+ #c_let{vars = Variables, arg = Argument, body = Body,
+ anno = get_ann(Node)}.
+
+
+%% @spec is_c_let(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% let-expression, otherwise <code>false</code>.
+%%
+%% @see c_let/3
+
+-spec is_c_let(cerl()) -> boolean().
+
+is_c_let(#c_let{}) ->
+ true;
+is_c_let(_) ->
+ false.
+
+
+%% @spec let_vars(c_let()) -> [c_var()]
+%%
+%% @doc Returns the list of left-hand side variables of an abstract
+%% let-expression.
+%%
+%% @see c_let/3
+%% @see let_arity/1
+
+-spec let_vars(c_let()) -> [c_var()].
+
+let_vars(Node) ->
+ Node#c_let.vars.
+
+
+%% @spec let_arg(c_let()) -> cerl()
+%%
+%% @doc Returns the argument subtree of an abstract let-expression.
+%%
+%% @see c_let/3
+
+-spec let_arg(c_let()) -> cerl().
+
+let_arg(Node) ->
+ Node#c_let.arg.
+
+
+%% @spec let_body(c_let()) -> cerl()
+%%
+%% @doc Returns the body subtree of an abstract let-expression.
+%%
+%% @see c_let/3
+
+-spec let_body(c_let()) -> cerl().
+
+let_body(Node) ->
+ Node#c_let.body.
+
+
+%% @spec let_arity(Node::c_let()) -> non_neg_integer()
+%%
+%% @doc Returns the number of left-hand side variables of an abstract
+%% let-expression.
+%%
+%% <p>Note: this is equivalent to <code>length(let_vars(Node))</code>,
+%% but potentially more efficient.</p>
+%%
+%% @see c_let/3
+%% @see let_vars/1
+
+-spec let_arity(c_let()) -> non_neg_integer().
+
+let_arity(Node) ->
+ length(let_vars(Node)).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_letrec(Definitions::defs(), Body::cerl()) -> c_letrec()
+%%
+%% @doc Creates an abstract letrec-expression. If
+%% <code>Definitions</code> is <code>[{V1, F1}, ..., {Vn, Fn}]</code>,
+%% the result represents "<code>letrec <em>V1</em> = <em>F1</em>
+%% ... <em>Vn</em> = <em>Fn</em> in <em>Body</em></code>". All the
+%% <code>Vi</code> must have type <code>var</code> and represent
+%% function names. All the <code>Fi</code> must have type
+%% <code>'fun'</code>.
+%%
+%% @see ann_c_letrec/3
+%% @see update_c_letrec/3
+%% @see is_c_letrec/1
+%% @see letrec_defs/1
+%% @see letrec_body/1
+%% @see letrec_vars/1
+
+-spec c_letrec(defs(), cerl()) -> c_letrec().
+
+c_letrec(Defs, Body) ->
+ #c_letrec{defs = Defs, body = Body}.
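+
+%% For example, a letrec defining a local nullary function 'f'/0 and
+%% then calling it, i.e. "letrec 'f'/0 = fun () -> 'ok' in apply
+%% 'f'/0 ()", could be built like this (a sketch; function names are
+%% variables whose names are name/arity pairs):
+%% <pre>
+%%     F = c_var({f, 0}),
+%%     c_letrec([{F, c_fun([], c_atom(ok))}], c_apply(F, []))
+%% </pre>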
+
+
+%% @spec ann_c_letrec(As::anns(), Definitions::defs(),
+%% Body::cerl()) -> c_letrec()
+%% @see c_letrec/2
+
+-spec ann_c_letrec(anns(), defs(), cerl()) -> c_letrec().
+
+ann_c_letrec(As, Defs, Body) ->
+ #c_letrec{defs = Defs, body = Body, anno = As}.
+
+
+%% @spec update_c_letrec(Old::c_letrec(), Definitions::defs(),
+%% Body::cerl()) -> c_letrec()
+%% @see c_letrec/2
+
+-spec update_c_letrec(c_letrec(), defs(), cerl()) -> c_letrec().
+
+update_c_letrec(Node, Defs, Body) ->
+ #c_letrec{defs = Defs, body = Body, anno = get_ann(Node)}.
+
+
+%% @spec is_c_letrec(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% letrec-expression, otherwise <code>false</code>.
+%%
+%% @see c_letrec/2
+
+-spec is_c_letrec(cerl()) -> boolean().
+
+is_c_letrec(#c_letrec{}) ->
+ true;
+is_c_letrec(_) ->
+ false.
+
+
+%% @spec letrec_defs(Node::c_letrec()) -> defs()
+%%
+%% @doc Returns the list of definitions of an abstract
+%% letrec-expression. If <code>Node</code> represents "<code>letrec
+%% <em>V1</em> = <em>F1</em> ... <em>Vn</em> = <em>Fn</em> in
+%% <em>Body</em></code>", the returned value is <code>[{V1, F1}, ...,
+%% {Vn, Fn}]</code>.
+%%
+%% @see c_letrec/2
+
+-spec letrec_defs(c_letrec()) -> defs().
+
+letrec_defs(Node) ->
+ Node#c_letrec.defs.
+
+
+%% @spec letrec_body(c_letrec()) -> cerl()
+%%
+%% @doc Returns the body subtree of an abstract letrec-expression.
+%%
+%% @see c_letrec/2
+
+-spec letrec_body(c_letrec()) -> cerl().
+
+letrec_body(Node) ->
+ Node#c_letrec.body.
+
+
+%% @spec letrec_vars(c_letrec()) -> [cerl()]
+%%
+%% @doc Returns the list of left-hand side function variable subtrees
+%% of a letrec-expression. If <code>Node</code> represents
+%% "<code>letrec <em>V1</em> = <em>F1</em> ... <em>Vn</em> =
+%% <em>Fn</em> in <em>Body</em></code>", the returned value is
+%% <code>[V1, ..., Vn]</code>.
+%%
+%% @see c_letrec/2
+
+-spec letrec_vars(c_letrec()) -> [cerl()].
+
+letrec_vars(Node) ->
+ [F || {F, _} <- letrec_defs(Node)].
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_case(Argument::cerl(), Clauses::[cerl()]) -> c_case()
+%%
+%% @doc Creates an abstract case-expression. If <code>Clauses</code>
+%% is <code>[C1, ..., Cn]</code>, the result represents "<code>case
+%% <em>Argument</em> of <em>C1</em> ... <em>Cn</em>
+%% end</code>". <code>Clauses</code> must not be empty.
+%%
+%% @see ann_c_case/3
+%% @see update_c_case/3
+%% @see is_c_case/1
+%% @see c_clause/3
+%% @see case_arg/1
+%% @see case_clauses/1
+%% @see case_arity/1
+
+-spec c_case(cerl(), [cerl()]) -> c_case().
+
+c_case(Expr, Clauses) ->
+ #c_case{arg = Expr, clauses = Clauses}.
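+
+%% For example, a two-clause case over a boolean, i.e.
+%% "case X of <'true'> when 'true' -> 'yes'
+%%            <'false'> when 'true' -> 'no' end",
+%% can be built as follows (a usage sketch; c_clause/2 supplies the
+%% default guard 'true'):
+%% <pre>
+%%     c_case(c_var('X'),
+%%            [c_clause([c_atom(true)], c_atom(yes)),
+%%             c_clause([c_atom(false)], c_atom(no))])
+%% </pre>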
+
+
+%% @spec ann_c_case(As::anns(), Argument::cerl(),
+%% Clauses::[cerl()]) -> c_case()
+%% @see c_case/2
+
+-spec ann_c_case(anns(), cerl(), [cerl()]) -> c_case().
+
+ann_c_case(As, Expr, Clauses) ->
+ #c_case{arg = Expr, clauses = Clauses, anno = As}.
+
+
+%% @spec update_c_case(Old::cerl(), Argument::cerl(),
+%% Clauses::[cerl()]) -> c_case()
+%% @see c_case/2
+
+-spec update_c_case(c_case(), cerl(), [cerl()]) -> c_case().
+
+update_c_case(Node, Expr, Clauses) ->
+ #c_case{arg = Expr, clauses = Clauses, anno = get_ann(Node)}.
+
+
+%% @spec is_c_case(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% case-expression; otherwise <code>false</code>.
+%%
+%% @see c_case/2
+
+-spec is_c_case(cerl()) -> boolean().
+
+is_c_case(#c_case{}) ->
+ true;
+is_c_case(_) ->
+ false.
+
+
+%% @spec case_arg(c_case()) -> cerl()
+%%
+%% @doc Returns the argument subtree of an abstract case-expression.
+%%
+%% @see c_case/2
+
+-spec case_arg(c_case()) -> cerl().
+
+case_arg(Node) ->
+ Node#c_case.arg.
+
+
+%% @spec case_clauses(c_case()) -> [cerl()]
+%%
+%% @doc Returns the list of clause subtrees of an abstract
+%% case-expression.
+%%
+%% @see c_case/2
+%% @see case_arity/1
+
+-spec case_clauses(c_case()) -> [cerl()].
+
+case_clauses(Node) ->
+ Node#c_case.clauses.
+
+
+%% @spec case_arity(Node::c_case()) -> non_neg_integer()
+%%
+%% @doc Equivalent to
+%% <code>clause_arity(hd(case_clauses(Node)))</code>, but potentially
+%% more efficient.
+%%
+%% @see c_case/2
+%% @see case_clauses/1
+%% @see clause_arity/1
+
+-spec case_arity(c_case()) -> non_neg_integer().
+
+case_arity(Node) ->
+ clause_arity(hd(case_clauses(Node))).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_clause(Patterns::[cerl()], Body::cerl()) -> c_clause()
+%% @equiv c_clause(Patterns, c_atom(true), Body)
+%% @see c_atom/1
+
+-spec c_clause([cerl()], cerl()) -> c_clause().
+
+c_clause(Patterns, Body) ->
+ c_clause(Patterns, c_atom(true), Body).
+
+
+%% @spec c_clause(Patterns::[cerl()], Guard::cerl(), Body::cerl()) ->
+%% c_clause()
+%%
+%% @doc Creates an abstract clause. If <code>Patterns</code> is
+%% <code>[P1, ..., Pn]</code>, the result represents
+%% "<code>&lt;<em>P1</em>, ..., <em>Pn</em>&gt; when <em>Guard</em> ->
+%% <em>Body</em></code>".
+%%
+%% @see c_clause/2
+%% @see ann_c_clause/4
+%% @see update_c_clause/4
+%% @see is_c_clause/1
+%% @see c_case/2
+%% @see c_receive/3
+%% @see clause_pats/1
+%% @see clause_guard/1
+%% @see clause_body/1
+%% @see clause_arity/1
+%% @see clause_vars/1
+
+-spec c_clause([cerl()], cerl(), cerl()) -> c_clause().
+
+c_clause(Patterns, Guard, Body) ->
+ #c_clause{pats = Patterns, guard = Guard, body = Body}.
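+
+%% For example, a clause with an explicit guard, representing
+%% "<X> when call 'erlang':'is_integer'(X) -> X", could be built as
+%% follows (a sketch using constructors from this module):
+%% <pre>
+%%     X = c_var('X'),
+%%     c_clause([X], c_call(c_atom(erlang), c_atom(is_integer), [X]), X)
+%% </pre>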
+
+
+%% @spec ann_c_clause(As::anns(), Patterns::[cerl()],
+%% Body::cerl()) -> c_clause()
+%% @equiv ann_c_clause(As, Patterns, c_atom(true), Body)
+%% @see c_clause/3
+
+-spec ann_c_clause(anns(), [cerl()], cerl()) -> c_clause().
+
+ann_c_clause(As, Patterns, Body) ->
+ ann_c_clause(As, Patterns, c_atom(true), Body).
+
+
+%% @spec ann_c_clause(As::anns(), Patterns::[cerl()], Guard::cerl(),
+%% Body::cerl()) -> c_clause()
+%% @see ann_c_clause/3
+%% @see c_clause/3
+
+-spec ann_c_clause(anns(), [cerl()], cerl(), cerl()) -> c_clause().
+
+ann_c_clause(As, Patterns, Guard, Body) ->
+ #c_clause{pats = Patterns, guard = Guard, body = Body, anno = As}.
+
+
+%% @spec update_c_clause(Old::c_clause(), Patterns::[cerl()],
+%% Guard::cerl(), Body::cerl()) -> c_clause()
+%% @see c_clause/3
+
+-spec update_c_clause(c_clause(), [cerl()], cerl(), cerl()) -> c_clause().
+
+update_c_clause(Node, Patterns, Guard, Body) ->
+ #c_clause{pats = Patterns, guard = Guard, body = Body,
+ anno = get_ann(Node)}.
+
+
+%% @spec is_c_clause(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% clause, otherwise <code>false</code>.
+%%
+%% @see c_clause/3
+
+-spec is_c_clause(cerl()) -> boolean().
+
+is_c_clause(#c_clause{}) ->
+ true;
+is_c_clause(_) ->
+ false.
+
+
+%% @spec clause_pats(c_clause()) -> [cerl()]
+%%
+%% @doc Returns the list of pattern subtrees of an abstract clause.
+%%
+%% @see c_clause/3
+%% @see clause_arity/1
+
+-spec clause_pats(c_clause()) -> [cerl()].
+
+clause_pats(Node) ->
+ Node#c_clause.pats.
+
+
+%% @spec clause_guard(c_clause()) -> cerl()
+%%
+%% @doc Returns the guard subtree of an abstract clause.
+%%
+%% @see c_clause/3
+
+-spec clause_guard(c_clause()) -> cerl().
+
+clause_guard(Node) ->
+ Node#c_clause.guard.
+
+
+%% @spec clause_body(c_clause()) -> cerl()
+%%
+%% @doc Returns the body subtree of an abstract clause.
+%%
+%% @see c_clause/3
+
+-spec clause_body(c_clause()) -> cerl().
+
+clause_body(Node) ->
+ Node#c_clause.body.
+
+
+%% @spec clause_arity(Node::c_clause()) -> non_neg_integer()
+%%
+%% @doc Returns the number of pattern subtrees of an abstract clause.
+%%
+%% <p>Note: this is equivalent to
+%% <code>length(clause_pats(Node))</code>, but potentially more
+%% efficient.</p>
+%%
+%% @see c_clause/3
+%% @see clause_pats/1
+
+-spec clause_arity(c_clause()) -> non_neg_integer().
+
+clause_arity(Node) ->
+ length(clause_pats(Node)).
+
+
+%% @spec clause_vars(c_clause()) -> [cerl()]
+%%
+%% @doc Returns the list of all abstract variables in the patterns of
+%% an abstract clause. The order of listing is not defined.
+%%
+%% @see c_clause/3
+%% @see pat_list_vars/1
+
+-spec clause_vars(c_clause()) -> [cerl()].
+
+clause_vars(Clause) ->
+ pat_list_vars(clause_pats(Clause)).
+
+
+%% @spec pat_vars(Pattern::cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of all abstract variables in a pattern. An
+%% exception is thrown if <code>Pattern</code> does not represent a
+%% well-formed Core Erlang clause pattern. The order of listing is not
+%% defined.
+%%
+%% @see pat_list_vars/1
+%% @see clause_vars/1
+
+-spec pat_vars(cerl()) -> [cerl()].
+
+pat_vars(Node) ->
+ pat_vars(Node, []).
+
+pat_vars(Node, Vs) ->
+ case type(Node) of
+ var ->
+ [Node | Vs];
+ literal ->
+ Vs;
+ cons ->
+ pat_vars(cons_hd(Node), pat_vars(cons_tl(Node), Vs));
+ tuple ->
+ pat_list_vars(tuple_es(Node), Vs);
+ map ->
+ pat_list_vars(map_es(Node), Vs);
+ map_pair ->
+ %% The key (map_pair_key/1) is not a pattern variable, so it is excluded.
+ pat_list_vars([map_pair_op(Node),map_pair_val(Node)],Vs);
+ binary ->
+ pat_list_vars(binary_segments(Node), Vs);
+ bitstr ->
+ %% The size (bitstr_size/1) is not a pattern variable, so it is excluded.
+ pat_vars(bitstr_val(Node), Vs);
+ alias ->
+ pat_vars(alias_pat(Node), [alias_var(Node) | Vs])
+ end.
+
+
+%% @spec pat_list_vars(Patterns::[cerl()]) -> [cerl()]
+%%
+%% @doc Returns the list of all abstract variables in the given
+%% patterns. An exception is thrown if some element in
+%% <code>Patterns</code> does not represent a well-formed Core Erlang
+%% clause pattern. The order of listing is not defined.
+%%
+%% @see pat_vars/1
+%% @see clause_vars/1
+
+-spec pat_list_vars([cerl()]) -> [cerl()].
+
+pat_list_vars(Ps) ->
+ pat_list_vars(Ps, []).
+
+pat_list_vars([P | Ps], Vs) ->
+ pat_list_vars(Ps, pat_vars(P, Vs));
+pat_list_vars([], Vs) ->
+ Vs.
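+
+%% For instance (a small sketch using constructors from this module):
+%% <pre>
+%%     pat_list_vars([c_tuple([c_var('X'), c_var('Y')]), c_var('Z')])
+%% </pre>
+%% returns the three variable nodes for X, Y and Z; as stated above,
+%% the order of listing is not defined.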
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_alias(Variable::c_var(), Pattern::cerl()) -> c_alias()
+%%
+%% @doc Creates an abstract pattern alias. The result represents
+%% "<code><em>Variable</em> = <em>Pattern</em></code>".
+%%
+%% @see ann_c_alias/3
+%% @see update_c_alias/3
+%% @see is_c_alias/1
+%% @see alias_var/1
+%% @see alias_pat/1
+%% @see c_clause/3
+
+-spec c_alias(c_var(), cerl()) -> c_alias().
+
+c_alias(Var, Pattern) ->
+ #c_alias{var = Var, pat = Pattern}.
+
+
+%% @spec ann_c_alias(As::anns(), Variable::c_var(),
+%% Pattern::cerl()) -> c_alias()
+%% @see c_alias/2
+
+-spec ann_c_alias(anns(), c_var(), cerl()) -> c_alias().
+
+ann_c_alias(As, Var, Pattern) ->
+ #c_alias{var = Var, pat = Pattern, anno = As}.
+
+
+%% @spec update_c_alias(Old::cerl(), Variable::c_var(),
+%% Pattern::cerl()) -> c_alias()
+%% @see c_alias/2
+
+-spec update_c_alias(c_alias(), c_var(), cerl()) -> c_alias().
+
+update_c_alias(Node, Var, Pattern) ->
+ #c_alias{var = Var, pat = Pattern, anno = get_ann(Node)}.
+
+
+%% @spec is_c_alias(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% pattern alias, otherwise <code>false</code>.
+%%
+%% @see c_alias/2
+
+-spec is_c_alias(cerl()) -> boolean().
+
+is_c_alias(#c_alias{}) ->
+ true;
+is_c_alias(_) ->
+ false.
+
+
+%% @spec alias_var(c_alias()) -> c_var()
+%%
+%% @doc Returns the variable subtree of an abstract pattern alias.
+%%
+%% @see c_alias/2
+
+-spec alias_var(c_alias()) -> c_var().
+
+alias_var(Node) ->
+ Node#c_alias.var.
+
+
+%% @spec alias_pat(c_alias()) -> cerl()
+%%
+%% @doc Returns the pattern subtree of an abstract pattern alias.
+%%
+%% @see c_alias/2
+
+-spec alias_pat(c_alias()) -> cerl().
+
+alias_pat(Node) ->
+ Node#c_alias.pat.
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_receive(Clauses::[cerl()]) -> c_receive()
+%% @equiv c_receive(Clauses, c_atom(infinity), c_atom(true))
+%% @see c_atom/1
+
+-spec c_receive([cerl()]) -> c_receive().
+
+c_receive(Clauses) ->
+ c_receive(Clauses, c_atom(infinity), c_atom(true)).
+
+
+%% @spec c_receive(Clauses::[cerl()], Timeout::cerl(),
+%% Action::cerl()) -> c_receive()
+%%
+%% @doc Creates an abstract receive-expression. If
+%% <code>Clauses</code> is <code>[C1, ..., Cn]</code>, the result
+%% represents "<code>receive <em>C1</em> ... <em>Cn</em> after
+%% <em>Timeout</em> -> <em>Action</em> end</code>".
+%%
+%% @see c_receive/1
+%% @see ann_c_receive/4
+%% @see update_c_receive/4
+%% @see is_c_receive/1
+%% @see receive_clauses/1
+%% @see receive_timeout/1
+%% @see receive_action/1
+
+-spec c_receive([cerl()], cerl(), cerl()) -> c_receive().
+
+c_receive(Clauses, Timeout, Action) ->
+ #c_receive{clauses = Clauses, timeout = Timeout, action = Action}.
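+
+%% For example, the Core Erlang code
+%% "receive <Msg> when 'true' -> Msg after 1000 -> 'timeout' end"
+%% can be built like this (a usage sketch):
+%% <pre>
+%%     c_receive([c_clause([c_var('Msg')], c_var('Msg'))],
+%%               c_int(1000), c_atom(timeout))
+%% </pre>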
+
+
+%% @spec ann_c_receive(As::anns(), Clauses::[cerl()]) -> c_receive()
+%% @equiv ann_c_receive(As, Clauses, c_atom(infinity), c_atom(true))
+%% @see c_receive/3
+%% @see c_atom/1
+
+-spec ann_c_receive(anns(), [cerl()]) -> c_receive().
+
+ann_c_receive(As, Clauses) ->
+ ann_c_receive(As, Clauses, c_atom(infinity), c_atom(true)).
+
+
+%% @spec ann_c_receive(As::anns(), Clauses::[cerl()],
+%% Timeout::cerl(), Action::cerl()) -> c_receive()
+%% @see ann_c_receive/2
+%% @see c_receive/3
+
+-spec ann_c_receive(anns(), [cerl()], cerl(), cerl()) -> c_receive().
+
+ann_c_receive(As, Clauses, Timeout, Action) ->
+ #c_receive{clauses = Clauses, timeout = Timeout, action = Action,
+ anno = As}.
+
+
+%% @spec update_c_receive(Old::cerl(), Clauses::[cerl()],
+%% Timeout::cerl(), Action::cerl()) -> c_receive()
+%% @see c_receive/3
+
+-spec update_c_receive(c_receive(), [cerl()], cerl(), cerl()) -> c_receive().
+
+update_c_receive(Node, Clauses, Timeout, Action) ->
+ #c_receive{clauses = Clauses, timeout = Timeout, action = Action,
+ anno = get_ann(Node)}.
+
+
+%% @spec is_c_receive(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% receive-expression, otherwise <code>false</code>.
+%%
+%% @see c_receive/3
+
+-spec is_c_receive(cerl()) -> boolean().
+
+is_c_receive(#c_receive{}) ->
+ true;
+is_c_receive(_) ->
+ false.
+
+
+%% @spec receive_clauses(c_receive()) -> [cerl()]
+%%
+%% @doc Returns the list of clause subtrees of an abstract
+%% receive-expression.
+%%
+%% @see c_receive/3
+
+-spec receive_clauses(c_receive()) -> [cerl()].
+
+receive_clauses(Node) ->
+ Node#c_receive.clauses.
+
+
+%% @spec receive_timeout(c_receive()) -> cerl()
+%%
+%% @doc Returns the timeout subtree of an abstract receive-expression.
+%%
+%% @see c_receive/3
+
+-spec receive_timeout(c_receive()) -> cerl().
+
+receive_timeout(Node) ->
+ Node#c_receive.timeout.
+
+
+%% @spec receive_action(c_receive()) -> cerl()
+%%
+%% @doc Returns the action subtree of an abstract receive-expression.
+%%
+%% @see c_receive/3
+
+-spec receive_action(c_receive()) -> cerl().
+
+receive_action(Node) ->
+ Node#c_receive.action.
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_apply(Operator::c_var(), Arguments::[cerl()]) -> c_apply()
+%%
+%% @doc Creates an abstract function application. If
+%% <code>Arguments</code> is <code>[A1, ..., An]</code>, the result
+%% represents "<code>apply <em>Operator</em>(<em>A1</em>, ...,
+%% <em>An</em>)</code>".
+%%
+%% @see ann_c_apply/3
+%% @see update_c_apply/3
+%% @see is_c_apply/1
+%% @see apply_op/1
+%% @see apply_args/1
+%% @see apply_arity/1
+%% @see c_call/3
+%% @see c_primop/2
+
+-spec c_apply(c_var(), [cerl()]) -> c_apply().
+
+c_apply(Operator, Arguments) ->
+ #c_apply{op = Operator, args = Arguments}.
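+
+%% For example, applying the local function variable 'f'/1 to a single
+%% argument, i.e. "apply 'f'/1(17)", could be built as follows (a
+%% sketch; the operator is a variable whose name is a name/arity pair):
+%% <pre>
+%%     c_apply(c_var({f, 1}), [c_int(17)])
+%% </pre>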
+
+
+%% @spec ann_c_apply(As::anns(), Operator::c_var(),
+%% Arguments::[cerl()]) -> c_apply()
+%% @see c_apply/2
+
+-spec ann_c_apply(anns(), c_var(), [cerl()]) -> c_apply().
+
+ann_c_apply(As, Operator, Arguments) ->
+ #c_apply{op = Operator, args = Arguments, anno = As}.
+
+
+%% @spec update_c_apply(Old::c_apply(), Operator::cerl(),
+%% Arguments::[cerl()]) -> c_apply()
+%% @see c_apply/2
+
+-spec update_c_apply(c_apply(), c_var(), [cerl()]) -> c_apply().
+
+update_c_apply(Node, Operator, Arguments) ->
+ #c_apply{op = Operator, args = Arguments, anno = get_ann(Node)}.
+
+
+%% @spec is_c_apply(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% function application, otherwise <code>false</code>.
+%%
+%% @see c_apply/2
+
+-spec is_c_apply(cerl()) -> boolean().
+
+is_c_apply(#c_apply{}) ->
+ true;
+is_c_apply(_) ->
+ false.
+
+
+%% @spec apply_op(c_apply()) -> c_var()
+%%
+%% @doc Returns the operator subtree of an abstract function
+%% application.
+%%
+%% @see c_apply/2
+
+-spec apply_op(c_apply()) -> c_var().
+
+apply_op(Node) ->
+ Node#c_apply.op.
+
+
+%% @spec apply_args(c_apply()) -> [cerl()]
+%%
+%% @doc Returns the list of argument subtrees of an abstract function
+%% application.
+%%
+%% @see c_apply/2
+%% @see apply_arity/1
+
+-spec apply_args(c_apply()) -> [cerl()].
+
+apply_args(Node) ->
+ Node#c_apply.args.
+
+
+%% @spec apply_arity(Node::c_apply()) -> arity()
+%%
+%% @doc Returns the number of argument subtrees of an abstract
+%% function application.
+%%
+%% <p>Note: this is equivalent to
+%% <code>length(apply_args(Node))</code>, but potentially more
+%% efficient.</p>
+%%
+%% @see c_apply/2
+%% @see apply_args/1
+
+-spec apply_arity(c_apply()) -> arity().
+
+apply_arity(Node) ->
+ length(apply_args(Node)).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_call(Module::cerl(), Name::cerl(), Arguments::[cerl()]) ->
+%% c_call()
+%%
+%% @doc Creates an abstract inter-module call. If
+%% <code>Arguments</code> is <code>[A1, ..., An]</code>, the result
+%% represents "<code>call <em>Module</em>:<em>Name</em>(<em>A1</em>,
+%% ..., <em>An</em>)</code>".
+%%
+%% @see ann_c_call/4
+%% @see update_c_call/4
+%% @see is_c_call/1
+%% @see call_module/1
+%% @see call_name/1
+%% @see call_args/1
+%% @see call_arity/1
+%% @see c_apply/2
+%% @see c_primop/2
+
+-spec c_call(cerl(), cerl(), [cerl()]) -> c_call().
+
+c_call(Module, Name, Arguments) ->
+ #c_call{module = Module, name = Name, args = Arguments}.
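+
+%% For example, the remote call "call 'erlang':'+'(X, 1)" can be built
+%% as follows (a usage sketch):
+%% <pre>
+%%     c_call(c_atom(erlang), c_atom('+'), [c_var('X'), c_int(1)])
+%% </pre>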
+
+
+%% @spec ann_c_call(As::anns(), Module::cerl(), Name::cerl(),
+%% Arguments::[cerl()]) -> c_call()
+%% @see c_call/3
+
+-spec ann_c_call(anns(), cerl(), cerl(), [cerl()]) -> c_call().
+
+ann_c_call(As, Module, Name, Arguments) ->
+ #c_call{module = Module, name = Name, args = Arguments, anno = As}.
+
+
+%% @spec update_c_call(Old::cerl(), Module::cerl(), Name::cerl(),
+%% Arguments::[cerl()]) -> c_call()
+%% @see c_call/3
+
+-spec update_c_call(cerl(), cerl(), cerl(), [cerl()]) -> c_call().
+
+update_c_call(Node, Module, Name, Arguments) ->
+ #c_call{module = Module, name = Name, args = Arguments,
+ anno = get_ann(Node)}.
+
+
+%% @spec is_c_call(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% inter-module call expression; otherwise <code>false</code>.
+%%
+%% @see c_call/3
+
+-spec is_c_call(cerl()) -> boolean().
+
+is_c_call(#c_call{}) ->
+ true;
+is_c_call(_) ->
+ false.
+
+
+%% @spec call_module(c_call()) -> cerl()
+%%
+%% @doc Returns the module subtree of an abstract inter-module call.
+%%
+%% @see c_call/3
+
+-spec call_module(c_call()) -> cerl().
+
+call_module(Node) ->
+ Node#c_call.module.
+
+
+%% @spec call_name(c_call()) -> cerl()
+%%
+%% @doc Returns the name subtree of an abstract inter-module call.
+%%
+%% @see c_call/3
+
+-spec call_name(c_call()) -> cerl().
+
+call_name(Node) ->
+ Node#c_call.name.
+
+
+%% @spec call_args(c_call()) -> [cerl()]
+%%
+%% @doc Returns the list of argument subtrees of an abstract
+%% inter-module call.
+%%
+%% @see c_call/3
+%% @see call_arity/1
+
+-spec call_args(c_call()) -> [cerl()].
+
+call_args(Node) ->
+ Node#c_call.args.
+
+
+%% @spec call_arity(Node::c_call()) -> arity()
+%%
+%% @doc Returns the number of argument subtrees of an abstract
+%% inter-module call.
+%%
+%% <p>Note: this is equivalent to
+%% <code>length(call_args(Node))</code>, but potentially more
+%% efficient.</p>
+%%
+%% @see c_call/3
+%% @see call_args/1
+
+-spec call_arity(c_call()) -> arity().
+
+call_arity(Node) ->
+ length(call_args(Node)).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_primop(Name::c_literal(), Arguments::[cerl()]) -> c_primop()
+%%
+%% @doc Creates an abstract primitive operation call. If
+%% <code>Arguments</code> is <code>[A1, ..., An]</code>, the result
+%% represents "<code>primop <em>Name</em>(<em>A1</em>, ...,
+%% <em>An</em>)</code>". <code>Name</code> must be an atom literal.
+%%
+%% @see ann_c_primop/3
+%% @see update_c_primop/3
+%% @see is_c_primop/1
+%% @see primop_name/1
+%% @see primop_args/1
+%% @see primop_arity/1
+%% @see c_apply/2
+%% @see c_call/3
+
+-spec c_primop(c_literal(), [cerl()]) -> c_primop().
+
+c_primop(Name, Arguments) ->
+ #c_primop{name = Name, args = Arguments}.
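+
+%% For instance, a primop call such as
+%% "primop 'match_fail'({'badmatch', X})" could be built as follows
+%% (an illustrative sketch; the primop name is only an example):
+%% <pre>
+%%     c_primop(c_atom(match_fail),
+%%              [c_tuple([c_atom(badmatch), c_var('X')])])
+%% </pre>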
+
+
+%% @spec ann_c_primop(As::anns(), Name::c_literal(),
+%% Arguments::[cerl()]) -> c_primop()
+%% @see c_primop/2
+
+-spec ann_c_primop(anns(), c_literal(), [cerl()]) -> c_primop().
+
+ann_c_primop(As, Name, Arguments) ->
+ #c_primop{name = Name, args = Arguments, anno = As}.
+
+
+%% @spec update_c_primop(Old::cerl(), Name::c_literal(),
+%% Arguments::[cerl()]) -> c_primop()
+%% @see c_primop/2
+
+-spec update_c_primop(cerl(), c_literal(), [cerl()]) -> c_primop().
+
+update_c_primop(Node, Name, Arguments) ->
+ #c_primop{name = Name, args = Arguments, anno = get_ann(Node)}.
+
+
+%% @spec is_c_primop(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% primitive operation call, otherwise <code>false</code>.
+%%
+%% @see c_primop/2
+
+-spec is_c_primop(cerl()) -> boolean().
+
+is_c_primop(#c_primop{}) ->
+ true;
+is_c_primop(_) ->
+ false.
+
+
+%% @spec primop_name(c_primop()) -> c_literal()
+%%
+%% @doc Returns the name subtree of an abstract primitive operation
+%% call.
+%%
+%% @see c_primop/2
+
+-spec primop_name(c_primop()) -> c_literal().
+
+primop_name(Node) ->
+ Node#c_primop.name.
+
+
+%% @spec primop_args(c_primop()) -> [cerl()]
+%%
+%% @doc Returns the list of argument subtrees of an abstract primitive
+%% operation call.
+%%
+%% @see c_primop/2
+%% @see primop_arity/1
+
+-spec primop_args(c_primop()) -> [cerl()].
+
+primop_args(Node) ->
+ Node#c_primop.args.
+
+
+%% @spec primop_arity(Node::c_primop()) -> arity()
+%%
+%% @doc Returns the number of argument subtrees of an abstract
+%% primitive operation call.
+%%
+%% <p>Note: this is equivalent to
+%% <code>length(primop_args(Node))</code>, but potentially more
+%% efficient.</p>
+%%
+%% @see c_primop/2
+%% @see primop_args/1
+
+-spec primop_arity(c_primop()) -> arity().
+
+primop_arity(Node) ->
+ length(primop_args(Node)).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_try(Argument::cerl(), Variables::[c_var()], Body::cerl(),
+%% ExceptionVars::[c_var()], Handler::cerl()) -> c_try()
+%%
+%% @doc Creates an abstract try-expression. If <code>Variables</code> is
+%% <code>[V1, ..., Vn]</code> and <code>ExceptionVars</code> is
+%% <code>[X1, ..., Xm]</code>, the result represents "<code>try
+%% <em>Argument</em> of &lt;<em>V1</em>, ..., <em>Vn</em>&gt; ->
+%% <em>Body</em> catch &lt;<em>X1</em>, ..., <em>Xm</em>&gt; ->
+%% <em>Handler</em></code>". All the <code>Vi</code> and <code>Xi</code>
+%% must have type <code>var</code>.
+%%
+%% @see ann_c_try/6
+%% @see update_c_try/6
+%% @see is_c_try/1
+%% @see try_arg/1
+%% @see try_vars/1
+%% @see try_body/1
+%% @see c_catch/1
+
+-spec c_try(cerl(), [c_var()], cerl(), [c_var()], cerl()) -> c_try().
+
+c_try(Expr, Vs, Body, Evs, Handler) ->
+ #c_try{arg = Expr, vars = Vs, body = Body,
+ evars = Evs, handler = Handler}.
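+
+%% For example, given some expression Arg, the Core Erlang code
+%% "try Arg of <V> -> V catch <Class, Reason, Stk> -> 'caught'"
+%% could be built as follows (a sketch; using three exception
+%% variables mirrors common compiler output, but any number is
+%% accepted here):
+%% <pre>
+%%     c_try(Arg, [c_var('V')], c_var('V'),
+%%           [c_var('Class'), c_var('Reason'), c_var('Stk')],
+%%           c_atom(caught))
+%% </pre>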
+
+
+%% @spec ann_c_try(As::anns(), Expression::cerl(),
+%% Variables::[c_var()], Body::cerl(),
+%% EVars::[c_var()], Handler::cerl()) -> c_try()
+%% @see c_try/5
+
+-spec ann_c_try(anns(), cerl(), [c_var()], cerl(), [c_var()], cerl()) ->
+ c_try().
+
+ann_c_try(As, Expr, Vs, Body, Evs, Handler) ->
+ #c_try{arg = Expr, vars = Vs, body = Body,
+ evars = Evs, handler = Handler, anno = As}.
+
+
+%% @spec update_c_try(Old::c_try(), Expression::cerl(),
+%% Variables::[c_var()], Body::cerl(),
+%% EVars::[c_var()], Handler::cerl()) -> c_try()
+%% @see c_try/5
+
+-spec update_c_try(c_try(), cerl(), [c_var()], cerl(), [c_var()], cerl()) ->
+ c_try().
+
+update_c_try(Node, Expr, Vs, Body, Evs, Handler) ->
+ #c_try{arg = Expr, vars = Vs, body = Body,
+ evars = Evs, handler = Handler, anno = get_ann(Node)}.
+
+
+%% @spec is_c_try(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% try-expression, otherwise <code>false</code>.
+%%
+%% @see c_try/5
+
+-spec is_c_try(cerl()) -> boolean().
+
+is_c_try(#c_try{}) ->
+ true;
+is_c_try(_) ->
+ false.
+
+
+%% @spec try_arg(c_try()) -> cerl()
+%%
+%% @doc Returns the expression subtree of an abstract try-expression.
+%%
+%% @see c_try/5
+
+-spec try_arg(c_try()) -> cerl().
+
+try_arg(Node) ->
+ Node#c_try.arg.
+
+
+%% @spec try_vars(c_try()) -> [c_var()]
+%%
+%% @doc Returns the list of success variable subtrees of an abstract
+%% try-expression.
+%%
+%% @see c_try/5
+
+-spec try_vars(c_try()) -> [c_var()].
+
+try_vars(Node) ->
+ Node#c_try.vars.
+
+
+%% @spec try_body(c_try()) -> cerl()
+%%
+%% @doc Returns the success body subtree of an abstract try-expression.
+%%
+%% @see c_try/5
+
+-spec try_body(c_try()) -> cerl().
+
+try_body(Node) ->
+ Node#c_try.body.
+
+
+%% @spec try_evars(c_try()) -> [c_var()]
+%%
+%% @doc Returns the list of exception variable subtrees of an abstract
+%% try-expression.
+%%
+%% @see c_try/5
+
+-spec try_evars(c_try()) -> [c_var()].
+
+try_evars(Node) ->
+ Node#c_try.evars.
+
+
+%% @spec try_handler(c_try()) -> cerl()
+%%
+%% @doc Returns the exception body subtree of an abstract
+%% try-expression.
+%%
+%% @see c_try/5
+
+-spec try_handler(c_try()) -> cerl().
+
+try_handler(Node) ->
+ Node#c_try.handler.
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec c_catch(Body::cerl()) -> c_catch()
+%%
+%% @doc Creates an abstract catch-expression. The result represents
+%% "<code>catch <em>Body</em></code>".
+%%
+%% <p>Note: catch-expressions can be rewritten as try-expressions, and
+%% will eventually be removed from Core Erlang.</p>
+%%
+%% @see ann_c_catch/2
+%% @see update_c_catch/2
+%% @see is_c_catch/1
+%% @see catch_body/1
+%% @see c_try/5
+
+-spec c_catch(cerl()) -> c_catch().
+
+c_catch(Body) ->
+ #c_catch{body = Body}.
+
+
+%% @spec ann_c_catch(As::anns(), Body::cerl()) -> c_catch()
+%% @see c_catch/1
+
+-spec ann_c_catch(anns(), cerl()) -> c_catch().
+
+ann_c_catch(As, Body) ->
+ #c_catch{body = Body, anno = As}.
+
+
+%% @spec update_c_catch(Old::c_catch(), Body::cerl()) -> c_catch()
+%% @see c_catch/1
+
+-spec update_c_catch(c_catch(), cerl()) -> c_catch().
+
+update_c_catch(Node, Body) ->
+ #c_catch{body = Body, anno = get_ann(Node)}.
+
+
+%% @spec is_c_catch(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% catch-expression, otherwise <code>false</code>.
+%%
+%% @see c_catch/1
+
+-spec is_c_catch(cerl()) -> boolean().
+
+is_c_catch(#c_catch{}) ->
+ true;
+is_c_catch(_) ->
+ false.
+
+
+%% @spec catch_body(Node::c_catch()) -> cerl()
+%%
+%% @doc Returns the body subtree of an abstract catch-expression.
+%%
+%% @see c_catch/1
+
+-spec catch_body(c_catch()) -> cerl().
+
+catch_body(Node) ->
+ Node#c_catch.body.
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec to_records(Tree::cerl()) -> record(record_types())
+%%
+%% @doc Translates an abstract syntax tree to a corresponding explicit
+%% record representation. The records are defined in the file
+%% "<code>cerl.hrl</code>".
+%%
+%% @see type/1
+%% @see from_records/1
+
+-spec to_records(cerl()) -> cerl().
+
+to_records(Node) ->
+ Node.
+
+%% @spec from_records(Tree::record(record_types())) -> cerl()
+%%
+%% record_types() = c_alias | c_apply | c_binary | c_bitstr | c_call |
+%% c_case | c_catch | c_clause | c_cons | c_fun |
+%% c_let | c_letrec | c_literal | c_map | c_map_pair |
+%% c_module | c_primop | c_receive | c_seq |
+%% c_try | c_tuple | c_values | c_var
+%%
+%% @doc Translates an explicit record representation to a
+%% corresponding abstract syntax tree. The records are defined in the
+%% file "<code>core_parse.hrl</code>".
+%%
+%% @see type/1
+%% @see to_records/1
+
+-spec from_records(cerl()) -> cerl().
+
+from_records(Node) ->
+ Node.
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec is_data(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> represents a
+%% data constructor, otherwise <code>false</code>. Data constructors
+%% are cons cells, tuples, and atomic literals.
+%%
+%% @see data_type/1
+%% @see data_es/1
+%% @see data_arity/1
+
+-spec is_data(cerl()) -> boolean().
+
+is_data(#c_literal{}) ->
+ true;
+is_data(#c_cons{}) ->
+ true;
+is_data(#c_tuple{}) ->
+ true;
+is_data(_) ->
+ false.
+
+
+%% @spec data_type(Node::cerl()) -> dtype()
+%%
+%% dtype() = cons | tuple | {atomic, Value}
+%% Value = integer() | float() | atom() | []
+%%
+%% @doc Returns a type descriptor for a data constructor
+%% node. (Cf. <code>is_data/1</code>.) This is mainly useful for
+%% comparing types and for constructing new nodes of the same type
+%% (cf. <code>make_data/2</code>). If <code>Node</code> represents an
+%% integer, floating-point number, atom or empty list, the result is
+%% <code>{atomic, Value}</code>, where <code>Value</code> is the value
+%% of <code>concrete(Node)</code>, otherwise the result is either
+%% <code>cons</code> or <code>tuple</code>.
+%%
+%% <p>Type descriptors can be compared for equality or order (in the
+%% Erlang term order), but remember that floating-point values should
+%% in general never be tested for equality.</p>
+%%
+%% @see is_data/1
+%% @see make_data/2
+%% @see type/1
+%% @see concrete/1
+
+-type value() :: integer() | float() | atom() | [].
+-type dtype() :: 'cons' | 'tuple' | {'atomic', value()}.
+-type c_lct() :: c_literal() | c_cons() | c_tuple().
+
+-spec data_type(c_lct()) -> dtype().
+
+data_type(#c_literal{val = V}) ->
+ case V of
+ [_ | _] ->
+ cons;
+ _ when is_tuple(V) ->
+ tuple;
+ _ ->
+ {atomic, V}
+ end;
+data_type(#c_cons{}) ->
+ cons;
+data_type(#c_tuple{}) ->
+ tuple.
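+
+%% For example (a sketch using <code>abstract/1</code> and the
+%% constructors of this module):
+%% <pre>
+%%     data_type(abstract(42))                      %% {atomic, 42}
+%%     data_type(c_cons(abstract(1), c_nil()))      %% cons
+%%     data_type(c_tuple([c_atom(a), c_int(1)]))    %% tuple
+%% </pre>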
+
+%% @spec data_es(Node::cerl()) -> [cerl()]
+%%
+%% @doc Returns the list of subtrees of a data constructor node. If
+%% the arity of the constructor is zero, the result is the empty list.
+%%
+%% <p>Note: if <code>data_type(Node)</code> is <code>cons</code>, the
+%% number of subtrees is exactly two. If <code>data_type(Node)</code>
+%% is <code>{atomic, Value}</code>, the number of subtrees is
+%% zero.</p>
+%%
+%% @see is_data/1
+%% @see data_type/1
+%% @see data_arity/1
+%% @see make_data/2
+
+-spec data_es(c_lct()) -> [cerl()].
+
+data_es(#c_literal{val = V}) ->
+ case V of
+ [Head | Tail] ->
+ [#c_literal{val = Head}, #c_literal{val = Tail}];
+ _ when is_tuple(V) ->
+ make_lit_list(tuple_to_list(V));
+ _ ->
+ []
+ end;
+data_es(#c_cons{hd = H, tl = T}) ->
+ [H, T];
+data_es(#c_tuple{es = Es}) ->
+ Es.
+
+%% @spec data_arity(Node::cerl()) -> non_neg_integer()
+%%
+%% @doc Returns the number of subtrees of a data constructor
+%% node. This is equivalent to <code>length(data_es(Node))</code>, but
+%% potentially more efficient.
+%%
+%% @see is_data/1
+%% @see data_es/1
+
+-spec data_arity(c_lct()) -> non_neg_integer().
+
+data_arity(#c_literal{val = V}) ->
+ case V of
+ [_ | _] ->
+ 2;
+ _ when is_tuple(V) ->
+ tuple_size(V);
+ _ ->
+ 0
+ end;
+data_arity(#c_cons{}) ->
+ 2;
+data_arity(#c_tuple{es = Es}) ->
+ length(Es).
+
+
+%% @spec make_data(Type::dtype(), Elements::[cerl()]) -> cerl()
+%%
+%% @doc Creates a data constructor node with the specified type and
+%% subtrees. (Cf. <code>data_type/1</code>.) An exception is thrown
+%% if the length of <code>Elements</code> is invalid for the given
+%% <code>Type</code>; see <code>data_es/1</code> for arity constraints
+%% on constructor types.
+%%
+%% @see data_type/1
+%% @see data_es/1
+%% @see ann_make_data/3
+%% @see update_data/3
+%% @see make_data_skel/2
+
+-spec make_data(dtype(), [cerl()]) -> c_lct().
+
+make_data(CType, Es) ->
+ ann_make_data([], CType, Es).
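+
+%% For example (a usage sketch):
+%% <pre>
+%%     make_data({atomic, 42}, [])                %% the literal 42
+%%     make_data(cons, [c_int(1), c_nil()])       %% the list [1]
+%%     make_data(tuple, [c_atom(a), c_int(1)])    %% the tuple {'a', 1}
+%% </pre>
+%% Passing an element list whose length does not match the type, such
+%% as <code>make_data(cons, [c_int(1)])</code>, causes an exception.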
+
+
+%% @spec ann_make_data(As::anns(), Type::dtype(),
+%% Elements::[cerl()]) -> cerl()
+%% @see make_data/2
+
+-spec ann_make_data(anns(), dtype(), [cerl()]) -> c_lct().
+
+ann_make_data(As, {atomic, V}, []) -> #c_literal{val = V, anno = As};
+ann_make_data(As, cons, [H, T]) -> ann_c_cons(As, H, T);
+ann_make_data(As, tuple, Es) -> ann_c_tuple(As, Es).
+
+%% @spec update_data(Old::cerl(), Type::dtype(),
+%% Elements::[cerl()]) -> cerl()
+%% @see make_data/2
+
+-spec update_data(cerl(), dtype(), [cerl()]) -> c_lct().
+
+update_data(Node, CType, Es) ->
+ ann_make_data(get_ann(Node), CType, Es).
+
+
+%% @spec make_data_skel(Type::dtype(), Elements::[cerl()]) -> cerl()
+%%
+%% @doc Like <code>make_data/2</code>, but analogous to
+%% <code>c_tuple_skel/1</code> and <code>c_cons_skel/2</code>.
+%%
+%% @see ann_make_data_skel/3
+%% @see update_data_skel/3
+%% @see make_data/2
+%% @see c_tuple_skel/1
+%% @see c_cons_skel/2
+
+-spec make_data_skel(dtype(), [cerl()]) -> c_lct().
+
+make_data_skel(CType, Es) ->
+ ann_make_data_skel([], CType, Es).
+
+
+%% @spec ann_make_data_skel(As::anns(), Type::dtype(),
+%% Elements::[cerl()]) -> cerl()
+%% @see make_data_skel/2
+
+-spec ann_make_data_skel(anns(), dtype(), [cerl()]) -> c_lct().
+
+ann_make_data_skel(As, {atomic, V}, []) -> #c_literal{val = V, anno = As};
+ann_make_data_skel(As, cons, [H, T]) -> ann_c_cons_skel(As, H, T);
+ann_make_data_skel(As, tuple, Es) -> ann_c_tuple_skel(As, Es).
+
+
+%% @spec update_data_skel(Old::cerl(), Type::dtype(),
+%% Elements::[cerl()]) -> cerl()
+%% @see make_data_skel/2
+
+-spec update_data_skel(cerl(), dtype(), [cerl()]) -> c_lct().
+
+update_data_skel(Node, CType, Es) ->
+ ann_make_data_skel(get_ann(Node), CType, Es).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec subtrees(Node::cerl()) -> [[cerl()]]
+%%
+%% @doc Returns the grouped list of all subtrees of a node. If
+%% <code>Node</code> is a leaf node (cf. <code>is_leaf/1</code>), this
+%% is the empty list, otherwise the result is always a nonempty list,
+%% containing the lists of subtrees of <code>Node</code>, in
+%% left-to-right order as they occur in the printed program text, and
+%% grouped by category. Often, each group contains only a single
+%% subtree.
+%%
+%% <p>Depending on the type of <code>Node</code>, the size of some
+%% groups may be variable (e.g., the group consisting of all the
+%% elements of a tuple), while others always contain the same number
+%% of elements - usually exactly one (e.g., the group containing the
+%% argument expression of a case-expression). Note, however, that the
+%% exact structure of the returned list (for a given node type) should
+%% in general not be depended upon, since it might be subject to
+%% change without notice.</p>
+%%
+%% <p>The function <code>subtrees/1</code> and the constructor functions
+%% <code>make_tree/2</code> and <code>update_tree/2</code> can be a
+%% great help if one wants to traverse a syntax tree, visiting all its
+%% subtrees, but treat nodes of the tree in a uniform way in most or all
+%% cases. Using these functions makes this simple, and also assures that
+%% your code is not overly sensitive to extensions of the syntax tree
+%% data type, because any node types not explicitly handled by your code
+%% can be left to a default case.</p>
+%%
+%% <p>For example:
+%% <pre>
+%% postorder(F, Tree) ->
+%% F(case subtrees(Tree) of
+%% [] -> Tree;
+%% List -> update_tree(Tree,
+%% [[postorder(F, Subtree)
+%% || Subtree &lt;- Group]
+%% || Group &lt;- List])
+%% end).
+%% </pre>
+%% maps the function <code>F</code> on <code>Tree</code> and all its
+%% subtrees, doing a post-order traversal of the syntax tree. (Note
+%% the use of <code>update_tree/2</code> to preserve annotations.) For
+%% a simple function like:
+%% <pre>
+%% f(Node) ->
+%% case type(Node) of
+%% atom -> atom("a_" ++ atom_name(Node));
+%% _ -> Node
+%% end.
+%% </pre>
+%% the call <code>postorder(fun f/1, Tree)</code> will yield a new
+%% representation of <code>Tree</code> in which all atom names have
+%% been extended with the prefix "a_", but nothing else (including
+%% annotations) has been changed.</p>
+%%
+%% @see is_leaf/1
+%% @see make_tree/2
+%% @see update_tree/2
+
+-spec subtrees(cerl()) -> [[cerl()]].
+
+subtrees(T) ->
+ case is_leaf(T) of
+ true ->
+ [];
+ false ->
+ case type(T) of
+ values ->
+ [values_es(T)];
+ binary ->
+ [binary_segments(T)];
+ bitstr ->
+ [[bitstr_val(T)], [bitstr_size(T)],
+ [bitstr_unit(T)], [bitstr_type(T)],
+ [bitstr_flags(T)]];
+ cons ->
+ [[cons_hd(T)], [cons_tl(T)]];
+ tuple ->
+ [tuple_es(T)];
+ map ->
+ [map_es(T)];
+ map_pair ->
+ [[map_pair_op(T)],[map_pair_key(T)],[map_pair_val(T)]];
+ 'let' ->
+ [let_vars(T), [let_arg(T)], [let_body(T)]];
+ seq ->
+ [[seq_arg(T)], [seq_body(T)]];
+ apply ->
+ [[apply_op(T)], apply_args(T)];
+ call ->
+ [[call_module(T)], [call_name(T)],
+ call_args(T)];
+ primop ->
+ [[primop_name(T)], primop_args(T)];
+ 'case' ->
+ [[case_arg(T)], case_clauses(T)];
+ clause ->
+ [clause_pats(T), [clause_guard(T)],
+ [clause_body(T)]];
+ alias ->
+ [[alias_var(T)], [alias_pat(T)]];
+ 'fun' ->
+ [fun_vars(T), [fun_body(T)]];
+ 'receive' ->
+ [receive_clauses(T), [receive_timeout(T)],
+ [receive_action(T)]];
+ 'try' ->
+ [[try_arg(T)], try_vars(T), [try_body(T)],
+ try_evars(T), [try_handler(T)]];
+ 'catch' ->
+ [[catch_body(T)]];
+ letrec ->
+ Es = unfold_tuples(letrec_defs(T)),
+ [Es, [letrec_body(T)]];
+ module ->
+ As = unfold_tuples(module_attrs(T)),
+ Es = unfold_tuples(module_defs(T)),
+ [[module_name(T)], module_exports(T), As, Es]
+ end
+ end.
+
+
+%% @spec update_tree(Old::cerl(), Groups::[[cerl()]]) -> cerl()
+%%
+%% @doc Creates a syntax tree with the given subtrees, and the same
+%% type and annotations as the <code>Old</code> node. This is
+%% equivalent to <code>ann_make_tree(get_ann(Node), type(Node),
+%% Groups)</code>, but potentially more efficient.
+%%
+%% @see update_tree/3
+%% @see ann_make_tree/3
+%% @see get_ann/1
+%% @see type/1
+
+-spec update_tree(cerl(), [[cerl()],...]) -> cerl().
+
+update_tree(Node, Gs) ->
+ ann_make_tree(get_ann(Node), type(Node), Gs).
+
+
+%% @spec update_tree(Old::cerl(), Type::ctype(), Groups::[[cerl()]]) ->
+%% cerl()
+%%
+%% @doc Creates a syntax tree with the given type and subtrees, and
+%% the same annotations as the <code>Old</code> node. This is
+%% equivalent to <code>ann_make_tree(get_ann(Node), Type,
+%% Groups)</code>, but potentially more efficient.
+%%
+%% @see update_tree/2
+%% @see ann_make_tree/3
+%% @see get_ann/1
+
+-spec update_tree(cerl(), ctype(), [[cerl()],...]) -> cerl().
+
+update_tree(Node, Type, Gs) ->
+ ann_make_tree(get_ann(Node), Type, Gs).
+
+
+%% @spec make_tree(Type::ctype(), Groups::[[cerl()]]) -> cerl()
+%%
+%% @doc Creates a syntax tree with the given type and subtrees.
+%% <code>Type</code> must be a node type name
+%% (cf. <code>type/1</code>) that does not denote a leaf node type
+%% (cf. <code>is_leaf/1</code>). <code>Groups</code> must be a
+%% <em>nonempty</em> list of groups of syntax trees, representing the
+%% subtrees of a node of the given type, in left-to-right order as
+%% they would occur in the printed program text, grouped by category
+%% as done by <code>subtrees/1</code>.
+%%
+%% <p>The result of <code>ann_make_tree(get_ann(Node), type(Node),
+%% subtrees(Node))</code> (cf. <code>update_tree/2</code>) represents
+%% the same source code text as the original <code>Node</code>,
+%% assuming that <code>subtrees(Node)</code> yields a nonempty
+%% list. However, it does not necessarily have the exact same data
+%% representation as <code>Node</code>.</p>
+%%
+%% @see ann_make_tree/3
+%% @see type/1
+%% @see is_leaf/1
+%% @see subtrees/1
+%% @see update_tree/2
+
+-spec make_tree(ctype(), [[cerl()],...]) -> cerl().
+
+make_tree(Type, Gs) ->
+ ann_make_tree([], Type, Gs).
+
+
+%% @spec ann_make_tree(As::anns(), Type::ctype(),
+%% Groups::[[cerl()]]) -> cerl()
+%%
+%% @doc Creates a syntax tree with the given annotations, type and
+%% subtrees. See <code>make_tree/2</code> for details.
+%%
+%% @see make_tree/2
+
+-spec ann_make_tree(anns(), ctype(), [[cerl()],...]) -> cerl().
+
+ann_make_tree(As, values, [Es]) -> ann_c_values(As, Es);
+ann_make_tree(As, binary, [Ss]) -> ann_c_binary(As, Ss);
+ann_make_tree(As, bitstr, [[V],[S],[U],[T],[Fs]]) ->
+ ann_c_bitstr(As, V, S, U, T, Fs);
+ann_make_tree(As, cons, [[H], [T]]) -> ann_c_cons(As, H, T);
+ann_make_tree(As, tuple, [Es]) -> ann_c_tuple(As, Es);
+ann_make_tree(As, map, [Es]) -> ann_c_map(As, Es);
+ann_make_tree(As, map, [[A], Es]) -> ann_c_map(As, A, Es);
+ann_make_tree(As, map_pair, [[Op], [K], [V]]) -> ann_c_map_pair(As, Op, K, V);
+ann_make_tree(As, 'let', [Vs, [A], [B]]) -> ann_c_let(As, Vs, A, B);
+ann_make_tree(As, seq, [[A], [B]]) -> ann_c_seq(As, A, B);
+ann_make_tree(As, apply, [[Op], Es]) -> ann_c_apply(As, Op, Es);
+ann_make_tree(As, call, [[M], [N], Es]) -> ann_c_call(As, M, N, Es);
+ann_make_tree(As, primop, [[N], Es]) -> ann_c_primop(As, N, Es);
+ann_make_tree(As, 'case', [[A], Cs]) -> ann_c_case(As, A, Cs);
+ann_make_tree(As, clause, [Ps, [G], [B]]) -> ann_c_clause(As, Ps, G, B);
+ann_make_tree(As, alias, [[V], [P]]) -> ann_c_alias(As, V, P);
+ann_make_tree(As, 'fun', [Vs, [B]]) -> ann_c_fun(As, Vs, B);
+ann_make_tree(As, 'receive', [Cs, [T], [A]]) ->
+ ann_c_receive(As, Cs, T, A);
+ann_make_tree(As, 'try', [[E], Vs, [B], Evs, [H]]) ->
+ ann_c_try(As, E, Vs, B, Evs, H);
+ann_make_tree(As, 'catch', [[B]]) -> ann_c_catch(As, B);
+ann_make_tree(As, letrec, [Es, [B]]) ->
+ ann_c_letrec(As, fold_tuples(Es), B);
+ann_make_tree(As, module, [[N], Xs, Es, Ds]) ->
+ ann_c_module(As, N, Xs, fold_tuples(Es), fold_tuples(Ds)).
+
+
+%% ---------------------------------------------------------------------
+
+%% @spec meta(Tree::cerl()) -> cerl()
+%%
+%% @doc Creates a meta-representation of a syntax tree. The result
+%% represents an Erlang expression "<code><em>MetaTree</em></code>"
+%% which, if evaluated, will yield a new syntax tree representing the
+%% same source code text as <code>Tree</code> (although the actual
+%% data representation may be different). The expression represented
+%% by <code>MetaTree</code> is <em>implementation independent</em>
+%% with regard to the data structures used by the abstract syntax tree
+%% implementation.
+%%
+%% <p>Any node in <code>Tree</code> whose node type is
+%% <code>var</code> (cf. <code>type/1</code>), and whose list of
+%% annotations (cf. <code>get_ann/1</code>) contains the atom
+%% <code>meta_var</code>, will remain unchanged in the resulting tree,
+%% except that exactly one occurrence of <code>meta_var</code> is
+%% removed from its annotation list.</p>
+%%
+%% <p>The main use of the function <code>meta/1</code> is to transform
+%% a data structure <code>Tree</code>, which represents a piece of
+%% program code, into a form that is <em>representation independent
+%% when printed</em>. E.g., suppose <code>Tree</code> represents a
+%% variable named "V". Then (assuming a function <code>print/1</code>
+%% for printing syntax trees), evaluating
+%% <code>print(abstract(Tree))</code> - simply using
+%% <code>abstract/1</code> to map the actual data structure onto a
+%% syntax tree representation - would output a string that might look
+%% something like "<code>{var, ..., 'V'}</code>", which is obviously
+%% dependent on the implementation of the abstract syntax trees. This
+%% could e.g. be useful for caching a syntax tree in a file. However,
+%% in some situations, such as in a program generator generator (where
+%% the word "generator" deliberately occurs twice), it may be
+%% unacceptable. Using
+%% <code>print(meta(Tree))</code> instead would output a
+%% <em>representation independent</em> syntax tree generating
+%% expression; in the above case, something like
+%% "<code>cerl:c_var('V')</code>".</p>
+%%
+%% <p>The implementation tries to generate compact code with respect
+%% to literals and lists.</p>
+%%
+%% @see abstract/1
+%% @see type/1
+%% @see get_ann/1
+
+-spec meta(cerl()) -> cerl().
+
+meta(Node) ->
+ %% First of all we check for metavariables:
+ case type(Node) of
+ var ->
+ case lists:member(meta_var, get_ann(Node)) of
+ false ->
+ meta_0(var, Node);
+ true ->
+ %% A meta-variable: remove the first found
+ %% 'meta_var' annotation, but otherwise leave
+ %% the node unchanged.
+ set_ann(Node, lists:delete(meta_var, get_ann(Node)))
+ end;
+ Type ->
+ meta_0(Type, Node)
+ end.
+
+meta_0(Type, Node) ->
+ case get_ann(Node) of
+ [] ->
+ meta_1(Type, Node);
+ As ->
+ meta_call(set_ann, [meta_1(Type, Node), abstract(As)])
+ end.
+
+meta_1(literal, Node) ->
+ %% We handle atomic literals separately, to get a bit
+ %% more compact code. For the rest, we use 'abstract'.
+ case concrete(Node) of
+ V when is_atom(V) ->
+ meta_call(c_atom, [Node]);
+ V when is_integer(V) ->
+ meta_call(c_int, [Node]);
+ V when is_float(V) ->
+ meta_call(c_float, [Node]);
+ [] ->
+ meta_call(c_nil, []);
+ _ ->
+ meta_call(abstract, [Node])
+ end;
+meta_1(var, Node) ->
+ %% A normal variable or function name.
+ meta_call(c_var, [abstract(var_name(Node))]);
+meta_1(values, Node) ->
+ meta_call(c_values,
+ [make_list(meta_list(values_es(Node)))]);
+meta_1(binary, Node) ->
+ meta_call(c_binary,
+ [make_list(meta_list(binary_segments(Node)))]);
+meta_1(bitstr, Node) ->
+ meta_call(c_bitstr,
+ [meta(bitstr_val(Node)),
+ meta(bitstr_size(Node)),
+ meta(bitstr_unit(Node)),
+ meta(bitstr_type(Node)),
+ meta(bitstr_flags(Node))]);
+meta_1(cons, Node) ->
+ %% The list is split up if some sublist has annotations. If
+ %% we get exactly one element, we generate a 'c_cons' call
+ %% instead of 'make_list' to reconstruct the node.
+ case split_list(Node) of
+ {[H], Node1} ->
+ meta_call(c_cons, [meta(H), meta(Node1)]);
+ {L, Node1} ->
+ meta_call(make_list,
+ [make_list(meta_list(L)), meta(Node1)])
+ end;
+meta_1(tuple, Node) ->
+ meta_call(c_tuple,
+ [make_list(meta_list(tuple_es(Node)))]);
+meta_1('let', Node) ->
+ meta_call(c_let,
+ [make_list(meta_list(let_vars(Node))),
+ meta(let_arg(Node)), meta(let_body(Node))]);
+meta_1(seq, Node) ->
+ meta_call(c_seq,
+ [meta(seq_arg(Node)), meta(seq_body(Node))]);
+meta_1(apply, Node) ->
+ meta_call(c_apply,
+ [meta(apply_op(Node)),
+ make_list(meta_list(apply_args(Node)))]);
+meta_1(call, Node) ->
+ meta_call(c_call,
+ [meta(call_module(Node)), meta(call_name(Node)),
+ make_list(meta_list(call_args(Node)))]);
+meta_1(primop, Node) ->
+ meta_call(c_primop,
+ [meta(primop_name(Node)),
+ make_list(meta_list(primop_args(Node)))]);
+meta_1('case', Node) ->
+ meta_call(c_case,
+ [meta(case_arg(Node)),
+ make_list(meta_list(case_clauses(Node)))]);
+meta_1(clause, Node) ->
+ meta_call(c_clause,
+ [make_list(meta_list(clause_pats(Node))),
+ meta(clause_guard(Node)),
+ meta(clause_body(Node))]);
+meta_1(alias, Node) ->
+ meta_call(c_alias,
+ [meta(alias_var(Node)), meta(alias_pat(Node))]);
+meta_1('fun', Node) ->
+ meta_call(c_fun,
+ [make_list(meta_list(fun_vars(Node))),
+ meta(fun_body(Node))]);
+meta_1('receive', Node) ->
+ meta_call(c_receive,
+ [make_list(meta_list(receive_clauses(Node))),
+ meta(receive_timeout(Node)),
+ meta(receive_action(Node))]);
+meta_1('try', Node) ->
+ meta_call(c_try,
+ [meta(try_arg(Node)),
+ make_list(meta_list(try_vars(Node))),
+ meta(try_body(Node)),
+ make_list(meta_list(try_evars(Node))),
+ meta(try_handler(Node))]);
+meta_1('catch', Node) ->
+ meta_call(c_catch, [meta(catch_body(Node))]);
+meta_1(letrec, Node) ->
+ meta_call(c_letrec,
+ [make_list([c_tuple([meta(N), meta(F)])
+ || {N, F} <- letrec_defs(Node)]),
+ meta(letrec_body(Node))]);
+meta_1(module, Node) ->
+ meta_call(c_module,
+ [meta(module_name(Node)),
+ make_list(meta_list(module_exports(Node))),
+ make_list([c_tuple([meta(A), meta(V)])
+ || {A, V} <- module_attrs(Node)]),
+ make_list([c_tuple([meta(N), meta(F)])
+ || {N, F} <- module_defs(Node)])]).
+
+meta_call(F, As) ->
+ c_call(c_atom(?MODULE), c_atom(F), As).
+
+meta_list([T | Ts]) ->
+ [meta(T) | meta_list(Ts)];
+meta_list([]) ->
+ [].
+
+split_list(Node) ->
+ split_list(set_ann(Node, []), []).
+
+split_list(Node, L) ->
+ A = get_ann(Node),
+ case type(Node) of
+ cons when A =:= [] ->
+ split_list(cons_tl(Node), [cons_hd(Node) | L]);
+ _ ->
+ {lists:reverse(L), Node}
+ end.
+
+
+%% ---------------------------------------------------------------------
+
+%% General utilities
+
+is_lit_list([#c_literal{} | Es]) ->
+ is_lit_list(Es);
+is_lit_list([_ | _]) ->
+ false;
+is_lit_list([]) ->
+ true.
+
+lit_list_vals([#c_literal{val = V} | Es]) ->
+ [V | lit_list_vals(Es)];
+lit_list_vals([]) ->
+ [].
+
+-spec make_lit_list([litval()]) -> [c_literal()].
+
+make_lit_list([V | Vs]) ->
+ [#c_literal{val = V} | make_lit_list(Vs)];
+make_lit_list([]) ->
+ [].
+
+%% The following tests are the same as done by 'io_lib:char_list' and
+%% 'io_lib:printable_list', respectively, but for a single character.
+
+is_char_value(V) when V >= $\000, V =< $\377 -> true;
+is_char_value(_) -> false.
+
+is_print_char_value(V) when V >= $\040, V =< $\176 -> true;
+is_print_char_value(V) when V >= $\240, V =< $\377 -> true;
+is_print_char_value(V) when V =:= $\b -> true;
+is_print_char_value(V) when V =:= $\d -> true;
+is_print_char_value(V) when V =:= $\e -> true;
+is_print_char_value(V) when V =:= $\f -> true;
+is_print_char_value(V) when V =:= $\n -> true;
+is_print_char_value(V) when V =:= $\r -> true;
+is_print_char_value(V) when V =:= $\s -> true;
+is_print_char_value(V) when V =:= $\t -> true;
+is_print_char_value(V) when V =:= $\v -> true;
+is_print_char_value(V) when V =:= $\" -> true;
+is_print_char_value(V) when V =:= $\' -> true; %' stupid Emacs.
+is_print_char_value(V) when V =:= $\\ -> true;
+is_print_char_value(_) -> false.
+
+is_char_list([V | Vs]) when is_integer(V) ->
+ is_char_value(V) andalso is_char_list(Vs);
+is_char_list([]) ->
+ true;
+is_char_list(_) ->
+ false.
+
+is_print_char_list([V | Vs]) when is_integer(V) ->
+ is_print_char_value(V) andalso is_print_char_list(Vs);
+is_print_char_list([]) ->
+ true;
+is_print_char_list(_) ->
+ false.
+
+unfold_tuples([{X, Y} | Ps]) ->
+ [X, Y | unfold_tuples(Ps)];
+unfold_tuples([]) ->
+ [].
+
+fold_tuples([X, Y | Es]) ->
+ [{X, Y} | fold_tuples(Es)];
+fold_tuples([]) ->
+ [].
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/recrec/core_parse.hrl b/lib/dialyzer/test/opaque_SUITE_data/src/recrec/core_parse.hrl
new file mode 100644
index 0000000000..14a53d0d2d
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/recrec/core_parse.hrl
@@ -0,0 +1,115 @@
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% Purpose : Core Erlang syntax trees as records.
+
+%% It would be nice to incorporate some generic functions as well but
+%% this could make including this file difficult.
+
+%% Note: the annotation list is *always* the first record field.
+%% Thus it is possible to define the macros:
+%% -define(get_ann(X), element(2, X)).
+%% -define(set_ann(X, Y), setelement(2, X, Y)).
+
+%% The record definitions appear alphabetically
+
+-record(c_alias, {anno=[] :: cerl:anns(),
+ var :: cerl:c_var(),
+ pat :: cerl:cerl()}).
+
+-record(c_apply, {anno=[] :: cerl:anns(),
+ op :: cerl:c_var(),
+ args :: [cerl:cerl()]}).
+
+-record(c_binary, {anno=[] :: cerl:anns(),
+ segments :: [cerl:c_bitstr()]}).
+
+-record(c_bitstr, {anno=[], val, % val :: Tree,
+ size, % size :: Tree,
+ unit, % unit :: Tree,
+ type, % type :: Tree,
+ flags}). % flags :: Tree
+
+-record(c_call, {anno=[], module, % module :: cerl:cerl(),
+ name, % name :: cerl:cerl(),
+ args}). % args :: [cerl:cerl()]
+
+-record(c_case, {anno=[] :: cerl:anns(),
+ arg :: cerl:cerl(),
+ clauses :: [cerl:cerl()]}).
+
+-record(c_catch, {anno=[] :: cerl:anns(), body :: cerl:cerl()}).
+
+-record(c_clause, {anno=[] :: cerl:anns(),
+ pats, % :: [cerl:cerl()], % pats :: [Tree],
+ guard, % :: cerl:cerl(), % guard :: Tree,
+ body}). % :: cerl:cerl()}). % body :: Tree
+
+-record(c_cons, {anno=[] :: cerl:anns(),
+ hd :: cerl:cerl(),
+ tl :: cerl:cerl()}).
+
+-record(c_fun, {anno=[] :: cerl:anns(),
+ vars :: [cerl:c_var()],
+ body :: cerl:cerl()}).
+
+-record(c_let, {anno=[] :: cerl:anns(),
+ vars :: [cerl:c_var()],
+ arg :: cerl:cerl(),
+ body :: cerl:cerl()}).
+
+-record(c_letrec, {anno=[] :: cerl:anns(),
+ defs :: cerl:defs(),
+ body :: cerl:cerl()}).
+
+-record(c_literal, {anno=[] :: cerl:anns(), val :: cerl:litval()}).
+
+-record(c_map, {anno=[] :: cerl:anns(),
+ arg=#c_literal{val=#{}} :: cerl:c_var() | cerl:c_literal(),
+ es :: [cerl:c_map_pair()],
+ is_pat=false :: boolean()}).
+
+-record(c_map_pair, {anno=[] :: cerl:anns(),
+ op, %:: #c_literal{val::'assoc'} | #c_literal{val::'exact'},
+ key,
+ val}).
+
+-record(c_module, {anno=[] :: cerl:anns(),
+ name :: cerl:c_literal(),
+ exports :: [cerl:c_var()],
+ attrs :: cerl:attrs(),
+ defs :: cerl:defs()}).
+
+-record(c_primop, {anno=[] :: cerl:anns(),
+ name :: cerl:c_literal(),
+ args :: [cerl:cerl()]}).
+
+-record(c_receive, {anno=[]:: cerl:anns(),
+ clauses, % clauses :: [Tree],
+ timeout, % timeout :: Tree,
+ action}). % action :: Tree
+
+-record(c_seq, {anno=[] :: cerl:anns(),
+ arg, % arg :: cerl:cerl(),
+ body}). % body :: cerl:cerl()
+
+-record(c_try, {anno=[], arg, % arg :: cerl:cerl(),
+ vars, % vars :: [cerl:c_var()],
+ body, % body :: cerl:cerl(),
+ evars, % evars :: [cerl:c_var()],
+ handler}). % handler :: cerl:cerl()
+
+-record(c_tuple, {anno=[] :: cerl:anns(), es :: [cerl:cerl()]}).
+
+-record(c_values, {anno=[] :: cerl:anns(), es :: [cerl:cerl()]}).
+
+-record(c_var, {anno=[] :: cerl:anns(), name :: cerl:var_name()}).
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer.hrl b/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer.hrl
new file mode 100644
index 0000000000..c0f287893e
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer.hrl
@@ -0,0 +1,173 @@
+%%% This is an -*- Erlang -*- file.
+%%%
+%%% Licensed under the Apache License, Version 2.0 (the "License");
+%%% you may not use this file except in compliance with the License.
+%%% You may obtain a copy of the License at
+%%%
+%%% http://www.apache.org/licenses/LICENSE-2.0
+%%%
+%%% Unless required by applicable law or agreed to in writing, software
+%%% distributed under the License is distributed on an "AS IS" BASIS,
+%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%%% See the License for the specific language governing permissions and
+%%% limitations under the License.
+%%%-------------------------------------------------------------------
+%%% File : dialyzer.hrl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Kostis Sagonas <[email protected]>
+%%% Description : Header file for Dialyzer.
+%%%
+%%% Created : 1 Oct 2004 by Kostis Sagonas <[email protected]>
+%%%-------------------------------------------------------------------
+
+-define(RET_NOTHING_SUSPICIOUS, 0).
+-define(RET_INTERNAL_ERROR, 1).
+-define(RET_DISCREPANCIES, 2).
+
+-type dial_ret() :: ?RET_NOTHING_SUSPICIOUS
+ | ?RET_INTERNAL_ERROR
+ | ?RET_DISCREPANCIES.
+
+%%--------------------------------------------------------------------
+%% Warning classification
+%%--------------------------------------------------------------------
+
+-define(WARN_RETURN_NO_RETURN, warn_return_no_exit).
+-define(WARN_RETURN_ONLY_EXIT, warn_return_only_exit).
+-define(WARN_NOT_CALLED, warn_not_called).
+-define(WARN_NON_PROPER_LIST, warn_non_proper_list).
+-define(WARN_FUN_APP, warn_fun_app).
+-define(WARN_MATCHING, warn_matching).
+-define(WARN_OPAQUE, warn_opaque).
+-define(WARN_FAILING_CALL, warn_failing_call).
+-define(WARN_BIN_CONSTRUCTION, warn_bin_construction).
+-define(WARN_CONTRACT_TYPES, warn_contract_types).
+-define(WARN_CONTRACT_SYNTAX, warn_contract_syntax).
+-define(WARN_CONTRACT_NOT_EQUAL, warn_contract_not_equal).
+-define(WARN_CONTRACT_SUBTYPE, warn_contract_subtype).
+-define(WARN_CONTRACT_SUPERTYPE, warn_contract_supertype).
+-define(WARN_CONTRACT_RANGE, warn_contract_range).
+-define(WARN_CALLGRAPH, warn_callgraph).
+-define(WARN_UNMATCHED_RETURN, warn_umatched_return).
+-define(WARN_RACE_CONDITION, warn_race_condition).
+-define(WARN_BEHAVIOUR, warn_behaviour).
+-define(WARN_UNDEFINED_CALLBACK, warn_undefined_callbacks).
+-define(WARN_UNKNOWN, warn_unknown).
+-define(WARN_MAP_CONSTRUCTION, warn_map_construction).
+
+%%
+%% The following type has a double role:
+%% 1. It is the set of warnings that will be collected.
+%% 2. It is also the set of tags for warnings that will be returned.
+%%
+-type dial_warn_tag() :: ?WARN_RETURN_NO_RETURN | ?WARN_RETURN_ONLY_EXIT
+ | ?WARN_NOT_CALLED | ?WARN_NON_PROPER_LIST
+ | ?WARN_MATCHING | ?WARN_OPAQUE | ?WARN_FUN_APP
+ | ?WARN_FAILING_CALL | ?WARN_BIN_CONSTRUCTION
+ | ?WARN_CONTRACT_TYPES | ?WARN_CONTRACT_SYNTAX
+ | ?WARN_CONTRACT_NOT_EQUAL | ?WARN_CONTRACT_SUBTYPE
+ | ?WARN_CONTRACT_SUPERTYPE | ?WARN_CALLGRAPH
+ | ?WARN_UNMATCHED_RETURN | ?WARN_RACE_CONDITION
+ | ?WARN_BEHAVIOUR | ?WARN_CONTRACT_RANGE
+ | ?WARN_UNDEFINED_CALLBACK | ?WARN_UNKNOWN
+ | ?WARN_MAP_CONSTRUCTION.
+
+%%
+%% This is the representation of each warning as it will be returned
+%% to dialyzer's callers
+%%
+-type file_line() :: {file:filename(), non_neg_integer()}.
+-type dial_warning() :: {dial_warn_tag(), file_line(), {atom(), [term()]}}.
+
+%%
+%% This is the representation of each warning before suppressions have
+%% been applied
+%%
+-type m_or_mfa() :: module() % warnings not associated with any function
+ | mfa().
+-type warning_info() :: {file:filename(), non_neg_integer(), m_or_mfa()}.
+-type raw_warning() :: {dial_warn_tag(), warning_info(), {atom(), [term()]}}.
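+
+%% For illustration (hypothetical values), a raw warning might look like
+%%   {?WARN_MATCHING, {"foo.erl", 17, {foo, bar, 2}},
+%%    {pattern_match, [PatString, TypeString]}}
+%% and becomes a dial_warning() once suppressions have been applied and
+%% the m_or_mfa() component has been dropped from the location info.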
+
+%%
+%% This is the representation of dialyzer's internal errors
+%%
+-type dial_error() :: any(). %% XXX: underspecified
+
+%%--------------------------------------------------------------------
+%% Basic types used either in the record definitions below or in other
+%% parts of the application
+%%--------------------------------------------------------------------
+
+-type anal_type() :: 'succ_typings' | 'plt_build'.
+-type anal_type1() :: anal_type() | 'plt_add' | 'plt_check' | 'plt_remove'.
+-type contr_constr() :: {'subtype', erl_types:erl_type(), erl_types:erl_type()}.
+-type contract_pair() :: {erl_types:erl_type(), [contr_constr()]}.
+-type dial_define() :: {atom(), term()}.
+-type dial_option() :: {atom(), term()}.
+-type dial_options() :: [dial_option()].
+-type fopt() :: 'basename' | 'fullpath'.
+-type format() :: 'formatted' | 'raw'.
+-type label() :: non_neg_integer().
+-type dial_warn_tags() :: ordsets:ordset(dial_warn_tag()).
+-type rep_mode() :: 'quiet' | 'normal' | 'verbose'.
+-type start_from() :: 'byte_code' | 'src_code'.
+-type mfa_or_funlbl() :: label() | mfa().
+-type solver() :: 'v1' | 'v2'.
+
+%%--------------------------------------------------------------------
+%% Record declarations used by various files
+%%--------------------------------------------------------------------
+
+-type doc_plt() :: 'undefined' | dialyzer_plt:plt().
+
+-record(analysis, {analysis_pid :: pid() | 'undefined',
+ type = succ_typings :: anal_type(),
+ defines = [] :: [dial_define()],
+ doc_plt :: doc_plt(),
+ files = [] :: [file:filename()],
+ include_dirs = [] :: [file:filename()],
+ start_from = byte_code :: start_from(),
+ plt :: dialyzer_plt:plt(),
+ use_contracts = true :: boolean(),
+ race_detection = false :: boolean(),
+ behaviours_chk = false :: boolean(),
+ timing = false :: boolean() | 'debug',
+ timing_server = none :: dialyzer_timing:timing_server(),
+ callgraph_file = "" :: file:filename(),
+ solvers :: [solver()]}).
+
+-record(options, {files = [] :: [file:filename()],
+ files_rec = [] :: [file:filename()],
+ analysis_type = succ_typings :: anal_type1(),
+ timing = false :: boolean() | 'debug',
+ defines = [] :: [dial_define()],
+ from = byte_code :: start_from(),
+ get_warnings = maybe :: boolean() | 'maybe',
+ init_plts = [] :: [file:filename()],
+ include_dirs = [] :: [file:filename()],
+ output_plt = none :: 'none' | file:filename(),
+ legal_warnings = ordsets:new() :: dial_warn_tags(),
+ report_mode = normal :: rep_mode(),
+ erlang_mode = false :: boolean(),
+ use_contracts = true :: boolean(),
+ output_file = none :: 'none' | file:filename(),
+ output_format = formatted :: format(),
+ filename_opt = basename :: fopt(),
+ callgraph_file = "" :: file:filename(),
+ check_plt = true :: boolean(),
+ solvers = [] :: [solver()]}).
+
+-record(contract, {contracts = [] :: [contract_pair()],
+ args = [] :: [erl_types:erl_type()],
+ forms = [] :: [{_, _}]}).
+
+%%--------------------------------------------------------------------
+
+-define(timing(Server, Msg, Var, Expr),
+ begin
+ dialyzer_timing:start_stamp(Server, Msg),
+ Var = Expr,
+ dialyzer_timing:end_stamp(Server),
+ Var
+ end).
+-define(timing(Server, Msg, Expr), ?timing(Server, Msg, _T, Expr)).
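+
+%% Illustrative use of the macro above (names are hypothetical):
+%%   ?timing(TimingServer, "typesig", Result, do_analysis(Input))
+%% records a start stamp, evaluates do_analysis(Input) into Result,
+%% records an end stamp, and returns Result.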
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl b/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl
new file mode 100644
index 0000000000..53b08cc5c9
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl
@@ -0,0 +1,3795 @@
+%% -*- erlang-indent-level: 2 -*-
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+
+%%%-------------------------------------------------------------------
+%%% File : dialyzer_dataflow.erl
+%%% Author : Tobias Lindahl <[email protected]>
+%%% Description :
+%%%
+%%% Created : 19 Apr 2005 by Tobias Lindahl <[email protected]>
+%%%-------------------------------------------------------------------
+
+-module(dialyzer_dataflow).
+
+-export([get_fun_types/5, get_warnings/5, format_args/3]).
+
+%% Data structure interfaces.
+-export([state__add_warning/2, state__cleanup/1,
+ state__duplicate/1, dispose_state/1,
+ state__get_callgraph/1, state__get_races/1,
+ state__get_records/1, state__put_callgraph/2,
+ state__put_races/2, state__records_only/1,
+ state__find_function/2]).
+
+-export_type([state/0]).
+
+-include("dialyzer.hrl").
+
+-import(erl_types,
+ [t_inf/2, t_inf/3, t_inf_lists/2, t_inf_lists/3,
+ t_is_equal/2, t_is_subtype/2, t_subtract/2,
+ t_sup/1, t_sup/2]).
+
+-import(erl_types,
+ [any_none/1, t_any/0, t_atom/0, t_atom/1, t_atom_vals/1, t_atom_vals/2,
+ t_binary/0, t_boolean/0,
+ t_bitstr/0, t_bitstr/2, t_bitstr_concat/1, t_bitstr_match/2,
+ t_cons/0, t_cons/2, t_cons_hd/2, t_cons_tl/2,
+ t_contains_opaque/2,
+ t_find_opaque_mismatch/3, t_float/0, t_from_range/2, t_from_term/1,
+ t_fun/0, t_fun/2, t_fun_args/1, t_fun_args/2, t_fun_range/1,
+ t_fun_range/2, t_integer/0, t_integers/1,
+ t_is_any/1, t_is_atom/1, t_is_atom/2, t_is_any_atom/3,
+ t_is_boolean/2,
+ t_is_integer/2, t_is_list/1,
+ t_is_nil/2, t_is_none/1, t_is_none_or_unit/1,
+ t_is_number/2, t_is_reference/2, t_is_pid/2, t_is_port/2,
+ t_is_unit/1,
+ t_limit/2, t_list/0, t_list_elements/2,
+ t_maybe_improper_list/0, t_module/0,
+ t_none/0, t_non_neg_integer/0, t_number/0, t_number_vals/2,
+ t_pid/0, t_port/0, t_product/1, t_reference/0,
+ t_to_string/2, t_to_tlist/1,
+ t_tuple/0, t_tuple/1, t_tuple_args/1, t_tuple_args/2,
+ t_tuple_subtypes/2,
+ t_unit/0, t_unopaque/2,
+ t_map/0, t_map/1, t_is_singleton/2
+ ]).
+
+%%-define(DEBUG, true).
+%%-define(DEBUG_PP, true).
+%%-define(DEBUG_TIME, true).
+
+-ifdef(DEBUG).
+-import(erl_types, [t_to_string/1]).
+-define(debug(S_, L_), io:format(S_, L_)).
+-else.
+-define(debug(S_, L_), ok).
+-endif.
+
+%%--------------------------------------------------------------------
+
+-type type() :: erl_types:erl_type().
+-type types() :: erl_types:type_table().
+
+-type curr_fun() :: 'undefined' | 'top' | mfa_or_funlbl().
+
+-define(no_arg, no_arg).
+
+-define(TYPE_LIMIT, 3).
+
+-define(BITS, 128).
+
+%% Types with comment 'race' are due to dialyzer_races.erl.
+-record(state, {callgraph :: dialyzer_callgraph:callgraph()
+ | 'undefined', % race
+ codeserver :: dialyzer_codeserver:codeserver()
+ | 'undefined', % race
+ envs :: env_tab()
+ | 'undefined', % race
+ fun_tab :: fun_tab()
+ | 'undefined', % race
+ fun_homes :: dict:dict(label(), mfa())
+ | 'undefined', % race
+ plt :: dialyzer_plt:plt()
+ | 'undefined', % race
+ opaques :: [type()]
+ | 'undefined', % race
+ races = dialyzer_races:new() :: dialyzer_races:races(),
+ records = dict:new() :: types(),
+ tree_map :: dict:dict(label(), cerl:cerl())
+ | 'undefined', % race
+ warning_mode = false :: boolean(),
+ warnings = [] :: [raw_warning()],
+ work :: {[_], [_], sets:set()}
+ | 'undefined', % race
+ module :: module(),
+ curr_fun :: curr_fun()
+ }).
+
+-record(map, {map = maps:new() :: type_tab(),
+ subst = maps:new() :: subst_tab(),
+ modified = [] :: [Key :: term()],
+ modified_stack = [] :: [{[Key :: term()],reference()}],
+ ref = undefined :: reference() | undefined}).
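+
+%% The modified/modified_stack/ref fields seem to track which keys have
+%% changed since the enclosing join_maps_begin/1, so that join_maps_end/2
+%% only needs to join the modified entries rather than the full maps
+%% (see handle_case/3 and the tuple clause of bind_pat_vars/6, which
+%% pair join_maps_begin and join_maps_end).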
+
+-type env_tab() :: dict:dict(label(), #map{}).
+-type fun_entry() :: {Args :: [type()], RetType :: type()}.
+-type fun_tab() :: dict:dict('top' | label(),
+ {'not_handled', fun_entry()} | fun_entry()).
+-type key() :: label() | cerl:cerl().
+-type type_tab() :: #{key() => type()}.
+-type subst_tab() :: #{key() => cerl:cerl()}.
+
+%% Exported Types
+
+-opaque state() :: #state{}.
+
+%%--------------------------------------------------------------------
+
+-type fun_types() :: dict:dict(label(), type()).
+
+-spec get_warnings(cerl:c_module(), dialyzer_plt:plt(),
+ dialyzer_callgraph:callgraph(),
+ dialyzer_codeserver:codeserver(),
+ types()) ->
+ {[raw_warning()], fun_types()}.
+
+get_warnings(Tree, Plt, Callgraph, Codeserver, Records) ->
+ State1 = analyze_module(Tree, Plt, Callgraph, Codeserver, Records, true),
+ State2 = state__renew_warnings(state__get_warnings(State1), State1),
+ State3 = state__get_race_warnings(State2),
+ {State3#state.warnings, state__all_fun_types(State3)}.
+
+-spec get_fun_types(cerl:c_module(), dialyzer_plt:plt(),
+ dialyzer_callgraph:callgraph(),
+ dialyzer_codeserver:codeserver(),
+ types()) -> fun_types().
+
+get_fun_types(Tree, Plt, Callgraph, Codeserver, Records) ->
+ State = analyze_module(Tree, Plt, Callgraph, Codeserver, Records, false),
+ state__all_fun_types(State).
+
+%%% ===========================================================================
+%%%
+%%% The analysis.
+%%%
+%%% ===========================================================================
+
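+%% When warnings are requested (GetWarnings =:= true), the module is
+%% effectively analyzed twice: a first pass with warning mode off runs
+%% analyze_loop/1 until the worklist is empty and the function types are
+%% stable, and a second pass in warning mode re-traverses the code
+%% against those types to emit the warnings (see analyze_module/6 below).
+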
+analyze_module(Tree, Plt, Callgraph, Codeserver, Records, GetWarnings) ->
+ debug_pp(Tree, false),
+ Module = cerl:atom_val(cerl:module_name(Tree)),
+ TopFun = cerl:ann_c_fun([{label, top}], [], Tree),
+ State = state__new(Callgraph, Codeserver, TopFun, Plt, Module, Records),
+ State1 = state__race_analysis(not GetWarnings, State),
+ State2 = analyze_loop(State1),
+ case GetWarnings of
+ true ->
+ State3 = state__set_warning_mode(State2),
+ State4 = analyze_loop(State3),
+ dialyzer_races:race(State4);
+ false ->
+ State2
+ end.
+
+analyze_loop(State) ->
+ case state__get_work(State) of
+ none -> state__set_curr_fun(undefined, State);
+ {Fun, NewState0} ->
+ NewState1 = state__set_curr_fun(get_label(Fun), NewState0),
+ {ArgTypes, IsCalled} = state__get_args_and_status(Fun, NewState1),
+ case not IsCalled of
+ true ->
+ ?debug("Not handling (not called) ~w: ~s\n",
+ [NewState1#state.curr_fun,
+ t_to_string(t_product(ArgTypes))]),
+ analyze_loop(NewState1);
+ false ->
+ case state__fun_env(Fun, NewState1) of
+ none ->
+ ?debug("Not handling (no env) ~w: ~s\n",
+ [NewState1#state.curr_fun,
+ t_to_string(t_product(ArgTypes))]),
+ analyze_loop(NewState1);
+ Map ->
+ ?debug("Handling fun ~p: ~s\n",
+ [NewState1#state.curr_fun,
+ t_to_string(state__fun_type(Fun, NewState1))]),
+ Vars = cerl:fun_vars(Fun),
+ Map1 = enter_type_lists(Vars, ArgTypes, Map),
+ Body = cerl:fun_body(Fun),
+ FunLabel = get_label(Fun),
+ IsRaceAnalysisEnabled = is_race_analysis_enabled(State),
+ NewState3 =
+ case IsRaceAnalysisEnabled of
+ true ->
+ NewState2 = state__renew_curr_fun(
+ state__lookup_name(FunLabel, NewState1), FunLabel,
+ NewState1),
+ state__renew_race_list([], 0, NewState2);
+ false -> NewState1
+ end,
+ {NewState4, _Map2, BodyType} =
+ traverse(Body, Map1, NewState3),
+ ?debug("Done analyzing: ~w:~s\n",
+ [NewState1#state.curr_fun,
+ t_to_string(t_fun(ArgTypes, BodyType))]),
+ NewState5 =
+ case IsRaceAnalysisEnabled of
+ true -> renew_race_code(NewState4);
+ false -> NewState4
+ end,
+ NewState6 =
+ state__update_fun_entry(Fun, ArgTypes, BodyType, NewState5),
+ ?debug("done adding stuff for ~w\n",
+ [state__lookup_name(get_label(Fun), State)]),
+ analyze_loop(NewState6)
+ end
+ end
+ end.
+
+traverse(Tree, Map, State) ->
+ ?debug("Handling ~p\n", [cerl:type(Tree)]),
+ %% debug_pp_map(Map),
+ case cerl:type(Tree) of
+ alias ->
+ %% This only happens when checking for illegal record patterns
+ %% so the handling is a bit rudimentary.
+ traverse(cerl:alias_pat(Tree), Map, State);
+ apply ->
+ handle_apply(Tree, Map, State);
+ binary ->
+ Segs = cerl:binary_segments(Tree),
+ {State1, Map1, SegTypes} = traverse_list(Segs, Map, State),
+ {State1, Map1, t_bitstr_concat(SegTypes)};
+ bitstr ->
+ handle_bitstr(Tree, Map, State);
+ call ->
+ handle_call(Tree, Map, State);
+ 'case' ->
+ handle_case(Tree, Map, State);
+ 'catch' ->
+ {State1, _Map1, _} = traverse(cerl:catch_body(Tree), Map, State),
+ {State1, Map, t_any()};
+ cons ->
+ handle_cons(Tree, Map, State);
+ 'fun' ->
+ Type = state__fun_type(Tree, State),
+ case state__warning_mode(State) of
+ true -> {State, Map, Type};
+ false ->
+ State2 = state__add_work(get_label(Tree), State),
+ State3 = state__update_fun_env(Tree, Map, State2),
+ {State3, Map, Type}
+ end;
+ 'let' ->
+ handle_let(Tree, Map, State);
+ letrec ->
+ Defs = cerl:letrec_defs(Tree),
+ Body = cerl:letrec_body(Tree),
+ %% By not including the variables in scope we can ensure that we
+ %% will get the current function type when using the variables.
+ FoldFun = fun({Var, Fun}, {AccState, AccMap}) ->
+ {NewAccState, NewAccMap0, FunType} =
+ traverse(Fun, AccMap, AccState),
+ NewAccMap = enter_type(Var, FunType, NewAccMap0),
+ {NewAccState, NewAccMap}
+ end,
+ {State1, Map1} = lists:foldl(FoldFun, {State, Map}, Defs),
+ traverse(Body, Map1, State1);
+ literal ->
+ Type = literal_type(Tree),
+ {State, Map, Type};
+ module ->
+ handle_module(Tree, Map, State);
+ primop ->
+ Type =
+ case cerl:atom_val(cerl:primop_name(Tree)) of
+ match_fail -> t_none();
+ raise -> t_none();
+ bs_init_writable -> t_from_term(<<>>);
+ Other -> erlang:error({'Unsupported primop', Other})
+ end,
+ {State, Map, Type};
+ 'receive' ->
+ handle_receive(Tree, Map, State);
+ seq ->
+ Arg = cerl:seq_arg(Tree),
+ Body = cerl:seq_body(Tree),
+ {State1, Map1, ArgType} = SMA = traverse(Arg, Map, State),
+ case t_is_none_or_unit(ArgType) of
+ true ->
+ SMA;
+ false ->
+ State2 =
+ case
+ t_is_any(ArgType)
+ orelse t_is_simple(ArgType, State)
+ orelse is_call_to_send(Arg)
+ orelse is_lc_simple_list(Arg, ArgType, State)
+ of
+ true -> % do not warn in these cases
+ State1;
+ false ->
+ state__add_warning(State1, ?WARN_UNMATCHED_RETURN, Arg,
+ {unmatched_return,
+ [format_type(ArgType, State1)]})
+ end,
+ traverse(Body, Map1, State2)
+ end;
+ 'try' ->
+ handle_try(Tree, Map, State);
+ tuple ->
+ handle_tuple(Tree, Map, State);
+ map ->
+ handle_map(Tree, Map, State);
+ values ->
+ Elements = cerl:values_es(Tree),
+ {State1, Map1, EsType} = traverse_list(Elements, Map, State),
+ Type = t_product(EsType),
+ {State1, Map1, Type};
+ var ->
+ ?debug("Looking up unknown variable: ~p\n", [Tree]),
+ case state__lookup_type_for_letrec(Tree, State) of
+ error ->
+ LType = lookup_type(Tree, Map),
+ {State, Map, LType};
+ {ok, Type} -> {State, Map, Type}
+ end;
+ Other ->
+ erlang:error({'Unsupported type', Other})
+ end.
+
+traverse_list(Trees, Map, State) ->
+ traverse_list(Trees, Map, State, []).
+
+traverse_list([Tree|Tail], Map, State, Acc) ->
+ {State1, Map1, Type} = traverse(Tree, Map, State),
+ traverse_list(Tail, Map1, State1, [Type|Acc]);
+traverse_list([], Map, State, Acc) ->
+ {State, Map, lists:reverse(Acc)}.
+
+%%________________________________________
+%%
+%% Special instructions
+%%
+
+handle_apply(Tree, Map, State) ->
+ Args = cerl:apply_args(Tree),
+ Op = cerl:apply_op(Tree),
+ {State0, Map1, ArgTypes} = traverse_list(Args, Map, State),
+ {State1, Map2, OpType} = traverse(Op, Map1, State0),
+ case any_none(ArgTypes) of
+ true ->
+ {State1, Map2, t_none()};
+ false ->
+ FunList =
+ case state__lookup_call_site(Tree, State) of
+ error -> [external]; %% so that we go directly to the fallback
+ {ok, List} -> List
+ end,
+ FunInfoList = [{local, state__fun_info(Fun, State)} || Fun <- FunList],
+ case
+ handle_apply_or_call(FunInfoList, Args, ArgTypes, Map2, Tree, State1)
+ of
+ {had_external, State2} ->
+ %% Fallback: use whatever info we collected from traversing the op
+ %% instead of the result that has been generalized to t_any().
+ Arity = length(Args),
+ OpType1 = t_inf(OpType, t_fun(Arity, t_any())),
+ case t_is_none(OpType1) of
+ true ->
+ Msg = {fun_app_no_fun,
+ [format_cerl(Op), format_type(OpType, State2), Arity]},
+ State3 = state__add_warning(State2, ?WARN_FAILING_CALL,
+ Tree, Msg),
+ {State3, Map2, t_none()};
+ false ->
+ NewArgs = t_inf_lists(ArgTypes,
+ t_fun_args(OpType1, 'universe')),
+ case any_none(NewArgs) of
+ true ->
+ Msg = {fun_app_args,
+ [format_args(Args, ArgTypes, State),
+ format_type(OpType, State)]},
+ State3 = state__add_warning(State2, ?WARN_FAILING_CALL,
+ Tree, Msg),
+ {State3, enter_type(Op, OpType1, Map2), t_none()};
+ false ->
+ Map3 = enter_type_lists(Args, NewArgs, Map2),
+ Range0 = t_fun_range(OpType1, 'universe'),
+ Range =
+ case t_is_unit(Range0) of
+ true -> t_none();
+ false -> Range0
+ end,
+ {State2, enter_type(Op, OpType1, Map3), Range}
+ end
+ end;
+ Normal -> Normal
+ end
+ end.
+
+handle_apply_or_call(FunInfoList, Args, ArgTypes, Map, Tree, State) ->
+ None = t_none(),
+ %% Call-site analysis may be inaccurate and consider more funs than those that
+ %% are actually possible. If all of them are incorrect, then warnings can be
+ %% emitted. If at least one fun is ok, however, then no warning is emitted,
+ %% just in case the bad ones are not really possible. The last argument is
+ %% used for this, with the following encoding:
+ %% Initial value: {none, []}
+ %% First fun checked: {one, <List of warns>}
+ %% More funs checked: {many, <List of warns>}
+ %% A '{one, []}' can only become '{many, []}'.
+%% If at any point a fun does not add warnings, then the list is also
+ %% replaced with an empty list.
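+%% For example (illustrative): with two candidate funs where only the
+%% first one produces a warning W, the accumulator evolves as
+%%   {none, []} -> {one, [W]} -> {many, []}
+%% and, since the final list is empty, no warning is emitted.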
+ handle_apply_or_call(FunInfoList, Args, ArgTypes, Map, Tree, State,
+ [None || _ <- ArgTypes], None, false, {none, []}).
+
+handle_apply_or_call([{local, external}|Left], Args, ArgTypes, Map, Tree, State,
+ _AccArgTypes, _AccRet, _HadExternal, Warns) ->
+ {HowMany, _} = Warns,
+ NewHowMany =
+ case HowMany of
+ none -> one;
+ _ -> many
+ end,
+ NewWarns = {NewHowMany, []},
+ handle_apply_or_call(Left, Args, ArgTypes, Map, Tree, State,
+ ArgTypes, t_any(), true, NewWarns);
+handle_apply_or_call([{TypeOfApply, {Fun, Sig, Contr, LocalRet}}|Left],
+ Args, ArgTypes, Map, Tree,
+ #state{opaques = Opaques} = State,
+ AccArgTypes, AccRet, HadExternal, Warns) ->
+ Any = t_any(),
+ AnyArgs = [Any || _ <- Args],
+ GenSig = {AnyArgs, fun(_) -> t_any() end},
+ {CArgs, CRange} =
+ case Contr of
+ {value, #contract{args = As} = C} ->
+ {As, fun(FunArgs) ->
+ dialyzer_contracts:get_contract_return(C, FunArgs)
+ end};
+ none -> GenSig
+ end,
+ {BifArgs, BifRange} =
+ case TypeOfApply of
+ remote ->
+ {M, F, A} = Fun,
+ case erl_bif_types:is_known(M, F, A) of
+ true ->
+ BArgs = erl_bif_types:arg_types(M, F, A),
+ BRange =
+ fun(FunArgs) ->
+ erl_bif_types:type(M, F, A, FunArgs, Opaques)
+ end,
+ {BArgs, BRange};
+ false ->
+ GenSig
+ end;
+ local -> GenSig
+ end,
+ {SigArgs, SigRange} =
+ case Sig of
+ {value, {SR, SA}} -> {SA, SR};
+ none -> {AnyArgs, t_any()}
+ end,
+
+ ?debug("--------------------------------------------------------\n", []),
+ ?debug("Fun: ~p\n", [state__lookup_name(Fun, State)]),
+ ?debug("Module ~p\n", [State#state.module]),
+ ?debug("CArgs ~s\n", [erl_types:t_to_string(t_product(CArgs))]),
+ ?debug("ArgTypes ~s\n", [erl_types:t_to_string(t_product(ArgTypes))]),
+ ?debug("BifArgs ~p\n", [erl_types:t_to_string(t_product(BifArgs))]),
+
+ NewArgsSig = t_inf_lists(SigArgs, ArgTypes, Opaques),
+ ?debug("SigArgs ~s\n", [erl_types:t_to_string(t_product(SigArgs))]),
+ ?debug("NewArgsSig: ~s\n", [erl_types:t_to_string(t_product(NewArgsSig))]),
+ NewArgsContract = t_inf_lists(CArgs, ArgTypes, Opaques),
+ ?debug("NewArgsContract: ~s\n",
+ [erl_types:t_to_string(t_product(NewArgsContract))]),
+ NewArgsBif = t_inf_lists(BifArgs, ArgTypes, Opaques),
+ ?debug("NewArgsBif: ~s\n", [erl_types:t_to_string(t_product(NewArgsBif))]),
+ NewArgTypes0 = t_inf_lists(NewArgsSig, NewArgsContract),
+ NewArgTypes = t_inf_lists(NewArgTypes0, NewArgsBif, Opaques),
+ ?debug("NewArgTypes ~s\n", [erl_types:t_to_string(t_product(NewArgTypes))]),
+ ?debug("\n", []),
+
+ BifRet = BifRange(NewArgTypes),
+ ContrRet = CRange(NewArgTypes),
+ RetWithoutContr = t_inf(SigRange, BifRet),
+ RetWithoutLocal = t_inf(ContrRet, RetWithoutContr),
+
+ ?debug("RetWithoutContr: ~s\n",[erl_types:t_to_string(RetWithoutContr)]),
+ ?debug("RetWithoutLocal: ~s\n", [erl_types:t_to_string(RetWithoutLocal)]),
+ ?debug("BifRet: ~s\n", [erl_types:t_to_string(BifRange(NewArgTypes))]),
+ ?debug("SigRange: ~s\n", [erl_types:t_to_string(SigRange)]),
+ ?debug("ContrRet: ~s\n", [erl_types:t_to_string(ContrRet)]),
+ ?debug("LocalRet: ~s\n", [erl_types:t_to_string(LocalRet)]),
+
+ State1 =
+ case is_race_analysis_enabled(State) of
+ true ->
+ Ann = cerl:get_ann(Tree),
+ File = get_file(Ann),
+ Line = abs(get_line(Ann)),
+ dialyzer_races:store_race_call(Fun, ArgTypes, Args,
+ {File, Line}, State);
+ false -> State
+ end,
+ FailedConj = any_none([RetWithoutLocal|NewArgTypes]),
+ IsFailBif = t_is_none(BifRange(BifArgs)),
+ IsFailSig = t_is_none(SigRange),
+ ?debug("FailedConj: ~p~n", [FailedConj]),
+ ?debug("IsFailBif: ~p~n", [IsFailBif]),
+ ?debug("IsFailSig: ~p~n", [IsFailSig]),
+ State2 =
+ case FailedConj andalso not (IsFailBif orelse IsFailSig) of
+ true ->
+ case t_is_none(RetWithoutLocal) andalso
+ not t_is_none(RetWithoutContr) andalso
+ not any_none(NewArgTypes) of
+ true ->
+ {value, C1} = Contr,
+ Contract = dialyzer_contracts:contract_to_string(C1),
+ {M1, F1, A1} = state__lookup_name(Fun, State),
+ ArgStrings = format_args(Args, ArgTypes, State),
+ CRet = erl_types:t_to_string(RetWithoutContr),
+ %% This Msg will be post_processed by dialyzer_succ_typings
+ Msg =
+ {contract_range, [Contract, M1, F1, A1, ArgStrings, CRet]},
+ state__add_warning(State1, ?WARN_CONTRACT_RANGE, Tree, Msg);
+ false ->
+ FailedSig = any_none(NewArgsSig),
+ FailedContract =
+ any_none([CRange(NewArgsContract)|NewArgsContract]),
+ FailedBif = any_none([BifRange(NewArgsBif)|NewArgsBif]),
+ InfSig = t_inf(t_fun(SigArgs, SigRange),
+ t_fun(BifArgs, BifRange(BifArgs))),
+ FailReason =
+ apply_fail_reason(FailedSig, FailedBif, FailedContract),
+ Msg = get_apply_fail_msg(Fun, Args, ArgTypes, NewArgTypes, InfSig,
+ Contr, CArgs, State1, FailReason, Opaques),
+ WarnType = case Msg of
+ {call, _} -> ?WARN_FAILING_CALL;
+ {apply, _} -> ?WARN_FAILING_CALL;
+ {call_with_opaque, _} -> ?WARN_OPAQUE;
+ {call_without_opaque, _} -> ?WARN_OPAQUE;
+ {opaque_type_test, _} -> ?WARN_OPAQUE
+ end,
+ Frc = {erlang, is_record, 3} =:= state__lookup_name(Fun, State),
+ state__add_warning(State1, WarnType, Tree, Msg, Frc)
+ end;
+ false -> State1
+ end,
+ State3 =
+ case TypeOfApply of
+ local ->
+ case state__is_escaping(Fun, State2) of
+ true -> State2;
+ false ->
+ ForwardArgs = [t_limit(X, ?TYPE_LIMIT) || X <- ArgTypes],
+ forward_args(Fun, ForwardArgs, State2)
+ end;
+ remote ->
+ add_bif_warnings(Fun, NewArgTypes, Tree, State2)
+ end,
+ NewAccArgTypes =
+ case FailedConj of
+ true -> AccArgTypes;
+ false -> [t_sup(X, Y) || {X, Y} <- lists:zip(NewArgTypes, AccArgTypes)]
+ end,
+ TotalRet =
+ case t_is_none(LocalRet) andalso t_is_unit(RetWithoutLocal) of
+ true -> RetWithoutLocal;
+ false -> t_inf(RetWithoutLocal, LocalRet)
+ end,
+ NewAccRet = t_sup(AccRet, TotalRet),
+ ?debug("NewAccRet: ~s\n", [t_to_string(NewAccRet)]),
+ {NewWarnings, State4} = state__remove_added_warnings(State, State3),
+ {HowMany, OldWarnings} = Warns,
+ NewWarns =
+ case HowMany of
+ none -> {one, NewWarnings};
+ _ ->
+ case OldWarnings =:= [] of
+ true -> {many, []};
+ false ->
+ case NewWarnings =:= [] of
+ true -> {many, []};
+ false -> {many, NewWarnings ++ OldWarnings}
+ end
+ end
+ end,
+ handle_apply_or_call(Left, Args, ArgTypes, Map, Tree,
+ State4, NewAccArgTypes, NewAccRet, HadExternal, NewWarns);
+handle_apply_or_call([], Args, _ArgTypes, Map, _Tree, State,
+ AccArgTypes, AccRet, HadExternal, {_, Warnings}) ->
+ State1 = state__add_warnings(Warnings, State),
+ case HadExternal of
+ false ->
+ NewMap = enter_type_lists(Args, AccArgTypes, Map),
+ {State1, NewMap, AccRet};
+ true ->
+ {had_external, State1}
+ end.
+
+apply_fail_reason(FailedSig, FailedBif, FailedContract) ->
+ if
+ (FailedSig orelse FailedBif) andalso (not FailedContract) -> only_sig;
+ FailedContract andalso (not (FailedSig orelse FailedBif)) -> only_contract;
+ true -> both
+ end.
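+
+%% For instance, apply_fail_reason(true, false, false) yields only_sig,
+%% apply_fail_reason(false, false, true) yields only_contract, and a
+%% mix of both kinds of failure yields 'both'.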
+
+get_apply_fail_msg(Fun, Args, ArgTypes, NewArgTypes,
+ Sig, Contract, ContrArgs, State, FailReason, Opaques) ->
+ ArgStrings = format_args(Args, ArgTypes, State),
+ ContractInfo =
+ case Contract of
+ {value, #contract{} = C} ->
+ {dialyzer_contracts:is_overloaded(C),
+ dialyzer_contracts:contract_to_string(C)};
+ none -> {false, none}
+ end,
+ EnumArgTypes = lists:zip(lists:seq(1, length(NewArgTypes)), NewArgTypes),
+ ArgNs = [Arg || {Arg, Type} <- EnumArgTypes, t_is_none(Type)],
+ case state__lookup_name(Fun, State) of
+ {M, F, A} ->
+ case is_opaque_type_test_problem(Fun, Args, NewArgTypes, State) of
+ {yes, Arg, ArgType} ->
+ {opaque_type_test, [atom_to_list(F), ArgStrings,
+ format_arg(Arg), format_type(ArgType, State)]};
+ no ->
+ SigArgs = t_fun_args(Sig),
+ BadOpaque =
+ opaque_problems([SigArgs, ContrArgs], ArgTypes, Opaques, ArgNs),
+ %% In fact *both* 'call_with_opaque' and
+ %% 'call_without_opaque' are possible.
+ case lists:keyfind(decl, 1, BadOpaque) of
+ {decl, BadArgs} ->
+ %% a structured term is used where an opaque is expected
+ ExpectedTriples =
+ case FailReason of
+ only_sig -> expected_arg_triples(BadArgs, SigArgs, State);
+ _ -> expected_arg_triples(BadArgs, ContrArgs, State)
+ end,
+ {call_without_opaque, [M, F, ArgStrings, ExpectedTriples]};
+ false ->
+ case lists:keyfind(use, 1, BadOpaque) of
+ {use, BadArgs} ->
+ %% an opaque term is used where a structured term is expected
+ ExpectedArgs =
+ case FailReason of
+ only_sig -> SigArgs;
+ _ -> ContrArgs
+ end,
+ {call_with_opaque, [M, F, ArgStrings, BadArgs, ExpectedArgs]};
+ false ->
+ case
+ erl_bif_types:opaque_args(M, F, A, ArgTypes, Opaques)
+ of
+ [] -> %% there is a structured term clash in some argument
+ {call, [M, F, ArgStrings,
+ ArgNs, FailReason,
+ format_sig_args(Sig, State),
+ format_type(t_fun_range(Sig), State),
+ ContractInfo]};
+ Ns ->
+ {call_with_opaque, [M, F, ArgStrings, Ns, ContrArgs]}
+ end
+ end
+ end
+ end;
+ Label when is_integer(Label) ->
+ {apply, [ArgStrings,
+ ArgNs, FailReason,
+ format_sig_args(Sig, State),
+ format_type(t_fun_range(Sig), State),
+ ContractInfo]}
+ end.
+
+%% -> [{decl | use, [ArgN]}] where [ArgN] is a non-empty list of
+%% arguments containing unknown opaque types; 'decl' and 'use'
+%% correspond to ElementI being 1 or 2, respectively.
+opaque_problems(ContractOrSigList, ArgTypes, Opaques, ArgNs) ->
+ ArgElementList = find_unknown(ContractOrSigList, ArgTypes, Opaques, ArgNs),
+ F = fun(1) -> decl; (2) -> use end,
+ [{F(ElementI), lists:usort([ArgN || {ArgN, EI} <- ArgElementList,
+ EI =:= ElementI])} ||
+ ElementI <- lists:usort([EI || {_, EI} <- ArgElementList])].
+
+%% -> [{ArgN, ElementI}] where ElementI = 1 means there is an unknown
+%% opaque type in argument ArgN of the contract/signature,
+%% and ElementI = 2 means that there is an unknown opaque type in
+%% argument ArgN of the (current) argument types.
+find_unknown(ContractOrSigList, ArgTypes, Opaques, NoneArgNs) ->
+ ArgNs = lists:seq(1, length(ArgTypes)),
+ [{ArgN, ElementI} ||
+ ContractOrSig <- ContractOrSigList,
+ {E1, E2, ArgN} <- lists:zip3(ContractOrSig, ArgTypes, ArgNs),
+ lists:member(ArgN, NoneArgNs),
+ ElementI <- erl_types:t_find_unknown_opaque(E1, E2, Opaques)].
+
+is_opaque_type_test_problem(Fun, Args, ArgTypes, State) ->
+ case Fun of
+ {erlang, FN, 1} when FN =:= is_atom; FN =:= is_boolean;
+ FN =:= is_binary; FN =:= is_bitstring;
+ FN =:= is_float; FN =:= is_function;
+ FN =:= is_integer; FN =:= is_list;
+ FN =:= is_number; FN =:= is_pid; FN =:= is_port;
+ FN =:= is_reference; FN =:= is_tuple;
+ FN =:= is_map ->
+ type_test_opaque_arg(Args, ArgTypes, State#state.opaques);
+ {erlang, FN, 2} when FN =:= is_function ->
+ type_test_opaque_arg(Args, ArgTypes, State#state.opaques);
+ _ -> no
+ end.
+
+type_test_opaque_arg([], [], _Opaques) ->
+ no;
+type_test_opaque_arg([Arg|Args], [ArgType|ArgTypes], Opaques) ->
+ case erl_types:t_has_opaque_subtype(ArgType, Opaques) of
+ true -> {yes, Arg, ArgType};
+ false -> type_test_opaque_arg(Args, ArgTypes, Opaques)
+ end.
+
+expected_arg_triples(ArgNs, ArgTypes, State) ->
+ [begin
+ Arg = lists:nth(N, ArgTypes),
+ {N, Arg, format_type(Arg, State)}
+ end || N <- ArgNs].
+
+add_bif_warnings({erlang, Op, 2}, [T1, T2] = Ts, Tree, State)
+ when Op =:= '=:='; Op =:= '==' ->
+ Opaques = State#state.opaques,
+ Inf = t_inf(T1, T2, Opaques),
+ case
+ t_is_none(Inf) andalso (not any_none(Ts))
+ andalso (not is_int_float_eq_comp(T1, Op, T2, Opaques))
+ of
+ true ->
+ %% Give priority to opaque warning (as usual).
+ case erl_types:t_find_unknown_opaque(T1, T2, Opaques) of
+ [] ->
+ Args = comp_format_args([], T1, Op, T2, State),
+ state__add_warning(State, ?WARN_MATCHING, Tree, {exact_eq, Args});
+ Ns ->
+ Args = comp_format_args(Ns, T1, Op, T2, State),
+ state__add_warning(State, ?WARN_OPAQUE, Tree, {opaque_eq, Args})
+ end;
+ false ->
+ State
+ end;
+add_bif_warnings({erlang, Op, 2}, [T1, T2] = Ts, Tree, State)
+ when Op =:= '=/='; Op =:= '/=' ->
+ Opaques = State#state.opaques,
+ case
+ (not any_none(Ts))
+ andalso (not is_int_float_eq_comp(T1, Op, T2, Opaques))
+ of
+ true ->
+ case erl_types:t_find_unknown_opaque(T1, T2, Opaques) of
+ [] -> State;
+ Ns ->
+ Args = comp_format_args(Ns, T1, Op, T2, State),
+ state__add_warning(State, ?WARN_OPAQUE, Tree, {opaque_neq, Args})
+ end;
+ false ->
+ State
+ end;
+add_bif_warnings(_, _, _, State) ->
+ State.
+
+is_int_float_eq_comp(T1, Op, T2, Opaques) ->
+ (Op =:= '==' orelse Op =:= '/=') andalso
+ ((erl_types:t_is_float(T1, Opaques)
+ andalso t_is_integer(T2, Opaques)) orelse
+ (t_is_integer(T1, Opaques)
+ andalso erl_types:t_is_float(T2, Opaques))).
+
+comp_format_args([1|_], T1, Op, T2, State) ->
+ [format_type(T2, State), Op, format_type(T1, State)];
+comp_format_args(_, T1, Op, T2, State) ->
+ [format_type(T1, State), Op, format_type(T2, State)].
+
+%%----------------------------------------
+
+handle_bitstr(Tree, Map, State) ->
+ %% Construction of binaries.
+ Size = cerl:bitstr_size(Tree),
+ Val = cerl:bitstr_val(Tree),
+ BitstrType = cerl:concrete(cerl:bitstr_type(Tree)),
+ {State1, Map1, SizeType0} = traverse(Size, Map, State),
+ {State2, Map2, ValType0} = traverse(Val, Map1, State1),
+ case cerl:bitstr_bitsize(Tree) of
+ BitSz when BitSz =:= all orelse BitSz =:= utf ->
+ ValType =
+ case BitSz of
+ all ->
+ true = (BitstrType =:= binary),
+ t_inf(ValType0, t_bitstr());
+ utf ->
+ true = lists:member(BitstrType, [utf8, utf16, utf32]),
+ t_inf(ValType0, t_integer())
+ end,
+ Map3 = enter_type(Val, ValType, Map2),
+ case t_is_none(ValType) of
+ true ->
+ Msg = {bin_construction, ["value",
+ format_cerl(Val), format_cerl(Tree),
+ format_type(ValType0, State2)]},
+ State3 = state__add_warning(State2, ?WARN_BIN_CONSTRUCTION, Val, Msg),
+ {State3, Map3, t_none()};
+ false ->
+ {State2, Map3, t_bitstr()}
+ end;
+ BitSz when is_integer(BitSz) orelse BitSz =:= any ->
+ SizeType = t_inf(SizeType0, t_non_neg_integer()),
+ ValType =
+ case BitstrType of
+ binary -> t_inf(ValType0, t_bitstr());
+ float -> t_inf(ValType0, t_number());
+ integer -> t_inf(ValType0, t_integer())
+ end,
+ case any_none([SizeType, ValType]) of
+ true ->
+ {Msg, Offending} =
+ case t_is_none(SizeType) of
+ true ->
+ {{bin_construction,
+ ["size", format_cerl(Size), format_cerl(Tree),
+ format_type(SizeType0, State2)]},
+ Size};
+ false ->
+ {{bin_construction,
+ ["value", format_cerl(Val), format_cerl(Tree),
+ format_type(ValType0, State2)]},
+ Val}
+ end,
+ State3 = state__add_warning(State2, ?WARN_BIN_CONSTRUCTION,
+ Offending, Msg),
+ {State3, Map2, t_none()};
+ false ->
+ UnitVal = cerl:concrete(cerl:bitstr_unit(Tree)),
+ Opaques = State2#state.opaques,
+ NumberVals = t_number_vals(SizeType, Opaques),
+ {State3, Type} =
+ case t_contains_opaque(SizeType, Opaques) of
+ true ->
+ Msg = {opaque_size, [format_type(SizeType, State2),
+ format_cerl(Size)]},
+ {state__add_warning(State2, ?WARN_OPAQUE, Size, Msg),
+ t_none()};
+ false ->
+ case NumberVals of
+ [OneSize] -> {State2, t_bitstr(0, OneSize * UnitVal)};
+ unknown -> {State2, t_bitstr()};
+ _ ->
+ MinSize = erl_types:number_min(SizeType, Opaques),
+ {State2, t_bitstr(UnitVal, UnitVal * MinSize)}
+ end
+ end,
+ Map3 = enter_type_lists([Val, Size, Tree],
+ [ValType, SizeType, Type], Map2),
+ {State3, Map3, Type}
+ end
+ end.
+
+%%----------------------------------------
+
+handle_call(Tree, Map, State) ->
+ M = cerl:call_module(Tree),
+ F = cerl:call_name(Tree),
+ Args = cerl:call_args(Tree),
+ MFAList = [M, F|Args],
+ {State1, Map1, [MType0, FType0|As]} = traverse_list(MFAList, Map, State),
+ Opaques = State#state.opaques,
+ MType = t_inf(t_module(), MType0, Opaques),
+ FType = t_inf(t_atom(), FType0, Opaques),
+ Map2 = enter_type_lists([M, F], [MType, FType], Map1),
+ MOpaque = t_is_none(MType) andalso (not t_is_none(MType0)),
+ FOpaque = t_is_none(FType) andalso (not t_is_none(FType0)),
+ case any_none([MType, FType|As]) of
+ true ->
+ State2 =
+ if
+ MOpaque -> % This is a problem we just detected; not a known one
+ MS = format_cerl(M),
+ case t_is_none(t_inf(t_module(), MType0)) of
+ true ->
+ Msg = {app_call, [MS, format_cerl(F),
+ format_args(Args, As, State1),
+ MS, format_type(t_module(), State1),
+ format_type(MType0, State1)]},
+ state__add_warning(State1, ?WARN_FAILING_CALL, Tree, Msg);
+ false ->
+ Msg = {opaque_call, [MS, format_cerl(F),
+ format_args(Args, As, State1),
+ MS, format_type(MType0, State1)]},
+ state__add_warning(State1, ?WARN_FAILING_CALL, Tree, Msg)
+ end;
+ FOpaque ->
+ FS = format_cerl(F),
+ case t_is_none(t_inf(t_atom(), FType0)) of
+ true ->
+ Msg = {app_call, [format_cerl(M), FS,
+ format_args(Args, As, State1),
+ FS, format_type(t_atom(), State1),
+ format_type(FType0, State1)]},
+ state__add_warning(State1, ?WARN_FAILING_CALL, Tree, Msg);
+ false ->
+ Msg = {opaque_call, [format_cerl(M), FS,
+ format_args(Args, As, State1),
+ FS, format_type(FType0, State1)]},
+ state__add_warning(State1, ?WARN_FAILING_CALL, Tree, Msg)
+ end;
+ true -> State1
+ end,
+ {State2, Map2, t_none()};
+ false ->
+ case t_is_atom(MType) of
+ true ->
+ %% XXX: Consider doing this for all combinations of MF
+ case {t_atom_vals(MType), t_atom_vals(FType)} of
+ {[MAtom], [FAtom]} ->
+ FunInfo = [{remote, state__fun_info({MAtom, FAtom, length(Args)},
+ State1)}],
+ handle_apply_or_call(FunInfo, Args, As, Map2, Tree, State1);
+ {_MAtoms, _FAtoms} ->
+ {State1, Map2, t_any()}
+ end;
+ false ->
+ {State1, Map2, t_any()}
+ end
+ end.
+
+%%----------------------------------------
+
+handle_case(Tree, Map, State) ->
+ Arg = cerl:case_arg(Tree),
+ Clauses = filter_match_fail(cerl:case_clauses(Tree)),
+ {State1, Map1, ArgType} = SMA = traverse(Arg, Map, State),
+ case t_is_none_or_unit(ArgType) of
+ true -> SMA;
+ false ->
+ State2 =
+ case is_race_analysis_enabled(State) of
+ true ->
+ {RaceList, RaceListSize} = get_race_list_and_size(State1),
+ state__renew_race_list([beg_case|RaceList],
+ RaceListSize + 1, State1);
+ false -> State1
+ end,
+ Map2 = join_maps_begin(Map1),
+ {MapList, State3, Type} =
+ handle_clauses(Clauses, Arg, ArgType, ArgType, State2,
+ [], Map2, [], []),
+ Map3 = join_maps_end(MapList, Map2),
+ debug_pp_map(Map3),
+ {State3, Map3, Type}
+ end.
+
+%%----------------------------------------
+
+handle_cons(Tree, Map, State) ->
+ Hd = cerl:cons_hd(Tree),
+ Tl = cerl:cons_tl(Tree),
+ {State1, Map1, HdType} = traverse(Hd, Map, State),
+ {State2, Map2, TlType} = traverse(Tl, Map1, State1),
+ State3 =
+ case t_is_none(t_inf(TlType, t_list(), State2#state.opaques)) of
+ true ->
+ Msg = {improper_list_constr, [format_type(TlType, State2)]},
+ state__add_warning(State2, ?WARN_NON_PROPER_LIST, Tree, Msg);
+ false ->
+ State2
+ end,
+ Type = t_cons(HdType, TlType),
+ {State3, Map2, Type}.
+
+%%----------------------------------------
+
+handle_let(Tree, Map, State) ->
+ IsRaceAnalysisEnabled = is_race_analysis_enabled(State),
+ Arg = cerl:let_arg(Tree),
+ Vars = cerl:let_vars(Tree),
+ {Map0, State0} =
+ case cerl:is_c_var(Arg) of
+ true ->
+ [Var] = Vars,
+ {enter_subst(Var, Arg, Map),
+ case IsRaceAnalysisEnabled of
+ true ->
+ {RaceList, RaceListSize} = get_race_list_and_size(State),
+ state__renew_race_list(
+ [dialyzer_races:let_tag_new(Var, Arg)|RaceList],
+ RaceListSize + 1, State);
+ false -> State
+ end};
+ false -> {Map, State}
+ end,
+ Body = cerl:let_body(Tree),
+ {State1, Map1, ArgTypes} = SMA = traverse(Arg, Map0, State0),
+ State2 =
+ case IsRaceAnalysisEnabled andalso cerl:is_c_call(Arg) of
+ true ->
+ Mod = cerl:call_module(Arg),
+ Name = cerl:call_name(Arg),
+ case cerl:is_literal(Mod) andalso
+ cerl:concrete(Mod) =:= ets andalso
+ cerl:is_literal(Name) andalso
+ cerl:concrete(Name) =:= new of
+ true -> renew_race_public_tables(Vars, State1);
+ false -> State1
+ end;
+ false -> State1
+ end,
+ case t_is_none_or_unit(ArgTypes) of
+ true -> SMA;
+ false ->
+ Map2 = enter_type_lists(Vars, t_to_tlist(ArgTypes), Map1),
+ traverse(Body, Map2, State2)
+ end.
+
+%%----------------------------------------
+
+handle_module(Tree, Map, State) ->
+ %% By not including the variables in scope we can ensure that we
+ %% will get the current function type when using the variables.
+ Defs = cerl:module_defs(Tree),
+ PartFun = fun({_Var, Fun}) ->
+ state__is_escaping(get_label(Fun), State)
+ end,
+ {Defs1, Defs2} = lists:partition(PartFun, Defs),
+ Letrec = cerl:c_letrec(Defs1, cerl:c_int(42)),
+ {State1, Map1, _FunTypes} = traverse(Letrec, Map, State),
+ %% Also add environments for the other top-level functions.
+ VarTypes = [{Var, state__fun_type(Fun, State1)} || {Var, Fun} <- Defs],
+ EnvMap = enter_type_list(VarTypes, Map),
+ FoldFun = fun({_Var, Fun}, AccState) ->
+ state__update_fun_env(Fun, EnvMap, AccState)
+ end,
+ State2 = lists:foldl(FoldFun, State1, Defs2),
+ {State2, Map1, t_any()}.
+
+%%----------------------------------------
+
+handle_receive(Tree, Map, State) ->
+ Clauses = filter_match_fail(cerl:receive_clauses(Tree)),
+ Timeout = cerl:receive_timeout(Tree),
+ State1 =
+ case is_race_analysis_enabled(State) of
+ true ->
+ {RaceList, RaceListSize} = get_race_list_and_size(State),
+ state__renew_race_list([beg_case|RaceList],
+ RaceListSize + 1, State);
+ false -> State
+ end,
+ {MapList, State2, ReceiveType} =
+ handle_clauses(Clauses, ?no_arg, t_any(), t_any(), State1, [], Map,
+ [], []),
+ Map1 = join_maps(MapList, Map),
+ {State3, Map2, TimeoutType} = traverse(Timeout, Map1, State2),
+ Opaques = State3#state.opaques,
+ case (t_is_atom(TimeoutType, Opaques) andalso
+ (t_atom_vals(TimeoutType, Opaques) =:= ['infinity'])) of
+ true ->
+ {State3, Map2, ReceiveType};
+ false ->
+ Action = cerl:receive_action(Tree),
+ {State4, Map3, ActionType} = traverse(Action, Map, State3),
+ Map4 = join_maps([Map3, Map1], Map),
+ Type = t_sup(ReceiveType, ActionType),
+ {State4, Map4, Type}
+ end.
+
+%%----------------------------------------
+
+handle_try(Tree, Map, State) ->
+ Arg = cerl:try_arg(Tree),
+ EVars = cerl:try_evars(Tree),
+ Vars = cerl:try_vars(Tree),
+ Body = cerl:try_body(Tree),
+ Handler = cerl:try_handler(Tree),
+ {State1, Map1, ArgType} = traverse(Arg, Map, State),
+ Map2 = mark_as_fresh(Vars, Map1),
+ {SuccState, SuccMap, SuccType} =
+ case bind_pat_vars(Vars, t_to_tlist(ArgType), [], Map2, State1) of
+ {error, _, _, _, _} ->
+ {State1, map__new(), t_none()};
+ {SuccMap1, VarTypes} ->
+ %% Try to bind the argument. Will only succeed if
+ %% it is a simple structured term.
+ SuccMap2 =
+ case bind_pat_vars_reverse([Arg], [t_product(VarTypes)], [],
+ SuccMap1, State1) of
+ {error, _, _, _, _} -> SuccMap1;
+ {SM, _} -> SM
+ end,
+ traverse(Body, SuccMap2, State1)
+ end,
+ ExcMap1 = mark_as_fresh(EVars, Map),
+ {State2, ExcMap2, HandlerType} = traverse(Handler, ExcMap1, SuccState),
+ TryType = t_sup(SuccType, HandlerType),
+ {State2, join_maps([ExcMap2, SuccMap], Map1), TryType}.
+
+%%----------------------------------------
+
+handle_map(Tree,Map,State) ->
+ Pairs = cerl:map_es(Tree),
+ Arg = cerl:map_arg(Tree),
+ {State1, Map1, ArgType} = traverse(Arg, Map, State),
+ ArgType1 = t_inf(t_map(), ArgType),
+ case t_is_none_or_unit(ArgType1) of
+ true ->
+ {State1, Map1, ArgType1};
+ false ->
+ {State2, Map2, TypePairs, ExactKeys} =
+ traverse_map_pairs(Pairs, Map1, State1, t_none(), [], []),
+ InsertPair = fun({KV,assoc,_},Acc) -> erl_types:t_map_put(KV,Acc);
+ ({KV,exact,KVTree},Acc) ->
+ case t_is_none(T=erl_types:t_map_update(KV,Acc)) of
+ true -> throw({none, Acc, KV, KVTree});
+ false -> T
+ end
+ end,
+ try lists:foldl(InsertPair, ArgType1, TypePairs)
+ of ResT ->
+ BindT = t_map([{K, t_any()} || K <- ExactKeys]),
+ case bind_pat_vars_reverse([Arg], [BindT], [], Map2, State2) of
+ {error, _, _, _, _} -> {State2, Map2, ResT};
+ {Map3, _} -> {State2, Map3, ResT}
+ end
+ catch {none, MapType, {K,_}, KVTree} ->
+ Msg2 = {map_update, [format_type(MapType, State2),
+ format_type(K, State2)]},
+ {state__add_warning(State2, ?WARN_MAP_CONSTRUCTION, KVTree, Msg2),
+ Map2, t_none()}
+ end
+ end.
+
+traverse_map_pairs([], Map, State, _ShadowKeys, PairAcc, KeyAcc) ->
+ {State, Map, lists:reverse(PairAcc), KeyAcc};
+traverse_map_pairs([Pair|Pairs], Map, State, ShadowKeys, PairAcc, KeyAcc) ->
+ Key = cerl:map_pair_key(Pair),
+ Val = cerl:map_pair_val(Pair),
+ Op = cerl:map_pair_op(Pair),
+ {State1, Map1, [K,V]} = traverse_list([Key,Val],Map,State),
+ KeyAcc1 =
+ case cerl:is_literal(Op) andalso cerl:concrete(Op) =:= exact andalso
+ t_is_singleton(K, State#state.opaques) andalso
+ t_is_none(t_inf(ShadowKeys, K)) of
+ true -> [K|KeyAcc];
+ false -> KeyAcc
+ end,
+ traverse_map_pairs(Pairs, Map1, State1, t_sup(K, ShadowKeys),
+ [{{K,V},cerl:concrete(Op),Pair}|PairAcc], KeyAcc1).
+
+%%----------------------------------------
+
+handle_tuple(Tree, Map, State) ->
+ Elements = cerl:tuple_es(Tree),
+ {State1, Map1, EsType} = traverse_list(Elements, Map, State),
+ TupleType = t_tuple(EsType),
+ case t_is_none(TupleType) of
+ true ->
+ {State1, Map1, t_none()};
+ false ->
+ %% Let's find out if this is a record
+ case Elements of
+ [Tag|Left] ->
+ case cerl:is_c_atom(Tag) andalso is_literal_record(Tree) of
+ true ->
+ TagVal = cerl:atom_val(Tag),
+ case state__lookup_record(TagVal, length(Left), State1) of
+ error -> {State1, Map1, TupleType};
+ {ok, RecType} ->
+ InfTupleType = t_inf(RecType, TupleType),
+ case t_is_none(InfTupleType) of
+ true ->
+ RecC = format_type(TupleType, State1),
+ FieldDiffs = format_field_diffs(TupleType, State1),
+ Msg = {record_constr, [RecC, FieldDiffs]},
+ State2 = state__add_warning(State1, ?WARN_MATCHING,
+ Tree, Msg),
+ {State2, Map1, t_none()};
+ false ->
+ case bind_pat_vars(Elements, t_tuple_args(RecType),
+ [], Map1, State1) of
+ {error, bind, ErrorPat, ErrorType, _} ->
+ Msg = {record_constr,
+ [TagVal, format_patterns(ErrorPat),
+ format_type(ErrorType, State1)]},
+ State2 = state__add_warning(State1, ?WARN_MATCHING,
+ Tree, Msg),
+ {State2, Map1, t_none()};
+ {error, opaque, ErrorPat, ErrorType, OpaqueType} ->
+ Msg = {opaque_match,
+ [format_patterns(ErrorPat),
+ format_type(ErrorType, State1),
+ format_type(OpaqueType, State1)]},
+ State2 = state__add_warning(State1, ?WARN_OPAQUE,
+ Tree, Msg),
+ {State2, Map1, t_none()};
+ {Map2, ETypes} ->
+ {State1, Map2, t_tuple(ETypes)}
+ end
+ end
+ end;
+ false ->
+ {State1, Map1, t_tuple(EsType)}
+ end;
+ [] ->
+ {State1, Map1, t_tuple([])}
+ end
+ end.
+
+%%----------------------------------------
+%% Clauses
+%%
+handle_clauses([C|Left], Arg, ArgType, OrigArgType, State, CaseTypes, MapIn,
+ Acc, ClauseAcc) ->
+ IsRaceAnalysisEnabled = is_race_analysis_enabled(State),
+ State1 =
+ case IsRaceAnalysisEnabled of
+ true ->
+ {RaceList, RaceListSize} = get_race_list_and_size(State),
+ state__renew_race_list(
+ [dialyzer_races:beg_clause_new(Arg, cerl:clause_pats(C),
+ cerl:clause_guard(C))|
+ RaceList], RaceListSize + 1,
+ State);
+ false -> State
+ end,
+ {State2, ClauseMap, BodyType, NewArgType} =
+ do_clause(C, Arg, ArgType, OrigArgType, MapIn, State1),
+ {NewClauseAcc, State3} =
+ case IsRaceAnalysisEnabled of
+ true ->
+ {RaceList1, RaceListSize1} = get_race_list_and_size(State2),
+ EndClause = dialyzer_races:end_clause_new(Arg, cerl:clause_pats(C),
+ cerl:clause_guard(C)),
+ {[EndClause|ClauseAcc],
+ state__renew_race_list([EndClause|RaceList1],
+ RaceListSize1 + 1, State2)};
+ false -> {ClauseAcc, State2}
+ end,
+ {NewCaseTypes, NewAcc} =
+ case t_is_none(BodyType) of
+ true -> {CaseTypes, Acc};
+ false -> {[BodyType|CaseTypes], [ClauseMap|Acc]}
+ end,
+ handle_clauses(Left, Arg, NewArgType, OrigArgType, State3,
+ NewCaseTypes, MapIn, NewAcc, NewClauseAcc);
+handle_clauses([], _Arg, _ArgType, _OrigArgType, State, CaseTypes, _MapIn, Acc,
+ ClauseAcc) ->
+ State1 =
+ case is_race_analysis_enabled(State) of
+ true ->
+ {RaceList, RaceListSize} = get_race_list_and_size(State),
+ state__renew_race_list(
+ [dialyzer_races:end_case_new(ClauseAcc)|RaceList],
+ RaceListSize + 1, State);
+ false -> State
+ end,
+ {lists:reverse(Acc), State1, t_sup(CaseTypes)}.
+
+do_clause(C, Arg, ArgType0, OrigArgType, Map, State) ->
+ Pats = cerl:clause_pats(C),
+ Guard = cerl:clause_guard(C),
+ Body = cerl:clause_body(C),
+ State1 =
+ case is_race_analysis_enabled(State) of
+ true ->
+ state__renew_fun_args(Pats, State);
+ false -> State
+ end,
+ Map0 = mark_as_fresh(Pats, Map),
+ Map1 = if Arg =:= ?no_arg -> Map0;
+ true -> bind_subst(Arg, Pats, Map0)
+ end,
+ BindRes =
+ case t_is_none(ArgType0) of
+ true ->
+ {error, bind, Pats, ArgType0, ArgType0};
+ false ->
+ ArgTypes =
+ case t_is_any(ArgType0) of
+ true -> [ArgType0 || _ <- Pats];
+ false -> t_to_tlist(ArgType0)
+ end,
+ bind_pat_vars(Pats, ArgTypes, [], Map1, State1)
+ end,
+ case BindRes of
+ {error, ErrorType, NewPats, Type, OpaqueTerm} ->
+ ?debug("Failed binding pattern: ~s\nto ~s\n",
+ [cerl_prettypr:format(C), format_type(ArgType0, State1)]),
+ case state__warning_mode(State1) of
+ false ->
+ {State1, Map, t_none(), ArgType0};
+ true ->
+ {Msg, Force} =
+ case t_is_none(ArgType0) of
+ true ->
+ PatString = format_patterns(Pats),
+ PatTypes = [PatString, format_type(OrigArgType, State1)],
+ %% See if this is covered by an earlier clause or if it
+ %% simply cannot match
+ OrigArgTypes =
+ case t_is_any(OrigArgType) of
+ true -> Any = t_any(), [Any || _ <- Pats];
+ false -> t_to_tlist(OrigArgType)
+ end,
+ Tag =
+ case bind_pat_vars(Pats, OrigArgTypes, [], Map1, State1) of
+ {error, bind, _, _, _} -> pattern_match;
+ {error, record, _, _, _} -> record_match;
+ {error, opaque, _, _, _} -> opaque_match;
+ {_, _} -> pattern_match_cov
+ end,
+ {{Tag, PatTypes}, false};
+ false ->
+ %% Try to find out if this is a default clause in a list
+ %% comprehension and suppress this. A real Hack(tm)
+ Force0 =
+ case is_compiler_generated(cerl:get_ann(C)) of
+ true ->
+ case Pats of
+ [Pat] ->
+ case cerl:is_c_cons(Pat) of
+ true ->
+ not (cerl:is_c_var(cerl:cons_hd(Pat)) andalso
+ cerl:is_c_var(cerl:cons_tl(Pat)) andalso
+ cerl:is_literal(Guard) andalso
+ (cerl:concrete(Guard) =:= true));
+ false ->
+ true
+ end;
+ [Pat0, Pat1] -> % binary comprehension
+ case cerl:is_c_cons(Pat0) of
+ true ->
+ not (cerl:is_c_var(cerl:cons_hd(Pat0)) andalso
+ cerl:is_c_var(cerl:cons_tl(Pat0)) andalso
+ cerl:is_c_var(Pat1) andalso
+ cerl:is_literal(Guard) andalso
+ (cerl:concrete(Guard) =:= true));
+ false ->
+ true
+ end;
+ _ -> true
+ end;
+ false ->
+ true
+ end,
+ PatString =
+ case ErrorType of
+ bind -> format_patterns(Pats);
+ record -> format_patterns(NewPats);
+ opaque -> format_patterns(NewPats)
+ end,
+ PatTypes = case ErrorType of
+ bind -> [PatString, format_type(ArgType0, State1)];
+ record -> [PatString, format_type(Type, State1)];
+ opaque -> [PatString, format_type(Type, State1),
+ format_type(OpaqueTerm, State1)]
+ end,
+ FailedTag = case ErrorType of
+ bind -> pattern_match;
+ record -> record_match;
+ opaque -> opaque_match
+ end,
+ {{FailedTag, PatTypes}, Force0}
+ end,
+ WarnType = case Msg of
+ {opaque_match, _} -> ?WARN_OPAQUE;
+ {pattern_match, _} -> ?WARN_MATCHING;
+ {record_match, _} -> ?WARN_MATCHING;
+ {pattern_match_cov, _} -> ?WARN_MATCHING
+ end,
+ {state__add_warning(State1, WarnType, C, Msg, Force),
+ Map, t_none(), ArgType0}
+ end;
+ {Map2, PatTypes} ->
+ Map3 =
+ case Arg =:= ?no_arg of
+ true -> Map2;
+ false ->
+ %% Try to bind the argument. Will only succeed if
+ %% it is a simple structured term.
+ case bind_pat_vars_reverse([Arg], [t_product(PatTypes)],
+ [], Map2, State1) of
+ {error, _, _, _, _} -> Map2;
+ {NewMap, _} -> NewMap
+ end
+ end,
+ NewArgType =
+ case Arg =:= ?no_arg of
+ true -> ArgType0;
+ false ->
+ GenType = dialyzer_typesig:get_safe_underapprox(Pats, Guard),
+ t_subtract(t_product(t_to_tlist(ArgType0)), GenType)
+ end,
+ case bind_guard(Guard, Map3, State1) of
+ {error, Reason} ->
+ ?debug("Failed guard: ~s\n",
+ [cerl_prettypr:format(C, [{hook, cerl_typean:pp_hook()}])]),
+ PatString = format_patterns(Pats),
+ DefaultMsg =
+ case Pats =:= [] of
+ true -> {guard_fail, []};
+ false ->
+ {guard_fail_pat, [PatString, format_type(ArgType0, State1)]}
+ end,
+ State2 =
+ case Reason of
+ none -> state__add_warning(State1, ?WARN_MATCHING, C, DefaultMsg);
+ {FailGuard, Msg} ->
+ case is_compiler_generated(cerl:get_ann(FailGuard)) of
+ false ->
+ WarnType = case Msg of
+ {guard_fail, _} -> ?WARN_MATCHING;
+ {neg_guard_fail, _} -> ?WARN_MATCHING;
+ {opaque_guard, _} -> ?WARN_OPAQUE
+ end,
+ state__add_warning(State1, WarnType, FailGuard, Msg);
+ true ->
+ state__add_warning(State1, ?WARN_MATCHING, C, Msg)
+ end
+ end,
+ {State2, Map, t_none(), NewArgType};
+ Map4 ->
+ {RetState, RetMap, BodyType} = traverse(Body, Map4, State1),
+ {RetState, RetMap, BodyType, NewArgType}
+ end
+ end.
+
+bind_subst(Arg, Pats, Map) ->
+ case cerl:type(Arg) of
+ values ->
+ bind_subst_list(cerl:values_es(Arg), Pats, Map);
+ var ->
+ [Pat] = Pats,
+ enter_subst(Arg, Pat, Map);
+ _ ->
+ Map
+ end.
+
+bind_subst_list([Arg|ArgLeft], [Pat|PatLeft], Map) ->
+ NewMap =
+ case {cerl:type(Arg), cerl:type(Pat)} of
+ {var, var} -> enter_subst(Arg, Pat, Map);
+ {var, alias} -> enter_subst(Arg, cerl:alias_pat(Pat), Map);
+ {literal, literal} -> Map;
+ {T, T} -> bind_subst_list(lists:flatten(cerl:subtrees(Arg)),
+ lists:flatten(cerl:subtrees(Pat)),
+ Map);
+ _ -> Map
+ end,
+ bind_subst_list(ArgLeft, PatLeft, NewMap);
+bind_subst_list([], [], Map) ->
+ Map.
+
+%%----------------------------------------
+%% Patterns
+%%
+
+bind_pat_vars(Pats, Types, Acc, Map, State) ->
+ try
+ bind_pat_vars(Pats, Types, Acc, Map, State, false)
+ catch
+ throw:Error ->
+ %% Error = {error, bind | opaque | record, ErrorPats, ErrorType, OpaqueTerm}
+ Error
+ end.
+
+bind_pat_vars_reverse(Pats, Types, Acc, Map, State) ->
+ try
+ bind_pat_vars(Pats, Types, Acc, Map, State, true)
+ catch
+ throw:Error ->
+ %% Error = {error, bind | opaque | record, ErrorPats, ErrorType, OpaqueTerm}
+ Error
+ end.
+
+bind_pat_vars([Pat|PatLeft], [Type|TypeLeft], Acc, Map, State, Rev) ->
+ ?debug("Binding pat: ~w to ~s\n", [cerl:type(Pat), format_type(Type, State)]
+),
+ Opaques = State#state.opaques,
+ {NewMap, TypeOut} =
+ case cerl:type(Pat) of
+ alias ->
+ %% Map patterns are more permissive than the type of their literal. We
+ %% must unfold AliasPat if it is a literal.
+ AliasPat = dialyzer_utils:refold_pattern(cerl:alias_pat(Pat)),
+ Var = cerl:alias_var(Pat),
+ Map1 = enter_subst(Var, AliasPat, Map),
+ {Map2, [PatType]} = bind_pat_vars([AliasPat], [Type], [],
+ Map1, State, Rev),
+ {enter_type(Var, PatType, Map2), PatType};
+ binary ->
+ %% Cannot bind the binary if we are in reverse match since
+ %% binary patterns and binary construction are not symmetric.
+ case Rev of
+ true -> {Map, t_bitstr()};
+ false ->
+ BinType = t_inf(t_bitstr(), Type, Opaques),
+ case t_is_none(BinType) of
+ true ->
+ case t_find_opaque_mismatch(t_bitstr(), Type, Opaques) of
+ {ok, T1, T2} ->
+ bind_error([Pat], T1, T2, opaque);
+ error ->
+ bind_error([Pat], Type, t_none(), bind)
+ end;
+ false ->
+ Segs = cerl:binary_segments(Pat),
+ {Map1, SegTypes} = bind_bin_segs(Segs, BinType, Map, State),
+ {Map1, t_bitstr_concat(SegTypes)}
+ end
+ end;
+ cons ->
+ Cons = t_inf(Type, t_cons(), Opaques),
+ case t_is_none(Cons) of
+ true ->
+ bind_opaque_pats(t_cons(), Type, Pat, State);
+ false ->
+ {Map1, [HdType, TlType]} =
+ bind_pat_vars([cerl:cons_hd(Pat), cerl:cons_tl(Pat)],
+ [t_cons_hd(Cons, Opaques),
+ t_cons_tl(Cons, Opaques)],
+ [], Map, State, Rev),
+ {Map1, t_cons(HdType, TlType)}
+ end;
+ literal ->
+ Pat0 = dialyzer_utils:refold_pattern(Pat),
+ case cerl:is_literal(Pat0) of
+ true ->
+ Literal = literal_type(Pat),
+ case t_is_none(t_inf(Literal, Type, Opaques)) of
+ true ->
+ bind_opaque_pats(Literal, Type, Pat, State);
+ false -> {Map, Literal}
+ end;
+ false ->
+ %% Retry with the unfolded pattern
+ {Map1, [PatType]}
+ = bind_pat_vars([Pat0], [Type], [], Map, State, Rev),
+ {Map1, PatType}
+ end;
+ map ->
+ MapT = t_inf(Type, t_map(), Opaques),
+ case t_is_none(MapT) of
+ true ->
+ bind_opaque_pats(t_map(), Type, Pat, State);
+ false ->
+ case Rev of
+ %% TODO: Reverse matching (propagating a matched subset back to a value)
+ true -> {Map, MapT};
+ false ->
+ FoldFun =
+ fun(Pair, {MapAcc, ListAcc}) ->
+ %% Only exact (:=) can appear in patterns
+ exact = cerl:concrete(cerl:map_pair_op(Pair)),
+ Key = cerl:map_pair_key(Pair),
+ KeyType =
+ case cerl:type(Key) of
+ var ->
+ case state__lookup_type_for_letrec(Key, State) of
+ error -> lookup_type(Key, MapAcc);
+ {ok, RecType} -> RecType
+ end;
+ literal ->
+ literal_type(Key)
+ end,
+ Bind = erl_types:t_map_get(KeyType, MapT),
+ {MapAcc1, [ValType]} =
+ bind_pat_vars([cerl:map_pair_val(Pair)],
+ [Bind], [], MapAcc, State, Rev),
+ case t_is_singleton(KeyType, Opaques) of
+ true -> {MapAcc1, [{KeyType, ValType}|ListAcc]};
+ false -> {MapAcc1, ListAcc}
+ end
+ end,
+ {Map1, Pairs} = lists:foldl(FoldFun, {Map, []}, cerl:map_es(Pat)),
+ {Map1, t_inf(MapT, t_map(Pairs))}
+ end
+ end;
+ tuple ->
+ Es = cerl:tuple_es(Pat),
+ {TypedRecord, Prototype} =
+ case Es of
+ [] -> {false, t_tuple([])};
+ [Tag|Left] ->
+ case cerl:is_c_atom(Tag) andalso is_literal_record(Pat) of
+ true ->
+ TagAtom = cerl:atom_val(Tag),
+ case state__lookup_record(TagAtom, length(Left), State) of
+ error -> {false, t_tuple(length(Es))};
+ {ok, Record} ->
+ [_Head|AnyTail] = [t_any() || _ <- Es],
+ UntypedRecord = t_tuple([t_atom(TagAtom)|AnyTail]),
+ {not t_is_equal(Record, UntypedRecord), Record}
+ end;
+ false -> {false, t_tuple(length(Es))}
+ end
+ end,
+ Tuple = t_inf(Prototype, Type, Opaques),
+ case t_is_none(Tuple) of
+ true ->
+ bind_opaque_pats(Prototype, Type, Pat, State);
+ false ->
+ SubTuples = t_tuple_subtypes(Tuple, Opaques),
+ %% Need to call the top function to get the try-catch wrapper
+ MapJ = join_maps_begin(Map),
+ Results =
+ case Rev of
+ true ->
+ [bind_pat_vars_reverse(Es, t_tuple_args(SubTuple, Opaques),
+ [], MapJ, State)
+ || SubTuple <- SubTuples];
+ false ->
+ [bind_pat_vars(Es, t_tuple_args(SubTuple, Opaques), [],
+ MapJ, State)
+ || SubTuple <- SubTuples]
+ end,
+ case lists:keyfind(opaque, 2, Results) of
+ {error, opaque, _PatList, _Type, Opaque} ->
+ bind_error([Pat], Tuple, Opaque, opaque);
+ false ->
+ case [M || {M, _} <- Results, M =/= error] of
+ [] ->
+ case TypedRecord of
+ true -> bind_error([Pat], Tuple, Prototype, record);
+ false -> bind_error([Pat], Tuple, t_none(), bind)
+ end;
+ Maps ->
+ Map1 = join_maps_end(Maps, MapJ),
+ TupleType = t_sup([t_tuple(EsTypes)
+ || {M, EsTypes} <- Results, M =/= error]),
+ {Map1, TupleType}
+ end
+ end
+ end;
+ values ->
+ Es = cerl:values_es(Pat),
+ {Map1, EsTypes} =
+ bind_pat_vars(Es, t_to_tlist(Type), [], Map, State, Rev),
+ {Map1, t_product(EsTypes)};
+ var ->
+ VarType1 =
+ case state__lookup_type_for_letrec(Pat, State) of
+ error -> lookup_type(Pat, Map);
+ {ok, RecType} -> RecType
+ end,
+ %% Must do inf when binding args to pats. Vars in pats are fresh.
+ VarType2 = t_inf(VarType1, Type, Opaques),
+ case t_is_none(VarType2) of
+ true ->
+ case t_find_opaque_mismatch(VarType1, Type, Opaques) of
+ {ok, T1, T2} ->
+ bind_error([Pat], T1, T2, opaque);
+ error ->
+ bind_error([Pat], Type, t_none(), bind)
+ end;
+ false ->
+ Map1 = enter_type(Pat, VarType2, Map),
+ {Map1, VarType2}
+ end;
+ _Other ->
+ %% Catch all is needed when binding args to pats
+ ?debug("Failed match for ~p\n", [_Other]),
+ bind_error([Pat], Type, t_none(), bind)
+ end,
+ bind_pat_vars(PatLeft, TypeLeft, [TypeOut|Acc], NewMap, State, Rev);
+bind_pat_vars([], [], Acc, Map, _State, _Rev) ->
+ {Map, lists:reverse(Acc)}.
+
+bind_bin_segs(BinSegs, BinType, Map, State) ->
+ bind_bin_segs(BinSegs, BinType, [], Map, State).
+
+bind_bin_segs([Seg|Segs], BinType, Acc, Map, State) ->
+ Val = cerl:bitstr_val(Seg),
+ SegType = cerl:concrete(cerl:bitstr_type(Seg)),
+ UnitVal = cerl:concrete(cerl:bitstr_unit(Seg)),
+ case cerl:bitstr_bitsize(Seg) of
+ all ->
+ binary = SegType, [] = Segs, %% just an assert
+ T = t_inf(t_bitstr(UnitVal, 0), BinType),
+ {Map1, [Type]} = bind_pat_vars([Val], [T], [], Map, State, false),
+ Type1 = remove_local_opaque_types(Type, State#state.opaques),
+ bind_bin_segs(Segs, t_bitstr(0, 0), [Type1|Acc], Map1, State);
+ utf -> % XXX: possibly can be strengthened
+ true = lists:member(SegType, [utf8, utf16, utf32]),
+ {Map1, [_]} = bind_pat_vars([Val], [t_integer()], [], Map, State, false),
+ Type = t_binary(),
+ bind_bin_segs(Segs, BinType, [Type|Acc], Map1, State);
+ BitSz when is_integer(BitSz) orelse BitSz =:= any ->
+ Size = cerl:bitstr_size(Seg),
+ {Map1, [SizeType]} =
+ bind_pat_vars([Size], [t_non_neg_integer()], [], Map, State, false),
+ Opaques = State#state.opaques,
+ NumberVals = t_number_vals(SizeType, Opaques),
+ case t_contains_opaque(SizeType, Opaques) of
+ true -> bind_error([Seg], SizeType, t_none(), opaque);
+ false -> ok
+ end,
+ Type =
+ case NumberVals of
+ [OneSize] -> t_bitstr(0, UnitVal * OneSize);
+ _ -> % 'unknown' too
+ MinSize = erl_types:number_min(SizeType, Opaques),
+ t_bitstr(UnitVal, UnitVal * MinSize)
+ end,
+ ValConstr =
+ case SegType of
+ binary -> Type; %% The same constraints as for the whole bitstr
+ float -> t_float();
+ integer ->
+ case NumberVals of
+ unknown -> t_integer();
+ List ->
+ SizeVal = lists:max(List),
+ Flags = cerl:concrete(cerl:bitstr_flags(Seg)),
+ N = SizeVal * UnitVal,
+ case N >= ?BITS of
+ true ->
+ case lists:member(signed, Flags) of
+ true -> t_from_range(neg_inf, pos_inf);
+ false -> t_from_range(0, pos_inf)
+ end;
+ false ->
+ case lists:member(signed, Flags) of
+ true -> t_from_range(-(1 bsl (N - 1)), 1 bsl (N - 1) - 1);
+ false -> t_from_range(0, 1 bsl N - 1)
+ end
+ end
+ end
+ end,
+ {Map2, [_]} = bind_pat_vars([Val], [ValConstr], [], Map1, State, false),
+ NewBinType = t_bitstr_match(Type, BinType),
+ case t_is_none(NewBinType) of
+ true -> bind_error([Seg], BinType, t_none(), bind);
+ false -> bind_bin_segs(Segs, NewBinType, [Type|Acc], Map2, State)
+ end
+ end;
+bind_bin_segs([], _BinType, Acc, Map, _State) ->
+ {Map, lists:reverse(Acc)}.
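+%% As an example of how bind_bin_segs/5 turns segment sizes into ranges:
+%% a segment such as <<X:8/signed-integer>> constrains X to -128..127,
+%% whereas the unsigned <<X:8/integer>> constrains it to 0..255.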
+
+bind_error(Pats, Type, OpaqueType, Error0) ->
+ Error = case {Error0, Pats} of
+ {bind, [Pat]} ->
+ case is_literal_record(Pat) of
+ true -> record;
+ false -> Error0
+ end;
+ _ -> Error0
+ end,
+ throw({error, Error, Pats, Type, OpaqueType}).
+
+-spec bind_opaque_pats(type(), type(), cerl:c_literal(), state()) ->
+ no_return().
+
+bind_opaque_pats(GenType, Type, Pat, State) ->
+ case t_find_opaque_mismatch(GenType, Type, State#state.opaques) of
+ {ok, T1, T2} ->
+ bind_error([Pat], T1, T2, opaque);
+ error ->
+ bind_error([Pat], Type, t_none(), bind)
+ end.
+
+%%----------------------------------------
+%% Guards
+%%
+
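+%% Guard expressions are evaluated in one of three modes: 'pos' (the
+%% guard is required to evaluate to 'true'), 'neg' (required to evaluate
+%% to 'false') and 'dont_know' (either outcome is possible). The mode
+%% determines how variable types are constrained and when a guard
+%% failure is signalled.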
+bind_guard(Guard, Map, State) ->
+ try bind_guard(Guard, Map, maps:new(), pos, State) of
+ {Map1, _Type} -> Map1
+ catch
+ throw:{fail, Warning} -> {error, Warning};
+ throw:{fatal_fail, Warning} -> {error, Warning}
+ end.
+
+bind_guard(Guard, Map, Env, Eval, State) ->
+ ?debug("Handling ~w guard: ~s\n",
+ [Eval, cerl_prettypr:format(Guard, [{noann, true}])]),
+ case cerl:type(Guard) of
+ binary ->
+ {Map, t_binary()};
+ 'case' ->
+ Arg = cerl:case_arg(Guard),
+ Clauses = cerl:case_clauses(Guard),
+ bind_guard_case_clauses(Arg, Clauses, Map, Env, Eval, State);
+ cons ->
+ Hd = cerl:cons_hd(Guard),
+ Tl = cerl:cons_tl(Guard),
+ {Map1, HdType} = bind_guard(Hd, Map, Env, dont_know, State),
+ {Map2, TlType} = bind_guard(Tl, Map1, Env, dont_know, State),
+ {Map2, t_cons(HdType, TlType)};
+ literal ->
+ {Map, literal_type(Guard)};
+ 'try' ->
+ Arg = cerl:try_arg(Guard),
+ [Var] = cerl:try_vars(Guard),
+ EVars = cerl:try_evars(Guard),
+ %%?debug("Storing: ~w\n", [Var]),
+ Map1 = join_maps_begin(Map),
+ Map2 = mark_as_fresh(EVars, Map1),
+ %% Visit handler first so we know if it should be ignored
+ {{HandlerMap, HandlerType}, HandlerE} =
+ try {bind_guard(cerl:try_handler(Guard), Map2, Env, Eval, State), none}
+ catch throw:HE ->
+ {{Map2, t_none()}, HE}
+ end,
+ BodyEnv = maps:put(get_label(Var), Arg, Env),
+ Wanted = case Eval of pos -> t_atom(true); neg -> t_atom(false);
+ dont_know -> t_any() end,
+ case t_is_none(t_inf(HandlerType, Wanted)) of
+ %% Handler won't save us; pretend it does not exist
+ true -> bind_guard(cerl:try_body(Guard), Map, BodyEnv, Eval, State);
+ false ->
+ {{BodyMap, BodyType}, BodyE} =
+ try {bind_guard(cerl:try_body(Guard), Map1, BodyEnv,
+ Eval, State), none}
+ catch throw:BE ->
+ {{Map1, t_none()}, BE}
+ end,
+ Map3 = join_maps_end([BodyMap, HandlerMap], Map1),
+ case t_is_none(Sup = t_sup(BodyType, HandlerType)) of
+ true ->
+ %% Pick a reason. N.B. We assume that the handler is always
+ %% compiler-generated if the body is; that way, we won't need to
+ %% check.
+ Fatality = case {BodyE, HandlerE} of
+ {{fatal_fail, _}, _} -> fatal_fail;
+ {_, {fatal_fail, _}} -> fatal_fail;
+ _ -> fail
+ end,
+ throw({Fatality,
+ case {BodyE, HandlerE} of
+ {{_, Rsn}, _} when Rsn =/= none -> Rsn;
+ {_, {_,Rsn}} -> Rsn;
+ _ -> none
+ end});
+ false -> {Map3, Sup}
+ end
+ end;
+ tuple ->
+ Es0 = cerl:tuple_es(Guard),
+ {Map1, Es} = bind_guard_list(Es0, Map, Env, dont_know, State),
+ {Map1, t_tuple(Es)};
+ map ->
+ case Eval of
+ dont_know -> handle_guard_map(Guard, Map, Env, State);
+ _PosOrNeg -> {Map, t_none()} %% Map exprs do not produce bools
+ end;
+ 'let' ->
+ Arg = cerl:let_arg(Guard),
+ [Var] = cerl:let_vars(Guard),
+ %%?debug("Storing: ~w\n", [Var]),
+ NewEnv = maps:put(get_label(Var), Arg, Env),
+ bind_guard(cerl:let_body(Guard), Map, NewEnv, Eval, State);
+ values ->
+ Es = cerl:values_es(Guard),
+ List = [bind_guard(V, Map, Env, dont_know, State) || V <- Es],
+ Type = t_product([T || {_, T} <- List]),
+ {Map, Type};
+ var ->
+ ?debug("Looking for var(~w)...", [cerl_trees:get_label(Guard)]),
+ case maps:find(get_label(Guard), Env) of
+ error ->
+ ?debug("Did not find it\n", []),
+ Type = lookup_type(Guard, Map),
+ Constr =
+ case Eval of
+ pos -> t_atom(true);
+ neg -> t_atom(false);
+ dont_know -> Type
+ end,
+ Inf = t_inf(Constr, Type),
+ {enter_type(Guard, Inf, Map), Inf};
+ {ok, Tree} ->
+ ?debug("Found it\n", []),
+ {Map1, Type} = bind_guard(Tree, Map, Env, Eval, State),
+ {enter_type(Guard, Type, Map1), Type}
+ end;
+ call ->
+ handle_guard_call(Guard, Map, Env, Eval, State)
+ end.
+
+handle_guard_call(Guard, Map, Env, Eval, State) ->
+ MFA = {cerl:atom_val(cerl:call_module(Guard)),
+ cerl:atom_val(cerl:call_name(Guard)),
+ cerl:call_arity(Guard)},
+ case MFA of
+ {erlang, F, 1} when F =:= is_atom; F =:= is_boolean;
+ F =:= is_binary; F =:= is_bitstring;
+ F =:= is_float; F =:= is_function;
+ F =:= is_integer; F =:= is_list; F =:= is_map;
+ F =:= is_number; F =:= is_pid; F =:= is_port;
+ F =:= is_reference; F =:= is_tuple ->
+ handle_guard_type_test(Guard, F, Map, Env, Eval, State);
+ {erlang, is_function, 2} ->
+ handle_guard_is_function(Guard, Map, Env, Eval, State);
+ MFA when (MFA =:= {erlang, internal_is_record, 3}) or
+ (MFA =:= {erlang, is_record, 3}) ->
+ handle_guard_is_record(Guard, Map, Env, Eval, State);
+ {erlang, '=:=', 2} ->
+ handle_guard_eqeq(Guard, Map, Env, Eval, State);
+ {erlang, '==', 2} ->
+ handle_guard_eq(Guard, Map, Env, Eval, State);
+ {erlang, 'and', 2} ->
+ handle_guard_and(Guard, Map, Env, Eval, State);
+ {erlang, 'or', 2} ->
+ handle_guard_or(Guard, Map, Env, Eval, State);
+ {erlang, 'not', 1} ->
+ handle_guard_not(Guard, Map, Env, Eval, State);
+ {erlang, Comp, 2} when Comp =:= '<'; Comp =:= '=<';
+ Comp =:= '>'; Comp =:= '>=' ->
+ handle_guard_comp(Guard, Comp, Map, Env, Eval, State);
+ _ ->
+ handle_guard_gen_fun(MFA, Guard, Map, Env, Eval, State)
+ end.
+
+handle_guard_gen_fun({M, F, A}, Guard, Map, Env, Eval, State) ->
+ Args = cerl:call_args(Guard),
+ {Map1, As} = bind_guard_list(Args, Map, Env, dont_know, State),
+ Opaques = State#state.opaques,
+ BifRet = erl_bif_types:type(M, F, A, As, Opaques),
+ case t_is_none(BifRet) of
+ true ->
+ %% Is this an error-bif?
+ case t_is_none(erl_bif_types:type(M, F, A)) of
+ true -> signal_guard_fail(Eval, Guard, As, State);
+ false -> signal_guard_fatal_fail(Eval, Guard, As, State)
+ end;
+ false ->
+ BifArgs = bif_args(M, F, A),
+ Map2 = enter_type_lists(Args, t_inf_lists(BifArgs, As, Opaques), Map1),
+ Ret =
+ case Eval of
+ pos -> t_inf(t_atom(true), BifRet);
+ neg -> t_inf(t_atom(false), BifRet);
+ dont_know -> BifRet
+ end,
+ case t_is_none(Ret) of
+ true ->
+ case Eval =:= pos of
+ true -> signal_guard_fail(Eval, Guard, As, State);
+ false -> throw({fail, none})
+ end;
+ false -> {Map2, Ret}
+ end
+ end.
+
+handle_guard_type_test(Guard, F, Map, Env, Eval, State) ->
+ [Arg] = cerl:call_args(Guard),
+ {Map1, ArgType} = bind_guard(Arg, Map, Env, dont_know, State),
+ case bind_type_test(Eval, F, ArgType, State) of
+ error ->
+ ?debug("Type test: ~w failed\n", [F]),
+ signal_guard_fail(Eval, Guard, [ArgType], State);
+ {ok, NewArgType, Ret} ->
+ ?debug("Type test: ~w succeeded, NewType: ~s, Ret: ~s\n",
+ [F, t_to_string(NewArgType), t_to_string(Ret)]),
+ {enter_type(Arg, NewArgType, Map1), Ret}
+ end.
+
+bind_type_test(Eval, TypeTest, ArgType, State) ->
+ Type = case TypeTest of
+ is_atom -> t_atom();
+ is_boolean -> t_boolean();
+ is_binary -> t_binary();
+ is_bitstring -> t_bitstr();
+ is_float -> t_float();
+ is_function -> t_fun();
+ is_integer -> t_integer();
+ is_list -> t_maybe_improper_list();
+ is_map -> t_map();
+ is_number -> t_number();
+ is_pid -> t_pid();
+ is_port -> t_port();
+ is_reference -> t_reference();
+ is_tuple -> t_tuple()
+ end,
+ case Eval of
+ pos ->
+ Inf = t_inf(Type, ArgType, State#state.opaques),
+ case t_is_none(Inf) of
+ true -> error;
+ false -> {ok, Inf, t_atom(true)}
+ end;
+ neg ->
+ Sub = t_subtract(ArgType, Type),
+ case t_is_none(Sub) of
+ true -> error;
+ false -> {ok, Sub, t_atom(false)}
+ end;
+ dont_know ->
+ {ok, ArgType, t_boolean()}
+ end.
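+%% For example, a 'pos' is_integer/1 test on an argument of type
+%% number() refines the argument to integer(), while the 'neg' case
+%% leaves float() (via t_subtract/2); in 'dont_know' mode the argument
+%% type is not refined at all.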
+
+handle_guard_comp(Guard, Comp, Map, Env, Eval, State) ->
+ Args = cerl:call_args(Guard),
+ [Arg1, Arg2] = Args,
+ {Map1, ArgTypes} = bind_guard_list(Args, Map, Env, dont_know, State),
+ Opaques = State#state.opaques,
+ [Type1, Type2] = ArgTypes,
+ IsInt1 = t_is_integer(Type1, Opaques),
+ IsInt2 = t_is_integer(Type2, Opaques),
+ case {type(Arg1), type(Arg2)} of
+ {{literal, Lit1}, {literal, Lit2}} ->
+ case erlang:Comp(cerl:concrete(Lit1), cerl:concrete(Lit2)) of
+ true when Eval =:= pos -> {Map, t_atom(true)};
+ true when Eval =:= dont_know -> {Map, t_atom(true)};
+ true when Eval =:= neg -> {Map, t_atom(true)};
+ false when Eval =:= pos ->
+ signal_guard_fail(Eval, Guard, ArgTypes, State);
+ false when Eval =:= dont_know -> {Map, t_atom(false)};
+ false when Eval =:= neg -> {Map, t_atom(false)}
+ end;
+ {{literal, Lit1}, var} when IsInt1 andalso IsInt2 andalso (Eval =:= pos) ->
+ case bind_comp_literal_var(Lit1, Arg2, Type2, Comp, Map1, Opaques) of
+ error -> signal_guard_fail(Eval, Guard, ArgTypes, State);
+ {ok, NewMap} -> {NewMap, t_atom(true)}
+ end;
+ {var, {literal, Lit2}} when IsInt1 andalso IsInt2 andalso (Eval =:= pos) ->
+ case bind_comp_literal_var(Lit2, Arg1, Type1, invert_comp(Comp),
+ Map1, Opaques) of
+ error -> signal_guard_fail(Eval, Guard, ArgTypes, State);
+ {ok, NewMap} -> {NewMap, t_atom(true)}
+ end;
+ {_, _} ->
+ handle_guard_gen_fun({erlang, Comp, 2}, Guard, Map, Env, Eval, State)
+ end.
+
+invert_comp('=<') -> '>=';
+invert_comp('<') -> '>';
+invert_comp('>=') -> '=<';
+invert_comp('>') -> '<'.
+
+bind_comp_literal_var(Lit, Var, VarType, CompOp, Map, Opaques) ->
+ LitVal = cerl:concrete(Lit),
+ NewVarType =
+ case t_number_vals(VarType, Opaques) of
+ unknown ->
+ Range =
+ case CompOp of
+ '=<' -> t_from_range(LitVal, pos_inf);
+ '<' -> t_from_range(LitVal + 1, pos_inf);
+ '>=' -> t_from_range(neg_inf, LitVal);
+ '>' -> t_from_range(neg_inf, LitVal - 1)
+ end,
+ t_inf(Range, VarType, Opaques);
+ NumberVals ->
+ NewNumberVals = [X || X <- NumberVals, erlang:CompOp(LitVal, X)],
+ t_integers(NewNumberVals)
+ end,
+ case t_is_none(NewVarType) of
+ true -> error;
+ false -> {ok, enter_type(Var, NewVarType, Map)}
+ end.
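+%% For example, with Eval =:= pos a guard such as X > 5 on an X of type
+%% integer() lets bind_comp_literal_var/6 narrow X to the range 6..+inf
+%% (t_from_range(6, pos_inf) intersected with X's previous type).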
+
+handle_guard_is_function(Guard, Map, Env, Eval, State) ->
+ Args = cerl:call_args(Guard),
+ {Map1, ArgTypes0} = bind_guard_list(Args, Map, Env, dont_know, State),
+ [FunType0, ArityType0] = ArgTypes0,
+ Opaques = State#state.opaques,
+ ArityType = t_inf(ArityType0, t_integer(), Opaques),
+ case t_is_none(ArityType) of
+ true -> signal_guard_fail(Eval, Guard, ArgTypes0, State);
+ false ->
+ FunTypeConstr =
+ case t_number_vals(ArityType, State#state.opaques) of
+ unknown -> t_fun();
+ Vals ->
+ t_sup([t_fun(lists:duplicate(X, t_any()), t_any()) || X <- Vals])
+ end,
+ FunType = t_inf(FunType0, FunTypeConstr, Opaques),
+ case t_is_none(FunType) of
+ true ->
+ case Eval of
+ pos -> signal_guard_fail(Eval, Guard, ArgTypes0, State);
+ neg -> {Map1, t_atom(false)};
+ dont_know -> {Map1, t_atom(false)}
+ end;
+ false ->
+ case Eval of
+ pos -> {enter_type_lists(Args, [FunType, ArityType], Map1),
+ t_atom(true)};
+ neg -> {Map1, t_atom(false)};
+ dont_know -> {Map1, t_boolean()}
+ end
+ end
+ end.
+
+handle_guard_is_record(Guard, Map, Env, Eval, State) ->
+ Args = cerl:call_args(Guard),
+ [Rec, Tag0, Arity0] = Args,
+ Tag = cerl:atom_val(Tag0),
+ Arity = cerl:int_val(Arity0),
+ {Map1, RecType} = bind_guard(Rec, Map, Env, dont_know, State),
+ ArityMin1 = Arity - 1,
+ Opaques = State#state.opaques,
+ Tuple = t_tuple([t_atom(Tag)|lists:duplicate(ArityMin1, t_any())]),
+ case t_is_none(t_inf(Tuple, RecType, Opaques)) of
+ true ->
+ case erl_types:t_has_opaque_subtype(RecType, Opaques) of
+ true ->
+ signal_guard_fail(Eval, Guard,
+ [RecType, t_from_term(Tag),
+ t_from_term(Arity)],
+ State);
+ false ->
+ case Eval of
+ pos -> signal_guard_fail(Eval, Guard,
+ [RecType, t_from_term(Tag),
+ t_from_term(Arity)],
+ State);
+ neg -> {Map1, t_atom(false)};
+ dont_know -> {Map1, t_atom(false)}
+ end
+ end;
+ false ->
+ TupleType =
+ case state__lookup_record(Tag, ArityMin1, State) of
+ error -> Tuple;
+ {ok, Prototype} -> Prototype
+ end,
+ Type = t_inf(TupleType, RecType, State#state.opaques),
+ case t_is_none(Type) of
+ true ->
+ %% No special handling of opaque errors.
+ FArgs = "record " ++ format_type(RecType, State),
+ Msg = {record_matching, [FArgs, Tag]},
+ throw({fail, {Guard, Msg}});
+ false ->
+ case Eval of
+ pos -> {enter_type(Rec, Type, Map1), t_atom(true)};
+ neg -> {Map1, t_atom(false)};
+ dont_know -> {Map1, t_boolean()}
+ end
+ end
+ end.
+
+handle_guard_eq(Guard, Map, Env, Eval, State) ->
+ [Arg1, Arg2] = cerl:call_args(Guard),
+ case {type(Arg1), type(Arg2)} of
+ {{literal, Lit1}, {literal, Lit2}} ->
+ case cerl:concrete(Lit1) =:= cerl:concrete(Lit2) of
+ true ->
+ if
+ Eval =:= pos -> {Map, t_atom(true)};
+ Eval =:= neg ->
+ ArgTypes = [t_from_term(cerl:concrete(Lit1)),
+ t_from_term(cerl:concrete(Lit2))],
+ signal_guard_fail(Eval, Guard, ArgTypes, State);
+ Eval =:= dont_know -> {Map, t_atom(true)}
+ end;
+ false ->
+ if
+ Eval =:= neg -> {Map, t_atom(false)};
+ Eval =:= dont_know -> {Map, t_atom(false)};
+ Eval =:= pos ->
+ ArgTypes = [t_from_term(cerl:concrete(Lit1)),
+ t_from_term(cerl:concrete(Lit2))],
+ signal_guard_fail(Eval, Guard, ArgTypes, State)
+ end
+ end;
+ {{literal, Lit1}, _} when Eval =:= pos ->
+ case cerl:concrete(Lit1) of
+ Atom when is_atom(Atom) ->
+ bind_eqeq_guard_lit_other(Guard, Lit1, Arg2, Map, Env, State);
+ [] ->
+ bind_eqeq_guard_lit_other(Guard, Lit1, Arg2, Map, Env, State);
+ _ ->
+ bind_eq_guard(Guard, Lit1, Arg2, Map, Env, Eval, State)
+ end;
+ {_, {literal, Lit2}} when Eval =:= pos ->
+ case cerl:concrete(Lit2) of
+ Atom when is_atom(Atom) ->
+ bind_eqeq_guard_lit_other(Guard, Lit2, Arg1, Map, Env, State);
+ [] ->
+ bind_eqeq_guard_lit_other(Guard, Lit2, Arg1, Map, Env, State);
+ _ ->
+ bind_eq_guard(Guard, Arg1, Lit2, Map, Env, Eval, State)
+ end;
+ {_, _} ->
+ bind_eq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State)
+ end.
+
+bind_eq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State) ->
+ {Map1, Type1} = bind_guard(Arg1, Map, Env, dont_know, State),
+ {Map2, Type2} = bind_guard(Arg2, Map1, Env, dont_know, State),
+ Opaques = State#state.opaques,
+ case
+ t_is_nil(Type1, Opaques) orelse t_is_nil(Type2, Opaques)
+ orelse t_is_atom(Type1, Opaques) orelse t_is_atom(Type2, Opaques)
+ of
+ true -> bind_eqeq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State);
+ false ->
+ %% XXX. Is this test OK?
+ OpArgs = erl_types:t_find_unknown_opaque(Type1, Type2, Opaques),
+ case OpArgs =:= [] of
+ true ->
+ case Eval of
+ pos -> {Map2, t_atom(true)};
+ neg -> {Map2, t_atom(false)};
+ dont_know -> {Map2, t_boolean()}
+ end;
+ false ->
+ signal_guard_fail(Eval, Guard, [Type1, Type2], State)
+ end
+ end.
+
+handle_guard_eqeq(Guard, Map, Env, Eval, State) ->
+ [Arg1, Arg2] = cerl:call_args(Guard),
+ case {type(Arg1), type(Arg2)} of
+ {{literal, Lit1}, {literal, Lit2}} ->
+ case cerl:concrete(Lit1) =:= cerl:concrete(Lit2) of
+ true ->
+ if Eval =:= neg ->
+ ArgTypes = [t_from_term(cerl:concrete(Lit1)),
+ t_from_term(cerl:concrete(Lit2))],
+ signal_guard_fail(Eval, Guard, ArgTypes, State);
+ Eval =:= pos -> {Map, t_atom(true)};
+ Eval =:= dont_know -> {Map, t_atom(true)}
+ end;
+ false ->
+ if Eval =:= neg -> {Map, t_atom(false)};
+ Eval =:= dont_know -> {Map, t_atom(false)};
+ Eval =:= pos ->
+ ArgTypes = [t_from_term(cerl:concrete(Lit1)),
+ t_from_term(cerl:concrete(Lit2))],
+ signal_guard_fail(Eval, Guard, ArgTypes, State)
+ end
+ end;
+ {{literal, Lit1}, _} when Eval =:= pos ->
+ bind_eqeq_guard_lit_other(Guard, Lit1, Arg2, Map, Env, State);
+ {_, {literal, Lit2}} when Eval =:= pos ->
+ bind_eqeq_guard_lit_other(Guard, Lit2, Arg1, Map, Env, State);
+ {_, _} ->
+ bind_eqeq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State)
+ end.
+
+bind_eqeq_guard(Guard, Arg1, Arg2, Map, Env, Eval, State) ->
+ {Map1, Type1} = bind_guard(Arg1, Map, Env, dont_know, State),
+ {Map2, Type2} = bind_guard(Arg2, Map1, Env, dont_know, State),
+ ?debug("Types are:~s =:= ~s\n", [t_to_string(Type1),
+ t_to_string(Type2)]),
+ Opaques = State#state.opaques,
+ Inf = t_inf(Type1, Type2, Opaques),
+ case t_is_none(Inf) of
+ true ->
+ OpArgs = erl_types:t_find_unknown_opaque(Type1, Type2, Opaques),
+ case OpArgs =:= [] of
+ true ->
+ case Eval of
+ neg -> {Map2, t_atom(false)};
+ dont_know -> {Map2, t_atom(false)};
+ pos -> signal_guard_fail(Eval, Guard, [Type1, Type2], State)
+ end;
+ false ->
+ signal_guard_fail(Eval, Guard, [Type1, Type2], State)
+ end;
+ false ->
+ case Eval of
+ pos ->
+ case {cerl:type(Arg1), cerl:type(Arg2)} of
+ {var, var} ->
+ Map3 = enter_subst(Arg1, Arg2, Map2),
+ Map4 = enter_type(Arg2, Inf, Map3),
+ {Map4, t_atom(true)};
+ {var, _} ->
+ Map3 = enter_type(Arg1, Inf, Map2),
+ {Map3, t_atom(true)};
+ {_, var} ->
+ Map3 = enter_type(Arg2, Inf, Map2),
+ {Map3, t_atom(true)};
+ {_, _} ->
+ {Map2, t_atom(true)}
+ end;
+ neg ->
+ {Map2, t_atom(false)};
+ dont_know ->
+ {Map2, t_boolean()}
+ end
+ end.
+
+bind_eqeq_guard_lit_other(Guard, Arg1, Arg2, Map, Env, State) ->
+ Eval = dont_know,
+ Opaques = State#state.opaques,
+ case cerl:concrete(Arg1) of
+ true ->
+ {_, Type} = MT = bind_guard(Arg2, Map, Env, pos, State),
+ case t_is_any_atom(true, Type, Opaques) of
+ true -> MT;
+ false ->
+ {_, Type0} = bind_guard(Arg2, Map, Env, Eval, State),
+ signal_guard_fail(Eval, Guard, [Type0, t_atom(true)], State)
+ end;
+ false ->
+ {Map1, Type} = bind_guard(Arg2, Map, Env, neg, State),
+ case t_is_any_atom(false, Type, Opaques) of
+ true -> {Map1, t_atom(true)};
+ false ->
+ {_, Type0} = bind_guard(Arg2, Map, Env, Eval, State),
+ signal_guard_fail(Eval, Guard, [Type0, t_atom(false)], State)
+ end;
+ Term ->
+ LitType = t_from_term(Term),
+ {Map1, Type} = bind_guard(Arg2, Map, Env, Eval, State),
+ case t_is_subtype(LitType, Type) of
+ false -> signal_guard_fail(Eval, Guard, [Type, LitType], State);
+ true ->
+ case cerl:is_c_var(Arg2) of
+ true -> {enter_type(Arg2, LitType, Map1), t_atom(true)};
+ false -> {Map1, t_atom(true)}
+ end
+ end
+ end.
+
+handle_guard_and(Guard, Map, Env, Eval, State) ->
+ [Arg1, Arg2] = cerl:call_args(Guard),
+ Opaques = State#state.opaques,
+ case Eval of
+ pos ->
+ {Map1, Type1} = bind_guard(Arg1, Map, Env, Eval, State),
+ case t_is_any_atom(true, Type1, Opaques) of
+ false -> signal_guard_fail(Eval, Guard, [Type1, t_any()], State);
+ true ->
+ {Map2, Type2} = bind_guard(Arg2, Map1, Env, Eval, State),
+ case t_is_any_atom(true, Type2, Opaques) of
+ false -> signal_guard_fail(Eval, Guard, [Type1, Type2], State);
+ true -> {Map2, t_atom(true)}
+ end
+ end;
+ neg ->
+ MapJ = join_maps_begin(Map),
+ {Map1, Type1} =
+ try bind_guard(Arg1, MapJ, Env, neg, State)
+ catch throw:{fail, _} -> bind_guard(Arg2, MapJ, Env, pos, State)
+ end,
+ {Map2, Type2} =
+ try bind_guard(Arg2, MapJ, Env, neg, State)
+ catch throw:{fail, _} -> bind_guard(Arg1, MapJ, Env, pos, State)
+ end,
+ case
+ t_is_any_atom(false, Type1, Opaques)
+ orelse t_is_any_atom(false, Type2, Opaques)
+ of
+ true -> {join_maps_end([Map1, Map2], MapJ), t_atom(false)};
+ false -> signal_guard_fail(Eval, Guard, [Type1, Type2], State)
+ end;
+ dont_know ->
+ MapJ = join_maps_begin(Map),
+ {Map1, Type1} = bind_guard(Arg1, MapJ, Env, dont_know, State),
+ {Map2, Type2} = bind_guard(Arg2, MapJ, Env, dont_know, State),
+ Bool1 = t_inf(Type1, t_boolean()),
+ Bool2 = t_inf(Type2, t_boolean()),
+ case t_is_none(Bool1) orelse t_is_none(Bool2) of
+ true -> throw({fatal_fail, none});
+ false ->
+ NewMap = join_maps_end([Map1, Map2], MapJ),
+ NewType =
+ case {t_atom_vals(Bool1, Opaques), t_atom_vals(Bool2, Opaques)} of
+ {['true'] , ['true'] } -> t_atom(true);
+ {['false'], _ } -> t_atom(false);
+ {_ , ['false']} -> t_atom(false);
+ {unknown , _ } ->
+ signal_guard_fail(Eval, Guard, [Type1, Type2], State);
+ {_ , unknown } ->
+ signal_guard_fail(Eval, Guard, [Type1, Type2], State);
+ {_ , _ } -> t_boolean()
+ end,
+ {NewMap, NewType}
+ end
+ end.
+
+handle_guard_or(Guard, Map, Env, Eval, State) ->
+ [Arg1, Arg2] = cerl:call_args(Guard),
+ Opaques = State#state.opaques,
+ case Eval of
+ pos ->
+ MapJ = join_maps_begin(Map),
+ {Map1, Bool1} =
+ try bind_guard(Arg1, MapJ, Env, pos, State)
+ catch
+ throw:{fail,_} -> bind_guard(Arg1, MapJ, Env, dont_know, State)
+ end,
+ {Map2, Bool2} =
+ try bind_guard(Arg2, MapJ, Env, pos, State)
+ catch
+ throw:{fail,_} -> bind_guard(Arg2, MapJ, Env, dont_know, State)
+ end,
+ case
+ ((t_is_any_atom(true, Bool1, Opaques)
+ andalso t_is_boolean(Bool2, Opaques))
+ orelse
+ (t_is_any_atom(true, Bool2, Opaques)
+ andalso t_is_boolean(Bool1, Opaques)))
+ of
+ true -> {join_maps_end([Map1, Map2], MapJ), t_atom(true)};
+ false -> signal_guard_fail(Eval, Guard, [Bool1, Bool2], State)
+ end;
+ neg ->
+ {Map1, Type1} = bind_guard(Arg1, Map, Env, neg, State),
+ case t_is_any_atom(false, Type1, Opaques) of
+ false -> signal_guard_fail(Eval, Guard, [Type1, t_any()], State);
+ true ->
+ {Map2, Type2} = bind_guard(Arg2, Map1, Env, neg, State),
+ case t_is_any_atom(false, Type2, Opaques) of
+ false -> signal_guard_fail(Eval, Guard, [Type1, Type2], State);
+ true -> {Map2, t_atom(false)}
+ end
+ end;
+ dont_know ->
+ MapJ = join_maps_begin(Map),
+ {Map1, Type1} = bind_guard(Arg1, MapJ, Env, dont_know, State),
+ {Map2, Type2} = bind_guard(Arg2, MapJ, Env, dont_know, State),
+ Bool1 = t_inf(Type1, t_boolean()),
+ Bool2 = t_inf(Type2, t_boolean()),
+ case t_is_none(Bool1) orelse t_is_none(Bool2) of
+ true -> throw({fatal_fail, none});
+ false ->
+ NewMap = join_maps_end([Map1, Map2], MapJ),
+ NewType =
+ case {t_atom_vals(Bool1, Opaques), t_atom_vals(Bool2, Opaques)} of
+ {['false'], ['false']} -> t_atom(false);
+ {['true'] , _ } -> t_atom(true);
+ {_ , ['true'] } -> t_atom(true);
+ {unknown , _ } ->
+ signal_guard_fail(Eval, Guard, [Type1, Type2], State);
+ {_ , unknown } ->
+ signal_guard_fail(Eval, Guard, [Type1, Type2], State);
+ {_ , _ } -> t_boolean()
+ end,
+ {NewMap, NewType}
+ end
+ end.
+
+handle_guard_not(Guard, Map, Env, Eval, State) ->
+ [Arg] = cerl:call_args(Guard),
+ Opaques = State#state.opaques,
+ case Eval of
+ neg ->
+ {Map1, Type} = bind_guard(Arg, Map, Env, pos, State),
+ case t_is_any_atom(true, Type, Opaques) of
+ true -> {Map1, t_atom(false)};
+ false ->
+ {_, Type0} = bind_guard(Arg, Map, Env, Eval, State),
+ signal_guard_fail(Eval, Guard, [Type0], State)
+ end;
+ pos ->
+ {Map1, Type} = bind_guard(Arg, Map, Env, neg, State),
+ case t_is_any_atom(false, Type, Opaques) of
+ true -> {Map1, t_atom(true)};
+ false ->
+ {_, Type0} = bind_guard(Arg, Map, Env, Eval, State),
+ signal_guard_fail(Eval, Guard, [Type0], State)
+ end;
+ dont_know ->
+ {Map1, Type} = bind_guard(Arg, Map, Env, dont_know, State),
+ Bool = t_inf(Type, t_boolean()),
+ case t_is_none(Bool) of
+ true -> throw({fatal_fail, none});
+ false ->
+ case t_atom_vals(Bool, Opaques) of
+ ['true'] -> {Map1, t_atom(false)};
+ ['false'] -> {Map1, t_atom(true)};
+ [_, _] -> {Map1, Bool};
+ unknown -> signal_guard_fail(Eval, Guard, [Type], State)
+ end
+ end
+ end.
+
+bind_guard_list(Guards, Map, Env, Eval, State) ->
+ bind_guard_list(Guards, Map, Env, Eval, State, []).
+
+bind_guard_list([G|Gs], Map, Env, Eval, State, Acc) ->
+ {Map1, T} = bind_guard(G, Map, Env, Eval, State),
+ bind_guard_list(Gs, Map1, Env, Eval, State, [T|Acc]);
+bind_guard_list([], Map, _Env, _Eval, _State, Acc) ->
+ {Map, lists:reverse(Acc)}.
+
+handle_guard_map(Guard, Map, Env, State) ->
+ Pairs = cerl:map_es(Guard),
+ Arg = cerl:map_arg(Guard),
+ {Map1, ArgType0} = bind_guard(Arg, Map, Env, dont_know, State),
+ ArgType1 = t_inf(t_map(), ArgType0),
+ case t_is_none_or_unit(ArgType1) of
+ true -> {Map1, t_none()};
+ false ->
+ {Map2, TypePairs} = bind_guard_map_pairs(Pairs, Map1, Env, State, []),
+ {Map2, lists:foldl(fun({KV,assoc},Acc) -> erl_types:t_map_put(KV,Acc);
+ ({KV,exact},Acc) -> erl_types:t_map_update(KV,Acc)
+ end, ArgType1, TypePairs)}
+ end.
+
+bind_guard_map_pairs([], Map, _Env, _State, PairAcc) ->
+ {Map, lists:reverse(PairAcc)};
+bind_guard_map_pairs([Pair|Pairs], Map, Env, State, PairAcc) ->
+ Key = cerl:map_pair_key(Pair),
+ Val = cerl:map_pair_val(Pair),
+ Op = cerl:map_pair_op(Pair),
+ {Map1, [K,V]} = bind_guard_list([Key,Val],Map,Env,dont_know,State),
+ bind_guard_map_pairs(Pairs, Map1, Env, State,
+ [{{K,V},cerl:concrete(Op)}|PairAcc]).
+
+-type eval() :: 'pos' | 'neg' | 'dont_know'.
+
+-spec signal_guard_fail(eval(), cerl:c_call(), [type()],
+ state()) -> no_return().
+
+signal_guard_fail(Eval, Guard, ArgTypes, State) ->
+ signal_guard_failure(Eval, Guard, ArgTypes, fail, State).
+
+-spec signal_guard_fatal_fail(eval(), cerl:c_call(), [erl_types:erl_type()],
+ state()) -> no_return().
+
+signal_guard_fatal_fail(Eval, Guard, ArgTypes, State) ->
+ signal_guard_failure(Eval, Guard, ArgTypes, fatal_fail, State).
+
+signal_guard_failure(Eval, Guard, ArgTypes, Tag, State) ->
+ Args = cerl:call_args(Guard),
+ F = cerl:atom_val(cerl:call_name(Guard)),
+ {M, F, A} = MFA = {cerl:atom_val(cerl:call_module(Guard)), F, length(Args)},
+ Opaques = State#state.opaques,
+ {Kind, XInfo} =
+ case erl_bif_types:opaque_args(M, F, A, ArgTypes, Opaques) of
+ [] ->
+ {case Eval of
+ neg -> neg_guard_fail;
+ pos -> guard_fail;
+ dont_know -> guard_fail
+ end,
+ []};
+ Ns -> {opaque_guard, [Ns]}
+ end,
+ FArgs =
+ case is_infix_op(MFA) of
+ true ->
+ [ArgType1, ArgType2] = ArgTypes,
+ [Arg1, Arg2] = Args,
+ [format_args_1([Arg1], [ArgType1], State),
+ atom_to_list(F),
+ format_args_1([Arg2], [ArgType2], State)] ++ XInfo;
+ false ->
+ [F, format_args(Args, ArgTypes, State)]
+ end,
+ Msg = {Kind, FArgs},
+ throw({Tag, {Guard, Msg}}).
+
+is_infix_op({erlang, '=:=', 2}) -> true;
+is_infix_op({erlang, '==', 2}) -> true;
+is_infix_op({erlang, '=/=', 2}) -> true;
+is_infix_op({erlang, '/=', 2}) -> true;
+is_infix_op({erlang, '<', 2}) -> true;
+is_infix_op({erlang, '=<', 2}) -> true;
+is_infix_op({erlang, '>', 2}) -> true;
+is_infix_op({erlang, '>=', 2}) -> true;
+is_infix_op({M, F, A}) when is_atom(M), is_atom(F),
+ is_integer(A), 0 =< A, A =< 255 -> false.
+
+bif_args(M, F, A) ->
+ case erl_bif_types:arg_types(M, F, A) of
+ unknown -> lists:duplicate(A, t_any());
+ List -> List
+ end.
+
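+%% Guards containing 'andalso'/'orelse' are compiled to nested 'case'
+%% expressions in Core Erlang; bind_guard_case_clauses/6 handles such a
+%% case by trying each clause against the argument and joining the
+%% resulting maps.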
+bind_guard_case_clauses(Arg, Clauses, Map0, Env, Eval, State) ->
+ Clauses1 = filter_fail_clauses(Clauses),
+ Map = join_maps_begin(Map0),
+ {GenMap, GenArgType} = bind_guard(Arg, Map, Env, dont_know, State),
+ bind_guard_case_clauses(GenArgType, GenMap, Arg, Clauses1, Map, Env, Eval,
+ t_none(), [], State).
+
+filter_fail_clauses([Clause|Left]) ->
+ case (cerl:clause_pats(Clause) =:= []) of
+ true ->
+ Body = cerl:clause_body(Clause),
+ case cerl:is_literal(Body) andalso (cerl:concrete(Body) =:= fail) orelse
+ cerl:is_c_primop(Body) andalso
+ (cerl:atom_val(cerl:primop_name(Body)) =:= match_fail) of
+ true -> filter_fail_clauses(Left);
+ false -> [Clause|filter_fail_clauses(Left)]
+ end;
+ false ->
+ [Clause|filter_fail_clauses(Left)]
+ end;
+filter_fail_clauses([]) ->
+ [].
+
+bind_guard_case_clauses(GenArgType, GenMap, ArgExpr, [Clause|Left],
+ Map, Env, Eval, AccType, AccMaps, State) ->
+ Pats = cerl:clause_pats(Clause),
+ {NewMap0, ArgType} =
+ case Pats of
+ [Pat] ->
+ case cerl:is_literal(Pat) of
+ true ->
+ try
+ case cerl:concrete(Pat) of
+ true -> bind_guard(ArgExpr, Map, Env, pos, State);
+ false -> bind_guard(ArgExpr, Map, Env, neg, State);
+ _ -> {GenMap, GenArgType}
+ end
+ catch
+ throw:{fail, _} -> {none, GenArgType}
+ end;
+ false ->
+ {GenMap, GenArgType}
+ end;
+ _ -> {GenMap, GenArgType}
+ end,
+ NewMap1 =
+ case Pats =:= [] of
+ true -> NewMap0;
+ false ->
+ case t_is_none(ArgType) of
+ true -> none;
+ false ->
+ ArgTypes = case t_is_any(ArgType) of
+ true -> Any = t_any(), [Any || _ <- Pats];
+ false -> t_to_tlist(ArgType)
+ end,
+ case bind_pat_vars(Pats, ArgTypes, [], NewMap0, State) of
+ {error, _, _, _, _} -> none;
+ {PatMap, _PatTypes} -> PatMap
+ end
+ end
+ end,
+ Guard = cerl:clause_guard(Clause),
+ GenPatType = dialyzer_typesig:get_safe_underapprox(Pats, Guard),
+ NewGenArgType = t_subtract(GenArgType, GenPatType),
+ case (NewMap1 =:= none) orelse t_is_none(GenArgType) of
+ true ->
+ bind_guard_case_clauses(NewGenArgType, GenMap, ArgExpr, Left, Map, Env,
+ Eval, AccType, AccMaps, State);
+ false ->
+ {NewAccType, NewAccMaps} =
+ try
+ {NewMap2, GuardType} = bind_guard(Guard, NewMap1, Env, pos, State),
+ case t_is_none(t_inf(t_atom(true), GuardType)) of
+ true -> throw({fail, none});
+ false -> ok
+ end,
+ {NewMap3, CType} = bind_guard(cerl:clause_body(Clause), NewMap2,
+ Env, Eval, State),
+ Opaques = State#state.opaques,
+ case Eval of
+ pos ->
+ case t_is_any_atom(true, CType, Opaques) of
+ true -> ok;
+ false -> throw({fail, none})
+ end;
+ neg ->
+ case t_is_any_atom(false, CType, Opaques) of
+ true -> ok;
+ false -> throw({fail, none})
+ end;
+ dont_know ->
+ ok
+ end,
+ {t_sup(AccType, CType), [NewMap3|AccMaps]}
+ catch
+ throw:{fail, _What} -> {AccType, AccMaps}
+ end,
+ bind_guard_case_clauses(NewGenArgType, GenMap, ArgExpr, Left, Map, Env,
+ Eval, NewAccType, NewAccMaps, State)
+ end;
+bind_guard_case_clauses(_GenArgType, _GenMap, _ArgExpr, [], Map, _Env, _Eval,
+ AccType, AccMaps, _State) ->
+ case t_is_none(AccType) of
+ true -> throw({fail, none});
+ false -> {join_maps_end(AccMaps, Map), AccType}
+ end.
+
+%%% ===========================================================================
+%%%
+%%% Maps and types.
+%%%
+%%% ===========================================================================
+
+map__new() ->
+ #map{}.
+
+%% join_maps_begin pushes 'modified' onto the stack; join_maps_end pops
+%% it off again.
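+%%
+%% An illustrative use, as in the dont_know branch of handle_guard_and/5:
+%%
+%%   MapJ = join_maps_begin(Map),
+%%   {Map1, _} = bind_guard(Arg1, MapJ, Env, dont_know, State),
+%%   {Map2, _} = bind_guard(Arg2, MapJ, Env, dont_know, State),
+%%   NewMap = join_maps_end([Map1, Map2], MapJ)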
+
+join_maps_begin(#map{modified = M, modified_stack = S, ref = Ref} = Map) ->
+ Map#map{ref = make_ref(), modified = [], modified_stack = [{M,Ref} | S]}.
+
+join_maps_end(Maps, MapOut) ->
+ #map{ref = Ref, modified_stack = [{M1,R1} | S]} = MapOut,
+ true = lists:all(fun(M) -> M#map.ref =:= Ref end, Maps), % sanity
+ Keys0 = lists:usort(lists:append([M#map.modified || M <- Maps])),
+ #map{map = Map, subst = Subst} = MapOut,
+ Keys = [Key ||
+ Key <- Keys0,
+ maps:is_key(Key, Map) orelse maps:is_key(Key, Subst)],
+ Out = case Maps of
+ [] -> join_maps(Maps, MapOut);
+ _ -> join_maps(Keys, Maps, MapOut)
+ end,
+ debug_join_check(Maps, MapOut, Out),
+ Out#map{ref = R1,
+ modified = Out#map.modified ++ M1, % duplicates possible
+ modified_stack = S}.
+
+join_maps(Maps, MapOut) ->
+ #map{map = Map, subst = Subst} = MapOut,
+ Keys = ordsets:from_list(maps:keys(Map) ++ maps:keys(Subst)),
+ join_maps(Keys, Maps, MapOut).
+
+join_maps(Keys, Maps, MapOut) ->
+ KTs = join_maps_collect(Keys, Maps, MapOut),
+ lists:foldl(fun({K, T}, M) -> enter_type(K, T, M) end, MapOut, KTs).
+
+join_maps_collect([Key|Left], Maps, MapOut) ->
+ Type = join_maps_one_key(Maps, Key, t_none()),
+ case t_is_equal(lookup_type(Key, MapOut), Type) of
+ true -> join_maps_collect(Left, Maps, MapOut);
+ false -> [{Key, Type} | join_maps_collect(Left, Maps, MapOut)]
+ end;
+join_maps_collect([], _Maps, _MapOut) ->
+ [].
+
+join_maps_one_key([Map|Left], Key, AccType) ->
+ case t_is_any(AccType) of
+ true ->
+ %% We can stop here
+ AccType;
+ false ->
+ join_maps_one_key(Left, Key, t_sup(lookup_type(Key, Map), AccType))
+ end;
+join_maps_one_key([], _Key, AccType) ->
+ AccType.
+
+-ifdef(DEBUG).
+debug_join_check(Maps, MapOut, Out) ->
+ #map{map = Map, subst = Subst} = Out,
+ #map{map = Map2, subst = Subst2} = join_maps(Maps, MapOut),
+ F = fun(D) -> lists:keysort(1, maps:to_list(D)) end,
+ [throw({bug, join_maps}) ||
+ F(Map) =/= F(Map2) orelse F(Subst) =/= F(Subst2)].
+-else.
+debug_join_check(_Maps, _MapOut, _Out) -> ok.
+-endif.
+
+enter_type_lists([Key|KeyTail], [Val|ValTail], Map) ->
+ Map1 = enter_type(Key, Val, Map),
+ enter_type_lists(KeyTail, ValTail, Map1);
+enter_type_lists([], [], Map) ->
+ Map.
+
+enter_type_list([{Key, Val}|Left], Map) ->
+ Map1 = enter_type(Key, Val, Map),
+ enter_type_list(Left, Map1);
+enter_type_list([], Map) ->
+ Map.
+
+enter_type(Key, Val, MS) ->
+ case cerl:is_literal(Key) of
+ true -> MS;
+ false ->
+ case cerl:is_c_values(Key) of
+ true ->
+ Keys = cerl:values_es(Key),
+ case t_is_any(Val) orelse t_is_none(Val) of
+ true ->
+ enter_type_lists(Keys, [Val || _ <- Keys], MS);
+ false ->
+ enter_type_lists(Keys, t_to_tlist(Val), MS)
+ end;
+ false ->
+ #map{map = Map, subst = Subst} = MS,
+ KeyLabel = get_label(Key),
+ case maps:find(KeyLabel, Subst) of
+ {ok, NewKey} ->
+ ?debug("Binding ~p to ~p\n", [KeyLabel, NewKey]),
+ enter_type(NewKey, Val, MS);
+ error ->
+ ?debug("Entering ~p :: ~s\n", [KeyLabel, t_to_string(Val)]),
+ case maps:find(KeyLabel, Map) of
+ {ok, Value} ->
+ case erl_types:t_is_equal(Val, Value) of
+ true -> MS;
+ false -> store_map(KeyLabel, Val, MS)
+ end;
+ error -> store_map(KeyLabel, Val, MS)
+ end
+ end
+ end
+ end.
+
+store_map(Key, Val, #map{map = Map, ref = undefined} = MapRec) ->
+ MapRec#map{map = maps:put(Key, Val, Map)};
+store_map(Key, Val, #map{map = Map, modified = Mod} = MapRec) ->
+ MapRec#map{map = maps:put(Key, Val, Map), modified = [Key | Mod]}.
+
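+%% enter_subst/3 records that Key is an alias for the variable Val:
+%% subsequent enter_type/3 and lookup_type/2 calls on Key are redirected
+%% to Val's label via the 'subst' map. If Val refolds to a literal, Key
+%% is instead bound directly to the literal's type.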
+enter_subst(Key, Val0, #map{subst = Subst} = MS) ->
+ KeyLabel = get_label(Key),
+ Val = dialyzer_utils:refold_pattern(Val0),
+ case cerl:is_literal(Val) of
+ true ->
+ store_map(KeyLabel, literal_type(Val), MS);
+ false ->
+ case cerl:is_c_var(Val) of
+ false -> MS;
+ true ->
+ ValLabel = get_label(Val),
+ case maps:find(ValLabel, Subst) of
+ {ok, NewVal} ->
+ enter_subst(Key, NewVal, MS);
+ error ->
+ if KeyLabel =:= ValLabel -> MS;
+ true ->
+ ?debug("Subst: storing ~p = ~p\n", [KeyLabel, ValLabel]),
+ store_subst(KeyLabel, ValLabel, MS)
+ end
+ end
+ end
+ end.
+
+store_subst(Key, Val, #map{subst = S, ref = undefined} = Map) ->
+ Map#map{subst = maps:put(Key, Val, S)};
+store_subst(Key, Val, #map{subst = S, modified = Mod} = Map) ->
+ Map#map{subst = maps:put(Key, Val, S), modified = [Key | Mod]}.
+
+lookup_type(Key, #map{map = Map, subst = Subst}) ->
+ lookup(Key, Map, Subst, t_none()).
+
+lookup(Key, Map, Subst, AnyNone) ->
+ case cerl:is_literal(Key) of
+ true -> literal_type(Key);
+ false ->
+ Label = get_label(Key),
+ case maps:find(Label, Subst) of
+ {ok, NewKey} -> lookup(NewKey, Map, Subst, AnyNone);
+ error ->
+ case maps:find(Label, Map) of
+ {ok, Val} -> Val;
+ error -> AnyNone
+ end
+ end
+ end.
+
+lookup_fun_sig(Fun, Callgraph, Plt) ->
+ MFAorLabel =
+ case dialyzer_callgraph:lookup_name(Fun, Callgraph) of
+ error -> Fun;
+ {ok, MFA} -> MFA
+ end,
+ dialyzer_plt:lookup(Plt, MFAorLabel).
+
+literal_type(Lit) ->
+ t_from_term(cerl:concrete(Lit)).
+
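+%% mark_as_fresh/2 gives every variable in the given trees the type
+%% any(); bit-syntax size fields and map keys are skipped, since they
+%% refer to values that are already bound.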
+mark_as_fresh([Tree|Left], Map) ->
+ SubTrees1 = lists:append(cerl:subtrees(Tree)),
+ {SubTrees2, Map1} =
+ case cerl:type(Tree) of
+ bitstr ->
+ %% The Size field is not fresh.
+ {SubTrees1 -- [cerl:bitstr_size(Tree)], Map};
+ map_pair ->
+ %% The keys are not fresh
+ {SubTrees1 -- [cerl:map_pair_key(Tree)], Map};
+ var ->
+ {SubTrees1, enter_type(Tree, t_any(), Map)};
+ _ ->
+ {SubTrees1, Map}
+ end,
+ mark_as_fresh(SubTrees2 ++ Left, Map1);
+mark_as_fresh([], Map) ->
+ Map.
+
+-ifdef(DEBUG).
+debug_pp_map(#map{map = Map}=MapRec) ->
+ Keys = maps:keys(Map),
+ io:format("Map:\n", []),
+ lists:foreach(fun (Key) ->
+ io:format("\t~w :: ~s\n",
+ [Key, t_to_string(lookup_type(Key, MapRec))])
+ end, Keys),
+ ok.
+-else.
+debug_pp_map(_Map) -> ok.
+-endif.
+
+%%% ===========================================================================
+%%%
+%%% Utilities
+%%%
+%%% ===========================================================================
+
+get_label(L) when is_integer(L) ->
+ L;
+get_label(T) ->
+ cerl_trees:get_label(T).
+
+t_is_simple(ArgType, State) ->
+ Opaques = State#state.opaques,
+ t_is_atom(ArgType, Opaques) orelse t_is_number(ArgType, Opaques)
+ orelse t_is_port(ArgType, Opaques)
+ orelse t_is_pid(ArgType, Opaques) orelse t_is_reference(ArgType, Opaques)
+ orelse t_is_nil(ArgType, Opaques).
+
+remove_local_opaque_types(Type, Opaques) ->
+ t_unopaque(Type, Opaques).
+
+%% t_is_structured(ArgType) ->
+%% case t_is_nil(ArgType) of
+%% true -> false;
+%% false ->
+%% SType = t_inf(t_sup([t_list(), t_tuple(), t_binary()]), ArgType),
+%% t_is_equal(ArgType, SType)
+%% end.
+
+is_call_to_send(Tree) ->
+ case cerl:is_c_call(Tree) of
+ false -> false;
+ true ->
+ Mod = cerl:call_module(Tree),
+ Name = cerl:call_name(Tree),
+ Arity = cerl:call_arity(Tree),
+ cerl:is_c_atom(Mod)
+ andalso cerl:is_c_atom(Name)
+ andalso is_send(cerl:atom_val(Name))
+ andalso (cerl:atom_val(Mod) =:= erlang)
+ andalso (Arity =:= 2)
+ end.
+
+is_send('!') -> true;
+is_send(send) -> true;
+is_send(_) -> false.
+
+is_lc_simple_list(Tree, TreeType, State) ->
+ Opaques = State#state.opaques,
+ Ann = cerl:get_ann(Tree),
+ lists:member(list_comprehension, Ann)
+ andalso t_is_list(TreeType)
+ andalso t_is_simple(t_list_elements(TreeType, Opaques), State).
+
+filter_match_fail([Clause] = Cls) ->
+ Body = cerl:clause_body(Clause),
+ case cerl:type(Body) of
+ primop ->
+ case cerl:atom_val(cerl:primop_name(Body)) of
+ match_fail -> [];
+ raise -> [];
+ _ -> Cls
+ end;
+ _ -> Cls
+ end;
+filter_match_fail([H|T]) ->
+ [H|filter_match_fail(T)];
+filter_match_fail([]) ->
+ %% This can actually happen, for example in
+ %% receive after 1 -> ok end
+ [].
+
+%%% ===========================================================================
+%%%
+%%% The State.
+%%%
+%%% ===========================================================================
+
+state__new(Callgraph, Codeserver, Tree, Plt, Module, Records) ->
+ Opaques = erl_types:t_opaque_from_records(Records),
+ {TreeMap, FunHomes} = build_tree_map(Tree, Callgraph),
+ Funs = dict:fetch_keys(TreeMap),
+ FunTab = init_fun_tab(Funs, dict:new(), TreeMap, Callgraph, Plt),
+ ExportedFuns =
+ [Fun || Fun <- Funs--[top], dialyzer_callgraph:is_escaping(Fun, Callgraph)],
+ Work = init_work(ExportedFuns),
+ Env = lists:foldl(fun(Fun, Env) -> dict:store(Fun, map__new(), Env) end,
+ dict:new(), Funs),
+ #state{callgraph = Callgraph, codeserver = Codeserver,
+ envs = Env, fun_tab = FunTab, fun_homes = FunHomes, opaques = Opaques,
+ plt = Plt, races = dialyzer_races:new(), records = Records,
+ warning_mode = false, warnings = [], work = Work, tree_map = TreeMap,
+ module = Module}.
+
+state__warning_mode(#state{warning_mode = WM}) ->
+ WM.
+
+state__set_warning_mode(#state{tree_map = TreeMap, fun_tab = FunTab,
+ races = Races} = State) ->
+ ?debug("==========\nStarting warning pass\n==========\n", []),
+ Funs = dict:fetch_keys(TreeMap),
+ State#state{work = init_work([top|Funs--[top]]),
+ fun_tab = FunTab, warning_mode = true,
+ races = dialyzer_races:put_race_analysis(true, Races)}.
+
+state__race_analysis(Analysis, #state{races = Races} = State) ->
+ State#state{races = dialyzer_races:put_race_analysis(Analysis, Races)}.
+
+state__renew_curr_fun(CurrFun, CurrFunLabel,
+ #state{races = Races} = State) ->
+ State#state{races = dialyzer_races:put_curr_fun(CurrFun, CurrFunLabel,
+ Races)}.
+
+state__renew_fun_args(Args, #state{races = Races} = State) ->
+ case state__warning_mode(State) of
+ true -> State;
+ false ->
+ State#state{races = dialyzer_races:put_fun_args(Args, Races)}
+ end.
+
+state__renew_race_list(RaceList, RaceListSize,
+ #state{races = Races} = State) ->
+ State#state{races = dialyzer_races:put_race_list(RaceList, RaceListSize,
+ Races)}.
+
+state__renew_warnings(Warnings, State) ->
+ State#state{warnings = Warnings}.
+
+-spec state__add_warning(raw_warning(), state()) -> state().
+
+state__add_warning(Warn, #state{warnings = Warnings} = State) ->
+ State#state{warnings = [Warn|Warnings]}.
+
+state__add_warning(State, Tag, Tree, Msg) ->
+ state__add_warning(State, Tag, Tree, Msg, false).
+
+state__add_warning(#state{warning_mode = false} = State, _, _, _, _) ->
+ State;
+state__add_warning(#state{warnings = Warnings, warning_mode = true} = State,
+ Tag, Tree, Msg, Force) ->
+ Ann = cerl:get_ann(Tree),
+ case Force of
+ true ->
+ WarningInfo = {get_file(Ann),
+ abs(get_line(Ann)),
+ State#state.curr_fun},
+ Warn = {Tag, WarningInfo, Msg},
+ ?debug("MSG ~s\n", [dialyzer:format_warning(Warn)]),
+ State#state{warnings = [Warn|Warnings]};
+ false ->
+ case is_compiler_generated(Ann) of
+ true -> State;
+ false ->
+ WarningInfo = {get_file(Ann), get_line(Ann), State#state.curr_fun},
+ Warn = {Tag, WarningInfo, Msg},
+ case Tag of
+ ?WARN_CONTRACT_RANGE -> ok;
+ _ -> ?debug("MSG ~s\n", [dialyzer:format_warning(Warn)])
+ end,
+ State#state{warnings = [Warn|Warnings]}
+ end
+ end.
+
+state__remove_added_warnings(OldState, NewState) ->
+ #state{warnings = OldWarnings} = OldState,
+ #state{warnings = NewWarnings} = NewState,
+ {NewWarnings -- OldWarnings, NewState#state{warnings = OldWarnings}}.
+
+state__add_warnings(Warns, #state{warnings = Warnings} = State) ->
+ State#state{warnings = Warns ++ Warnings}.
+
+-spec state__set_curr_fun(curr_fun(), state()) -> state().
+
+state__set_curr_fun(undefined, State) ->
+ State#state{curr_fun = undefined};
+state__set_curr_fun(FunLbl, State) ->
+ State#state{curr_fun = find_function(FunLbl, State)}.
+
+-spec state__find_function(mfa_or_funlbl(), state()) -> mfa_or_funlbl().
+
+state__find_function(FunLbl, State) ->
+ find_function(FunLbl, State).
+
+state__get_race_warnings(#state{races = Races} = State) ->
+ {Races1, State1} = dialyzer_races:get_race_warnings(Races, State),
+ State1#state{races = Races1}.
+
+state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab,
+ callgraph = Callgraph, plt = Plt} = State) ->
+ FoldFun =
+ fun({top, _}, AccState) -> AccState;
+ ({FunLbl, Fun}, AccState) ->
+ AccState1 = state__set_curr_fun(FunLbl, AccState),
+ {NotCalled, Ret} =
+ case dict:fetch(get_label(Fun), FunTab) of
+ {not_handled, {_Args0, Ret0}} -> {true, Ret0};
+ {_Args0, Ret0} -> {false, Ret0}
+ end,
+ case NotCalled of
+ true ->
+ case dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of
+ error -> AccState1;
+ {ok, {_M, F, A}} ->
+ Msg = {unused_fun, [F, A]},
+ state__add_warning(AccState1, ?WARN_NOT_CALLED, Fun, Msg)
+ end;
+ false ->
+ {Name, Contract} =
+ case dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of
+ error -> {[], none};
+ {ok, {_M, F, A} = MFA} ->
+ {[F, A], dialyzer_plt:lookup_contract(Plt, MFA)}
+ end,
+ case t_is_none(Ret) of
+ true ->
+ %% Check if the function has a contract that allows this.
+ Warn =
+ case Contract of
+ none -> not parent_allows_this(FunLbl, AccState1);
+ {value, C} ->
+ GenRet = dialyzer_contracts:get_contract_return(C),
+ not t_is_unit(GenRet)
+ end,
+ case Warn of
+ true ->
+ case classify_returns(Fun) of
+ no_match ->
+ Msg = {no_return, [no_match|Name]},
+ state__add_warning(AccState1, ?WARN_RETURN_NO_RETURN,
+ Fun, Msg);
+ only_explicit ->
+ Msg = {no_return, [only_explicit|Name]},
+ state__add_warning(AccState1, ?WARN_RETURN_ONLY_EXIT,
+ Fun, Msg);
+ only_normal ->
+ Msg = {no_return, [only_normal|Name]},
+ state__add_warning(AccState1, ?WARN_RETURN_NO_RETURN,
+ Fun, Msg);
+ both ->
+ Msg = {no_return, [both|Name]},
+ state__add_warning(AccState1, ?WARN_RETURN_NO_RETURN,
+ Fun, Msg)
+ end;
+ false ->
+ AccState
+ end;
+ false ->
+ AccState
+ end
+ end
+ end,
+ #state{warnings = Warn} = lists:foldl(FoldFun, State, dict:to_list(TreeMap)),
+ Warn.
+
+state__is_escaping(Fun, #state{callgraph = Callgraph}) ->
+ dialyzer_callgraph:is_escaping(Fun, Callgraph).
+
+state__lookup_type_for_letrec(Var, #state{callgraph = Callgraph} = State) ->
+ Label = get_label(Var),
+ case dialyzer_callgraph:lookup_letrec(Label, Callgraph) of
+ error -> error;
+ {ok, FunLabel} ->
+ {ok, state__fun_type(FunLabel, State)}
+ end.
+
+state__lookup_name({_, _, _} = MFA, #state{}) ->
+ MFA;
+state__lookup_name(top, #state{}) ->
+ top;
+state__lookup_name(Fun, #state{callgraph = Callgraph}) ->
+ case dialyzer_callgraph:lookup_name(Fun, Callgraph) of
+ {ok, MFA} -> MFA;
+ error -> Fun
+ end.
+
+state__lookup_record(Tag, Arity, #state{records = Records}) ->
+ case erl_types:lookup_record(Tag, Arity, Records) of
+ {ok, Fields} ->
+ RecType =
+ t_tuple([t_atom(Tag)|
+ [FieldType || {_FieldName, _Abstr, FieldType} <- Fields]]),
+ {ok, RecType};
+ error ->
+ error
+ end.
+
+state__get_args_and_status(Tree, #state{fun_tab = FunTab}) ->
+ Fun = get_label(Tree),
+ case dict:find(Fun, FunTab) of
+ {ok, {not_handled, {ArgTypes, _}}} -> {ArgTypes, false};
+ {ok, {ArgTypes, _}} -> {ArgTypes, true}
+ end.
+
+build_tree_map(Tree, Callgraph) ->
+ Fun =
+ fun(T, {Dict, Homes, FunLbls} = Acc) ->
+ case cerl:is_c_fun(T) of
+ true ->
+ FunLbl = get_label(T),
+ Dict1 = dict:store(FunLbl, T, Dict),
+ case catch dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of
+ {ok, MFA} ->
+ F2 =
+ fun(Lbl, Dict0) ->
+ dict:store(Lbl, MFA, Dict0)
+ end,
+ Homes1 = lists:foldl(F2, Homes, [FunLbl|FunLbls]),
+ {Dict1, Homes1, []};
+ _ ->
+ {Dict1, Homes, [FunLbl|FunLbls]}
+ end;
+ false ->
+ Acc
+ end
+ end,
+ Dict0 = dict:new(),
+ {Dict, Homes, _} = cerl_trees:fold(Fun, {Dict0, Dict0, []}, Tree),
+ {Dict, Homes}.
+
+init_fun_tab([top|Left], Dict, TreeMap, Callgraph, Plt) ->
+ NewDict = dict:store(top, {[], t_none()}, Dict),
+ init_fun_tab(Left, NewDict, TreeMap, Callgraph, Plt);
+init_fun_tab([Fun|Left], Dict, TreeMap, Callgraph, Plt) ->
+ Arity = cerl:fun_arity(dict:fetch(Fun, TreeMap)),
+ FunEntry =
+ case dialyzer_callgraph:is_escaping(Fun, Callgraph) of
+ true ->
+ Args = lists:duplicate(Arity, t_any()),
+ case lookup_fun_sig(Fun, Callgraph, Plt) of
+ none -> {Args, t_unit()};
+ {value, {RetType, _}} ->
+ case t_is_none(RetType) of
+ true -> {Args, t_none()};
+ false -> {Args, t_unit()}
+ end
+ end;
+ false -> {not_handled, {lists:duplicate(Arity, t_none()), t_unit()}}
+ end,
+ NewDict = dict:store(Fun, FunEntry, Dict),
+ init_fun_tab(Left, NewDict, TreeMap, Callgraph, Plt);
+init_fun_tab([], Dict, _TreeMap, _Callgraph, _Plt) ->
+ ?debug("DICT:~p\n",[dict:to_list(Dict)]),
+ Dict.
+
+state__update_fun_env(Tree, Map, #state{envs = Envs} = State) ->
+ NewEnvs = dict:store(get_label(Tree), Map, Envs),
+ State#state{envs = NewEnvs}.
+
+state__fun_env(Tree, #state{envs = Envs}) ->
+ Fun = get_label(Tree),
+ case dict:find(Fun, Envs) of
+ error -> none;
+ {ok, Map} -> Map
+ end.
+
+state__clean_not_called(#state{fun_tab = FunTab} = State) ->
+ NewFunTab =
+ dict:map(fun(top, Entry) -> Entry;
+ (_Fun, {not_handled, {Args, _}}) -> {Args, t_none()};
+ (_Fun, Entry) -> Entry
+ end, FunTab),
+ State#state{fun_tab = NewFunTab}.
+
+state__all_fun_types(State) ->
+ #state{fun_tab = FunTab} = state__clean_not_called(State),
+ Tab1 = dict:erase(top, FunTab),
+ dict:map(fun(_Fun, {Args, Ret}) -> t_fun(Args, Ret) end, Tab1).
+
+state__fun_type(Fun, #state{fun_tab = FunTab}) ->
+ Label =
+ if is_integer(Fun) -> Fun;
+ true -> get_label(Fun)
+ end,
+ Entry = dict:find(Label, FunTab),
+ ?debug("FunType ~p:~p\n",[Label, Entry]),
+ case Entry of
+ {ok, {not_handled, {A, R}}} ->
+ t_fun(A, R);
+ {ok, {A, R}} ->
+ t_fun(A, R)
+ end.
+
+state__update_fun_entry(Tree, ArgTypes, Out0,
+ #state{fun_tab=FunTab, callgraph=CG, plt=Plt} = State)->
+ Fun = get_label(Tree),
+ Out1 =
+ if Fun =:= top -> Out0;
+ true ->
+ case lookup_fun_sig(Fun, CG, Plt) of
+ {value, {SigRet, _}} -> t_inf(SigRet, Out0);
+ none -> Out0
+ end
+ end,
+ Out = t_limit(Out1, ?TYPE_LIMIT),
+ {ok, {OldArgTypes, OldOut}} = dict:find(Fun, FunTab),
+ SameArgs = lists:all(fun({A, B}) -> erl_types:t_is_equal(A, B)
+ end, lists:zip(OldArgTypes, ArgTypes)),
+ SameOut = t_is_equal(OldOut, Out),
+ if
+ SameArgs, SameOut ->
+ ?debug("Fixpoint for ~w: ~s\n",
+ [state__lookup_name(Fun, State),
+ t_to_string(t_fun(ArgTypes, Out))]),
+ State;
+ true ->
+ %% Can only happen in self-recursive functions.
+ NewEntry = {OldArgTypes, Out},
+ ?debug("New Entry for ~w: ~s\n",
+ [state__lookup_name(Fun, State),
+ t_to_string(t_fun(OldArgTypes, Out))]),
+ NewFunTab = dict:store(Fun, NewEntry, FunTab),
+ State1 = State#state{fun_tab = NewFunTab},
+ state__add_work_from_fun(Tree, State1)
+ end.
+
+state__add_work_from_fun(_Tree, #state{warning_mode = true} = State) ->
+ State;
+state__add_work_from_fun(Tree, #state{callgraph = Callgraph,
+ tree_map = TreeMap} = State) ->
+ case get_label(Tree) of
+ top -> State;
+ Label when is_integer(Label) ->
+ case dialyzer_callgraph:in_neighbours(Label, Callgraph) of
+ none -> State;
+ MFAList ->
+ LabelList = [dialyzer_callgraph:lookup_label(MFA, Callgraph)
+ || MFA <- MFAList],
+ %% Keep only the labels of functions defined in this module.
+ FilteredList = [L || {ok, L} <- LabelList, dict:is_key(L, TreeMap)],
+ ?debug("~w: Will try to add:~w\n",
+ [state__lookup_name(Label, State), MFAList]),
+ lists:foldl(fun(L, AccState) ->
+ state__add_work(L, AccState)
+ end, State, FilteredList)
+ end
+ end.
+
+state__add_work(external, State) ->
+ State;
+state__add_work(top, State) ->
+ State;
+state__add_work(Fun, #state{work = Work} = State) ->
+ NewWork = add_work(Fun, Work),
+ State#state{work = NewWork}.
+
+state__get_work(#state{work = Work, tree_map = TreeMap} = State) ->
+ case get_work(Work) of
+ none -> none;
+ {Fun, NewWork} ->
+ {dict:fetch(Fun, TreeMap), State#state{work = NewWork}}
+ end.
+
+state__lookup_call_site(Tree, #state{callgraph = Callgraph}) ->
+ Label = get_label(Tree),
+ dialyzer_callgraph:lookup_call_site(Label, Callgraph).
+
+state__fun_info(external, #state{}) ->
+ external;
+state__fun_info({_, _, _} = MFA, #state{plt = PLT}) ->
+ {MFA,
+ dialyzer_plt:lookup(PLT, MFA),
+ dialyzer_plt:lookup_contract(PLT, MFA),
+ t_any()};
+state__fun_info(Fun, #state{callgraph = CG, fun_tab = FunTab, plt = PLT}) ->
+ {Sig, Contract} =
+ case dialyzer_callgraph:lookup_name(Fun, CG) of
+ error ->
+ {dialyzer_plt:lookup(PLT, Fun), none};
+ {ok, MFA} ->
+ {dialyzer_plt:lookup(PLT, MFA), dialyzer_plt:lookup_contract(PLT, MFA)}
+ end,
+ LocalRet =
+ case dict:fetch(Fun, FunTab) of
+ {not_handled, {_Args, Ret}} -> Ret;
+ {_Args, Ret} -> Ret
+ end,
+ ?debug("LocalRet: ~s\n", [t_to_string(LocalRet)]),
+ {Fun, Sig, Contract, LocalRet}.
+
+forward_args(Fun, ArgTypes, #state{work = Work, fun_tab = FunTab} = State) ->
+ {OldArgTypes, OldOut, Fixpoint} =
+ case dict:find(Fun, FunTab) of
+ {ok, {not_handled, {OldArgTypes0, OldOut0}}} ->
+ {OldArgTypes0, OldOut0, false};
+ {ok, {OldArgTypes0, OldOut0}} ->
+ {OldArgTypes0, OldOut0,
+ t_is_subtype(t_product(ArgTypes), t_product(OldArgTypes0))}
+ end,
+ case Fixpoint of
+ true -> State;
+ false ->
+ NewArgTypes = [t_sup(X, Y) ||
+ {X, Y} <- lists:zip(ArgTypes, OldArgTypes)],
+ NewWork = add_work(Fun, Work),
+ ?debug("~w: forwarding args ~s\n",
+ [state__lookup_name(Fun, State),
+ t_to_string(t_product(NewArgTypes))]),
+ NewFunTab = dict:store(Fun, {NewArgTypes, OldOut}, FunTab),
+ State#state{work = NewWork, fun_tab = NewFunTab}
+ end.
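+%% forward_args/3 drives the fixpoint iteration: the callee is
+%% re-scheduled with the supremum of the old and new argument types,
+%% unless the new arguments are already a subtype of the recorded ones.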
+
+-spec state__cleanup(state()) -> state().
+
+state__cleanup(#state{callgraph = Callgraph,
+ races = Races,
+ records = Records}) ->
+ #state{callgraph = dialyzer_callgraph:cleanup(Callgraph),
+ races = dialyzer_races:cleanup(Races),
+ records = Records}.
+
+-spec state__duplicate(state()) -> state().
+
+state__duplicate(#state{callgraph = Callgraph} = State) ->
+ State#state{callgraph = dialyzer_callgraph:duplicate(Callgraph)}.
+
+-spec dispose_state(state()) -> ok.
+
+dispose_state(#state{callgraph = Callgraph}) ->
+ dialyzer_callgraph:dispose_race_server(Callgraph).
+
+-spec state__get_callgraph(state()) -> dialyzer_callgraph:callgraph().
+
+state__get_callgraph(#state{callgraph = Callgraph}) ->
+ Callgraph.
+
+-spec state__get_races(state()) -> dialyzer_races:races().
+
+state__get_races(#state{races = Races}) ->
+ Races.
+
+-spec state__get_records(state()) -> types().
+
+state__get_records(#state{records = Records}) ->
+ Records.
+
+-spec state__put_callgraph(dialyzer_callgraph:callgraph(), state()) ->
+ state().
+
+state__put_callgraph(Callgraph, State) ->
+ State#state{callgraph = Callgraph}.
+
+-spec state__put_races(dialyzer_races:races(), state()) -> state().
+
+state__put_races(Races, State) ->
+ State#state{races = Races}.
+
+-spec state__records_only(state()) -> state().
+
+state__records_only(#state{records = Records}) ->
+ #state{records = Records}.
+
+%%% ===========================================================================
+%%%
+%%% Races
+%%%
+%%% ===========================================================================
+
+is_race_analysis_enabled(#state{races = Races, callgraph = Callgraph}) ->
+ RaceDetection = dialyzer_callgraph:get_race_detection(Callgraph),
+ RaceAnalysis = dialyzer_races:get_race_analysis(Races),
+ RaceDetection andalso RaceAnalysis.
+
+get_race_list_and_size(#state{races = Races}) ->
+ dialyzer_races:get_race_list_and_size(Races).
+
+renew_race_code(#state{races = Races, callgraph = Callgraph,
+ warning_mode = WarningMode} = State) ->
+ case WarningMode of
+ true -> State;
+ false ->
+ NewCallgraph = dialyzer_callgraph:renew_race_code(Races, Callgraph),
+ State#state{callgraph = NewCallgraph}
+ end.
+
+renew_race_public_tables([Var], #state{races = Races, callgraph = Callgraph,
+ warning_mode = WarningMode} = State) ->
+ case WarningMode of
+ true -> State;
+ false ->
+ Table = dialyzer_races:get_new_table(Races),
+ case Table of
+ no_t -> State;
+ _Other ->
+ VarLabel = get_label(Var),
+ NewCallgraph =
+ dialyzer_callgraph:renew_race_public_tables(VarLabel, Callgraph),
+ State#state{callgraph = NewCallgraph}
+ end
+ end.
+
+%%% ===========================================================================
+%%%
+%%% Worklist
+%%%
+%%% ===========================================================================
+
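+%% The worklist is a FIFO queue represented as {Queue, RevTail, Set}:
+%% get_work/1 pops from Queue, reversing RevTail when Queue runs empty, and
+%% add_work/2 pushes onto RevTail, using Set to avoid scheduling the same
+%% function twice.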
+init_work(List) ->
+ {List, [], sets:from_list(List)}.
+
+get_work({[], [], _Set}) ->
+ none;
+get_work({[H|T], Rev, Set}) ->
+ {H, {T, Rev, sets:del_element(H, Set)}};
+get_work({[], Rev, Set}) ->
+ get_work({lists:reverse(Rev), [], Set}).
+
+add_work(New, {List, Rev, Set} = Work) ->
+ case sets:is_element(New, Set) of
+ true -> Work;
+ false -> {List, [New|Rev], sets:add_element(New, Set)}
+ end.
+
+%%% ===========================================================================
+%%%
+%%% Utilities.
+%%%
+%%% ===========================================================================
+
+get_line([Line|_]) when is_integer(Line) -> Line;
+get_line([_|Tail]) -> get_line(Tail);
+get_line([]) -> -1.
+
+get_file([]) -> [];
+get_file([{file, File}|_]) -> File;
+get_file([_|Tail]) -> get_file(Tail).
+
+is_compiler_generated(Ann) ->
+ lists:member(compiler_generated, Ann) orelse (get_line(Ann) < 1).
+
+is_literal_record(Tree) ->
+ Ann = cerl:get_ann(Tree),
+ lists:member(record, Ann).
+
+-spec format_args([cerl:cerl()], [type()], state()) ->
+ nonempty_string().
+
+format_args([], [], _State) ->
+ "()";
+format_args(ArgList0, TypeList, State) ->
+ ArgList = fold_literals(ArgList0),
+ "(" ++ format_args_1(ArgList, TypeList, State) ++ ")".
+
+format_args_1([Arg], [Type], State) ->
+ format_arg(Arg) ++ format_type(Type, State);
+format_args_1([Arg|Args], [Type|Types], State) ->
+ String =
+ case cerl:is_literal(Arg) of
+ true -> format_cerl(Arg);
+ false -> format_arg(Arg) ++ format_type(Type, State)
+ end,
+ String ++ "," ++ format_args_1(Args, Types, State).
+
+format_arg(Arg) ->
+ Default = "",
+ case cerl:is_c_var(Arg) of
+ true ->
+ case cerl:var_name(Arg) of
+ Atom when is_atom(Atom) ->
+ case atom_to_list(Atom) of
+ "cor"++_ -> Default;
+ "rec"++_ -> Default;
+ Name -> Name ++ "::"
+ end;
+ _What -> Default
+ end;
+ false ->
+ Default
+ end.
+
+-spec format_type(type(), state()) -> string().
+
+format_type(Type, #state{records = R}) ->
+ t_to_string(Type, R).
+
+-spec format_field_diffs(type(), state()) -> string().
+
+format_field_diffs(RecConstruction, #state{records = R}) ->
+ erl_types:record_field_diffs_to_string(RecConstruction, R).
+
+-spec format_sig_args(type(), state()) -> string().
+
+format_sig_args(Type, #state{opaques = Opaques} = State) ->
+ SigArgs = t_fun_args(Type, Opaques),
+ case SigArgs of
+ [] -> "()";
+ [SArg|SArgs] ->
+ lists:flatten("(" ++ format_type(SArg, State)
+ ++ ["," ++ format_type(T, State) || T <- SArgs] ++ ")")
+ end.
+
+format_cerl(Tree) ->
+ cerl_prettypr:format(cerl:set_ann(Tree, []),
+ [{hook, dialyzer_utils:pp_hook()},
+ {noann, true},
+                        {paper, 100000}, %% These options prevent the
+                        {ribbon, 100000} %% pretty printer from inserting newlines.
+ ]).
+
+format_patterns(Pats0) ->
+ Pats = fold_literals(Pats0),
+ NewPats = map_pats(cerl:c_values(Pats)),
+ String = format_cerl(NewPats),
+ case Pats of
+ [PosVar] ->
+ case cerl:is_c_var(PosVar) andalso (cerl:var_name(PosVar) =/= '') of
+ true -> "variable "++String;
+ false -> "pattern "++String
+ end;
+ _ ->
+ "pattern "++String
+ end.
+
+map_pats(Pats) ->
+ Fun = fun(Tree) ->
+ case cerl:is_c_var(Tree) of
+ true ->
+ case cerl:var_name(Tree) of
+ Atom when is_atom(Atom) ->
+ case atom_to_list(Atom) of
+ "cor"++_ -> cerl:c_var('');
+ "rec"++_ -> cerl:c_var('');
+ _ -> cerl:set_ann(Tree, [])
+ end;
+ _What -> cerl:c_var('')
+ end;
+ false ->
+ cerl:set_ann(Tree, [])
+ end
+ end,
+ cerl_trees:map(Fun, Pats).
+
+fold_literals(TreeList) ->
+ [cerl:fold_literal(Tree) || Tree <- TreeList].
+
+type(Tree) ->
+ Folded = cerl:fold_literal(Tree),
+ case cerl:type(Folded) of
+ literal -> {literal, Folded};
+ Type -> Type
+ end.
+
+is_literal(Tree) ->
+ Folded = cerl:fold_literal(Tree),
+ case cerl:is_literal(Folded) of
+ true -> {yes, Folded};
+ false -> no
+ end.
+
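+%% True only for an escaping, unnamed fun whose single caller has a contract
+%% whose return type is a fun with range unit(), i.e. the enclosing spec
+%% already states that the returned fun does not return.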
+parent_allows_this(FunLbl, #state{callgraph = Callgraph, plt = Plt} = State) ->
+ case state__is_escaping(FunLbl, State) of
+ false -> false; % if it isn't escaping it can't be a return value
+ true ->
+ case state__lookup_name(FunLbl, State) of
+ {_M, _F, _A} -> false; % if it has a name it is not a fun
+ _ ->
+ case dialyzer_callgraph:in_neighbours(FunLbl, Callgraph) of
+ [Parent] ->
+ case state__lookup_name(Parent, State) of
+ {_M, _F, _A} = PMFA ->
+ case dialyzer_plt:lookup_contract(Plt, PMFA) of
+ none -> false;
+ {value, C} ->
+ GenRet = dialyzer_contracts:get_contract_return(C),
+ case erl_types:t_is_fun(GenRet) of
+ false -> false; % element of structure? far-fetched...
+ true -> t_is_unit(t_fun_range(GenRet))
+ end
+ end;
+ _ -> false % parent should have a name to have a contract
+ end;
+ _ -> false % called in other funs? far-fetched...
+ end
+ end
+ end.
+
+find_function({_, _, _} = MFA, _State) ->
+ MFA;
+find_function(top, _State) ->
+ top;
+find_function(FunLbl, #state{fun_homes = Homes}) ->
+ dict:fetch(FunLbl, Homes).
+
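+%% classify_returns/1 and find_terminals/1 compute, for a function body, the
+%% pair {Explicit, Normal}: whether evaluation can end in an explicit exit
+%% (a call to a function whose known return type is none()) and whether it
+%% can produce a normal return value.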
+classify_returns(Tree) ->
+ case find_terminals(cerl:fun_body(Tree)) of
+ {false, false} -> no_match;
+ {true, false} -> only_explicit;
+ {false, true} -> only_normal;
+ {true, true} -> both
+ end.
+
+find_terminals(Tree) ->
+ case cerl:type(Tree) of
+ apply -> {false, true};
+ binary -> {false, true};
+ bitstr -> {false, true};
+ call ->
+ M0 = cerl:call_module(Tree),
+ F0 = cerl:call_name(Tree),
+ A = length(cerl:call_args(Tree)),
+ case {is_literal(M0), is_literal(F0)} of
+ {{yes, LitM}, {yes, LitF}} ->
+ M = cerl:concrete(LitM),
+ F = cerl:concrete(LitF),
+ case (erl_bif_types:is_known(M, F, A)
+ andalso t_is_none(erl_bif_types:type(M, F, A))) of
+ true -> {true, false};
+ false -> {false, true}
+ end;
+ _ ->
+ %% We cannot make assumptions. Say that both are true.
+ {true, true}
+ end;
+ 'case' -> find_terminals_list(cerl:case_clauses(Tree));
+ 'catch' -> find_terminals(cerl:catch_body(Tree));
+ clause -> find_terminals(cerl:clause_body(Tree));
+ cons -> {false, true};
+ 'fun' -> {false, true};
+ 'let' -> find_terminals(cerl:let_body(Tree));
+ letrec -> find_terminals(cerl:letrec_body(Tree));
+ literal -> {false, true};
+ map -> {false, true};
+ primop -> {false, false}; %% match_fail, etc. are not explicit exits.
+ 'receive' ->
+ Timeout = cerl:receive_timeout(Tree),
+ Clauses = cerl:receive_clauses(Tree),
+ case (cerl:is_literal(Timeout) andalso
+ (cerl:concrete(Timeout) =:= infinity)) of
+ true ->
+        if Clauses =:= [] -> {false, true}; %% A never-ending receive.
+ true -> find_terminals_list(Clauses)
+ end;
+ false -> find_terminals_list([cerl:receive_action(Tree)|Clauses])
+ end;
+ seq -> find_terminals(cerl:seq_body(Tree));
+ 'try' ->
+ find_terminals_list([cerl:try_handler(Tree), cerl:try_body(Tree)]);
+ tuple -> {false, true};
+ values -> {false, true};
+ var -> {false, true}
+ end.
+
+find_terminals_list(List) ->
+ find_terminals_list(List, false, false).
+
+find_terminals_list([Tree|Left], Explicit1, Normal1) ->
+ {Explicit2, Normal2} = find_terminals(Tree),
+ case {Explicit1 or Explicit2, Normal1 or Normal2} of
+ {true, true} = Ans -> Ans;
+ {NewExplicit, NewNormal} ->
+ find_terminals_list(Left, NewExplicit, NewNormal)
+ end;
+find_terminals_list([], Explicit, Normal) ->
+ {Explicit, Normal}.
+
+%%----------------------------------------------------------------------------
+
+-ifdef(DEBUG_PP).
+debug_pp(Tree, true) ->
+ io:put_chars(cerl_prettypr:format(Tree, [{hook, cerl_typean:pp_hook()}])),
+ io:nl(),
+ ok;
+debug_pp(Tree, false) ->
+ io:put_chars(cerl_prettypr:format(strip_annotations(Tree))),
+ io:nl(),
+ ok.
+
+strip_annotations(Tree) ->
+ Fun = fun(T) ->
+ case cerl:type(T) of
+ var ->
+ cerl:set_ann(T, [{label, cerl_trees:get_label(T)}]);
+ 'fun' ->
+ cerl:set_ann(T, [{label, cerl_trees:get_label(T)}]);
+ _ ->
+ cerl:set_ann(T, [])
+ end
+ end,
+ cerl_trees:map(Fun, Tree).
+
+-else.
+
+debug_pp(_Tree, _UseHook) ->
+ ok.
+-endif.
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_races.erl b/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_races.erl
new file mode 100644
index 0000000000..7fe64c3e11
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_races.erl
@@ -0,0 +1,2487 @@
+%% -*- erlang-indent-level: 2 -*-
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+
+%%%----------------------------------------------------------------------
+%%% File : dialyzer_races.erl
+%%% Author : Maria Christakis <[email protected]>
+%%% Description : Utility functions for race condition detection
+%%%
+%%% Created : 21 Nov 2008 by Maria Christakis <[email protected]>
+%%%----------------------------------------------------------------------
+-module(dialyzer_races).
+
+%% Race Analysis
+
+-export([store_race_call/5, race/1, get_race_warnings/2, format_args/4]).
+
+%% Record Interfaces
+
+-export([beg_clause_new/3, cleanup/1, end_case_new/1, end_clause_new/3,
+ get_curr_fun/1, get_curr_fun_args/1, get_new_table/1,
+ get_race_analysis/1, get_race_list/1, get_race_list_size/1,
+ get_race_list_and_size/1,
+ let_tag_new/2, new/0, put_curr_fun/3, put_fun_args/2,
+ put_race_analysis/2, put_race_list/3]).
+
+-export_type([races/0, core_vars/0]).
+
+-include("dialyzer.hrl").
+
+%%% ===========================================================================
+%%%
+%%% Definitions
+%%%
+%%% ===========================================================================
+
+-define(local, 5).
+-define(no_arg, no_arg).
+-define(no_label, no_label).
+-define(bypassed, bypassed).
+
+-define(WARN_WHEREIS_REGISTER, warn_whereis_register).
+-define(WARN_WHEREIS_UNREGISTER, warn_whereis_unregister).
+-define(WARN_ETS_LOOKUP_INSERT, warn_ets_lookup_insert).
+-define(WARN_MNESIA_DIRTY_READ_WRITE, warn_mnesia_dirty_read_write).
+-define(WARN_NO_WARN, warn_no_warn).
+
+%%% ===========================================================================
+%%%
+%%% Local Types
+%%%
+%%% ===========================================================================
+
+-type label_type() :: label() | [label()] | {label()} | ?no_label.
+-type args() :: [label_type() | [string()]].
+-type core_vars() :: cerl:cerl() | ?no_arg | ?bypassed.
+-type var_to_map1() :: core_vars() | [cerl:cerl()].
+-type var_to_map2() :: cerl:cerl() | [cerl:cerl()] | ?bypassed.
+-type core_args() :: [core_vars()] | 'empty'.
+-type op() :: 'bind' | 'unbind'.
+
+-type dep_calls() :: 'whereis' | 'ets_lookup' | 'mnesia_dirty_read'.
+-type warn_calls() :: 'register' | 'unregister' | 'ets_insert'
+ | 'mnesia_dirty_write'.
+-type call() :: 'whereis' | 'register' | 'unregister' | 'ets_new'
+ | 'ets_lookup' | 'ets_insert' | 'mnesia_dirty_read1'
+ | 'mnesia_dirty_read2' | 'mnesia_dirty_write1'
+ | 'mnesia_dirty_write2' | 'function_call'.
+-type race_tag() :: 'whereis_register' | 'whereis_unregister'
+ | 'ets_lookup_insert' | 'mnesia_dirty_read_write'.
+
+%% The following type is similar to the raw_warning() type, but has a
+%% tag that is local to this module and is not propagated outside of it.
+-type dial_race_warning() :: {race_warn_tag(), warning_info(), {atom(), [term()]}}.
+-type race_warn_tag() :: ?WARN_WHEREIS_REGISTER | ?WARN_WHEREIS_UNREGISTER
+ | ?WARN_ETS_LOOKUP_INSERT | ?WARN_MNESIA_DIRTY_READ_WRITE.
+
+-record(beg_clause, {arg :: var_to_map1() | 'undefined',
+ pats :: var_to_map1() | 'undefined',
+ guard :: cerl:cerl() | 'undefined'}).
+-record(end_clause, {arg :: var_to_map1() | 'undefined',
+ pats :: var_to_map1() | 'undefined',
+ guard :: cerl:cerl() | 'undefined'}).
+-record(end_case, {clauses :: [#end_clause{}]}).
+-record(curr_fun, {status :: 'in' | 'out' | 'undefined',
+ mfa :: dialyzer_callgraph:mfa_or_funlbl()
+ | 'undefined',
+ label :: label() | 'undefined',
+ def_vars :: [core_vars()] | 'undefined',
+ arg_types :: [erl_types:erl_type()] | 'undefined',
+ call_vars :: [core_vars()] | 'undefined',
+ var_map :: dict:dict() | 'undefined'}).
+-record(dep_call, {call_name :: dep_calls(),
+ args :: args() | 'undefined',
+ arg_types :: [erl_types:erl_type()],
+ vars :: [core_vars()],
+ state :: dialyzer_dataflow:state(),
+ file_line :: file_line(),
+ var_map :: dict:dict() | 'undefined'}).
+-record(fun_call, {caller :: dialyzer_callgraph:mfa_or_funlbl(),
+ callee :: dialyzer_callgraph:mfa_or_funlbl(),
+ arg_types :: [erl_types:erl_type()],
+ vars :: [core_vars()]}).
+-record(let_tag, {var :: var_to_map1(),
+ arg :: var_to_map1()}).
+-record(warn_call, {call_name :: warn_calls(),
+ args :: args(),
+ var_map :: dict:dict() | 'undefined'}).
+
+-type case_tags() :: 'beg_case' | #beg_clause{} | #end_clause{} | #end_case{}.
+-type code() :: [#dep_call{} | #fun_call{} | #warn_call{} |
+ #curr_fun{} | #let_tag{} | case_tags() | race_tag()].
+
+-type table_var() :: label() | ?no_label.
+-type table() :: {'named', table_var(), [string()]} | 'other' | 'no_t'.
+
+-record(race_fun, {mfa :: mfa(),
+ args :: args(),
+ arg_types :: [erl_types:erl_type()],
+ vars :: [core_vars()],
+ file_line :: file_line(),
+ index :: non_neg_integer(),
+ fun_mfa :: dialyzer_callgraph:mfa_or_funlbl(),
+ fun_label :: label()}).
+
+-record(races, {curr_fun :: dialyzer_callgraph:mfa_or_funlbl()
+ | 'undefined',
+ curr_fun_label :: label() | 'undefined',
+ curr_fun_args = 'empty' :: core_args(),
+ new_table = 'no_t' :: table(),
+ race_list = [] :: code(),
+ race_list_size = 0 :: non_neg_integer(),
+ race_tags = [] :: [#race_fun{}],
+ %% true for fun types and warning mode
+ race_analysis = false :: boolean(),
+ race_warnings = [] :: [dial_race_warning()]}).
+
+%%% ===========================================================================
+%%%
+%%% Exported Types
+%%%
+%%% ===========================================================================
+
+-opaque races() :: #races{}.
+
+%%% ===========================================================================
+%%%
+%%% Race Analysis
+%%%
+%%% ===========================================================================
+
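+%% Record a call that is relevant for the race analysis. Calls to
+%% register/unregister/whereis, ets:new/insert/lookup, mnesia dirty
+%% reads/writes and ordinary local calls are appended to the race list of
+%% the function under analysis, together with their argument information.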
+-spec store_race_call(dialyzer_callgraph:mfa_or_funlbl(),
+ [erl_types:erl_type()], [core_vars()],
+ file_line(), dialyzer_dataflow:state()) ->
+ dialyzer_dataflow:state().
+
+store_race_call(Fun, ArgTypes, Args, FileLine, State) ->
+ Races = dialyzer_dataflow:state__get_races(State),
+ CurrFun = Races#races.curr_fun,
+ CurrFunLabel = Races#races.curr_fun_label,
+ RaceTags = Races#races.race_tags,
+ CleanState = dialyzer_dataflow:state__records_only(State),
+ {NewRaceList, NewRaceListSize, NewRaceTags, NewTable} =
+ case CurrFun of
+ {_Module, module_info, A} when A =:= 0 orelse A =:= 1 ->
+ {[], 0, RaceTags, no_t};
+ _Thing ->
+ RaceList = Races#races.race_list,
+ RaceListSize = Races#races.race_list_size,
+ case Fun of
+ {erlang, get_module_info, A} when A =:= 1 orelse A =:= 2 ->
+ {[], 0, RaceTags, no_t};
+ {erlang, register, 2} ->
+ VarArgs = format_args(Args, ArgTypes, CleanState, register),
+ RaceFun = #race_fun{mfa = Fun, args = VarArgs,
+ arg_types = ArgTypes, vars = Args,
+ file_line = FileLine, index = RaceListSize,
+ fun_mfa = CurrFun, fun_label = CurrFunLabel},
+ {[#warn_call{call_name = register, args = VarArgs}|
+ RaceList], RaceListSize + 1, [RaceFun|RaceTags], no_t};
+ {erlang, unregister, 1} ->
+ VarArgs = format_args(Args, ArgTypes, CleanState, unregister),
+ RaceFun = #race_fun{mfa = Fun, args = VarArgs,
+ arg_types = ArgTypes, vars = Args,
+ file_line = FileLine, index = RaceListSize,
+ fun_mfa = CurrFun, fun_label = CurrFunLabel},
+ {[#warn_call{call_name = unregister, args = VarArgs}|
+ RaceList], RaceListSize + 1, [RaceFun|RaceTags], no_t};
+ {erlang, whereis, 1} ->
+ VarArgs = format_args(Args, ArgTypes, CleanState, whereis),
+ {[#dep_call{call_name = whereis, args = VarArgs,
+ arg_types = ArgTypes, vars = Args,
+ state = CleanState, file_line = FileLine}|
+ RaceList], RaceListSize + 1, RaceTags, no_t};
+ {ets, insert, 2} ->
+ VarArgs = format_args(Args, ArgTypes, CleanState, ets_insert),
+ RaceFun = #race_fun{mfa = Fun, args = VarArgs,
+ arg_types = ArgTypes, vars = Args,
+ file_line = FileLine, index = RaceListSize,
+ fun_mfa = CurrFun, fun_label = CurrFunLabel},
+ {[#warn_call{call_name = ets_insert, args = VarArgs}|
+ RaceList], RaceListSize + 1, [RaceFun|RaceTags], no_t};
+ {ets, lookup, 2} ->
+ VarArgs = format_args(Args, ArgTypes, CleanState, ets_lookup),
+ {[#dep_call{call_name = ets_lookup, args = VarArgs,
+ arg_types = ArgTypes, vars = Args,
+ state = CleanState, file_line = FileLine}|
+ RaceList], RaceListSize + 1, RaceTags, no_t};
+ {ets, new, 2} ->
+ VarArgs = format_args(Args, ArgTypes, CleanState, ets_new),
+ [VarArgs1, VarArgs2, _, Options] = VarArgs,
+ NewTable1 =
+ case lists:member("'public'", Options) of
+ true ->
+ case lists:member("'named_table'", Options) of
+ true ->
+ {named, VarArgs1, VarArgs2};
+ false -> other
+ end;
+ false -> no_t
+ end,
+ {RaceList, RaceListSize, RaceTags, NewTable1};
+ {mnesia, dirty_read, A} when A =:= 1 orelse A =:= 2 ->
+ VarArgs =
+ case A of
+ 1 ->
+ format_args(Args, ArgTypes, CleanState, mnesia_dirty_read1);
+ 2 ->
+ format_args(Args, ArgTypes, CleanState, mnesia_dirty_read2)
+ end,
+ {[#dep_call{call_name = mnesia_dirty_read, args = VarArgs,
+ arg_types = ArgTypes, vars = Args,
+ state = CleanState, file_line = FileLine}|RaceList],
+ RaceListSize + 1, RaceTags, no_t};
+ {mnesia, dirty_write, A} when A =:= 1 orelse A =:= 2 ->
+ VarArgs =
+ case A of
+ 1 ->
+ format_args(Args, ArgTypes, CleanState, mnesia_dirty_write1);
+ 2 ->
+ format_args(Args, ArgTypes, CleanState, mnesia_dirty_write2)
+ end,
+ RaceFun = #race_fun{mfa = Fun, args = VarArgs,
+ arg_types = ArgTypes, vars = Args,
+ file_line = FileLine, index = RaceListSize,
+ fun_mfa = CurrFun, fun_label = CurrFunLabel},
+ {[#warn_call{call_name = mnesia_dirty_write,
+ args = VarArgs}|RaceList],
+ RaceListSize + 1, [RaceFun|RaceTags], no_t};
+ Int when is_integer(Int) ->
+ {[#fun_call{caller = CurrFun, callee = Int, arg_types = ArgTypes,
+ vars = Args}|RaceList],
+ RaceListSize + 1, RaceTags, no_t};
+ _Other ->
+ Callgraph = dialyzer_dataflow:state__get_callgraph(State),
+ case digraph:vertex(dialyzer_callgraph:get_digraph(Callgraph),
+ Fun) of
+ {Fun, confirmed} ->
+ {[#fun_call{caller = CurrFun, callee = Fun,
+ arg_types = ArgTypes, vars = Args}|RaceList],
+ RaceListSize + 1, RaceTags, no_t};
+ false ->
+ {RaceList, RaceListSize, RaceTags, no_t}
+ end
+ end
+ end,
+ state__renew_info(NewRaceList, NewRaceListSize, NewRaceTags, NewTable, State).
+
+-spec race(dialyzer_dataflow:state()) -> dialyzer_dataflow:state().
+
+race(State) ->
+ Races = dialyzer_dataflow:state__get_races(State),
+ RaceTags = Races#races.race_tags,
+ RetState =
+ case RaceTags of
+ [] -> State;
+ [#race_fun{mfa = Fun,
+ args = VarArgs, arg_types = ArgTypes,
+ vars = Args, file_line = FileLine,
+ index = Index, fun_mfa = CurrFun,
+ fun_label = CurrFunLabel}|T] ->
+ Callgraph = dialyzer_dataflow:state__get_callgraph(State),
+ {ok, [_Args, Code]} =
+ dict:find(CurrFun, dialyzer_callgraph:get_race_code(Callgraph)),
+ RaceList = lists:reverse(Code),
+ RaceWarnTag =
+ case Fun of
+ {erlang, register, 2} -> ?WARN_WHEREIS_REGISTER;
+ {erlang, unregister, 1} -> ?WARN_WHEREIS_UNREGISTER;
+ {ets, insert, 2} -> ?WARN_ETS_LOOKUP_INSERT;
+ {mnesia, dirty_write, _A} -> ?WARN_MNESIA_DIRTY_READ_WRITE
+ end,
+ State1 =
+ state__renew_curr_fun(CurrFun,
+ state__renew_curr_fun_label(CurrFunLabel,
+ state__renew_race_list(lists:nthtail(length(RaceList) - Index,
+ RaceList), State))),
+ DepList = fixup_race_list(RaceWarnTag, VarArgs, State1),
+ {State2, RaceWarn} =
+ get_race_warn(Fun, Args, ArgTypes, DepList, State),
+ {File, Line} = FileLine,
+ CurrMFA = dialyzer_dataflow:state__find_function(CurrFun, State),
+ WarningInfo = {File, Line, CurrMFA},
+ race(
+ state__add_race_warning(
+ state__renew_race_tags(T, State2), RaceWarn, RaceWarnTag,
+ WarningInfo))
+ end,
+ state__renew_race_tags([], RetState).
+
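+%% Build the dependency list for a single race warning. In outline: run the
+%% forward analysis over the code recorded for the current function, collect
+%% its callers up to ?local levels with the backward analysis, and run the
+%% same forward analysis for each of those callers.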
+fixup_race_list(RaceWarnTag, WarnVarArgs, State) ->
+ Races = dialyzer_dataflow:state__get_races(State),
+ CurrFun = Races#races.curr_fun,
+ CurrFunLabel = Races#races.curr_fun_label,
+ RaceList = Races#races.race_list,
+ Callgraph = dialyzer_dataflow:state__get_callgraph(State),
+ Digraph = dialyzer_callgraph:get_digraph(Callgraph),
+ Calls = digraph:edges(Digraph),
+ RaceTag =
+ case RaceWarnTag of
+ ?WARN_WHEREIS_REGISTER -> whereis_register;
+ ?WARN_WHEREIS_UNREGISTER -> whereis_unregister;
+ ?WARN_ETS_LOOKUP_INSERT -> ets_lookup_insert;
+ ?WARN_MNESIA_DIRTY_READ_WRITE -> mnesia_dirty_read_write
+ end,
+ NewRaceList = [RaceTag|RaceList],
+ CleanState = dialyzer_dataflow:state__cleanup(State),
+ NewState = state__renew_race_list(NewRaceList, CleanState),
+ DepList1 =
+ fixup_race_forward_pullout(CurrFun, CurrFunLabel, Calls,
+ lists:reverse(NewRaceList), [], CurrFun,
+ WarnVarArgs, RaceWarnTag, dict:new(),
+ [], [], [], 2 * ?local, NewState),
+ Parents = fixup_race_backward(CurrFun, Calls, Calls, [], ?local),
+ UParents = lists:usort(Parents),
+ Filtered = filter_parents(UParents, UParents, Digraph),
+ NewParents =
+ case lists:member(CurrFun, Filtered) of
+ true -> Filtered;
+ false -> [CurrFun|Filtered]
+ end,
+ DepList2 =
+ fixup_race_list_helper(NewParents, Calls, CurrFun, WarnVarArgs,
+ RaceWarnTag, NewState),
+ dialyzer_dataflow:dispose_state(CleanState),
+ lists:usort(cleanup_dep_calls(DepList1 ++ DepList2)).
+
+fixup_race_list_helper(Parents, Calls, CurrFun, WarnVarArgs, RaceWarnTag,
+ State) ->
+ case Parents of
+ [] -> [];
+ [Head|Tail] ->
+ Callgraph = dialyzer_dataflow:state__get_callgraph(State),
+ Code =
+ case dict:find(Head, dialyzer_callgraph:get_race_code(Callgraph)) of
+ error -> [];
+ {ok, [_A, C]} -> C
+ end,
+ {ok, FunLabel} = dialyzer_callgraph:lookup_label(Head, Callgraph),
+ DepList1 =
+ fixup_race_forward_pullout(Head, FunLabel, Calls, Code, [], CurrFun,
+ WarnVarArgs, RaceWarnTag, dict:new(),
+ [], [], [], 2 * ?local, State),
+ DepList2 =
+ fixup_race_list_helper(Tail, Calls, CurrFun, WarnVarArgs,
+ RaceWarnTag, State),
+ DepList1 ++ DepList2
+ end.
+
+%%% ===========================================================================
+%%%
+%%% Forward Analysis
+%%%
+%%% ===========================================================================
+
+fixup_race_forward_pullout(CurrFun, CurrFunLabel, Calls, Code, RaceList,
+ InitFun, WarnVarArgs, RaceWarnTag, RaceVarMap,
+ FunDefVars, FunCallVars, FunArgTypes, NestingLevel,
+ State) ->
+ TState = dialyzer_dataflow:state__duplicate(State),
+ {DepList, NewCurrFun, NewCurrFunLabel, NewCalls,
+ NewCode, NewRaceList, NewRaceVarMap, NewFunDefVars,
+ NewFunCallVars, NewFunArgTypes, NewNestingLevel} =
+ fixup_race_forward(CurrFun, CurrFunLabel, Calls, Code, RaceList,
+ InitFun, WarnVarArgs, RaceWarnTag, RaceVarMap,
+ FunDefVars, FunCallVars, FunArgTypes, NestingLevel,
+ cleanup_race_code(TState)),
+ dialyzer_dataflow:dispose_state(TState),
+ case NewCode of
+ [] -> DepList;
+ [#fun_call{caller = NewCurrFun, callee = Call, arg_types = FunTypes,
+ vars = FunArgs}|Tail] ->
+ Callgraph = dialyzer_dataflow:state__get_callgraph(State),
+ OkCall = {ok, Call},
+ {Name, Label} =
+ case is_integer(Call) of
+ true ->
+ case dialyzer_callgraph:lookup_name(Call, Callgraph) of
+ error -> {OkCall, OkCall};
+ N -> {N, OkCall}
+ end;
+ false ->
+ {OkCall, dialyzer_callgraph:lookup_label(Call, Callgraph)}
+ end,
+ {NewCurrFun1, NewCurrFunLabel1, NewCalls1, NewCode1, NewRaceList1,
+ NewRaceVarMap1, NewFunDefVars1, NewFunCallVars1, NewFunArgTypes1,
+ NewNestingLevel1} =
+ case Label =:= error of
+ true ->
+ {NewCurrFun, NewCurrFunLabel, NewCalls, Tail, NewRaceList,
+ NewRaceVarMap, NewFunDefVars, NewFunCallVars, NewFunArgTypes,
+ NewNestingLevel};
+ false ->
+ {ok, Fun} = Name,
+ {ok, Int} = Label,
+ case dict:find(Fun, dialyzer_callgraph:get_race_code(Callgraph)) of
+ error ->
+ {NewCurrFun, NewCurrFunLabel, NewCalls, Tail, NewRaceList,
+ NewRaceVarMap, NewFunDefVars, NewFunCallVars, NewFunArgTypes,
+ NewNestingLevel};
+ {ok, [Args, CodeB]} ->
+ Races = dialyzer_dataflow:state__get_races(State),
+ {RetCurrFun, RetCurrFunLabel, RetCalls, RetCode,
+ RetRaceList, RetRaceVarMap, RetFunDefVars, RetFunCallVars,
+ RetFunArgTypes, RetNestingLevel} =
+ fixup_race_forward_helper(NewCurrFun,
+ NewCurrFunLabel, Fun, Int, NewCalls, NewCalls,
+ [#curr_fun{status = out, mfa = NewCurrFun,
+ label = NewCurrFunLabel,
+ var_map = NewRaceVarMap,
+ def_vars = NewFunDefVars,
+ call_vars = NewFunCallVars,
+ arg_types = NewFunArgTypes}|
+ Tail],
+ NewRaceList, InitFun, FunArgs, FunTypes, RaceWarnTag,
+ NewRaceVarMap, NewFunDefVars, NewFunCallVars,
+ NewFunArgTypes, NewNestingLevel, Args, CodeB,
+ Races#races.race_list),
+ case RetCode of
+ [#curr_fun{}|_CodeTail] ->
+ {NewCurrFun, NewCurrFunLabel, RetCalls, RetCode,
+ RetRaceList, NewRaceVarMap, NewFunDefVars,
+ NewFunCallVars, NewFunArgTypes, RetNestingLevel};
+ _Else ->
+ {RetCurrFun, RetCurrFunLabel, RetCalls, RetCode,
+ RetRaceList, RetRaceVarMap, RetFunDefVars,
+ RetFunCallVars, RetFunArgTypes, RetNestingLevel}
+ end
+ end
+ end,
+ DepList ++
+ fixup_race_forward_pullout(NewCurrFun1, NewCurrFunLabel1, NewCalls1,
+ NewCode1, NewRaceList1, InitFun, WarnVarArgs,
+ RaceWarnTag, NewRaceVarMap1, NewFunDefVars1,
+ NewFunCallVars1, NewFunArgTypes1,
+ NewNestingLevel1, State)
+ end.
+
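+%% The forward traversal proper: walk the recorded code item by item, keep
+%% the dependent calls that match the warning tag, maintain the variable
+%% bindings (RaceVarMap) across clauses and lets, and stop once the race tag
+%% that triggered the warning has been handled.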
+fixup_race_forward(CurrFun, CurrFunLabel, Calls, Code, RaceList,
+ InitFun, WarnVarArgs, RaceWarnTag, RaceVarMap,
+ FunDefVars, FunCallVars, FunArgTypes, NestingLevel,
+ State) ->
+ case Code of
+ [] ->
+ {[], CurrFun, CurrFunLabel, Calls, Code, RaceList, RaceVarMap,
+ FunDefVars, FunCallVars, FunArgTypes, NestingLevel};
+ [Head|Tail] ->
+ Callgraph = dialyzer_dataflow:state__get_callgraph(State),
+ {NewRL, DepList, NewNL, Return} =
+ case Head of
+ #dep_call{call_name = whereis} ->
+ case RaceWarnTag of
+ WarnWhereis when WarnWhereis =:= ?WARN_WHEREIS_REGISTER orelse
+ WarnWhereis =:= ?WARN_WHEREIS_UNREGISTER ->
+ {[Head#dep_call{var_map = RaceVarMap}|RaceList],
+ [], NestingLevel, false};
+ _Other ->
+ {RaceList, [], NestingLevel, false}
+ end;
+ #dep_call{call_name = ets_lookup} ->
+ case RaceWarnTag of
+ ?WARN_ETS_LOOKUP_INSERT ->
+ {[Head#dep_call{var_map = RaceVarMap}|RaceList],
+ [], NestingLevel, false};
+ _Other ->
+ {RaceList, [], NestingLevel, false}
+ end;
+ #dep_call{call_name = mnesia_dirty_read} ->
+ case RaceWarnTag of
+ ?WARN_MNESIA_DIRTY_READ_WRITE ->
+ {[Head#dep_call{var_map = RaceVarMap}|RaceList],
+ [], NestingLevel, false};
+ _Other ->
+ {RaceList, [], NestingLevel, false}
+ end;
+ #warn_call{call_name = RegCall} when RegCall =:= register orelse
+ RegCall =:= unregister ->
+ case RaceWarnTag of
+ WarnWhereis when WarnWhereis =:= ?WARN_WHEREIS_REGISTER orelse
+ WarnWhereis =:= ?WARN_WHEREIS_UNREGISTER ->
+ {[Head#warn_call{var_map = RaceVarMap}|RaceList],
+ [], NestingLevel, false};
+ _Other ->
+ {RaceList, [], NestingLevel, false}
+ end;
+ #warn_call{call_name = ets_insert} ->
+ case RaceWarnTag of
+ ?WARN_ETS_LOOKUP_INSERT ->
+ {[Head#warn_call{var_map = RaceVarMap}|RaceList],
+ [], NestingLevel, false};
+ _Other ->
+ {RaceList, [], NestingLevel, false}
+ end;
+ #warn_call{call_name = mnesia_dirty_write} ->
+ case RaceWarnTag of
+ ?WARN_MNESIA_DIRTY_READ_WRITE ->
+ {[Head#warn_call{var_map = RaceVarMap}|RaceList],
+ [], NestingLevel, false};
+ _Other ->
+ {RaceList, [], NestingLevel, false}
+ end;
+ #fun_call{caller = CurrFun, callee = InitFun} ->
+ {RaceList, [], NestingLevel, false};
+ #fun_call{caller = CurrFun} ->
+ {RaceList, [], NestingLevel - 1, false};
+ beg_case ->
+ {[Head|RaceList], [], NestingLevel, false};
+ #beg_clause{} ->
+ {[#beg_clause{}|RaceList], [], NestingLevel, false};
+ #end_clause{} ->
+ {[#end_clause{}|RaceList], [], NestingLevel, false};
+ #end_case{} ->
+ {[Head|RaceList], [], NestingLevel, false};
+ #let_tag{} ->
+ {RaceList, [], NestingLevel, false};
+ #curr_fun{status = in, mfa = InitFun,
+ label = _InitFunLabel, var_map = _NewRVM,
+ def_vars = NewFDV, call_vars = NewFCV,
+ arg_types = _NewFAT} ->
+ {[#curr_fun{status = out, var_map = RaceVarMap,
+ def_vars = NewFDV, call_vars = NewFCV}|
+ RaceList], [], NestingLevel - 1, false};
+ #curr_fun{status = in, def_vars = NewFDV,
+ call_vars = NewFCV} ->
+ {[#curr_fun{status = out, var_map = RaceVarMap,
+ def_vars = NewFDV, call_vars = NewFCV}|
+ RaceList],
+ [], NestingLevel - 1, false};
+ #curr_fun{status = out} ->
+ {[#curr_fun{status = in, var_map = RaceVarMap}|RaceList], [],
+ NestingLevel + 1, false};
+ RaceTag ->
+ PublicTables = dialyzer_callgraph:get_public_tables(Callgraph),
+ NamedTables = dialyzer_callgraph:get_named_tables(Callgraph),
+ WarnVarArgs1 =
+ var_type_analysis(FunDefVars, FunArgTypes, WarnVarArgs,
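+%% Return {Id, Sig, Contract, LocalRet} for a function: the signature and
+%% contract come from the PLT, the local return type from the fun table
+%% (t_any() for remote MFAs, 'external' for code outside the analysis).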
+ RaceWarnTag, RaceVarMap,
+ dialyzer_dataflow:state__records_only(State)),
+ {NewDepList, IsPublic, _Return} =
+ get_deplist_paths(RaceList, WarnVarArgs1, RaceWarnTag,
+ RaceVarMap, 0, PublicTables, NamedTables),
+ {NewHead, NewDepList1} =
+ case RaceTag of
+ whereis_register ->
+ {[#warn_call{call_name = register, args = WarnVarArgs,
+ var_map = RaceVarMap}],
+ NewDepList};
+ whereis_unregister ->
+ {[#warn_call{call_name = unregister, args = WarnVarArgs,
+ var_map = RaceVarMap}],
+ NewDepList};
+ ets_lookup_insert ->
+ NewWarnCall =
+ [#warn_call{call_name = ets_insert, args = WarnVarArgs,
+ var_map = RaceVarMap}],
+ [Tab, Names, _, _] = WarnVarArgs,
+ case IsPublic orelse
+ compare_var_list(Tab, PublicTables, RaceVarMap)
+ orelse
+ length(Names -- NamedTables) < length(Names) of
+ true ->
+ {NewWarnCall, NewDepList};
+ false -> {NewWarnCall, []}
+ end;
+ mnesia_dirty_read_write ->
+ {[#warn_call{call_name = mnesia_dirty_write,
+ args = WarnVarArgs, var_map = RaceVarMap}],
+ NewDepList}
+ end,
+ {NewHead ++ RaceList, NewDepList1, NestingLevel,
+ is_last_race(RaceTag, InitFun, Tail, Callgraph)}
+ end,
+ {NewCurrFun, NewCurrFunLabel, NewCode, NewRaceList, NewRaceVarMap,
+ NewFunDefVars, NewFunCallVars, NewFunArgTypes, NewNestingLevel,
+ PullOut} =
+ case Head of
+ #fun_call{caller = CurrFun} ->
+ case NewNL =:= 0 of
+ true ->
+ {CurrFun, CurrFunLabel, Tail, NewRL, RaceVarMap,
+ FunDefVars, FunCallVars, FunArgTypes, NewNL, false};
+ false ->
+ {CurrFun, CurrFunLabel, Code, NewRL, RaceVarMap,
+ FunDefVars, FunCallVars, FunArgTypes, NewNL, true}
+ end;
+ #beg_clause{arg = Arg, pats = Pats, guard = Guard} ->
+ {RaceVarMap1, RemoveClause} =
+ race_var_map_guard(Arg, Pats, Guard, RaceVarMap, bind),
+ case RemoveClause of
+ true ->
+ {RaceList2,
+ #curr_fun{mfa = CurrFun2, label = CurrFunLabel2,
+ var_map = RaceVarMap2, def_vars = FunDefVars2,
+ call_vars = FunCallVars2, arg_types = FunArgTypes2},
+ Code2, NestingLevel2} =
+ remove_clause(NewRL,
+ #curr_fun{mfa = CurrFun, label = CurrFunLabel,
+ var_map = RaceVarMap1,
+ def_vars = FunDefVars,
+ call_vars = FunCallVars,
+ arg_types = FunArgTypes},
+ Tail, NewNL),
+ {CurrFun2, CurrFunLabel2, Code2, RaceList2,
+ RaceVarMap2, FunDefVars2, FunCallVars2, FunArgTypes2,
+ NestingLevel2, false};
+ false ->
+ {CurrFun, CurrFunLabel, Tail, NewRL, RaceVarMap1,
+ FunDefVars, FunCallVars, FunArgTypes, NewNL, false}
+ end;
+ #end_clause{arg = Arg, pats = Pats, guard = Guard} ->
+ {RaceVarMap1, _RemoveClause} =
+ race_var_map_guard(Arg, Pats, Guard, RaceVarMap, unbind),
+ {CurrFun, CurrFunLabel, Tail, NewRL, RaceVarMap1,
+ FunDefVars, FunCallVars, FunArgTypes, NewNL,
+ false};
+ #end_case{clauses = Clauses} ->
+ RaceVarMap1 =
+ race_var_map_clauses(Clauses, RaceVarMap),
+ {CurrFun, CurrFunLabel, Tail, NewRL, RaceVarMap1,
+ FunDefVars, FunCallVars, FunArgTypes, NewNL,
+ false};
+ #let_tag{var = Var, arg = Arg} ->
+ {CurrFun, CurrFunLabel, Tail, NewRL,
+ race_var_map(Var, Arg, RaceVarMap, bind), FunDefVars,
+ FunCallVars, FunArgTypes, NewNL, false};
+ #curr_fun{mfa = CurrFun1, label = CurrFunLabel1,
+ var_map = RaceVarMap1, def_vars = FunDefVars1,
+ call_vars = FunCallVars1, arg_types = FunArgTypes1} ->
+ case NewNL =:= 0 of
+ true ->
+ {CurrFun, CurrFunLabel,
+ remove_nonlocal_functions(Tail, 1), NewRL, RaceVarMap,
+ FunDefVars, FunCallVars, FunArgTypes, NewNL, false};
+ false ->
+ {CurrFun1, CurrFunLabel1, Tail, NewRL, RaceVarMap1,
+ FunDefVars1, FunCallVars1, FunArgTypes1, NewNL, false}
+ end;
+ _Thing ->
+ {CurrFun, CurrFunLabel, Tail, NewRL, RaceVarMap,
+ FunDefVars, FunCallVars, FunArgTypes, NewNL, false}
+ end,
+ case Return of
+ true ->
+ {DepList, NewCurrFun, NewCurrFunLabel, Calls,
+ [], NewRaceList, NewRaceVarMap, NewFunDefVars,
+ NewFunCallVars, NewFunArgTypes, NewNestingLevel};
+ false ->
+ NewNestingLevel1 =
+ case NewNestingLevel =:= 0 of
+ true -> NewNestingLevel + 1;
+ false -> NewNestingLevel
+ end,
+ case PullOut of
+ true ->
+ {DepList, NewCurrFun, NewCurrFunLabel, Calls,
+ NewCode, NewRaceList, NewRaceVarMap, NewFunDefVars,
+ NewFunCallVars, NewFunArgTypes, NewNestingLevel1};
+ false ->
+ {RetDepList, NewCurrFun1, NewCurrFunLabel1, NewCalls1,
+ NewCode1, NewRaceList1, NewRaceVarMap1, NewFunDefVars1,
+ NewFunCallVars1, NewFunArgTypes1, NewNestingLevel2} =
+ fixup_race_forward(NewCurrFun, NewCurrFunLabel, Calls,
+ NewCode, NewRaceList, InitFun, WarnVarArgs,
+ RaceWarnTag, NewRaceVarMap, NewFunDefVars,
+ NewFunCallVars, NewFunArgTypes,
+ NewNestingLevel1, State),
+ {DepList ++ RetDepList, NewCurrFun1, NewCurrFunLabel1,
+ NewCalls1, NewCode1, NewRaceList1, NewRaceVarMap1,
+ NewFunDefVars1, NewFunCallVars1, NewFunArgTypes1,
+ NewNestingLevel2}
+ end
+ end
+ end.
+
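+%% Walk a race list and return {DepList, IsPublic, Continue}: the dependent
+%% calls that can still race with the warning call, whether a public or
+%% named ets table is involved, and whether the caller should continue
+%% scanning the remaining code.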
+get_deplist_paths(RaceList, WarnVarArgs, RaceWarnTag, RaceVarMap, CurrLevel,
+ PublicTables, NamedTables) ->
+ case RaceList of
+ [] -> {[], false, true};
+ [Head|Tail] ->
+ case Head of
+ #end_case{} ->
+ {RaceList1, DepList1, IsPublic1, Continue1} =
+ handle_case(Tail, WarnVarArgs, RaceWarnTag, RaceVarMap, CurrLevel,
+ PublicTables, NamedTables),
+ case Continue1 of
+ true ->
+ {DepList2, IsPublic2, Continue2} =
+ get_deplist_paths(RaceList1, WarnVarArgs, RaceWarnTag,
+ RaceVarMap, CurrLevel, PublicTables,
+ NamedTables),
+ {DepList1 ++ DepList2, IsPublic1 orelse IsPublic2, Continue2};
+ false -> {DepList1, IsPublic1, false}
+ end;
+ #beg_clause{} ->
+ get_deplist_paths(fixup_before_case_path(Tail), WarnVarArgs,
+ RaceWarnTag, RaceVarMap, CurrLevel, PublicTables,
+ NamedTables);
+ #curr_fun{status = in, var_map = RaceVarMap1} ->
+ {DepList, IsPublic, Continue} =
+ get_deplist_paths(Tail, WarnVarArgs, RaceWarnTag, RaceVarMap,
+ CurrLevel + 1, PublicTables, NamedTables),
+ IsPublic1 =
+ case RaceWarnTag of
+ ?WARN_ETS_LOOKUP_INSERT ->
+ [Tabs, Names, _, _] = WarnVarArgs,
+ IsPublic orelse
+ lists:any(
+ fun (T) ->
+ compare_var_list(T, PublicTables, RaceVarMap1)
+ end, Tabs)
+ orelse
+ length(Names -- NamedTables) < length(Names);
+ _ -> true
+ end,
+ {DepList, IsPublic1, Continue};
+ #curr_fun{status = out, var_map = RaceVarMap1, def_vars = FunDefVars,
+ call_vars = FunCallVars} ->
+ WarnVarArgs1 =
+ var_analysis([format_arg(DefVar) || DefVar <- FunDefVars],
+ [format_arg(CallVar) || CallVar <- FunCallVars],
+ WarnVarArgs, RaceWarnTag),
+ {WarnVarArgs2, Stop} =
+ case RaceWarnTag of
+ ?WARN_WHEREIS_REGISTER ->
+ [WVA1, WVA2, WVA3, WVA4] = WarnVarArgs1,
+ Vars =
+ lists:flatten(
+ [find_all_bound_vars(V, RaceVarMap1) || V <- WVA1]),
+ case {Vars, CurrLevel} of
+ {[], 0} ->
+ {WarnVarArgs, true};
+ {[], _} ->
+ {WarnVarArgs, false};
+ _ ->
+ {[Vars, WVA2, WVA3, WVA4], false}
+ end;
+ ?WARN_WHEREIS_UNREGISTER ->
+ [WVA1, WVA2] = WarnVarArgs1,
+ Vars =
+ lists:flatten(
+ [find_all_bound_vars(V, RaceVarMap1) || V <- WVA1]),
+ case {Vars, CurrLevel} of
+ {[], 0} ->
+ {WarnVarArgs, true};
+ {[], _} ->
+ {WarnVarArgs, false};
+ _ ->
+ {[Vars, WVA2], false}
+ end;
+ ?WARN_ETS_LOOKUP_INSERT ->
+ [WVA1, WVA2, WVA3, WVA4] = WarnVarArgs1,
+ Vars1 =
+ lists:flatten(
+ [find_all_bound_vars(V1, RaceVarMap1) || V1 <- WVA1]),
+ Vars2 =
+ lists:flatten(
+ [find_all_bound_vars(V2, RaceVarMap1) || V2 <- WVA3]),
+ case {Vars1, Vars2, CurrLevel} of
+ {[], _, 0} ->
+ {WarnVarArgs, true};
+ {[], _, _} ->
+ {WarnVarArgs, false};
+ {_, [], 0} ->
+ {WarnVarArgs, true};
+ {_, [], _} ->
+ {WarnVarArgs, false};
+ _ ->
+ {[Vars1, WVA2, Vars2, WVA4], false}
+ end;
+ ?WARN_MNESIA_DIRTY_READ_WRITE ->
+ [WVA1, WVA2|T] = WarnVarArgs1,
+ Vars =
+ lists:flatten(
+ [find_all_bound_vars(V, RaceVarMap1) || V <- WVA1]),
+ case {Vars, CurrLevel} of
+ {[], 0} ->
+ {WarnVarArgs, true};
+ {[], _} ->
+ {WarnVarArgs, false};
+ _ ->
+ {[Vars, WVA2|T], false}
+ end
+ end,
+ case Stop of
+ true -> {[], false, false};
+ false ->
+ CurrLevel1 =
+ case CurrLevel of
+ 0 -> CurrLevel;
+ _ -> CurrLevel - 1
+ end,
+ get_deplist_paths(Tail, WarnVarArgs2, RaceWarnTag, RaceVarMap1,
+ CurrLevel1, PublicTables, NamedTables)
+ end;
+ #warn_call{call_name = RegCall, args = WarnVarArgs1,
+ var_map = RaceVarMap1} when RegCall =:= register orelse
+ RegCall =:= unregister ->
+ case compare_first_arg(WarnVarArgs, WarnVarArgs1, RaceVarMap1) of
+ true -> {[], false, false};
+ NewWarnVarArgs ->
+ get_deplist_paths(Tail, NewWarnVarArgs, RaceWarnTag, RaceVarMap,
+ CurrLevel, PublicTables, NamedTables)
+ end;
+ #warn_call{call_name = ets_insert, args = WarnVarArgs1,
+ var_map = RaceVarMap1} ->
+ case compare_ets_insert(WarnVarArgs, WarnVarArgs1, RaceVarMap1) of
+ true -> {[], false, false};
+ NewWarnVarArgs ->
+ get_deplist_paths(Tail, NewWarnVarArgs, RaceWarnTag, RaceVarMap,
+ CurrLevel, PublicTables, NamedTables)
+ end;
+ #warn_call{call_name = mnesia_dirty_write, args = WarnVarArgs1,
+ var_map = RaceVarMap1} ->
+ case compare_first_arg(WarnVarArgs, WarnVarArgs1, RaceVarMap1) of
+ true -> {[], false, false};
+ NewWarnVarArgs ->
+ get_deplist_paths(Tail, NewWarnVarArgs, RaceWarnTag, RaceVarMap,
+ CurrLevel, PublicTables, NamedTables)
+ end;
+ #dep_call{var_map = RaceVarMap1} ->
+ {DepList, IsPublic, Continue} =
+ get_deplist_paths(Tail, WarnVarArgs, RaceWarnTag, RaceVarMap,
+ CurrLevel, PublicTables, NamedTables),
+ {refine_race(Head, WarnVarArgs, RaceWarnTag, DepList, RaceVarMap1),
+ IsPublic, Continue}
+ end
+ end.
+
+handle_case(RaceList, WarnVarArgs, RaceWarnTag, RaceVarMap, CurrLevel,
+ PublicTables, NamedTables) ->
+ case RaceList of
+ [] -> {[], [], false, true};
+ [Head|Tail] ->
+ case Head of
+ #end_clause{} ->
+ {RestRaceList, DepList1, IsPublic1, Continue1} =
+ do_clause(Tail, WarnVarArgs, RaceWarnTag, RaceVarMap, CurrLevel,
+ PublicTables, NamedTables),
+ {RetRaceList, DepList2, IsPublic2, Continue2} =
+ handle_case(RestRaceList, WarnVarArgs, RaceWarnTag, RaceVarMap,
+ CurrLevel, PublicTables, NamedTables),
+ {RetRaceList, DepList1 ++ DepList2, IsPublic1 orelse IsPublic2,
+ Continue1 orelse Continue2};
+ beg_case -> {Tail, [], false, false}
+ end
+ end.
+
+do_clause(RaceList, WarnVarArgs, RaceWarnTag, RaceVarMap, CurrLevel,
+ PublicTables, NamedTables) ->
+ {DepList, IsPublic, Continue} =
+ get_deplist_paths(fixup_case_path(RaceList, 0), WarnVarArgs,
+ RaceWarnTag, RaceVarMap, CurrLevel,
+ PublicTables, NamedTables),
+ {fixup_case_rest_paths(RaceList, 0), DepList, IsPublic, Continue}.
+
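+%% Return the part of the race list that belongs to the current case clause,
+%% cutting at the matching #beg_clause{} while accounting for nested
+%% beg_case/end_case markers.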
+fixup_case_path(RaceList, NestingLevel) ->
+ case RaceList of
+ [] -> [];
+ [Head|Tail] ->
+ {NewNestingLevel, Return} =
+ case Head of
+ beg_case -> {NestingLevel - 1, false};
+ #end_case{} -> {NestingLevel + 1, false};
+ #beg_clause{} ->
+ case NestingLevel =:= 0 of
+ true -> {NestingLevel, true};
+ false -> {NestingLevel, false}
+ end;
+ _Other -> {NestingLevel, false}
+ end,
+ case Return of
+ true -> [];
+ false -> [Head|fixup_case_path(Tail, NewNestingLevel)]
+ end
+ end.
+
+%% Gets the race list before a case clause.
+fixup_before_case_path(RaceList) ->
+ case RaceList of
+ [] -> [];
+ [Head|Tail] ->
+ case Head of
+ #end_clause{} ->
+ fixup_before_case_path(fixup_case_rest_paths(Tail, 0));
+ beg_case -> Tail
+ end
+ end.
+
+fixup_case_rest_paths(RaceList, NestingLevel) ->
+ case RaceList of
+ [] -> [];
+ [Head|Tail] ->
+ {NewNestingLevel, Return} =
+ case Head of
+ beg_case -> {NestingLevel - 1, false};
+ #end_case{} -> {NestingLevel + 1, false};
+ #beg_clause{} ->
+ case NestingLevel =:= 0 of
+ true -> {NestingLevel, true};
+ false -> {NestingLevel, false}
+ end;
+ _Other -> {NestingLevel, false}
+ end,
+ case Return of
+ true -> Tail;
+ false -> fixup_case_rest_paths(Tail, NewNestingLevel)
+ end
+ end.
+
+fixup_race_forward_helper(CurrFun, CurrFunLabel, Fun, FunLabel,
+ Calls, CallsToAnalyze, Code, RaceList,
+ InitFun, NewFunArgs, NewFunTypes,
+ RaceWarnTag, RaceVarMap, FunDefVars,
+ FunCallVars, FunArgTypes, NestingLevel,
+ Args, CodeB, StateRaceList) ->
+ case Calls of
+ [] ->
+ {NewRaceList,
+ #curr_fun{mfa = NewCurrFun, label = NewCurrFunLabel,
+ var_map = NewRaceVarMap, def_vars = NewFunDefVars,
+ call_vars = NewFunCallVars, arg_types = NewFunArgTypes},
+ NewCode, NewNestingLevel} =
+ remove_clause(RaceList,
+ #curr_fun{mfa = CurrFun, label = CurrFunLabel, var_map = RaceVarMap,
+ def_vars = FunDefVars, call_vars = FunCallVars,
+ arg_types = FunArgTypes},
+ Code, NestingLevel),
+ {NewCurrFun, NewCurrFunLabel, CallsToAnalyze, NewCode, NewRaceList,
+ NewRaceVarMap, NewFunDefVars, NewFunCallVars, NewFunArgTypes,
+ NewNestingLevel};
+ [Head|Tail] ->
+ case Head of
+ {InitFun, InitFun} when CurrFun =:= InitFun, Fun =:= InitFun ->
+ NewCallsToAnalyze = lists:delete(Head, CallsToAnalyze),
+ NewRaceVarMap =
+ race_var_map(Args, NewFunArgs, RaceVarMap, bind),
+ RetC =
+ fixup_all_calls(InitFun, InitFun, FunLabel, Args,
+ CodeB ++
+ [#curr_fun{status = out, mfa = InitFun,
+ label = CurrFunLabel, var_map = RaceVarMap,
+ def_vars = FunDefVars, call_vars = FunCallVars,
+ arg_types = FunArgTypes}],
+ Code, RaceVarMap),
+ NewCode =
+ fixup_all_calls(InitFun, InitFun, FunLabel, Args,
+ CodeB ++
+ [#curr_fun{status = out, mfa = InitFun,
+ label = CurrFunLabel, var_map = NewRaceVarMap,
+ def_vars = Args, call_vars = NewFunArgs,
+ arg_types = NewFunTypes}],
+ [#curr_fun{status = in, mfa = Fun,
+ label = FunLabel, var_map = NewRaceVarMap,
+ def_vars = Args, call_vars = NewFunArgs,
+ arg_types = NewFunTypes}|
+ lists:reverse(StateRaceList)] ++
+ RetC, NewRaceVarMap),
+ {InitFun, FunLabel, NewCallsToAnalyze, NewCode, RaceList,
+ NewRaceVarMap, Args, NewFunArgs, NewFunTypes, NestingLevel};
+ {CurrFun, Fun} ->
+ NewCallsToAnalyze = lists:delete(Head, CallsToAnalyze),
+ NewRaceVarMap = race_var_map(Args, NewFunArgs, RaceVarMap, bind),
+ RetC =
+ case Fun of
+ InitFun ->
+ fixup_all_calls(CurrFun, Fun, FunLabel, Args,
+ lists:reverse(StateRaceList) ++
+ [#curr_fun{status = out, mfa = CurrFun,
+ label = CurrFunLabel, var_map = RaceVarMap,
+ def_vars = FunDefVars, call_vars = FunCallVars,
+ arg_types = FunArgTypes}],
+ Code, RaceVarMap);
+ _Other1 ->
+ fixup_all_calls(CurrFun, Fun, FunLabel, Args,
+ CodeB ++
+ [#curr_fun{status = out, mfa = CurrFun,
+ label = CurrFunLabel, var_map = RaceVarMap,
+ def_vars = FunDefVars, call_vars = FunCallVars,
+ arg_types = FunArgTypes}],
+ Code, RaceVarMap)
+ end,
+ NewCode =
+ case Fun of
+ InitFun ->
+ [#curr_fun{status = in, mfa = Fun,
+ label = FunLabel, var_map = NewRaceVarMap,
+ def_vars = Args, call_vars = NewFunArgs,
+ arg_types = NewFunTypes}|
+ lists:reverse(StateRaceList)] ++ RetC;
+ _ ->
+ [#curr_fun{status = in, mfa = Fun,
+ label = FunLabel, var_map = NewRaceVarMap,
+ def_vars = Args, call_vars = NewFunArgs,
+ arg_types = NewFunTypes}|CodeB] ++
+ RetC
+ end,
+ {Fun, FunLabel, NewCallsToAnalyze, NewCode, RaceList, NewRaceVarMap,
+ Args, NewFunArgs, NewFunTypes, NestingLevel};
+ {_TupleA, _TupleB} ->
+ fixup_race_forward_helper(CurrFun, CurrFunLabel, Fun, FunLabel,
+ Tail, CallsToAnalyze, Code, RaceList, InitFun, NewFunArgs,
+ NewFunTypes, RaceWarnTag, RaceVarMap, FunDefVars, FunCallVars,
+ FunArgTypes, NestingLevel, Args, CodeB, StateRaceList)
+ end
+ end.
+
+%%% ===========================================================================
+%%%
+%%% Backward Analysis
+%%%
+%%% ===========================================================================
+
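+%% Collect the functions that (transitively) call CurrFun by following the
+%% call edges backwards, at most ?local levels deep. The resulting parents
+%% are later fed back into the forward analysis.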
+fixup_race_backward(CurrFun, Calls, CallsToAnalyze, Parents, Height) ->
+ case Height =:= 0 of
+ true -> Parents;
+ false ->
+ case Calls of
+ [] ->
+ case is_integer(CurrFun) orelse lists:member(CurrFun, Parents) of
+ true -> Parents;
+ false -> [CurrFun|Parents]
+ end;
+ [Head|Tail] ->
+ {Parent, TupleB} = Head,
+ case TupleB =:= CurrFun of
+ true -> % more paths are needed
+ NewCallsToAnalyze = lists:delete(Head, CallsToAnalyze),
+ NewParents =
+ fixup_race_backward(Parent, NewCallsToAnalyze,
+ NewCallsToAnalyze, Parents, Height - 1),
+ fixup_race_backward(CurrFun, Tail, NewCallsToAnalyze, NewParents,
+ Height);
+ false ->
+ fixup_race_backward(CurrFun, Tail, CallsToAnalyze, Parents,
+ Height)
+ end
+ end
+ end.
+
+%%% ===========================================================================
+%%%
+%%% Utilities
+%%%
+%%% ===========================================================================
+
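+%% Check whether two variable labels are bound to each other in the race
+%% variable map, following binding chains transitively; entries are erased
+%% while recursing to avoid looping on cyclic bindings.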
+are_bound_labels(Label1, Label2, RaceVarMap) ->
+ case dict:find(Label1, RaceVarMap) of
+ error -> false;
+ {ok, Labels} ->
+ lists:member(Label2, Labels) orelse
+ are_bound_labels_helper(Labels, Label1, Label2, RaceVarMap)
+ end.
+
+are_bound_labels_helper(Labels, OldLabel, CompLabel, RaceVarMap) ->
+ case dict:size(RaceVarMap) of
+ 0 -> false;
+ _ ->
+ case Labels of
+ [] -> false;
+ [Head|Tail] ->
+ NewRaceVarMap = dict:erase(OldLabel, RaceVarMap),
+ are_bound_labels(Head, CompLabel, NewRaceVarMap) orelse
+ are_bound_labels_helper(Tail, Head, CompLabel, NewRaceVarMap)
+ end
+ end.
+
+are_bound_vars(Vars1, Vars2, RaceVarMap) ->
+ case is_list(Vars1) andalso is_list(Vars2) of
+ true ->
+ case Vars1 of
+ [] -> false;
+ [AHead|ATail] ->
+ case Vars2 of
+ [] -> false;
+ [PHead|PTail] ->
+ are_bound_vars(AHead, PHead, RaceVarMap) andalso
+ are_bound_vars(ATail, PTail, RaceVarMap)
+ end
+ end;
+ false ->
+ {NewVars1, NewVars2, IsList} =
+ case is_list(Vars1) of
+ true ->
+ case Vars1 of
+ [Var1] -> {Var1, Vars2, true};
+ _Thing -> {Vars1, Vars2, false}
+ end;
+ false ->
+ case is_list(Vars2) of
+ true ->
+ case Vars2 of
+ [Var2] -> {Vars1, Var2, true};
+ _Thing -> {Vars1, Vars2, false}
+ end;
+ false -> {Vars1, Vars2, true}
+ end
+ end,
+ case IsList of
+ true ->
+ case cerl:type(NewVars1) of
+ var ->
+ case cerl:type(NewVars2) of
+ var ->
+ ALabel = cerl_trees:get_label(NewVars1),
+ PLabel = cerl_trees:get_label(NewVars2),
+ are_bound_labels(ALabel, PLabel, RaceVarMap) orelse
+ are_bound_labels(PLabel, ALabel, RaceVarMap);
+ alias ->
+ are_bound_vars(NewVars1, cerl:alias_var(NewVars2),
+ RaceVarMap);
+ values ->
+ are_bound_vars(NewVars1, cerl:values_es(NewVars2),
+ RaceVarMap);
+ _Other -> false
+ end;
+ tuple ->
+ case cerl:type(NewVars2) of
+ tuple ->
+ are_bound_vars(cerl:tuple_es(NewVars1),
+ cerl:tuple_es(NewVars2), RaceVarMap);
+ alias ->
+ are_bound_vars(NewVars1, cerl:alias_var(NewVars2),
+ RaceVarMap);
+ values ->
+ are_bound_vars(NewVars1, cerl:values_es(NewVars2),
+ RaceVarMap);
+ _Other -> false
+ end;
+ cons ->
+ case cerl:type(NewVars2) of
+ cons ->
+ are_bound_vars(cerl:cons_hd(NewVars1),
+ cerl:cons_hd(NewVars2), RaceVarMap)
+ andalso
+ are_bound_vars(cerl:cons_tl(NewVars1),
+ cerl:cons_tl(NewVars2), RaceVarMap);
+ alias ->
+ are_bound_vars(NewVars1, cerl:alias_var(NewVars2),
+ RaceVarMap);
+ values ->
+ are_bound_vars(NewVars1, cerl:values_es(NewVars2),
+ RaceVarMap);
+ _Other -> false
+ end;
+ alias ->
+ case cerl:type(NewVars2) of
+ alias ->
+ are_bound_vars(cerl:alias_var(NewVars1),
+ cerl:alias_var(NewVars2), RaceVarMap);
+ _Other ->
+ are_bound_vars(cerl:alias_var(NewVars1),
+ NewVars2, RaceVarMap)
+ end;
+ values ->
+ case cerl:type(NewVars2) of
+ values ->
+ are_bound_vars(cerl:values_es(NewVars1),
+ cerl:values_es(NewVars2), RaceVarMap);
+ _Other ->
+ are_bound_vars(cerl:values_es(NewVars1),
+ NewVars2, RaceVarMap)
+ end;
+ _Other -> false
+ end;
+ false -> false
+ end
+ end.
+
+callgraph__renew_tables(Table, Callgraph) ->
+ case Table of
+ {named, NameLabel, Names} ->
+ PTablesToAdd =
+ case NameLabel of
+ ?no_label -> [];
+ _Other -> [NameLabel]
+ end,
+ NamesToAdd = filter_named_tables(Names),
+ PTables = dialyzer_callgraph:get_public_tables(Callgraph),
+ NTables = dialyzer_callgraph:get_named_tables(Callgraph),
+ dialyzer_callgraph:put_public_tables(
+ lists:usort(PTablesToAdd ++ PTables),
+ dialyzer_callgraph:put_named_tables(
+ NamesToAdd ++ NTables, Callgraph));
+ _Other ->
+ Callgraph
+ end.
+
+cleanup_clause_code(#curr_fun{mfa = CurrFun} = CurrTuple, Code,
+ NestingLevel, LocalNestingLevel) ->
+ case Code of
+ [] -> {CurrTuple, []};
+ [Head|Tail] ->
+ {NewLocalNestingLevel, NewNestingLevel, NewCurrTuple, Return} =
+ case Head of
+ beg_case ->
+ {LocalNestingLevel, NestingLevel + 1, CurrTuple, false};
+ #end_case{} ->
+ {LocalNestingLevel, NestingLevel - 1, CurrTuple, false};
+ #end_clause{} ->
+ case NestingLevel =:= 0 of
+ true ->
+ {LocalNestingLevel, NestingLevel, CurrTuple, true};
+ false ->
+ {LocalNestingLevel, NestingLevel, CurrTuple, false}
+ end;
+ #fun_call{caller = CurrFun} ->
+ {LocalNestingLevel - 1, NestingLevel, CurrTuple, false};
+ #curr_fun{status = in} ->
+ {LocalNestingLevel - 1, NestingLevel, Head, false};
+ #curr_fun{status = out} ->
+ {LocalNestingLevel + 1, NestingLevel, Head, false};
+ Other when Other =/= #fun_call{} ->
+ {LocalNestingLevel, NestingLevel, CurrTuple, false}
+ end,
+ case Return of
+ true -> {NewCurrTuple, Tail};
+ false ->
+ cleanup_clause_code(NewCurrTuple, Tail, NewNestingLevel,
+ NewLocalNestingLevel)
+ end
+ end.
+
+cleanup_dep_calls(DepList) ->
+ case DepList of
+ [] -> [];
+ [#dep_call{call_name = CallName, arg_types = ArgTypes,
+ vars = Vars, state = State, file_line = FileLine}|T] ->
+ [#dep_call{call_name = CallName, arg_types = ArgTypes,
+ vars = Vars, state = State, file_line = FileLine}|
+ cleanup_dep_calls(T)]
+ end.
+
+cleanup_race_code(State) ->
+ Callgraph = dialyzer_dataflow:state__get_callgraph(State),
+ dialyzer_dataflow:state__put_callgraph(
+ dialyzer_callgraph:race_code_new(Callgraph), State).
+
+filter_named_tables(NamesList) ->
+ case NamesList of
+ [] -> [];
+ [Head|Tail] ->
+ NewHead =
+ case string:rstr(Head, "()") of
+ 0 -> [Head];
+ _Other -> []
+ end,
+ NewHead ++ filter_named_tables(Tail)
+ end.
+
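+%% Remove redundant parents: when one collected parent can reach another
+%% through the call digraph, only the calling (topmost) one of the pair is
+%% kept.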
+filter_parents(Parents, NewParents, Digraph) ->
+ case Parents of
+ [] -> NewParents;
+ [Head|Tail] ->
+ NewParents1 = filter_parents_helper1(Head, Tail, NewParents, Digraph),
+ filter_parents(Tail, NewParents1, Digraph)
+ end.
+
+filter_parents_helper1(First, Rest, NewParents, Digraph) ->
+ case Rest of
+ [] -> NewParents;
+ [Head|Tail] ->
+ NewParents1 = filter_parents_helper2(First, Head, NewParents, Digraph),
+ filter_parents_helper1(First, Tail, NewParents1, Digraph)
+ end.
+
+filter_parents_helper2(Parent1, Parent2, NewParents, Digraph) ->
+ case digraph:get_path(Digraph, Parent1, Parent2) of
+ false ->
+ case digraph:get_path(Digraph, Parent2, Parent1) of
+ false -> NewParents;
+ _Vertices -> NewParents -- [Parent1]
+ end;
+ _Vertices -> NewParents -- [Parent2]
+ end.
+
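+%% Return all labels that are directly or transitively bound to Label in
+%% the race variable map; an unbound label simply maps to itself.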
+find_all_bound_vars(Label, RaceVarMap) ->
+ case dict:find(Label, RaceVarMap) of
+ error -> [Label];
+ {ok, Labels} ->
+ lists:usort(Labels ++
+ find_all_bound_vars_helper(Labels, Label, RaceVarMap))
+ end.
+
+find_all_bound_vars_helper(Labels, Label, RaceVarMap) ->
+ case dict:size(RaceVarMap) of
+ 0 -> [];
+ _ ->
+ case Labels of
+ [] -> [];
+ [Head|Tail] ->
+ NewRaceVarMap = dict:erase(Label, RaceVarMap),
+ find_all_bound_vars(Head, NewRaceVarMap) ++
+ find_all_bound_vars_helper(Tail, Head, NewRaceVarMap)
+ end
+ end.
+
+fixup_all_calls(CurrFun, NextFun, NextFunLabel, Args, CodeToReplace,
+ Code, RaceVarMap) ->
+ case Code of
+ [] -> [];
+ [Head|Tail] ->
+ NewCode =
+ case Head of
+ #fun_call{caller = CurrFun, callee = Callee,
+ arg_types = FunArgTypes, vars = FunArgs}
+ when Callee =:= NextFun orelse Callee =:= NextFunLabel ->
+ RaceVarMap1 = race_var_map(Args, FunArgs, RaceVarMap, bind),
+ [#curr_fun{status = in, mfa = NextFun, label = NextFunLabel,
+ var_map = RaceVarMap1, def_vars = Args,
+ call_vars = FunArgs, arg_types = FunArgTypes}|
+ CodeToReplace];
+ _Other -> [Head]
+ end,
+ RetCode =
+ fixup_all_calls(CurrFun, NextFun, NextFunLabel, Args, CodeToReplace,
+ Tail, RaceVarMap),
+ NewCode ++ RetCode
+ end.
+
+is_last_race(RaceTag, InitFun, Code, Callgraph) ->
+ case Code of
+ [] -> true;
+ [Head|Tail] ->
+ case Head of
+ RaceTag -> false;
+ #fun_call{callee = Fun} ->
+ FunName =
+ case is_integer(Fun) of
+ true ->
+ case dialyzer_callgraph:lookup_name(Fun, Callgraph) of
+ error -> Fun;
+ {ok, Name} -> Name
+ end;
+ false -> Fun
+ end,
+ Digraph = dialyzer_callgraph:get_digraph(Callgraph),
+ case FunName =:= InitFun orelse
+ digraph:get_path(Digraph, FunName, InitFun) of
+ false -> is_last_race(RaceTag, InitFun, Tail, Callgraph);
+ _Vertices -> false
+ end;
+ _Other -> is_last_race(RaceTag, InitFun, Tail, Callgraph)
+ end
+ end.
+
+lists_key_member(Member, List, N) when is_integer(Member) ->
+ case List of
+ [] -> 0;
+ [Head|Tail] ->
+ NewN = N + 1,
+ case Head of
+ Member -> NewN;
+ _Other -> lists_key_member(Member, Tail, NewN)
+ end
+ end;
+lists_key_member(_M, _L, _N) ->
+ 0.
+
+lists_key_member_lists(MemberList, List) ->
+ case MemberList of
+ [] -> 0;
+ [Head|Tail] ->
+ case lists_key_member(Head, List, 0) of
+ 0 -> lists_key_member_lists(Tail, List);
+ Other -> Other
+ end
+ end.
+
+lists_key_members_lists(MemberList, List) ->
+ case MemberList of
+ [] -> [];
+ [Head|Tail] ->
+ lists:usort(
+ lists_key_members_lists_helper(Head, List, 1) ++
+ lists_key_members_lists(Tail, List))
+ end.
+
+lists_key_members_lists_helper(Elem, List, N) when is_integer(Elem) ->
+ case List of
+ [] -> [];
+ [Head|Tail] ->
+ NewHead =
+ case Head =:= Elem of
+ true -> [N];
+ false -> []
+ end,
+ NewHead ++ lists_key_members_lists_helper(Elem, Tail, N + 1)
+ end;
+lists_key_members_lists_helper(_Elem, _List, _N) ->
+ [0].
+
+lists_key_replace(N, List, NewMember) ->
+ {Before, [_|After]} = lists:split(N - 1, List),
+ Before ++ [NewMember|After].
+
+lists_get(0, _List) -> ?no_label;
+lists_get(N, List) -> lists:nth(N, List).
+
+refine_race(RaceCall, WarnVarArgs, RaceWarnTag, DependencyList, RaceVarMap) ->
+ case RaceWarnTag of
+ WarnWhereis when WarnWhereis =:= ?WARN_WHEREIS_REGISTER orelse
+ WarnWhereis =:= ?WARN_WHEREIS_UNREGISTER ->
+ case RaceCall of
+ #dep_call{call_name = ets_lookup} ->
+ DependencyList;
+ #dep_call{call_name = mnesia_dirty_read} ->
+ DependencyList;
+ #dep_call{call_name = whereis, args = VarArgs} ->
+ refine_race_helper(RaceCall, VarArgs, WarnVarArgs, RaceWarnTag,
+ DependencyList, RaceVarMap)
+ end;
+ ?WARN_ETS_LOOKUP_INSERT ->
+ case RaceCall of
+ #dep_call{call_name = whereis} ->
+ DependencyList;
+ #dep_call{call_name = mnesia_dirty_read} ->
+ DependencyList;
+ #dep_call{call_name = ets_lookup, args = VarArgs} ->
+ refine_race_helper(RaceCall, VarArgs, WarnVarArgs, RaceWarnTag,
+ DependencyList, RaceVarMap)
+ end;
+ ?WARN_MNESIA_DIRTY_READ_WRITE ->
+ case RaceCall of
+ #dep_call{call_name = whereis} ->
+ DependencyList;
+ #dep_call{call_name = ets_lookup} ->
+ DependencyList;
+ #dep_call{call_name = mnesia_dirty_read, args = VarArgs} ->
+ refine_race_helper(RaceCall, VarArgs, WarnVarArgs, RaceWarnTag,
+ DependencyList, RaceVarMap)
+ end
+ end.
+
+refine_race_helper(RaceCall, VarArgs, WarnVarArgs, RaceWarnTag, DependencyList,
+ RaceVarMap) ->
+ case compare_types(VarArgs, WarnVarArgs, RaceWarnTag, RaceVarMap) of
+ true -> [RaceCall|DependencyList];
+ false -> DependencyList
+ end.
+
+remove_clause(RaceList, CurrTuple, Code, NestingLevel) ->
+ NewRaceList = fixup_case_rest_paths(RaceList, 0),
+ {NewCurrTuple, NewCode} =
+ cleanup_clause_code(CurrTuple, Code, 0, NestingLevel),
+ ReturnTuple = {NewRaceList, NewCurrTuple, NewCode, NestingLevel},
+ case NewRaceList of
+ [beg_case|RTail] ->
+ case NewCode of
+ [#end_case{}|CTail] ->
+ remove_clause(RTail, NewCurrTuple, CTail, NestingLevel);
+ _Other -> ReturnTuple
+ end;
+ _Else -> ReturnTuple
+ end.
+
+remove_nonlocal_functions(Code, NestingLevel) ->
+ case Code of
+ [] -> [];
+ [H|T] ->
+ NewNL =
+ case H of
+ #curr_fun{status = in} ->
+ NestingLevel + 1;
+ #curr_fun{status = out} ->
+ NestingLevel - 1;
+ _Other ->
+ NestingLevel
+ end,
+ case NewNL =:= 0 of
+ true -> T;
+ false -> remove_nonlocal_functions(T, NewNL)
+ end
+ end.
+
+renew_curr_fun(CurrFun, Races) ->
+ Races#races{curr_fun = CurrFun}.
+
+renew_curr_fun_label(CurrFunLabel, Races) ->
+ Races#races{curr_fun_label = CurrFunLabel}.
+
+renew_race_list(RaceList, Races) ->
+ Races#races{race_list = RaceList}.
+
+renew_race_list_size(RaceListSize, Races) ->
+ Races#races{race_list_size = RaceListSize}.
+
+renew_race_tags(RaceTags, Races) ->
+ Races#races{race_tags = RaceTags}.
+
+renew_table(Table, Races) ->
+ Races#races{new_table = Table}.
+
+state__renew_curr_fun(CurrFun, State) ->
+ Races = dialyzer_dataflow:state__get_races(State),
+ dialyzer_dataflow:state__put_races(renew_curr_fun(CurrFun, Races), State).
+
+state__renew_curr_fun_label(CurrFunLabel, State) ->
+ Races = dialyzer_dataflow:state__get_races(State),
+ dialyzer_dataflow:state__put_races(
+ renew_curr_fun_label(CurrFunLabel, Races), State).
+
+state__renew_race_list(RaceList, State) ->
+ Races = dialyzer_dataflow:state__get_races(State),
+ dialyzer_dataflow:state__put_races(renew_race_list(RaceList, Races), State).
+
+state__renew_race_tags(RaceTags, State) ->
+ Races = dialyzer_dataflow:state__get_races(State),
+ dialyzer_dataflow:state__put_races(renew_race_tags(RaceTags, Races), State).
+
+state__renew_info(RaceList, RaceListSize, RaceTags, Table, State) ->
+ Callgraph = dialyzer_dataflow:state__get_callgraph(State),
+ Races = dialyzer_dataflow:state__get_races(State),
+ dialyzer_dataflow:state__put_callgraph(
+ callgraph__renew_tables(Table, Callgraph),
+ dialyzer_dataflow:state__put_races(
+ renew_table(Table,
+ renew_race_list(RaceList,
+ renew_race_list_size(RaceListSize,
+ renew_race_tags(RaceTags, Races)))), State)).
+
+%%% ===========================================================================
+%%%
+%%% Variable and Type Utilities
+%%%
+%%% ===========================================================================
+
+any_args(StrList) ->
+ case StrList of
+ [] -> false;
+ [Head|Tail] ->
+ case string:rstr(Head, "()") of
+ 0 -> any_args(Tail);
+ _Other -> true
+ end
+ end.
+
+-spec bind_dict_vars(label(), label(), dict:dict()) -> dict:dict().
+
+bind_dict_vars(Key, Label, RaceVarMap) ->
+ case Key =:= Label of
+ true -> RaceVarMap;
+ false ->
+ case dict:find(Key, RaceVarMap) of
+ error -> dict:store(Key, [Label], RaceVarMap);
+ {ok, Labels} ->
+ case lists:member(Label, Labels) of
+ true -> RaceVarMap;
+ false -> dict:store(Key, [Label|Labels], RaceVarMap)
+ end
+ end
+ end.
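+
+%% A binding made by bind_dict_vars/3 is one-directional; race_var_map/4
+%% below therefore calls it in both directions. For illustration:
+%%   M1 = bind_dict_vars(1, 2, dict:new()),   %% M1 maps 1 => [2]
+%%   M2 = bind_dict_vars(1, 3, M1),           %% M2 maps 1 => [3, 2]
+%%   bind_dict_vars(1, 2, M2)                 %% unchanged: 2 already bound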
+
+bind_dict_vars_list(Key, Labels, RaceVarMap) ->
+ case Labels of
+ [] -> RaceVarMap;
+ [Head|Tail] ->
+ bind_dict_vars_list(Key, Tail, bind_dict_vars(Key, Head, RaceVarMap))
+ end.
+
+compare_ets_insert(OldWarnVarArgs, NewWarnVarArgs, RaceVarMap) ->
+ [Old1, Old2, Old3, Old4] = OldWarnVarArgs,
+ [New1, New2, New3, New4] = NewWarnVarArgs,
+ Bool =
+ case any_args(Old2) of
+ true -> compare_var_list(New1, Old1, RaceVarMap);
+ false ->
+ case any_args(New2) of
+ true -> compare_var_list(New1, Old1, RaceVarMap);
+ false -> compare_var_list(New1, Old1, RaceVarMap)
+ orelse (Old2 =:= New2)
+ end
+ end,
+ case Bool of
+ true ->
+ case any_args(Old4) of
+ true ->
+ case compare_list_vars(Old3, ets_list_args(New3), [], RaceVarMap) of
+ true -> true;
+ Args3 -> lists_key_replace(3, OldWarnVarArgs, Args3)
+ end;
+ false ->
+ case any_args(New4) of
+ true ->
+ case compare_list_vars(Old3, ets_list_args(New3), [],
+ RaceVarMap) of
+ true -> true;
+ Args3 -> lists_key_replace(3, OldWarnVarArgs, Args3)
+ end;
+ false ->
+ case compare_list_vars(Old3, ets_list_args(New3), [],
+ RaceVarMap) of
+ true -> true;
+ Args3 ->
+ lists_key_replace(4,
+ lists_key_replace(3, OldWarnVarArgs, Args3), Old4 -- New4)
+ end
+ end
+ end;
+ false -> OldWarnVarArgs
+ end.
+
+compare_first_arg(OldWarnVarArgs, NewWarnVarArgs, RaceVarMap) ->
+ [Old1, Old2|_OldT] = OldWarnVarArgs,
+ [New1, New2|_NewT] = NewWarnVarArgs,
+ case any_args(Old2) of
+ true ->
+ case compare_var_list(New1, Old1, RaceVarMap) of
+ true -> true;
+ false -> OldWarnVarArgs
+ end;
+ false ->
+ case any_args(New2) of
+ true ->
+ case compare_var_list(New1, Old1, RaceVarMap) of
+ true -> true;
+ false -> OldWarnVarArgs
+ end;
+ false ->
+ case compare_var_list(New1, Old1, RaceVarMap) of
+ true -> true;
+ false -> lists_key_replace(2, OldWarnVarArgs, Old2 -- New2)
+ end
+ end
+ end.
+
+compare_argtypes(ArgTypes, WarnArgTypes) ->
+ lists:any(fun (X) -> lists:member(X, WarnArgTypes) end, ArgTypes).
+
+%% Compares the argument types of the two suspicious calls.
+compare_types(VarArgs, WarnVarArgs, RaceWarnTag, RaceVarMap) ->
+ case RaceWarnTag of
+ ?WARN_WHEREIS_REGISTER ->
+ [VA1, VA2] = VarArgs,
+ [WVA1, WVA2, _, _] = WarnVarArgs,
+ case any_args(VA2) of
+ true -> compare_var_list(VA1, WVA1, RaceVarMap);
+ false ->
+ case any_args(WVA2) of
+ true -> compare_var_list(VA1, WVA1, RaceVarMap);
+ false ->
+ compare_var_list(VA1, WVA1, RaceVarMap) orelse
+ compare_argtypes(VA2, WVA2)
+ end
+ end;
+ ?WARN_WHEREIS_UNREGISTER ->
+ [VA1, VA2] = VarArgs,
+ [WVA1, WVA2] = WarnVarArgs,
+ case any_args(VA2) of
+ true -> compare_var_list(VA1, WVA1, RaceVarMap);
+ false ->
+ case any_args(WVA2) of
+ true -> compare_var_list(VA1, WVA1, RaceVarMap);
+ false ->
+ compare_var_list(VA1, WVA1, RaceVarMap) orelse
+ compare_argtypes(VA2, WVA2)
+ end
+ end;
+ ?WARN_ETS_LOOKUP_INSERT ->
+ [VA1, VA2, VA3, VA4] = VarArgs,
+ [WVA1, WVA2, WVA3, WVA4] = WarnVarArgs,
+ Bool =
+ case any_args(VA2) of
+ true -> compare_var_list(VA1, WVA1, RaceVarMap);
+ false ->
+ case any_args(WVA2) of
+ true -> compare_var_list(VA1, WVA1, RaceVarMap);
+ false ->
+ compare_var_list(VA1, WVA1, RaceVarMap) orelse
+ compare_argtypes(VA2, WVA2)
+ end
+ end,
+ Bool andalso
+ (case any_args(VA4) of
+ true ->
+ compare_var_list(VA3, WVA3, RaceVarMap);
+ false ->
+ case any_args(WVA4) of
+ true ->
+ compare_var_list(VA3, WVA3, RaceVarMap);
+ false ->
+ compare_var_list(VA3, WVA3, RaceVarMap) orelse
+ compare_argtypes(VA4, WVA4)
+ end
+ end);
+ ?WARN_MNESIA_DIRTY_READ_WRITE ->
+ [VA1, VA2|_] = VarArgs, %% Two or four elements
+ [WVA1, WVA2|_] = WarnVarArgs,
+ case any_args(VA2) of
+ true -> compare_var_list(VA1, WVA1, RaceVarMap);
+ false ->
+ case any_args(WVA2) of
+ true -> compare_var_list(VA1, WVA1, RaceVarMap);
+ false ->
+ compare_var_list(VA1, WVA1, RaceVarMap) orelse
+ compare_argtypes(VA2, WVA2)
+ end
+ end
+ end.
+
+compare_list_vars(VarList1, VarList2, NewVarList1, RaceVarMap) ->
+ case VarList1 of
+ [] ->
+ case NewVarList1 of
+ [] -> true;
+ _Other -> NewVarList1
+ end;
+ [Head|Tail] ->
+ NewHead =
+ case compare_var_list(Head, VarList2, RaceVarMap) of
+ true -> [];
+ false -> [Head]
+ end,
+ compare_list_vars(Tail, VarList2, NewHead ++ NewVarList1, RaceVarMap)
+ end.
+
+compare_vars(Var1, Var2, RaceVarMap) when is_integer(Var1), is_integer(Var2) ->
+ Var1 =:= Var2 orelse
+ are_bound_labels(Var1, Var2, RaceVarMap) orelse
+ are_bound_labels(Var2, Var1, RaceVarMap);
+compare_vars(_Var1, _Var2, _RaceVarMap) ->
+ false.
+
+-spec compare_var_list(label_type(), [label_type()], dict:dict()) -> boolean().
+
+compare_var_list(Var, VarList, RaceVarMap) ->
+ lists:any(fun (V) -> compare_vars(Var, V, RaceVarMap) end, VarList).
+
+ets_list_args(MaybeList) ->
+ case is_list(MaybeList) of
+ true ->
+ try [ets_tuple_args(T) || T <- MaybeList]
+ catch _:_ -> [?no_label]
+ end;
+ false -> [ets_tuple_args(MaybeList)]
+ end.
+
+ets_list_argtypes(ListStr) ->
+ ListStr1 = string:strip(ListStr, left, $[),
+ ListStr2 = string:strip(ListStr1, right, $]),
+ ListStr3 = string:strip(ListStr2, right, $.),
+ string:strip(ListStr3, right, $,).
+
+ets_tuple_args(MaybeTuple) ->
+ case is_tuple(MaybeTuple) of
+ true -> element(1, MaybeTuple);
+ false -> ?no_label
+ end.
+
+ets_tuple_argtypes2(TupleList, ElemList) ->
+ case TupleList of
+ [] -> ElemList;
+ [H|T] ->
+ ets_tuple_argtypes2(T,
+ ets_tuple_argtypes2_helper(H, [], 0) ++ ElemList)
+ end.
+
+ets_tuple_argtypes2_helper(TupleStr, ElemStr, NestingLevel) ->
+ case TupleStr of
+ [] -> [];
+ [H|T] ->
+ {NewElemStr, NewNestingLevel, Return} =
+ case H of
+ ${ when NestingLevel =:= 0 ->
+ {ElemStr, NestingLevel + 1, false};
+ ${ ->
+ {[H|ElemStr], NestingLevel + 1, false};
+ $[ ->
+ {[H|ElemStr], NestingLevel + 1, false};
+ $( ->
+ {[H|ElemStr], NestingLevel + 1, false};
+ $} ->
+ {[H|ElemStr], NestingLevel - 1, false};
+ $] ->
+ {[H|ElemStr], NestingLevel - 1, false};
+ $) ->
+ {[H|ElemStr], NestingLevel - 1, false};
+ $, when NestingLevel =:= 1 ->
+ {lists:reverse(ElemStr), NestingLevel, true};
+ _Other ->
+ {[H|ElemStr], NestingLevel, false}
+ end,
+ case Return of
+ true -> string:tokens(NewElemStr, " |");
+ false ->
+ ets_tuple_argtypes2_helper(T, NewElemStr, NewNestingLevel)
+ end
+ end.
+
+ets_tuple_argtypes1(Str, Tuple, TupleList, NestingLevel) ->
+ case Str of
+ [] -> TupleList;
+ [H|T] ->
+ {NewTuple, NewNestingLevel, Add} =
+ case H of
+ ${ ->
+ {[H|Tuple], NestingLevel + 1, false};
+ $} ->
+ case NestingLevel of
+ 1 ->
+ {[H|Tuple], NestingLevel - 1, true};
+ _Else ->
+ {[H|Tuple], NestingLevel - 1, false}
+ end;
+ _Other1 when NestingLevel =:= 0 ->
+ {Tuple, NestingLevel, false};
+ _Other2 ->
+ {[H|Tuple], NestingLevel, false}
+ end,
+ case Add of
+ true ->
+ ets_tuple_argtypes1(T, [],
+ [lists:reverse(NewTuple)|TupleList],
+ NewNestingLevel);
+ false ->
+ ets_tuple_argtypes1(T, NewTuple, TupleList, NewNestingLevel)
+ end
+ end.
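+
+%% Illustration of the string parsers above, for an insert-object type
+%% string such as "[{'ok',1}]" as produced by format_type/2:
+%%   ets_tuple_argtypes1("[{'ok',1}]", [], [], 0) -> ["{'ok',1}"]
+%% extracts the top-level tuple substrings, and
+%%   ets_tuple_argtypes2(["{'ok',1}"], [])        -> ["'ok'"]
+%% keeps only the key position (the first element) of each tuple.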
+
+format_arg(?bypassed) -> ?no_label;
+format_arg(Arg0) ->
+ Arg = cerl:fold_literal(Arg0),
+ case cerl:type(Arg) of
+ var -> cerl_trees:get_label(Arg);
+ tuple -> list_to_tuple([format_arg(A) || A <- cerl:tuple_es(Arg)]);
+ cons -> [format_arg(cerl:cons_hd(Arg))|format_arg(cerl:cons_tl(Arg))];
+ alias -> format_arg(cerl:alias_var(Arg));
+ literal ->
+ case cerl:is_c_nil(Arg) of
+ true -> [];
+ false -> ?no_label
+ end;
+ _Other -> ?no_label
+ end.
+
+-spec format_args([core_vars()], [erl_types:erl_type()],
+ dialyzer_dataflow:state(), call()) ->
+ args().
+
+format_args([], [], _State, _Call) ->
+ [];
+format_args(ArgList, TypeList, CleanState, Call) ->
+ format_args_2(format_args_1(ArgList, TypeList, CleanState), Call).
+
+format_args_1([Arg], [Type], CleanState) ->
+ [format_arg(Arg), format_type(Type, CleanState)];
+format_args_1([Arg|Args], [Type|Types], CleanState) ->
+ List =
+ case Arg =:= ?bypassed of
+ true -> [?no_label, format_type(Type, CleanState)];
+ false ->
+ case cerl:is_literal(cerl:fold_literal(Arg)) of
+ true -> [?no_label, format_cerl(Arg)];
+ false -> [format_arg(Arg), format_type(Type, CleanState)]
+ end
+ end,
+ List ++ format_args_1(Args, Types, CleanState).
+
+format_args_2(StrArgList, Call) ->
+ case Call of
+ whereis ->
+ lists_key_replace(2, StrArgList,
+ string:tokens(lists:nth(2, StrArgList), " |"));
+ register ->
+ lists_key_replace(2, StrArgList,
+ string:tokens(lists:nth(2, StrArgList), " |"));
+ unregister ->
+ lists_key_replace(2, StrArgList,
+ string:tokens(lists:nth(2, StrArgList), " |"));
+ ets_new ->
+ StrArgList1 = lists_key_replace(2, StrArgList,
+ string:tokens(lists:nth(2, StrArgList), " |")),
+ lists_key_replace(4, StrArgList1,
+ string:tokens(ets_list_argtypes(lists:nth(4, StrArgList1)), " |"));
+ ets_lookup ->
+ StrArgList1 = lists_key_replace(2, StrArgList,
+ string:tokens(lists:nth(2, StrArgList), " |")),
+ lists_key_replace(4, StrArgList1,
+ string:tokens(lists:nth(4, StrArgList1), " |"));
+ ets_insert ->
+ StrArgList1 = lists_key_replace(2, StrArgList,
+ string:tokens(lists:nth(2, StrArgList), " |")),
+ lists_key_replace(4, StrArgList1,
+ ets_tuple_argtypes2(
+ ets_tuple_argtypes1(lists:nth(4, StrArgList1), [], [], 0),
+ []));
+ mnesia_dirty_read1 ->
+ lists_key_replace(2, StrArgList,
+ [mnesia_tuple_argtypes(T) || T <- string:tokens(
+ lists:nth(2, StrArgList), " |")]);
+ mnesia_dirty_read2 ->
+ lists_key_replace(2, StrArgList,
+ string:tokens(lists:nth(2, StrArgList), " |"));
+ mnesia_dirty_write1 ->
+ lists_key_replace(2, StrArgList,
+ [mnesia_record_tab(R) || R <- string:tokens(
+ lists:nth(2, StrArgList), " |")]);
+ mnesia_dirty_write2 ->
+ lists_key_replace(2, StrArgList,
+ string:tokens(lists:nth(2, StrArgList), " |"));
+ function_call -> StrArgList
+ end.
+
+format_cerl(Tree) ->
+ cerl_prettypr:format(cerl:set_ann(Tree, []),
+ [{hook, dialyzer_utils:pp_hook()},
+ {noann, true},
+ {paper, 100000},
+ {ribbon, 100000}
+ ]).
+
+format_type(Type, State) ->
+ R = dialyzer_dataflow:state__get_records(State),
+ erl_types:t_to_string(Type, R).
+
+mnesia_record_tab(RecordStr) ->
+ case string:str(RecordStr, "#") =:= 1 of
+ true ->
+ "'" ++
+ string:sub_string(RecordStr, 2, string:str(RecordStr, "{") - 1) ++
+ "'";
+ false -> RecordStr
+ end.
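+
+%% Illustration: a record type string is reduced to its quoted table name,
+%% while anything else passes through unchanged:
+%%   mnesia_record_tab("#foo{bar = 1}") -> "'foo'"
+%%   mnesia_record_tab("atom()")        -> "atom()"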
+
+mnesia_tuple_argtypes(TupleStr) ->
+ TupleStr1 = string:strip(TupleStr, left, ${),
+ [TupleStr2|_T] = string:tokens(TupleStr1, " ,"),
+ lists:flatten(string:tokens(TupleStr2, " |")).
+
+-spec race_var_map(var_to_map1(), var_to_map2(), dict:dict(), op()) ->
+ dict:dict().
+
+race_var_map(Vars1, Vars2, RaceVarMap, Op) ->
+ case Vars1 =:= ?no_arg orelse Vars1 =:= ?bypassed
+ orelse Vars2 =:= ?bypassed of
+ true -> RaceVarMap;
+ false ->
+ case is_list(Vars1) andalso is_list(Vars2) of
+ true ->
+ case Vars1 of
+ [] -> RaceVarMap;
+ [AHead|ATail] ->
+ case Vars2 of
+ [] -> RaceVarMap;
+ [PHead|PTail] ->
+ NewRaceVarMap = race_var_map(AHead, PHead, RaceVarMap, Op),
+ race_var_map(ATail, PTail, NewRaceVarMap, Op)
+ end
+ end;
+ false ->
+ {NewVars1, NewVars2, Bool} =
+ case is_list(Vars1) of
+ true ->
+ case Vars1 of
+ [Var1] -> {Var1, Vars2, true};
+ _Thing -> {Vars1, Vars2, false}
+ end;
+ false ->
+ case is_list(Vars2) of
+ true ->
+ case Vars2 of
+ [Var2] -> {Vars1, Var2, true};
+ _Thing -> {Vars1, Vars2, false}
+ end;
+ false -> {Vars1, Vars2, true}
+ end
+ end,
+ case Bool of
+ true ->
+ case cerl:type(NewVars1) of
+ var ->
+ case cerl:type(NewVars2) of
+ var ->
+ ALabel = cerl_trees:get_label(NewVars1),
+ PLabel = cerl_trees:get_label(NewVars2),
+ case Op of
+ bind ->
+ TempRaceVarMap =
+ bind_dict_vars(ALabel, PLabel, RaceVarMap),
+ bind_dict_vars(PLabel, ALabel, TempRaceVarMap);
+ unbind ->
+ TempRaceVarMap =
+ unbind_dict_vars(ALabel, PLabel, RaceVarMap),
+ unbind_dict_vars(PLabel, ALabel, TempRaceVarMap)
+ end;
+ alias ->
+ race_var_map(NewVars1, cerl:alias_var(NewVars2),
+ RaceVarMap, Op);
+ values ->
+ race_var_map(NewVars1, cerl:values_es(NewVars2),
+ RaceVarMap, Op);
+ _Other -> RaceVarMap
+ end;
+ tuple ->
+ case cerl:type(NewVars2) of
+ tuple ->
+ race_var_map(cerl:tuple_es(NewVars1),
+ cerl:tuple_es(NewVars2), RaceVarMap, Op);
+ alias ->
+ race_var_map(NewVars1, cerl:alias_var(NewVars2),
+ RaceVarMap, Op);
+ values ->
+ race_var_map(NewVars1, cerl:values_es(NewVars2),
+ RaceVarMap, Op);
+ _Other -> RaceVarMap
+ end;
+ cons ->
+ case cerl:type(NewVars2) of
+ cons ->
+ NewRaceVarMap = race_var_map(cerl:cons_hd(NewVars1),
+ cerl:cons_hd(NewVars2), RaceVarMap, Op),
+ race_var_map(cerl:cons_tl(NewVars1),
+ cerl:cons_tl(NewVars2), NewRaceVarMap, Op);
+ alias ->
+ race_var_map(NewVars1, cerl:alias_var(NewVars2),
+ RaceVarMap, Op);
+ values ->
+ race_var_map(NewVars1, cerl:values_es(NewVars2),
+ RaceVarMap, Op);
+ _Other -> RaceVarMap
+ end;
+ alias ->
+ case cerl:type(NewVars2) of
+ alias ->
+ race_var_map(cerl:alias_var(NewVars1),
+ cerl:alias_var(NewVars2), RaceVarMap, Op);
+ _Other ->
+ race_var_map(cerl:alias_var(NewVars1),
+ NewVars2, RaceVarMap, Op)
+ end;
+ values ->
+ case cerl:type(NewVars2) of
+ values ->
+ race_var_map(cerl:values_es(NewVars1),
+ cerl:values_es(NewVars2), RaceVarMap, Op);
+ _Other ->
+ race_var_map(cerl:values_es(NewVars1),
+ NewVars2, RaceVarMap, Op)
+ end;
+ _Other -> RaceVarMap
+ end;
+ false -> RaceVarMap
+ end
+ end
+ end.
+
+race_var_map_clauses(Clauses, RaceVarMap) ->
+ case Clauses of
+ [] -> RaceVarMap;
+ [#end_clause{arg = Arg, pats = Pats, guard = Guard}|T] ->
+ {RaceVarMap1, _RemoveClause} =
+ race_var_map_guard(Arg, Pats, Guard, RaceVarMap, bind),
+ race_var_map_clauses(T, RaceVarMap1)
+ end.
+
+race_var_map_guard(Arg, Pats, Guard, RaceVarMap, Op) ->
+ {NewRaceVarMap, RemoveClause} =
+ case cerl:type(Guard) of
+ call ->
+ CallName = cerl:call_name(Guard),
+ case cerl:is_literal(CallName) of
+ true ->
+ case cerl:concrete(CallName) of
+ '=:=' ->
+ [Arg1, Arg2] = cerl:call_args(Guard),
+ {race_var_map(Arg1, Arg2, RaceVarMap, Op), false};
+ '==' ->
+ [Arg1, Arg2] = cerl:call_args(Guard),
+ {race_var_map(Arg1, Arg2, RaceVarMap, Op), false};
+ '=/=' ->
+ case Op of
+ bind ->
+ [Arg1, Arg2] = cerl:call_args(Guard),
+ {RaceVarMap, are_bound_vars(Arg1, Arg2, RaceVarMap)};
+ unbind -> {RaceVarMap, false}
+ end;
+ _Other -> {RaceVarMap, false}
+ end;
+ false -> {RaceVarMap, false}
+ end;
+ _Other -> {RaceVarMap, false}
+ end,
+ {RaceVarMap1, RemoveClause1} =
+ race_var_map_guard_helper1(Arg, Pats,
+ race_var_map(Arg, Pats, NewRaceVarMap, Op), Op),
+ {RaceVarMap1, RemoveClause orelse RemoveClause1}.
+
+race_var_map_guard_helper1(Arg, Pats, RaceVarMap, Op) ->
+ case Arg =:= ?no_arg orelse Arg =:= ?bypassed of
+ true -> {RaceVarMap, false};
+ false ->
+ case cerl:type(Arg) of
+ call ->
+ case Pats of
+ [NewPat] ->
+ ModName = cerl:call_module(Arg),
+ CallName = cerl:call_name(Arg),
+ case cerl:is_literal(ModName) andalso
+ cerl:is_literal(CallName) of
+ true ->
+ case {cerl:concrete(ModName),
+ cerl:concrete(CallName)} of
+ {erlang, '=:='} ->
+ race_var_map_guard_helper2(Arg, NewPat, true,
+ RaceVarMap, Op);
+ {erlang, '=='} ->
+ race_var_map_guard_helper2(Arg, NewPat, true,
+ RaceVarMap, Op);
+ {erlang, '=/='} ->
+ race_var_map_guard_helper2(Arg, NewPat, false,
+ RaceVarMap, Op);
+ _Else -> {RaceVarMap, false}
+ end;
+ false -> {RaceVarMap, false}
+ end;
+ _Other -> {RaceVarMap, false}
+ end;
+ _Other -> {RaceVarMap, false}
+ end
+ end.
+
+race_var_map_guard_helper2(Arg, Pat0, Bool, RaceVarMap, Op) ->
+ Pat = cerl:fold_literal(Pat0),
+ case cerl:type(Pat) of
+ literal ->
+ [Arg1, Arg2] = cerl:call_args(Arg),
+ case cerl:concrete(Pat) of
+ Bool ->
+ {race_var_map(Arg1, Arg2, RaceVarMap, Op), false};
+ _Else ->
+ case Op of
+ bind ->
+ {RaceVarMap, are_bound_vars(Arg1, Arg2, RaceVarMap)};
+ unbind -> {RaceVarMap, false}
+ end
+ end;
+ _Else -> {RaceVarMap, false}
+ end.
+
+unbind_dict_vars(Var, Var, RaceVarMap) ->
+ RaceVarMap;
+unbind_dict_vars(Var1, Var2, RaceVarMap) ->
+ case dict:find(Var1, RaceVarMap) of
+ error -> RaceVarMap;
+ {ok, Labels} ->
+ case Labels of
+ [] -> dict:erase(Var1, RaceVarMap);
+ _Else ->
+ case lists:member(Var2, Labels) of
+ true ->
+ unbind_dict_vars(Var1, Var2,
+ bind_dict_vars_list(Var1, Labels -- [Var2],
+ dict:erase(Var1, RaceVarMap)));
+ false ->
+ unbind_dict_vars_helper(Labels, Var1, Var2, RaceVarMap)
+ end
+ end
+ end.
+
+unbind_dict_vars_helper(Labels, Key, CompLabel, RaceVarMap) ->
+ case dict:size(RaceVarMap) of
+ 0 -> RaceVarMap;
+ _ ->
+ case Labels of
+ [] -> RaceVarMap;
+ [Head|Tail] ->
+ NewRaceVarMap =
+ case are_bound_labels(Head, CompLabel, RaceVarMap) orelse
+ are_bound_labels(CompLabel, Head, RaceVarMap) of
+ true ->
+ bind_dict_vars_list(Key, Labels -- [Head],
+ dict:erase(Key, RaceVarMap));
+ false -> RaceVarMap
+ end,
+ unbind_dict_vars_helper(Tail, Key, CompLabel, NewRaceVarMap)
+ end
+ end.
+
+var_analysis(FunDefArgs, FunCallArgs, WarnVarArgs, RaceWarnTag) ->
+ case RaceWarnTag of
+ ?WARN_WHEREIS_REGISTER ->
+ [WVA1, WVA2, WVA3, WVA4] = WarnVarArgs,
+ ArgNos = lists_key_members_lists(WVA1, FunDefArgs),
+ [[lists_get(N, FunCallArgs) || N <- ArgNos], WVA2, WVA3, WVA4];
+ ?WARN_WHEREIS_UNREGISTER ->
+ [WVA1, WVA2] = WarnVarArgs,
+ ArgNos = lists_key_members_lists(WVA1, FunDefArgs),
+ [[lists_get(N, FunCallArgs) || N <- ArgNos], WVA2];
+ ?WARN_ETS_LOOKUP_INSERT ->
+ [WVA1, WVA2, WVA3, WVA4] = WarnVarArgs,
+ ArgNos1 = lists_key_members_lists(WVA1, FunDefArgs),
+ ArgNos2 = lists_key_members_lists(WVA3, FunDefArgs),
+ [[lists_get(N1, FunCallArgs) || N1 <- ArgNos1], WVA2,
+ [lists_get(N2, FunCallArgs) || N2 <- ArgNos2], WVA4];
+ ?WARN_MNESIA_DIRTY_READ_WRITE ->
+ [WVA1, WVA2|T] = WarnVarArgs,
+ ArgNos = lists_key_members_lists(WVA1, FunDefArgs),
+ [[lists_get(N, FunCallArgs) || N <- ArgNos], WVA2|T]
+ end.
+
+var_type_analysis(FunDefArgs, FunCallTypes, WarnVarArgs, RaceWarnTag,
+ RaceVarMap, CleanState) ->
+ FunVarArgs = format_args(FunDefArgs, FunCallTypes, CleanState, function_call),
+ case RaceWarnTag of
+ ?WARN_WHEREIS_REGISTER ->
+ [WVA1, WVA2, WVA3, WVA4] = WarnVarArgs,
+ Vars = find_all_bound_vars(WVA1, RaceVarMap),
+ case lists_key_member_lists(Vars, FunVarArgs) of
+ 0 -> [Vars, WVA2, WVA3, WVA4];
+ N when is_integer(N) ->
+ NewWVA2 = string:tokens(lists:nth(N + 1, FunVarArgs), " |"),
+ [Vars, NewWVA2, WVA3, WVA4]
+ end;
+ ?WARN_WHEREIS_UNREGISTER ->
+ [WVA1, WVA2] = WarnVarArgs,
+ Vars = find_all_bound_vars(WVA1, RaceVarMap),
+ case lists_key_member_lists(Vars, FunVarArgs) of
+ 0 -> [Vars, WVA2];
+ N when is_integer(N) ->
+ NewWVA2 = string:tokens(lists:nth(N + 1, FunVarArgs), " |"),
+ [Vars, NewWVA2]
+ end;
+ ?WARN_ETS_LOOKUP_INSERT ->
+ [WVA1, WVA2, WVA3, WVA4] = WarnVarArgs,
+ Vars1 = find_all_bound_vars(WVA1, RaceVarMap),
+ FirstVarArg =
+ case lists_key_member_lists(Vars1, FunVarArgs) of
+ 0 -> [Vars1, WVA2];
+ N1 when is_integer(N1) ->
+ NewWVA2 = string:tokens(lists:nth(N1 + 1, FunVarArgs), " |"),
+ [Vars1, NewWVA2]
+ end,
+ Vars2 =
+ lists:flatten(
+ [find_all_bound_vars(A, RaceVarMap) || A <- ets_list_args(WVA3)]),
+ case lists_key_member_lists(Vars2, FunVarArgs) of
+ 0 -> FirstVarArg ++ [Vars2, WVA4];
+ N2 when is_integer(N2) ->
+ NewWVA4 =
+ ets_tuple_argtypes2(
+ ets_tuple_argtypes1(lists:nth(N2 + 1, FunVarArgs), [], [], 0),
+ []),
+ FirstVarArg ++ [Vars2, NewWVA4]
+      end;
+ ?WARN_MNESIA_DIRTY_READ_WRITE ->
+ [WVA1, WVA2|T] = WarnVarArgs,
+ Arity =
+ case T of
+ [] -> 1;
+ _Else -> 2
+ end,
+ Vars = find_all_bound_vars(WVA1, RaceVarMap),
+ case lists_key_member_lists(Vars, FunVarArgs) of
+ 0 -> [Vars, WVA2|T];
+ N when is_integer(N) ->
+ NewWVA2 =
+ case Arity of
+ 1 ->
+ [mnesia_record_tab(R) || R <- string:tokens(
+ lists:nth(2, FunVarArgs), " |")];
+ 2 ->
+ string:tokens(lists:nth(N + 1, FunVarArgs), " |")
+ end,
+ [Vars, NewWVA2|T]
+ end
+ end.
+
+%%% ===========================================================================
+%%%
+%%% Warning Format Utilities
+%%%
+%%% ===========================================================================
+
+add_race_warning(Warn, #races{race_warnings = Warns} = Races) ->
+ Races#races{race_warnings = [Warn|Warns]}.
+
+get_race_warn(Fun, Args, ArgTypes, DepList, State) ->
+ {M, F, _A} = Fun,
+ case DepList of
+ [] -> {State, no_race};
+ _Other ->
+ {State, {race_condition, [M, F, Args, ArgTypes, State, DepList]}}
+ end.
+
+-spec get_race_warnings(races(), dialyzer_dataflow:state()) ->
+ {races(), dialyzer_dataflow:state()}.
+
+get_race_warnings(#races{race_warnings = RaceWarnings}, State) ->
+ get_race_warnings_helper(RaceWarnings, State).
+
+get_race_warnings_helper(Warnings, State) ->
+ case Warnings of
+ [] ->
+ {dialyzer_dataflow:state__get_races(State), State};
+ [H|T] ->
+ {RaceWarnTag, WarningInfo, {race_condition, [M, F, A, AT, S, DepList]}} = H,
+ Reason =
+ case RaceWarnTag of
+ ?WARN_WHEREIS_REGISTER ->
+ get_reason(lists:keysort(7, DepList),
+ "might fail due to a possible race condition "
+ "caused by its combination with ");
+ ?WARN_WHEREIS_UNREGISTER ->
+ get_reason(lists:keysort(7, DepList),
+ "might fail due to a possible race condition "
+ "caused by its combination with ");
+ ?WARN_ETS_LOOKUP_INSERT ->
+ get_reason(lists:keysort(7, DepList),
+ "might have an unintended effect due to " ++
+ "a possible race condition " ++
+ "caused by its combination with ");
+ ?WARN_MNESIA_DIRTY_READ_WRITE ->
+ get_reason(lists:keysort(7, DepList),
+ "might have an unintended effect due to " ++
+ "a possible race condition " ++
+ "caused by its combination with ")
+ end,
+ W =
+ {?WARN_RACE_CONDITION, WarningInfo,
+ {race_condition,
+ [M, F, dialyzer_dataflow:format_args(A, AT, S), Reason]}},
+ get_race_warnings_helper(T,
+ dialyzer_dataflow:state__add_warning(W, State))
+ end.
+
+get_reason(DependencyList, Reason) ->
+ case DependencyList of
+ [] -> "";
+ [#dep_call{call_name = Call, arg_types = ArgTypes, vars = Args,
+ state = State, file_line = {File, Line}}|T] ->
+ R =
+ Reason ++
+ case Call of
+ whereis -> "the erlang:whereis";
+ ets_lookup -> "the ets:lookup";
+ mnesia_dirty_read -> "the mnesia:dirty_read"
+ end ++
+ dialyzer_dataflow:format_args(Args, ArgTypes, State) ++
+ " call in " ++
+ filename:basename(File) ++
+ " on line " ++
+ lists:flatten(io_lib:write(Line)),
+ case T of
+ [] -> R;
+ _ -> get_reason(T, R ++ ", ")
+ end
+ end.
+
+state__add_race_warning(State, RaceWarn, RaceWarnTag, WarningInfo) ->
+ case RaceWarn of
+ no_race -> State;
+ _Else ->
+ Races = dialyzer_dataflow:state__get_races(State),
+ Warn = {RaceWarnTag, WarningInfo, RaceWarn},
+ dialyzer_dataflow:state__put_races(add_race_warning(Warn, Races), State)
+ end.
+
+%%% ===========================================================================
+%%%
+%%% Record Interfaces
+%%%
+%%% ===========================================================================
+
+-spec beg_clause_new(var_to_map1(), var_to_map1(), cerl:cerl()) ->
+ #beg_clause{}.
+
+beg_clause_new(Arg, Pats, Guard) ->
+ #beg_clause{arg = Arg, pats = Pats, guard = Guard}.
+
+-spec cleanup(races()) -> races().
+
+cleanup(#races{race_list = RaceList}) ->
+ #races{race_list = RaceList}.
+
+-spec end_case_new([#end_clause{}]) -> #end_case{}.
+
+end_case_new(Clauses) ->
+ #end_case{clauses = Clauses}.
+
+-spec end_clause_new(var_to_map1(), var_to_map1(), cerl:cerl()) ->
+ #end_clause{}.
+
+end_clause_new(Arg, Pats, Guard) ->
+ #end_clause{arg = Arg, pats = Pats, guard = Guard}.
+
+-spec get_curr_fun(races()) -> dialyzer_callgraph:mfa_or_funlbl().
+
+get_curr_fun(#races{curr_fun = CurrFun}) ->
+ CurrFun.
+
+-spec get_curr_fun_args(races()) -> core_args().
+
+get_curr_fun_args(#races{curr_fun_args = CurrFunArgs}) ->
+ CurrFunArgs.
+
+-spec get_new_table(races()) -> table().
+
+get_new_table(#races{new_table = Table}) ->
+ Table.
+
+-spec get_race_analysis(races()) -> boolean().
+
+get_race_analysis(#races{race_analysis = RaceAnalysis}) ->
+ RaceAnalysis.
+
+-spec get_race_list(races()) -> code().
+
+get_race_list(#races{race_list = RaceList}) ->
+ RaceList.
+
+-spec get_race_list_size(races()) -> non_neg_integer().
+
+get_race_list_size(#races{race_list_size = RaceListSize}) ->
+ RaceListSize.
+
+-spec get_race_list_and_size(races()) -> {code(), non_neg_integer()}.
+
+get_race_list_and_size(#races{race_list = RaceList,
+ race_list_size = RaceListSize}) ->
+ {RaceList, RaceListSize}.
+
+-spec let_tag_new(var_to_map1(), var_to_map1()) -> #let_tag{}.
+
+let_tag_new(Var, Arg) ->
+ #let_tag{var = Var, arg = Arg}.
+
+-spec new() -> races().
+
+new() -> #races{}.
+
+-spec put_curr_fun(dialyzer_callgraph:mfa_or_funlbl(), label(), races()) ->
+ races().
+
+put_curr_fun(CurrFun, CurrFunLabel, Races) ->
+ Races#races{curr_fun = CurrFun,
+ curr_fun_label = CurrFunLabel,
+ curr_fun_args = empty}.
+
+-spec put_fun_args(core_args(), races()) -> races().
+
+put_fun_args(Args, #races{curr_fun_args = CurrFunArgs} = Races) ->
+ case CurrFunArgs of
+ empty -> Races#races{curr_fun_args = Args};
+ _Other -> Races
+ end.
+
+-spec put_race_analysis(boolean(), races()) ->
+ races().
+
+put_race_analysis(Analysis, Races) ->
+ Races#races{race_analysis = Analysis}.
+
+-spec put_race_list(code(), non_neg_integer(), races()) ->
+ races().
+
+put_race_list(RaceList, RaceListSize, Races) ->
+ Races#races{race_list = RaceList, race_list_size = RaceListSize}.
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/recrec/erl_types.erl b/lib/dialyzer/test/opaque_SUITE_data/src/recrec/erl_types.erl
new file mode 100644
index 0000000000..41e3d4be27
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/recrec/erl_types.erl
@@ -0,0 +1,5735 @@
+%% -*- erlang-indent-level: 2 -*-
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% ======================================================================
+%% Copyright (C) 2000-2003 Richard Carlsson
+%%
+%% ======================================================================
+%% Provides a representation of Erlang types.
+%%
+%% The initial author of this file is Richard Carlsson (2000-2004).
+%% In July 2006, the type representation was completely redesigned by
+%% Tobias Lindahl. This is the representation that is currently in use.
+%% In late 2008, Manouk Manoukian and Kostis Sagonas added support for
+%% opaque types to the structure-based representation of types.
+%% During February and March 2009, Kostis Sagonas significantly
+%% cleaned up the type representation and added spec declarations.
+%%
+%% ======================================================================
+
+-module(erl_types).
+
+-export([any_none/1,
+ any_none_or_unit/1,
+ lookup_record/3,
+ max/2,
+ min/2,
+ number_max/1, number_max/2,
+ number_min/1, number_min/2,
+ t_abstract_records/2,
+ t_any/0,
+ t_arity/0,
+ t_atom/0,
+ t_atom/1,
+ t_atoms/1,
+ t_atom_vals/1, t_atom_vals/2,
+ t_binary/0,
+ t_bitstr/0,
+ t_bitstr/2,
+ t_bitstr_base/1,
+ t_bitstr_concat/1,
+ t_bitstr_concat/2,
+ t_bitstr_match/2,
+ t_bitstr_unit/1,
+ t_bitstrlist/0,
+ t_boolean/0,
+ t_byte/0,
+ t_char/0,
+ t_collect_vars/1,
+ t_cons/0,
+ t_cons/2,
+ t_cons_hd/1, t_cons_hd/2,
+ t_cons_tl/1, t_cons_tl/2,
+ t_contains_opaque/1, t_contains_opaque/2,
+ t_decorate_with_opaque/3,
+ t_elements/1,
+ t_find_opaque_mismatch/3,
+ t_find_unknown_opaque/3,
+ t_fixnum/0,
+ t_map/2,
+ t_non_neg_fixnum/0,
+ t_pos_fixnum/0,
+ t_float/0,
+ t_var_names/1,
+ t_form_to_string/1,
+ t_from_form/6,
+ t_from_form_without_remote/3,
+ t_check_record_fields/6,
+ t_from_range/2,
+ t_from_range_unsafe/2,
+ t_from_term/1,
+ t_fun/0,
+ t_fun/1,
+ t_fun/2,
+ t_fun_args/1, t_fun_args/2,
+ t_fun_arity/1, t_fun_arity/2,
+ t_fun_range/1, t_fun_range/2,
+ t_has_opaque_subtype/2,
+ t_has_var/1,
+ t_identifier/0,
+ %% t_improper_list/2,
+ t_inf/1,
+ t_inf/2,
+ t_inf/3,
+ t_inf_lists/2,
+ t_inf_lists/3,
+ t_integer/0,
+ t_integer/1,
+ t_non_neg_integer/0,
+ t_pos_integer/0,
+ t_integers/1,
+ t_iodata/0,
+ t_iolist/0,
+ t_is_any/1,
+ t_is_atom/1, t_is_atom/2,
+ t_is_any_atom/2, t_is_any_atom/3,
+ t_is_binary/1, t_is_binary/2,
+ t_is_bitstr/1, t_is_bitstr/2,
+ t_is_bitwidth/1,
+ t_is_boolean/1, t_is_boolean/2,
+ %% t_is_byte/1,
+ %% t_is_char/1,
+ t_is_cons/1, t_is_cons/2,
+ t_is_equal/2,
+ t_is_fixnum/1,
+ t_is_float/1, t_is_float/2,
+ t_is_fun/1, t_is_fun/2,
+ t_is_instance/2,
+ t_is_integer/1, t_is_integer/2,
+ t_is_list/1,
+ t_is_map/1,
+ t_is_map/2,
+ t_is_matchstate/1,
+ t_is_nil/1, t_is_nil/2,
+ t_is_non_neg_integer/1,
+ t_is_none/1,
+ t_is_none_or_unit/1,
+ t_is_number/1, t_is_number/2,
+ t_is_opaque/1, t_is_opaque/2,
+ t_is_pid/1, t_is_pid/2,
+ t_is_port/1, t_is_port/2,
+ t_is_maybe_improper_list/1, t_is_maybe_improper_list/2,
+ t_is_reference/1, t_is_reference/2,
+ t_is_singleton/1,
+ t_is_singleton/2,
+ t_is_string/1,
+ t_is_subtype/2,
+ t_is_tuple/1, t_is_tuple/2,
+ t_is_unit/1,
+ t_is_var/1,
+ t_limit/2,
+ t_list/0,
+ t_list/1,
+ t_list_elements/1, t_list_elements/2,
+ t_list_termination/1, t_list_termination/2,
+ t_map/0,
+ t_map/1,
+ t_map/3,
+ t_map_entries/2, t_map_entries/1,
+ t_map_def_key/2, t_map_def_key/1,
+ t_map_def_val/2, t_map_def_val/1,
+ t_map_get/2, t_map_get/3,
+ t_map_is_key/2, t_map_is_key/3,
+ t_map_update/2, t_map_update/3,
+ t_map_put/2, t_map_put/3,
+ t_matchstate/0,
+ t_matchstate/2,
+ t_matchstate_present/1,
+ t_matchstate_slot/2,
+ t_matchstate_slots/1,
+ t_matchstate_update_present/2,
+ t_matchstate_update_slot/3,
+ t_mfa/0,
+ t_module/0,
+ t_nil/0,
+ t_node/0,
+ t_none/0,
+ t_nonempty_list/0,
+ t_nonempty_list/1,
+ t_nonempty_string/0,
+ t_number/0,
+ t_number/1,
+ t_number_vals/1, t_number_vals/2,
+ t_opaque_from_records/1,
+ t_opaque_structure/1,
+ t_pid/0,
+ t_port/0,
+ t_maybe_improper_list/0,
+ %% t_maybe_improper_list/2,
+ t_product/1,
+ t_reference/0,
+ t_singleton_to_term/2,
+ t_string/0,
+ t_struct_from_opaque/2,
+ t_subst/2,
+ t_subtract/2,
+ t_subtract_list/2,
+ t_sup/1,
+ t_sup/2,
+ t_timeout/0,
+ t_to_string/1,
+ t_to_string/2,
+ t_to_tlist/1,
+ t_tuple/0,
+ t_tuple/1,
+ t_tuple_args/1, t_tuple_args/2,
+ t_tuple_size/1, t_tuple_size/2,
+ t_tuple_sizes/1,
+ t_tuple_subtypes/1,
+ t_tuple_subtypes/2,
+ t_unify/2,
+ t_unit/0,
+ t_unopaque/1, t_unopaque/2,
+ t_var/1,
+ t_var_name/1,
+ %% t_assign_variables_to_subtype/2,
+ type_is_defined/4,
+ record_field_diffs_to_string/2,
+ subst_all_vars_to_any/1,
+ lift_list_to_pos_empty/1, lift_list_to_pos_empty/2,
+ is_opaque_type/2,
+ is_erl_type/1,
+ atom_to_string/1,
+ var_table__new/0,
+ cache__new/0,
+ map_pairwise_merge/3
+ ]).
+
+%%-define(DO_ERL_TYPES_TEST, true).
+-compile({no_auto_import,[min/2,max/2]}).
+
+-ifdef(DO_ERL_TYPES_TEST).
+-export([test/0]).
+-else.
+-define(NO_UNUSED, true).
+-endif.
+
+-ifndef(NO_UNUSED).
+-export([t_is_identifier/1]).
+-endif.
+
+-export_type([erl_type/0, opaques/0, type_table/0, var_table/0, cache/0]).
+
+%%-define(DEBUG, true).
+
+-ifdef(DEBUG).
+-define(debug(__A), __A).
+-else.
+-define(debug(__A), ok).
+-endif.
+
+%%=============================================================================
+%%
+%% Definition of the type structure
+%%
+%%=============================================================================
+
+%%-----------------------------------------------------------------------------
+%% Limits
+%%
+
+-define(REC_TYPE_LIMIT, 2).
+-define(EXPAND_DEPTH, 16).
+-define(EXPAND_LIMIT, 10000).
+
+-define(TUPLE_TAG_LIMIT, 5).
+-define(TUPLE_ARITY_LIMIT, 8).
+-define(SET_LIMIT, 13).
+-define(MAX_BYTE, 255).
+-define(MAX_CHAR, 16#10ffff).
+
+-define(UNIT_MULTIPLIER, 8).
+
+-define(TAG_IMMED1_SIZE, 4).
+-define(BITS, (erlang:system_info(wordsize) * 8) - ?TAG_IMMED1_SIZE).
+
+-define(MAX_TUPLE_SIZE, (1 bsl 10)).
+
+%%-----------------------------------------------------------------------------
+%% Type tags and qualifiers
+%%
+
+-define(atom_tag, atom).
+-define(binary_tag, binary).
+-define(function_tag, function).
+-define(identifier_tag, identifier).
+-define(list_tag, list).
+-define(map_tag, map).
+-define(matchstate_tag, matchstate).
+-define(nil_tag, nil).
+-define(number_tag, number).
+-define(opaque_tag, opaque).
+-define(product_tag, product).
+-define(tuple_set_tag, tuple_set).
+-define(tuple_tag, tuple).
+-define(union_tag, union).
+-define(var_tag, var).
+
+-type tag() :: ?atom_tag | ?binary_tag | ?function_tag | ?identifier_tag
+ | ?list_tag | ?map_tag | ?matchstate_tag | ?nil_tag | ?number_tag
+ | ?opaque_tag | ?product_tag
+ | ?tuple_tag | ?tuple_set_tag | ?union_tag | ?var_tag.
+
+-define(float_qual, float).
+-define(integer_qual, integer).
+-define(nonempty_qual, nonempty).
+-define(pid_qual, pid).
+-define(port_qual, port).
+-define(reference_qual, reference).
+-define(unknown_qual, unknown).
+
+-type qual() :: ?float_qual | ?integer_qual | ?nonempty_qual | ?pid_qual
+ | ?port_qual | ?reference_qual | ?unknown_qual | {_, _}.
+
+%%-----------------------------------------------------------------------------
+%% The type representation
+%%
+
+-define(any, any).
+-define(none, none).
+-define(unit, unit).
+%% Generic constructor - elements can be many things depending on the tag.
+-record(c, {tag :: tag(),
+ elements = [] :: term(),
+ qualifier = ?unknown_qual :: qual()}).
+
+-opaque erl_type() :: ?any | ?none | ?unit | #c{}.
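+
+%% As an illustration of this encoding (using the macros defined in the
+%% next subsection): t_atom(foo) builds a #c{tag = ?atom_tag} record whose
+%% elements field is a singleton set holding 'foo', ?nil expands to the
+%% bare #c{tag = ?nil_tag} record, and the limit types any(), none() and
+%% unit() are represented directly by the atoms ?any, ?none and ?unit
+%% rather than by a #c{} record.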
+
+%%-----------------------------------------------------------------------------
+%% Auxiliary types and convenient macros
+%%
+
+-type parse_form() :: erl_parse:abstract_type().
+-type rng_elem() :: 'pos_inf' | 'neg_inf' | integer().
+
+-record(int_set, {set :: [integer()]}).
+-record(int_rng, {from :: rng_elem(), to :: rng_elem()}).
+%% Note: the definition of #opaque{} was changed to 'mod' and 'name';
+%% it used to be an ordset of {Mod, Name} pairs. The Dialyzer version
+%% was updated to 2.7 due to this change.
+-record(opaque, {mod :: module(), name :: atom(),
+ args = [] :: [erl_type()], struct :: erl_type()}).
+
+-define(atom(Set), #c{tag=?atom_tag, elements=Set}).
+-define(bitstr(Unit, Base), #c{tag=?binary_tag, elements=[Unit,Base]}).
+-define(float, ?number(?any, ?float_qual)).
+-define(function(Domain, Range), #c{tag=?function_tag,
+ elements=[Domain, Range]}).
+-define(identifier(Types), #c{tag=?identifier_tag, elements=Types}).
+-define(integer(Types), ?number(Types, ?integer_qual)).
+-define(int_range(From, To), ?integer(#int_rng{from=From, to=To})).
+-define(int_set(Set), ?integer(#int_set{set=Set})).
+-define(list(Types, Term, Size), #c{tag=?list_tag, elements=[Types,Term],
+ qualifier=Size}).
+-define(nil, #c{tag=?nil_tag}).
+-define(nonempty_list(Types, Term),?list(Types, Term, ?nonempty_qual)).
+-define(number(Set, Qualifier), #c{tag=?number_tag, elements=Set,
+ qualifier=Qualifier}).
+-define(map(Pairs,DefKey,DefVal),
+ #c{tag=?map_tag, elements={Pairs,DefKey,DefVal}}).
+-define(opaque(Optypes), #c{tag=?opaque_tag, elements=Optypes}).
+-define(product(Types), #c{tag=?product_tag, elements=Types}).
+-define(tuple(Types, Arity, Qual), #c{tag=?tuple_tag, elements=Types,
+ qualifier={Arity, Qual}}).
+-define(tuple_set(Tuples), #c{tag=?tuple_set_tag, elements=Tuples}).
+-define(var(Id), #c{tag=?var_tag, elements=Id}).
+
+-define(matchstate(P, Slots), #c{tag=?matchstate_tag, elements=[P,Slots]}).
+-define(any_matchstate, ?matchstate(t_bitstr(), ?any)).
+
+-define(byte, ?int_range(0, ?MAX_BYTE)).
+-define(char, ?int_range(0, ?MAX_CHAR)).
+-define(integer_pos, ?int_range(1, pos_inf)).
+-define(integer_non_neg, ?int_range(0, pos_inf)).
+-define(integer_neg, ?int_range(neg_inf, -1)).
+
+-type opaques() :: [erl_type()] | 'universe'.
+
+-type record_key() :: {'record', atom()}.
+-type type_key() :: {'type' | 'opaque', mfa()}.
+-type record_value() :: [{atom(), erl_parse:abstract_expr(), erl_type()}].
+-type type_value() :: {{module(), {file:name(), erl_anno:line()},
+ erl_parse:abstract_type(), ArgNames :: [atom()]},
+ erl_type()}.
+-type type_table() :: dict:dict(record_key() | type_key(),
+ record_value() | type_value()).
+
+-opaque var_table() :: #{atom() => erl_type()}.
+
+%%-----------------------------------------------------------------------------
+%% Unions
+%%
+
+-define(union(List), #c{tag=?union_tag, elements=[_,_,_,_,_,_,_,_,_,_]=List}).
+
+-define(atom_union(T), ?union([T,?none,?none,?none,?none,?none,?none,?none,?none,?none])).
+-define(bitstr_union(T), ?union([?none,T,?none,?none,?none,?none,?none,?none,?none,?none])).
+-define(function_union(T), ?union([?none,?none,T,?none,?none,?none,?none,?none,?none,?none])).
+-define(identifier_union(T), ?union([?none,?none,?none,T,?none,?none,?none,?none,?none,?none])).
+-define(list_union(T), ?union([?none,?none,?none,?none,T,?none,?none,?none,?none,?none])).
+-define(number_union(T), ?union([?none,?none,?none,?none,?none,T,?none,?none,?none,?none])).
+-define(tuple_union(T), ?union([?none,?none,?none,?none,?none,?none,T,?none,?none,?none])).
+-define(matchstate_union(T), ?union([?none,?none,?none,?none,?none,?none,?none,T,?none,?none])).
+-define(opaque_union(T), ?union([?none,?none,?none,?none,?none,?none,?none,?none,T,?none])).
+-define(map_union(T), ?union([?none,?none,?none,?none,?none,?none,?none,?none,?none,T])).
+-define(integer_union(T), ?number_union(T)).
+-define(float_union(T), ?number_union(T)).
+-define(nil_union(T), ?list_union(T)).
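+
+%% Illustration of the fixed slot layout: a union is always a ten-element
+%% list in the order atom, bitstr, function, identifier, list, number,
+%% tuple, matchstate, opaque, map. For instance ?atom_union(T) places T in
+%% the first slot and ?none in the remaining nine. Integers and floats
+%% have no slot of their own and reuse the number slot, just as nil reuses
+%% the list slot.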
+
+
+%%=============================================================================
+%%
+%% Primitive operations such as type construction and type tests
+%%
+%%=============================================================================
+
+%%-----------------------------------------------------------------------------
+%% Top and bottom
+%%
+
+-spec t_any() -> erl_type().
+
+t_any() ->
+ ?any.
+
+-spec t_is_any(erl_type()) -> boolean().
+
+t_is_any(Type) ->
+ do_opaque(Type, 'universe', fun is_any/1).
+
+is_any(?any) -> true;
+is_any(_) -> false.
+
+-spec t_none() -> erl_type().
+
+t_none() ->
+ ?none.
+
+-spec t_is_none(erl_type()) -> boolean().
+
+t_is_none(?none) -> true;
+t_is_none(_) -> false.
+
+%%-----------------------------------------------------------------------------
+%% Opaque types
+%%
+
+-spec t_opaque(module(), atom(), [_], erl_type()) -> erl_type().
+
+t_opaque(Mod, Name, Args, Struct) ->
+ O = #opaque{mod = Mod, name = Name, args = Args, struct = Struct},
+ ?opaque(set_singleton(O)).
+
+-spec t_is_opaque(erl_type(), [erl_type()]) -> boolean().
+
+t_is_opaque(?opaque(_) = Type, Opaques) ->
+ not is_opaque_type(Type, Opaques);
+t_is_opaque(_Type, _Opaques) -> false.
+
+-spec t_is_opaque(erl_type()) -> boolean().
+
+t_is_opaque(?opaque(_)) -> true;
+t_is_opaque(_) -> false.
+
+-spec t_has_opaque_subtype(erl_type(), opaques()) -> boolean().
+
+t_has_opaque_subtype(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun has_opaque_subtype/1).
+
+has_opaque_subtype(?union(Ts)) ->
+ lists:any(fun t_is_opaque/1, Ts);
+has_opaque_subtype(T) ->
+ t_is_opaque(T).
+
+-spec t_opaque_structure(erl_type()) -> erl_type().
+
+t_opaque_structure(?opaque(Elements)) ->
+ t_sup([Struct || #opaque{struct = Struct} <- ordsets:to_list(Elements)]).
+
+-spec t_contains_opaque(erl_type()) -> boolean().
+
+t_contains_opaque(Type) ->
+ t_contains_opaque(Type, []).
+
+%% Returns 'true' iff there is an opaque type that is *not* one of
+%% the types of the second argument.
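+%%
+%% Roughly: if T contains a single opaque type O whose structure contains
+%% no further opaques, then t_contains_opaque(T, []) is true while
+%% t_contains_opaque(T, [O]) is false, assuming is_opaque_type/2
+%% recognizes O as a member of that list.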
+
+-spec t_contains_opaque(erl_type(), [erl_type()]) -> boolean().
+
+t_contains_opaque(?any, _Opaques) -> false;
+t_contains_opaque(?none, _Opaques) -> false;
+t_contains_opaque(?unit, _Opaques) -> false;
+t_contains_opaque(?atom(_Set), _Opaques) -> false;
+t_contains_opaque(?bitstr(_Unit, _Base), _Opaques) -> false;
+t_contains_opaque(?float, _Opaques) -> false;
+t_contains_opaque(?function(Domain, Range), Opaques) ->
+ t_contains_opaque(Domain, Opaques)
+ orelse t_contains_opaque(Range, Opaques);
+t_contains_opaque(?identifier(_Types), _Opaques) -> false;
+t_contains_opaque(?integer(_Types), _Opaques) -> false;
+t_contains_opaque(?int_range(_From, _To), _Opaques) -> false;
+t_contains_opaque(?int_set(_Set), _Opaques) -> false;
+t_contains_opaque(?list(Type, Tail, _), Opaques) ->
+ t_contains_opaque(Type, Opaques) orelse t_contains_opaque(Tail, Opaques);
+t_contains_opaque(?map(_, _, _) = Map, Opaques) ->
+ list_contains_opaque(map_all_types(Map), Opaques);
+t_contains_opaque(?matchstate(_P, _Slots), _Opaques) -> false;
+t_contains_opaque(?nil, _Opaques) -> false;
+t_contains_opaque(?number(_Set, _Tag), _Opaques) -> false;
+t_contains_opaque(?opaque(_)=T, Opaques) ->
+ not is_opaque_type(T, Opaques)
+ orelse t_contains_opaque(t_opaque_structure(T));
+t_contains_opaque(?product(Types), Opaques) ->
+ list_contains_opaque(Types, Opaques);
+t_contains_opaque(?tuple(?any, _, _), _Opaques) -> false;
+t_contains_opaque(?tuple(Types, _, _), Opaques) ->
+ list_contains_opaque(Types, Opaques);
+t_contains_opaque(?tuple_set(_Set) = T, Opaques) ->
+ list_contains_opaque(t_tuple_subtypes(T), Opaques);
+t_contains_opaque(?union(List), Opaques) ->
+ list_contains_opaque(List, Opaques);
+t_contains_opaque(?var(_Id), _Opaques) -> false.
+
+-spec list_contains_opaque([erl_type()], [erl_type()]) -> boolean().
+
+list_contains_opaque(List, Opaques) ->
+ lists:any(fun(E) -> t_contains_opaque(E, Opaques) end, List).
+
+%% t_find_opaque_mismatch/3 of two types should only be used if their
+%% t_inf is t_none() due to some opaque type violation.
+%%
+%% The first argument of the function is the pattern and its second
+%% argument the type we are matching against the pattern.
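+%%
+%% Sketch of the intended use: when a pattern is matched against a value
+%% of an opaque type that is not among Opaques, t_inf/2 of the two is
+%% t_none(); this function is then expected to return {ok, Top, Culprit},
+%% where Culprit is the opaque type (on either side) responsible for the
+%% violation, or 'error' when no such culprit can be pinpointed.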
+
+-spec t_find_opaque_mismatch(erl_type(), erl_type(), [erl_type()]) ->
+ 'error' | {'ok', erl_type(), erl_type()}.
+
+t_find_opaque_mismatch(T1, T2, Opaques) ->
+ t_find_opaque_mismatch(T1, T2, T2, Opaques).
+
+t_find_opaque_mismatch(?any, _Type, _TopType, _Opaques) -> error;
+t_find_opaque_mismatch(?none, _Type, _TopType, _Opaques) -> error;
+t_find_opaque_mismatch(?list(T1, Tl1, _), ?list(T2, Tl2, _), TopType, Opaques) ->
+ t_find_opaque_mismatch_ordlists([T1, Tl1], [T2, Tl2], TopType, Opaques);
+t_find_opaque_mismatch(T1, ?opaque(_) = T2, TopType, Opaques) ->
+ case is_opaque_type(T2, Opaques) of
+ false -> {ok, TopType, T2};
+ true ->
+ t_find_opaque_mismatch(T1, t_opaque_structure(T2), TopType, Opaques)
+ end;
+t_find_opaque_mismatch(?opaque(_) = T1, T2, TopType, Opaques) ->
+ %% The generated message is somewhat misleading:
+ case is_opaque_type(T1, Opaques) of
+ false -> {ok, TopType, T1};
+ true ->
+ t_find_opaque_mismatch(t_opaque_structure(T1), T2, TopType, Opaques)
+ end;
+t_find_opaque_mismatch(?product(T1), ?product(T2), TopType, Opaques) ->
+ t_find_opaque_mismatch_ordlists(T1, T2, TopType, Opaques);
+t_find_opaque_mismatch(?tuple(T1, Arity, _), ?tuple(T2, Arity, _),
+ TopType, Opaques) ->
+ t_find_opaque_mismatch_ordlists(T1, T2, TopType, Opaques);
+t_find_opaque_mismatch(?tuple(_, _, _) = T1, ?tuple_set(_) = T2,
+ TopType, Opaques) ->
+ Tuples1 = t_tuple_subtypes(T1),
+ Tuples2 = t_tuple_subtypes(T2),
+ t_find_opaque_mismatch_lists(Tuples1, Tuples2, TopType, Opaques);
+t_find_opaque_mismatch(T1, ?union(U2), TopType, Opaques) ->
+ t_find_opaque_mismatch_lists([T1], U2, TopType, Opaques);
+t_find_opaque_mismatch(_T1, _T2, _TopType, _Opaques) -> error.
+
+t_find_opaque_mismatch_ordlists(L1, L2, TopType, Opaques) ->
+ List = lists:zipwith(fun(T1, T2) ->
+ t_find_opaque_mismatch(T1, T2, TopType, Opaques)
+ end, L1, L2),
+ t_find_opaque_mismatch_list(List).
+
+t_find_opaque_mismatch_lists(L1, L2, _TopType, Opaques) ->
+ List = [t_find_opaque_mismatch(T1, T2, T2, Opaques) || T1 <- L1, T2 <- L2],
+ t_find_opaque_mismatch_list(List).
+
+t_find_opaque_mismatch_list([]) -> error;
+t_find_opaque_mismatch_list([H|T]) ->
+ case H of
+ {ok, _T1, _T2} -> H;
+ error -> t_find_opaque_mismatch_list(T)
+ end.
+
+-spec t_find_unknown_opaque(erl_type(), erl_type(), opaques()) ->
+ [pos_integer()].
+
+%% The nice thing about using two types and t_inf(), compared to
+%% calling t_contains_opaque/2, is that the traversal stops when
+%% there is a mismatch, which means that unknown opaque types "below"
+%% the mismatch are not found.
+t_find_unknown_opaque(_T1, _T2, 'universe') -> [];
+t_find_unknown_opaque(T1, T2, Opaques) ->
+ try t_inf(T1, T2, {match, Opaques}) of
+ _ -> []
+ catch throw:{pos, Ns} -> Ns
+ end.
+
+-spec t_decorate_with_opaque(erl_type(), erl_type(), [erl_type()]) -> erl_type().
+
+%% The first argument can contain opaque types. The second argument
+%% is assumed to be taken from the contract.
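+%%
+%% Sketch: if the inferred type T1 is, say, a plain tuple and the contract
+%% type T2 is an opaque whose structure is that tuple, their t_inf/2
+%% contains the opaque, and decorate/3 below rebuilds T1 so that the
+%% matching parts are presented as the opaque from the contract; when T2
+%% contains no opaques at all, T1 is returned unchanged.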
+
+t_decorate_with_opaque(T1, T2, Opaques) ->
+ case t_is_equal(T1, T2) orelse not t_contains_opaque(T2) of
+ true -> T1;
+ false ->
+ T = t_inf(T1, T2),
+ case t_contains_opaque(T) of
+ false -> T1;
+ true ->
+ R = decorate(T1, T, Opaques),
+ ?debug(case catch t_is_equal(t_unopaque(R), t_unopaque(T1)) of
+ true -> ok;
+ false ->
+ io:format("T1 = ~p,\n", [T1]),
+ io:format("T2 = ~p,\n", [T2]),
+ io:format("O = ~p,\n", [Opaques]),
+ io:format("erl_types:t_decorate_with_opaque(T1,T2,O).\n"),
+ throw({error, "Failed to handle opaque types"})
+ end),
+ R
+ end
+ end.
+
+decorate(Type, ?none, _Opaques) -> Type;
+decorate(?function(Domain, Range), ?function(D, R), Opaques) ->
+ ?function(decorate(Domain, D, Opaques), decorate(Range, R, Opaques));
+decorate(?list(Types, Tail, Size), ?list(Ts, Tl, _Sz), Opaques) ->
+ ?list(decorate(Types, Ts, Opaques), decorate(Tail, Tl, Opaques), Size);
+decorate(?product(Types), ?product(Ts), Opaques) ->
+ ?product(list_decorate(Types, Ts, Opaques));
+decorate(?tuple(_, _, _)=T, ?tuple(?any, _, _), _Opaques) -> T;
+decorate(?tuple(?any, _, _)=T, ?tuple(_, _, _), _Opaques) -> T;
+decorate(?tuple(Types, Arity, Tag), ?tuple(Ts, Arity, _), Opaques) ->
+ ?tuple(list_decorate(Types, Ts, Opaques), Arity, Tag);
+decorate(?tuple_set(List), ?tuple(_, Arity, _) = T, Opaques) ->
+ decorate_tuple_sets(List, [{Arity, [T]}], Opaques);
+decorate(?tuple_set(List), ?tuple_set(L), Opaques) ->
+ decorate_tuple_sets(List, L, Opaques);
+decorate(?union(List), T, Opaques) when T =/= ?any ->
+ ?union(L) = force_union(T),
+ union_decorate(List, L, Opaques);
+decorate(?opaque(_)=T, _, _Opaques) -> T;
+decorate(T, ?union(L), Opaques) when T =/= ?any ->
+ ?union(List) = force_union(T),
+ union_decorate(List, L, Opaques);
+decorate(Type, ?opaque(_)=T, Opaques) ->
+ decorate_with_opaque(Type, T, Opaques);
+decorate(Type, _T, _Opaques) -> Type.
+
+%% Note: it is important that #opaque.struct is a subtype of the
+%% opaque type.
+decorate_with_opaque(Type, ?opaque(Set2), Opaques) ->
+ case decoration(set_to_list(Set2), Type, Opaques, [], false) of
+ {[], false} -> Type;
+ {List, All} when List =/= [] ->
+ NewType = ?opaque(ordsets:from_list(List)),
+ case All of
+ true -> NewType;
+ false -> t_sup(NewType, Type)
+ end
+ end.
+
+decoration([#opaque{struct = S} = Opaque|OpaqueTypes], Type, Opaques,
+ NewOpaqueTypes0, All) ->
+ IsOpaque = is_opaque_type2(Opaque, Opaques),
+ I = t_inf(Type, S),
+ case not IsOpaque orelse t_is_none(I) of
+ true -> decoration(OpaqueTypes, Type, Opaques, NewOpaqueTypes0, All);
+ false ->
+ NewOpaque = Opaque#opaque{struct = decorate(I, S, Opaques)},
+ NewAll = All orelse t_is_equal(I, Type),
+ NewOpaqueTypes = [NewOpaque|NewOpaqueTypes0],
+ decoration(OpaqueTypes, Type, Opaques, NewOpaqueTypes, NewAll)
+ end;
+decoration([], _Type, _Opaques, NewOpaqueTypes, All) ->
+ {NewOpaqueTypes, All}.
+
+-spec list_decorate([erl_type()], [erl_type()], opaques()) -> [erl_type()].
+
+list_decorate(List, L, Opaques) ->
+ [decorate(Elem, E, Opaques) || {Elem, E} <- lists:zip(List, L)].
+
+union_decorate(U1, U2, Opaques) ->
+ Union = union_decorate(U1, U2, Opaques, 0, []),
+ [A,B,F,I,L,N,T,M,_,Map] = U1,
+ [_,_,_,_,_,_,_,_,Opaque,_] = U2,
+ List = [A,B,F,I,L,N,T,M,Map],
+ DecList = [Dec ||
+ E <- List,
+ not t_is_none(E),
+ not t_is_none(Dec = decorate(E, Opaque, Opaques))],
+ t_sup([Union|DecList]).
+
+union_decorate([?none|Left1], [_|Left2], Opaques, N, Acc) ->
+ union_decorate(Left1, Left2, Opaques, N, [?none|Acc]);
+union_decorate([T1|Left1], [?none|Left2], Opaques, N, Acc) ->
+ union_decorate(Left1, Left2, Opaques, N+1, [T1|Acc]);
+union_decorate([T1|Left1], [T2|Left2], Opaques, N, Acc) ->
+ union_decorate(Left1, Left2, Opaques, N+1, [decorate(T1, T2, Opaques)|Acc]);
+union_decorate([], [], _Opaques, N, Acc) ->
+ if N =:= 0 -> ?none;
+ N =:= 1 ->
+ [Type] = [T || T <- Acc, T =/= ?none],
+ Type;
+ N >= 2 -> ?union(lists:reverse(Acc))
+ end.
+
+decorate_tuple_sets(List, L, Opaques) ->
+ decorate_tuple_sets(List, L, Opaques, []).
+
+decorate_tuple_sets([{Arity, Tuples}|List], [{Arity, Ts}|L], Opaques, Acc) ->
+ DecTs = decorate_tuples_in_sets(Tuples, Ts, Opaques),
+ decorate_tuple_sets(List, L, Opaques, [{Arity, DecTs}|Acc]);
+decorate_tuple_sets([ArTup|List], L, Opaques, Acc) ->
+ decorate_tuple_sets(List, L, Opaques, [ArTup|Acc]);
+decorate_tuple_sets([], _L, _Opaques, Acc) ->
+ ?tuple_set(lists:reverse(Acc)).
+
+decorate_tuples_in_sets([?tuple(Elements, _, ?any)], Ts, Opaques) ->
+ NewList = [list_decorate(Elements, Es, Opaques) || ?tuple(Es, _, _) <- Ts],
+ case t_sup([t_tuple(Es) || Es <- NewList]) of
+ ?tuple_set([{_Arity, Tuples}]) -> Tuples;
+ ?tuple(_, _, _)=Tuple -> [Tuple]
+ end;
+decorate_tuples_in_sets(Tuples, Ts, Opaques) ->
+ decorate_tuples_in_sets(Tuples, Ts, Opaques, []).
+
+decorate_tuples_in_sets([?tuple(Elements, Arity, Tag1) = T1|Tuples] = L1,
+ [?tuple(Es, Arity, Tag2)|Ts] = L2, Opaques, Acc) ->
+ if
+ Tag1 < Tag2 -> decorate_tuples_in_sets(Tuples, L2, Opaques, [T1|Acc]);
+ Tag1 > Tag2 -> decorate_tuples_in_sets(L1, Ts, Opaques, Acc);
+ Tag1 =:= Tag2 ->
+ NewElements = list_decorate(Elements, Es, Opaques),
+ NewAcc = [?tuple(NewElements, Arity, Tag1)|Acc],
+ decorate_tuples_in_sets(Tuples, Ts, Opaques, NewAcc)
+ end;
+decorate_tuples_in_sets([T1|Tuples], L2, Opaques, Acc) ->
+ decorate_tuples_in_sets(Tuples, L2, Opaques, [T1|Acc]);
+decorate_tuples_in_sets([], _L, _Opaques, Acc) ->
+ lists:reverse(Acc).
+
+-spec t_opaque_from_records(type_table()) -> [erl_type()].
+
+t_opaque_from_records(RecDict) ->
+ OpaqueRecDict =
+ dict:filter(fun(Key, _Value) ->
+ case Key of
+ {opaque, _Name, _Arity} -> true;
+ _ -> false
+ end
+ end, RecDict),
+ OpaqueTypeDict =
+ dict:map(fun({opaque, Name, _Arity},
+ {{Module, _FileLine, _Form, ArgNames}, _Type}) ->
+ %% Args = args_to_types(ArgNames),
+ %% List = lists:zip(ArgNames, Args),
+ %% TmpVarTab = maps:to_list(List),
+ %% Rep = t_from_form(Type, RecDict, TmpVarTab),
+ Rep = t_any(), % not used for anything right now
+ Args = [t_any() || _ <- ArgNames],
+ t_opaque(Module, Name, Args, Rep)
+ end, OpaqueRecDict),
+ [OpaqueType || {_Key, OpaqueType} <- dict:to_list(OpaqueTypeDict)].
+
+%% Decompose, within arg1, instances of the opaque types in arg2 into
+%% their structured types.
+%% XXX: Same as t_unopaque
+-spec t_struct_from_opaque(erl_type(), [erl_type()]) -> erl_type().
+
+t_struct_from_opaque(?function(Domain, Range), Opaques) ->
+ ?function(t_struct_from_opaque(Domain, Opaques),
+ t_struct_from_opaque(Range, Opaques));
+t_struct_from_opaque(?list(Types, Term, Size), Opaques) ->
+ ?list(t_struct_from_opaque(Types, Opaques),
+ t_struct_from_opaque(Term, Opaques), Size);
+t_struct_from_opaque(?opaque(_) = T, Opaques) ->
+ case is_opaque_type(T, Opaques) of
+ true -> t_opaque_structure(T);
+ false -> T
+ end;
+t_struct_from_opaque(?product(Types), Opaques) ->
+ ?product(list_struct_from_opaque(Types, Opaques));
+t_struct_from_opaque(?tuple(?any, _, _) = T, _Opaques) -> T;
+t_struct_from_opaque(?tuple(Types, Arity, Tag), Opaques) ->
+ ?tuple(list_struct_from_opaque(Types, Opaques), Arity, Tag);
+t_struct_from_opaque(?tuple_set(Set), Opaques) ->
+ NewSet = [{Sz, [t_struct_from_opaque(T, Opaques) || T <- Tuples]}
+ || {Sz, Tuples} <- Set],
+ ?tuple_set(NewSet);
+t_struct_from_opaque(?union(List), Opaques) ->
+ t_sup(list_struct_from_opaque(List, Opaques));
+t_struct_from_opaque(Type, _Opaques) -> Type.
+
+list_struct_from_opaque(Types, Opaques) ->
+ [t_struct_from_opaque(Type, Opaques) || Type <- Types].
+
+%%-----------------------------------------------------------------------------
+
+-type mod_records() :: dict:dict(module(), type_table()).
+
+%%-----------------------------------------------------------------------------
+%% Unit type. Signals non-termination.
+%%
+
+-spec t_unit() -> erl_type().
+
+t_unit() ->
+ ?unit.
+
+-spec t_is_unit(erl_type()) -> boolean().
+
+t_is_unit(?unit) -> true;
+t_is_unit(_) -> false.
+
+-spec t_is_none_or_unit(erl_type()) -> boolean().
+
+t_is_none_or_unit(?none) -> true;
+t_is_none_or_unit(?unit) -> true;
+t_is_none_or_unit(_) -> false.
+
+%%-----------------------------------------------------------------------------
+%% Atoms and the derived type boolean
+%%
+
+-spec t_atom() -> erl_type().
+
+t_atom() ->
+ ?atom(?any).
+
+-spec t_atom(atom()) -> erl_type().
+
+t_atom(A) when is_atom(A) ->
+ ?atom(set_singleton(A)).
+
+-spec t_atoms([atom()]) -> erl_type().
+
+t_atoms(List) when is_list(List) ->
+ t_sup([t_atom(A) || A <- List]).
+
+-spec t_atom_vals(erl_type()) -> 'unknown' | [atom(),...].
+
+t_atom_vals(Type) ->
+ t_atom_vals(Type, 'universe').
+
+-spec t_atom_vals(erl_type(), opaques()) -> 'unknown' | [atom(),...].
+
+t_atom_vals(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun atom_vals/1).
+
+atom_vals(?atom(?any)) -> unknown;
+atom_vals(?atom(Set)) -> set_to_list(Set);
+atom_vals(?opaque(_)) -> unknown;
+atom_vals(Other) ->
+ ?atom(_) = Atm = t_inf(t_atom(), Other),
+ atom_vals(Atm).
+
+-spec t_is_atom(erl_type()) -> boolean().
+
+t_is_atom(Type) ->
+ t_is_atom(Type, 'universe').
+
+-spec t_is_atom(erl_type(), opaques()) -> boolean().
+
+t_is_atom(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun is_atom1/1).
+
+is_atom1(?atom(_)) -> true;
+is_atom1(_) -> false.
+
+-spec t_is_any_atom(atom(), erl_type()) -> boolean().
+
+t_is_any_atom(Atom, SomeAtomsType) ->
+ t_is_any_atom(Atom, SomeAtomsType, 'universe').
+
+-spec t_is_any_atom(atom(), erl_type(), opaques()) -> boolean().
+
+t_is_any_atom(Atom, SomeAtomsType, Opaques) ->
+ do_opaque(SomeAtomsType, Opaques,
+ fun(AtomsType) -> is_any_atom(Atom, AtomsType) end).
+
+is_any_atom(Atom, ?atom(?any)) when is_atom(Atom) -> false;
+is_any_atom(Atom, ?atom(Set)) when is_atom(Atom) ->
+ set_is_singleton(Atom, Set);
+is_any_atom(Atom, _) when is_atom(Atom) -> false.
+
+%%------------------------------------
+
+-spec t_is_boolean(erl_type()) -> boolean().
+
+t_is_boolean(Type) ->
+ t_is_boolean(Type, 'universe').
+
+-spec t_is_boolean(erl_type(), opaques()) -> boolean().
+
+t_is_boolean(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun is_boolean/1).
+
+-spec t_boolean() -> erl_type().
+
+t_boolean() ->
+ ?atom(set_from_list([false, true])).
+
+is_boolean(?atom(?any)) -> false;
+is_boolean(?atom(Set)) ->
+ case set_size(Set) of
+ 1 -> set_is_element(true, Set) orelse set_is_element(false, Set);
+ 2 -> set_is_element(true, Set) andalso set_is_element(false, Set);
+ N when is_integer(N), N > 2 -> false
+ end;
+is_boolean(_) -> false.
+
+%%-----------------------------------------------------------------------------
+%% Binaries
+%%
+
+-spec t_binary() -> erl_type().
+
+t_binary() ->
+ ?bitstr(8, 0).
+
+-spec t_is_binary(erl_type()) -> boolean().
+
+t_is_binary(Type) ->
+ t_is_binary(Type, 'universe').
+
+-spec t_is_binary(erl_type(), opaques()) -> boolean().
+
+t_is_binary(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun is_binary/1).
+
+is_binary(?bitstr(U, B)) ->
+ ((U rem 8) =:= 0) andalso ((B rem 8) =:= 0);
+is_binary(_) -> false.
+
+%%-----------------------------------------------------------------------------
+%% Bitstrings
+%%
+
+-spec t_bitstr() -> erl_type().
+
+t_bitstr() ->
+ ?bitstr(1, 0).
+
+-spec t_bitstr(non_neg_integer(), non_neg_integer()) -> erl_type().
+
+t_bitstr(U, B) ->
+ NewB =
+ if
+ U =:= 0 -> B;
+ B >= (U * (?UNIT_MULTIPLIER + 1)) ->
+ (B rem U) + U * ?UNIT_MULTIPLIER;
+ true ->
+ B
+ end,
+ ?bitstr(U, NewB).
+
+-spec t_bitstr_unit(erl_type()) -> non_neg_integer().
+
+t_bitstr_unit(?bitstr(U, _)) -> U.
+
+-spec t_bitstr_base(erl_type()) -> non_neg_integer().
+
+t_bitstr_base(?bitstr(_, B)) -> B.
+
+-spec t_bitstr_concat([erl_type()]) -> erl_type().
+
+t_bitstr_concat(List) ->
+ t_bitstr_concat_1(List, t_bitstr(0, 0)).
+
+t_bitstr_concat_1([T|Left], Acc) ->
+ t_bitstr_concat_1(Left, t_bitstr_concat(Acc, T));
+t_bitstr_concat_1([], Acc) ->
+ Acc.
+
+-spec t_bitstr_concat(erl_type(), erl_type()) -> erl_type().
+
+t_bitstr_concat(T1, T2) ->
+ T1p = t_inf(t_bitstr(), T1),
+ T2p = t_inf(t_bitstr(), T2),
+ bitstr_concat(t_unopaque(T1p), t_unopaque(T2p)).
+
+-spec t_bitstr_match(erl_type(), erl_type()) -> erl_type().
+
+t_bitstr_match(T1, T2) ->
+ T1p = t_inf(t_bitstr(), T1),
+ T2p = t_inf(t_bitstr(), T2),
+ bitstr_match(t_unopaque(T1p), t_unopaque(T2p)).
+
+-spec t_is_bitstr(erl_type()) -> boolean().
+
+t_is_bitstr(Type) ->
+ t_is_bitstr(Type, 'universe').
+
+-spec t_is_bitstr(erl_type(), opaques()) -> boolean().
+
+t_is_bitstr(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun is_bitstr/1).
+
+is_bitstr(?bitstr(_, _)) -> true;
+is_bitstr(_) -> false.
+
+%%-----------------------------------------------------------------------------
+%% Matchstates
+%%
+
+-spec t_matchstate() -> erl_type().
+
+t_matchstate() ->
+ ?any_matchstate.
+
+-spec t_matchstate(erl_type(), non_neg_integer()) -> erl_type().
+
+t_matchstate(Init, 0) ->
+ ?matchstate(Init, Init);
+t_matchstate(Init, Max) when is_integer(Max) ->
+ Slots = [Init|[?none || _ <- lists:seq(1, Max)]],
+ ?matchstate(Init, t_product(Slots)).
+
+-spec t_is_matchstate(erl_type()) -> boolean().
+
+t_is_matchstate(?matchstate(_, _)) -> true;
+t_is_matchstate(_) -> false.
+
+-spec t_matchstate_present(erl_type()) -> erl_type().
+
+t_matchstate_present(Type) ->
+ case t_inf(t_matchstate(), Type) of
+ ?matchstate(P, _) -> P;
+ _ -> ?none
+ end.
+
+-spec t_matchstate_slot(erl_type(), non_neg_integer()) -> erl_type().
+
+t_matchstate_slot(Type, Slot) ->
+ RealSlot = Slot + 1,
+ case t_inf(t_matchstate(), Type) of
+ ?matchstate(_, ?any) -> ?any;
+ ?matchstate(_, ?product(Vals)) when length(Vals) >= RealSlot ->
+ lists:nth(RealSlot, Vals);
+ ?matchstate(_, ?product(_)) ->
+ ?none;
+ ?matchstate(_, SlotType) when RealSlot =:= 1 ->
+ SlotType;
+ _ ->
+ ?none
+ end.
+
+-spec t_matchstate_slots(erl_type()) -> erl_type().
+
+t_matchstate_slots(?matchstate(_, Slots)) ->
+ Slots.
+
+-spec t_matchstate_update_present(erl_type(), erl_type()) -> erl_type().
+
+t_matchstate_update_present(New, Type) ->
+ case t_inf(t_matchstate(), Type) of
+ ?matchstate(_, Slots) ->
+ ?matchstate(New, Slots);
+ _ -> ?none
+ end.
+
+-spec t_matchstate_update_slot(erl_type(), erl_type(), non_neg_integer()) -> erl_type().
+
+t_matchstate_update_slot(New, Type, Slot) ->
+ RealSlot = Slot + 1,
+ case t_inf(t_matchstate(), Type) of
+ ?matchstate(Pres, Slots) ->
+ NewSlots =
+ case Slots of
+ ?any ->
+ ?any;
+ ?product(Vals) when length(Vals) >= RealSlot ->
+ NewTuple = setelement(RealSlot, list_to_tuple(Vals), New),
+ NewVals = tuple_to_list(NewTuple),
+ ?product(NewVals);
+ ?product(_) ->
+ ?none;
+ _ when RealSlot =:= 1 ->
+ New;
+ _ ->
+ ?none
+ end,
+ ?matchstate(Pres, NewSlots);
+ _ ->
+ ?none
+ end.
+
+%%-----------------------------------------------------------------------------
+%% Functions
+%%
+
+-spec t_fun() -> erl_type().
+
+t_fun() ->
+ ?function(?any, ?any).
+
+-spec t_fun(erl_type()) -> erl_type().
+
+t_fun(Range) ->
+ ?function(?any, Range).
+
+-spec t_fun([erl_type()] | arity(), erl_type()) -> erl_type().
+
+t_fun(Domain, Range) when is_list(Domain) ->
+ ?function(?product(Domain), Range);
+t_fun(Arity, Range) when is_integer(Arity), 0 =< Arity, Arity =< 255 ->
+ ?function(?product(lists:duplicate(Arity, ?any)), Range).
+
+-spec t_fun_args(erl_type()) -> 'unknown' | [erl_type()].
+
+t_fun_args(Type) ->
+ t_fun_args(Type, 'universe').
+
+-spec t_fun_args(erl_type(), opaques()) -> 'unknown' | [erl_type()].
+
+t_fun_args(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun fun_args/1).
+
+fun_args(?function(?any, _)) ->
+ unknown;
+fun_args(?function(?product(Domain), _)) when is_list(Domain) ->
+ Domain.
+
+-spec t_fun_arity(erl_type()) -> 'unknown' | non_neg_integer().
+
+t_fun_arity(Type) ->
+ t_fun_arity(Type, 'universe').
+
+-spec t_fun_arity(erl_type(), opaques()) -> 'unknown' | non_neg_integer().
+
+t_fun_arity(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun fun_arity/1).
+
+fun_arity(?function(?any, _)) ->
+ unknown;
+fun_arity(?function(?product(Domain), _)) ->
+ length(Domain).
+
+-spec t_fun_range(erl_type()) -> erl_type().
+
+t_fun_range(Type) ->
+ t_fun_range(Type, 'universe').
+
+-spec t_fun_range(erl_type(), opaques()) -> erl_type().
+
+t_fun_range(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun fun_range/1).
+
+fun_range(?function(_, Range)) ->
+ Range.
+
+-spec t_is_fun(erl_type()) -> boolean().
+
+t_is_fun(Type) ->
+ t_is_fun(Type, 'universe').
+
+-spec t_is_fun(erl_type(), opaques()) -> boolean().
+
+t_is_fun(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun is_fun/1).
+
+is_fun(?function(_, _)) -> true;
+is_fun(_) -> false.
+
+%%-----------------------------------------------------------------------------
+%% Identifiers. Includes ports, pids and refs.
+%%
+
+-spec t_identifier() -> erl_type().
+
+t_identifier() ->
+ ?identifier(?any).
+
+-ifdef(DO_ERL_TYPES_TEST).
+-spec t_is_identifier(erl_type()) -> boolean().
+
+t_is_identifier(?identifier(_)) -> true;
+t_is_identifier(_) -> false.
+-endif.
+
+%%------------------------------------
+
+-spec t_port() -> erl_type().
+
+t_port() ->
+ ?identifier(set_singleton(?port_qual)).
+
+-spec t_is_port(erl_type()) -> boolean().
+
+t_is_port(Type) ->
+ t_is_port(Type, 'universe').
+
+-spec t_is_port(erl_type(), opaques()) -> boolean().
+
+t_is_port(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun is_port1/1).
+
+is_port1(?identifier(?any)) -> false;
+is_port1(?identifier(Set)) -> set_is_singleton(?port_qual, Set);
+is_port1(_) -> false.
+
+%%------------------------------------
+
+-spec t_pid() -> erl_type().
+
+t_pid() ->
+ ?identifier(set_singleton(?pid_qual)).
+
+-spec t_is_pid(erl_type()) -> boolean().
+
+t_is_pid(Type) ->
+ t_is_pid(Type, 'universe').
+
+-spec t_is_pid(erl_type(), opaques()) -> boolean().
+
+t_is_pid(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun is_pid1/1).
+
+is_pid1(?identifier(?any)) -> false;
+is_pid1(?identifier(Set)) -> set_is_singleton(?pid_qual, Set);
+is_pid1(_) -> false.
+
+%%------------------------------------
+
+-spec t_reference() -> erl_type().
+
+t_reference() ->
+ ?identifier(set_singleton(?reference_qual)).
+
+-spec t_is_reference(erl_type()) -> boolean().
+
+t_is_reference(Type) ->
+ t_is_reference(Type, 'universe').
+
+-spec t_is_reference(erl_type(), opaques()) -> boolean().
+
+t_is_reference(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun is_reference1/1).
+
+is_reference1(?identifier(?any)) -> false;
+is_reference1(?identifier(Set)) -> set_is_singleton(?reference_qual, Set);
+is_reference1(_) -> false.
+
+%%-----------------------------------------------------------------------------
+%% Numbers are divided into floats, integers, chars and bytes.
+%%
+
+-spec t_number() -> erl_type().
+
+t_number() ->
+ ?number(?any, ?unknown_qual).
+
+-spec t_number(integer()) -> erl_type().
+
+t_number(X) when is_integer(X) ->
+ t_integer(X).
+
+-spec t_is_number(erl_type()) -> boolean().
+
+t_is_number(Type) ->
+ t_is_number(Type, 'universe').
+
+-spec t_is_number(erl_type(), opaques()) -> boolean().
+
+t_is_number(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun is_number/1).
+
+is_number(?number(_, _)) -> true;
+is_number(_) -> false.
+
+%% Currently, the type system collapses all floats to ?float and does
+%% not keep any information about their values. As a result, the list
+%% that this function returns contains only integers.
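+%% Illustrative examples (for exposition only): t_number_vals(t_integer(42))
+%% is expected to yield [42], while t_number_vals(t_sup(t_integer(1),
+%% t_float())) yields 'unknown', since the float part carries no value
+%% information.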
+
+-spec t_number_vals(erl_type()) -> 'unknown' | [integer(),...].
+
+t_number_vals(Type) ->
+ t_number_vals(Type, 'universe').
+
+-spec t_number_vals(erl_type(), opaques()) -> 'unknown' | [integer(),...].
+
+t_number_vals(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun number_vals/1).
+
+number_vals(?int_set(Set)) -> set_to_list(Set);
+number_vals(?number(_, _)) -> unknown;
+number_vals(?opaque(_)) -> unknown;
+number_vals(Other) ->
+ Inf = t_inf(Other, t_number()),
+ false = t_is_none(Inf), % sanity check
+ number_vals(Inf).
+
+%%------------------------------------
+
+-spec t_float() -> erl_type().
+
+t_float() ->
+ ?float.
+
+-spec t_is_float(erl_type()) -> boolean().
+
+t_is_float(Type) ->
+ t_is_float(Type, 'universe').
+
+-spec t_is_float(erl_type(), opaques()) -> boolean().
+
+t_is_float(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun is_float1/1).
+
+is_float1(?float) -> true;
+is_float1(_) -> false.
+
+%%------------------------------------
+
+-spec t_integer() -> erl_type().
+
+t_integer() ->
+ ?integer(?any).
+
+-spec t_integer(integer()) -> erl_type().
+
+t_integer(I) when is_integer(I) ->
+ ?int_set(set_singleton(I)).
+
+-spec t_integers([integer()]) -> erl_type().
+
+t_integers(List) when is_list(List) ->
+ t_sup([t_integer(I) || I <- List]).
+
+-spec t_is_integer(erl_type()) -> boolean().
+
+t_is_integer(Type) ->
+ t_is_integer(Type, 'universe').
+
+-spec t_is_integer(erl_type(), opaques()) -> boolean().
+
+t_is_integer(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun is_integer1/1).
+
+is_integer1(?integer(_)) -> true;
+is_integer1(_) -> false.
+
+%%------------------------------------
+
+-spec t_byte() -> erl_type().
+
+t_byte() ->
+ ?byte.
+
+-ifdef(DO_ERL_TYPES_TEST).
+-spec t_is_byte(erl_type()) -> boolean().
+
+t_is_byte(?int_range(neg_inf, _)) -> false;
+t_is_byte(?int_range(_, pos_inf)) -> false;
+t_is_byte(?int_range(From, To))
+ when is_integer(From), From >= 0, is_integer(To), To =< ?MAX_BYTE -> true;
+t_is_byte(?int_set(Set)) ->
+ (set_min(Set) >= 0) andalso (set_max(Set) =< ?MAX_BYTE);
+t_is_byte(_) -> false.
+-endif.
+
+%%------------------------------------
+
+-spec t_char() -> erl_type().
+
+t_char() ->
+ ?char.
+
+-spec t_is_char(erl_type()) -> boolean().
+
+t_is_char(?int_range(neg_inf, _)) -> false;
+t_is_char(?int_range(_, pos_inf)) -> false;
+t_is_char(?int_range(From, To))
+ when is_integer(From), From >= 0, is_integer(To), To =< ?MAX_CHAR -> true;
+t_is_char(?int_set(Set)) ->
+ (set_min(Set) >= 0) andalso (set_max(Set) =< ?MAX_CHAR);
+t_is_char(_) -> false.
+
+%%-----------------------------------------------------------------------------
+%% Lists
+%%
+
+-spec t_cons() -> erl_type().
+
+t_cons() ->
+ ?nonempty_list(?any, ?any).
+
+%% Note that if the tail argument can be a list, we must collapse the
+%% content of the list to include both the content of the tail list
+%% and the head of the cons. If for example the tail argument is any()
+%% then there can be any list in the tail and the content of the
+%% returned list must be any().
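+%% Illustrative example (for exposition only): t_cons(t_atom(foo),
+%% t_list(t_integer())) is expected to yield a nonempty list whose contents
+%% are 'foo' | integer() and whose termination is [], rather than a list
+%% carrying only 'foo'.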
+
+-spec t_cons(erl_type(), erl_type()) -> erl_type().
+
+t_cons(?none, _) -> ?none;
+t_cons(_, ?none) -> ?none;
+t_cons(?unit, _) -> ?none;
+t_cons(_, ?unit) -> ?none;
+t_cons(Hd, ?nil) ->
+ ?nonempty_list(Hd, ?nil);
+t_cons(Hd, ?list(Contents, Termination, _)) ->
+ ?nonempty_list(t_sup(Contents, Hd), Termination);
+t_cons(Hd, Tail) ->
+ case cons_tail(t_inf(Tail, t_maybe_improper_list())) of
+ ?list(Contents, Termination, _Size) ->
+ %% Collapse the list part of the termination but keep the
+ %% non-list part intact.
+ NewTermination = t_sup(t_subtract(Tail, t_maybe_improper_list()),
+ Termination),
+ ?nonempty_list(t_sup(Hd, Contents), NewTermination);
+ ?nil -> ?nonempty_list(Hd, Tail);
+ ?none -> ?nonempty_list(Hd, Tail);
+ ?unit -> ?none
+ end.
+
+cons_tail(Type) ->
+ do_opaque(Type, 'universe', fun(T) -> T end).
+
+-spec t_is_cons(erl_type()) -> boolean().
+
+t_is_cons(Type) ->
+ t_is_cons(Type, 'universe').
+
+-spec t_is_cons(erl_type(), opaques()) -> boolean().
+
+t_is_cons(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun is_cons/1).
+
+is_cons(?nonempty_list(_, _)) -> true;
+is_cons(_) -> false.
+
+-spec t_cons_hd(erl_type()) -> erl_type().
+
+t_cons_hd(Type) ->
+ t_cons_hd(Type, 'universe').
+
+-spec t_cons_hd(erl_type(), opaques()) -> erl_type().
+
+t_cons_hd(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun cons_hd/1).
+
+cons_hd(?nonempty_list(Contents, _Termination)) -> Contents.
+
+-spec t_cons_tl(erl_type()) -> erl_type().
+
+t_cons_tl(Type) ->
+ t_cons_tl(Type, 'universe').
+
+-spec t_cons_tl(erl_type(), opaques()) -> erl_type().
+
+t_cons_tl(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun cons_tl/1).
+
+cons_tl(?nonempty_list(_Contents, Termination) = T) ->
+ t_sup(Termination, T).
+
+-spec t_nil() -> erl_type().
+
+t_nil() ->
+ ?nil.
+
+-spec t_is_nil(erl_type()) -> boolean().
+
+t_is_nil(Type) ->
+ t_is_nil(Type, 'universe').
+
+-spec t_is_nil(erl_type(), opaques()) -> boolean().
+
+t_is_nil(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun is_nil/1).
+
+is_nil(?nil) -> true;
+is_nil(_) -> false.
+
+-spec t_list() -> erl_type().
+
+t_list() ->
+ ?list(?any, ?nil, ?unknown_qual).
+
+-spec t_list(erl_type()) -> erl_type().
+
+t_list(?none) -> ?none;
+t_list(?unit) -> ?none;
+t_list(Contents) ->
+ ?list(Contents, ?nil, ?unknown_qual).
+
+-spec t_list_elements(erl_type()) -> erl_type().
+
+t_list_elements(Type) ->
+ t_list_elements(Type, 'universe').
+
+-spec t_list_elements(erl_type(), opaques()) -> erl_type().
+
+t_list_elements(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun list_elements/1).
+
+list_elements(?list(Contents, _, _)) -> Contents;
+list_elements(?nil) -> ?none.
+
+-spec t_list_termination(erl_type(), opaques()) -> erl_type().
+
+t_list_termination(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun t_list_termination/1).
+
+-spec t_list_termination(erl_type()) -> erl_type().
+
+t_list_termination(?nil) -> ?nil;
+t_list_termination(?list(_, Term, _)) -> Term.
+
+-spec t_is_list(erl_type()) -> boolean().
+
+t_is_list(?list(_Contents, ?nil, _)) -> true;
+t_is_list(?nil) -> true;
+t_is_list(_) -> false.
+
+-spec t_nonempty_list() -> erl_type().
+
+t_nonempty_list() ->
+ t_cons(?any, ?nil).
+
+-spec t_nonempty_list(erl_type()) -> erl_type().
+
+t_nonempty_list(Type) ->
+ t_cons(Type, ?nil).
+
+-spec t_nonempty_string() -> erl_type().
+
+t_nonempty_string() ->
+ t_nonempty_list(t_char()).
+
+-spec t_string() -> erl_type().
+
+t_string() ->
+ t_list(t_char()).
+
+-spec t_is_string(erl_type()) -> boolean().
+
+t_is_string(X) ->
+ t_is_list(X) andalso t_is_char(t_list_elements(X)).
+
+-spec t_maybe_improper_list() -> erl_type().
+
+t_maybe_improper_list() ->
+ ?list(?any, ?any, ?unknown_qual).
+
+%% Should only be used if you know what you are doing. See t_cons/2
+-spec t_maybe_improper_list(erl_type(), erl_type()) -> erl_type().
+
+t_maybe_improper_list(_Content, ?unit) -> ?none;
+t_maybe_improper_list(?unit, _Termination) -> ?none;
+t_maybe_improper_list(Content, Termination) ->
+ %% Safety check: would be nice to have but does not work with remote types
+ %% true = t_is_subtype(t_nil(), Termination),
+ ?list(Content, Termination, ?unknown_qual).
+
+-spec t_is_maybe_improper_list(erl_type()) -> boolean().
+
+t_is_maybe_improper_list(Type) ->
+ t_is_maybe_improper_list(Type, 'universe').
+
+-spec t_is_maybe_improper_list(erl_type(), opaques()) -> boolean().
+
+t_is_maybe_improper_list(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun is_maybe_improper_list/1).
+
+is_maybe_improper_list(?list(_, _, _)) -> true;
+is_maybe_improper_list(?nil) -> true;
+is_maybe_improper_list(_) -> false.
+
+%% %% Should only be used if you know what you are doing. See t_cons/2
+%% -spec t_improper_list(erl_type(), erl_type()) -> erl_type().
+%%
+%% t_improper_list(?unit, _Termination) -> ?none;
+%% t_improper_list(_Content, ?unit) -> ?none;
+%% t_improper_list(Content, Termination) ->
+%% %% Safety check: would be nice to have but does not work with remote types
+%% %% false = t_is_subtype(t_nil(), Termination),
+%% ?list(Content, Termination, ?any).
+
+-spec lift_list_to_pos_empty(erl_type(), opaques()) -> erl_type().
+
+lift_list_to_pos_empty(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun lift_list_to_pos_empty/1).
+
+-spec lift_list_to_pos_empty(erl_type()) -> erl_type().
+
+lift_list_to_pos_empty(?nil) -> ?nil;
+lift_list_to_pos_empty(?list(Content, Termination, _)) ->
+ ?list(Content, Termination, ?unknown_qual).
+
+%%-----------------------------------------------------------------------------
+%% Maps
+%%
+%% Representation:
+%% ?map(Pairs, DefaultKey, DefaultValue)
+%%
+%% Pairs is a sorted dictionary of types with a mandatoriness tag on each pair
+%% (t_map_dict()). DefaultKey and DefaultValue are plain types.
+%%
+%% A map M belongs to this type iff
+%% For each pair {KT, mandatory, VT} in Pairs, there exists a pair {K, V} in M
+%% such that K \in KT and V \in VT.
+%% For each pair {KT, optional, VT} in Pairs, either there exists no key K in
+%% M s.t. K in KT, or there exists a pair {K, V} in M such that K \in KT and
+%% V \in VT.
+%% For each remaining pair {K, V} in M (where remaining means that there is no
+%% key KT in Pairs s.t. K \in KT), K \in DefaultKey and V \in DefaultValue.
+%%
+%% Invariants:
+%% * The keys in Pairs are singleton types.
+%% * The values of Pairs must not be unit, and may only be none if the
+%% mandatoriness tag is 'optional'.
+%% * Pairs must contain no optional pair {K,V} s.t. K is a subtype of
+%% DefaultKey and V is equal to DefaultValue.
+%% * DefaultKey must be the empty type iff DefaultValue is the empty type.
+%% * DefaultKey must not be a singleton type.
+%% * For every key K in Pairs, DefaultKey - K must not be representable; i.e.
+%% t_subtract(DefaultKey, K) must return DefaultKey.
+%% * For every pair {K, 'optional', ?none} in Pairs, K must be a subtype of
+%% DefaultKey.
+%% * Pairs must be sorted and not contain any duplicate keys.
+%%
+%% These invariants ensure that equal map types are represented by equal terms.
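+%%
+%% Illustrative sketch (added for exposition; the concrete term below is an
+%% assumption about how the constructors compose, not normative): a type such
+%% as #{id := integer(), atom() => binary()} is expected to be represented
+%% along the lines of
+%% ?map([{t_atom(id), mandatory, t_integer()}], t_atom(), t_binary())
+%% i.e. the singleton key 'id' lives in Pairs, while the non-singleton
+%% association atom() => binary() ends up in DefaultKey/DefaultValue.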
+
+-define(mand, mandatory).
+-define(opt, optional).
+
+-type t_map_mandatoriness() :: ?mand | ?opt.
+-type t_map_pair() :: {erl_type(), t_map_mandatoriness(), erl_type()}.
+-type t_map_dict() :: [t_map_pair()].
+
+-spec t_map() -> erl_type().
+
+t_map() ->
+ t_map([], t_any(), t_any()).
+
+-spec t_map([{erl_type(), erl_type()}]) -> erl_type().
+
+t_map(L) ->
+ lists:foldl(fun t_map_put/2, t_map(), L).
+
+-spec t_map(t_map_dict(), erl_type(), erl_type()) -> erl_type().
+
+t_map(Pairs0, DefK0, DefV0) ->
+ DefK1 = lists:foldl(fun({K,_,_},Acc)->t_subtract(Acc,K)end, DefK0, Pairs0),
+ {DefK2, DefV1} =
+ case t_is_none_or_unit(DefK1) orelse t_is_none_or_unit(DefV0) of
+ true -> {?none, ?none};
+ false -> {DefK1, DefV0}
+ end,
+ {Pairs1, DefK, DefV}
+ = case is_singleton_type(DefK2) of
+ true -> {mapdict_insert({DefK2, ?opt, DefV1}, Pairs0), ?none, ?none};
+ false -> {Pairs0, DefK2, DefV1}
+ end,
+ Pairs = normalise_map_optionals(Pairs1, DefK, DefV),
+ %% Validate the invariants of the map representation.
+ %% Since we had to iterate over the arguments to normalise them anyway,
+ %% we might as well save ourselves some future pain and do this even
+ %% without -define(DEBUG, true).
+ try
+ validate_map_elements(Pairs)
+ catch error:badarg -> error(badarg, [Pairs0,DefK0,DefV0]);
+ error:{badarg, E} -> error({badarg, E}, [Pairs0,DefK0,DefV0])
+ end,
+ ?map(Pairs, DefK, DefV).
+
+normalise_map_optionals([], _, _) -> [];
+normalise_map_optionals([E={K,?opt,?none}|T], DefK, DefV) ->
+ Diff = t_subtract(DefK, K),
+ case t_is_subtype(K, DefK) andalso DefK =:= Diff of
+ true -> [E|normalise_map_optionals(T, DefK, DefV)];
+ false -> normalise_map_optionals(T, Diff, DefV)
+ end;
+normalise_map_optionals([E={K,?opt,V}|T], DefK, DefV) ->
+ case t_is_equal(V, DefV) andalso t_is_subtype(K, DefK) of
+ true -> normalise_map_optionals(T, DefK, DefV);
+ false -> [E|normalise_map_optionals(T, DefK, DefV)]
+ end;
+normalise_map_optionals([E|T], DefK, DefV) ->
+ [E|normalise_map_optionals(T, DefK, DefV)].
+
+validate_map_elements([{_,?mand,?none}|_]) -> error({badarg, none_in_mand});
+validate_map_elements([{K1,_,_}|Rest=[{K2,_,_}|_]]) ->
+ case is_singleton_type(K1) andalso K1 < K2 of
+ false -> error(badarg);
+ true -> validate_map_elements(Rest)
+ end;
+validate_map_elements([{K,_,_}]) ->
+ case is_singleton_type(K) of
+ false -> error(badarg);
+ true -> true
+ end;
+validate_map_elements([]) -> true.
+
+-spec t_is_map(erl_type()) -> boolean().
+
+t_is_map(Type) ->
+ t_is_map(Type, 'universe').
+
+-spec t_is_map(erl_type(), opaques()) -> boolean().
+
+t_is_map(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun is_map1/1).
+
+is_map1(?map(_, _, _)) -> true;
+is_map1(_) -> false.
+
+-spec t_map_entries(erl_type()) -> t_map_dict().
+
+t_map_entries(M) ->
+ t_map_entries(M, 'universe').
+
+-spec t_map_entries(erl_type(), opaques()) -> t_map_dict().
+
+t_map_entries(M, Opaques) ->
+ do_opaque(M, Opaques, fun map_entries/1).
+
+map_entries(?map(Pairs,_,_)) ->
+ Pairs.
+
+-spec t_map_def_key(erl_type()) -> erl_type().
+
+t_map_def_key(M) ->
+ t_map_def_key(M, 'universe').
+
+-spec t_map_def_key(erl_type(), opaques()) -> erl_type().
+
+t_map_def_key(M, Opaques) ->
+ do_opaque(M, Opaques, fun map_def_key/1).
+
+map_def_key(?map(_,DefK,_)) ->
+ DefK.
+
+-spec t_map_def_val(erl_type()) -> erl_type().
+
+t_map_def_val(M) ->
+ t_map_def_val(M, 'universe').
+
+-spec t_map_def_val(erl_type(), opaques()) -> erl_type().
+
+t_map_def_val(M, Opaques) ->
+ do_opaque(M, Opaques, fun map_def_val/1).
+
+map_def_val(?map(_,_,DefV)) ->
+ DefV.
+
+-spec mapdict_store(t_map_pair(), t_map_dict()) -> t_map_dict().
+
+mapdict_store(E={K,_,_}, [{K,_,_}|T]) -> [E|T];
+mapdict_store(E1={K1,_,_}, [E2={K2,_,_}|T]) when K1 > K2 ->
+ [E2|mapdict_store(E1, T)];
+mapdict_store(E={_,_,_}, T) -> [E|T].
+
+-spec mapdict_insert(t_map_pair(), t_map_dict()) -> t_map_dict().
+
+mapdict_insert(E={K,_,_}, D=[{K,_,_}|_]) -> error(badarg, [E, D]);
+mapdict_insert(E1={K1,_,_}, [E2={K2,_,_}|T]) when K1 > K2 ->
+ [E2|mapdict_insert(E1, T)];
+mapdict_insert(E={_,_,_}, T) -> [E|T].
+
+%% Merges the pairs of two maps together. Missing pairs become (?opt, DefV) or
+%% (?opt, ?none), depending on whether K \in DefK.
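+%% Illustrative sketch (exposition only): when merging the types #{a := 1}
+%% and #{b := 2}, the callback is invoked once for 'a' and once for 'b';
+%% for 'a' the second map contributes (?opt, ?none), as 'a' is not covered
+%% by its DefaultKey, and symmetrically for 'b'.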
+-spec map_pairwise_merge(fun((erl_type(),
+ t_map_mandatoriness(), erl_type(),
+ t_map_mandatoriness(), erl_type())
+ -> t_map_pair() | false),
+ erl_type(), erl_type()) -> t_map_dict().
+map_pairwise_merge(F, ?map(APairs, ADefK, ADefV),
+ ?map(BPairs, BDefK, BDefV)) ->
+ map_pairwise_merge(F, APairs, ADefK, ADefV, BPairs, BDefK, BDefV).
+
+map_pairwise_merge(_, [], _, _, [], _, _) -> [];
+map_pairwise_merge(F, As0, ADefK, ADefV, Bs0, BDefK, BDefV) ->
+ {K1, AMNess1, AV1, As1, BMNess1, BV1, Bs1} =
+ case {As0, Bs0} of
+ {[{K,AMNess,AV}|As], [{K, BMNess,BV}|Bs]} ->
+ {K, AMNess, AV, As, BMNess, BV, Bs};
+ {[{K,AMNess,AV}|As], [{BK,_, _ }|_]=Bs} when K < BK ->
+ {K, AMNess, AV, As, ?opt, mapmerge_otherv(K, BDefK, BDefV), Bs};
+ {As, [{K, BMNess,BV}|Bs]} ->
+ {K, ?opt, mapmerge_otherv(K, ADefK, ADefV), As, BMNess, BV, Bs};
+ {[{K,AMNess,AV}|As], []=Bs} ->
+ {K, AMNess, AV, As, ?opt, mapmerge_otherv(K, BDefK, BDefV), Bs}
+ end,
+ MK = K1, %% Rename to make clear that we are matching below
+ case F(K1, AMNess1, AV1, BMNess1, BV1) of
+ false -> map_pairwise_merge(F,As1,ADefK,ADefV,Bs1,BDefK,BDefV);
+ {MK,_,_}=M -> [M|map_pairwise_merge(F,As1,ADefK,ADefV,Bs1,BDefK,BDefV)]
+ end.
+
+%% Folds over the pairs in two maps simultaneously in reverse key order. Missing
+%% pairs become (?opt, DefV) or (?opt, ?none), depending on whether K \in DefK.
+-spec map_pairwise_merge_foldr(fun((erl_type(),
+ t_map_mandatoriness(), erl_type(),
+ t_map_mandatoriness(), erl_type(),
+ Acc) -> Acc),
+ Acc, erl_type(), erl_type()) -> Acc.
+
+map_pairwise_merge_foldr(F, AccIn, ?map(APairs, ADefK, ADefV),
+ ?map(BPairs, BDefK, BDefV)) ->
+ map_pairwise_merge_foldr(F, AccIn, APairs, ADefK, ADefV, BPairs, BDefK, BDefV).
+
+map_pairwise_merge_foldr(_, Acc, [], _, _, [], _, _) -> Acc;
+map_pairwise_merge_foldr(F, AccIn, As0, ADefK, ADefV, Bs0, BDefK, BDefV) ->
+ {K1, AMNess1, AV1, As1, BMNess1, BV1, Bs1} =
+ case {As0, Bs0} of
+ {[{K,AMNess,AV}|As], [{K,BMNess,BV}|Bs]} ->
+ {K, AMNess, AV, As, BMNess, BV, Bs};
+ {[{K,AMNess,AV}|As], [{BK,_, _ }|_]=Bs} when K < BK ->
+ {K, AMNess, AV, As, ?opt, mapmerge_otherv(K, BDefK, BDefV), Bs};
+ {As, [{K,BMNess,BV}|Bs]} ->
+ {K, ?opt, mapmerge_otherv(K, ADefK, ADefV), As, BMNess, BV, Bs};
+ {[{K,AMNess,AV}|As], []=Bs} ->
+ {K, AMNess, AV, As, ?opt, mapmerge_otherv(K, BDefK, BDefV), Bs}
+ end,
+ F(K1, AMNess1, AV1, BMNess1, BV1,
+ map_pairwise_merge_foldr(F,AccIn,As1,ADefK,ADefV,Bs1,BDefK,BDefV)).
+
+%% By observing that a missing pair in a map is equivalent to an optional pair,
+%% with ?none or DefV value, depending on whether K \in DefK, we can simplify
+%% merging by denormalising the map pairs temporarily, removing all 'false'
+%% cases, at the cost of the creation of more tuples:
+mapmerge_otherv(K, ODefK, ODefV) ->
+ case t_inf(K, ODefK) of
+ ?none -> ?none;
+ _KOrOpaque -> ODefV
+ end.
+
+-spec t_map_put({erl_type(), erl_type()}, erl_type()) -> erl_type().
+
+t_map_put(KV, Map) ->
+ t_map_put(KV, Map, 'universe').
+
+-spec t_map_put({erl_type(), erl_type()}, erl_type(), opaques()) -> erl_type().
+
+t_map_put(KV, Map, Opaques) ->
+ do_opaque(Map, Opaques, fun(UM) -> map_put(KV, UM, Opaques) end).
+
+%% Key and Value are *not* unopaqued, but the map is
+map_put(_, ?none, _) -> ?none;
+map_put({Key, Value}, ?map(Pairs,DefK,DefV), Opaques) ->
+ case t_is_none_or_unit(Key) orelse t_is_none_or_unit(Value) of
+ true -> ?none;
+ false ->
+ case is_singleton_type(Key) of
+ true ->
+ t_map(mapdict_store({Key, ?mand, Value}, Pairs), DefK, DefV);
+ false ->
+ t_map([{K, MNess, case t_is_none(t_inf(K, Key, Opaques)) of
+ true -> V;
+ false -> t_sup(V, Value)
+ end} || {K, MNess, V} <- Pairs],
+ t_sup(DefK, Key),
+ t_sup(DefV, Value))
+ end
+ end.
+
+-spec t_map_update({erl_type(), erl_type()}, erl_type()) -> erl_type().
+
+t_map_update(KV, Map) ->
+ t_map_update(KV, Map, 'universe').
+
+-spec t_map_update({erl_type(), erl_type()}, erl_type(), opaques()) -> erl_type().
+
+t_map_update(_, ?none, _) -> ?none;
+t_map_update(KV={Key, _}, M, Opaques) ->
+ case t_is_subtype(t_atom('true'), t_map_is_key(Key, M, Opaques)) of
+ false -> ?none;
+ true -> t_map_put(KV, M, Opaques)
+ end.
+
+-spec t_map_get(erl_type(), erl_type()) -> erl_type().
+
+t_map_get(Key, Map) ->
+ t_map_get(Key, Map, 'universe').
+
+-spec t_map_get(erl_type(), erl_type(), opaques()) -> erl_type().
+
+t_map_get(Key, Map, Opaques) ->
+ do_opaque(Map, Opaques,
+ fun(UM) ->
+ do_opaque(Key, Opaques, fun(UK) -> map_get(UK, UM) end)
+ end).
+
+map_get(_, ?none) -> ?none;
+map_get(Key, ?map(Pairs, DefK, DefV)) ->
+ DefRes =
+ case t_do_overlap(DefK, Key) of
+ false -> t_none();
+ true -> DefV
+ end,
+ case is_singleton_type(Key) of
+ false ->
+ lists:foldl(fun({K, _, V}, Res) ->
+ case t_do_overlap(K, Key) of
+ false -> Res;
+ true -> t_sup(Res, V)
+ end
+ end, DefRes, Pairs);
+ true ->
+ case lists:keyfind(Key, 1, Pairs) of
+ false -> DefRes;
+ {_, _, ValType} -> ValType
+ end
+ end.
+
+-spec t_map_is_key(erl_type(), erl_type()) -> erl_type().
+
+t_map_is_key(Key, Map) ->
+ t_map_is_key(Key, Map, 'universe').
+
+-spec t_map_is_key(erl_type(), erl_type(), opaques()) -> erl_type().
+
+t_map_is_key(Key, Map, Opaques) ->
+ do_opaque(Map, Opaques,
+ fun(UM) ->
+ do_opaque(Key, Opaques, fun(UK) -> map_is_key(UK, UM) end)
+ end).
+
+map_is_key(_, ?none) -> ?none;
+map_is_key(Key, ?map(Pairs, DefK, _DefV)) ->
+ case is_singleton_type(Key) of
+ true ->
+ case lists:keyfind(Key, 1, Pairs) of
+ {Key, ?mand, _} -> t_atom(true);
+ {Key, ?opt, ?none} -> t_atom(false);
+ {Key, ?opt, _} -> t_boolean();
+ false ->
+ case t_do_overlap(DefK, Key) of
+ false -> t_atom(false);
+ true -> t_boolean()
+ end
+ end;
+ false ->
+ case t_do_overlap(DefK, Key)
+ orelse lists:any(fun({_,_,?none}) -> false;
+ ({K,_,_}) -> t_do_overlap(K, Key)
+ end, Pairs)
+ of
+ true -> t_boolean();
+ false -> t_atom(false)
+ end
+ end.
+
+%%-----------------------------------------------------------------------------
+%% Tuples
+%%
+
+-spec t_tuple() -> erl_type().
+
+t_tuple() ->
+ ?tuple(?any, ?any, ?any).
+
+-spec t_tuple(non_neg_integer() | [erl_type()]) -> erl_type().
+
+t_tuple(N) when is_integer(N), N > ?MAX_TUPLE_SIZE ->
+ t_tuple();
+t_tuple(N) when is_integer(N) ->
+ ?tuple(lists:duplicate(N, ?any), N, ?any);
+t_tuple(List) ->
+ case any_none_or_unit(List) of
+ true -> t_none();
+ false ->
+ Arity = length(List),
+ case get_tuple_tags(List) of
+ [Tag] -> ?tuple(List, Arity, Tag); %% Tag can also be ?any here
+ TagList ->
+ SortedTagList = lists:sort(TagList),
+ Tuples = [?tuple([T|tl(List)], Arity, T) || T <- SortedTagList],
+ ?tuple_set([{Arity, Tuples}])
+ end
+ end.
+
+-spec get_tuple_tags([erl_type()]) -> [erl_type(),...].
+
+get_tuple_tags([Tag|_]) ->
+ do_opaque(Tag, 'universe', fun tuple_tags/1);
+get_tuple_tags(_) -> [?any].
+
+tuple_tags(?atom(?any)) -> [?any];
+tuple_tags(?atom(Set)) ->
+ case set_size(Set) > ?TUPLE_TAG_LIMIT of
+ true -> [?any];
+ false -> [t_atom(A) || A <- set_to_list(Set)]
+ end;
+tuple_tags(_) -> [?any].
+
+%% to be used for a tuple with known types for its arguments (not ?any)
+-spec t_tuple_args(erl_type()) -> [erl_type()].
+
+t_tuple_args(Type) ->
+ t_tuple_args(Type, 'universe').
+
+%% to be used for a tuple with known types for its arguments (not ?any)
+-spec t_tuple_args(erl_type(), opaques()) -> [erl_type()].
+
+t_tuple_args(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun tuple_args/1).
+
+tuple_args(?tuple(Args, _, _)) when is_list(Args) -> Args.
+
+%% to be used for a tuple with a known size (not ?any)
+-spec t_tuple_size(erl_type()) -> non_neg_integer().
+
+t_tuple_size(Type) ->
+ t_tuple_size(Type, 'universe').
+
+%% to be used for a tuple with a known size (not ?any)
+-spec t_tuple_size(erl_type(), opaques()) -> non_neg_integer().
+
+t_tuple_size(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun tuple_size1/1).
+
+tuple_size1(?tuple(_, Size, _)) when is_integer(Size) -> Size.
+
+-spec t_tuple_sizes(erl_type()) -> 'unknown' | [non_neg_integer(),...].
+
+t_tuple_sizes(Type) ->
+ do_opaque(Type, 'universe', fun tuple_sizes/1).
+
+tuple_sizes(?tuple(?any, ?any, ?any)) -> unknown;
+tuple_sizes(?tuple(_, Size, _)) when is_integer(Size) -> [Size];
+tuple_sizes(?tuple_set(List)) -> [Size || {Size, _} <- List].
+
+-spec t_tuple_subtypes(erl_type(), opaques()) ->
+ 'unknown' | [erl_type(),...].
+
+t_tuple_subtypes(Type, Opaques) ->
+ Fun = fun(?tuple_set(List)) ->
+ t_tuple_subtypes_tuple_list(List, Opaques);
+ (?opaque(_)) -> unknown;
+ (T) -> t_tuple_subtypes(T)
+ end,
+ do_opaque(Type, Opaques, Fun).
+
+t_tuple_subtypes_tuple_list(List, Opaques) ->
+ lists:append([t_tuple_subtypes_list(Tuples, Opaques) ||
+ {_Size, Tuples} <- List]).
+
+t_tuple_subtypes_list(List, Opaques) ->
+ ListOfLists = [t_tuple_subtypes(E, Opaques) || E <- List, E =/= ?none],
+ lists:append([L || L <- ListOfLists, L =/= 'unknown']).
+
+-spec t_tuple_subtypes(erl_type()) -> 'unknown' | [erl_type(),...].
+
+%% XXX. Not the same as t_tuple_subtypes(T, 'universe')...
+t_tuple_subtypes(?tuple(?any, ?any, ?any)) -> unknown;
+t_tuple_subtypes(?tuple(_, _, _) = T) -> [T];
+t_tuple_subtypes(?tuple_set(List)) ->
+ lists:append([Tuples || {_Size, Tuples} <- List]).
+
+-spec t_is_tuple(erl_type()) -> boolean().
+
+t_is_tuple(Type) ->
+ t_is_tuple(Type, 'universe').
+
+-spec t_is_tuple(erl_type(), opaques()) -> boolean().
+
+t_is_tuple(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun is_tuple1/1).
+
+is_tuple1(?tuple(_, _, _)) -> true;
+is_tuple1(?tuple_set(_)) -> true;
+is_tuple1(_) -> false.
+
+%%-----------------------------------------------------------------------------
+%% Non-primitive types, including some handy syntactic sugar types
+%%
+
+-spec t_bitstrlist() -> erl_type().
+
+t_bitstrlist() ->
+ t_iolist(1, t_bitstr()).
+
+-spec t_arity() -> erl_type().
+
+t_arity() ->
+ t_from_range(0, 255). % was t_byte().
+
+-spec t_pos_integer() -> erl_type().
+
+t_pos_integer() ->
+ t_from_range(1, pos_inf).
+
+-spec t_non_neg_integer() -> erl_type().
+
+t_non_neg_integer() ->
+ t_from_range(0, pos_inf).
+
+-spec t_is_non_neg_integer(erl_type()) -> boolean().
+
+t_is_non_neg_integer(?integer(_) = T) ->
+ t_is_subtype(T, t_non_neg_integer());
+t_is_non_neg_integer(_) -> false.
+
+-spec t_neg_integer() -> erl_type().
+
+t_neg_integer() ->
+ t_from_range(neg_inf, -1).
+
+-spec t_fixnum() -> erl_type().
+
+t_fixnum() ->
+ t_integer(). % Gross over-approximation
+
+-spec t_pos_fixnum() -> erl_type().
+
+t_pos_fixnum() ->
+ t_pos_integer(). % Gross over-approximation
+
+-spec t_non_neg_fixnum() -> erl_type().
+
+t_non_neg_fixnum() ->
+ t_non_neg_integer(). % Gross over-approximation
+
+-spec t_mfa() -> erl_type().
+
+t_mfa() ->
+ t_tuple([t_atom(), t_atom(), t_arity()]).
+
+-spec t_module() -> erl_type().
+
+t_module() ->
+ t_atom().
+
+-spec t_node() -> erl_type().
+
+t_node() ->
+ t_atom().
+
+-spec t_iodata() -> erl_type().
+
+t_iodata() ->
+ t_sup(t_iolist(), t_binary()).
+
+-spec t_iolist() -> erl_type().
+
+t_iolist() ->
+ t_iolist(1, t_binary()).
+
+%% Added a second argument which currently is t_binary() | t_bitstr()
+-spec t_iolist(non_neg_integer(), erl_type()) -> erl_type().
+
+t_iolist(N, T) when N > 0 ->
+ t_maybe_improper_list(t_sup([t_iolist(N-1, T), T, t_byte()]),
+ t_sup(T, t_nil()));
+t_iolist(0, T) ->
+ t_maybe_improper_list(t_any(), t_sup(T, t_nil())).
+
+-spec t_timeout() -> erl_type().
+
+t_timeout() ->
+ t_sup(t_non_neg_integer(), t_atom('infinity')).
+
+%%------------------------------------
+
+%% ?none is allowed in products. A product of size 1 is not a product.
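+%% For example, t_product([t_atom()]) is simply t_atom(); only lists of two
+%% or more types yield a ?product term.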
+
+-spec t_product([erl_type()]) -> erl_type().
+
+t_product([T]) -> T;
+t_product(Types) when is_list(Types) ->
+ ?product(Types).
+
+%% This function is intended to be the inverse of the one above.
+%% It should NOT be used with ?any, ?none or ?unit as input argument.
+
+-spec t_to_tlist(erl_type()) -> [erl_type()].
+
+t_to_tlist(?product(Types)) -> Types;
+t_to_tlist(T) when T =/= ?any, T =/= ?none, T =/= ?unit -> [T].
+
+%%------------------------------------
+
+-spec t_var(atom() | integer()) -> erl_type().
+
+t_var(Atom) when is_atom(Atom) -> ?var(Atom);
+t_var(Int) when is_integer(Int) -> ?var(Int).
+
+-spec t_is_var(erl_type()) -> boolean().
+
+t_is_var(?var(_)) -> true;
+t_is_var(_) -> false.
+
+-spec t_var_name(erl_type()) -> atom() | integer().
+
+t_var_name(?var(Id)) -> Id.
+
+-spec t_has_var(erl_type()) -> boolean().
+
+t_has_var(?var(_)) -> true;
+t_has_var(?function(Domain, Range)) ->
+ t_has_var(Domain) orelse t_has_var(Range);
+t_has_var(?list(Contents, Termination, _)) ->
+ t_has_var(Contents) orelse t_has_var(Termination);
+t_has_var(?product(Types)) -> t_has_var_list(Types);
+t_has_var(?tuple(?any, ?any, ?any)) -> false;
+t_has_var(?tuple(Elements, _, _)) ->
+ t_has_var_list(Elements);
+t_has_var(?tuple_set(_) = T) ->
+ t_has_var_list(t_tuple_subtypes(T));
+t_has_var(?map(_, DefK, _)= Map) ->
+ t_has_var_list(map_all_values(Map)) orelse
+ t_has_var(DefK);
+t_has_var(?opaque(Set)) ->
+ %% Assume variables in 'args' are also present in 'struct'.
+ t_has_var_list([O#opaque.struct || O <- set_to_list(Set)]);
+t_has_var(?union(List)) ->
+ t_has_var_list(List);
+t_has_var(_) -> false.
+
+-spec t_has_var_list([erl_type()]) -> boolean().
+
+t_has_var_list([T|Ts]) ->
+ t_has_var(T) orelse t_has_var_list(Ts);
+t_has_var_list([]) -> false.
+
+-spec t_collect_vars(erl_type()) -> [erl_type()].
+
+t_collect_vars(T) ->
+ t_collect_vars(T, []).
+
+-spec t_collect_vars(erl_type(), [erl_type()]) -> [erl_type()].
+
+t_collect_vars(?var(_) = Var, Acc) ->
+ ordsets:add_element(Var, Acc);
+t_collect_vars(?function(Domain, Range), Acc) ->
+ ordsets:union(t_collect_vars(Domain, Acc), t_collect_vars(Range, []));
+t_collect_vars(?list(Contents, Termination, _), Acc) ->
+ ordsets:union(t_collect_vars(Contents, Acc), t_collect_vars(Termination, []));
+t_collect_vars(?product(Types), Acc) ->
+ t_collect_vars_list(Types, Acc);
+t_collect_vars(?tuple(?any, ?any, ?any), Acc) ->
+ Acc;
+t_collect_vars(?tuple(Types, _, _), Acc) ->
+ t_collect_vars_list(Types, Acc);
+t_collect_vars(?tuple_set(_) = TS, Acc) ->
+ t_collect_vars_list(t_tuple_subtypes(TS), Acc);
+t_collect_vars(?map(_, DefK, _) = Map, Acc0) ->
+ Acc = t_collect_vars_list(map_all_values(Map), Acc0),
+ t_collect_vars(DefK, Acc);
+t_collect_vars(?opaque(Set), Acc) ->
+ %% Assume variables in 'args' are also present in 'struct'.
+ t_collect_vars_list([O#opaque.struct || O <- set_to_list(Set)], Acc);
+t_collect_vars(?union(List), Acc) ->
+ t_collect_vars_list(List, Acc);
+t_collect_vars(_, Acc) ->
+ Acc.
+
+t_collect_vars_list([T|Ts], Acc0) ->
+ Acc = t_collect_vars(T, Acc0),
+ t_collect_vars_list(Ts, Acc);
+t_collect_vars_list([], Acc) -> Acc.
+
+%%=============================================================================
+%%
+%% Type construction from Erlang terms.
+%%
+%%=============================================================================
+
+%%-----------------------------------------------------------------------------
+%% Make a type from a term. No type depth is enforced.
+%%
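+%% Illustrative examples (exposition only): t_from_term({ok, 42}) is expected
+%% to be t_tuple([t_atom(ok), t_integer(42)]), and t_from_term("ab") a
+%% nonempty list of the integers $a and $b terminated by []. No widening is
+%% applied, however deep the term is.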
+
+-spec t_from_term(term()) -> erl_type().
+
+t_from_term([H|T]) -> t_cons(t_from_term(H), t_from_term(T));
+t_from_term([]) -> t_nil();
+t_from_term(T) when is_atom(T) -> t_atom(T);
+t_from_term(T) when is_bitstring(T) -> t_bitstr(0, erlang:bit_size(T));
+t_from_term(T) when is_float(T) -> t_float();
+t_from_term(T) when is_function(T) ->
+ {arity, Arity} = erlang:fun_info(T, arity),
+ t_fun(Arity, t_any());
+t_from_term(T) when is_integer(T) -> t_integer(T);
+t_from_term(T) when is_map(T) ->
+ Pairs = [{t_from_term(K), ?mand, t_from_term(V)}
+ || {K, V} <- maps:to_list(T)],
+ {Stons, Rest} = lists:partition(fun({K,_,_}) -> is_singleton_type(K) end,
+ Pairs),
+ {DefK, DefV}
+ = lists:foldl(fun({K,_,V},{AK,AV}) -> {t_sup(K,AK), t_sup(V,AV)} end,
+ {t_none(), t_none()}, Rest),
+ t_map(lists:keysort(1, Stons), DefK, DefV);
+t_from_term(T) when is_pid(T) -> t_pid();
+t_from_term(T) when is_port(T) -> t_port();
+t_from_term(T) when is_reference(T) -> t_reference();
+t_from_term(T) when is_tuple(T) ->
+ t_tuple([t_from_term(E) || E <- tuple_to_list(T)]).
+
+%%-----------------------------------------------------------------------------
+%% Integer types from a range.
+%%-----------------------------------------------------------------------------
+
+%%-define(USE_UNSAFE_RANGES, true).
+
+-spec t_from_range(rng_elem(), rng_elem()) -> erl_type().
+
+-ifdef(USE_UNSAFE_RANGES).
+
+t_from_range(X, Y) ->
+ t_from_range_unsafe(X, Y).
+
+-else.
+
+t_from_range(neg_inf, pos_inf) -> t_integer();
+t_from_range(neg_inf, Y) when is_integer(Y), Y < 0 -> ?integer_neg;
+t_from_range(neg_inf, Y) when is_integer(Y), Y >= 0 -> t_integer();
+t_from_range(X, pos_inf) when is_integer(X), X >= 1 -> ?integer_pos;
+t_from_range(X, pos_inf) when is_integer(X), X >= 0 -> ?integer_non_neg;
+t_from_range(X, pos_inf) when is_integer(X), X < 0 -> t_integer();
+t_from_range(X, Y) when is_integer(X), is_integer(Y), X > Y -> t_none();
+t_from_range(X, Y) when is_integer(X), is_integer(Y) ->
+ case ((Y - X) < ?SET_LIMIT) of
+ true -> t_integers(lists:seq(X, Y));
+ false ->
+ case X >= 0 of
+ false ->
+ if Y < 0 -> ?integer_neg;
+ true -> t_integer()
+ end;
+ true ->
+ if Y =< ?MAX_BYTE, X >= 1 -> ?int_range(1, ?MAX_BYTE);
+ Y =< ?MAX_BYTE -> t_byte();
+ Y =< ?MAX_CHAR, X >= 1 -> ?int_range(1, ?MAX_CHAR);
+ Y =< ?MAX_CHAR -> t_char();
+ X >= 1 -> ?integer_pos;
+ X >= 0 -> ?integer_non_neg
+ end
+ end
+ end;
+t_from_range(pos_inf, neg_inf) -> t_none().
+
+-endif.
+
+-spec t_from_range_unsafe(rng_elem(), rng_elem()) -> erl_type().
+
+t_from_range_unsafe(neg_inf, pos_inf) -> t_integer();
+t_from_range_unsafe(neg_inf, Y) -> ?int_range(neg_inf, Y);
+t_from_range_unsafe(X, pos_inf) -> ?int_range(X, pos_inf);
+t_from_range_unsafe(X, Y) when is_integer(X), is_integer(Y), X =< Y ->
+ if (Y - X) < ?SET_LIMIT -> t_integers(lists:seq(X, Y));
+ true -> ?int_range(X, Y)
+ end;
+t_from_range_unsafe(X, Y) when is_integer(X), is_integer(Y) -> t_none();
+t_from_range_unsafe(pos_inf, neg_inf) -> t_none().
+
+-spec t_is_fixnum(erl_type()) -> boolean().
+
+t_is_fixnum(?int_range(neg_inf, _)) -> false;
+t_is_fixnum(?int_range(_, pos_inf)) -> false;
+t_is_fixnum(?int_range(From, To)) ->
+ is_fixnum(From) andalso is_fixnum(To);
+t_is_fixnum(?int_set(Set)) ->
+ is_fixnum(set_min(Set)) andalso is_fixnum(set_max(Set));
+t_is_fixnum(_) -> false.
+
+-spec is_fixnum(integer()) -> boolean().
+
+is_fixnum(N) when is_integer(N) ->
+ Bits = ?BITS,
+ (N =< ((1 bsl (Bits - 1)) - 1)) andalso (N >= -(1 bsl (Bits - 1))).
+
+infinity_geq(pos_inf, _) -> true;
+infinity_geq(_, pos_inf) -> false;
+infinity_geq(_, neg_inf) -> true;
+infinity_geq(neg_inf, _) -> false;
+infinity_geq(A, B) -> A >= B.
+
+-spec t_is_bitwidth(erl_type()) -> boolean().
+
+t_is_bitwidth(?int_range(neg_inf, _)) -> false;
+t_is_bitwidth(?int_range(_, pos_inf)) -> false;
+t_is_bitwidth(?int_range(From, To)) ->
+ infinity_geq(From, 0) andalso infinity_geq(?BITS, To);
+t_is_bitwidth(?int_set(Set)) ->
+ infinity_geq(set_min(Set), 0) andalso infinity_geq(?BITS, set_max(Set));
+t_is_bitwidth(_) -> false.
+
+-spec number_min(erl_type()) -> rng_elem().
+
+number_min(Type) ->
+ number_min(Type, 'universe').
+
+-spec number_min(erl_type(), opaques()) -> rng_elem().
+
+number_min(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun number_min2/1).
+
+number_min2(?int_range(From, _)) -> From;
+number_min2(?int_set(Set)) -> set_min(Set);
+number_min2(?number(?any, _Tag)) -> neg_inf.
+
+-spec number_max(erl_type()) -> rng_elem().
+
+number_max(Type) ->
+ number_max(Type, 'universe').
+
+-spec number_max(erl_type(), opaques()) -> rng_elem().
+
+number_max(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun number_max2/1).
+
+number_max2(?int_range(_, To)) -> To;
+number_max2(?int_set(Set)) -> set_max(Set);
+number_max2(?number(?any, _Tag)) -> pos_inf.
+
+%% -spec int_range(rng_elem(), rng_elem()) -> erl_type().
+%%
+%% int_range(neg_inf, pos_inf) -> t_integer();
+%% int_range(neg_inf, To) -> ?int_range(neg_inf, To);
+%% int_range(From, pos_inf) -> ?int_range(From, pos_inf);
+%% int_range(From, To) when From =< To -> t_from_range(From, To);
+%% int_range(From, To) when To < From -> ?none.
+
+in_range(_, ?int_range(neg_inf, pos_inf)) -> true;
+in_range(X, ?int_range(From, pos_inf)) -> X >= From;
+in_range(X, ?int_range(neg_inf, To)) -> X =< To;
+in_range(X, ?int_range(From, To)) -> (X >= From) andalso (X =< To).
+
+-spec min(rng_elem(), rng_elem()) -> rng_elem().
+
+min(neg_inf, _) -> neg_inf;
+min(_, neg_inf) -> neg_inf;
+min(pos_inf, Y) -> Y;
+min(X, pos_inf) -> X;
+min(X, Y) when X =< Y -> X;
+min(_, Y) -> Y.
+
+-spec max(rng_elem(), rng_elem()) -> rng_elem().
+
+max(neg_inf, Y) -> Y;
+max(X, neg_inf) -> X;
+max(pos_inf, _) -> pos_inf;
+max(_, pos_inf) -> pos_inf;
+max(X, Y) when X =< Y -> Y;
+max(X, _) -> X.
+
+expand_range_from_set(Range = ?int_range(From, To), Set) ->
+ Min = min(set_min(Set), From),
+ Max = max(set_max(Set), To),
+ if From =:= Min, To =:= Max -> Range;
+ true -> t_from_range(Min, Max)
+ end.
+
+%%=============================================================================
+%%
+%% Lattice operations
+%%
+%%=============================================================================
+
+%%-----------------------------------------------------------------------------
+%% Supremum
+%%
+
+-spec t_sup([erl_type()]) -> erl_type().
+
+t_sup([]) -> ?none;
+t_sup(Ts) ->
+ case lists:any(fun is_any/1, Ts) of
+ true -> ?any;
+ false ->
+ t_sup1(Ts, [])
+ end.
+
+t_sup1([H1, H2|T], L) ->
+ t_sup1(T, [t_sup(H1, H2)|L]);
+t_sup1([T], []) -> subst_all_vars_to_any(T);
+t_sup1(Ts, L) ->
+ t_sup1(Ts++L, []).
+
+-spec t_sup(erl_type(), erl_type()) -> erl_type().
+
+t_sup(?any, _) -> ?any;
+t_sup(_, ?any) -> ?any;
+t_sup(?none, T) -> T;
+t_sup(T, ?none) -> T;
+t_sup(?unit, T) -> T;
+t_sup(T, ?unit) -> T;
+t_sup(T, T) -> subst_all_vars_to_any(T);
+t_sup(?var(_), _) -> ?any;
+t_sup(_, ?var(_)) -> ?any;
+t_sup(?atom(Set1), ?atom(Set2)) ->
+ ?atom(set_union(Set1, Set2));
+t_sup(?bitstr(U1, B1), ?bitstr(U2, B2)) ->
+ t_bitstr(gcd(gcd(U1, U2), abs(B1-B2)), lists:min([B1, B2]));
+t_sup(?function(Domain1, Range1), ?function(Domain2, Range2)) ->
+ %% The domain is either a product or any.
+ ?function(t_sup(Domain1, Domain2), t_sup(Range1, Range2));
+t_sup(?identifier(Set1), ?identifier(Set2)) ->
+ ?identifier(set_union(Set1, Set2));
+t_sup(?opaque(Set1), ?opaque(Set2)) ->
+ sup_opaque(set_to_list(ordsets:union(Set1, Set2)));
+%%Disallow unions with opaque types
+%%t_sup(T1=?opaque(_,_,_), T2) ->
+%% io:format("Debug: t_sup executed with args ~w and ~w~n",[T1, T2]), ?none;
+%%t_sup(T1, T2=?opaque(_,_,_)) ->
+%% io:format("Debug: t_sup executed with args ~w and ~w~n",[T1, T2]), ?none;
+t_sup(?matchstate(Pres1, Slots1), ?matchstate(Pres2, Slots2)) ->
+ ?matchstate(t_sup(Pres1, Pres2), t_sup(Slots1, Slots2));
+t_sup(?nil, ?nil) -> ?nil;
+t_sup(?nil, ?list(Contents, Termination, _)) ->
+ ?list(Contents, t_sup(?nil, Termination), ?unknown_qual);
+t_sup(?list(Contents, Termination, _), ?nil) ->
+ ?list(Contents, t_sup(?nil, Termination), ?unknown_qual);
+t_sup(?list(Contents1, Termination1, Size1),
+ ?list(Contents2, Termination2, Size2)) ->
+ NewSize =
+ case {Size1, Size2} of
+ {?unknown_qual, ?unknown_qual} -> ?unknown_qual;
+ {?unknown_qual, ?nonempty_qual} -> ?unknown_qual;
+ {?nonempty_qual, ?unknown_qual} -> ?unknown_qual;
+ {?nonempty_qual, ?nonempty_qual} -> ?nonempty_qual
+ end,
+ NewContents = t_sup(Contents1, Contents2),
+ NewTermination = t_sup(Termination1, Termination2),
+ TmpList = t_cons(NewContents, NewTermination),
+ case NewSize of
+ ?nonempty_qual -> TmpList;
+ ?unknown_qual ->
+ ?list(FinalContents, FinalTermination, _) = TmpList,
+ ?list(FinalContents, FinalTermination, ?unknown_qual)
+ end;
+t_sup(?number(_, _), ?number(?any, ?unknown_qual) = T) -> T;
+t_sup(?number(?any, ?unknown_qual) = T, ?number(_, _)) -> T;
+t_sup(?float, ?float) -> ?float;
+t_sup(?float, ?integer(_)) -> t_number();
+t_sup(?integer(_), ?float) -> t_number();
+t_sup(?integer(?any) = T, ?integer(_)) -> T;
+t_sup(?integer(_), ?integer(?any) = T) -> T;
+t_sup(?int_set(Set1), ?int_set(Set2)) ->
+ case set_union(Set1, Set2) of
+ ?any ->
+ t_from_range(min(set_min(Set1), set_min(Set2)),
+ max(set_max(Set1), set_max(Set2)));
+ Set -> ?int_set(Set)
+ end;
+t_sup(?int_range(From1, To1), ?int_range(From2, To2)) ->
+ t_from_range(min(From1, From2), max(To1, To2));
+t_sup(Range = ?int_range(_, _), ?int_set(Set)) ->
+ expand_range_from_set(Range, Set);
+t_sup(?int_set(Set), Range = ?int_range(_, _)) ->
+ expand_range_from_set(Range, Set);
+t_sup(?product(Types1), ?product(Types2)) ->
+ L1 = length(Types1),
+ L2 = length(Types2),
+ if L1 =:= L2 -> ?product(t_sup_lists(Types1, Types2));
+ true -> ?any
+ end;
+t_sup(?product(_), _) ->
+ ?any;
+t_sup(_, ?product(_)) ->
+ ?any;
+t_sup(?tuple(?any, ?any, ?any) = T, ?tuple(_, _, _)) -> T;
+t_sup(?tuple(_, _, _), ?tuple(?any, ?any, ?any) = T) -> T;
+t_sup(?tuple(?any, ?any, ?any) = T, ?tuple_set(_)) -> T;
+t_sup(?tuple_set(_), ?tuple(?any, ?any, ?any) = T) -> T;
+t_sup(?tuple(Elements1, Arity, Tag1) = T1,
+ ?tuple(Elements2, Arity, Tag2) = T2) ->
+ if Tag1 =:= Tag2 -> t_tuple(t_sup_lists(Elements1, Elements2));
+ Tag1 =:= ?any -> t_tuple(t_sup_lists(Elements1, Elements2));
+ Tag2 =:= ?any -> t_tuple(t_sup_lists(Elements1, Elements2));
+ Tag1 < Tag2 -> ?tuple_set([{Arity, [T1, T2]}]);
+ Tag1 > Tag2 -> ?tuple_set([{Arity, [T2, T1]}])
+ end;
+t_sup(?tuple(_, Arity1, _) = T1, ?tuple(_, Arity2, _) = T2) ->
+ sup_tuple_sets([{Arity1, [T1]}], [{Arity2, [T2]}]);
+t_sup(?tuple_set(List1), ?tuple_set(List2)) ->
+ sup_tuple_sets(List1, List2);
+t_sup(?tuple_set(List1), T2 = ?tuple(_, Arity, _)) ->
+ sup_tuple_sets(List1, [{Arity, [T2]}]);
+t_sup(?tuple(_, Arity, _) = T1, ?tuple_set(List2)) ->
+ sup_tuple_sets([{Arity, [T1]}], List2);
+t_sup(?map(_, ADefK, ADefV) = A, ?map(_, BDefK, BDefV) = B) ->
+ Pairs =
+ map_pairwise_merge(
+ fun(K, MNess, V1, MNess, V2) -> {K, MNess, t_sup(V1, V2)};
+ (K, _, V1, _, V2) -> {K, ?opt, t_sup(V1, V2)}
+ end, A, B),
+ t_map(Pairs, t_sup(ADefK, BDefK), t_sup(ADefV, BDefV));
+t_sup(T1, T2) ->
+ ?union(U1) = force_union(T1),
+ ?union(U2) = force_union(T2),
+ sup_union(U1, U2).
+
+sup_opaque([]) -> ?none;
+sup_opaque(List) ->
+ L = sup_opaq(List),
+ ?opaque(ordsets:from_list(L)).
+
+sup_opaq(L0) ->
+ L1 = [{{Mod,Name,Args}, T} ||
+ #opaque{mod = Mod, name = Name, args = Args}=T <- L0],
+ F = family(L1),
+ [supl(Ts) || {_, Ts} <- F].
+
+supl([O]) -> O;
+supl(Ts) -> supl(Ts, t_none()).
+
+supl([#opaque{struct = S}=O|L], S0) ->
+ S1 = t_sup(S, S0),
+ case L =:= [] of
+ true -> O#opaque{struct = S1};
+ false -> supl(L, S1)
+ end.
+
+-spec t_sup_lists([erl_type()], [erl_type()]) -> [erl_type()].
+
+t_sup_lists([T1|Left1], [T2|Left2]) ->
+ [t_sup(T1, T2)|t_sup_lists(Left1, Left2)];
+t_sup_lists([], []) ->
+ [].
+
+sup_tuple_sets(L1, L2) ->
+ TotalArities = ordsets:union([Arity || {Arity, _} <- L1],
+ [Arity || {Arity, _} <- L2]),
+ if length(TotalArities) > ?TUPLE_ARITY_LIMIT -> t_tuple();
+ true ->
+ case sup_tuple_sets(L1, L2, []) of
+ [{_Arity, [OneTuple = ?tuple(_, _, _)]}] -> OneTuple;
+ List -> ?tuple_set(List)
+ end
+ end.
+
+sup_tuple_sets([{Arity, Tuples1}|Left1], [{Arity, Tuples2}|Left2], Acc) ->
+ NewAcc = [{Arity, sup_tuples_in_set(Tuples1, Tuples2)}|Acc],
+ sup_tuple_sets(Left1, Left2, NewAcc);
+sup_tuple_sets([{Arity1, _} = T1|Left1] = L1,
+ [{Arity2, _} = T2|Left2] = L2, Acc) ->
+ if Arity1 < Arity2 -> sup_tuple_sets(Left1, L2, [T1|Acc]);
+ Arity1 > Arity2 -> sup_tuple_sets(L1, Left2, [T2|Acc])
+ end;
+sup_tuple_sets([], L2, Acc) -> lists:reverse(Acc, L2);
+sup_tuple_sets(L1, [], Acc) -> lists:reverse(Acc, L1).
+
+sup_tuples_in_set([?tuple(_, _, ?any) = T], L) ->
+ [t_tuple(sup_tuple_elements([T|L]))];
+sup_tuples_in_set(L, [?tuple(_, _, ?any) = T]) ->
+ [t_tuple(sup_tuple_elements([T|L]))];
+sup_tuples_in_set(L1, L2) ->
+ FoldFun = fun(?tuple(_, _, Tag), AccTag) -> t_sup(Tag, AccTag) end,
+ TotalTag0 = lists:foldl(FoldFun, ?none, L1),
+ TotalTag = lists:foldl(FoldFun, TotalTag0, L2),
+ case TotalTag of
+ ?atom(?any) ->
+ %% We will reach the set limit. Widen now.
+ [t_tuple(sup_tuple_elements(L1 ++ L2))];
+ ?atom(Set) ->
+ case set_size(Set) > ?TUPLE_TAG_LIMIT of
+ true ->
+ %% We will reach the set limit. Widen now.
+ [t_tuple(sup_tuple_elements(L1 ++ L2))];
+ false ->
+ %% We can go on and build the tuple set.
+ sup_tuples_in_set(L1, L2, [])
+ end
+ end.
+
+sup_tuple_elements([?tuple(Elements, _, _)|L]) ->
+ lists:foldl(fun (?tuple(Es, _, _), Acc) -> t_sup_lists(Es, Acc) end,
+ Elements, L).
+
+sup_tuples_in_set([?tuple(Elements1, Arity, Tag1) = T1|Left1] = L1,
+ [?tuple(Elements2, Arity, Tag2) = T2|Left2] = L2, Acc) ->
+ if
+ Tag1 < Tag2 -> sup_tuples_in_set(Left1, L2, [T1|Acc]);
+ Tag1 > Tag2 -> sup_tuples_in_set(L1, Left2, [T2|Acc]);
+ Tag1 =:= Tag2 -> NewElements = t_sup_lists(Elements1, Elements2),
+ NewAcc = [?tuple(NewElements, Arity, Tag1)|Acc],
+ sup_tuples_in_set(Left1, Left2, NewAcc)
+ end;
+sup_tuples_in_set([], L2, Acc) -> lists:reverse(Acc, L2);
+sup_tuples_in_set(L1, [], Acc) -> lists:reverse(Acc, L1).
+
+sup_union(U1, U2) ->
+ sup_union(U1, U2, 0, []).
+
+sup_union([?none|Left1], [?none|Left2], N, Acc) ->
+ sup_union(Left1, Left2, N, [?none|Acc]);
+sup_union([T1|Left1], [T2|Left2], N, Acc) ->
+ sup_union(Left1, Left2, N+1, [t_sup(T1, T2)|Acc]);
+sup_union([], [], N, Acc) ->
+ if N =:= 0 -> ?none;
+ N =:= 1 ->
+ [Type] = [T || T <- Acc, T =/= ?none],
+ Type;
+ N =:= length(Acc) -> ?any;
+ true -> ?union(lists:reverse(Acc))
+ end.
+
+force_union(T = ?atom(_)) -> ?atom_union(T);
+force_union(T = ?bitstr(_, _)) -> ?bitstr_union(T);
+force_union(T = ?function(_, _)) -> ?function_union(T);
+force_union(T = ?identifier(_)) -> ?identifier_union(T);
+force_union(T = ?list(_, _, _)) -> ?list_union(T);
+force_union(T = ?nil) -> ?list_union(T);
+force_union(T = ?number(_, _)) -> ?number_union(T);
+force_union(T = ?opaque(_)) -> ?opaque_union(T);
+force_union(T = ?map(_,_,_)) -> ?map_union(T);
+force_union(T = ?tuple(_, _, _)) -> ?tuple_union(T);
+force_union(T = ?tuple_set(_)) -> ?tuple_union(T);
+force_union(T = ?matchstate(_, _)) -> ?matchstate_union(T);
+force_union(T = ?union(_)) -> T.
+
+%%-----------------------------------------------------------------------------
+%% An attempt to write the inverse operation of t_sup/1 -- XXX: INCOMPLETE !!
+%%
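+%% Illustrative example (exposition only; assumes the atom set is enumerable):
+%% t_elements(t_sup(t_atom(a), t_atom(b))) is expected to give back the two
+%% singleton atom types 'a' and 'b'.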
+
+-spec t_elements(erl_type()) -> [erl_type()].
+
+t_elements(?none) -> [];
+t_elements(?unit) -> [];
+t_elements(?any = T) -> [T];
+t_elements(?nil = T) -> [T];
+t_elements(?atom(?any) = T) -> [T];
+t_elements(?atom(Atoms)) ->
+ [t_atom(A) || A <- Atoms];
+t_elements(?bitstr(_, _) = T) -> [T];
+t_elements(?function(_, _) = T) -> [T];
+t_elements(?identifier(?any) = T) -> [T];
+t_elements(?identifier(IDs)) ->
+ [?identifier([T]) || T <- IDs];
+t_elements(?list(_, _, _) = T) -> [T];
+t_elements(?number(_, _) = T) ->
+ case T of
+ ?number(?any, ?unknown_qual) ->
+ [?float, ?integer(?any)];
+ ?float -> [T];
+ ?integer(?any) -> [T];
+ ?int_range(_, _) -> [T];
+ ?int_set(Set) ->
+ [t_integer(I) || I <- Set]
+ end;
+t_elements(?opaque(_) = T) ->
+ do_elements(T);
+t_elements(?map(_,_,_) = T) -> [T];
+t_elements(?tuple(_, _, _) = T) -> [T];
+t_elements(?tuple_set(_) = TS) ->
+ case t_tuple_subtypes(TS) of
+ unknown -> [];
+ Elems -> Elems
+ end;
+t_elements(?union(_) = T) ->
+ do_elements(T);
+t_elements(?var(_)) -> [?any]. %% yes, vars exist -- what else to do here?
+%% t_elements(T) ->
+%% io:format("T_ELEMENTS => ~p\n", [T]).
+
+do_elements(Type0) ->
+ case do_opaque(Type0, 'universe', fun(T) -> T end) of
+ ?union(List) -> lists:append([t_elements(T) || T <- List]);
+ Type -> t_elements(Type)
+ end.
+
+%%-----------------------------------------------------------------------------
+%% Infimum
+%%
+
+-spec t_inf([erl_type()]) -> erl_type().
+
+t_inf([H1, H2|T]) ->
+ case t_inf(H1, H2) of
+ ?none -> ?none;
+ NewH -> t_inf([NewH|T])
+ end;
+t_inf([H]) -> H;
+t_inf([]) -> ?none.
+
+-spec t_inf(erl_type(), erl_type()) -> erl_type().
+
+t_inf(T1, T2) ->
+ t_inf(T1, T2, 'universe').
+
+%% 'match' should be used from t_find_unknown_opaque() only
+-type t_inf_opaques() :: opaques() | {'match', [erl_type() | 'universe']}.
+
+-spec t_inf(erl_type(), erl_type(), t_inf_opaques()) -> erl_type().
+
+t_inf(?var(_), ?var(_), _Opaques) -> ?any;
+t_inf(?var(_), T, _Opaques) -> subst_all_vars_to_any(T);
+t_inf(T, ?var(_), _Opaques) -> subst_all_vars_to_any(T);
+t_inf(?any, T, _Opaques) -> subst_all_vars_to_any(T);
+t_inf(T, ?any, _Opaques) -> subst_all_vars_to_any(T);
+t_inf(?none, _, _Opaques) -> ?none;
+t_inf(_, ?none, _Opaques) -> ?none;
+t_inf(?unit, _, _Opaques) -> ?unit; % ?unit cases should appear below ?none
+t_inf(_, ?unit, _Opaques) -> ?unit;
+t_inf(T, T, _Opaques) -> subst_all_vars_to_any(T);
+t_inf(?atom(Set1), ?atom(Set2), _) ->
+ case set_intersection(Set1, Set2) of
+ ?none -> ?none;
+ NewSet -> ?atom(NewSet)
+ end;
+t_inf(?bitstr(U1, B1), ?bitstr(0, B2), _Opaques) ->
+ if B2 >= B1 andalso (B2-B1) rem U1 =:= 0 -> t_bitstr(0, B2);
+ true -> ?none
+ end;
+t_inf(?bitstr(0, B1), ?bitstr(U2, B2), _Opaques) ->
+ if B1 >= B2 andalso (B1-B2) rem U2 =:= 0 -> t_bitstr(0, B1);
+ true -> ?none
+ end;
+t_inf(?bitstr(U1, B1), ?bitstr(U1, B1), _Opaques) ->
+ t_bitstr(U1, B1);
+t_inf(?bitstr(U1, B1), ?bitstr(U2, B2), _Opaques) when U2 > U1 ->
+ inf_bitstr(U2, B2, U1, B1);
+t_inf(?bitstr(U1, B1), ?bitstr(U2, B2), _Opaques) ->
+ inf_bitstr(U1, B1, U2, B2);
+t_inf(?function(Domain1, Range1), ?function(Domain2, Range2), Opaques) ->
+ case t_inf(Domain1, Domain2, Opaques) of
+ ?none -> ?none;
+ Domain -> ?function(Domain, t_inf(Range1, Range2, Opaques))
+ end;
+t_inf(?identifier(Set1), ?identifier(Set2), _Opaques) ->
+ case set_intersection(Set1, Set2) of
+ ?none -> ?none;
+ Set -> ?identifier(Set)
+ end;
+t_inf(?map(_, ADefK, ADefV) = A, ?map(_, BDefK, BDefV) = B, _Opaques) ->
+ %% Because it simplifies the anonymous function below, we allow Pairs to
+ %% temporarily contain mandatory pairs with ?none values; all such cases
+ %% make the resulting map infimum ?none anyway.
+ Pairs =
+ map_pairwise_merge(
+ %% For optional keys in both maps, when the infimum is none, we have
+ %% essentially concluded that K must not be a key in the map.
+ fun(K, ?opt, V1, ?opt, V2) -> {K, ?opt, t_inf(V1, V2)};
+ %% When a key is optional in one map, but mandatory in the other, it
+ %% becomes mandatory in the infimum.
+ (K, _, V1, _, V2) -> {K, ?mand, t_inf(V1, V2)}
+ end, A, B),
+ %% If the infimum of any mandatory value is ?none, the entire map infimum
+ %% is ?none.
+ case lists:any(fun({_,?mand,?none})->true; ({_,_,_}) -> false end, Pairs) of
+ true -> t_none();
+ false -> t_map(Pairs, t_inf(ADefK, BDefK), t_inf(ADefV, BDefV))
+ end;
+t_inf(?matchstate(Pres1, Slots1), ?matchstate(Pres2, Slots2), _Opaques) ->
+ ?matchstate(t_inf(Pres1, Pres2), t_inf(Slots1, Slots2));
+t_inf(?nil, ?nil, _Opaques) -> ?nil;
+t_inf(?nil, ?nonempty_list(_, _), _Opaques) ->
+ ?none;
+t_inf(?nonempty_list(_, _), ?nil, _Opaques) ->
+ ?none;
+t_inf(?nil, ?list(_Contents, Termination, _), Opaques) ->
+ t_inf(?nil, t_unopaque(Termination), Opaques);
+t_inf(?list(_Contents, Termination, _), ?nil, Opaques) ->
+ t_inf(?nil, t_unopaque(Termination), Opaques);
+t_inf(?list(Contents1, Termination1, Size1),
+ ?list(Contents2, Termination2, Size2), Opaques) ->
+ case t_inf(Termination1, Termination2, Opaques) of
+ ?none -> ?none;
+ Termination ->
+ case t_inf(Contents1, Contents2, Opaques) of
+ ?none ->
+ %% If neither list is known to be nonempty, the infimum is nil.
+ case (Size1 =:= ?unknown_qual) andalso (Size2 =:= ?unknown_qual) of
+ true -> t_nil();
+ false -> ?none
+ end;
+ Contents ->
+ Size =
+ case {Size1, Size2} of
+ {?unknown_qual, ?unknown_qual} -> ?unknown_qual;
+ {?unknown_qual, ?nonempty_qual} -> ?nonempty_qual;
+ {?nonempty_qual, ?unknown_qual} -> ?nonempty_qual;
+ {?nonempty_qual, ?nonempty_qual} -> ?nonempty_qual
+ end,
+ ?list(Contents, Termination, Size)
+ end
+ end;
+t_inf(?number(_, _) = T1, ?number(_, _) = T2, _Opaques) ->
+ case {T1, T2} of
+ {T, T} -> T;
+ {_, ?number(?any, ?unknown_qual)} -> T1;
+ {?number(?any, ?unknown_qual), _} -> T2;
+ {?float, ?integer(_)} -> ?none;
+ {?integer(_), ?float} -> ?none;
+ {?integer(?any), ?integer(_)} -> T2;
+ {?integer(_), ?integer(?any)} -> T1;
+ {?int_set(Set1), ?int_set(Set2)} ->
+ case set_intersection(Set1, Set2) of
+ ?none -> ?none;
+ Set -> ?int_set(Set)
+ end;
+ {?int_range(From1, To1), ?int_range(From2, To2)} ->
+ t_from_range(max(From1, From2), min(To1, To2));
+ {Range = ?int_range(_, _), ?int_set(Set)} ->
+ %% io:format("t_inf range, set args ~p ~p ~n", [T1, T2]),
+ Ans2 =
+ case set_filter(fun(X) -> in_range(X, Range) end, Set) of
+ ?none -> ?none;
+ NewSet -> ?int_set(NewSet)
+ end,
+ %% io:format("Ans2 ~p ~n", [Ans2]),
+ Ans2;
+ {?int_set(Set), ?int_range(_, _) = Range} ->
+ case set_filter(fun(X) -> in_range(X, Range) end, Set) of
+ ?none -> ?none;
+ NewSet -> ?int_set(NewSet)
+ end
+ end;
+t_inf(?product(Types1), ?product(Types2), Opaques) ->
+ L1 = length(Types1),
+ L2 = length(Types2),
+ if L1 =:= L2 -> ?product(t_inf_lists(Types1, Types2, Opaques));
+ true -> ?none
+ end;
+t_inf(?product(_), _, _Opaques) ->
+ ?none;
+t_inf(_, ?product(_), _Opaques) ->
+ ?none;
+t_inf(?tuple(?any, ?any, ?any), ?tuple(_, _, _) = T, _Opaques) ->
+ subst_all_vars_to_any(T);
+t_inf(?tuple(_, _, _) = T, ?tuple(?any, ?any, ?any), _Opaques) ->
+ subst_all_vars_to_any(T);
+t_inf(?tuple(?any, ?any, ?any), ?tuple_set(_) = T, _Opaques) ->
+ subst_all_vars_to_any(T);
+t_inf(?tuple_set(_) = T, ?tuple(?any, ?any, ?any), _Opaques) ->
+ subst_all_vars_to_any(T);
+t_inf(?tuple(Elements1, Arity, _Tag1), ?tuple(Elements2, Arity, _Tag2), Opaques) ->
+ case t_inf_lists_strict(Elements1, Elements2, Opaques) of
+ bottom -> ?none;
+ NewElements -> t_tuple(NewElements)
+ end;
+t_inf(?tuple_set(List1), ?tuple_set(List2), Opaques) ->
+ inf_tuple_sets(List1, List2, Opaques);
+t_inf(?tuple_set(List), ?tuple(_, Arity, _) = T, Opaques) ->
+ inf_tuple_sets(List, [{Arity, [T]}], Opaques);
+t_inf(?tuple(_, Arity, _) = T, ?tuple_set(List), Opaques) ->
+ inf_tuple_sets(List, [{Arity, [T]}], Opaques);
+%% be careful: here and in the next clause T can be ?opaque
+t_inf(?union(U1), T, Opaques) ->
+ ?union(U2) = force_union(T),
+ inf_union(U1, U2, Opaques);
+t_inf(T, ?union(U2), Opaques) ->
+ ?union(U1) = force_union(T),
+ inf_union(U1, U2, Opaques);
+t_inf(?opaque(Set1), ?opaque(Set2), Opaques) ->
+ inf_opaque(Set1, Set2, Opaques);
+t_inf(?opaque(_) = T1, T2, Opaques) ->
+ inf_opaque1(T2, T1, 1, Opaques);
+t_inf(T1, ?opaque(_) = T2, Opaques) ->
+ inf_opaque1(T1, T2, 2, Opaques);
+%% and as a result, the cases for ?opaque should appear *after* ?union
+t_inf(#c{}, #c{}, _) ->
+ ?none.
+
+inf_opaque1(T1, ?opaque(Set2)=T2, Pos, Opaques) ->
+ case Opaques =:= 'universe' orelse inf_is_opaque_type(T2, Pos, Opaques) of
+ false -> ?none;
+ true ->
+ List2 = set_to_list(Set2),
+ case inf_collect(T1, List2, Opaques, []) of
+ [] -> ?none;
+ OpL -> ?opaque(ordsets:from_list(OpL))
+ end
+ end.
+
+inf_is_opaque_type(T, Pos, {match, Opaques}) ->
+ is_opaque_type(T, Opaques) orelse throw({pos, [Pos]});
+inf_is_opaque_type(T, _Pos, Opaques) ->
+ is_opaque_type(T, Opaques).
+
+inf_collect(T1, [T2|List2], Opaques, OpL) ->
+ #opaque{struct = S2} = T2,
+ case t_inf(T1, S2, Opaques) of
+ ?none -> inf_collect(T1, List2, Opaques, OpL);
+ Inf ->
+ Op = T2#opaque{struct = Inf},
+ inf_collect(T1, List2, Opaques, [Op|OpL])
+ end;
+inf_collect(_T1, [], _Opaques, OpL) ->
+ OpL.
+
+combine(S, T1, T2) ->
+ #opaque{mod = Mod1, name = Name1, args = Args1} = T1,
+ #opaque{mod = Mod2, name = Name2, args = Args2} = T2,
+ Comb1 = comb(Mod1, Name1, Args1, S, T1),
+ case is_compat_opaque_names({Mod1, Name1, Args1}, {Mod2, Name2, Args2}) of
+ true -> Comb1;
+ false -> Comb1 ++ comb(Mod2, Name2, Args2, S, T2)
+ end.
+
+comb(Mod, Name, Args, S, T) ->
+ case can_combine_opaque_names(Mod, Name, Args, S) of
+ true ->
+ ?opaque(Set) = S,
+ Set;
+ false ->
+ [T#opaque{struct = S}]
+ end.
+
+can_combine_opaque_names(Mod1, Name1, Args1,
+ ?opaque([#opaque{mod = Mod2, name = Name2, args = Args2}])) ->
+ is_compat_opaque_names({Mod1, Name1, Args1}, {Mod2, Name2, Args2});
+can_combine_opaque_names(_, _, _, _) -> false.
+
+%% Combining two lists this way can be very time consuming...
+%% Note: two parameterized opaque types are not the same if their
+%% actual parameters differ
+inf_opaque(Set1, Set2, Opaques) ->
+ List1 = inf_look_up(Set1, Opaques),
+ List2 = inf_look_up(Set2, Opaques),
+ List0 = [combine(Inf, T1, T2) ||
+ {Is1, ModNameArgs1, T1} <- List1,
+ {Is2, ModNameArgs2, T2} <- List2,
+ not t_is_none(Inf = inf_opaque_types(Is1, ModNameArgs1, T1,
+ Is2, ModNameArgs2, T2,
+ Opaques))],
+ List = lists:sort(lists:append(List0)),
+ sup_opaque(List).
+
+%% Optimization: do just one lookup.
+inf_look_up(Set, Opaques) ->
+ [{Opaques =:= 'universe' orelse inf_is_opaque_type2(T, Opaques),
+ {M, N, Args}, T} ||
+ #opaque{mod = M, name = N, args = Args} = T <- set_to_list(Set)].
+
+inf_is_opaque_type2(T, {match, Opaques}) ->
+ is_opaque_type2(T, Opaques);
+inf_is_opaque_type2(T, Opaques) ->
+ is_opaque_type2(T, Opaques).
+
+inf_opaque_types(IsOpaque1, ModNameArgs1, T1,
+ IsOpaque2, ModNameArgs2, T2, Opaques) ->
+ #opaque{struct = S1}=T1,
+ #opaque{struct = S2}=T2,
+ case
+ Opaques =:= 'universe' orelse
+ is_compat_opaque_names(ModNameArgs1, ModNameArgs2)
+ of
+ true -> t_inf(S1, S2, Opaques);
+ false ->
+ case {IsOpaque1, IsOpaque2} of
+ {true, true} -> t_inf(S1, S2, Opaques);
+ {true, false} -> t_inf(S1, ?opaque(set_singleton(T2)), Opaques);
+ {false, true} -> t_inf(?opaque(set_singleton(T1)), S2, Opaques);
+ {false, false} when element(1, Opaques) =:= match ->
+ throw({pos, [1, 2]});
+ {false, false} -> t_none()
+ end
+ end.
+
+is_compat_opaque_names(ModNameArgs, ModNameArgs) -> true;
+is_compat_opaque_names({Mod,Name,Args1}, {Mod,Name,Args2}) ->
+ is_compat_args(Args1, Args2);
+is_compat_opaque_names(_, _) -> false.
+
+is_compat_args([A1|Args1], [A2|Args2]) ->
+ is_compat_arg(A1, A2) andalso is_compat_args(Args1, Args2);
+is_compat_args([], []) -> true;
+is_compat_args(_, _) -> false.
+
+is_compat_arg(A1, A2) ->
+ is_specialization(A1, A2) orelse is_specialization(A2, A1).
+
+-spec is_specialization(erl_type(), erl_type()) -> boolean().
+
+%% Returns true if the first argument is a specialization of the
+%% second argument in the sense that every type is a specialization of
+%% any(). For example, {_,_} is a specialization of any(), but not of
+%% tuple(). Variables are not handled; any() and unions are handled only
+%% partially.
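+%%
+%% A worked instance of the example above (an illustrative sketch using the
+%% tuple constructors exported by this module):
+%%   is_specialization(t_tuple([t_any(), t_any()]), t_any())    %% -> true
+%%   is_specialization(t_tuple([t_any(), t_any()]), t_tuple())  %% -> false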
+
+is_specialization(T, T) -> true;
+is_specialization(_, ?any) -> true;
+is_specialization(?any, _) -> false;
+is_specialization(?function(Domain1, Range1), ?function(Domain2, Range2)) ->
+ (is_specialization(Domain1, Domain2) andalso
+ is_specialization(Range1, Range2));
+is_specialization(?list(Contents1, Termination1, Size1),
+ ?list(Contents2, Termination2, Size2)) ->
+ (Size1 =:= Size2 andalso
+ is_specialization(Contents1, Contents2) andalso
+ is_specialization(Termination1, Termination2));
+is_specialization(?product(Types1), ?product(Types2)) ->
+ specialization_list(Types1, Types2);
+is_specialization(?tuple(?any, ?any, ?any), ?tuple(_, _, _)) -> false;
+is_specialization(?tuple(_, _, _), ?tuple(?any, ?any, ?any)) -> false;
+is_specialization(?tuple(Elements1, Arity, _),
+ ?tuple(Elements2, Arity, _)) when Arity =/= ?any ->
+ specialization_list(Elements1, Elements2);
+is_specialization(?tuple_set([{Arity, List}]),
+ ?tuple(Elements2, Arity, _)) when Arity =/= ?any ->
+ specialization_list(sup_tuple_elements(List), Elements2);
+is_specialization(?tuple(Elements1, Arity, _),
+ ?tuple_set([{Arity, List}])) when Arity =/= ?any ->
+ specialization_list(Elements1, sup_tuple_elements(List));
+is_specialization(?tuple_set(List1), ?tuple_set(List2)) ->
+ try
+ specialization_list_list([sup_tuple_elements(T) || {_Arity, T} <- List1],
+ [sup_tuple_elements(T) || {_Arity, T} <- List2])
+ catch _:_ -> false
+ end;
+is_specialization(?union(List1)=T1, ?union(List2)=T2) ->
+ case specialization_union2(T1, T2) of
+ {yes, Type1, Type2} -> is_specialization(Type1, Type2);
+ no -> specialization_list(List1, List2)
+ end;
+is_specialization(?union(List), T2) ->
+ case unify_union(List) of
+ {yes, Type} -> is_specialization(Type, T2);
+ no -> false
+ end;
+is_specialization(T1, ?union(List)) ->
+ case unify_union(List) of
+ {yes, Type} -> is_specialization(T1, Type);
+ no -> false
+ end;
+is_specialization(?opaque(_) = T1, T2) ->
+ is_specialization(t_opaque_structure(T1), T2);
+is_specialization(T1, ?opaque(_) = T2) ->
+ is_specialization(T1, t_opaque_structure(T2));
+is_specialization(?var(_), _) -> exit(error);
+is_specialization(_, ?var(_)) -> exit(error);
+is_specialization(?none, _) -> false;
+is_specialization(_, ?none) -> false;
+is_specialization(?unit, _) -> false;
+is_specialization(_, ?unit) -> false;
+is_specialization(#c{}, #c{}) -> false.
+
+specialization_list_list(LL1, LL2) ->
+ length(LL1) =:= length(LL2) andalso specialization_list_list1(LL1, LL2).
+
+specialization_list_list1([], []) -> true;
+specialization_list_list1([L1|LL1], [L2|LL2]) ->
+ specialization_list(L1, L2) andalso specialization_list_list1(LL1, LL2).
+
+specialization_list(L1, L2) ->
+ length(L1) =:= length(L2) andalso specialization_list1(L1, L2).
+
+specialization_list1([], []) -> true;
+specialization_list1([T1|L1], [T2|L2]) ->
+ is_specialization(T1, T2) andalso specialization_list1(L1, L2).
+
+specialization_union2(?union(List1)=T1, ?union(List2)=T2) ->
+ case {unify_union(List1), unify_union(List2)} of
+ {{yes, Type1}, {yes, Type2}} -> {yes, Type1, Type2};
+ {{yes, Type1}, no} -> {yes, Type1, T2};
+ {no, {yes, Type2}} -> {yes, T1, Type2};
+ {no, no} -> no
+ end.
+
+-spec t_inf_lists([erl_type()], [erl_type()]) -> [erl_type()].
+
+t_inf_lists(L1, L2) ->
+ t_inf_lists(L1, L2, 'universe').
+
+-spec t_inf_lists([erl_type()], [erl_type()], t_inf_opaques()) -> [erl_type()].
+
+t_inf_lists(L1, L2, Opaques) ->
+ t_inf_lists(L1, L2, [], Opaques).
+
+-spec t_inf_lists([erl_type()], [erl_type()], [erl_type()], t_inf_opaques()) -> [erl_type()].
+
+t_inf_lists([T1|Left1], [T2|Left2], Acc, Opaques) ->
+ t_inf_lists(Left1, Left2, [t_inf(T1, T2, Opaques)|Acc], Opaques);
+t_inf_lists([], [], Acc, _Opaques) ->
+ lists:reverse(Acc).
+
+%% Infimum of lists with strictness.
+%% If any element is the ?none type, the value 'bottom' is returned.
+
+-spec t_inf_lists_strict([erl_type()], [erl_type()], t_inf_opaques()) -> 'bottom' | [erl_type()].
+
+t_inf_lists_strict(L1, L2, Opaques) ->
+ t_inf_lists_strict(L1, L2, [], Opaques).
+
+-spec t_inf_lists_strict([erl_type()], [erl_type()], [erl_type()], t_inf_opaques()) -> 'bottom' | [erl_type()].
+
+t_inf_lists_strict([T1|Left1], [T2|Left2], Acc, Opaques) ->
+ case t_inf(T1, T2, Opaques) of
+ ?none -> bottom;
+ T -> t_inf_lists_strict(Left1, Left2, [T|Acc], Opaques)
+ end;
+t_inf_lists_strict([], [], Acc, _Opaques) ->
+ lists:reverse(Acc).
+
+inf_tuple_sets(L1, L2, Opaques) ->
+ case inf_tuple_sets(L1, L2, [], Opaques) of
+ [] -> ?none;
+ [{_Arity, [?tuple(_, _, _) = OneTuple]}] -> OneTuple;
+ List -> ?tuple_set(List)
+ end.
+
+inf_tuple_sets([{Arity, Tuples1}|Ts1], [{Arity, Tuples2}|Ts2], Acc, Opaques) ->
+ case inf_tuples_in_sets(Tuples1, Tuples2, Opaques) of
+ [] -> inf_tuple_sets(Ts1, Ts2, Acc, Opaques);
+ [?tuple_set([{Arity, NewTuples}])] ->
+ inf_tuple_sets(Ts1, Ts2, [{Arity, NewTuples}|Acc], Opaques);
+ NewTuples -> inf_tuple_sets(Ts1, Ts2, [{Arity, NewTuples}|Acc], Opaques)
+ end;
+inf_tuple_sets([{Arity1, _}|Ts1] = L1, [{Arity2, _}|Ts2] = L2, Acc, Opaques) ->
+ if Arity1 < Arity2 -> inf_tuple_sets(Ts1, L2, Acc, Opaques);
+ Arity1 > Arity2 -> inf_tuple_sets(L1, Ts2, Acc, Opaques)
+ end;
+inf_tuple_sets([], _, Acc, _Opaques) -> lists:reverse(Acc);
+inf_tuple_sets(_, [], Acc, _Opaques) -> lists:reverse(Acc).
+
+inf_tuples_in_sets([?tuple(Elements1, _, ?any)], L2, Opaques) ->
+ NewList = [t_inf_lists_strict(Elements1, Elements2, Opaques)
+ || ?tuple(Elements2, _, _) <- L2],
+ [t_tuple(Es) || Es <- NewList, Es =/= bottom];
+inf_tuples_in_sets(L1, [?tuple(Elements2, _, ?any)], Opaques) ->
+ NewList = [t_inf_lists_strict(Elements1, Elements2, Opaques)
+ || ?tuple(Elements1, _, _) <- L1],
+ [t_tuple(Es) || Es <- NewList, Es =/= bottom];
+inf_tuples_in_sets(L1, L2, Opaques) ->
+ inf_tuples_in_sets2(L1, L2, [], Opaques).
+
+inf_tuples_in_sets2([?tuple(Elements1, Arity, Tag)|Ts1],
+ [?tuple(Elements2, Arity, Tag)|Ts2], Acc, Opaques) ->
+ case t_inf_lists_strict(Elements1, Elements2, Opaques) of
+ bottom -> inf_tuples_in_sets2(Ts1, Ts2, Acc, Opaques);
+ NewElements ->
+ inf_tuples_in_sets2(Ts1, Ts2, [?tuple(NewElements, Arity, Tag)|Acc],
+ Opaques)
+ end;
+inf_tuples_in_sets2([?tuple(_, _, Tag1)|Ts1] = L1,
+ [?tuple(_, _, Tag2)|Ts2] = L2, Acc, Opaques) ->
+ if Tag1 < Tag2 -> inf_tuples_in_sets2(Ts1, L2, Acc, Opaques);
+ Tag1 > Tag2 -> inf_tuples_in_sets2(L1, Ts2, Acc, Opaques)
+ end;
+inf_tuples_in_sets2([], _, Acc, _Opaques) -> lists:reverse(Acc);
+inf_tuples_in_sets2(_, [], Acc, _Opaques) -> lists:reverse(Acc).
+
+inf_union(U1, U2, Opaques) ->
+ OpaqueFun =
+ fun(Union1, Union2, InfFun) ->
+ [_,_,_,_,_,_,_,_,Opaque,_] = Union1,
+ [A,B,F,I,L,N,T,M,_,Map] = Union2,
+ List = [A,B,F,I,L,N,T,M,Map],
+ inf_union_collect(List, Opaque, InfFun, [], [])
+ end,
+ {O1, ThrowList1} =
+ OpaqueFun(U1, U2, fun(E, Opaque) -> t_inf(Opaque, E, Opaques) end),
+ {O2, ThrowList2} =
+ OpaqueFun(U2, U1, fun(E, Opaque) -> t_inf(E, Opaque, Opaques) end),
+ {Union, ThrowList3} = inf_union(U1, U2, 0, [], [], Opaques),
+ ThrowList = lists:merge3(ThrowList1, ThrowList2, ThrowList3),
+ case t_sup([O1, O2, Union]) of
+ ?none when ThrowList =/= [] -> throw({pos, lists:usort(ThrowList)});
+ Sup -> Sup
+ end.
+
+inf_union_collect([], _Opaque, _InfFun, InfList, ThrowList) ->
+ {t_sup(InfList), lists:usort(ThrowList)};
+inf_union_collect([?none|L], Opaque, InfFun, InfList, ThrowList) ->
+ inf_union_collect(L, Opaque, InfFun, [?none|InfList], ThrowList);
+inf_union_collect([E|L], Opaque, InfFun, InfList, ThrowList) ->
+ try InfFun(E, Opaque) of
+ Inf ->
+ inf_union_collect(L, Opaque, InfFun, [Inf|InfList], ThrowList)
+ catch throw:{pos, Ns} ->
+ inf_union_collect(L, Opaque, InfFun, InfList, Ns ++ ThrowList)
+ end.
+
+inf_union([?none|Left1], [?none|Left2], N, Acc, ThrowList, Opaques) ->
+ inf_union(Left1, Left2, N, [?none|Acc], ThrowList, Opaques);
+inf_union([T1|Left1], [T2|Left2], N, Acc, ThrowList, Opaques) ->
+ try t_inf(T1, T2, Opaques) of
+ ?none -> inf_union(Left1, Left2, N, [?none|Acc], ThrowList, Opaques);
+ T -> inf_union(Left1, Left2, N+1, [T|Acc], ThrowList, Opaques)
+ catch throw:{pos, Ns} ->
+ inf_union(Left1, Left2, N, [?none|Acc], Ns ++ ThrowList, Opaques)
+ end;
+inf_union([], [], N, Acc, ThrowList, _Opaques) ->
+ if N =:= 0 -> {?none, ThrowList};
+ N =:= 1 ->
+ [Type] = [T || T <- Acc, T =/= ?none],
+ {Type, ThrowList};
+ N >= 2 -> {?union(lists:reverse(Acc)), ThrowList}
+ end.
+
+inf_bitstr(U1, B1, U2, B2) ->
+ GCD = gcd(U1, U2),
+ case (B2-B1) rem GCD of
+ 0 ->
+ U = (U1*U2) div GCD,
+ B = findfirst(0, 0, U1, B1, U2, B2),
+ t_bitstr(U, B);
+ _ ->
+ ?none
+ end.
+
+findfirst(N1, N2, U1, B1, U2, B2) ->
+ Val1 = U1*N1+B1,
+ Val2 = U2*N2+B2,
+ if Val1 =:= Val2 ->
+ Val1;
+ Val1 > Val2 ->
+ findfirst(N1, N2+1, U1, B1, U2, B2);
+ Val1 < Val2 ->
+ findfirst(N1+1, N2, U1, B1, U2, B2)
+ end.
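+
+%% Worked example (a sketch, not part of the original change): the common
+%% sizes of t_bitstr(6, 0), i.e. <<_:_*6>>, and t_bitstr(8, 4), i.e.
+%% <<_:4,_:_*8>>, are {12, 36, 60, ...}; gcd(6, 8) =:= 2, U = 6*8 div 2 = 24,
+%% and findfirst/6 yields 12, so the infimum is t_bitstr(24, 12), printed as
+%% <<_:12,_:_*24>>.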
+
+%%-----------------------------------------------------------------------------
+%% Substitution of variables
+%%
+
+-type subst_table() :: #{any() => erl_type()}.
+
+-spec t_subst(erl_type(), subst_table()) -> erl_type().
+
+t_subst(T, Map) ->
+ case t_has_var(T) of
+ true -> t_subst_aux(T, Map);
+ false -> T
+ end.
+
+-spec subst_all_vars_to_any(erl_type()) -> erl_type().
+
+subst_all_vars_to_any(T) ->
+ t_subst(T, #{}).
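+
+%% A usage sketch (illustrative; assumes the usual constructors t_list/1,
+%% t_var/1, t_tuple/1 and t_atom/1 exported by this module):
+%%
+%%   t_subst(t_list(t_var('X')), #{'X' => t_integer()})
+%%     %% => [integer()]
+%%   subst_all_vars_to_any(t_tuple([t_var('X'), t_atom(a)]))
+%%     %% => t_tuple([t_any(), t_atom(a)])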
+
+t_subst_aux(?var(Id), Map) ->
+ case maps:find(Id, Map) of
+ error -> ?any;
+ {ok, Type} -> Type
+ end;
+t_subst_aux(?list(Contents, Termination, Size), Map) ->
+ case t_subst_aux(Contents, Map) of
+ ?none -> ?none;
+ NewContents ->
+ %% Be careful here to make the termination collapse if necessary.
+ case t_subst_aux(Termination, Map) of
+ ?nil -> ?list(NewContents, ?nil, Size);
+ ?any -> ?list(NewContents, ?any, Size);
+ Other ->
+ ?list(NewContents2, NewTermination, _) = t_cons(NewContents, Other),
+ ?list(NewContents2, NewTermination, Size)
+ end
+ end;
+t_subst_aux(?function(Domain, Range), Map) ->
+ ?function(t_subst_aux(Domain, Map), t_subst_aux(Range, Map));
+t_subst_aux(?product(Types), Map) ->
+ ?product([t_subst_aux(T, Map) || T <- Types]);
+t_subst_aux(?tuple(?any, ?any, ?any) = T, _Map) ->
+ T;
+t_subst_aux(?tuple(Elements, _Arity, _Tag), Map) ->
+ t_tuple([t_subst_aux(E, Map) || E <- Elements]);
+t_subst_aux(?tuple_set(_) = TS, Map) ->
+ t_sup([t_subst_aux(T, Map) || T <- t_tuple_subtypes(TS)]);
+t_subst_aux(?map(Pairs, DefK, DefV), Map) ->
+ t_map([{K, MNess, t_subst_aux(V, Map)} || {K, MNess, V} <- Pairs],
+ t_subst_aux(DefK, Map), t_subst_aux(DefV, Map));
+t_subst_aux(?opaque(Es), Map) ->
+ List = [Opaque#opaque{args = [t_subst_aux(Arg, Map) || Arg <- Args],
+ struct = t_subst_aux(S, Map)} ||
+ Opaque = #opaque{args = Args, struct = S} <- set_to_list(Es)],
+ ?opaque(ordsets:from_list(List));
+t_subst_aux(?union(List), Map) ->
+ ?union([t_subst_aux(E, Map) || E <- List]);
+t_subst_aux(T, _Map) ->
+ T.
+
+%%-----------------------------------------------------------------------------
+%% Unification
+%%
+
+-type t_unify_ret() :: {erl_type(), [{_, erl_type()}]}.
+
+-spec t_unify(erl_type(), erl_type()) -> t_unify_ret().
+
+t_unify(T1, T2) ->
+ {T, VarMap} = t_unify(T1, T2, #{}),
+ {t_subst(T, VarMap), lists:keysort(1, maps:to_list(VarMap))}.
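+
+%% A usage sketch (illustrative, not part of the original change; on failure
+%% the clauses below throw {mismatch, T1, T2}):
+%%
+%%   t_unify(t_tuple([t_var('X'), t_integer()]),
+%%           t_tuple([t_atom(a), t_var('Y')])).
+%%   %% => {t_tuple([t_atom(a), t_integer()]),
+%%   %%     [{'X', t_atom(a)}, {'Y', t_integer()}]}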
+
+t_unify(?var(Id) = T, ?var(Id), VarMap) ->
+ {T, VarMap};
+t_unify(?var(Id1) = T, ?var(Id2), VarMap) ->
+ case maps:find(Id1, VarMap) of
+ error ->
+ case maps:find(Id2, VarMap) of
+ error -> {T, VarMap#{Id2 => T}};
+ {ok, Type} -> t_unify(T, Type, VarMap)
+ end;
+ {ok, Type1} ->
+ case maps:find(Id2, VarMap) of
+ error -> {Type1, VarMap#{Id2 => T}};
+ {ok, Type2} -> t_unify(Type1, Type2, VarMap)
+ end
+ end;
+t_unify(?var(Id), Type, VarMap) ->
+ case maps:find(Id, VarMap) of
+ error -> {Type, VarMap#{Id => Type}};
+ {ok, VarType} -> t_unify(VarType, Type, VarMap)
+ end;
+t_unify(Type, ?var(Id), VarMap) ->
+ case maps:find(Id, VarMap) of
+ error -> {Type, VarMap#{Id => Type}};
+ {ok, VarType} -> t_unify(VarType, Type, VarMap)
+ end;
+t_unify(?function(Domain1, Range1), ?function(Domain2, Range2), VarMap) ->
+ {Domain, VarMap1} = t_unify(Domain1, Domain2, VarMap),
+ {Range, VarMap2} = t_unify(Range1, Range2, VarMap1),
+ {?function(Domain, Range), VarMap2};
+t_unify(?list(Contents1, Termination1, Size),
+ ?list(Contents2, Termination2, Size), VarMap) ->
+ {Contents, VarMap1} = t_unify(Contents1, Contents2, VarMap),
+ {Termination, VarMap2} = t_unify(Termination1, Termination2, VarMap1),
+ {?list(Contents, Termination, Size), VarMap2};
+t_unify(?product(Types1), ?product(Types2), VarMap) ->
+ {Types, VarMap1} = unify_lists(Types1, Types2, VarMap),
+ {?product(Types), VarMap1};
+t_unify(?tuple(?any, ?any, ?any) = T, ?tuple(?any, ?any, ?any), VarMap) ->
+ {T, VarMap};
+t_unify(?tuple(Elements1, Arity, _),
+ ?tuple(Elements2, Arity, _), VarMap) when Arity =/= ?any ->
+ {NewElements, VarMap1} = unify_lists(Elements1, Elements2, VarMap),
+ {t_tuple(NewElements), VarMap1};
+t_unify(?tuple_set([{Arity, _}]) = T1,
+ ?tuple(_, Arity, _) = T2, VarMap) when Arity =/= ?any ->
+ unify_tuple_set_and_tuple1(T1, T2, VarMap);
+t_unify(?tuple(_, Arity, _) = T1,
+ ?tuple_set([{Arity, _}]) = T2, VarMap) when Arity =/= ?any ->
+ unify_tuple_set_and_tuple2(T1, T2, VarMap);
+t_unify(?tuple_set(List1) = T1, ?tuple_set(List2) = T2, VarMap) ->
+ try
+ unify_lists(lists:append([T || {_Arity, T} <- List1]),
+ lists:append([T || {_Arity, T} <- List2]), VarMap)
+ of
+ {Tuples, NewVarMap} -> {t_sup(Tuples), NewVarMap}
+ catch _:_ -> throw({mismatch, T1, T2})
+ end;
+t_unify(?map(_, ADefK, ADefV) = A, ?map(_, BDefK, BDefV) = B, VarMap0) ->
+ {DefK, VarMap1} = t_unify(ADefK, BDefK, VarMap0),
+ {DefV, VarMap2} = t_unify(ADefV, BDefV, VarMap1),
+ {Pairs, VarMap} =
+ map_pairwise_merge_foldr(
+ fun(K, MNess, V1, MNess, V2, {Pairs0, VarMap3}) ->
+ %% We know that the keys unify and do not contain variables, or they
+ %% would not be singletons.
+ %% TODO: Should V=?none (known missing keys) be handled specially?
+ {V, VarMap4} = t_unify(V1, V2, VarMap3),
+ {[{K,MNess,V}|Pairs0], VarMap4};
+ (K, _, V1, _, V2, {Pairs0, VarMap3}) ->
+ %% One mandatory and one optional; what should be done in this case?
+ {V, VarMap4} = t_unify(V1, V2, VarMap3),
+ {[{K,?mand,V}|Pairs0], VarMap4}
+ end, {[], VarMap2}, A, B),
+ {t_map(Pairs, DefK, DefV), VarMap};
+t_unify(?opaque(_) = T1, ?opaque(_) = T2, VarMap) ->
+ t_unify(t_opaque_structure(T1), t_opaque_structure(T2), VarMap);
+t_unify(T1, ?opaque(_) = T2, VarMap) ->
+ t_unify(T1, t_opaque_structure(T2), VarMap);
+t_unify(?opaque(_) = T1, T2, VarMap) ->
+ t_unify(t_opaque_structure(T1), T2, VarMap);
+t_unify(T, T, VarMap) ->
+ {T, VarMap};
+t_unify(?union(_)=T1, ?union(_)=T2, VarMap) ->
+ {Type1, Type2} = unify_union2(T1, T2),
+ t_unify(Type1, Type2, VarMap);
+t_unify(?union(_)=T1, T2, VarMap) ->
+ t_unify(unify_union1(T1, T1, T2), T2, VarMap);
+t_unify(T1, ?union(_)=T2, VarMap) ->
+ t_unify(T1, unify_union1(T2, T1, T2), VarMap);
+t_unify(T1, T2, _) ->
+ throw({mismatch, T1, T2}).
+
+unify_union2(?union(List1)=T1, ?union(List2)=T2) ->
+ case {unify_union(List1), unify_union(List2)} of
+ {{yes, Type1}, {yes, Type2}} -> {Type1, Type2};
+ {{yes, Type1}, no} -> {Type1, T2};
+ {no, {yes, Type2}} -> {T1, Type2};
+ {no, no} -> throw({mismatch, T1, T2})
+ end.
+
+unify_union1(?union(List), T1, T2) ->
+ case unify_union(List) of
+ {yes, Type} -> Type;
+ no -> throw({mismatch, T1, T2})
+ end.
+
+unify_union(List) ->
+ [A,B,F,I,L,N,T,M,O,Map] = List,
+ if O =:= ?none -> no;
+ true ->
+ S = t_opaque_structure(O),
+ {yes, t_sup([A,B,F,I,L,N,T,M,S,Map])}
+ end.
+
+-spec is_opaque_type(erl_type(), [erl_type()]) -> boolean().
+
+%% An opaque type is a union of types. Returns true iff any of the type
+%% names (Module and Name) of the first argument (the opaque type to
+%% check) occurs in any of the opaque types of the second argument.
+is_opaque_type(?opaque(Elements), Opaques) ->
+ lists:any(fun(Opaque) -> is_opaque_type2(Opaque, Opaques) end, Elements).
+
+is_opaque_type2(#opaque{mod = Mod1, name = Name1, args = Args1}, Opaques) ->
+ F1 = fun(?opaque(Es)) ->
+ F2 = fun(#opaque{mod = Mod, name = Name, args = Args}) ->
+ is_type_name(Mod1, Name1, Args1, Mod, Name, Args)
+ end,
+ lists:any(F2, Es)
+ end,
+ lists:any(F1, Opaques).
+
+is_type_name(Mod, Name, Args1, Mod, Name, Args2) ->
+ length(Args1) =:= length(Args2);
+is_type_name(_Mod1, _Name1, _Args1, _Mod2, _Name2, _Args2) ->
+ false.
+
+%% Two functions since t_unify is not symmetric.
+unify_tuple_set_and_tuple1(?tuple_set([{Arity, List}]),
+ ?tuple(Elements2, Arity, _), VarMap) ->
+ %% Can only work if the single tuple has variables at correct places.
+ %% Collapse the tuple set.
+ {NewElements, VarMap1} =
+ unify_lists(sup_tuple_elements(List), Elements2, VarMap),
+ {t_tuple(NewElements), VarMap1}.
+
+unify_tuple_set_and_tuple2(?tuple(Elements2, Arity, _),
+ ?tuple_set([{Arity, List}]), VarMap) ->
+ %% Can only work if the single tuple has variables at correct places.
+ %% Collapse the tuple set.
+ {NewElements, VarMap1} =
+ unify_lists(Elements2, sup_tuple_elements(List), VarMap),
+ {t_tuple(NewElements), VarMap1}.
+
+unify_lists(L1, L2, VarMap) ->
+ unify_lists(L1, L2, VarMap, []).
+
+unify_lists([T1|Left1], [T2|Left2], VarMap, Acc) ->
+ {NewT, NewVarMap} = t_unify(T1, T2, VarMap),
+ unify_lists(Left1, Left2, NewVarMap, [NewT|Acc]);
+unify_lists([], [], VarMap, Acc) ->
+ {lists:reverse(Acc), VarMap}.
+
+%%t_assign_variables_to_subtype(T1, T2) ->
+%% try
+%% Dict = assign_vars(T1, T2, dict:new()),
+%% {ok, dict:map(fun(_Param, List) -> t_sup(List) end, Dict)}
+%% catch
+%% throw:error -> error
+%% end.
+
+%%assign_vars(_, ?var(_), _Dict) ->
+%% erlang:error("Variable in right hand side of assignment");
+%%assign_vars(?any, _, Dict) ->
+%% Dict;
+%%assign_vars(?var(_) = Var, Type, Dict) ->
+%% store_var(Var, Type, Dict);
+%%assign_vars(?function(Domain1, Range1), ?function(Domain2, Range2), Dict) ->
+%% DomainList =
+%% case Domain2 of
+%% ?any -> [];
+%% ?product(List) -> List
+%% end,
+%% case any_none([Range2|DomainList]) of
+%% true -> throw(error);
+%% false ->
+%% Dict1 = assign_vars(Domain1, Domain2, Dict),
+%% assign_vars(Range1, Range2, Dict1)
+%% end;
+%%assign_vars(?list(_Contents, _Termination, ?any), ?nil, Dict) ->
+%% Dict;
+%%assign_vars(?list(Contents1, Termination1, Size1),
+%% ?list(Contents2, Termination2, Size2), Dict) ->
+%% Dict1 = assign_vars(Contents1, Contents2, Dict),
+%% Dict2 = assign_vars(Termination1, Termination2, Dict1),
+%% case {Size1, Size2} of
+%% {S, S} -> Dict2;
+%% {?any, ?nonempty_qual} -> Dict2;
+%% {_, _} -> throw(error)
+%% end;
+%%assign_vars(?product(Types1), ?product(Types2), Dict) ->
+%% case length(Types1) =:= length(Types2) of
+%% true -> assign_vars_lists(Types1, Types2, Dict);
+%% false -> throw(error)
+%% end;
+%%assign_vars(?tuple(?any, ?any, ?any), ?tuple(?any, ?any, ?any), Dict) ->
+%% Dict;
+%%assign_vars(?tuple(?any, ?any, ?any), ?tuple(_, _, _), Dict) ->
+%% Dict;
+%%assign_vars(?tuple(Elements1, Arity, _),
+%% ?tuple(Elements2, Arity, _), Dict) when Arity =/= ?any ->
+%% assign_vars_lists(Elements1, Elements2, Dict);
+%%assign_vars(?tuple_set(_) = T, ?tuple_set(List2), Dict) ->
+%% %% All Rhs tuples must already be subtypes of Lhs, so we can take
+%%  %% each one separately.
+%% assign_vars_lists([T || _ <- List2], List2, Dict);
+%%assign_vars(?tuple(?any, ?any, ?any), ?tuple_set(_), Dict) ->
+%% Dict;
+%%assign_vars(?tuple(_, Arity, _) = T1, ?tuple_set(List), Dict) ->
+%% case reduce_tuple_tags(List) of
+%% [Tuple = ?tuple(_, Arity, _)] -> assign_vars(T1, Tuple, Dict);
+%% _ -> throw(error)
+%% end;
+%%assign_vars(?tuple_set(List), ?tuple(_, Arity, Tag) = T2, Dict) ->
+%% case [T || ?tuple(_, Arity1, Tag1) = T <- List,
+%% Arity1 =:= Arity, Tag1 =:= Tag] of
+%% [] -> throw(error);
+%% [T1] -> assign_vars(T1, T2, Dict)
+%% end;
+%%assign_vars(?union(U1), T2, Dict) ->
+%% ?union(U2) = force_union(T2),
+%% assign_vars_lists(U1, U2, Dict);
+%%assign_vars(T, T, Dict) ->
+%% Dict;
+%%assign_vars(T1, T2, Dict) ->
+%% case t_is_subtype(T2, T1) of
+%% false -> throw(error);
+%% true -> Dict
+%% end.
+
+%%assign_vars_lists([T1|Left1], [T2|Left2], Dict) ->
+%% assign_vars_lists(Left1, Left2, assign_vars(T1, T2, Dict));
+%%assign_vars_lists([], [], Dict) ->
+%% Dict.
+
+%%store_var(?var(Id), Type, Dict) ->
+%% case dict:find(Id, Dict) of
+%% error -> dict:store(Id, [Type], Dict);
+%% {ok, _VarType0} -> dict:update(Id, fun(X) -> [Type|X] end, Dict)
+%% end.
+
+%%-----------------------------------------------------------------------------
+%% Subtraction.
+%%
+%% Note that the subtraction is an approximation since we do not have
+%% negative types. Also, tuples and products should be handled using
+%% the cartesian product of the elements, but this is not feasible to
+%% do.
+%%
+%% Example: {a|b,c|d}\{a,d} = {a,c}|{a,d}|{b,c}|{b,d} \ {a,d} =
+%% = {a,c}|{b,c}|{b,d} = {a|b,c|d}
+%%
+%% Instead, we can subtract if all elements but one become none after
+%% subtracting element-wise.
+%%
+%% Example: {a|b,c|d}\{a|b,d} = {a,c}|{a,d}|{b,c}|{b,d} \ {a,d}|{b,d} =
+%% = {a,c}|{b,c} = {a|b,c}
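+%%
+%% The same rule written with the constructors exported by this module (an
+%% illustrative sketch, not part of the original change):
+%%
+%%   AB = t_sup(t_atom(a), t_atom(b)),          %% a | b
+%%   CD = t_sup(t_atom(c), t_atom(d)),          %% c | d
+%%   t_subtract(t_tuple([AB, CD]), t_tuple([AB, t_atom(d)])),
+%%     %% => {a | b, c}  -- only one element changes, so we can subtract
+%%   t_subtract(t_tuple([AB, CD]), t_tuple([t_atom(a), t_atom(d)])).
+%%     %% => {a | b, c | d}  -- two elements would change; kept unchanged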
+
+-spec t_subtract_list(erl_type(), [erl_type()]) -> erl_type().
+
+t_subtract_list(T1, [T2|Left]) ->
+ t_subtract_list(t_subtract(T1, T2), Left);
+t_subtract_list(T, []) ->
+ T.
+
+-spec t_subtract(erl_type(), erl_type()) -> erl_type().
+
+t_subtract(_, ?any) -> ?none;
+t_subtract(T, ?var(_)) -> T;
+t_subtract(?any, _) -> ?any;
+t_subtract(?var(_) = T, _) -> T;
+t_subtract(T, ?unit) -> T;
+t_subtract(?unit, _) -> ?unit;
+t_subtract(?none, _) -> ?none;
+t_subtract(T, ?none) -> T;
+t_subtract(?atom(Set1), ?atom(Set2)) ->
+ case set_subtract(Set1, Set2) of
+ ?none -> ?none;
+ Set -> ?atom(Set)
+ end;
+t_subtract(?bitstr(U1, B1), ?bitstr(U2, B2)) ->
+ subtract_bin(t_bitstr(U1, B1), t_inf(t_bitstr(U1, B1), t_bitstr(U2, B2)));
+t_subtract(?function(_, _) = T1, ?function(_, _) = T2) ->
+ case t_is_subtype(T1, T2) of
+ true -> ?none;
+ false -> T1
+ end;
+t_subtract(?identifier(Set1), ?identifier(Set2)) ->
+ case set_subtract(Set1, Set2) of
+ ?none -> ?none;
+ Set -> ?identifier(Set)
+ end;
+t_subtract(?opaque(_)=T1, ?opaque(_)=T2) ->
+ opaque_subtract(T1, t_opaque_structure(T2));
+t_subtract(?opaque(_)=T1, T2) ->
+ opaque_subtract(T1, T2);
+t_subtract(T1, ?opaque(_)=T2) ->
+ t_subtract(T1, t_opaque_structure(T2));
+t_subtract(?matchstate(Pres1, Slots1), ?matchstate(Pres2, _Slots2)) ->
+ Pres = t_subtract(Pres1, Pres2),
+ case t_is_none(Pres) of
+ true -> ?none;
+ false -> ?matchstate(Pres, Slots1)
+ end;
+t_subtract(?matchstate(Present, Slots), _) ->
+ ?matchstate(Present, Slots);
+t_subtract(?nil, ?nil) ->
+ ?none;
+t_subtract(?nil, ?nonempty_list(_, _)) ->
+ ?nil;
+t_subtract(?nil, ?list(_, _, _)) ->
+ ?none;
+t_subtract(?list(Contents, Termination, _Size) = T, ?nil) ->
+ case Termination =:= ?nil of
+ true -> ?nonempty_list(Contents, Termination);
+ false -> T
+ end;
+t_subtract(?list(Contents1, Termination1, Size1) = T,
+ ?list(Contents2, Termination2, Size2)) ->
+ case t_is_subtype(Contents1, Contents2) of
+ true ->
+ case t_is_subtype(Termination1, Termination2) of
+ true ->
+ case {Size1, Size2} of
+ {?nonempty_qual, ?unknown_qual} -> ?none;
+ {?unknown_qual, ?nonempty_qual} -> ?nil;
+ {S, S} -> ?none
+ end;
+ false ->
+ %% If the termination is not covered by the subtracted type
+ %% we cannot really say anything about the result.
+ T
+ end;
+ false ->
+ %% All contents must be covered if there is going to be any
+ %% change to the list.
+ T
+ end;
+t_subtract(?float, ?float) -> ?none;
+t_subtract(?number(_, _) = T1, ?float) -> t_inf(T1, t_integer());
+t_subtract(?float, ?number(_Set, Tag)) ->
+ case Tag of
+ ?unknown_qual -> ?none;
+ _ -> ?float
+ end;
+t_subtract(?number(_, _), ?number(?any, ?unknown_qual)) -> ?none;
+t_subtract(?number(_, _) = T1, ?integer(?any)) -> t_inf(?float, T1);
+t_subtract(?int_set(Set1), ?int_set(Set2)) ->
+ case set_subtract(Set1, Set2) of
+ ?none -> ?none;
+ Set -> ?int_set(Set)
+ end;
+t_subtract(?int_range(From1, To1) = T1, ?int_range(_, _) = T2) ->
+ case t_inf(T1, T2) of
+ ?none -> T1;
+ ?int_range(From1, To1) -> ?none;
+ ?int_range(neg_inf, To) -> t_from_range(To + 1, To1);
+ ?int_range(From, pos_inf) -> t_from_range(From1, From - 1);
+ ?int_range(From, To) -> t_sup(t_from_range(From1, From - 1),
+                                  t_from_range(To + 1, To1))
+ end;
+t_subtract(?int_range(From, To) = T1, ?int_set(Set)) ->
+ NewFrom = case set_is_element(From, Set) of
+ true -> From + 1;
+ false -> From
+ end,
+ NewTo = case set_is_element(To, Set) of
+ true -> To - 1;
+ false -> To
+ end,
+ if (NewFrom =:= From) and (NewTo =:= To) -> T1;
+ true -> t_from_range(NewFrom, NewTo)
+ end;
+t_subtract(?int_set(Set), ?int_range(From, To)) ->
+ case set_filter(fun(X) -> (X < From) orelse (X > To) end, Set) of
+ ?none -> ?none;
+ NewSet -> ?int_set(NewSet)
+ end;
+t_subtract(?integer(?any) = T1, ?integer(_)) -> T1;
+t_subtract(?number(_, _) = T1, ?number(_, _)) -> T1;
+t_subtract(?tuple(_, _, _), ?tuple(?any, ?any, ?any)) -> ?none;
+t_subtract(?tuple_set(_), ?tuple(?any, ?any, ?any)) -> ?none;
+t_subtract(?tuple(?any, ?any, ?any) = T1, ?tuple_set(_)) -> T1;
+t_subtract(?tuple(Elements1, Arity1, _Tag1) = T1,
+ ?tuple(Elements2, Arity2, _Tag2)) ->
+ if Arity1 =/= Arity2 -> T1;
+ Arity1 =:= Arity2 ->
+ NewElements = t_subtract_lists(Elements1, Elements2),
+ case [E || E <- NewElements, E =/= ?none] of
+ [] -> ?none;
+ [_] -> t_tuple(replace_nontrivial_element(Elements1, NewElements));
+ _ -> T1
+ end
+ end;
+t_subtract(?tuple_set(List1) = T1, ?tuple(_, Arity, _) = T2) ->
+ case orddict:find(Arity, List1) of
+ error -> T1;
+ {ok, List2} ->
+ TuplesLeft0 = [Tuple || {_Arity, Tuple} <- orddict:erase(Arity, List1)],
+ TuplesLeft1 = lists:append(TuplesLeft0),
+ t_sup([t_subtract(L, T2) || L <- List2] ++ TuplesLeft1)
+ end;
+t_subtract(?tuple(_, Arity, _) = T1, ?tuple_set(List1)) ->
+ case orddict:find(Arity, List1) of
+ error -> T1;
+ {ok, List2} -> t_inf([t_subtract(T1, L) || L <- List2])
+ end;
+t_subtract(?tuple_set(_) = T1, ?tuple_set(_) = T2) ->
+ t_sup([t_subtract(T, T2) || T <- t_tuple_subtypes(T1)]);
+t_subtract(?product(Elements1) = T1, ?product(Elements2)) ->
+ Arity1 = length(Elements1),
+ Arity2 = length(Elements2),
+ if Arity1 =/= Arity2 -> T1;
+ Arity1 =:= Arity2 ->
+ NewElements = t_subtract_lists(Elements1, Elements2),
+ case [E || E <- NewElements, E =/= ?none] of
+ [] -> ?none;
+ [_] -> t_product(replace_nontrivial_element(Elements1, NewElements));
+ _ -> T1
+ end
+ end;
+t_subtract(?map(APairs, ADefK, ADefV) = A, ?map(_, BDefK, BDefV) = B) ->
+ case t_is_subtype(ADefK, BDefK) andalso t_is_subtype(ADefV, BDefV) of
+ false -> A;
+ true ->
+ %% We fold over the maps to produce a list of constraints, where
+ %% constraints are additional key-value pairs to put in Pairs. Only one
+ %% constraint needs to be applied to produce a type that excludes the
+ %% right-hand-side type, so if more than one constraint is produced, we
+ %% just return the left-hand-side argument.
+ %%
+ %% Each case of the fold may conclude either that
+ %% * the arguments constrain A at least as much as B, i.e. that A so far
+ %%   is a subtype of B; in that case it returns false;
+ %% * for the particular arguments, A being a subtype of B does not hold,
+ %%   but the infimum of A and B is nonempty, and by narrowing a pair in A
+ %%   we can create a type that excludes some elements in the infimum;
+ %%   in that case it returns that pair; or
+ %% * for the particular arguments, A being a subtype of B does not hold,
+ %%   and either the infimum of A and B is empty, or it is not possible
+ %%   with the current representation to create a type that excludes
+ %%   elements from B without also excluding elements that are only in A;
+ %%   in that case it returns the pair from A unchanged.
+ case
+ map_pairwise_merge(
+ %% If V1 is a subtype of V2, the case that K does not exist in A
+ %% remains.
+ fun(K, ?opt, V1, ?mand, V2) -> {K, ?opt, t_subtract(V1, V2)};
+ (K, _, V1, _, V2) ->
+ %% If we subtract an optional key, that leaves a mandatory key
+ case t_subtract(V1, V2) of
+ ?none -> false;
+ Partial -> {K, ?mand, Partial}
+ end
+ end, A, B)
+ of
+ %% We produce a list of keys that are constrained. As only one of
+ %% these should apply at a time, we can't represent the difference if
+ %% more than one constraint is produced. If we applied all of them,
+ %% that would make an underapproximation, which we must not do.
+ [] -> ?none; %% A is a subtype of B
+ [E] -> t_map(mapdict_store(E, APairs), ADefK, ADefV);
+ _ -> A
+ end
+ end;
+t_subtract(?product(P1), _) ->
+ ?product(P1);
+t_subtract(T, ?product(_)) ->
+ T;
+t_subtract(?union(U1), ?union(U2)) ->
+ subtract_union(U1, U2);
+t_subtract(T1, T2) ->
+ ?union(U1) = force_union(T1),
+ ?union(U2) = force_union(T2),
+ subtract_union(U1, U2).
+
+-spec opaque_subtract(erl_type(), erl_type()) -> erl_type().
+
+opaque_subtract(?opaque(Set1), T2) ->
+ List = [T1#opaque{struct = Sub} ||
+ #opaque{struct = S1}=T1 <- set_to_list(Set1),
+ not t_is_none(Sub = t_subtract(S1, T2))],
+ case List of
+ [] -> ?none;
+ _ -> ?opaque(ordsets:from_list(List))
+ end.
+
+-spec t_subtract_lists([erl_type()], [erl_type()]) -> [erl_type()].
+
+t_subtract_lists(L1, L2) ->
+ t_subtract_lists(L1, L2, []).
+
+-spec t_subtract_lists([erl_type()], [erl_type()], [erl_type()]) -> [erl_type()].
+
+t_subtract_lists([T1|Left1], [T2|Left2], Acc) ->
+ t_subtract_lists(Left1, Left2, [t_subtract(T1, T2)|Acc]);
+t_subtract_lists([], [], Acc) ->
+ lists:reverse(Acc).
+
+-spec subtract_union([erl_type(),...], [erl_type(),...]) -> erl_type().
+
+subtract_union(U1, U2) ->
+ [A1,B1,F1,I1,L1,N1,T1,M1,O1,Map1] = U1,
+ [A2,B2,F2,I2,L2,N2,T2,M2,O2,Map2] = U2,
+ List1 = [A1,B1,F1,I1,L1,N1,T1,M1,?none,Map1],
+ List2 = [A2,B2,F2,I2,L2,N2,T2,M2,?none,Map2],
+ Sub1 = subtract_union(List1, List2, 0, []),
+ O = if O1 =:= ?none -> O1;
+ true -> t_subtract(O1, ?union(U2))
+ end,
+ Sub2 = if O2 =:= ?none -> Sub1;
+ true -> t_subtract(Sub1, t_opaque_structure(O2))
+ end,
+ t_sup(O, Sub2).
+
+-spec subtract_union([erl_type()], [erl_type()], non_neg_integer(), [erl_type()]) -> erl_type().
+
+subtract_union([T1|Left1], [T2|Left2], N, Acc) ->
+ case t_subtract(T1, T2) of
+ ?none -> subtract_union(Left1, Left2, N, [?none|Acc]);
+ T -> subtract_union(Left1, Left2, N+1, [T|Acc])
+ end;
+subtract_union([], [], 0, _Acc) ->
+ ?none;
+subtract_union([], [], 1, Acc) ->
+ [T] = [X || X <- Acc, X =/= ?none],
+ T;
+subtract_union([], [], N, Acc) when is_integer(N), N > 1 ->
+ ?union(lists:reverse(Acc)).
+
+replace_nontrivial_element(El1, El2) ->
+ replace_nontrivial_element(El1, El2, []).
+
+replace_nontrivial_element([T1|Left1], [?none|Left2], Acc) ->
+ replace_nontrivial_element(Left1, Left2, [T1|Acc]);
+replace_nontrivial_element([_|Left1], [T2|_], Acc) ->
+ lists:reverse(Acc) ++ [T2|Left1].
+
+subtract_bin(?bitstr(U1, B1), ?bitstr(U1, B1)) ->
+ ?none;
+subtract_bin(?bitstr(U1, B1), ?none) ->
+ t_bitstr(U1, B1);
+subtract_bin(?bitstr(U1, B1), ?bitstr(0, B1)) ->
+ t_bitstr(U1, B1+U1);
+subtract_bin(?bitstr(U1, B1), ?bitstr(U1, B2)) ->
+ if (B1+U1) =/= B2 -> t_bitstr(0, B1);
+ true -> t_bitstr(U1, B1)
+ end;
+subtract_bin(?bitstr(U1, B1), ?bitstr(U2, B2)) ->
+ if (2 * U1) =:= U2 ->
+ if B1 =:= B2 ->
+ t_bitstr(U2, B1+U1);
+ (B1 + U1) =:= B2 ->
+ t_bitstr(U2, B1);
+ true ->
+ t_bitstr(U1, B1)
+ end;
+ true ->
+ t_bitstr(U1, B1)
+ end.
+
+%%-----------------------------------------------------------------------------
+%% Relations
+%%
+
+-spec t_is_equal(erl_type(), erl_type()) -> boolean().
+
+t_is_equal(T, T) -> true;
+t_is_equal(_, _) -> false.
+
+-spec t_is_subtype(erl_type(), erl_type()) -> boolean().
+
+t_is_subtype(T1, T2) ->
+ Inf = t_inf(T1, T2),
+ subtype_is_equal(T1, Inf).
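+
+%% For example (an illustrative sketch; t_list/1 is the usual list
+%% constructor exported by this module):
+%%   t_is_subtype(t_atom(a), t_atom())           %% -> true
+%%   t_is_subtype(t_atom(), t_atom(a))           %% -> false
+%%   t_is_subtype(t_nil(), t_list(t_integer()))  %% -> true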
+
+%% The subtype relation has to behave correctly irrespective of opaque
+%% types.
+subtype_is_equal(T, T) -> true;
+subtype_is_equal(T1, T2) ->
+ t_is_equal(case t_contains_opaque(T1) of
+ true -> t_unopaque(T1);
+ false -> T1
+ end,
+ case t_contains_opaque(T2) of
+ true -> t_unopaque(T2);
+ false -> T2
+ end).
+
+-spec t_is_instance(erl_type(), erl_type()) -> boolean().
+
+%% XXX. To be removed.
+t_is_instance(ConcreteType, Type) ->
+ t_is_subtype(ConcreteType, t_unopaque(Type)).
+
+-spec t_do_overlap(erl_type(), erl_type()) -> boolean().
+
+t_do_overlap(TypeA, TypeB) ->
+ not (t_is_none_or_unit(t_inf(TypeA, TypeB))).
+
+-spec t_unopaque(erl_type()) -> erl_type().
+
+t_unopaque(T) ->
+ t_unopaque(T, 'universe').
+
+-spec t_unopaque(erl_type(), opaques()) -> erl_type().
+
+t_unopaque(?opaque(_) = T, Opaques) ->
+ case Opaques =:= 'universe' orelse is_opaque_type(T, Opaques) of
+ true -> t_unopaque(t_opaque_structure(T), Opaques);
+ false -> T
+ end;
+t_unopaque(?list(ElemT, Termination, Sz), Opaques) ->
+ ?list(t_unopaque(ElemT, Opaques), t_unopaque(Termination, Opaques), Sz);
+t_unopaque(?tuple(?any, _, _) = T, _) -> T;
+t_unopaque(?tuple(ArgTs, Sz, Tag), Opaques) when is_list(ArgTs) ->
+ NewArgTs = [t_unopaque(A, Opaques) || A <- ArgTs],
+ ?tuple(NewArgTs, Sz, Tag);
+t_unopaque(?tuple_set(Set), Opaques) ->
+ NewSet = [{Sz, [t_unopaque(T, Opaques) || T <- Tuples]}
+ || {Sz, Tuples} <- Set],
+ ?tuple_set(NewSet);
+t_unopaque(?product(Types), Opaques) ->
+ ?product([t_unopaque(T, Opaques) || T <- Types]);
+t_unopaque(?function(Domain, Range), Opaques) ->
+ ?function(t_unopaque(Domain, Opaques), t_unopaque(Range, Opaques));
+t_unopaque(?union([A,B,F,I,L,N,T,M,O,Map]), Opaques) ->
+ UL = t_unopaque(L, Opaques),
+ UT = t_unopaque(T, Opaques),
+ UF = t_unopaque(F, Opaques),
+ UM = t_unopaque(M, Opaques),
+ UMap = t_unopaque(Map, Opaques),
+ {OF,UO} = case t_unopaque(O, Opaques) of
+ ?opaque(_) = O1 -> {O1, []};
+ Type -> {?none, [Type]}
+ end,
+ t_sup([?union([A,B,UF,I,UL,N,UT,UM,OF,UMap])|UO]);
+t_unopaque(?map(Pairs,DefK,DefV), Opaques) ->
+ t_map([{K, MNess, t_unopaque(V, Opaques)} || {K, MNess, V} <- Pairs],
+ t_unopaque(DefK, Opaques),
+ t_unopaque(DefV, Opaques));
+t_unopaque(T, _) ->
+ T.
+
+%%-----------------------------------------------------------------------------
+%% K-depth abstraction.
+%%
+%% t_limit/2 is the exported function, which checks the type of the
+%% second argument and calls the module local t_limit_k/2 function.
+%%
+
+-spec t_limit(erl_type(), integer()) -> erl_type().
+
+t_limit(Term, K) when is_integer(K) ->
+ t_limit_k(Term, K).
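+
+%% For example (an illustrative sketch; results shown via t_to_string/1):
+%%   t_limit(t_tuple([t_tuple([t_atom(a)])]), 2)   %% => "{{_}}"
+%%   t_limit(t_atom(a), 0)                         %% => "any()"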
+
+t_limit_k(_, K) when K =< 0 -> ?any;
+t_limit_k(?tuple(?any, ?any, ?any) = T, _K) -> T;
+t_limit_k(?tuple(Elements, Arity, _), K) ->
+ if K =:= 1 -> t_tuple(Arity);
+ true -> t_tuple([t_limit_k(E, K-1) || E <- Elements])
+ end;
+t_limit_k(?tuple_set(_) = T, K) ->
+ t_sup([t_limit_k(Tuple, K) || Tuple <- t_tuple_subtypes(T)]);
+t_limit_k(?list(Elements, Termination, Size), K) ->
+ NewTermination =
+ if K =:= 1 ->
+ %% We do not want to lose the termination information.
+ t_limit_k(Termination, K);
+ true -> t_limit_k(Termination, K - 1)
+ end,
+ NewElements = t_limit_k(Elements, K - 1),
+ TmpList = t_cons(NewElements, NewTermination),
+ case Size of
+ ?nonempty_qual -> TmpList;
+ ?unknown_qual ->
+ ?list(NewElements1, NewTermination1, _) = TmpList,
+ ?list(NewElements1, NewTermination1, ?unknown_qual)
+ end;
+t_limit_k(?function(Domain, Range), K) ->
+ %% The domain is either a product or any() so we do not decrease the K.
+ ?function(t_limit_k(Domain, K), t_limit_k(Range, K-1));
+t_limit_k(?product(Elements), K) ->
+ ?product([t_limit_k(X, K - 1) || X <- Elements]);
+t_limit_k(?union(Elements), K) ->
+ ?union([t_limit_k(X, K) || X <- Elements]);
+t_limit_k(?opaque(Es), K) ->
+ List = [begin
+ NewS = t_limit_k(S, K),
+ Opaque#opaque{struct = NewS}
+ end || #opaque{struct = S} = Opaque <- set_to_list(Es)],
+ ?opaque(ordsets:from_list(List));
+t_limit_k(?map(Pairs0, DefK0, DefV0), K) ->
+ Fun = fun({EK, MNess, EV}, {Exact, DefK1, DefV1}) ->
+ LV = t_limit_k(EV, K - 1),
+ case t_limit_k(EK, K - 1) of
+ EK -> {[{EK,MNess,LV}|Exact], DefK1, DefV1};
+ LK -> {Exact, t_sup(LK, DefK1), t_sup(LV, DefV1)}
+ end
+ end,
+ {Pairs, DefK2, DefV2} = lists:foldr(Fun, {[], DefK0, DefV0}, Pairs0),
+ t_map(Pairs, t_limit_k(DefK2, K - 1), t_limit_k(DefV2, K - 1));
+t_limit_k(T, _K) -> T.
+
+%%============================================================================
+%%
+%% Abstract records. Used for comparing contracts.
+%%
+%%============================================================================
+
+-spec t_abstract_records(erl_type(), type_table()) -> erl_type().
+
+t_abstract_records(?list(Contents, Termination, Size), RecDict) ->
+ case t_abstract_records(Contents, RecDict) of
+ ?none -> ?none;
+ NewContents ->
+ %% Be careful here to make the termination collapse if necessary.
+ case t_abstract_records(Termination, RecDict) of
+ ?nil -> ?list(NewContents, ?nil, Size);
+ ?any -> ?list(NewContents, ?any, Size);
+ Other ->
+ ?list(NewContents2, NewTermination, _) = t_cons(NewContents, Other),
+ ?list(NewContents2, NewTermination, Size)
+ end
+ end;
+t_abstract_records(?function(Domain, Range), RecDict) ->
+ ?function(t_abstract_records(Domain, RecDict),
+ t_abstract_records(Range, RecDict));
+t_abstract_records(?product(Types), RecDict) ->
+ ?product([t_abstract_records(T, RecDict) || T <- Types]);
+t_abstract_records(?union(Types), RecDict) ->
+ t_sup([t_abstract_records(T, RecDict) || T <- Types]);
+t_abstract_records(?tuple(?any, ?any, ?any) = T, _RecDict) ->
+ T;
+t_abstract_records(?tuple(Elements, Arity, ?atom(_) = Tag), RecDict) ->
+ [TagAtom] = atom_vals(Tag),
+ case lookup_record(TagAtom, Arity - 1, RecDict) of
+ error -> t_tuple([t_abstract_records(E, RecDict) || E <- Elements]);
+ {ok, Fields} -> t_tuple([Tag|[T || {_Name, _Abstr, T} <- Fields]])
+ end;
+t_abstract_records(?tuple(Elements, _Arity, _Tag), RecDict) ->
+ t_tuple([t_abstract_records(E, RecDict) || E <- Elements]);
+t_abstract_records(?tuple_set(_) = Tuples, RecDict) ->
+ t_sup([t_abstract_records(T, RecDict) || T <- t_tuple_subtypes(Tuples)]);
+t_abstract_records(?opaque(_)=Type, RecDict) ->
+ t_abstract_records(t_opaque_structure(Type), RecDict);
+t_abstract_records(T, _RecDict) ->
+ T.
+
+%% Map over types. Depth first. Used by the contract checker. ?list is
+%% not fully implemented so take care when changing the type in Termination.
+
+-spec t_map(fun((erl_type()) -> erl_type()), erl_type()) -> erl_type().
+
+t_map(Fun, ?list(Contents, Termination, Size)) ->
+ Fun(?list(t_map(Fun, Contents), t_map(Fun, Termination), Size));
+t_map(Fun, ?function(Domain, Range)) ->
+ Fun(?function(t_map(Fun, Domain), t_map(Fun, Range)));
+t_map(Fun, ?product(Types)) ->
+ Fun(?product([t_map(Fun, T) || T <- Types]));
+t_map(Fun, ?union(Types)) ->
+ Fun(t_sup([t_map(Fun, T) || T <- Types]));
+t_map(Fun, ?tuple(?any, ?any, ?any) = T) ->
+ Fun(T);
+t_map(Fun, ?tuple(Elements, _Arity, _Tag)) ->
+ Fun(t_tuple([t_map(Fun, E) || E <- Elements]));
+t_map(Fun, ?tuple_set(_) = Tuples) ->
+ Fun(t_sup([t_map(Fun, T) || T <- t_tuple_subtypes(Tuples)]));
+t_map(Fun, ?opaque(Set)) ->
+ L = [Opaque#opaque{struct = NewS} ||
+ #opaque{struct = S} = Opaque <- set_to_list(Set),
+ not t_is_none(NewS = t_map(Fun, S))],
+ Fun(case L of
+ [] -> ?none;
+ _ -> ?opaque(ordsets:from_list(L))
+ end);
+t_map(Fun, ?map(Pairs,DefK,DefV)) ->
+ %% TODO:
+ Fun(t_map(Pairs, Fun(DefK), Fun(DefV)));
+t_map(Fun, T) ->
+ Fun(T).
+
+%%=============================================================================
+%%
+%% Prettyprinter
+%%
+%%=============================================================================
+
+-spec t_to_string(erl_type()) -> string().
+
+t_to_string(T) ->
+ t_to_string(T, dict:new()).
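+
+%% For example (an illustrative sketch):
+%%   t_to_string(t_sup(t_atom(true), t_atom(false)))  %% -> "boolean()"
+%%   t_to_string(t_bitstr(8, 0))                      %% -> "binary()"
+%%   t_to_string(t_from_range(1, 100))                %% -> "1..100"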
+
+-spec t_to_string(erl_type(), type_table()) -> string().
+
+t_to_string(?any, _RecDict) ->
+ "any()";
+t_to_string(?none, _RecDict) ->
+ "none()";
+t_to_string(?unit, _RecDict) ->
+ "no_return()";
+t_to_string(?atom(?any), _RecDict) ->
+ "atom()";
+t_to_string(?atom(Set), _RecDict) ->
+ case set_size(Set) of
+ 2 ->
+ case set_is_element(true, Set) andalso set_is_element(false, Set) of
+ true -> "boolean()";
+ false -> set_to_string(Set)
+ end;
+ _ ->
+ set_to_string(Set)
+ end;
+t_to_string(?bitstr(0, 0), _RecDict) ->
+ "<<>>";
+t_to_string(?bitstr(8, 0), _RecDict) ->
+ "binary()";
+t_to_string(?bitstr(1, 0), _RecDict) ->
+ "bitstring()";
+t_to_string(?bitstr(0, B), _RecDict) ->
+ flat_format("<<_:~w>>", [B]);
+t_to_string(?bitstr(U, 0), _RecDict) ->
+ flat_format("<<_:_*~w>>", [U]);
+t_to_string(?bitstr(U, B), _RecDict) ->
+ flat_format("<<_:~w,_:_*~w>>", [B, U]);
+t_to_string(?function(?any, ?any), _RecDict) ->
+ "fun()";
+t_to_string(?function(?any, Range), RecDict) ->
+ "fun((...) -> " ++ t_to_string(Range, RecDict) ++ ")";
+t_to_string(?function(?product(ArgList), Range), RecDict) ->
+ "fun((" ++ comma_sequence(ArgList, RecDict) ++ ") -> "
+ ++ t_to_string(Range, RecDict) ++ ")";
+t_to_string(?identifier(Set), _RecDict) ->
+ case Set of
+ ?any -> "identifier()";
+ _ ->
+ string:join([flat_format("~w()", [T]) || T <- set_to_list(Set)], " | ")
+ end;
+t_to_string(?opaque(Set), RecDict) ->
+ string:join([opaque_type(Mod, Name, Args, S, RecDict) ||
+ #opaque{mod = Mod, name = Name, struct = S, args = Args}
+ <- set_to_list(Set)],
+ " | ");
+t_to_string(?matchstate(Pres, Slots), RecDict) ->
+ flat_format("ms(~s,~s)", [t_to_string(Pres, RecDict),
+ t_to_string(Slots,RecDict)]);
+t_to_string(?nil, _RecDict) ->
+ "[]";
+t_to_string(?nonempty_list(Contents, Termination), RecDict) ->
+ ContentString = t_to_string(Contents, RecDict),
+ case Termination of
+ ?nil ->
+ case Contents of
+ ?char -> "nonempty_string()";
+ _ -> "["++ContentString++",...]"
+ end;
+ ?any ->
+ %% Just a safety check.
+ case Contents =:= ?any of
+ true -> ok;
+ false ->
+ %% XXX. See comment below.
+ %% erlang:error({illegal_list, ?nonempty_list(Contents, Termination)})
+ ok
+ end,
+ "nonempty_maybe_improper_list()";
+ _ ->
+ case t_is_subtype(t_nil(), Termination) of
+ true ->
+ "nonempty_maybe_improper_list("++ContentString++","
+ ++t_to_string(Termination, RecDict)++")";
+ false ->
+ "nonempty_improper_list("++ContentString++","
+ ++t_to_string(Termination, RecDict)++")"
+ end
+ end;
+t_to_string(?list(Contents, Termination, ?unknown_qual), RecDict) ->
+ ContentString = t_to_string(Contents, RecDict),
+ case Termination of
+ ?nil ->
+ case Contents of
+ ?char -> "string()";
+ _ -> "["++ContentString++"]"
+ end;
+ ?any ->
+ %% Just a safety check.
+ %% XXX. Types such as "maybe_improper_list(integer(), any())"
+ %% are OK, but cannot be printed!?
+ case Contents =:= ?any of
+ true -> ok;
+ false ->
+ ok
+ %% L = ?list(Contents, Termination, ?unknown_qual),
+ %% erlang:error({illegal_list, L})
+ end,
+ "maybe_improper_list()";
+ _ ->
+ case t_is_subtype(t_nil(), Termination) of
+ true ->
+ "maybe_improper_list("++ContentString++","
+ ++t_to_string(Termination, RecDict)++")";
+ false ->
+ "improper_list("++ContentString++","
+ ++t_to_string(Termination, RecDict)++")"
+ end
+ end;
+t_to_string(?int_set(Set), _RecDict) ->
+ set_to_string(Set);
+t_to_string(?byte, _RecDict) -> "byte()";
+t_to_string(?char, _RecDict) -> "char()";
+t_to_string(?integer_pos, _RecDict) -> "pos_integer()";
+t_to_string(?integer_non_neg, _RecDict) -> "non_neg_integer()";
+t_to_string(?integer_neg, _RecDict) -> "neg_integer()";
+t_to_string(?int_range(From, To), _RecDict) ->
+ flat_format("~w..~w", [From, To]);
+t_to_string(?integer(?any), _RecDict) -> "integer()";
+t_to_string(?float, _RecDict) -> "float()";
+t_to_string(?number(?any, ?unknown_qual), _RecDict) -> "number()";
+t_to_string(?product(List), RecDict) ->
+ "<" ++ comma_sequence(List, RecDict) ++ ">";
+t_to_string(?map([],?any,?any), _RecDict) -> "map()";
+t_to_string(?map(Pairs0,DefK,DefV), RecDict) ->
+ {Pairs, ExtraEl} =
+ case {DefK, DefV} of
+ {?none, ?none} -> {Pairs0, []};
+ _ -> {Pairs0 ++ [{DefK,?opt,DefV}], []}
+ end,
+ Tos = fun(T) -> case T of
+ ?any -> "_";
+ _ -> t_to_string(T, RecDict)
+ end end,
+ StrMand = [{Tos(K),Tos(V)}||{K,?mand,V}<-Pairs],
+ StrOpt = [{Tos(K),Tos(V)}||{K,?opt,V}<-Pairs],
+ "#{" ++ string:join([K ++ ":=" ++ V||{K,V}<-StrMand]
+ ++ [K ++ "=>" ++ V||{K,V}<-StrOpt]
+ ++ ExtraEl, ", ") ++ "}";
+t_to_string(?tuple(?any, ?any, ?any), _RecDict) -> "tuple()";
+t_to_string(?tuple(Elements, _Arity, ?any), RecDict) ->
+ "{" ++ comma_sequence(Elements, RecDict) ++ "}";
+t_to_string(?tuple(Elements, Arity, Tag), RecDict) ->
+ [TagAtom] = atom_vals(Tag),
+ case lookup_record(TagAtom, Arity-1, RecDict) of
+ error -> "{" ++ comma_sequence(Elements, RecDict) ++ "}";
+ {ok, FieldNames} ->
+ record_to_string(TagAtom, Elements, FieldNames, RecDict)
+ end;
+t_to_string(?tuple_set(_) = T, RecDict) ->
+ union_sequence(t_tuple_subtypes(T), RecDict);
+t_to_string(?union(Types), RecDict) ->
+ union_sequence([T || T <- Types, T =/= ?none], RecDict);
+t_to_string(?var(Id), _RecDict) when is_atom(Id) ->
+ flat_format("~s", [atom_to_list(Id)]);
+t_to_string(?var(Id), _RecDict) when is_integer(Id) ->
+ flat_format("var(~w)", [Id]).
+
+
+record_to_string(Tag, [_|Fields], FieldNames, RecDict) ->
+ FieldStrings = record_fields_to_string(Fields, FieldNames, RecDict, []),
+ "#" ++ atom_to_string(Tag) ++ "{" ++ string:join(FieldStrings, ",") ++ "}".
+
+record_fields_to_string([F|Fs], [{FName, _Abstr, DefType}|FDefs],
+ RecDict, Acc) ->
+ NewAcc =
+ case
+ t_is_equal(F, t_any()) orelse
+ (t_is_any_atom('undefined', F) andalso
+ not t_is_none(t_inf(F, DefType)))
+ of
+ true -> Acc;
+ false ->
+ StrFV = atom_to_string(FName) ++ "::" ++ t_to_string(F, RecDict),
+ [StrFV|Acc]
+ end,
+ record_fields_to_string(Fs, FDefs, RecDict, NewAcc);
+record_fields_to_string([], [], _RecDict, Acc) ->
+ lists:reverse(Acc).
+
+-spec record_field_diffs_to_string(erl_type(), type_table()) -> string().
+
+record_field_diffs_to_string(?tuple([_|Fs], Arity, Tag), RecDict) ->
+ [TagAtom] = atom_vals(Tag),
+ {ok, FieldNames} = lookup_record(TagAtom, Arity-1, RecDict),
+ %% io:format("RecCElems = ~p\nRecTypes = ~p\n", [Fs, FieldNames]),
+ FieldDiffs = field_diffs(Fs, FieldNames, RecDict, []),
+ string:join(FieldDiffs, " and ").
+
+field_diffs([F|Fs], [{FName, _Abstr, DefType}|FDefs], RecDict, Acc) ->
+ %% Don't care about opacity for now.
+ NewAcc =
+ case not t_is_none(t_inf(F, DefType)) of
+ true -> Acc;
+ false ->
+ Str = atom_to_string(FName) ++ "::" ++ t_to_string(DefType, RecDict),
+ [Str|Acc]
+ end,
+ field_diffs(Fs, FDefs, RecDict, NewAcc);
+field_diffs([], [], _, Acc) ->
+ lists:reverse(Acc).
+
+comma_sequence(Types, RecDict) ->
+ List = [case T =:= ?any of
+ true -> "_";
+ false -> t_to_string(T, RecDict)
+ end || T <- Types],
+ string:join(List, ",").
+
+union_sequence(Types, RecDict) ->
+ List = [t_to_string(T, RecDict) || T <- Types],
+ string:join(List, " | ").
+
+-ifdef(DEBUG).
+opaque_type(Mod, Name, _Args, S, RecDict) ->
+ ArgsString = comma_sequence(_Args, RecDict),
+ String = t_to_string(S, RecDict),
+ opaque_name(Mod, Name, ArgsString) ++ "[" ++ String ++ "]".
+-else.
+opaque_type(Mod, Name, Args, _S, RecDict) ->
+ ArgsString = comma_sequence(Args, RecDict),
+ opaque_name(Mod, Name, ArgsString).
+-endif.
+
+opaque_name(Mod, Name, Extra) ->
+ S = mod_name(Mod, Name),
+ flat_format("~s(~s)", [S, Extra]).
+
+mod_name(Mod, Name) ->
+ flat_format("~w:~w", [Mod, Name]).
+
+%%=============================================================================
+%%
+%% Build a type from parse forms.
+%%
+%%=============================================================================
+
+-type type_names() :: [type_key() | record_key()].
+
+-type mta() :: {module(), atom(), arity()}.
+-type mra() :: {module(), atom(), arity()}.
+-type site() :: {'type', mta()} | {'spec', mfa()} | {'record', mra()}.
+-type cache_key() :: {module(), atom(), expand_depth(),
+ [erl_type()], type_names()}.
+-opaque cache() :: #{cache_key() => {erl_type(), expand_limit()}}.
+
+-spec t_from_form(parse_form(), sets:set(mfa()), site(), mod_records(),
+ var_table(), cache()) -> {erl_type(), cache()}.
+
+t_from_form(Form, ExpTypes, Site, RecDict, VarTab, Cache) ->
+ t_from_form1(Form, ExpTypes, Site, RecDict, VarTab, Cache).
+
+%% Replace external types with none().
+-spec t_from_form_without_remote(parse_form(), site(), type_table()) ->
+ {erl_type(), cache()}.
+
+t_from_form_without_remote(Form, Site, TypeTable) ->
+ Module = site_module(Site),
+ RecDict = dict:from_list([{Module, TypeTable}]),
+ ExpTypes = replace_by_none,
+ VarTab = var_table__new(),
+ Cache = cache__new(),
+ t_from_form1(Form, ExpTypes, Site, RecDict, VarTab, Cache).
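+
+%% For illustration (a rough sketch; module and type names are made up):
+%%
+%%   Form = {type, 0, list, [{type, 0, integer, []}]},   %% "[integer()]"
+%%   Site = {type, {some_mod, some_type, 0}},
+%%   {T, _Cache} = t_from_form_without_remote(Form, Site, dict:new()),
+%%   %% T is equal to t_list(t_integer())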
+
+%% REC_TYPE_LIMIT is used for limiting the depth of recursive types.
+%% EXPAND_LIMIT is used for limiting the size of types by
+%% limiting the number of elements of lists within one type form.
+%% EXPAND_DEPTH is used in conjunction with EXPAND_LIMIT to make the
+%% types balanced (unions will otherwise collapse to any()) by limiting
+%% the depth the same way as t_limit/2 does.
+
+-type expand_limit() :: integer().
+
+-type expand_depth() :: integer().
+
+-record(from_form, {site :: site(),
+ xtypes :: sets:set(mfa()) | 'replace_by_none',
+ mrecs :: mod_records(),
+ vtab :: var_table(),
+ tnames :: type_names()}).
+
+-spec t_from_form1(parse_form(), sets:set(mfa()) | 'replace_by_none',
+ site(), mod_records(), var_table(), cache()) ->
+ {erl_type(), cache()}.
+
+t_from_form1(Form, ET, Site, MR, V, C) ->
+ TypeNames = initial_typenames(Site),
+ State = #from_form{site = Site,
+ xtypes = ET,
+ mrecs = MR,
+ vtab = V,
+ tnames = TypeNames},
+ L = ?EXPAND_LIMIT,
+ {T1, L1, C1} = from_form(Form, State, ?EXPAND_DEPTH, L, C),
+ if
+ L1 =< 0 ->
+ from_form_loop(Form, State, 1, L, C1);
+ true ->
+ {T1, C1}
+ end.
+
+initial_typenames({type, _MTA}=Site) -> [Site];
+initial_typenames({spec, _MFA}) -> [];
+initial_typenames({record, _MRA}) -> [].
+
+from_form_loop(Form, State, D, Limit, C) ->
+ {T1, L1, C1} = from_form(Form, State, D, Limit, C),
+ Delta = Limit - L1,
+ if
+ %% Save some time by assuming next depth will exceed the limit.
+ Delta * 8 > Limit ->
+ {T1, C1};
+ true ->
+ D1 = D + 1,
+ from_form_loop(Form, State, D1, Limit, C1)
+ end.
+
+-spec from_form(parse_form(),
+ #from_form{},
+ expand_depth(),
+ expand_limit(),
+ cache()) -> {erl_type(), expand_limit(), cache()}.
+
+%% If there is something wrong with parse_form(),
+%% throw({error, io_lib:chars()}) is called;
+%% for unknown remote types
+%% self() ! {self(), ext_types, {RemMod, Name, ArgsLen}}
+%% is evaluated, unless 'replace_by_none' is given.
+%%
+%% It is assumed that site_module(S) can be found in MR.
+
+from_form(_, _S, D, L, C) when D =< 0 ; L =< 0 ->
+ {t_any(), L, C};
+from_form({var, _L, '_'}, _S, _D, L, C) ->
+ {t_any(), L, C};
+from_form({var, _L, Name}, S, _D, L, C) ->
+ V = S#from_form.vtab,
+ case maps:find(Name, V) of
+ error -> {t_var(Name), L, C};
+ {ok, Val} -> {Val, L, C}
+ end;
+from_form({ann_type, _L, [_Var, Type]}, S, D, L, C) ->
+ from_form(Type, S, D, L, C);
+from_form({paren_type, _L, [Type]}, S, D, L, C) ->
+ from_form(Type, S, D, L, C);
+from_form({remote_type, _L, [{atom, _, Module}, {atom, _, Type}, Args]},
+ S, D, L, C) ->
+ remote_from_form(Module, Type, Args, S, D, L, C);
+from_form({atom, _L, Atom}, _S, _D, L, C) ->
+ {t_atom(Atom), L, C};
+from_form({integer, _L, Int}, _S, _D, L, C) ->
+ {t_integer(Int), L, C};
+from_form({op, _L, _Op, _Arg} = Op, _S, _D, L, C) ->
+ case erl_eval:partial_eval(Op) of
+ {integer, _, Val} ->
+ {t_integer(Val), L, C};
+ _ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Op])})
+ end;
+from_form({op, _L, _Op, _Arg1, _Arg2} = Op, _S, _D, L, C) ->
+ case erl_eval:partial_eval(Op) of
+ {integer, _, Val} ->
+ {t_integer(Val), L, C};
+ _ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Op])})
+ end;
+from_form({type, _L, any, []}, _S, _D, L, C) ->
+ {t_any(), L, C};
+from_form({type, _L, arity, []}, _S, _D, L, C) ->
+ {t_arity(), L, C};
+from_form({type, _L, atom, []}, _S, _D, L, C) ->
+ {t_atom(), L, C};
+from_form({type, _L, binary, []}, _S, _D, L, C) ->
+ {t_binary(), L, C};
+from_form({type, _L, binary, [Base, Unit]} = Type, _S, _D, L, C) ->
+ case {erl_eval:partial_eval(Base), erl_eval:partial_eval(Unit)} of
+ {{integer, _, B}, {integer, _, U}} when B >= 0, U >= 0 ->
+ {t_bitstr(U, B), L, C};
+ _ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Type])})
+ end;
+from_form({type, _L, bitstring, []}, _S, _D, L, C) ->
+ {t_bitstr(), L, C};
+from_form({type, _L, bool, []}, _S, _D, L, C) ->
+ {t_boolean(), L, C}; % XXX: Temporarily
+from_form({type, _L, boolean, []}, _S, _D, L, C) ->
+ {t_boolean(), L, C};
+from_form({type, _L, byte, []}, _S, _D, L, C) ->
+ {t_byte(), L, C};
+from_form({type, _L, char, []}, _S, _D, L, C) ->
+ {t_char(), L, C};
+from_form({type, _L, float, []}, _S, _D, L, C) ->
+ {t_float(), L, C};
+from_form({type, _L, function, []}, _S, _D, L, C) ->
+ {t_fun(), L, C};
+from_form({type, _L, 'fun', []}, _S, _D, L, C) ->
+ {t_fun(), L, C};
+from_form({type, _L, 'fun', [{type, _, any}, Range]}, S, D, L, C) ->
+ {T, L1, C1} = from_form(Range, S, D - 1, L - 1, C),
+ {t_fun(T), L1, C1};
+from_form({type, _L, 'fun', [{type, _, product, Domain}, Range]},
+ S, D, L, C) ->
+ {Dom1, L1, C1} = list_from_form(Domain, S, D, L, C),
+ {Ran1, L2, C2} = from_form(Range, S, D, L1, C1),
+ {t_fun(Dom1, Ran1), L2, C2};
+from_form({type, _L, identifier, []}, _S, _D, L, C) ->
+ {t_identifier(), L, C};
+from_form({type, _L, integer, []}, _S, _D, L, C) ->
+ {t_integer(), L, C};
+from_form({type, _L, iodata, []}, _S, _D, L, C) ->
+ {t_iodata(), L, C};
+from_form({type, _L, iolist, []}, _S, _D, L, C) ->
+ {t_iolist(), L, C};
+from_form({type, _L, list, []}, _S, _D, L, C) ->
+ {t_list(), L, C};
+from_form({type, _L, list, [Type]}, S, D, L, C) ->
+ {T, L1, C1} = from_form(Type, S, D - 1, L - 1, C),
+ {t_list(T), L1, C1};
+from_form({type, _L, map, any}, S, D, L, C) ->
+ builtin_type(map, t_map(), S, D, L, C);
+from_form({type, _L, map, List}, S, D0, L, C) ->
+ {Pairs1, L5, C5} =
+ fun PairsFromForm(_, L1, C1) when L1 =< 0 -> {[{?any,?opt,?any}], L1, C1};
+ PairsFromForm([], L1, C1) -> {[], L1, C1};
+ PairsFromForm([{type, _, Oper, [KF, VF]}|T], L1, C1) ->
+ D = D0 - 1,
+ {Key, L2, C2} = from_form(KF, S, D, L1, C1),
+ {Val, L3, C3} = from_form(VF, S, D, L2, C2),
+ {Pairs0, L4, C4} = PairsFromForm(T, L3 - 1, C3),
+ case Oper of
+ map_field_assoc -> {[{Key,?opt, Val}|Pairs0], L4, C4};
+ map_field_exact -> {[{Key,?mand,Val}|Pairs0], L4, C4}
+ end
+ end(List, L, C),
+ try
+ {Pairs, DefK, DefV} = map_from_form(Pairs1, [], [], [], ?none, ?none),
+ {t_map(Pairs, DefK, DefV), L5, C5}
+ catch none -> {t_none(), L5, C5}
+ end;
+from_form({type, _L, mfa, []}, _S, _D, L, C) ->
+ {t_mfa(), L, C};
+from_form({type, _L, module, []}, _S, _D, L, C) ->
+ {t_module(), L, C};
+from_form({type, _L, nil, []}, _S, _D, L, C) ->
+ {t_nil(), L, C};
+from_form({type, _L, neg_integer, []}, _S, _D, L, C) ->
+ {t_neg_integer(), L, C};
+from_form({type, _L, non_neg_integer, []}, _S, _D, L, C) ->
+ {t_non_neg_integer(), L, C};
+from_form({type, _L, no_return, []}, _S, _D, L, C) ->
+ {t_unit(), L, C};
+from_form({type, _L, node, []}, _S, _D, L, C) ->
+ {t_node(), L, C};
+from_form({type, _L, none, []}, _S, _D, L, C) ->
+ {t_none(), L, C};
+from_form({type, _L, nonempty_list, []}, _S, _D, L, C) ->
+ {t_nonempty_list(), L, C};
+from_form({type, _L, nonempty_list, [Type]}, S, D, L, C) ->
+ {T, L1, C1} = from_form(Type, S, D, L - 1, C),
+ {t_nonempty_list(T), L1, C1};
+from_form({type, _L, nonempty_improper_list, [Cont, Term]}, S, D, L, C) ->
+ {T1, L1, C1} = from_form(Cont, S, D, L - 1, C),
+ {T2, L2, C2} = from_form(Term, S, D, L1, C1),
+ {t_cons(T1, T2), L2, C2};
+from_form({type, _L, nonempty_maybe_improper_list, []}, _S, _D, L, C) ->
+ {t_cons(?any, ?any), L, C};
+from_form({type, _L, nonempty_maybe_improper_list, [Cont, Term]},
+ S, D, L, C) ->
+ {T1, L1, C1} = from_form(Cont, S, D, L - 1, C),
+ {T2, L2, C2} = from_form(Term, S, D, L1, C1),
+ {t_cons(T1, T2), L2, C2};
+from_form({type, _L, nonempty_string, []}, _S, _D, L, C) ->
+ {t_nonempty_string(), L, C};
+from_form({type, _L, number, []}, _S, _D, L, C) ->
+ {t_number(), L, C};
+from_form({type, _L, pid, []}, _S, _D, L, C) ->
+ {t_pid(), L, C};
+from_form({type, _L, port, []}, _S, _D, L, C) ->
+ {t_port(), L, C};
+from_form({type, _L, pos_integer, []}, _S, _D, L, C) ->
+ {t_pos_integer(), L, C};
+from_form({type, _L, maybe_improper_list, []}, _S, _D, L, C) ->
+ {t_maybe_improper_list(), L, C};
+from_form({type, _L, maybe_improper_list, [Content, Termination]},
+ S, D, L, C) ->
+ {T1, L1, C1} = from_form(Content, S, D, L - 1, C),
+ {T2, L2, C2} = from_form(Termination, S, D, L1, C1),
+ {t_maybe_improper_list(T1, T2), L2, C2};
+from_form({type, _L, product, Elements}, S, D, L, C) ->
+ {Lst, L1, C1} = list_from_form(Elements, S, D - 1, L, C),
+ {t_product(Lst), L1, C1};
+from_form({type, _L, range, [From, To]} = Type, _S, _D, L, C) ->
+ case {erl_eval:partial_eval(From), erl_eval:partial_eval(To)} of
+ {{integer, _, FromVal}, {integer, _, ToVal}} ->
+ {t_from_range(FromVal, ToVal), L, C};
+ _ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Type])})
+ end;
+from_form({type, _L, record, [Name|Fields]}, S, D, L, C) ->
+ record_from_form(Name, Fields, S, D, L, C);
+from_form({type, _L, reference, []}, _S, _D, L, C) ->
+ {t_reference(), L, C};
+from_form({type, _L, string, []}, _S, _D, L, C) ->
+ {t_string(), L, C};
+from_form({type, _L, term, []}, _S, _D, L, C) ->
+ {t_any(), L, C};
+from_form({type, _L, timeout, []}, _S, _D, L, C) ->
+ {t_timeout(), L, C};
+from_form({type, _L, tuple, any}, _S, _D, L, C) ->
+ {t_tuple(), L, C};
+from_form({type, _L, tuple, Args}, S, D, L, C) ->
+ {Lst, L1, C1} = list_from_form(Args, S, D - 1, L, C),
+ {t_tuple(Lst), L1, C1};
+from_form({type, _L, union, Args}, S, D, L, C) ->
+ {Lst, L1, C1} = list_from_form(Args, S, D, L, C),
+ {t_sup(Lst), L1, C1};
+from_form({user_type, _L, Name, Args}, S, D, L, C) ->
+ type_from_form(Name, Args, S, D, L, C);
+from_form({type, _L, Name, Args}, S, D, L, C) ->
+ %% Compatibility: modules compiled before Erlang/OTP 18.0.
+ type_from_form(Name, Args, S, D, L, C);
+from_form({opaque, _L, Name, {Mod, Args, Rep}}, _S, _D, L, C) ->
+ %% XXX. To be removed.
+ {t_opaque(Mod, Name, Args, Rep), L, C}.
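+
+%% For illustration, a declaration such as
+%%
+%%   -type t() :: 'error' | {'ok', integer()}.
+%%
+%% arrives here roughly as the form
+%%
+%%   {type, 0, union, [{atom, 0, error},
+%%                     {type, 0, tuple, [{atom, 0, ok},
+%%                                       {type, 0, integer, []}]}]}
+%%
+%% and is translated clause by clause into
+%% t_sup([t_atom(error), t_tuple([t_atom(ok), t_integer()])]).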
+
+builtin_type(Name, Type, S, D, L, C) ->
+ #from_form{site = Site, mrecs = MR} = S,
+ M = site_module(Site),
+ case dict:find(M, MR) of
+ {ok, R} ->
+ case lookup_type(Name, 0, R) of
+ {_, {{_M, _FL, _F, _A}, _T}} ->
+ type_from_form(Name, [], S, D, L, C);
+ error ->
+ {Type, L, C}
+ end;
+ error ->
+ {Type, L, C}
+ end.
+
+type_from_form(Name, Args, S, D, L, C) ->
+ #from_form{site = Site, mrecs = MR, tnames = TypeNames} = S,
+ ArgsLen = length(Args),
+ Module = site_module(Site),
+ TypeName = {type, {Module, Name, ArgsLen}},
+ case can_unfold_more(TypeName, TypeNames) of
+ true ->
+ {ok, R} = dict:find(Module, MR),
+ type_from_form1(Name, Args, ArgsLen, R, TypeName, TypeNames,
+ S, D, L, C);
+ false ->
+ {t_any(), L, C}
+ end.
+
+type_from_form1(Name, Args, ArgsLen, R, TypeName, TypeNames, S, D, L, C) ->
+ case lookup_type(Name, ArgsLen, R) of
+ {Tag, {{Module, _FileName, Form, ArgNames}, Type}} ->
+ NewTypeNames = [TypeName|TypeNames],
+ S1 = S#from_form{tnames = NewTypeNames},
+ {ArgTypes, L1, C1} = list_from_form(Args, S1, D, L, C),
+ CKey = cache_key(Module, Name, ArgTypes, TypeNames, D),
+ case cache_find(CKey, C) of
+ {CachedType, DeltaL} ->
+ {CachedType, L1 - DeltaL, C};
+ error ->
+ List = lists:zip(ArgNames, ArgTypes),
+ TmpV = maps:from_list(List),
+ S2 = S1#from_form{site = TypeName, vtab = TmpV},
+ Fun = fun(DD, LL) -> from_form(Form, S2, DD, LL, C1) end,
+ {NewType, L3, C3} =
+ case Tag of
+ type ->
+ recur_limit(Fun, D, L1, TypeName, TypeNames);
+ opaque ->
+ {Rep, L2, C2} = recur_limit(Fun, D, L1, TypeName, TypeNames),
+ Rep1 = choose_opaque_type(Rep, Type),
+ Rep2 = case cannot_have_opaque(Rep1, TypeName, TypeNames) of
+ true -> Rep1;
+ false ->
+ ArgTypes2 = subst_all_vars_to_any_list(ArgTypes),
+ t_opaque(Module, Name, ArgTypes2, Rep1)
+ end,
+ {Rep2, L2, C2}
+ end,
+ C4 = cache_put(CKey, NewType, L1 - L3, C3),
+ {NewType, L3, C4}
+ end;
+ error ->
+ Msg = io_lib:format("Unable to find type ~w/~w\n",
+ [Name, ArgsLen]),
+ throw({error, Msg})
+ end.
+
+remote_from_form(RemMod, Name, Args, S, D, L, C) ->
+ #from_form{xtypes = ET, mrecs = MR, tnames = TypeNames} = S,
+ if
+ ET =:= replace_by_none ->
+ {t_none(), L, C};
+ true ->
+ ArgsLen = length(Args),
+ MFA = {RemMod, Name, ArgsLen},
+ case dict:find(RemMod, MR) of
+ error ->
+ self() ! {self(), ext_types, MFA},
+ {t_any(), L, C};
+ {ok, RemDict} ->
+ case sets:is_element(MFA, ET) of
+ true ->
+ RemType = {type, MFA},
+ case can_unfold_more(RemType, TypeNames) of
+ true ->
+ remote_from_form1(RemMod, Name, Args, ArgsLen, RemDict,
+ RemType, TypeNames, S, D, L, C);
+ false ->
+ {t_any(), L, C}
+ end;
+ false ->
+ self() ! {self(), ext_types, {RemMod, Name, ArgsLen}},
+ {t_any(), L, C}
+ end
+ end
+ end.
+
+remote_from_form1(RemMod, Name, Args, ArgsLen, RemDict, RemType, TypeNames,
+ S, D, L, C) ->
+ case lookup_type(Name, ArgsLen, RemDict) of
+ {Tag, {{Mod, _FileLine, Form, ArgNames}, Type}} ->
+ NewTypeNames = [RemType|TypeNames],
+ S1 = S#from_form{tnames = NewTypeNames},
+ {ArgTypes, L1, C1} = list_from_form(Args, S1, D, L, C),
+ CKey = cache_key(RemMod, Name, ArgTypes, TypeNames, D),
+ %% case error of
+ case cache_find(CKey, C) of
+ {CachedType, DeltaL} ->
+ {CachedType, L - DeltaL, C};
+ error ->
+ List = lists:zip(ArgNames, ArgTypes),
+ TmpVarTab = maps:from_list(List),
+ S2 = S1#from_form{site = RemType, vtab = TmpVarTab},
+ Fun = fun(DD, LL) -> from_form(Form, S2, DD, LL, C1) end,
+ {NewType, L3, C3} =
+ case Tag of
+ type ->
+ recur_limit(Fun, D, L1, RemType, TypeNames);
+ opaque ->
+ {NewRep, L2, C2} = recur_limit(Fun, D, L1, RemType, TypeNames),
+ NewRep1 = choose_opaque_type(NewRep, Type),
+ NewRep2 =
+ case cannot_have_opaque(NewRep1, RemType, TypeNames) of
+ true -> NewRep1;
+ false ->
+ ArgTypes2 = subst_all_vars_to_any_list(ArgTypes),
+ t_opaque(Mod, Name, ArgTypes2, NewRep1)
+ end,
+ {NewRep2, L2, C2}
+ end,
+ C4 = cache_put(CKey, NewType, L1 - L3, C3),
+ {NewType, L3, C4}
+ end;
+ error ->
+ Msg = io_lib:format("Unable to find remote type ~w:~w()\n",
+ [RemMod, Name]),
+ throw({error, Msg})
+ end.
+
+subst_all_vars_to_any_list(Types) ->
+ [subst_all_vars_to_any(Type) || Type <- Types].
+
+%% Opaque types (both local and remote) are problematic when it comes
+%% to the limits (TypeNames, D, and L). The reason is that if any() is
+%% substituted for a more specialized subtype of an opaque type, the
+%% property stated along with decorate_with_opaque() (the type has to
+%% be a subtype of the declared type) no longer holds.
+%%
+%% The less-than-perfect remedy: if the opaque type created from a
+%% form is not a subtype of the declared type, the declared type is
+%% used instead, effectively bypassing the limits and potentially
+%% resulting in huge types.
+choose_opaque_type(Type, DeclType) ->
+ case
+ t_is_subtype(subst_all_vars_to_any(Type),
+ subst_all_vars_to_any(DeclType))
+ of
+ true -> Type;
+ false -> DeclType
+ end.
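+
+%% For illustration: if the limits have already widened the translated
+%% representation to any(), then any() is not a subtype of, say, a
+%% declared tuple representation, so the declared type is kept:
+%%
+%%   choose_opaque_type(t_any(), t_tuple([t_atom(foo)]))
+%%     %% -> t_tuple([t_atom(foo)])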
+
+record_from_form({atom, _, Name}, ModFields, S, D0, L0, C) ->
+ #from_form{site = Site, mrecs = MR, tnames = TypeNames} = S,
+ RecordType = {record, Name},
+ case can_unfold_more(RecordType, TypeNames) of
+ true ->
+ M = site_module(Site),
+ {ok, R} = dict:find(M, MR),
+ case lookup_record(Name, R) of
+ {ok, DeclFields} ->
+ NewTypeNames = [RecordType|TypeNames],
+ Site1 = {record, {M, Name, length(DeclFields)}},
+ S1 = S#from_form{site = Site1, tnames = NewTypeNames},
+ Fun = fun(D, L) ->
+ {GetModRec, L1, C1} =
+ get_mod_record(ModFields, DeclFields, S1, D, L, C),
+ case GetModRec of
+ {error, FieldName} ->
+ throw({error,
+ io_lib:format("Illegal declaration of #~w{~w}\n",
+ [Name, FieldName])});
+ {ok, NewFields} ->
+ S2 = S1#from_form{vtab = var_table__new()},
+ {NewFields1, L2, C2} =
+ fields_from_form(NewFields, S2, D, L1, C1),
+ Rec = t_tuple(
+ [t_atom(Name)|[Type
+ || {_FieldName, Type} <- NewFields1]]),
+ {Rec, L2, C2}
+ end
+ end,
+ recur_limit(Fun, D0, L0, RecordType, TypeNames);
+ error ->
+ throw({error, io_lib:format("Unknown record #~w{}\n", [Name])})
+ end;
+ false ->
+ {t_any(), L0, C}
+ end.
+
+get_mod_record([], DeclFields, _S, _D, L, C) ->
+ {{ok, DeclFields}, L, C};
+get_mod_record(ModFields, DeclFields, S, D, L, C) ->
+ DeclFieldsDict = lists:keysort(1, DeclFields),
+ {ModFieldsDict, L1, C1} = build_field_dict(ModFields, S, D, L, C),
+ case get_mod_record_types(DeclFieldsDict, ModFieldsDict, []) of
+ {error, _FieldName} = Error -> {Error, L1, C1};
+ {ok, FinalKeyDict} ->
+ Fields = [lists:keyfind(FieldName, 1, FinalKeyDict)
+ || {FieldName, _, _} <- DeclFields],
+ {{ok, Fields}, L1, C1}
+ end.
+
+build_field_dict(FieldTypes, S, D, L, C) ->
+ build_field_dict(FieldTypes, S, D, L, C, []).
+
+build_field_dict([{type, _, field_type, [{atom, _, Name}, Type]}|Left],
+ S, D, L, C, Acc) ->
+ {T, L1, C1} = from_form(Type, S, D, L - 1, C),
+ NewAcc = [{Name, Type, T}|Acc],
+ build_field_dict(Left, S, D, L1, C1, NewAcc);
+build_field_dict([], _S, _D, L, C, Acc) ->
+ {lists:keysort(1, Acc), L, C}.
+
+get_mod_record_types([{FieldName, _Abstr, _DeclType}|Left1],
+ [{FieldName, TypeForm, ModType}|Left2],
+ Acc) ->
+ get_mod_record_types(Left1, Left2, [{FieldName, TypeForm, ModType}|Acc]);
+get_mod_record_types([{FieldName1, _Abstr, _DeclType} = DT|Left1],
+ [{FieldName2, _FormType, _ModType}|_] = List2,
+ Acc) when FieldName1 < FieldName2 ->
+ get_mod_record_types(Left1, List2, [DT|Acc]);
+get_mod_record_types(Left1, [], Acc) ->
+ {ok, lists:keysort(1, Left1++Acc)};
+get_mod_record_types(_, [{FieldName2, _FormType, _ModType}|_], _Acc) ->
+ {error, FieldName2}.
+
+%% It is important to create a limited version of the record type
+%% since nested record types can otherwise easily result in huge
+%% terms.
+fields_from_form([], _S, _D, L, C) ->
+ {[], L, C};
+fields_from_form([{Name, Abstr, _Type}|Tail], S, D, L, C) ->
+ {T, L1, C1} = from_form(Abstr, S, D, L, C),
+ {F, L2, C2} = fields_from_form(Tail, S, D, L1, C1),
+ {[{Name, T}|F], L2, C2}.
+
+list_from_form([], _S, _D, L, C) ->
+ {[], L, C};
+list_from_form([H|Tail], S, D, L, C) ->
+ {H1, L1, C1} = from_form(H, S, D, L - 1, C),
+ {T1, L2, C2} = list_from_form(Tail, S, D, L1, C1),
+ {[H1|T1], L2, C2}.
+
+%% Sorts, combines non-singleton pairs, and applies precedence and
+%% mandatoriness rules.
+map_from_form([], ShdwPs, MKs, Pairs, DefK, DefV) ->
+ verify_possible(MKs, ShdwPs),
+ {promote_to_mand(MKs, Pairs), DefK, DefV};
+map_from_form([{SKey,MNess,Val}|SPairs], ShdwPs0, MKs0, Pairs0, DefK0, DefV0) ->
+ Key = lists:foldl(fun({K,_},S)->t_subtract(S,K)end, SKey, ShdwPs0),
+ ShdwPs = case Key of ?none -> ShdwPs0; _ -> [{Key,Val}|ShdwPs0] end,
+ MKs = case MNess of ?mand -> [SKey|MKs0]; ?opt -> MKs0 end,
+ if MNess =:= ?mand, SKey =:= ?none -> throw(none);
+ true -> ok
+ end,
+ {Pairs, DefK, DefV} =
+ case is_singleton_type(Key) of
+ true ->
+ MNess1 = case Val =:= ?none of true -> ?opt; false -> MNess end,
+ {mapdict_insert({Key,MNess1,Val}, Pairs0), DefK0, DefV0};
+ false ->
+ case Key =:= ?none orelse Val =:= ?none of
+ true -> {Pairs0, DefK0, DefV0};
+ false -> {Pairs0, t_sup(DefK0, Key), t_sup(DefV0, Val)}
+ end
+ end,
+ map_from_form(SPairs, ShdwPs, MKs, Pairs, DefK, DefV).
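+
+%% For illustration (a rough sketch): for #{a := 1, atom() => binary()}
+%% the pairs arrive here as
+%%   [{t_atom(a), ?mand, t_integer(1)}, {t_atom(), ?opt, t_binary()}]
+%% and the singleton key 'a' is kept as an explicit pair while the
+%% non-singleton atom() key is folded into the defaults, giving roughly
+%%   {[{t_atom(a), ?mand, t_integer(1)}], t_atom(), t_binary()}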
+
+%% Verifies that all mandatory keys are possible, throws 'none' otherwise
+verify_possible(MKs, ShdwPs) ->
+ lists:foreach(fun(M) -> verify_possible_1(M, ShdwPs) end, MKs).
+
+verify_possible_1(M, ShdwPs) ->
+ case lists:any(fun({K,_}) -> t_inf(M, K) =/= ?none end, ShdwPs) of
+ true -> ok;
+ false -> throw(none)
+ end.
+
+-spec promote_to_mand([erl_type()], t_map_dict()) -> t_map_dict().
+
+promote_to_mand(_, []) -> [];
+promote_to_mand(MKs, [E={K,_,V}|T]) ->
+ [case lists:any(fun(M) -> t_is_equal(K,M) end, MKs) of
+ true -> {K, ?mand, V};
+ false -> E
+ end|promote_to_mand(MKs, T)].
+
+-define(RECUR_EXPAND_LIMIT, 10).
+-define(RECUR_EXPAND_DEPTH, 2).
+
+%% If more of the limited resources are spent on the non-recursive
+%% forms, more warnings are found, and the analysis is also a bit
+%% faster.
+%%
+%% Setting REC_TYPE_LIMIT to 1 would also work well.
+
+recur_limit(Fun, D, L, _, _) when L =< ?RECUR_EXPAND_DEPTH,
+ D =< ?RECUR_EXPAND_LIMIT ->
+ Fun(D, L);
+recur_limit(Fun, D, L, TypeName, TypeNames) ->
+ case is_recursive(TypeName, TypeNames) of
+ true ->
+ {T, L1, C1} = Fun(?RECUR_EXPAND_DEPTH, ?RECUR_EXPAND_LIMIT),
+ {T, L - L1, C1};
+ false ->
+ Fun(D, L)
+ end.
+
+-spec t_check_record_fields(parse_form(), sets:set(mfa()), site(),
+ mod_records(), var_table(), cache()) -> cache().
+
+t_check_record_fields(Form, ExpTypes, Site, RecDict, VarTable, Cache) ->
+ State = #from_form{site = Site,
+ xtypes = ExpTypes,
+ mrecs = RecDict,
+ vtab = VarTable,
+ tnames = []},
+ check_record_fields(Form, State, Cache).
+
+-spec check_record_fields(parse_form(), #from_form{}, cache()) -> cache().
+
+%% If there is something wrong with parse_form(),
+%% throw({error, io_lib:chars()}) is called.
+
+check_record_fields({var, _L, _}, _S, C) -> C;
+check_record_fields({ann_type, _L, [_Var, Type]}, S, C) ->
+ check_record_fields(Type, S, C);
+check_record_fields({paren_type, _L, [Type]}, S, C) ->
+ check_record_fields(Type, S, C);
+check_record_fields({remote_type, _L, [{atom, _, _}, {atom, _, _}, Args]},
+ S, C) ->
+ list_check_record_fields(Args, S, C);
+check_record_fields({atom, _L, _}, _S, C) -> C;
+check_record_fields({integer, _L, _}, _S, C) -> C;
+check_record_fields({op, _L, _Op, _Arg}, _S, C) -> C;
+check_record_fields({op, _L, _Op, _Arg1, _Arg2}, _S, C) -> C;
+check_record_fields({type, _L, tuple, any}, _S, C) -> C;
+check_record_fields({type, _L, map, any}, _S, C) -> C;
+check_record_fields({type, _L, binary, [_Base, _Unit]}, _S, C) -> C;
+check_record_fields({type, _L, 'fun', [{type, _, any}, Range]}, S, C) ->
+ check_record_fields(Range, S, C);
+check_record_fields({type, _L, range, [_From, _To]}, _S, C) -> C;
+check_record_fields({type, _L, record, [Name|Fields]}, S, C) ->
+ check_record(Name, Fields, S, C);
+check_record_fields({type, _L, _, Args}, S, C) ->
+ list_check_record_fields(Args, S, C);
+check_record_fields({user_type, _L, _Name, Args}, S, C) ->
+ list_check_record_fields(Args, S, C).
+
+check_record({atom, _, Name}, ModFields, S, C) ->
+ #from_form{site = Site, mrecs = MR} = S,
+ M = site_module(Site),
+ {ok, R} = dict:find(M, MR),
+ {ok, DeclFields} = lookup_record(Name, R),
+ case check_fields(Name, ModFields, DeclFields, S, C) of
+ {error, FieldName} ->
+ throw({error, io_lib:format("Illegal declaration of #~w{~w}\n",
+ [Name, FieldName])});
+ C1 -> C1
+ end.
+
+check_fields(RecName, [{type, _, field_type, [{atom, _, Name}, Abstr]}|Left],
+ DeclFields, S, C) ->
+ #from_form{site = Site0, xtypes = ET, mrecs = MR, vtab = V} = S,
+ M = site_module(Site0),
+ Site = {record, {M, RecName, length(DeclFields)}},
+ {Type, C1} = t_from_form(Abstr, ET, Site, MR, V, C),
+ {Name, _, DeclType} = lists:keyfind(Name, 1, DeclFields),
+ TypeNoVars = subst_all_vars_to_any(Type),
+ case t_is_subtype(TypeNoVars, DeclType) of
+ false -> {error, Name};
+ true -> check_fields(RecName, Left, DeclFields, S, C1)
+ end;
+check_fields(_RecName, [], _Decl, _S, C) ->
+ C.
+
+list_check_record_fields([], _S, C) ->
+ C;
+list_check_record_fields([H|Tail], S, C) ->
+ C1 = check_record_fields(H, S, C),
+ list_check_record_fields(Tail, S, C1).
+
+site_module({_, {Module, _, _}}) ->
+ Module.
+
+-spec cache__new() -> cache().
+
+cache__new() ->
+ maps:new().
+
+-spec cache_key(module(), atom(), [erl_type()],
+ type_names(), expand_depth()) -> cache_key().
+
+%% If TypeNames were left out of the key, the cache would be smaller and
+%% the form-to-type translation faster, but then, for example, any()
+%% could end up being used where a more complex type should be. There is
+%% also a slight risk of creating unnecessarily big types.
+
+cache_key(Module, Name, ArgTypes, TypeNames, D) ->
+ {Module, Name, D, ArgTypes, TypeNames}.
+
+-spec cache_find(cache_key(), cache()) ->
+ {erl_type(), expand_limit()} | 'error'.
+
+cache_find(Key, Cache) ->
+ case maps:find(Key, Cache) of
+ {ok, Value} ->
+ Value;
+ error ->
+ error
+ end.
+
+-spec cache_put(cache_key(), erl_type(), expand_limit(), cache()) -> cache().
+
+cache_put(_Key, _Type, DeltaL, Cache) when DeltaL < 0 ->
+ %% The type is truncated; do not reuse it.
+ Cache;
+cache_put(Key, Type, DeltaL, Cache) ->
+ maps:put(Key, {Type, DeltaL}, Cache).
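+
+%% For illustration (variable names are made up): the cache stores the
+%% translated type together with how much of the expand limit it
+%% consumed, so a later hit can be charged the same cost:
+%%
+%%   C1 = cache_put(CKey, Type, L0 - L1, cache__new()),
+%%   {Type, Delta} = cache_find(CKey, C1),   %% Delta =:= L0 - L1
+%%
+%% Truncated results (negative delta) are never stored.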
+
+-spec t_var_names([erl_type()]) -> [atom()].
+
+t_var_names([{var, _, Name}|L]) when L =/= '_' ->
+ [Name|t_var_names(L)];
+t_var_names([]) ->
+ [].
+
+-spec t_form_to_string(parse_form()) -> string().
+
+t_form_to_string({var, _L, '_'}) -> "_";
+t_form_to_string({var, _L, Name}) -> atom_to_list(Name);
+t_form_to_string({atom, _L, Atom}) ->
+ io_lib:write_string(atom_to_list(Atom), $'); % To quote or not to quote... '
+t_form_to_string({integer, _L, Int}) -> integer_to_list(Int);
+t_form_to_string({op, _L, _Op, _Arg} = Op) ->
+ case erl_eval:partial_eval(Op) of
+ {integer, _, _} = Int -> t_form_to_string(Int);
+ _ -> io_lib:format("Badly formed type ~w", [Op])
+ end;
+t_form_to_string({op, _L, _Op, _Arg1, _Arg2} = Op) ->
+ case erl_eval:partial_eval(Op) of
+ {integer, _, _} = Int -> t_form_to_string(Int);
+ _ -> io_lib:format("Badly formed type ~w", [Op])
+ end;
+t_form_to_string({ann_type, _L, [Var, Type]}) ->
+ t_form_to_string(Var) ++ "::" ++ t_form_to_string(Type);
+t_form_to_string({paren_type, _L, [Type]}) ->
+ flat_format("(~s)", [t_form_to_string(Type)]);
+t_form_to_string({remote_type, _L, [{atom, _, Mod}, {atom, _, Name}, Args]}) ->
+ ArgString = "(" ++ string:join(t_form_to_string_list(Args), ",") ++ ")",
+ flat_format("~w:~w", [Mod, Name]) ++ ArgString;
+t_form_to_string({type, _L, arity, []}) -> "arity()";
+t_form_to_string({type, _L, binary, []}) -> "binary()";
+t_form_to_string({type, _L, binary, [Base, Unit]} = Type) ->
+ case {erl_eval:partial_eval(Base), erl_eval:partial_eval(Unit)} of
+ {{integer, _, B}, {integer, _, U}} ->
+ %% the following mirrors the clauses of t_to_string/2
+ case {U, B} of
+ {0, 0} -> "<<>>";
+ {8, 0} -> "binary()";
+ {1, 0} -> "bitstring()";
+ {0, B} -> flat_format("<<_:~w>>", [B]);
+ {U, 0} -> flat_format("<<_:_*~w>>", [U]);
+ {U, B} -> flat_format("<<_:~w,_:_*~w>>", [B, U])
+ end;
+ _ -> io_lib:format("Badly formed bitstr type ~w", [Type])
+ end;
+t_form_to_string({type, _L, bitstring, []}) -> "bitstring()";
+t_form_to_string({type, _L, 'fun', []}) -> "fun()";
+t_form_to_string({type, _L, 'fun', [{type, _, any}, Range]}) ->
+ "fun(...) -> " ++ t_form_to_string(Range);
+t_form_to_string({type, _L, 'fun', [{type, _, product, Domain}, Range]}) ->
+ "fun((" ++ string:join(t_form_to_string_list(Domain), ",") ++ ") -> "
+ ++ t_form_to_string(Range) ++ ")";
+t_form_to_string({type, _L, iodata, []}) -> "iodata()";
+t_form_to_string({type, _L, iolist, []}) -> "iolist()";
+t_form_to_string({type, _L, list, [Type]}) ->
+ "[" ++ t_form_to_string(Type) ++ "]";
+t_form_to_string({type, _L, map, any}) -> "map()";
+t_form_to_string({type, _L, map, Args}) ->
+ "#{" ++ string:join(t_form_to_string_list(Args), ",") ++ "}";
+t_form_to_string({type, _L, map_field_assoc, [Key, Val]}) ->
+ t_form_to_string(Key) ++ "=>" ++ t_form_to_string(Val);
+t_form_to_string({type, _L, map_field_exact, [Key, Val]}) ->
+ t_form_to_string(Key) ++ ":=" ++ t_form_to_string(Val);
+t_form_to_string({type, _L, mfa, []}) -> "mfa()";
+t_form_to_string({type, _L, module, []}) -> "module()";
+t_form_to_string({type, _L, node, []}) -> "node()";
+t_form_to_string({type, _L, nonempty_list, [Type]}) ->
+ "[" ++ t_form_to_string(Type) ++ ",...]";
+t_form_to_string({type, _L, nonempty_string, []}) -> "nonempty_string()";
+t_form_to_string({type, _L, product, Elements}) ->
+ "<" ++ string:join(t_form_to_string_list(Elements), ",") ++ ">";
+t_form_to_string({type, _L, range, [From, To]} = Type) ->
+ case {erl_eval:partial_eval(From), erl_eval:partial_eval(To)} of
+ {{integer, _, FromVal}, {integer, _, ToVal}} ->
+ flat_format("~w..~w", [FromVal, ToVal]);
+ _ -> flat_format("Badly formed type ~w",[Type])
+ end;
+t_form_to_string({type, _L, record, [{atom, _, Name}]}) ->
+ flat_format("#~w{}", [Name]);
+t_form_to_string({type, _L, record, [{atom, _, Name}|Fields]}) ->
+ FieldString = string:join(t_form_to_string_list(Fields), ","),
+ flat_format("#~w{~s}", [Name, FieldString]);
+t_form_to_string({type, _L, field_type, [{atom, _, Name}, Type]}) ->
+ flat_format("~w::~s", [Name, t_form_to_string(Type)]);
+t_form_to_string({type, _L, term, []}) -> "term()";
+t_form_to_string({type, _L, timeout, []}) -> "timeout()";
+t_form_to_string({type, _L, tuple, any}) -> "tuple()";
+t_form_to_string({type, _L, tuple, Args}) ->
+ "{" ++ string:join(t_form_to_string_list(Args), ",") ++ "}";
+t_form_to_string({type, _L, union, Args}) ->
+ string:join(t_form_to_string_list(Args), " | ");
+t_form_to_string({type, _L, Name, []} = T) ->
+ try
+ M = mod,
+ D0 = dict:new(),
+ MR = dict:from_list([{M, D0}]),
+ Site = {type, {M,Name,0}},
+ V = var_table__new(),
+ C = cache__new(),
+ State = #from_form{site = Site,
+ xtypes = sets:new(),
+ mrecs = MR,
+ vtab = V,
+ tnames = []},
+ {T1, _, _} = from_form(T, State, _Deep=1000, _ALot=1000000, C),
+ t_to_string(T1)
+ catch throw:{error, _} -> atom_to_string(Name) ++ "()"
+ end;
+t_form_to_string({user_type, _L, Name, List}) ->
+ flat_format("~w(~s)",
+ [Name, string:join(t_form_to_string_list(List), ",")]);
+t_form_to_string({type, L, Name, List}) ->
+ %% Compatibility: modules compiled before Erlang/OTP 18.0.
+ t_form_to_string({user_type, L, Name, List}).
+
+t_form_to_string_list(List) ->
+ t_form_to_string_list(List, []).
+
+t_form_to_string_list([H|T], Acc) ->
+ t_form_to_string_list(T, [t_form_to_string(H)|Acc]);
+t_form_to_string_list([], Acc) ->
+ lists:reverse(Acc).
+
+-spec atom_to_string(atom()) -> string().
+
+atom_to_string(Atom) ->
+ flat_format("~w", [Atom]).
+
+%%=============================================================================
+%%
+%% Utilities
+%%
+%%=============================================================================
+
+-spec any_none([erl_type()]) -> boolean().
+
+any_none([?none|_Left]) -> true;
+any_none([_|Left]) -> any_none(Left);
+any_none([]) -> false.
+
+-spec any_none_or_unit([erl_type()]) -> boolean().
+
+any_none_or_unit([?none|_]) -> true;
+any_none_or_unit([?unit|_]) -> true;
+any_none_or_unit([_|Left]) -> any_none_or_unit(Left);
+any_none_or_unit([]) -> false.
+
+-spec is_erl_type(any()) -> boolean().
+
+is_erl_type(?any) -> true;
+is_erl_type(?none) -> true;
+is_erl_type(?unit) -> true;
+is_erl_type(#c{}) -> true;
+is_erl_type(_) -> false.
+
+-spec lookup_record(atom(), type_table()) ->
+ 'error' | {'ok', [{atom(), parse_form(), erl_type()}]}.
+
+lookup_record(Tag, RecDict) when is_atom(Tag) ->
+ case dict:find({record, Tag}, RecDict) of
+ {ok, {_FileLine, [{_Arity, Fields}]}} ->
+ {ok, Fields};
+ {ok, {_FileLine, List}} when is_list(List) ->
+ %% This will have to do, since we do not know which record we
+ %% are looking for.
+ error;
+ error ->
+ error
+ end.
+
+-spec lookup_record(atom(), arity(), type_table()) ->
+ 'error' | {'ok', [{atom(), parse_form(), erl_type()}]}.
+
+lookup_record(Tag, Arity, RecDict) when is_atom(Tag) ->
+ case dict:find({record, Tag}, RecDict) of
+ {ok, {_FileLine, [{Arity, Fields}]}} -> {ok, Fields};
+ {ok, {_FileLine, OrdDict}} -> orddict:find(Arity, OrdDict);
+ error -> error
+ end.
+
+-spec lookup_type(_, _, _) -> {'type' | 'opaque', type_value()} | 'error'.
+lookup_type(Name, Arity, RecDict) ->
+ case dict:find({type, Name, Arity}, RecDict) of
+ error ->
+ case dict:find({opaque, Name, Arity}, RecDict) of
+ error -> error;
+ {ok, Found} -> {opaque, Found}
+ end;
+ {ok, Found} -> {type, Found}
+ end.
+
+-spec type_is_defined('type' | 'opaque', atom(), arity(), type_table()) ->
+ boolean().
+
+type_is_defined(TypeOrOpaque, Name, Arity, RecDict) ->
+ dict:is_key({TypeOrOpaque, Name, Arity}, RecDict).
+
+cannot_have_opaque(Type, TypeName, TypeNames) ->
+ t_is_none(Type) orelse is_recursive(TypeName, TypeNames).
+
+is_recursive(TypeName, TypeNames) ->
+ lists:member(TypeName, TypeNames).
+
+can_unfold_more(TypeName, TypeNames) ->
+ Fun = fun(E, Acc) -> case E of TypeName -> Acc + 1; _ -> Acc end end,
+ lists:foldl(Fun, 0, TypeNames) < ?REC_TYPE_LIMIT.
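+
+%% For illustration: a type name may be unfolded at most ?REC_TYPE_LIMIT
+%% times along one expansion path, e.g. (TN being any type name)
+%%
+%%   false = can_unfold_more(TN, lists:duplicate(?REC_TYPE_LIMIT, TN)).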
+
+-spec do_opaque(erl_type(), opaques(), fun((_) -> T)) -> T.
+
+%% Probably a little faster than calling t_unopaque/2.
+%% Unions that are due to opaque types are unopaqued.
+do_opaque(?opaque(_) = Type, Opaques, Pred) ->
+ case Opaques =:= 'universe' orelse is_opaque_type(Type, Opaques) of
+ true -> do_opaque(t_opaque_structure(Type), Opaques, Pred);
+ false -> Pred(Type)
+ end;
+do_opaque(?union(List) = Type, Opaques, Pred) ->
+ [A,B,F,I,L,N,T,M,O,Map] = List,
+ if O =:= ?none -> Pred(Type);
+ true ->
+ case Opaques =:= 'universe' orelse is_opaque_type(O, Opaques) of
+ true ->
+ S = t_opaque_structure(O),
+ do_opaque(t_sup([A,B,F,I,L,N,T,M,S,Map]), Opaques, Pred);
+ false -> Pred(Type)
+ end
+ end;
+do_opaque(Type, _Opaques, Pred) ->
+ Pred(Type).
+
+%% Map pairs are {Key, Mandatoriness, Value}.
+map_all_values(?map(Pairs,_,DefV)) ->
+ [DefV|[V || {_, _, V} <- Pairs]].
+
+map_all_keys(?map(Pairs,DefK,_)) ->
+ [DefK|[K || {K, _, _} <- Pairs]].
+
+map_all_types(M) ->
+ map_all_keys(M) ++ map_all_values(M).
+
+%% Tests if a type has exactly one possible value.
+-spec t_is_singleton(erl_type()) -> boolean().
+
+t_is_singleton(Type) ->
+ t_is_singleton(Type, 'universe').
+
+-spec t_is_singleton(erl_type(), opaques()) -> boolean().
+
+t_is_singleton(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun is_singleton_type/1).
+
+%% Incomplete; not all representable singleton types are included.
+is_singleton_type(?nil) -> true;
+is_singleton_type(?atom(?any)) -> false;
+is_singleton_type(?atom(Set)) ->
+ ordsets:size(Set) =:= 1;
+is_singleton_type(?int_range(V, V)) -> true;
+is_singleton_type(?int_set(Set)) ->
+ ordsets:size(Set) =:= 1;
+is_singleton_type(?tuple(Types, Arity, _)) when is_integer(Arity) ->
+ lists:all(fun is_singleton_type/1, Types);
+is_singleton_type(?tuple_set([{Arity, [OnlyTuple]}])) when is_integer(Arity) ->
+ is_singleton_type(OnlyTuple);
+is_singleton_type(?map(Pairs, ?none, ?none)) ->
+ lists:all(fun({_,MNess,V}) -> MNess =:= ?mand andalso is_singleton_type(V)
+ end, Pairs);
+is_singleton_type(_) ->
+ false.
+
+%% Returns the only possible value of a singleton type.
+-spec t_singleton_to_term(erl_type(), opaques()) -> term().
+
+t_singleton_to_term(Type, Opaques) ->
+ do_opaque(Type, Opaques, fun singleton_type_to_term/1).
+
+singleton_type_to_term(?nil) -> [];
+singleton_type_to_term(?atom(Set)) when Set =/= ?any ->
+ case ordsets:size(Set) of
+ 1 -> hd(ordsets:to_list(Set));
+ _ -> error(badarg)
+ end;
+singleton_type_to_term(?int_range(V, V)) -> V;
+singleton_type_to_term(?int_set(Set)) ->
+ case ordsets:size(Set) of
+ 1 -> hd(ordsets:to_list(Set));
+ _ -> error(badarg)
+ end;
+singleton_type_to_term(?tuple(Types, Arity, _)) when is_integer(Arity) ->
+ list_to_tuple(lists:map(fun singleton_type_to_term/1, Types));
+singleton_type_to_term(?tuple_set([{Arity, [OnlyTuple]}]))
+ when is_integer(Arity) ->
+ singleton_type_to_term(OnlyTuple);
+singleton_type_to_term(?map(Pairs, ?none, ?none)) ->
+ maps:from_list([{singleton_type_to_term(K), singleton_type_to_term(V)}
+ || {K,?mand,V} <- Pairs]).
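+
+%% For illustration:
+%%
+%%   true  = t_is_singleton(t_atom(flag)),
+%%   flag  = t_singleton_to_term(t_atom(flag), 'universe'),
+%%   7     = t_singleton_to_term(t_integer(7), 'universe'),
+%%   false = t_is_singleton(t_boolean()).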
+
+%% -----------------------------------
+%% Set
+%%
+
+set_singleton(Element) ->
+ ordsets:from_list([Element]).
+
+set_is_singleton(Element, Set) ->
+ set_singleton(Element) =:= Set.
+
+set_is_element(Element, Set) ->
+ ordsets:is_element(Element, Set).
+
+set_union(?any, _) -> ?any;
+set_union(_, ?any) -> ?any;
+set_union(S1, S2) ->
+ case ordsets:union(S1, S2) of
+ S when length(S) =< ?SET_LIMIT -> S;
+ _ -> ?any
+ end.
+
+%% The intersection and subtraction can return ?none.
+%% This should always be handled right away since ?none is not a valid set.
+%% However, ?any is considered a valid set.
+
+set_intersection(?any, S) -> S;
+set_intersection(S, ?any) -> S;
+set_intersection(S1, S2) ->
+ case ordsets:intersection(S1, S2) of
+ [] -> ?none;
+ S -> S
+ end.
+
+set_subtract(_, ?any) -> ?none;
+set_subtract(?any, _) -> ?any;
+set_subtract(S1, S2) ->
+ case ordsets:subtract(S1, S2) of
+ [] -> ?none;
+ S -> S
+ end.
+
+set_from_list(List) ->
+ case length(List) of
+ L when L =< ?SET_LIMIT -> ordsets:from_list(List);
+ L when L > ?SET_LIMIT -> ?any
+ end.
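+
+%% For illustration: once a set would grow beyond ?SET_LIMIT elements it
+%% is widened to ?any, which is why large unions of constants print as
+%% atom(), integer(), and so on:
+%%
+%%   ?any = set_from_list(lists:seq(1, ?SET_LIMIT + 1)).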
+
+set_to_list(Set) ->
+ ordsets:to_list(Set).
+
+set_filter(Fun, Set) ->
+ case ordsets:filter(Fun, Set) of
+ [] -> ?none;
+ NewSet -> NewSet
+ end.
+
+set_size(Set) ->
+ ordsets:size(Set).
+
+set_to_string(Set) ->
+ L = [case is_atom(X) of
+ true -> io_lib:write_string(atom_to_list(X), $'); % stupid emacs '
+ false -> flat_format("~w", [X])
+ end || X <- set_to_list(Set)],
+ string:join(L, " | ").
+
+set_min([H|_]) -> H.
+
+set_max(Set) ->
+ hd(lists:reverse(Set)).
+
+flat_format(F, S) ->
+ lists:flatten(io_lib:format(F, S)).
+
+%%=============================================================================
+%%
+%% Utilities for the binary type
+%%
+%%=============================================================================
+
+-spec gcd(integer(), integer()) -> integer().
+
+gcd(A, B) when B > A ->
+ gcd1(B, A);
+gcd(A, B) ->
+ gcd1(A, B).
+
+-spec gcd1(integer(), integer()) -> integer().
+
+gcd1(A, 0) -> A;
+gcd1(A, B) ->
+ case A rem B of
+ 0 -> B;
+ X -> gcd1(B, X)
+ end.
+
+-spec bitstr_concat(erl_type(), erl_type()) -> erl_type().
+
+bitstr_concat(?none, _) -> ?none;
+bitstr_concat(_, ?none) -> ?none;
+bitstr_concat(?bitstr(U1, B1), ?bitstr(U2, B2)) ->
+ t_bitstr(gcd(U1, U2), B1+B2).
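+
+%% For illustration: concatenating a binary() (i.e. <<_:_*8>>) with a
+%% segment of type <<_:4,_:_*12>> keeps gcd(8, 12) = 4 as the unit and
+%% adds the bases, so the result is <<_:4,_:_*4>>:
+%%
+%%   ?bitstr(4, 4) = bitstr_concat(t_bitstr(8, 0), t_bitstr(12, 4)).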
+
+-spec bitstr_match(erl_type(), erl_type()) -> erl_type().
+
+bitstr_match(?none, _) -> ?none;
+bitstr_match(_, ?none) -> ?none;
+bitstr_match(?bitstr(0, B1), ?bitstr(0, B2)) when B1 =< B2 ->
+ t_bitstr(0, B2-B1);
+bitstr_match(?bitstr(0, _B1), ?bitstr(0, _B2)) ->
+ ?none;
+bitstr_match(?bitstr(0, B1), ?bitstr(U2, B2)) when B1 =< B2 ->
+ t_bitstr(U2, B2-B1);
+bitstr_match(?bitstr(0, B1), ?bitstr(U2, B2)) ->
+ t_bitstr(U2, handle_base(U2, B2-B1));
+bitstr_match(?bitstr(_, B1), ?bitstr(0, B2)) when B1 > B2 ->
+ ?none;
+bitstr_match(?bitstr(U1, B1), ?bitstr(U2, B2)) ->
+ GCD = gcd(U1, U2),
+ t_bitstr(GCD, handle_base(GCD, B2-B1)).
+
+-spec handle_base(integer(), integer()) -> integer().
+
+handle_base(Unit, Pos) when Pos >= 0 ->
+ Pos rem Unit;
+handle_base(Unit, Neg) ->
+ (Unit+(Neg rem Unit)) rem Unit.
+
+family(L) ->
+ R = sofs:relation(L),
+ F = sofs:relation_to_family(R),
+ sofs:to_external(F).
+
+%%=============================================================================
+%%
+%% Interface functions for abstract data types defined in this module
+%%
+%%=============================================================================
+
+-spec var_table__new() -> var_table().
+
+var_table__new() ->
+ maps:new().
+
+%%=============================================================================
+%% Consistency-testing function(s) below
+%%=============================================================================
+
+-ifdef(DO_ERL_TYPES_TEST).
+
+test() ->
+ Atom1 = t_atom(),
+ Atom2 = t_atom(foo),
+ Atom3 = t_atom(bar),
+ true = t_is_atom(Atom2),
+
+ True = t_atom(true),
+ False = t_atom(false),
+ Bool = t_boolean(),
+ true = t_is_boolean(True),
+ true = t_is_boolean(Bool),
+ false = t_is_boolean(Atom1),
+
+ Binary = t_binary(),
+ true = t_is_binary(Binary),
+
+ Bitstr = t_bitstr(),
+ true = t_is_bitstr(Bitstr),
+
+ Bitstr1 = t_bitstr(7, 3),
+ true = t_is_bitstr(Bitstr1),
+ false = t_is_binary(Bitstr1),
+
+ Bitstr2 = t_bitstr(16, 8),
+ true = t_is_bitstr(Bitstr2),
+ true = t_is_binary(Bitstr2),
+
+ ?bitstr(8, 16) = t_subtract(t_bitstr(4, 12), t_bitstr(8, 12)),
+ ?bitstr(8, 16) = t_subtract(t_bitstr(4, 12), t_bitstr(8, 12)),
+
+ Int1 = t_integer(),
+ Int2 = t_integer(1),
+ Int3 = t_integer(16#ffffffff),
+ true = t_is_integer(Int2),
+ true = t_is_byte(Int2),
+ false = t_is_byte(Int3),
+ false = t_is_byte(t_from_range(-1, 1)),
+ true = t_is_byte(t_from_range(1, ?MAX_BYTE)),
+
+ Tuple1 = t_tuple(),
+ Tuple2 = t_tuple(3),
+ Tuple3 = t_tuple([Atom1, Int1]),
+ Tuple4 = t_tuple([Tuple1, Tuple2]),
+ Tuple5 = t_tuple([Tuple3, Tuple4]),
+ Tuple6 = t_limit(Tuple5, 2),
+ Tuple7 = t_limit(Tuple5, 3),
+ true = t_is_tuple(Tuple1),
+
+ Port = t_port(),
+ Pid = t_pid(),
+ Ref = t_reference(),
+ Identifier = t_identifier(),
+ false = t_is_reference(Port),
+ true = t_is_identifier(Port),
+
+ Function1 = t_fun(),
+ Function2 = t_fun(Pid),
+ Function3 = t_fun([], Pid),
+ Function4 = t_fun([Port, Pid], Pid),
+ Function5 = t_fun([Pid, Atom1], Int2),
+ true = t_is_fun(Function3),
+
+ List1 = t_list(),
+ List2 = t_list(t_boolean()),
+ List3 = t_cons(t_boolean(), List2),
+ List4 = t_cons(t_boolean(), t_atom()),
+ List5 = t_cons(t_boolean(), t_nil()),
+ List6 = t_cons_tl(List5),
+ List7 = t_sup(List4, List5),
+ List8 = t_inf(List7, t_list()),
+ List9 = t_cons(),
+ List10 = t_cons_tl(List9),
+ true = t_is_boolean(t_cons_hd(List5)),
+ true = t_is_list(List5),
+ false = t_is_list(List4),
+
+ Product1 = t_product([Atom1, Atom2]),
+ Product2 = t_product([Atom3, Atom1]),
+ Product3 = t_product([Atom3, Atom2]),
+
+ Union1 = t_sup(Atom2, Atom3),
+ Union2 = t_sup(Tuple2, Tuple3),
+ Union3 = t_sup(Int2, Atom3),
+ Union4 = t_sup(Port, Pid),
+ Union5 = t_sup(Union4, Int1),
+ Union6 = t_sup(Function1, Function2),
+ Union7 = t_sup(Function4, Function5),
+ Union8 = t_sup(True, False),
+ true = t_is_boolean(Union8),
+ Union9 = t_sup(Int2, t_integer(2)),
+ true = t_is_byte(Union9),
+ Union10 = t_sup(t_tuple([t_atom(true), ?any]),
+ t_tuple([t_atom(false), ?any])),
+
+ ?any = t_sup(Product3, Function5),
+
+ Atom3 = t_inf(Union3, Atom1),
+ Union2 = t_inf(Union2, Tuple1),
+ Int2 = t_inf(Int1, Union3),
+ Union4 = t_inf(Union4, Identifier),
+ Port = t_inf(Union5, Port),
+ Function4 = t_inf(Union7, Function4),
+ ?none = t_inf(Product2, Atom1),
+ Product3 = t_inf(Product1, Product2),
+ Function5 = t_inf(Union7, Function5),
+ true = t_is_byte(t_inf(Union9, t_number())),
+ true = t_is_char(t_inf(Union9, t_number())),
+
+ io:format("3? ~p ~n", [?int_set([3])]),
+
+ RecDict = dict:store({foo, 2}, [bar, baz], dict:new()),
+ Record1 = t_from_term({foo, [1,2], {1,2,3}}),
+
+ Types = [
+ Atom1,
+ Atom2,
+ Atom3,
+ Binary,
+ Int1,
+ Int2,
+ Tuple1,
+ Tuple2,
+ Tuple3,
+ Tuple4,
+ Tuple5,
+ Tuple6,
+ Tuple7,
+ Ref,
+ Port,
+ Pid,
+ Identifier,
+ List1,
+ List2,
+ List3,
+ List4,
+ List5,
+ List6,
+ List7,
+ List8,
+ List9,
+ List10,
+ Function1,
+ Function2,
+ Function3,
+ Function4,
+ Function5,
+ Product1,
+ Product2,
+ Record1,
+ Union1,
+ Union2,
+ Union3,
+ Union4,
+ Union5,
+ Union6,
+ Union7,
+ Union8,
+ Union10,
+ t_inf(Union10, t_tuple([t_atom(true), t_integer()]))
+ ],
+ io:format("~p\n", [[t_to_string(X, RecDict) || X <- Types]]).
+
+-endif.
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/simple/exact_api.erl b/lib/dialyzer/test/opaque_SUITE_data/src/simple/exact_api.erl
index c19330eb30..597460ce77 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/src/simple/exact_api.erl
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/simple/exact_api.erl
@@ -52,7 +52,7 @@ exact_api_set_type(#exact_api{}=E) -> E.
-record(exact_adt, {}).
exact_adt_test(X) ->
- #exact_adt{} = exact_adt:exact_adt_set_type(X). % breaks the opaqueness
+ #exact_adt{} = exact_adt:exact_adt_set_type(X). % breaks the opacity
exact_adt_new(A) ->
A = #exact_adt{},
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/simple/is_rec.erl b/lib/dialyzer/test/opaque_SUITE_data/src/simple/is_rec.erl
index 2b157483bc..b906431b44 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/src/simple/is_rec.erl
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/simple/is_rec.erl
@@ -16,11 +16,11 @@ ri11() ->
ri13() ->
A = simple1_adt:d1(),
- if is_record(A, r) -> true end. % breaks the opaqueness
+ if is_record(A, r) -> true end. % breaks the opacity
ri14() ->
A = simple1_adt:d1(),
- if is_record({A, 1}, r) -> true end. % breaks the opaqueness
+ if is_record({A, 1}, r) -> true end. % breaks the opacity
-type '1-3-t'() :: 1..3.
@@ -54,7 +54,7 @@ ri5() ->
ri6() ->
A = simple1_adt:d1(),
- if is_record(A, r) -> true end. % breaks opaqueness
+ if is_record(A, r) -> true end. % breaks opacity
ri7() ->
A = simple1_adt:d1(),
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/simple/rec_api.erl b/lib/dialyzer/test/opaque_SUITE_data/src/simple/rec_api.erl
index fb6d59d263..59b9e0fec4 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/src/simple/rec_api.erl
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/simple/rec_api.erl
@@ -30,7 +30,7 @@ t3() ->
adt_t1() ->
R = rec_adt:r1(),
- {r1, a} = R. % breaks the opaqueness
+ {r1, a} = R. % breaks the opacity
-spec adt_t1(rec_adt:r1()) -> rec_adt:r1(). % invalid type spec
@@ -82,7 +82,7 @@ f() ->
r_adt() ->
{{r, rec_adt:f(), 2},
- #r{f = rec_adt:f(), o = 2}}. % breaks the opaqueness
+ #r{f = rec_adt:f(), o = 2}}. % breaks the opacity
-record(r2, % like #r1{}, but with initial value
{f1 = a :: a()}).
@@ -110,7 +110,7 @@ u3() ->
v1() ->
A = #r3{f1 = queue:new()},
- {r3, a} = A. % breaks the opaqueness
+ {r3, a} = A. % breaks the opacity
v2() ->
A = {r3, 10},
@@ -120,4 +120,4 @@ v2() ->
v3() ->
A = {r3, 10},
- #r3{f1 = 10} = A. % breaks the opaqueness
+ #r3{f1 = 10} = A. % breaks the opacity
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/simple/simple1_api.erl b/lib/dialyzer/test/opaque_SUITE_data/src/simple/simple1_api.erl
index 7db1100597..d67aa913d8 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/src/simple/simple1_api.erl
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/simple/simple1_api.erl
@@ -194,7 +194,7 @@ tt1() ->
tt2() ->
A = simple1_adt:d1(),
- is_integer(A). % breaks the opaqueness
+ is_integer(A). % breaks the opacity
%% Comparison with integers
@@ -262,11 +262,11 @@ f2() ->
adt_f1() ->
T = simple1_adt:n1(),
- if is_function(T) -> ok end. % breaks the opaqueness
+ if is_function(T) -> ok end. % breaks the opacity
adt_f2() ->
T = simple1_adt:n1(),
- is_function(T). % breaks the opaqueness
+ is_function(T). % breaks the opacity
f3() ->
A = i1(),
@@ -281,12 +281,12 @@ f4() ->
adt_f3() ->
A = simple1_adt:i1(),
T = simple1_adt:n1(),
- if is_function(T, A) -> ok end. % breaks the opaqueness
+ if is_function(T, A) -> ok end. % breaks the opacity
adt_f4() ->
A = simple1_adt:i1(),
T = simple1_adt:n1(),
- is_function(T, A). % breaks the opaqueness
+ is_function(T, A). % breaks the opacity
adt_f4_a() ->
A = simple1_adt:i1(),
@@ -297,7 +297,7 @@ adt_f4_a() ->
adt_f4_b() ->
A = i1(),
T = simple1_adt:n1(),
- is_function(T, A). % breaks the opaqueness
+ is_function(T, A). % breaks the opacity
%% A few Boolean examples
@@ -404,7 +404,7 @@ bit_t1() ->
bit_adt_t1() ->
A = simple1_adt:i1(),
- <<100:(A)>>. % breaks the opaqueness
+ <<100:(A)>>. % breaks the opacity
bit_t3(A) ->
B = i1(),
@@ -415,14 +415,14 @@ bit_t3(A) ->
bit_adt_t2() ->
A = simple1_adt:i1(),
case <<"hej">> of
- <<_:A>> -> ok % breaks the opaqueness (but the message is strange)
+ <<_:A>> -> ok % breaks the opacity (but the message is strange)
end.
bit_adt_t3(A) ->
B = simple1_adt:i1(),
case none:none() of
- <<A: % breaks the opaqueness (the message is less than perfect)
+ <<A: % breaks the opacity (the message is less than perfect)
B>> -> 1
end.
@@ -445,7 +445,7 @@ bit_t4(A) ->
bit_adt_t4(A) ->
Sz = simple1_adt:i1(),
case A of
- <<_:Sz>> -> 1 % breaks the opaqueness
+ <<_:Sz>> -> 1 % breaks the opacity
end.
bit_t5() ->
@@ -457,7 +457,7 @@ bit_t5() ->
bit_adt_t5() ->
A = simple1_adt:bit1(),
case A of
- <<_/binary>> -> 1 % breaks the opaqueness
+ <<_/binary>> -> 1 % breaks the opacity
end.
-opaque bit1() :: binary().
@@ -475,7 +475,7 @@ call_f(A) ->
call_f_adt(A) ->
A = simple1_adt:a(),
- foo:A(A). % breaks the opaqueness
+ foo:A(A). % breaks the opacity
call_m(A) ->
A = a(),
@@ -483,7 +483,7 @@ call_m(A) ->
call_m_adt(A) ->
A = simple1_adt:a(),
- A:foo(A). % breaks the opaqueness
+ A:foo(A). % breaks the opacity
-opaque a() :: atom().
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/timer/timer_use.erl b/lib/dialyzer/test/opaque_SUITE_data/src/timer/timer_use.erl
index 9c8ea0af1c..ed6810634f 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/src/timer/timer_use.erl
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/timer/timer_use.erl
@@ -1,8 +1,8 @@
%%---------------------------------------------------------------------------
%% A test case with:
%% - a genuine matching error -- 1st branch
-%% - a violation of the opaqueness of timer:tref() -- 2nd branch
-%% - a subtle violation of the opaqueness of timer:tref() -- 3rd branch
+%% - a violation of the opacity of timer:tref() -- 2nd branch
+%% - a subtle violation of the opacity of timer:tref() -- 3rd branch
%% The test is supposed to check that these cases are treated properly.
%%---------------------------------------------------------------------------
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning1.erl b/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning1.erl
new file mode 100644
index 0000000000..094138e72b
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning1.erl
@@ -0,0 +1,18 @@
+-module(weird_warning1).
+-export([public_func/0]).
+
+-record(a, {
+ d = dict:new() :: dict:dict()
+ }).
+
+-record(b, {
+ q = queue:new() :: queue:queue()
+ }).
+
+public_func() ->
+ add_element(#b{}, my_key, my_value).
+
+add_element(#a{d = Dict}, Key, Value) ->
+ dict:store(Key, Value, Dict);
+add_element(#b{q = Queue}, Key, Value) ->
+ queue:in({Key, Value}, Queue).
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning2.erl b/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning2.erl
new file mode 100644
index 0000000000..4e4512157b
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning2.erl
@@ -0,0 +1,14 @@
+-module(weird_warning2).
+-export([public_func/0]).
+
+-record(a, {d = dict:new() :: dict:dict()}).
+
+-record(b, {q = queue:new() :: queue:queue()}).
+
+public_func() ->
+ add_element(#a{}, my_key, my_value).
+
+add_element(#a{d = Dict}, Key, Value) ->
+ dict:store(Key, Value, Dict);
+add_element(#b{q = Queue}, Key, Value) ->
+ queue:in({Key, Value}, Queue).
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning3.erl b/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning3.erl
new file mode 100644
index 0000000000..b70ca645cb
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning3.erl
@@ -0,0 +1,19 @@
+-module(weird_warning3).
+-export([public_func/0]).
+
+-record(a, {
+ d = dict:new() :: dict:dict()
+ }).
+
+-record(b, {
+ q = queue:new() :: queue:queue()
+ }).
+
+public_func() ->
+ %% Notice that t_to_string() will create "#a{d::queue:queue(_)}".
+ add_element({a, queue:new()}, my_key, my_value).
+
+add_element(#a{d = Dict}, Key, Value) ->
+ dict:store(Key, Value, Dict);
+add_element(#b{q = Queue}, Key, Value) ->
+ queue:in({Key, Value}, Queue).
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/wings/wings_util.erl b/lib/dialyzer/test/opaque_SUITE_data/src/wings/wings_util.erl
index ca6bc0ab4a..6b825d85fe 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/src/wings/wings_util.erl
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/wings/wings_util.erl
@@ -14,12 +14,12 @@
rel2fam(Rel) ->
sofs:to_external(sofs:relation_to_family(sofs:relation(Rel))).
-%% a definition that does not violate the opaqueness of gb_trees:tree()
+%% a definition that does not violate the opacity of gb_trees:tree()
gb_trees_smallest_key(Tree) ->
{Key, _V} = gb_trees:smallest(Tree),
Key.
-%% a definition that violates the opaqueness of gb_trees:tree()
+%% a definition that violates the opacity of gb_trees:tree()
gb_trees_largest_key({_, Tree}) ->
largest_key1(Tree).
diff --git a/lib/dialyzer/test/options1_SUITE_data/results/compiler b/lib/dialyzer/test/options1_SUITE_data/results/compiler
index 30b6f4814a..cbb5115c91 100644
--- a/lib/dialyzer/test/options1_SUITE_data/results/compiler
+++ b/lib/dialyzer/test/options1_SUITE_data/results/compiler
@@ -31,6 +31,8 @@ cerl_inline.erl:2756: The pattern <{F, _L, D}, Vs> can never match the type <[1.
compile.erl:788: The pattern {'error', Es} can never match the type {'ok',<<_:64,_:_*8>>}
core_lint.erl:473: The pattern <{'c_atom', _, 'all'}, 'binary', _Def, St> can never match the type <_,#c_nil{} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple',_,_} | #c_cons{hd::#c_nil{} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple',_,_} | #c_cons{hd::{_,_} | {_,_,_} | {_,_,_,_},tl::{_,_} | {_,_,_} | {_,_,_,_}},tl::#c_nil{} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple',_,_} | #c_cons{hd::{_,_} | {_,_,_} | {_,_,_,_},tl::{_,_} | {_,_,_} | {_,_,_,_}}},[any()],_>
core_lint.erl:505: The pattern <_Req, 'unknown', St> can never match the type <non_neg_integer(),non_neg_integer(),_>
+sys_pre_expand.erl:625: Call to missing or unexported function erlang:hash/2
v3_codegen.erl:1569: The call v3_codegen:load_reg_1(V::any(),I::0,Rs::any(),pos_integer()) will never return since it differs in the 4th argument from the success typing arguments: (any(),0,maybe_improper_list(),0)
v3_codegen.erl:1571: The call v3_codegen:load_reg_1(V::any(),I::0,[],pos_integer()) will never return since it differs in the 4th argument from the success typing arguments: (any(),0,maybe_improper_list(),0)
v3_core.erl:646: Matching of pattern {'iprimop', _, _, _} tagged with a record name violates the declared type of #c_nil{anno::[any(),...]} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple' | 'c_var' | 'ibinary' | 'icatch' | 'ireceive1',[any(),...] | {_,_,_,_},_} | #c_cons{anno::[any(),...]} | #c_fname{anno::[any(),...]} | #iletrec{anno::{_,_,_,_},defs::[any(),...],body::[any(),...]} | #icase{anno::{_,_,_,_},args::[any()],clauses::[any()],fc::{_,_,_,_,_,_}} | #ireceive2{anno::{_,_,_,_},clauses::[any()],action::[any()]} | #ifun{anno::{_,_,_,_},id::[any(),...],vars::[any()],clauses::[any(),...],fc::{_,_,_,_,_,_}} | #imatch{anno::{_,_,_,_},guard::[],fc::{_,_,_,_,_,_}} | #itry{anno::{_,_,_,_},args::[any()],vars::[any(),...],body::[any(),...],evars::[any(),...],handler::[any(),...]}
+v3_kernel.erl:1381: Call to missing or unexported function erlang:hash/2
diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl
index 0108f91b7f..cf2cbe8e2b 100644
--- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl
+++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl
@@ -565,7 +565,7 @@ resolve_inst({make_fun2,Args},_,_,Lbls,Lambdas) ->
[OldIndex] = resolve_args(Args),
{value,{OldIndex,{F,A,_Lbl,_Index,NumFree,OldUniq}}} =
lists:keysearch(OldIndex, 1, Lambdas),
- [{_,{M,_,_}}|_] = Lbls, % Slighly kludgy.
+ [{_,{M,_,_}}|_] = Lbls, % Slightly kludgy.
{make_fun2,{M,F,A},OldIndex,OldUniq,NumFree};
resolve_inst(Instr, Imports, Str, Lbls, _Lambdas) ->
resolve_inst(Instr, Imports, Str, Lbls).
diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl
index 95d2076ccf..8fca202b8c 100644
--- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl
+++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl
@@ -951,7 +951,7 @@ i_letrec(Es, B, Xs, Ctxt, Ren, Env, S) ->
%% Finally, we create new letrec-bindings for any and all
%% residualised definitions. All referenced functions should have
- %% been visited; the call to `visit' below is expected to retreive a
+ %% been visited; the call to `visit' below is expected to retrieve a
%% cached expression.
Rs1 = keep_referenced(Rs, S4),
{Es1, S5} = mapfoldl(fun (R, S) ->
@@ -997,7 +997,7 @@ i_apply(E, Ctxt, Ren, Env, S) ->
%% location could be recycled after the flag has been tested, but
%% there is no real advantage to that, because in practice, only
%% 4-5% of all created store locations will ever be reused, while
- %% there will be a noticable overhead for managing the free list.)
+ %% there will be a noticeable overhead for managing the free list.)
case st__get_app_inlined(L, S3) of
true ->
%% The application was inlined, so we have the final
@@ -2007,7 +2007,7 @@ residualize_operand(Opnd, E, S) ->
case st__get_opnd_effect(Opnd#opnd.loc, S) of
true ->
%% The operand has not been visited, so we do that now, but
- %% in `effect' context. (Waddell's algoritm does some stuff
+ %% in `effect' context. (Waddell's algorithm does some stuff
%% here to account specially for the operand size, which
%% appears unnecessary.)
{E1, S1} = i(Opnd#opnd.expr, effect, Opnd#opnd.ren,
diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl
index 01c2512397..76ae871aee 100644
--- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl
+++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl
@@ -469,7 +469,7 @@ get(Key, Env) ->
-define(MINIMUM_RANGE, 1000).
-define(START_RANGE_FACTOR, 50).
-define(MAX_RETRIES, 2). % retries before enlarging range
--define(ENLARGE_FACTOR, 10). % range enlargment factor
+-define(ENLARGE_FACTOR, 10). % range enlargement factor
-ifdef(DEBUG).
%% If you want to use these process dictionary counters, make sure to
diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl
index 49a95a95e5..69139cd568 100644
--- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl
+++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl
@@ -316,7 +316,7 @@ record_test_in_guard(Line, Term, Name, Vs, St) ->
%% code bloat.)
%% (4) Xref may be run on the abstract code, so the name in the
%% abstract code must be erlang:is_record/3.
- %% (5) To achive both (3) and (4) at the same time, set the name
+ %% (5) To achieve both (3) and (4) at the same time, set the name
%% here to erlang:is_record/3, but mark it as compiler-generated.
%% The v3_core pass will change the name to erlang:internal_is_record/3.
Fs = record_fields(Name, St),
diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl
index 33a322b466..acb49b5faf 100644
--- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl
+++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl
@@ -1667,7 +1667,7 @@ bs_function({function,Name,Arity,CLabel,Asm0}=Func) ->
%%%
%%% Pass 1: Found out which bs_restore's that are needed. For now we assume
-%%% that a bs_restore is needed unless it is directly preceeded by a bs_save.
+%%% that a bs_restore is needed unless it is directly preceded by a bs_save.
%%%
bs_needed([{bs_save,Name},{bs_restore,Name}|T], N, _BsUsed, Dict) ->
diff --git a/lib/dialyzer/test/options2_SUITE_data/dialyzer_options b/lib/dialyzer/test/options2_SUITE_data/dialyzer_options
index 5db2e50d23..be57e2de72 100644
--- a/lib/dialyzer/test/options2_SUITE_data/dialyzer_options
+++ b/lib/dialyzer/test/options2_SUITE_data/dialyzer_options
@@ -1 +1 @@
-{dialyzer_options, [{defines, [{'vsn', 4}]}, {warnings, [no_return]}]}.
+{dialyzer_options, [{defines, [{'vsn', 4}]}, {warnings, [unknown, no_return]}]}.
diff --git a/lib/dialyzer/test/options2_SUITE_data/results/unknown_type b/lib/dialyzer/test/options2_SUITE_data/results/unknown_type
new file mode 100644
index 0000000000..d308c5a810
--- /dev/null
+++ b/lib/dialyzer/test/options2_SUITE_data/results/unknown_type
@@ -0,0 +1,2 @@
+
+:0: Unknown type unknown:type/0 \ No newline at end of file
diff --git a/lib/dialyzer/test/options2_SUITE_data/results/unused_unknown_type b/lib/dialyzer/test/options2_SUITE_data/results/unused_unknown_type
new file mode 100644
index 0000000000..110d896c76
--- /dev/null
+++ b/lib/dialyzer/test/options2_SUITE_data/results/unused_unknown_type
@@ -0,0 +1,2 @@
+
+:0: Unknown type unknown:type1/0:0: Unknown type unknown:type2/0:0: Unknown type unknown:type3/0 \ No newline at end of file
diff --git a/lib/dialyzer/test/options2_SUITE_data/src/unknown_type.erl b/lib/dialyzer/test/options2_SUITE_data/src/unknown_type.erl
new file mode 100644
index 0000000000..7a891c0831
--- /dev/null
+++ b/lib/dialyzer/test/options2_SUITE_data/src/unknown_type.erl
@@ -0,0 +1,7 @@
+-module(unknown_type).
+
+-export([t/0]).
+
+-spec t() -> unknown:type().
+t() ->
+ a.
diff --git a/lib/dialyzer/test/options2_SUITE_data/src/unused_unknown_type.erl b/lib/dialyzer/test/options2_SUITE_data/src/unused_unknown_type.erl
new file mode 100644
index 0000000000..90df7d528a
--- /dev/null
+++ b/lib/dialyzer/test/options2_SUITE_data/src/unused_unknown_type.erl
@@ -0,0 +1,10 @@
+-module(unused_unknown_type).
+
+-export_type([unused/0]).
+
+-type unused() :: unknown:type1().
+
+-record(unused_rec, {a :: unknown:type2()}).
+
+-record(rec, {a}).
+-type unused_rec() :: #rec{a :: unknown:type3()}.
diff --git a/lib/dialyzer/test/plt_SUITE.erl b/lib/dialyzer/test/plt_SUITE.erl
index ecbac14e5d..ebe79b2a6d 100644
--- a/lib/dialyzer/test/plt_SUITE.erl
+++ b/lib/dialyzer/test/plt_SUITE.erl
@@ -7,12 +7,16 @@
-include("dialyzer_test_constants.hrl").
-export([suite/0, all/0, build_plt/1, beam_tests/1, update_plt/1,
- run_plt_check/1, run_succ_typings/1]).
+ local_fun_same_as_callback/1,
+ remove_plt/1, run_plt_check/1, run_succ_typings/1,
+ bad_dialyzer_attr/1, merge_plts/1]).
suite() ->
[{timetrap, ?plt_timeout}].
-all() -> [build_plt, beam_tests, update_plt, run_plt_check, run_succ_typings].
+all() -> [build_plt, beam_tests, update_plt, run_plt_check,
+ remove_plt, run_succ_typings, local_fun_same_as_callback,
+ bad_dialyzer_attr, merge_plts].
build_plt(Config) ->
OutDir = ?config(priv_dir, Config),
@@ -22,6 +26,8 @@ build_plt(Config) ->
end.
beam_tests(Config) when is_list(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ Plt = filename:join(PrivDir, "beam_tests.plt"),
Prog = <<"
-module(no_auto_import).
@@ -38,10 +44,12 @@ beam_tests(Config) when is_list(Config) ->
">>,
Opts = [no_auto_import],
{ok, BeamFile} = compile(Config, Prog, no_auto_import, Opts),
- [] = run_dialyzer(plt_build, [BeamFile], []),
+ [] = run_dialyzer(plt_build, [BeamFile], [{output_plt, Plt}]),
ok.
run_plt_check(Config) when is_list(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ Plt = filename:join(PrivDir, "run_plt_check.plt"),
Mod1 = <<"
-module(run_plt_check1).
">>,
@@ -52,7 +60,7 @@ run_plt_check(Config) when is_list(Config) ->
{ok, BeamFile1} = compile(Config, Mod1, run_plt_check1, []),
{ok, BeamFile2} = compile(Config, Mod2A, run_plt_check2, []),
- [] = run_dialyzer(plt_build, [BeamFile1, BeamFile2], []),
+ [] = run_dialyzer(plt_build, [BeamFile1, BeamFile2], [{output_plt, Plt}]),
Mod2B = <<"
-module(run_plt_check2).
@@ -66,11 +74,13 @@ run_plt_check(Config) when is_list(Config) ->
% callgraph warning as run_plt_check2:call/1 makes a call to unexported
% function run_plt_check1:call/1.
- [_] = run_dialyzer(plt_check, [], []),
+ [_] = run_dialyzer(plt_check, [], [{init_plt, Plt}]),
ok.
run_succ_typings(Config) when is_list(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ Plt = filename:join(PrivDir, "run_succ_typings.plt"),
Mod1A = <<"
-module(run_succ_typings1).
@@ -80,7 +90,7 @@ run_succ_typings(Config) when is_list(Config) ->
">>,
{ok, BeamFile1} = compile(Config, Mod1A, run_succ_typings1, []),
- [] = run_dialyzer(plt_build, [BeamFile1], []),
+ [] = run_dialyzer(plt_build, [BeamFile1], [{output_plt, Plt}]),
Mod1B = <<"
-module(run_succ_typings1).
@@ -103,9 +113,11 @@ run_succ_typings(Config) when is_list(Config) ->
{ok, BeamFile2} = compile(Config, Mod2, run_succ_typings2, []),
% contract types warning as run_succ_typings2:call/0 makes a call to
% run_succ_typings1:call/0, which returns a (not b) in the PLT.
- [_] = run_dialyzer(succ_typings, [BeamFile2], [{check_plt, false}]),
+ [_] = run_dialyzer(succ_typings, [BeamFile2],
+ [{check_plt, false}, {init_plt, Plt}]),
% warning not returned as run_succ_typings1 is updated in the PLT.
- [] = run_dialyzer(succ_typings, [BeamFile2], [{check_plt, true}]),
+ [] = run_dialyzer(succ_typings, [BeamFile2],
+ [{check_plt, true}, {init_plt, Plt}]),
ok.
@@ -158,6 +170,215 @@ update_plt(Config) ->
{init_plt, Plt}] ++ Opts),
ok.
+
+%%% If a behaviour module contains a non-exported function with the same name
+%%% as one of the behaviour's callbacks, the callback info was inadvertently
+%%% deleted from the PLT as the dialyzer_plt:delete_list/2 function was cleaning
+%%% up the callback table. This bug was reported by Brujo Benavides.
+
+local_fun_same_as_callback(Config) when is_list(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ Prog1 =
+ <<"-module(bad_behaviour).
+ -callback bad() -> bad.
+ -export([publicly_bad/0]).
+
+ %% @doc This function is here just to avoid the 'unused' warning for bad/0
+ publicly_bad() -> bad().
+
+ %% @doc This function overlaps with the callback with the same name, and
+ %% that was an issue for dialyzer since it's a private function.
+ bad() -> bad.">>,
+ {ok, Beam} = compile(Config, Prog1, bad_behaviour, []),
+
+ ErlangBeam = case code:where_is_file("erlang.beam") of
+ non_existing ->
+ filename:join([code:root_dir(),
+ "erts", "preloaded", "ebin",
+ "erlang.beam"]);
+ EBeam ->
+ EBeam
+ end,
+ Plt = filename:join(PrivDir, "plt_bad_behaviour.plt"),
+ Opts = [{check_plt, true}, {from, byte_code}],
+ [] = dialyzer:run([{analysis_type, plt_build},
+ {files, [Beam, ErlangBeam]},
+ {output_plt, Plt}] ++ Opts),
+
+ Prog2 =
+ <<"-module(bad_child).
+ -behaviour(bad_behaviour).
+
+ -export([bad/0]).
+
+ %% @doc This function incorrectly implements bad_behaviour.
+ bad() -> not_bad.">>,
+ {ok, TestBeam} = compile(Config, Prog2, bad_child, []),
+
+ [{warn_behaviour, _,
+ {callback_type_mismatch,
+ [bad_behaviour,bad,0,"'not_bad'","'bad'"]}}] =
+ dialyzer:run([{analysis_type, succ_typings},
+ {files, [TestBeam]},
+ {init_plt, Plt}] ++ Opts),
+ ok.
+
+%%% [James Fish:]
+%%% Dialyzer always asserts that files and directories passed in its
+%%% options exist. Therefore it is not possible to remove a beam/module
+%%% from a PLT when the beam file no longer exists. Dialyzer should not
+%%% check that files exist on disk when removing from the PLT.
+remove_plt(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ Prog1 = <<"-module(m1).
+ -export([t/0]).
+ t() ->
+ m2:a(a).">>,
+ {ok, Beam1} = compile(Config, Prog1, m1, []),
+
+ Prog2 = <<"-module(m2).
+ -export([a/1]).
+ a(A) when is_integer(A) -> A.">>,
+ {ok, Beam2} = compile(Config, Prog2, m2, []),
+
+ Plt = filename:join(PrivDir, "remove.plt"),
+ Opts = [{check_plt, true}, {from, byte_code}],
+
+ [{warn_return_no_exit, _, {no_return,[only_normal,t,0]}},
+ {warn_failing_call, _, {call, [m2,a,"('a')",_,_,_,_,_]}}] =
+ dialyzer:run([{analysis_type, plt_build},
+ {files, [Beam1, Beam2]},
+ {get_warnings, true},
+ {output_plt, Plt}] ++ Opts),
+
+ [] = dialyzer:run([{init_plt, Plt},
+ {files, [Beam2]},
+ {analysis_type, plt_remove}]),
+
+ [] = dialyzer:run([{analysis_type, succ_typings},
+ {files, [Beam1]},
+ {init_plt, Plt}] ++ Opts),
+ ok.
+
+%% ERL-283, OTP-13979. As of OTP-14323 this test no longer does what
+%% it is designed to do--the linter stops every attempt to run
+%% Dialyzer's checks on bad dialyzer attributes. For the time
+%% being, the linter's error messages are checked instead. The test
+%% needs to be updated when/if Dialyzer can analyze Core Erlang
+%% without compiling abstract code.
+bad_dialyzer_attr(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ Source = lists:concat([dial, ".erl"]),
+ Filename = filename:join(PrivDir, Source),
+ ok = dialyzer_common:check_plt(PrivDir),
+ PltFilename = dialyzer_common:plt_file(PrivDir),
+ Opts = [{files, [Filename]},
+ {check_plt, false},
+ {from, src_code},
+ {init_plt, PltFilename}],
+
+ Prog1 = <<"-module(dial).
+ -dialyzer({no_return, [undef/0]}).">>,
+ ok = file:write_file(Filename, Prog1),
+ {dialyzer_error,
+ "Analysis failed with error:\n" ++ Str1} =
+ (catch dialyzer:run(Opts)),
+ P1 = string:str(Str1, "dial.erl:2: function undef/0 undefined"),
+ true = P1 > 0,
+
+ Prog2 = <<"-module(dial).
+ -dialyzer({no_return, [{undef,1,2}]}).">>,
+ ok = file:write_file(Filename, Prog2),
+ {dialyzer_error,
+ "Analysis failed with error:\n" ++ Str2} =
+ (catch dialyzer:run(Opts)),
+ P2 = string:str(Str2, "dial.erl:2: badly formed dialyzer "
+ "attribute: {no_return,{undef,1,2}}"),
+ true = P2 > 0,
+
+ ok.
+
+merge_plts(Config) ->
+ %% A few checks of merging PLTs.
+ fun() ->
+ {Mod1, Mod2} = types(),
+ {BeamFiles, Plt1, Plt2} = create_plts(Mod1, Mod2, Config),
+
+ {dialyzer_error,
+ "Could not merge PLTs since they are not disjoint"++_} =
+ (catch run_dialyzer(succ_typings, BeamFiles,
+ [{plts, [Plt1, Plt1]}])),
+ [{warn_contract_types,_,_}] =
+ run_dialyzer(succ_typings, BeamFiles,
+ [{warnings, [unknown]},
+ {plts, [Plt1, Plt2]}])
+ end(),
+
+ fun() ->
+ {Mod1, Mod2} = callbacks(),
+ {BeamFiles, Plt1, Plt2} = create_plts(Mod1, Mod2, Config),
+
+ {dialyzer_error,
+ "Could not merge PLTs since they are not disjoint"++_} =
+ (catch run_dialyzer(succ_typings, BeamFiles,
+ [{plts, [Plt1, Plt1]}])),
+ [] =
+ run_dialyzer(succ_typings, BeamFiles,
+ [{warnings, [unknown]},
+ {plts, [Plt1, Plt2]}])
+ end(),
+
+ ok.
+
+types() ->
+ Mod1 = <<"-module(merge_plts_1).
+ -export([f/0]).
+ -export_type([t/0]).
+ -type t() :: merge_plts_2:t().
+ -spec f() -> t().
+ f() -> 1. % Not an atom().
+ ">>,
+ Mod2 = <<"-module(merge_plts_2).
+ -export_type([t/0]).
+ -type t() :: atom().
+ ">>,
+ {Mod1, Mod2}.
+
+callbacks() -> % A very shallow test.
+ Mod1 = <<"-module(merge_plts_1).
+ -callback t() -> merge_plts_2:t().
+ ">>,
+ Mod2 = <<"-module(merge_plts_2).
+ -export_type([t/0]).
+ -type t() :: atom().
+ ">>,
+ {Mod1, Mod2}.
+
+create_plts(Mod1, Mod2, Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ Plt1 = filename:join(PrivDir, "merge_plts_1.plt"),
+ Plt2 = filename:join(PrivDir, "merge_plts_2.plt"),
+ ErlangBeam = erlang_beam(),
+
+ {ok, BeamFile1} = compile(Config, Mod1, merge_plts_1, []),
+ [] = run_dialyzer(plt_build, [ErlangBeam,BeamFile1], [{output_plt,Plt1}]),
+
+ {ok, BeamFile2} = compile(Config, Mod2, merge_plts_2, []),
+ [] = run_dialyzer(plt_build, [BeamFile2], [{output_plt, Plt2}]),
+ {[BeamFile1, BeamFile2], Plt1, Plt2}.
+
+%% End of merge_plts().
+
+erlang_beam() ->
+ case code:where_is_file("erlang.beam") of
+ non_existing ->
+ filename:join([code:root_dir(),
+ "erts", "preloaded", "ebin",
+ "erlang.beam"]);
+ EBeam ->
+ EBeam
+ end.
+
compile(Config, Prog, Module, CompileOpts) ->
Source = lists:concat([Module, ".erl"]),
PrivDir = ?config(priv_dir,Config),
diff --git a/lib/dialyzer/test/r9c_SUITE_data/results/mnesia b/lib/dialyzer/test/r9c_SUITE_data/results/mnesia
index b73943422a..71acdd9c9e 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/results/mnesia
+++ b/lib/dialyzer/test/r9c_SUITE_data/results/mnesia
@@ -6,7 +6,7 @@ mnesia_bup.erl:111: The created fun has no local return
mnesia_bup.erl:574: Function fallback_receiver/2 has no local return
mnesia_bup.erl:967: Function uninstall_fallback_master/2 has no local return
mnesia_checkpoint.erl:1014: The variable Error can never match since previous clauses completely covered the type {'ok',#checkpoint_args{nodes::[any()],retainers::[any(),...]}}
-mnesia_checkpoint.erl:894: The call sys:handle_system_msg(Msg::any(),From::any(),'no_parent','mnesia_checkpoint',[],Cp::#checkpoint_args{}) breaks the contract (Msg,From,Parent,Module,Debug,Misc) -> no_return() when is_subtype(Msg,term()), is_subtype(From,{pid(),Tag::_}), is_subtype(Parent,pid()), is_subtype(Module,module()), is_subtype(Debug,[dbg_opt()]), is_subtype(Misc,term())
+mnesia_checkpoint.erl:894: The call sys:handle_system_msg(Msg::any(),From::any(),'no_parent','mnesia_checkpoint',[],Cp::#checkpoint_args{}) breaks the contract (Msg,From,Parent,Module,Debug,Misc) -> no_return() when Msg :: term(), From :: {pid(),Tag::_}, Parent :: pid(), Module :: module(), Debug :: [dbg_opt()], Misc :: term()
mnesia_controller.erl:1666: The variable Tab can never match since previous clauses completely covered the type [any()]
mnesia_controller.erl:1679: The pattern {'stop', Reason, Reply, State2} can never match the type {'noreply',_} | {'reply',_,_} | {'stop','shutdown',#state{}}
mnesia_controller.erl:1685: The pattern {'noreply', State2, _Timeout} can never match the type {'reply',_,_}
@@ -17,6 +17,7 @@ mnesia_frag.erl:294: The call mnesia_frag:remote_collect(Ref::reference(),{'erro
mnesia_frag.erl:304: The call mnesia_frag:remote_collect(Ref::reference(),{'error',{'node_not_running',_}},[],OldSelectFun::fun(() -> [any()])) will never return since it differs in the 2nd argument from the success typing arguments: (reference(),'ok',[any()],fun(() -> [any()]))
mnesia_frag.erl:312: The call mnesia_frag:remote_collect(Ref::reference(),LocalRes::{'error',_},[],OldSelectFun::fun(() -> [any()])) will never return since it differs in the 2nd argument from the success typing arguments: (reference(),'ok',[any()],fun(() -> [any()]))
mnesia_frag_hash.erl:24: Callback info about the mnesia_frag_hash behaviour is not available
+mnesia_frag_old_hash.erl:105: Call to missing or unexported function erlang:hash/2
mnesia_frag_old_hash.erl:23: Callback info about the mnesia_frag_hash behaviour is not available
mnesia_index.erl:52: The call mnesia_lib:other_val(Var::{_,'commit_work' | 'index' | 'setorbag' | 'storage_type' | {'index',_}},_ReASoN_::any()) will never return since it differs in the 1st argument from the success typing arguments: ({_,'active_replicas' | 'where_to_read' | 'where_to_write'},any())
mnesia_lib.erl:1028: The pattern {'EXIT', Reason} can never match the type [any()] | {'error',_}
@@ -35,6 +36,6 @@ mnesia_schema.erl:1258: Guard test FromS::'disc_copies' | 'disc_only_copies' | '
mnesia_schema.erl:1639: The pattern {'false', 'mandatory'} can never match the type {'false','optional'}
mnesia_schema.erl:2434: The variable Reason can never match since previous clauses completely covered the type {'error',_} | {'ok',_}
mnesia_schema.erl:451: Guard test UseDirAnyway::'false' == 'true' can never succeed
-mnesia_text.erl:180: The variable T can never match since previous clauses completely covered the type {'error',{integer(),atom() | tuple(),_}} | {'ok',_}
+mnesia_text.erl:180: The variable T can never match since previous clauses completely covered the type {'error',{non_neg_integer(),atom(),_}} | {'ok',_}
mnesia_tm.erl:1522: Function commit_participant/5 has no local return
mnesia_tm.erl:2169: Function system_terminate/4 has no local return
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl
index ed38b2f915..3829479a94 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl
@@ -520,7 +520,7 @@ save_automatic_tagged_types([_M|Ms]) ->
%% remove_in_set_imports/3 :
%% input: list with tuples of each module's imports and module name
%% respectively.
-%% output: one list with same format but each occured import from a
+%% output: one list with same format but each occurred import from a
%% module in the input set (IMNameL) is removed.
remove_in_set_imports([{{imports,ImpL},_ModName}|Rest],InputMNameL,Acc) ->
NewImpL = remove_in_set_imports1(ImpL,InputMNameL,[]),
@@ -1628,7 +1628,7 @@ tlv_tag1(<<1:1,PartialTag:7,Buffer/binary>>,Acc) ->
tlv_tag1(Buffer,(Acc bsl 7) bor PartialTag).
%% reads the content from the configuration file and returns the
-%% selected part choosen by InfoType. Assumes that the config file
+%% selected part chosen by InfoType. Assumes that the config file
%% content is an Erlang term.
read_config_file(ModuleName,InfoType) when atom(InfoType) ->
CfgList = read_config_file(ModuleName),
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl
index c26b8f851b..a4f39bde74 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl
@@ -4028,7 +4028,7 @@ check_sequence(S,Type,Comps) ->
{CRelInf,NewComps2} = componentrelation_leadingattr(S,NewComps),
% io:format("CRelInf: ~p~n",[CRelInf]),
% io:format("NewComps2: ~p~n",[NewComps2]),
- %% CompListWithTblInf has got a lot unecessary info about
+ %% CompListWithTblInf has got a lot unnecessary info about
%% the involved class removed, as the class of the object
%% set.
CompListWithTblInf = get_tableconstraint_info(S,Type,NewComps2),
@@ -4686,7 +4686,7 @@ any_component_relation(_,[],_,_,Acc) ->
%% evaluate_atpath/4 finds out whether the at notation refers to the
%% search level. The list of referenced names in the AtNot list shall
%% begin with a name that exists on the level it refers to. If the
-%% found AtPath is refering to the same sub-branch as the simple table
+%% found AtPath is referring to the same sub-branch as the simple table
%% has, then there shall not be any leading attribute info on this
%% level.
evaluate_atpath(_,[],Cnames,{innermost,AtPath=[Ref|_Refs]}) ->
@@ -4857,7 +4857,7 @@ innertype_comprel1(S,T = #type{def=Def,constraint=Cons,tablecinf=TCI},Path) ->
case Cons of
[{componentrelation,{_,_,ObjectSet},AtList}|_Rest] ->
%% This AtList must have an "outermost" at sign to be
- %% relevent here.
+ %% relevant here.
[{_,AL=[#'Externalvaluereference'{value=_Attr}|_R1]}|_R2]
= AtList,
%% #'ObjectClassFieldType'{class=ClassDef} = Def,
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl
index 392896932a..0b5ea85681 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl
@@ -1259,7 +1259,7 @@ gen_dec_line(Erules,TopType,Cname,CTags,Type,OptOrMand,DecObjInf) ->
end,
case DecObjInf of
{Cname,ObjSet} -> % this must be the component were an object is
- %% choosen from the object set according to the table
+ %% chosen from the object set according to the table
%% constraint.
{[{ObjSet,Cname,asn1ct_gen:mk_var(asn1ct_name:curr(term))}],
PostpDec};
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl
index 9725da4d11..fb9ffb13db 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl
@@ -1096,7 +1096,7 @@ gen_dec_line(Erules,TopType,Cname,CTags,Type,OptOrMand,DecObjInf) ->
end,
case DecObjInf of
{Cname,ObjSet} -> % this must be the component were an object is
- %% choosen from the object set according to the table
+ %% chosen from the object set according to the table
%% constraint.
{[{ObjSet,Cname,asn1ct_gen:mk_var(asn1ct_name:curr(term))}],
PostpDec};
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl
index 5f8c7a0de8..32676b3448 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl
@@ -2721,7 +2721,7 @@ prioritize_error(ErrList) ->
end,
NewErrList),
case SplitErrs of
- {[],UndefPosErrs} -> % if no error with Positon exists
+ {[],UndefPosErrs} -> % if no error with Position exists
lists:last(UndefPosErrs);
{IntPosErrs,_} ->
IntPosReasons = lists:map(fun(X)->element(2,X) end,IntPosErrs),
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl
index 5854f8edbd..8f4d189b5a 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl
@@ -1036,7 +1036,7 @@ decode_real2(Buffer0, Form, Len, RemBytes1) ->
%%
%% bitstring NamedBitList
%% Val can be of:
-%% - [identifiers] where only named identifers are set to one,
+%% - [identifiers] where only named identifiers are set to one,
%% the Constraint must then have some information of the
%% bitlength.
%% - [list of ones and zeroes] all bits
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl
index 0457425445..6e12d36579 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl
@@ -1034,7 +1034,7 @@ decode_real_notag(_Buffer, _Form) ->
%%
%% bitstring NamedBitList
%% Val can be of:
-%% - [identifiers] where only named identifers are set to one,
+%% - [identifiers] where only named identifiers are set to one,
%% the Constraint must then have some information of the
%% bitlength.
%% - [list of ones and zeroes] all bits
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl
index b163aa24ac..97c92a2dd1 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl
@@ -823,7 +823,7 @@ decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% bitstring NamedBitList
%% Val can be of:
-%% - [identifiers] where only named identifers are set to one,
+%% - [identifiers] where only named identifiers are set to one,
%% the Constraint must then have some information of the
%% bitlength.
%% - [list of ones and zeroes] all bits
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl
index 15986cc217..aa2cf5ba88 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl
@@ -1000,7 +1000,7 @@ decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% bitstring NamedBitList
%% Val can be of:
-%% - [identifiers] where only named identifers are set to one,
+%% - [identifiers] where only named identifiers are set to one,
%% the Constraint must then have some information of the
%% bitlength.
%% - [list of ones and zeroes] all bits
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl
index 43d9bef54e..24f7949c21 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl
@@ -1059,7 +1059,7 @@ decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% bitstring NamedBitList
%% Val can be of:
-%% - [identifiers] where only named identifers are set to one,
+%% - [identifiers] where only named identifiers are set to one,
%% the Constraint must then have some information of the
%% bitlength.
%% - [list of ones and zeroes] all bits
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl
index 4f0ca99cce..8be5b0cd6e 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl
@@ -108,7 +108,7 @@ user(Pid, User, Pass) ->
gen_server:call(Pid, {user, User, Pass}, infinity).
%% user(Pid, User, Pass,Acc)
-%% Purpose: Login whith a supplied account name
+%% Purpose: Login with a supplied account name
%% Args: Pid = pid(), User = Pass = Acc = string()
%% Returns: ok | {error, euser} | {error, econn} | {error, eacct}
user(Pid, User, Pass,Acc) ->
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl
index cf05431f5a..039960dac7 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl
@@ -24,7 +24,7 @@
%%% - RFC 3310 Authentication and Key Agreement (AKA) (not yet!)
%%% - HTTP/1.1 Specification Errata found at
%%% http://world.std.com/~lawrence/http_errata.html
-%%% Additionaly follows the following recommendations:
+%%% Additionally follows these recommendations:
%%% - RFC 3143 Known HTTP Proxy/Caching Problems (not yet!)
%%% - draft-nottingham-hdrreg-http-00.txt (not yet!)
%%%
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl
index ebefcd7ad7..28ea42c685 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl
@@ -697,7 +697,7 @@ lookup(Key,Val) ->
%%% This code is for parsing trailer headers in chunked messages.
%%% Will be deprecated whenever I have found an alternative working solution!
%%% Note:
-%%% - The header names are returned slighly different from what the what
+%%% - The header names are returned slightly differently from what
%%% inet_drv returns
read_headers_old(Scheme,Socket,Timeout) ->
read_headers_old(<<>>,Scheme,Socket,Timeout,[],[]).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl
index 45beaa84f7..d2653184aa 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl
@@ -95,7 +95,7 @@ abort_session(Addr,Sid,Msg) ->
next_request(Addr,Sid) ->
gen_server:call(?HMACALL,{next_request,Addr,Sid},infinity).
-%%% Session handler has succeded to set up a new session, now register
+%%% Session handler has succeeded in setting up a new session, now register
%%% the socket
register_socket(Addr,Sid,Socket) ->
gen_server:cast(?HMACALL,{register_socket,Addr,Sid,Socket}).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl
index 85e06f43b6..3058ac3556 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl
@@ -224,7 +224,7 @@ is_blocked(ServerRef) ->
%%
-%% Module API. Theese functions are intended for use from modules only.
+%% Module API. These functions are intended for use from modules only.
%%
config_lookup(Port, Query) ->
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl
index d7a698d65a..07f951d057 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl
@@ -109,7 +109,7 @@ get_persistens(HTTPVersion,ParsedHeader,ConfigDB)->
%%If it is version prio to 1.1 kill the conneciton
[$H, $T, $T, $P, $\/, $1, $.,N] ->
case httpd_util:key1search(ParsedHeader,"connection","keep-alive")of
- %%if the connection isnt ordered to go down let it live
+ %%if the connection isn't ordered to go down let it live
%%The keep-alive value is the older http/1.1 might be older
%%Clients that use it.
"keep-alive" when N >= 49 ->
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_response.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_response.erl
index 47c7fc1b8d..50e0e42786 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_response.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_response.erl
@@ -34,7 +34,7 @@
-define(PROCEED_RESPONSE(StatusCode, Info),
{proceed,
[{response,{already_sent, StatusCode,
- httpd_util:key1search(Info#mod.data,content_lenght)}}]}).
+ httpd_util:key1search(Info#mod.data,content_length)}}]}).
-include("httpd.hrl").
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl
index 6b872d7c95..73edcf6b92 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl
@@ -60,7 +60,7 @@
% request_line, % string() Request Line
headers, % #req_headers{} Parsed request headers
entity_body= <<>>, % binary() Body of request
- connection, % boolean() true if persistant connection
+ connection, % boolean() true if persistent connection
status_code, % int() Status code
logging % int() 0=No logging
% 1=Only mod_log present
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl
index e42494ff76..847d6e97c1 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl
@@ -53,7 +53,7 @@ store_directory_data(Directory, DirData) ->
%% API
%%
-%% Compability API
+%% Compatibility API
store_user(UserName, Password, Port, Dir, AccessPassword) ->
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl
index 1203aeaa4c..a48f73274b 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl
@@ -440,7 +440,7 @@ try_new_erl_scheme_method(Info,Env,Input,Mod,Func)->
%%----------------------------------------------------------------------
-%%The function recieves the data from the process that generates the page
+%%The function receives the data from the process that generates the page
%%and send the data to the client through the mod_cgi:send function
%%----------------------------------------------------------------------
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl
index f600c65e92..d95c745b07 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl
@@ -272,10 +272,10 @@ controlIfAllowed(AllowedNetworks,UserNetwork,IfAllowed,IfDenied)->
end.
-%---------------------------------------------------------------------%
-%The Denycontrol isn't neccessary to preform since the allow control %
-%override the deny control %
-%---------------------------------------------------------------------%
+%--------------------------------------------------------------------%
+%The Denycontrol isn't necessary to perform since the allow control %
+%overrides the deny control                                         %
+%--------------------------------------------------------------------%
controlDenyAllow(DeniedNetworks,AllowedNetworks,UserNetwork)->
case AllowedNetworks of
[{allow,all}]->
@@ -657,7 +657,7 @@ getData2(HtAccessFileNames,SplittedPath,Info)->
%----------------------------------------------------------------------
%HtAccessFilenames is a list the names the accesssfiles can have
-%Path is the shortest match agains all alias and documentroot
+%Path is the shortest match against all alias and documentroot
%rest of splitted path is a list of the parts of the path
%Info is the mod recod from the server
%----------------------------------------------------------------------
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl
index 4e6030d5e2..f2c45c4a3f 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl
@@ -80,7 +80,7 @@ send_range_response(Path,Info,Ranges,FileInfo,LastModified)->
send_range_response(Path,Info,Start,Stop,FileInfo,LastModified)
end.
%%More than one range specified
-%%Send a multipart reponse to the user
+%%Send a multipart response to the user
%
%%An example of an multipart range response
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl
index 76168f3890..a997db6880 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl
@@ -48,8 +48,8 @@ do(Info) ->
%%----------------------------------------------------------------------
-%%Control that the request header did not contians any limitations
-%%wheather a response shall be createed or not
+%%Control that the request header did not contain any limitations
+%%whether a response shall be created or not
%%----------------------------------------------------------------------
do_responsecontrol(Info) ->
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl
index 19b571ac47..cc72a9b6fe 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl
@@ -431,7 +431,7 @@ wrap_trans(State, Fun, Args, Retries, Mod, Kind) ->
%% read lock is only set on the first node
%% Nodes may either be a list of nodes or one node as an atom
%% Mnesia on all Nodes must be connected to each other, but
-%% it is not neccessary that they are up and running.
+%% it is not necessary that they are up and running.
lock(LockItem, LockKind) ->
case get(mnesia_activity_state) of
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl
index fdbf3e4481..a85a08e4f8 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl
@@ -775,7 +775,7 @@ restore_tables([Rec | Recs], Header, Schema, State = {local, LocalTabs, L}) ->
restore_tables([], _Header, _Schema, State) ->
State.
-%% Creates all neccessary dat files and inserts
+%% Creates all necessary dat files and inserts
%% the table definitions in the schema table
%%
%% Returns a list of local_tab tuples for all local tables
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl
index 2b5c77b3ba..0403c7e978 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl
@@ -332,7 +332,7 @@ really_retain(Name, Tab) ->
%%
%% {min, MinTabs}
%% Minimize redundancy and only keep checkpoint info together with
-%% one replica, preferrably at the local node. If any node involved
+%% one replica, preferably at the local node. If any node involved
%% the checkpoint goes down, the checkpoint is deactivated.
%%
%% {max, MaxTabs}
@@ -345,7 +345,7 @@ really_retain(Name, Tab) ->
%% {ram_overrides_dump, Tabs}
%% Only applicable for ram_copies. Bool controls which versions of
%% the records that should be included in the checkpoint state.
-%% true means that the latest comitted records in ram (i.e. the
+%% true means that the latest committed records in ram (i.e. the
%% records that the application accesses) should be included
%% in the checkpoint. false means that the records dumped to
%% dat-files (the records that will be loaded at startup) should
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl
index 70fee1741e..07667d73f5 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl
@@ -61,7 +61,7 @@ do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == disc_copies ->
Repair = mnesia_monitor:get_env(auto_repair),
Args = [{keypos, 2}, public, named_table, Type],
case Reason of
- {dumper, _} -> %% Resources allready allocated
+ {dumper, _} -> %% Resources already allocated
ignore;
_ ->
mnesia_monitor:mktab(Tab, Args),
@@ -82,7 +82,7 @@ do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == disc_copies ->
do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == ram_copies ->
Args = [{keypos, 2}, public, named_table, Type],
case Reason of
- {dumper, _} -> %% Resources allready allocated
+ {dumper, _} -> %% Resources already allocated
ignore;
_ ->
mnesia_monitor:mktab(Tab, Args),
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl
index 701aa8f598..accb631f2a 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl
@@ -170,14 +170,14 @@ loop(State) ->
end;
%% If test_set_sticky fails, we send this to all nodes
- %% after aquiring a real write lock on Oid
+ %% after acquiring a real write lock on Oid
{stick, {Tab, _}, N} ->
?ets_insert(mnesia_sticky_locks, {Tab, N}),
loop(State);
%% The caller which sends this message, must have first
- %% aquired a write lock on the entire table
+ %% acquired a write lock on the entire table
{unstick, Tab} ->
?ets_delete(mnesia_sticky_locks, Tab),
loop(State);
@@ -738,11 +738,11 @@ dirty_sticky_lock(Tab, Key, Nodes, Lock) ->
sticky_wlock_table(Tid, Store, Tab) ->
sticky_lock(Tid, Store, {Tab, ?ALL}, write).
-%% aquire a wlock on Oid
+%% acquire a wlock on Oid
%% We store a {Tabname, write, Tid} in all locktables
%% on all nodes containing a copy of Tabname
%% We also store an item {{locks, Tab, Key}, write} in the
-%% local store when we have aquired the lock.
+%% local store when we have acquired the lock.
%%
wlock(Tid, Store, Oid) ->
{Tab, Key} = Oid,
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl
index d1ff09ce29..7fd5f70e23 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl
@@ -144,7 +144,7 @@ check_protocol([{Node, {accept, Mon, _Version, Protocol}} | Tail], Protocols) ->
end,
[node(Mon) | check_protocol(Tail, Protocols)];
false ->
- unlink(Mon), % Get rid of unneccessary link
+ unlink(Mon), % Get rid of unnecessary link
check_protocol(Tail, Protocols)
end;
check_protocol([{Node, {reject, _Mon, Version, Protocol}} | Tail], Protocols) ->
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl
index ec07e1c1ab..fbd1356a7f 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl
@@ -1265,7 +1265,7 @@ make_change_table_copy_type(Tab, Node, ToS) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% change index functions ....
-%% Pos is allready added by 1 in both of these functions
+%% Pos is already added by 1 in both of these functions
add_table_index(Tab, Pos) ->
schema_transaction(fun() -> do_add_table_index(Tab, Pos) end).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl
index 3e08354b5a..09e310530d 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl
@@ -1615,7 +1615,7 @@ commit_participant(Coord, Tid, Bin, C0, DiscNs, _RamNs) ->
do_abort(Tid, Bin) when binary(Bin) ->
%% Possible optimization:
- %% If we want we could pass arround a flag
+ %% If we want we could pass around a flag
%% that tells us whether the binary contains
%% schema ops or not. Only if the binary
%% contains schema ops there are meningful
diff --git a/lib/dialyzer/test/small_SUITE_data/results/abs b/lib/dialyzer/test/small_SUITE_data/results/abs
new file mode 100644
index 0000000000..f229a6d036
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/abs
@@ -0,0 +1,9 @@
+
+abs.erl:12: Function i1/0 has no local return
+abs.erl:16: The pattern 'true' can never match the type 'false'
+abs.erl:23: Function i2/0 has no local return
+abs.erl:27: The pattern 'true' can never match the type 'false'
+abs.erl:34: Function i3/0 has no local return
+abs.erl:37: The pattern 'true' can never match the type 'false'
+abs.erl:45: Function i4/0 has no local return
+abs.erl:49: The pattern 'true' can never match the type 'false'
diff --git a/lib/dialyzer/test/small_SUITE_data/results/app_call b/lib/dialyzer/test/small_SUITE_data/results/app_call
index cc1a63f944..1af649815a 100644
--- a/lib/dialyzer/test/small_SUITE_data/results/app_call
+++ b/lib/dialyzer/test/small_SUITE_data/results/app_call
@@ -1,3 +1,3 @@
-app_call.erl:6: The call M:'foo'() requires that M is of type atom() | tuple() not 42
+app_call.erl:6: The call M:'foo'() requires that M is of type atom() not 42
app_call.erl:9: The call 'mod':F() requires that F is of type atom() not {'gazonk',[]}
diff --git a/lib/dialyzer/test/small_SUITE_data/results/bif1 b/lib/dialyzer/test/small_SUITE_data/results/bif1
new file mode 100644
index 0000000000..289b6f821f
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/bif1
@@ -0,0 +1,3 @@
+
+bif1.erl:13: Function string_chars/0 has no local return
+bif1.erl:16: The call string:chars(S::65,10,L2::bif1_adt:s()) contains an opaque term as 3rd argument when terms of different types are expected in these positions
diff --git a/lib/dialyzer/test/small_SUITE_data/results/chars b/lib/dialyzer/test/small_SUITE_data/results/chars
new file mode 100644
index 0000000000..2c1f8f8d17
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/chars
@@ -0,0 +1,4 @@
+
+chars.erl:29: Invalid type specification for function chars:f/1. The success typing is (#{'b':=50}) -> 'ok'
+chars.erl:32: Function t1/0 has no local return
+chars.erl:32: The call chars:f(#{'b':=50}) breaks the contract (#{'a':=49,'b'=>50,'c'=>51}) -> 'ok'
diff --git a/lib/dialyzer/test/small_SUITE_data/results/comparisons b/lib/dialyzer/test/small_SUITE_data/results/comparisons
index 642585d25e..5083d2695a 100644
--- a/lib/dialyzer/test/small_SUITE_data/results/comparisons
+++ b/lib/dialyzer/test/small_SUITE_data/results/comparisons
@@ -1,50 +1,43 @@
comparisons.erl:100: The pattern 'true' can never match the type 'false'
-comparisons.erl:101: The pattern 'true' can never match the type 'false'
+comparisons.erl:101: The pattern 'false' can never match the type 'true'
comparisons.erl:102: The pattern 'false' can never match the type 'true'
-comparisons.erl:103: The pattern 'false' can never match the type 'true'
+comparisons.erl:103: The pattern 'true' can never match the type 'false'
comparisons.erl:104: The pattern 'true' can never match the type 'false'
-comparisons.erl:105: The pattern 'true' can never match the type 'false'
-comparisons.erl:107: The pattern 'true' can never match the type 'false'
-comparisons.erl:108: The pattern 'true' can never match the type 'false'
-comparisons.erl:109: The pattern 'false' can never match the type 'true'
-comparisons.erl:110: The pattern 'false' can never match the type 'true'
-comparisons.erl:111: The pattern 'true' can never match the type 'false'
-comparisons.erl:112: The pattern 'true' can never match the type 'false'
-comparisons.erl:113: The pattern 'false' can never match the type 'true'
-comparisons.erl:114: The pattern 'false' can never match the type 'true'
-comparisons.erl:115: The pattern 'true' can never match the type 'false'
-comparisons.erl:116: The pattern 'true' can never match the type 'false'
-comparisons.erl:117: The pattern 'false' can never match the type 'true'
comparisons.erl:118: The pattern 'false' can never match the type 'true'
+comparisons.erl:119: The pattern 'false' can never match the type 'true'
+comparisons.erl:120: The pattern 'true' can never match the type 'false'
+comparisons.erl:121: The pattern 'true' can never match the type 'false'
+comparisons.erl:122: The pattern 'false' can never match the type 'true'
comparisons.erl:123: The pattern 'false' can never match the type 'true'
-comparisons.erl:124: The pattern 'false' can never match the type 'true'
+comparisons.erl:124: The pattern 'true' can never match the type 'false'
comparisons.erl:125: The pattern 'true' can never match the type 'false'
-comparisons.erl:126: The pattern 'true' can never match the type 'false'
+comparisons.erl:126: The pattern 'false' can never match the type 'true'
comparisons.erl:127: The pattern 'false' can never match the type 'true'
-comparisons.erl:128: The pattern 'false' can never match the type 'true'
+comparisons.erl:128: The pattern 'true' can never match the type 'false'
comparisons.erl:129: The pattern 'true' can never match the type 'false'
-comparisons.erl:130: The pattern 'true' can never match the type 'false'
+comparisons.erl:130: The pattern 'false' can never match the type 'true'
+comparisons.erl:131: The pattern 'false' can never match the type 'true'
comparisons.erl:132: The pattern 'true' can never match the type 'false'
comparisons.erl:133: The pattern 'true' can never match the type 'false'
comparisons.erl:134: The pattern 'false' can never match the type 'true'
comparisons.erl:135: The pattern 'false' can never match the type 'true'
comparisons.erl:136: The pattern 'true' can never match the type 'false'
comparisons.erl:137: The pattern 'true' can never match the type 'false'
-comparisons.erl:138: The pattern 'false' can never match the type 'true'
-comparisons.erl:139: The pattern 'false' can never match the type 'true'
+comparisons.erl:139: The pattern 'true' can never match the type 'false'
comparisons.erl:140: The pattern 'true' can never match the type 'false'
-comparisons.erl:141: The pattern 'true' can never match the type 'false'
+comparisons.erl:141: The pattern 'false' can never match the type 'true'
comparisons.erl:142: The pattern 'false' can never match the type 'true'
-comparisons.erl:143: The pattern 'false' can never match the type 'true'
+comparisons.erl:143: The pattern 'true' can never match the type 'false'
comparisons.erl:144: The pattern 'true' can never match the type 'false'
-comparisons.erl:145: The pattern 'true' can never match the type 'false'
+comparisons.erl:145: The pattern 'false' can never match the type 'true'
comparisons.erl:146: The pattern 'false' can never match the type 'true'
-comparisons.erl:147: The pattern 'false' can never match the type 'true'
-comparisons.erl:152: The pattern 'false' can never match the type 'true'
-comparisons.erl:153: The pattern 'false' can never match the type 'true'
-comparisons.erl:154: The pattern 'true' can never match the type 'false'
-comparisons.erl:155: The pattern 'true' can never match the type 'false'
+comparisons.erl:147: The pattern 'true' can never match the type 'false'
+comparisons.erl:148: The pattern 'true' can never match the type 'false'
+comparisons.erl:149: The pattern 'false' can never match the type 'true'
+comparisons.erl:150: The pattern 'false' can never match the type 'true'
+comparisons.erl:155: The pattern 'false' can never match the type 'true'
+comparisons.erl:156: The pattern 'false' can never match the type 'true'
comparisons.erl:157: The pattern 'true' can never match the type 'false'
comparisons.erl:158: The pattern 'true' can never match the type 'false'
comparisons.erl:159: The pattern 'false' can never match the type 'true'
@@ -59,36 +52,62 @@ comparisons.erl:167: The pattern 'false' can never match the type 'true'
comparisons.erl:168: The pattern 'false' can never match the type 'true'
comparisons.erl:169: The pattern 'true' can never match the type 'false'
comparisons.erl:170: The pattern 'true' can never match the type 'false'
-comparisons.erl:171: The pattern 'false' can never match the type 'true'
-comparisons.erl:172: The pattern 'false' can never match the type 'true'
+comparisons.erl:172: The pattern 'true' can never match the type 'false'
comparisons.erl:173: The pattern 'true' can never match the type 'false'
-comparisons.erl:174: The pattern 'true' can never match the type 'false'
+comparisons.erl:174: The pattern 'false' can never match the type 'true'
comparisons.erl:175: The pattern 'false' can never match the type 'true'
-comparisons.erl:176: The pattern 'false' can never match the type 'true'
+comparisons.erl:176: The pattern 'true' can never match the type 'false'
+comparisons.erl:177: The pattern 'true' can never match the type 'false'
+comparisons.erl:178: The pattern 'false' can never match the type 'true'
+comparisons.erl:179: The pattern 'false' can never match the type 'true'
+comparisons.erl:180: The pattern 'true' can never match the type 'false'
+comparisons.erl:181: The pattern 'true' can never match the type 'false'
+comparisons.erl:182: The pattern 'false' can never match the type 'true'
+comparisons.erl:183: The pattern 'false' can never match the type 'true'
+comparisons.erl:184: The pattern 'true' can never match the type 'false'
+comparisons.erl:185: The pattern 'true' can never match the type 'false'
comparisons.erl:186: The pattern 'false' can never match the type 'true'
comparisons.erl:187: The pattern 'false' can never match the type 'true'
-comparisons.erl:188: The pattern 'true' can never match the type 'false'
-comparisons.erl:189: The pattern 'true' can never match the type 'false'
-comparisons.erl:190: The pattern 'false' can never match the type 'true'
-comparisons.erl:191: The pattern 'false' can never match the type 'true'
-comparisons.erl:192: The pattern 'true' can never match the type 'false'
-comparisons.erl:193: The pattern 'true' can never match the type 'false'
-comparisons.erl:203: The pattern 'false' can never match the type 'true'
-comparisons.erl:204: The pattern 'false' can never match the type 'true'
+comparisons.erl:192: The pattern 'false' can never match the type 'true'
+comparisons.erl:193: The pattern 'false' can never match the type 'true'
+comparisons.erl:194: The pattern 'true' can never match the type 'false'
+comparisons.erl:195: The pattern 'true' can never match the type 'false'
+comparisons.erl:196: The pattern 'false' can never match the type 'true'
+comparisons.erl:197: The pattern 'false' can never match the type 'true'
+comparisons.erl:198: The pattern 'true' can never match the type 'false'
+comparisons.erl:199: The pattern 'true' can never match the type 'false'
+comparisons.erl:200: The pattern 'false' can never match the type 'true'
+comparisons.erl:201: The pattern 'false' can never match the type 'true'
+comparisons.erl:202: The pattern 'true' can never match the type 'false'
+comparisons.erl:203: The pattern 'true' can never match the type 'false'
comparisons.erl:205: The pattern 'true' can never match the type 'false'
comparisons.erl:206: The pattern 'true' can never match the type 'false'
-comparisons.erl:208: The pattern 'true' can never match the type 'false'
+comparisons.erl:207: The pattern 'false' can never match the type 'true'
+comparisons.erl:208: The pattern 'false' can never match the type 'true'
comparisons.erl:209: The pattern 'true' can never match the type 'false'
-comparisons.erl:210: The pattern 'false' can never match the type 'true'
+comparisons.erl:210: The pattern 'true' can never match the type 'false'
comparisons.erl:211: The pattern 'false' can never match the type 'true'
+comparisons.erl:212: The pattern 'false' can never match the type 'true'
+comparisons.erl:213: The pattern 'true' can never match the type 'false'
+comparisons.erl:214: The pattern 'true' can never match the type 'false'
+comparisons.erl:215: The pattern 'false' can never match the type 'true'
+comparisons.erl:216: The pattern 'false' can never match the type 'true'
+comparisons.erl:217: The pattern 'true' can never match the type 'false'
+comparisons.erl:218: The pattern 'true' can never match the type 'false'
+comparisons.erl:219: The pattern 'false' can never match the type 'true'
+comparisons.erl:220: The pattern 'false' can never match the type 'true'
comparisons.erl:221: The pattern 'true' can never match the type 'false'
comparisons.erl:222: The pattern 'true' can never match the type 'false'
comparisons.erl:223: The pattern 'false' can never match the type 'true'
comparisons.erl:224: The pattern 'false' can never match the type 'true'
-comparisons.erl:225: The pattern 'true' can never match the type 'false'
-comparisons.erl:226: The pattern 'true' can never match the type 'false'
-comparisons.erl:227: The pattern 'false' can never match the type 'true'
-comparisons.erl:228: The pattern 'false' can never match the type 'true'
+comparisons.erl:229: The pattern 'true' can never match the type 'false'
+comparisons.erl:230: The pattern 'true' can never match the type 'false'
+comparisons.erl:231: The pattern 'false' can never match the type 'true'
+comparisons.erl:232: The pattern 'false' can never match the type 'true'
+comparisons.erl:233: The pattern 'false' can never match the type 'true'
+comparisons.erl:234: The pattern 'false' can never match the type 'true'
+comparisons.erl:235: The pattern 'true' can never match the type 'false'
+comparisons.erl:236: The pattern 'true' can never match the type 'false'
comparisons.erl:242: The pattern 'false' can never match the type 'true'
comparisons.erl:243: The pattern 'false' can never match the type 'true'
comparisons.erl:244: The pattern 'true' can never match the type 'false'
@@ -97,57 +116,86 @@ comparisons.erl:246: The pattern 'false' can never match the type 'true'
comparisons.erl:247: The pattern 'false' can never match the type 'true'
comparisons.erl:248: The pattern 'true' can never match the type 'false'
comparisons.erl:249: The pattern 'true' can never match the type 'false'
-comparisons.erl:251: The pattern 'true' can never match the type 'false'
-comparisons.erl:252: The pattern 'true' can never match the type 'false'
-comparisons.erl:253: The pattern 'false' can never match the type 'true'
-comparisons.erl:254: The pattern 'false' can never match the type 'true'
-comparisons.erl:263: The pattern 'false' can never match the type 'true'
-comparisons.erl:264: The pattern 'false' can never match the type 'true'
+comparisons.erl:259: The pattern 'false' can never match the type 'true'
+comparisons.erl:260: The pattern 'false' can never match the type 'true'
+comparisons.erl:261: The pattern 'true' can never match the type 'false'
+comparisons.erl:262: The pattern 'true' can never match the type 'false'
+comparisons.erl:264: The pattern 'true' can never match the type 'false'
comparisons.erl:265: The pattern 'true' can never match the type 'false'
-comparisons.erl:266: The pattern 'true' can never match the type 'false'
-comparisons.erl:268: The pattern 'true' can never match the type 'false'
-comparisons.erl:269: The pattern 'true' can never match the type 'false'
-comparisons.erl:270: The pattern 'false' can never match the type 'true'
-comparisons.erl:271: The pattern 'false' can never match the type 'true'
-comparisons.erl:272: The pattern 'true' can never match the type 'false'
-comparisons.erl:273: The pattern 'true' can never match the type 'false'
-comparisons.erl:274: The pattern 'false' can never match the type 'true'
-comparisons.erl:275: The pattern 'false' can never match the type 'true'
-comparisons.erl:293: The pattern 'false' can never match the type 'true'
-comparisons.erl:294: The pattern 'false' can never match the type 'true'
-comparisons.erl:295: The pattern 'true' can never match the type 'false'
-comparisons.erl:296: The pattern 'true' can never match the type 'false'
-comparisons.erl:311: The pattern 'true' can never match the type 'false'
-comparisons.erl:312: The pattern 'true' can never match the type 'false'
-comparisons.erl:313: The pattern 'false' can never match the type 'true'
-comparisons.erl:314: The pattern 'false' can never match the type 'true'
-comparisons.erl:44: The pattern 'false' can never match the type 'true'
-comparisons.erl:45: The pattern 'false' can never match the type 'true'
-comparisons.erl:46: The pattern 'true' can never match the type 'false'
-comparisons.erl:47: The pattern 'true' can never match the type 'false'
-comparisons.erl:48: The pattern 'false' can never match the type 'true'
-comparisons.erl:49: The pattern 'false' can never match the type 'true'
-comparisons.erl:50: The pattern 'true' can never match the type 'false'
-comparisons.erl:51: The pattern 'true' can never match the type 'false'
+comparisons.erl:266: The pattern 'false' can never match the type 'true'
+comparisons.erl:267: The pattern 'false' can never match the type 'true'
+comparisons.erl:277: The pattern 'true' can never match the type 'false'
+comparisons.erl:278: The pattern 'true' can never match the type 'false'
+comparisons.erl:279: The pattern 'false' can never match the type 'true'
+comparisons.erl:280: The pattern 'false' can never match the type 'true'
+comparisons.erl:281: The pattern 'true' can never match the type 'false'
+comparisons.erl:282: The pattern 'true' can never match the type 'false'
+comparisons.erl:283: The pattern 'false' can never match the type 'true'
+comparisons.erl:284: The pattern 'false' can never match the type 'true'
+comparisons.erl:298: The pattern 'false' can never match the type 'true'
+comparisons.erl:299: The pattern 'false' can never match the type 'true'
+comparisons.erl:300: The pattern 'true' can never match the type 'false'
+comparisons.erl:301: The pattern 'true' can never match the type 'false'
+comparisons.erl:302: The pattern 'false' can never match the type 'true'
+comparisons.erl:303: The pattern 'false' can never match the type 'true'
+comparisons.erl:304: The pattern 'true' can never match the type 'false'
+comparisons.erl:305: The pattern 'true' can never match the type 'false'
+comparisons.erl:307: The pattern 'true' can never match the type 'false'
+comparisons.erl:308: The pattern 'true' can never match the type 'false'
+comparisons.erl:309: The pattern 'false' can never match the type 'true'
+comparisons.erl:310: The pattern 'false' can never match the type 'true'
+comparisons.erl:319: The pattern 'false' can never match the type 'true'
+comparisons.erl:320: The pattern 'false' can never match the type 'true'
+comparisons.erl:321: The pattern 'true' can never match the type 'false'
+comparisons.erl:322: The pattern 'true' can never match the type 'false'
+comparisons.erl:324: The pattern 'true' can never match the type 'false'
+comparisons.erl:325: The pattern 'true' can never match the type 'false'
+comparisons.erl:326: The pattern 'false' can never match the type 'true'
+comparisons.erl:327: The pattern 'false' can never match the type 'true'
+comparisons.erl:328: The pattern 'true' can never match the type 'false'
+comparisons.erl:329: The pattern 'true' can never match the type 'false'
+comparisons.erl:330: The pattern 'false' can never match the type 'true'
+comparisons.erl:331: The pattern 'false' can never match the type 'true'
+comparisons.erl:349: The pattern 'false' can never match the type 'true'
+comparisons.erl:350: The pattern 'false' can never match the type 'true'
+comparisons.erl:351: The pattern 'true' can never match the type 'false'
+comparisons.erl:352: The pattern 'true' can never match the type 'false'
+comparisons.erl:367: The pattern 'true' can never match the type 'false'
+comparisons.erl:368: The pattern 'true' can never match the type 'false'
+comparisons.erl:369: The pattern 'false' can never match the type 'true'
+comparisons.erl:370: The pattern 'false' can never match the type 'true'
comparisons.erl:52: The pattern 'false' can never match the type 'true'
comparisons.erl:53: The pattern 'false' can never match the type 'true'
comparisons.erl:54: The pattern 'true' can never match the type 'false'
comparisons.erl:55: The pattern 'true' can never match the type 'false'
+comparisons.erl:56: The pattern 'false' can never match the type 'true'
+comparisons.erl:57: The pattern 'false' can never match the type 'true'
+comparisons.erl:58: The pattern 'true' can never match the type 'false'
+comparisons.erl:59: The pattern 'true' can never match the type 'false'
+comparisons.erl:60: The pattern 'false' can never match the type 'true'
+comparisons.erl:61: The pattern 'false' can never match the type 'true'
+comparisons.erl:62: The pattern 'true' can never match the type 'false'
+comparisons.erl:63: The pattern 'true' can never match the type 'false'
+comparisons.erl:64: The pattern 'false' can never match the type 'true'
+comparisons.erl:65: The pattern 'false' can never match the type 'true'
+comparisons.erl:66: The pattern 'true' can never match the type 'false'
+comparisons.erl:67: The pattern 'true' can never match the type 'false'
+comparisons.erl:68: The pattern 'false' can never match the type 'true'
comparisons.erl:69: The pattern 'false' can never match the type 'true'
-comparisons.erl:70: The pattern 'false' can never match the type 'true'
+comparisons.erl:70: The pattern 'true' can never match the type 'false'
comparisons.erl:71: The pattern 'true' can never match the type 'false'
-comparisons.erl:72: The pattern 'true' can never match the type 'false'
-comparisons.erl:73: The pattern 'false' can never match the type 'true'
-comparisons.erl:74: The pattern 'false' can never match the type 'true'
-comparisons.erl:75: The pattern 'true' can never match the type 'false'
-comparisons.erl:76: The pattern 'true' can never match the type 'false'
-comparisons.erl:77: The pattern 'false' can never match the type 'true'
-comparisons.erl:78: The pattern 'false' can never match the type 'true'
-comparisons.erl:79: The pattern 'true' can never match the type 'false'
-comparisons.erl:80: The pattern 'true' can never match the type 'false'
+comparisons.erl:85: The pattern 'false' can never match the type 'true'
+comparisons.erl:86: The pattern 'false' can never match the type 'true'
+comparisons.erl:87: The pattern 'true' can never match the type 'false'
+comparisons.erl:88: The pattern 'true' can never match the type 'false'
+comparisons.erl:89: The pattern 'false' can never match the type 'true'
+comparisons.erl:90: The pattern 'false' can never match the type 'true'
+comparisons.erl:91: The pattern 'true' can never match the type 'false'
+comparisons.erl:92: The pattern 'true' can never match the type 'false'
+comparisons.erl:93: The pattern 'false' can never match the type 'true'
comparisons.erl:94: The pattern 'false' can never match the type 'true'
-comparisons.erl:95: The pattern 'false' can never match the type 'true'
+comparisons.erl:95: The pattern 'true' can never match the type 'false'
comparisons.erl:96: The pattern 'true' can never match the type 'false'
-comparisons.erl:97: The pattern 'true' can never match the type 'false'
+comparisons.erl:97: The pattern 'false' can never match the type 'true'
comparisons.erl:98: The pattern 'false' can never match the type 'true'
-comparisons.erl:99: The pattern 'false' can never match the type 'true'
+comparisons.erl:99: The pattern 'true' can never match the type 'false'
diff --git a/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes b/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes
index a9fbfb6068..d2a3ebb766 100644
--- a/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes
+++ b/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes
@@ -1,15 +1,15 @@
-contracts_with_subtypes.erl:106: The call contracts_with_subtypes:rec_arg({'a','b'}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,{'a',A} | {'b',B}), is_subtype(A,'a' | {'b',B}), is_subtype(B,'b' | {'a',A})
-contracts_with_subtypes.erl:107: The call contracts_with_subtypes:rec_arg({'b','a'}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,{'a',A} | {'b',B}), is_subtype(A,'a' | {'b',B}), is_subtype(B,'b' | {'a',A})
-contracts_with_subtypes.erl:109: The call contracts_with_subtypes:rec_arg({'b',{'a','b'}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,{'a',A} | {'b',B}), is_subtype(A,'a' | {'b',B}), is_subtype(B,'b' | {'a',A})
-contracts_with_subtypes.erl:135: The call contracts_with_subtypes:rec2({'a','b'}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
-contracts_with_subtypes.erl:136: The call contracts_with_subtypes:rec2({'b','a'}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
-contracts_with_subtypes.erl:137: The call contracts_with_subtypes:rec2({'a',{'b','a'}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
-contracts_with_subtypes.erl:138: The call contracts_with_subtypes:rec2({'b',{'a','b'}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
-contracts_with_subtypes.erl:139: The call contracts_with_subtypes:rec2({'a',{'b',{'a','b'}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
-contracts_with_subtypes.erl:140: The call contracts_with_subtypes:rec2({'b',{'a',{'b','a'}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
-contracts_with_subtypes.erl:141: The call contracts_with_subtypes:rec2({'a',{'b',{'a',{'b','a'}}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
-contracts_with_subtypes.erl:142: The call contracts_with_subtypes:rec2({'b',{'a',{'b',{'a','b'}}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
+contracts_with_subtypes.erl:106: The call contracts_with_subtypes:rec_arg({'a','b'}) breaks the contract (Arg) -> 'ok' when Arg :: {'a',A} | {'b',B}, A :: 'a' | {'b',B}, B :: 'b' | {'a',A}
+contracts_with_subtypes.erl:107: The call contracts_with_subtypes:rec_arg({'b','a'}) breaks the contract (Arg) -> 'ok' when Arg :: {'a',A} | {'b',B}, A :: 'a' | {'b',B}, B :: 'b' | {'a',A}
+contracts_with_subtypes.erl:109: The call contracts_with_subtypes:rec_arg({'b',{'a','b'}}) breaks the contract (Arg) -> 'ok' when Arg :: {'a',A} | {'b',B}, A :: 'a' | {'b',B}, B :: 'b' | {'a',A}
+contracts_with_subtypes.erl:135: The call contracts_with_subtypes:rec2({'a','b'}) breaks the contract (Arg) -> 'ok' when Arg :: ab()
+contracts_with_subtypes.erl:136: The call contracts_with_subtypes:rec2({'b','a'}) breaks the contract (Arg) -> 'ok' when Arg :: ab()
+contracts_with_subtypes.erl:137: The call contracts_with_subtypes:rec2({'a',{'b','a'}}) breaks the contract (Arg) -> 'ok' when Arg :: ab()
+contracts_with_subtypes.erl:138: The call contracts_with_subtypes:rec2({'b',{'a','b'}}) breaks the contract (Arg) -> 'ok' when Arg :: ab()
+contracts_with_subtypes.erl:139: The call contracts_with_subtypes:rec2({'a',{'b',{'a','b'}}}) breaks the contract (Arg) -> 'ok' when Arg :: ab()
+contracts_with_subtypes.erl:140: The call contracts_with_subtypes:rec2({'b',{'a',{'b','a'}}}) breaks the contract (Arg) -> 'ok' when Arg :: ab()
+contracts_with_subtypes.erl:141: The call contracts_with_subtypes:rec2({'a',{'b',{'a',{'b','a'}}}}) breaks the contract (Arg) -> 'ok' when Arg :: ab()
+contracts_with_subtypes.erl:142: The call contracts_with_subtypes:rec2({'b',{'a',{'b',{'a','b'}}}}) breaks the contract (Arg) -> 'ok' when Arg :: ab()
contracts_with_subtypes.erl:175: The pattern 1 can never match the type string()
contracts_with_subtypes.erl:178: The pattern 'alpha' can never match the type {'ok',_} | {'ok',_,string()}
contracts_with_subtypes.erl:180: The pattern 42 can never match the type {'ok',_} | {'ok',_,string()}
@@ -24,13 +24,13 @@ contracts_with_subtypes.erl:23: Invalid type specification for function contract
contracts_with_subtypes.erl:240: The pattern {'ok', 42} can never match the type {'ok',_,string()}
contracts_with_subtypes.erl:241: The pattern 42 can never match the type {'ok',_,string()}
contracts_with_subtypes.erl:267: Function flat_ets_new_t/0 has no local return
-contracts_with_subtypes.erl:268: The call contracts_with_subtypes:flat_ets_new(12,[]) breaks the contract (Name,Options) -> atom() when is_subtype(Name,atom()), is_subtype(Options,[Option]), is_subtype(Option,'set' | 'ordered_set' | 'bag' | 'duplicate_bag' | 'public' | 'protected' | 'private' | 'named_table' | {'keypos',integer()} | {'heir',pid(),term()} | {'heir','none'} | {'write_concurrency',boolean()} | {'read_concurrency',boolean()} | 'compressed')
+contracts_with_subtypes.erl:268: The call contracts_with_subtypes:flat_ets_new(12,[]) breaks the contract (Name,Options) -> atom() when Name :: atom(), Options :: [Option], Option :: 'set' | 'ordered_set' | 'bag' | 'duplicate_bag' | 'public' | 'protected' | 'private' | 'named_table' | {'keypos',integer()} | {'heir',pid(),term()} | {'heir','none'} | {'write_concurrency',boolean()} | {'read_concurrency',boolean()} | 'compressed'
contracts_with_subtypes.erl:294: Function factored_ets_new_t/0 has no local return
-contracts_with_subtypes.erl:295: The call contracts_with_subtypes:factored_ets_new(12,[]) breaks the contract (Name,Options) -> atom() when is_subtype(Name,atom()), is_subtype(Options,[Option]), is_subtype(Option,Type | Access | 'named_table' | {'keypos',Pos} | {'heir',Pid::pid(),HeirData} | {'heir','none'} | Tweaks), is_subtype(Type,type()), is_subtype(Access,access()), is_subtype(Tweaks,{'write_concurrency',boolean()} | {'read_concurrency',boolean()} | 'compressed'), is_subtype(Pos,pos_integer()), is_subtype(HeirData,term())
-contracts_with_subtypes.erl:77: The call contracts_with_subtypes:foo1(5) breaks the contract (Arg1) -> Res when is_subtype(Arg1,atom()), is_subtype(Res,atom())
-contracts_with_subtypes.erl:78: The call contracts_with_subtypes:foo2(5) breaks the contract (Arg1) -> Res when is_subtype(Arg1,Arg2), is_subtype(Arg2,atom()), is_subtype(Res,atom())
-contracts_with_subtypes.erl:79: The call contracts_with_subtypes:foo3(5) breaks the contract (Arg1) -> Res when is_subtype(Arg2,atom()), is_subtype(Arg1,Arg2), is_subtype(Res,atom())
+contracts_with_subtypes.erl:295: The call contracts_with_subtypes:factored_ets_new(12,[]) breaks the contract (Name,Options) -> atom() when Name :: atom(), Options :: [Option], Option :: Type | Access | 'named_table' | {'keypos',Pos} | {'heir',Pid::pid(),HeirData} | {'heir','none'} | Tweaks, Type :: type(), Access :: access(), Tweaks :: {'write_concurrency',boolean()} | {'read_concurrency',boolean()} | 'compressed', Pos :: pos_integer(), HeirData :: term()
+contracts_with_subtypes.erl:77: The call contracts_with_subtypes:foo1(5) breaks the contract (Arg1) -> Res when Arg1 :: atom(), Res :: atom()
+contracts_with_subtypes.erl:78: The call contracts_with_subtypes:foo2(5) breaks the contract (Arg1) -> Res when Arg1 :: Arg2, Arg2 :: atom(), Res :: atom()
+contracts_with_subtypes.erl:79: The call contracts_with_subtypes:foo3(5) breaks the contract (Arg1) -> Res when Arg2 :: atom(), Arg1 :: Arg2, Res :: atom()
contracts_with_subtypes.erl:7: Invalid type specification for function contracts_with_subtypes:extract/0. The success typing is () -> 'something'
-contracts_with_subtypes.erl:80: The call contracts_with_subtypes:foo4(5) breaks the contract (Type) -> Type when is_subtype(Type,atom())
+contracts_with_subtypes.erl:80: The call contracts_with_subtypes:foo4(5) breaks the contract (Type) -> Type when Type :: atom()
contracts_with_subtypes.erl:81: The call contracts_with_subtypes:foo5(5) breaks the contract (Type::atom()) -> Type::atom()
-contracts_with_subtypes.erl:82: The call contracts_with_subtypes:foo6(5) breaks the contract (Type) -> Type when is_subtype(Type,atom())
+contracts_with_subtypes.erl:82: The call contracts_with_subtypes:foo6(5) breaks the contract (Type) -> Type when Type :: atom()
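Note that the expected warnings above now print contract constraints in the Var :: Type form instead of the legacy is_subtype(Var, Type) form. Both spellings are accepted in a -spec and mean the same thing; the stand-alone sketch below (not part of the patch, module and function names hypothetical) shows the two side by side.

-module(constraint_demo).
-export([to_atom/1]).

%% Legacy constraint form (kept as a comment, since a function may only
%% have one spec):
%% -spec to_atom(Arg) -> Res when is_subtype(Arg, atom()),
%%                                is_subtype(Res, atom()).

%% Modern form, matching the reformatted warning text:
-spec to_atom(Arg) -> Res when Arg :: atom(), Res :: atom().
to_atom(Arg) when is_atom(Arg) -> Arg.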
diff --git a/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes2 b/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes2
index 9f5433a13d..1a8aeb13d0 100644
--- a/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes2
+++ b/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes2
@@ -1,3 +1,3 @@
contracts_with_subtypes2.erl:18: Function t/0 has no local return
-contracts_with_subtypes2.erl:19: The call contracts_with_subtypes2:t({'a',{'b',{'c',{'d',{'e',{'g',3}}}}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,{'a',A}), is_subtype(A,{'b',B}), is_subtype(B,{'c',C}), is_subtype(C,{'d',D}), is_subtype(D,{'e',E}), is_subtype(E,{'f',_})
+contracts_with_subtypes2.erl:19: The call contracts_with_subtypes2:t({'a',{'b',{'c',{'d',{'e',{'g',3}}}}}}) breaks the contract (Arg) -> 'ok' when Arg :: {'a',A}, A :: {'b',B}, B :: {'c',C}, C :: {'d',D}, D :: {'e',E}, E :: {'f',_}
diff --git a/lib/dialyzer/test/small_SUITE_data/results/fun_arity b/lib/dialyzer/test/small_SUITE_data/results/fun_arity
new file mode 100644
index 0000000000..e916b2483f
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/fun_arity
@@ -0,0 +1,37 @@
+
+fun_arity.erl:100: Fun application will fail since _@c1 :: fun(() -> any()) is not a function of arity 1
+fun_arity.erl:100: Function 'Mfa_0_ko'/1 has no local return
+fun_arity.erl:104: Fun application will fail since _@c1 :: fun((_) -> any()) is not a function of arity 0
+fun_arity.erl:104: Function 'Mfa_1_ko'/1 has no local return
+fun_arity.erl:111: Fun application will fail since _@c1 :: fun(() -> any()) is not a function of arity 1
+fun_arity.erl:111: Function mFa_0_ko/1 has no local return
+fun_arity.erl:115: Fun application will fail since _@c1 :: fun((_) -> any()) is not a function of arity 0
+fun_arity.erl:115: Function mFa_1_ko/1 has no local return
+fun_arity.erl:122: Fun application will fail since _@c2 :: fun(() -> any()) is not a function of arity 1
+fun_arity.erl:122: Function 'MFa_0_ko'/2 has no local return
+fun_arity.erl:126: Fun application will fail since _@c2 :: fun((_) -> any()) is not a function of arity 0
+fun_arity.erl:126: Function 'MFa_1_ko'/2 has no local return
+fun_arity.erl:35: Fun application will fail since _@c0 :: fun(() -> 'ok') is not a function of arity 1
+fun_arity.erl:35: Function f_0_ko/0 has no local return
+fun_arity.erl:39: Fun application will fail since _@c0 :: fun((_) -> 'ok') is not a function of arity 0
+fun_arity.erl:39: Function f_1_ko/0 has no local return
+fun_arity.erl:48: Fun application will fail since _@c0 :: fun(() -> 'ok') is not a function of arity 1
+fun_arity.erl:48: Function fa_0_ko/0 has no local return
+fun_arity.erl:53: Fun application will fail since _@c0 :: fun((_) -> 'ok') is not a function of arity 0
+fun_arity.erl:53: Function fa_1_ko/0 has no local return
+fun_arity.erl:63: Fun application will fail since _@c0 :: fun(() -> any()) is not a function of arity 1
+fun_arity.erl:63: Function mfa_0_ko/0 has no local return
+fun_arity.erl:68: Fun application will fail since _@c0 :: fun((_) -> any()) is not a function of arity 0
+fun_arity.erl:68: Function mfa_1_ko/0 has no local return
+fun_arity.erl:76: Fun application will fail since _@c0 :: fun(() -> any()) is not a function of arity 1
+fun_arity.erl:76: Function mfa_ne_0_ko/0 has no local return
+fun_arity.erl:78: Function mf_ne/0 will never be called
+fun_arity.erl:81: Fun application will fail since _@c0 :: fun((_) -> any()) is not a function of arity 0
+fun_arity.erl:81: Function mfa_ne_1_ko/0 has no local return
+fun_arity.erl:83: Function mf_ne/1 will never be called
+fun_arity.erl:89: Fun application will fail since _@c0 :: fun(() -> any()) is not a function of arity 1
+fun_arity.erl:89: Function mfa_nd_0_ko/0 has no local return
+fun_arity.erl:90: Call to missing or unexported function fun_arity:mf_nd/0
+fun_arity.erl:93: Fun application will fail since _@c0 :: fun((_) -> any()) is not a function of arity 0
+fun_arity.erl:93: Function mfa_nd_1_ko/0 has no local return
+fun_arity.erl:94: Call to missing or unexported function fun_arity:mf_nd/1
diff --git a/lib/dialyzer/test/small_SUITE_data/results/guards b/lib/dialyzer/test/small_SUITE_data/results/guards
index 824a7cfa24..cd0d3cace0 100644
--- a/lib/dialyzer/test/small_SUITE_data/results/guards
+++ b/lib/dialyzer/test/small_SUITE_data/results/guards
@@ -10,8 +10,8 @@ guards.erl:136: The call guards:t16('a') will never return since it differs in t
guards.erl:136: The call guards:t16('c') will never return since it differs in the 1st argument from the success typing arguments: ('b')
guards.erl:55: Function t5/1 has no local return
guards.erl:55: Guard test is_integer(A::atom()) can never succeed
-guards.erl:59: Clause guard cannot succeed. The variable A was matched against the type any()
guards.erl:59: Function t6/1 has no local return
+guards.erl:59: Guard test is_integer(A::atom()) can never succeed
guards.erl:67: The call guards:t7({42}) will never return since it differs in the 1st argument from the success typing arguments: (atom() | integer())
guards.erl:75: The call guards:t8({42}) will never return since it differs in the 1st argument from the success typing arguments: (atom() | integer())
guards.erl:92: The variable _ can never match since previous clauses completely covered the type {'true','true'}
diff --git a/lib/dialyzer/test/small_SUITE_data/results/higher_order_discrepancy b/lib/dialyzer/test/small_SUITE_data/results/higher_order_discrepancy
index 7ce440a60d..11b9ecade6 100644
--- a/lib/dialyzer/test/small_SUITE_data/results/higher_order_discrepancy
+++ b/lib/dialyzer/test/small_SUITE_data/results/higher_order_discrepancy
@@ -1,4 +1,3 @@
-higher_order_discrepancy.erl:11: The call higher_order_discrepancy:g('foo') will never return since it differs in the 1st argument from the success typing arguments: ('bar')
-higher_order_discrepancy.erl:14: Function g/1 has no local return
-higher_order_discrepancy.erl:14: The pattern 'bar' can never match the type 'foo'
+higher_order_discrepancy.erl:19: Function g/1 has no local return
+higher_order_discrepancy.erl:19: The pattern 'bar' can never match the type 'foo'
diff --git a/lib/dialyzer/test/small_SUITE_data/results/literals b/lib/dialyzer/test/small_SUITE_data/results/literals
index 03e161ca71..1ee39453a4 100644
--- a/lib/dialyzer/test/small_SUITE_data/results/literals
+++ b/lib/dialyzer/test/small_SUITE_data/results/literals
@@ -1,14 +1,17 @@
literals.erl:11: Function t1/0 has no local return
-literals.erl:12: Record construction #r{id::'a'} violates the declared type of field id::'integer' | 'undefined'
+literals.erl:12: Record construction #r{id::'a'} violates the declared type of field id::'integer'
literals.erl:14: Function t2/0 has no local return
-literals.erl:15: Record construction #r{id::'a'} violates the declared type of field id::'integer' | 'undefined'
+literals.erl:15: Record construction #r{id::'a'} violates the declared type of field id::'integer'
literals.erl:17: Function t3/0 has no local return
-literals.erl:18: Record construction #r{id::'a'} violates the declared type of field id::'integer' | 'undefined'
-literals.erl:21: Record construction #r{id::'a'} violates the declared type of field id::'integer' | 'undefined'
+literals.erl:18: Record construction #r{id::'a'} violates the declared type of field id::'integer'
+literals.erl:20: Function t4/0 has no local return
+literals.erl:21: Record construction #r{id::'a'} violates the declared type of field id::'integer'
literals.erl:23: Function m1/1 has no local return
-literals.erl:23: Matching of pattern {'r', 'a'} tagged with a record name violates the declared type of #r{id::'integer' | 'undefined'}
+literals.erl:23: Matching of pattern {'r', 'a'} tagged with a record name violates the declared type of #r{id::'integer'}
literals.erl:26: Function m2/1 has no local return
-literals.erl:26: Matching of pattern {'r', 'a'} tagged with a record name violates the declared type of #r{id::'integer' | 'undefined'}
+literals.erl:26: Matching of pattern {'r', 'a'} tagged with a record name violates the declared type of #r{id::'integer'}
literals.erl:29: Function m3/1 has no local return
literals.erl:29: The pattern {{'r', 'a'}} can never match the type any()
+literals.erl:32: Function m4/1 has no local return
+literals.erl:32: Matching of pattern {'r', 'a'} tagged with a record name violates the declared type of #r{id::'integer'}
diff --git a/lib/dialyzer/test/small_SUITE_data/results/maps1 b/lib/dialyzer/test/small_SUITE_data/results/maps1
new file mode 100644
index 0000000000..f36f7f4926
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/maps1
@@ -0,0 +1,4 @@
+
+maps1.erl:43: Function t3/0 has no local return
+maps1.erl:44: The call maps1:foo(#{'greger'=>3, #{'arne'=>'anka'}=>45},1) will never return since it differs in the 1st and 2nd argument from the success typing arguments: (#{'beta':=_, _=>_},'b')
+maps1.erl:52: The variable Mod can never match since previous clauses completely covered the type #{}
diff --git a/lib/dialyzer/test/small_SUITE_data/results/maps_difftype b/lib/dialyzer/test/small_SUITE_data/results/maps_difftype
index 8980321135..3018b888db 100644
--- a/lib/dialyzer/test/small_SUITE_data/results/maps_difftype
+++ b/lib/dialyzer/test/small_SUITE_data/results/maps_difftype
@@ -1,3 +1,3 @@
maps_difftype.erl:10: Function empty_mismatch/1 has no local return
-maps_difftype.erl:11: The pattern ~{}~ can never match the type tuple()
+maps_difftype.erl:11: The pattern #{} can never match the type tuple()
diff --git a/lib/dialyzer/test/small_SUITE_data/results/maps_sum b/lib/dialyzer/test/small_SUITE_data/results/maps_sum
index a19c0bba96..bd192bdb93 100644
--- a/lib/dialyzer/test/small_SUITE_data/results/maps_sum
+++ b/lib/dialyzer/test/small_SUITE_data/results/maps_sum
@@ -1,4 +1,4 @@
-maps_sum.erl:15: Invalid type specification for function maps_sum:wrong1/1. The success typing is (#{}) -> any()
+maps_sum.erl:15: Invalid type specification for function maps_sum:wrong1/1. The success typing is (map()) -> any()
maps_sum.erl:26: Function wrong2/1 has no local return
maps_sum.erl:27: The call lists:foldl(fun((_,_,_) -> any()),0,Data::any()) will never return since it differs in the 1st argument from the success typing arguments: (fun((_,_) -> any()),any(),[any()])
diff --git a/lib/dialyzer/test/small_SUITE_data/results/non_existing b/lib/dialyzer/test/small_SUITE_data/results/non_existing
index 58da2bfc8b..b0da5998c7 100644
--- a/lib/dialyzer/test/small_SUITE_data/results/non_existing
+++ b/lib/dialyzer/test/small_SUITE_data/results/non_existing
@@ -1,2 +1,3 @@
+non_existing.erl:12: Call to missing or unexported function lists:non_existing_fun/1
non_existing.erl:9: Call to missing or unexported function lists:non_existing_call/1
diff --git a/lib/dialyzer/test/small_SUITE_data/results/pretty_bitstring b/lib/dialyzer/test/small_SUITE_data/results/pretty_bitstring
index 0ad6eee766..e148e5cf22 100644
--- a/lib/dialyzer/test/small_SUITE_data/results/pretty_bitstring
+++ b/lib/dialyzer/test/small_SUITE_data/results/pretty_bitstring
@@ -1,3 +1,3 @@
pretty_bitstring.erl:7: Function t/0 has no local return
-pretty_bitstring.erl:8: The call binary:copy(#{#<1>(8, 1, 'integer', ['unsigned', 'big']), #<2>(8, 1, 'integer', ['unsigned', 'big']), #<3>(3, 1, 'integer', ['unsigned', 'big'])}#,2) breaks the contract (Subject,N) -> binary() when is_subtype(Subject,binary()), is_subtype(N,non_neg_integer())
+pretty_bitstring.erl:8: The call binary:copy(#{#<1>(8, 1, 'integer', ['unsigned', 'big']), #<2>(8, 1, 'integer', ['unsigned', 'big']), #<3>(3, 1, 'integer', ['unsigned', 'big'])}#,2) breaks the contract (Subject,N) -> binary() when Subject :: binary(), N :: non_neg_integer()
diff --git a/lib/dialyzer/test/small_SUITE_data/results/record_creation_diffs b/lib/dialyzer/test/small_SUITE_data/results/record_creation_diffs
index f00c4b10ff..c971935bbf 100644
--- a/lib/dialyzer/test/small_SUITE_data/results/record_creation_diffs
+++ b/lib/dialyzer/test/small_SUITE_data/results/record_creation_diffs
@@ -1,3 +1,3 @@
record_creation_diffs.erl:10: Function foo/1 has no local return
-record_creation_diffs.erl:11: Record construction #bar{some_list::{'this','is','a','tuple'}} violates the declared type of field some_list::'undefined' | [any()]
+record_creation_diffs.erl:11: Record construction #bar{some_list::{'this','is','a','tuple'}} violates the declared type of field some_list::[any()]
diff --git a/lib/dialyzer/test/small_SUITE_data/results/record_pat b/lib/dialyzer/test/small_SUITE_data/results/record_pat
index a46be6c451..8317ea041a 100644
--- a/lib/dialyzer/test/small_SUITE_data/results/record_pat
+++ b/lib/dialyzer/test/small_SUITE_data/results/record_pat
@@ -1,2 +1,2 @@
-record_pat.erl:14: Matching of pattern {'foo', 'baz'} tagged with a record name violates the declared type of #foo{bar::'undefined' | integer()}
+record_pat.erl:14: Matching of pattern {'foo', 'baz'} tagged with a record name violates the declared type of #foo{bar::integer()}
diff --git a/lib/dialyzer/test/small_SUITE_data/results/relevant_record_warning b/lib/dialyzer/test/small_SUITE_data/results/relevant_record_warning
index 2e417e1b2a..ea3ac92d96 100644
--- a/lib/dialyzer/test/small_SUITE_data/results/relevant_record_warning
+++ b/lib/dialyzer/test/small_SUITE_data/results/relevant_record_warning
@@ -1,3 +1,3 @@
relevant_record_warning.erl:22: Function test/1 has no local return
-relevant_record_warning.erl:23: Record construction #r{field::<<_:8>>} violates the declared type of field field::'binary' | 'undefined'
+relevant_record_warning.erl:23: Record construction #r{field::<<_:8>>} violates the declared type of field field::'binary'
diff --git a/lib/dialyzer/test/small_SUITE_data/results/undefined b/lib/dialyzer/test/small_SUITE_data/results/undefined
new file mode 100644
index 0000000000..9daa8640d3
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/undefined
@@ -0,0 +1,2 @@
+
+r.erl:16: Record construction #r{a::{'fi'},b::{'a','b'},c::[],d::'undefined',e::[],f::'undefined'} violates the declared type of field b::[any()] and d::[any()] and f::[any()]
diff --git a/lib/dialyzer/test/small_SUITE_data/src/abs.erl b/lib/dialyzer/test/small_SUITE_data/src/abs.erl
new file mode 100644
index 0000000000..251e24cdfc
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/abs.erl
@@ -0,0 +1,71 @@
+-module(abs).
+
+%% OTP-12948. erlang:abs/1 bug fix.
+
+-export([t/0]).
+
+t() ->
+ Fs = [fun i1/0, fun i2/0, fun i3/0, fun i4/0, fun f1/0],
+ _ = [catch F() || F <- Fs],
+ ok.
+
+i1() ->
+ A = int(),
+ I1 = i1(A),
+ true = I1 < 2,
+ true = I1 < 1. % can never match
+
+-spec i1(neg_integer()) -> non_neg_integer().
+
+i1(A) when is_integer(A), A < 0 ->
+ abs(A).
+
+i2() ->
+ A = int(),
+ I2 = i2(A),
+ true = I2 < 1,
+ true = I2 < 0. % can never match
+
+-spec i2(non_neg_integer()) -> non_neg_integer().
+
+i2(A) when is_integer(A), A >= 0 ->
+ abs(A).
+
+i3() ->
+ A = int(),
+ I3 = i3(A),
+ true = I3 < -1,
+ true = I3 < 0. % can never match
+
+-spec i3(integer()) -> non_neg_integer().
+
+i3(A) when is_integer(A) ->
+ abs(A).
+
+i4() ->
+ A = int(),
+ I4 = i4(A),
+ true = I4 =:= 0 orelse I4 =:= 1,
+ true = I4 < 0 orelse I4 > 1. % can never match
+
+-spec i4(integer()) -> number().
+
+i4(A) when A =:= -1; A =:= 0; A =:= 1 ->
+ abs(A).
+
+f1() ->
+ F1 = f1(float()),
+ math:sqrt(F1).
+
+f1(A) ->
+ abs(A).
+
+-spec int() -> integer().
+
+int() ->
+ foo:bar().
+
+-spec float() -> float().
+
+float() ->
+ math:sqrt(1.0).
diff --git a/lib/dialyzer/test/small_SUITE_data/src/anno.erl b/lib/dialyzer/test/small_SUITE_data/src/anno.erl
new file mode 100644
index 0000000000..70f1d42141
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/anno.erl
@@ -0,0 +1,18 @@
+-module(anno).
+
+%% OTP-14131
+
+-export([t1/0, t2/0, t3/0]).
+
+t1() ->
+ A = erl_parse:anno_from_term({attribute, 1, module, my_test}),
+ compile:forms([A], []).
+
+t2() ->
+ A = erl_parse:new_anno({attribute, 1, module, my_test}),
+ compile:forms([A], []).
+
+t3() ->
+ A = erl_parse:new_anno({attribute, 1, module, my_test}),
+ T = erl_parse:anno_to_term(A),
+ {attribute, 1, module, my_test} = T.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/bif1/bif1.erl b/lib/dialyzer/test/small_SUITE_data/src/bif1/bif1.erl
new file mode 100644
index 0000000000..bc746538d3
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/bif1/bif1.erl
@@ -0,0 +1,16 @@
+-module(bif1).
+
+%% Another set of warnings, due to the removal of functions from
+%% erl_bif_types.
+
+-export([ets_rename/0, string_chars/0]).
+
+ets_rename() ->
+ A = ets:new(fipp, []),
+ true = not is_atom(A),
+ ets:rename(A, fopp). % No warning
+
+string_chars() ->
+ L2 = bif1_adt:opaque_string(),
+ S = $A,
+ string:chars(S, 10, L2). % Warning
diff --git a/lib/dialyzer/test/small_SUITE_data/src/bif1/bif1_adt.erl b/lib/dialyzer/test/small_SUITE_data/src/bif1/bif1_adt.erl
new file mode 100644
index 0000000000..01b0bccc68
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/bif1/bif1_adt.erl
@@ -0,0 +1,12 @@
+-module(bif1_adt).
+
+-export([opaque_string/0]).
+
+-export_type([s/0]).
+
+-opaque s() :: string().
+
+-spec opaque_string() -> s().
+
+opaque_string() ->
+ "string".
diff --git a/lib/dialyzer/test/small_SUITE_data/src/big_external_type.erl b/lib/dialyzer/test/small_SUITE_data/src/big_external_type.erl
index ab84e94106..9ad4810a5e 100644
--- a/lib/dialyzer/test/small_SUITE_data/src/big_external_type.erl
+++ b/lib/dialyzer/test/small_SUITE_data/src/big_external_type.erl
@@ -36,7 +36,7 @@
%% Start of Abstract Format
--type line() :: erl_scan:line().
+-type line() :: erl_anno:line().
-export_type([af_record_index/0, af_record_field/1, af_record_name/0,
af_field_name/0, af_function_decl/0]).
@@ -332,8 +332,8 @@
%% End of Abstract Format
-type error_description() :: term().
--type error_info() :: {erl_scan:line(), module(), error_description()}.
--type token() :: {Tag :: atom(), Line :: erl_scan:line()}.
+-type error_info() :: {erl_anno:line(), module(), error_description()}.
+-type token() :: {Tag :: atom(), Line :: erl_anno:line()}.
%% mkop(Op, Arg) -> {op,Line,Op,Arg}.
%% mkop(Left, Op, Right) -> {op,Line,Op,Left,Right}.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/big_local_type.erl b/lib/dialyzer/test/small_SUITE_data/src/big_local_type.erl
index fc7c5241a8..fe567ff10d 100644
--- a/lib/dialyzer/test/small_SUITE_data/src/big_local_type.erl
+++ b/lib/dialyzer/test/small_SUITE_data/src/big_local_type.erl
@@ -36,7 +36,7 @@
%% Start of Abstract Format
--type line() :: erl_scan:line().
+-type line() :: erl_anno:line().
-export_type([af_module/0, af_export/0, af_import/0, af_fa_list/0,
af_compile/0, af_file/0, af_record_decl/0,
@@ -329,8 +329,8 @@
%% End of Abstract Format
-type error_description() :: term().
--type error_info() :: {erl_scan:line(), module(), error_description()}.
--type token() :: {Tag :: atom(), Line :: erl_scan:line()}.
+-type error_info() :: {erl_anno:line(), module(), error_description()}.
+-type token() :: {Tag :: atom(), Line :: erl_anno:line()}.
%% mkop(Op, Arg) -> {op,Line,Op,Arg}.
%% mkop(Left, Op, Right) -> {op,Line,Op,Left,Right}.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/chars.erl b/lib/dialyzer/test/small_SUITE_data/src/chars.erl
new file mode 100644
index 0000000000..1e9c8ab6b9
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/chars.erl
@@ -0,0 +1,32 @@
+-module(chars).
+
+%% ERL-313
+
+-export([t/0]).
+-export([t1/0]).
+
+-record(r, {f :: $A .. $Z}).
+
+-type cs() :: $A..$Z | $a .. $z | $/.
+
+-spec t() -> $0-$0..$9-$0| $?.
+
+t() ->
+ c(#r{f = $z - 3}),
+ c($z - 3),
+ c($B).
+
+-spec c(cs()) -> $3-$0..$9-$0.
+
+c($A + 1) -> 2;
+c(C) ->
+ case C of
+ $z - 3 -> 3;
+ #r{f = $z - 3} -> 7
+ end.
+
+%% Display contract with character in warning:
+-spec f(#{a := $1, b => $2, c => $3}) -> ok. % invalid type spec
+f(_) -> ok.
+
+t1() -> f(#{b => $2}). % breaks the contract
diff --git a/lib/dialyzer/test/small_SUITE_data/src/comparisons.erl b/lib/dialyzer/test/small_SUITE_data/src/comparisons.erl
index 70e3cb6af4..64927c6c91 100644
--- a/lib/dialyzer/test/small_SUITE_data/src/comparisons.erl
+++ b/lib/dialyzer/test/small_SUITE_data/src/comparisons.erl
@@ -19,12 +19,20 @@ tuple(X) when is_tuple(X) -> X.
list() -> list(?r).
list(X) when is_list(X) -> X.
+map() -> map(?r).
+map(X) when is_map(X) -> #{}.
+
+bitstring() -> bitstring(?r).
+bitstring(X) when is_bitstring(X) -> <<0:63>>.
+
i() -> integer().
f() -> mfloat().
n() -> case ?r of 1 -> i(); 2 -> f() end.
a() -> atom().
t() -> tuple().
l() -> list().
+m() -> map().
+b() -> bitstring().
na() -> case ?r of 1 -> n(); 2 -> a() end.
at() -> case ?r of 1 -> t(); 2 -> a() end.
tl() -> case ?r of 1 -> t(); 2 -> l() end.
@@ -53,6 +61,14 @@ test_i_ll_l() -> case i() < l() of true -> always; false -> never end.
test_i_le_l() -> case i() =< l() of true -> always; false -> never end.
test_i_gg_l() -> case i() > l() of true -> never; false -> always end.
test_i_ge_l() -> case i() >= l() of true -> never; false -> always end.
+test_i_ll_m() -> case i() < m() of true -> always; false -> never end.
+test_i_le_m() -> case i() =< m() of true -> always; false -> never end.
+test_i_gg_m() -> case i() > m() of true -> never; false -> always end.
+test_i_ge_m() -> case i() >= m() of true -> never; false -> always end.
+test_i_ll_b() -> case i() < b() of true -> always; false -> never end.
+test_i_le_b() -> case i() =< b() of true -> always; false -> never end.
+test_i_gg_b() -> case i() > b() of true -> never; false -> always end.
+test_i_ge_b() -> case i() >= b() of true -> never; false -> always end.
test_f_ll_i() -> case f() < i() of true -> maybe; false -> maybe_too end.
test_f_le_i() -> case f() =< i() of true -> maybe; false -> maybe_too end.
@@ -78,6 +94,14 @@ test_f_ll_l() -> case f() < l() of true -> always; false -> never end.
test_f_le_l() -> case f() =< l() of true -> always; false -> never end.
test_f_gg_l() -> case f() > l() of true -> never; false -> always end.
test_f_ge_l() -> case f() >= l() of true -> never; false -> always end.
+test_f_ll_m() -> case f() < m() of true -> always; false -> never end.
+test_f_le_m() -> case f() =< m() of true -> always; false -> never end.
+test_f_gg_m() -> case f() > m() of true -> never; false -> always end.
+test_f_ge_m() -> case f() >= m() of true -> never; false -> always end.
+test_f_ll_b() -> case f() < b() of true -> always; false -> never end.
+test_f_le_b() -> case f() =< b() of true -> always; false -> never end.
+test_f_gg_b() -> case f() > b() of true -> never; false -> always end.
+test_f_ge_b() -> case f() >= b() of true -> never; false -> always end.
test_n_ll_i() -> case n() < i() of true -> maybe; false -> maybe_too end.
test_n_le_i() -> case n() =< i() of true -> maybe; false -> maybe_too end.
@@ -103,6 +127,14 @@ test_n_ll_l() -> case n() < l() of true -> always; false -> never end.
test_n_le_l() -> case n() =< l() of true -> always; false -> never end.
test_n_gg_l() -> case n() > l() of true -> never; false -> always end.
test_n_ge_l() -> case n() >= l() of true -> never; false -> always end.
+test_n_ll_m() -> case n() < m() of true -> always; false -> never end.
+test_n_le_m() -> case n() =< m() of true -> always; false -> never end.
+test_n_gg_m() -> case n() > m() of true -> never; false -> always end.
+test_n_ge_m() -> case n() >= m() of true -> never; false -> always end.
+test_n_ll_b() -> case n() < b() of true -> always; false -> never end.
+test_n_le_b() -> case n() =< b() of true -> always; false -> never end.
+test_n_gg_b() -> case n() > b() of true -> never; false -> always end.
+test_n_ge_b() -> case n() >= b() of true -> never; false -> always end.
test_a_ll_i() -> case a() < i() of true -> never; false -> always end.
test_a_le_i() -> case a() =< i() of true -> never; false -> always end.
@@ -128,6 +160,14 @@ test_a_ll_l() -> case a() < l() of true -> always; false -> never end.
test_a_le_l() -> case a() =< l() of true -> always; false -> never end.
test_a_gg_l() -> case a() > l() of true -> never; false -> always end.
test_a_ge_l() -> case a() >= l() of true -> never; false -> always end.
+test_a_ll_m() -> case a() < m() of true -> always; false -> never end.
+test_a_le_m() -> case a() =< m() of true -> always; false -> never end.
+test_a_gg_m() -> case a() > m() of true -> never; false -> always end.
+test_a_ge_m() -> case a() >= m() of true -> never; false -> always end.
+test_a_ll_b() -> case a() < b() of true -> always; false -> never end.
+test_a_le_b() -> case a() =< b() of true -> always; false -> never end.
+test_a_gg_b() -> case a() > b() of true -> never; false -> always end.
+test_a_ge_b() -> case a() >= b() of true -> never; false -> always end.
test_t_ll_i() -> case t() < i() of true -> never; false -> always end.
test_t_le_i() -> case t() =< i() of true -> never; false -> always end.
@@ -153,6 +193,14 @@ test_t_ll_l() -> case t() < l() of true -> always; false -> never end.
test_t_le_l() -> case t() =< l() of true -> always; false -> never end.
test_t_gg_l() -> case t() > l() of true -> never; false -> always end.
test_t_ge_l() -> case t() >= l() of true -> never; false -> always end.
+test_t_ll_m() -> case t() < m() of true -> always; false -> never end.
+test_t_le_m() -> case t() =< m() of true -> always; false -> never end.
+test_t_gg_m() -> case t() > m() of true -> never; false -> always end.
+test_t_ge_m() -> case t() >= m() of true -> never; false -> always end.
+test_t_ll_b() -> case t() < b() of true -> always; false -> never end.
+test_t_le_b() -> case t() =< b() of true -> always; false -> never end.
+test_t_gg_b() -> case t() > b() of true -> never; false -> always end.
+test_t_ge_b() -> case t() >= b() of true -> never; false -> always end.
test_l_ll_i() -> case l() < i() of true -> never; false -> always end.
test_l_le_i() -> case l() =< i() of true -> never; false -> always end.
@@ -178,6 +226,14 @@ test_l_ll_l() -> case l() < l() of true -> maybe; false -> maybe_too end.
test_l_le_l() -> case l() =< l() of true -> maybe; false -> maybe_too end.
test_l_gg_l() -> case l() > l() of true -> maybe; false -> maybe_too end.
test_l_ge_l() -> case l() >= l() of true -> maybe; false -> maybe_too end.
+test_l_ll_m() -> case l() < m() of true -> never; false -> always end.
+test_l_le_m() -> case l() =< m() of true -> never; false -> always end.
+test_l_gg_m() -> case l() > m() of true -> always; false -> never end.
+test_l_ge_m() -> case l() >= m() of true -> always; false -> never end.
+test_l_ll_b() -> case l() < b() of true -> always; false -> never end.
+test_l_le_b() -> case l() =< b() of true -> always; false -> never end.
+test_l_gg_b() -> case l() > b() of true -> never; false -> always end.
+test_l_ge_b() -> case l() >= b() of true -> never; false -> always end.
test_n_ll_na() -> case n() < na() of true -> maybe; false -> maybe_too end.
test_n_le_na() -> case n() =< na() of true -> maybe; false -> maybe_too end.
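The new comparison clauses against maps and bitstrings rely on Erlang's total term order (number < atom < reference < fun < port < pid < tuple < map < list < bit string), which is why, for example, l() < m() can only evaluate to false while l() < b() can only evaluate to true. A minimal stand-alone sketch of that ordering (not part of the test suite, module name hypothetical):

-module(term_order_demo).
-export([t/0]).

t() ->
    true = (1 < #{}),        % numbers sort before maps
    true = (#{} < []),       % maps sort before lists
    true = ([1,2] < <<1>>),  % lists sort before bit strings
    ok.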
diff --git a/lib/dialyzer/test/small_SUITE_data/src/fun_arity.erl b/lib/dialyzer/test/small_SUITE_data/src/fun_arity.erl
new file mode 100644
index 0000000000..850d2fd331
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/fun_arity.erl
@@ -0,0 +1,127 @@
+%%--------------------------------------------------------------------------
+%% Module which contains calls to funs of different arity.
+%%--------------------------------------------------------------------------
+-module(fun_arity).
+
+-export([f_0_ok/0, f_0_ko/0]).
+-export([f_1_ok/0, f_1_ko/0]).
+
+-export([fa_0_ok/0, fa_0_ko/0]).
+-export([fa_1_ok/0, fa_1_ko/0]).
+
+-export([mfa_0_ok/0, mfa_0_ko/0, mf/0]).
+-export([mfa_1_ok/0, mfa_1_ko/0, mf/1]).
+
+-export([mfa_ne_0_ok/0, mfa_ne_0_ko/0]).
+-export([mfa_ne_1_ok/0, mfa_ne_1_ko/0]).
+
+-export([mfa_nd_0_ok/0, mfa_nd_0_ko/0]).
+-export([mfa_nd_1_ok/0, mfa_nd_1_ko/0]).
+
+-export(['Mfa_0_ok'/1, 'Mfa_0_ko'/1]).
+-export(['Mfa_1_ok'/1, 'Mfa_1_ko'/1]).
+
+-export(['mFa_0_ok'/1, 'mFa_0_ko'/1]).
+-export(['mFa_1_ok'/1, 'mFa_1_ko'/1]).
+
+-export(['MFa_0_ok'/2, 'MFa_0_ko'/2]).
+-export(['MFa_1_ok'/2, 'MFa_1_ko'/2]).
+
+%%--------------------------------------------------------------------------
+
+%% Funs like "fun(...) -> ... end".
+
+f_0_ok() -> (fun_f_0())().
+f_0_ko() -> (fun_f_0())(1).
+fun_f_0() -> fun() -> ok end.
+
+f_1_ok() -> (fun_f_1())(1).
+f_1_ko() -> (fun_f_1())().
+fun_f_1() -> fun(_) -> ok end .
+
+%%--------------------------------------------------------------------------
+
+%% Funs like "fun F/A" when F is literal atom and A is literal
+%% non-negative integer.
+
+fa_0_ok() -> (fun_fa_0())().
+fa_0_ko() -> (fun_fa_0())(1).
+fun_fa_0() -> fun f/0.
+f() -> ok.
+
+fa_1_ok() -> (fun_fa_1())(1).
+fa_1_ko() -> (fun_fa_1())().
+fun_fa_1() -> fun f/1.
+f(_) -> ok.
+
+%%--------------------------------------------------------------------------
+
+%% Funs like "fun M:F/A" when M and F are literal atoms, A is literal
+%% non-negative integer and function is (defined and) exported.
+
+mfa_0_ok() -> (fun_mfa_0())().
+mfa_0_ko() -> (fun_mfa_0())(1).
+fun_mfa_0() -> fun ?MODULE:mf/0.
+mf() -> ok.
+
+mfa_1_ok() -> (fun_mfa_1())(1).
+mfa_1_ko() -> (fun_mfa_1())().
+fun_mfa_1() -> fun ?MODULE:mf/1.
+mf(_) -> ok.
+
+%% Funs like "fun M:F/A" when M and F are literal atoms, A is literal
+%% non-negative integer and function is defined but not exported.
+
+mfa_ne_0_ok() -> (fun_mfa_ne_0())().
+mfa_ne_0_ko() -> (fun_mfa_ne_0())(1).
+fun_mfa_ne_0() -> fun ?MODULE:mf_ne/0.
+mf_ne() -> ok.
+
+mfa_ne_1_ok() -> (fun_mfa_ne_1())(1).
+mfa_ne_1_ko() -> (fun_mfa_ne_1())().
+fun_mfa_ne_1() -> fun ?MODULE:mf_ne/1.
+mf_ne(_) -> ok.
+
+%% Funs like "fun M:F/A" when M and F are literal atoms, A is literal
+%% non-negative integer and function is not defined.
+
+mfa_nd_0_ok() -> (fun_mfa_nd_0())().
+mfa_nd_0_ko() -> (fun_mfa_nd_0())(1).
+fun_mfa_nd_0() -> fun ?MODULE:mf_nd/0.
+
+mfa_nd_1_ok() -> (fun_mfa_nd_1())(1).
+mfa_nd_1_ko() -> (fun_mfa_nd_1())().
+fun_mfa_nd_1() -> fun ?MODULE:mf_nd/1.
+
+%% Funs like "fun M:F/A" when M is variable, F is a literal atom and A
+%% is literal non-negative integer.
+
+'Mfa_0_ok'(M) -> ('fun_Mfa_0'(M))().
+'Mfa_0_ko'(M) -> ('fun_Mfa_0'(M))(1).
+'fun_Mfa_0'(M) -> fun M:f/0.
+
+'Mfa_1_ok'(M) -> ('fun_Mfa_1'(M))(1).
+'Mfa_1_ko'(M) -> ('fun_Mfa_1'(M))().
+'fun_Mfa_1'(M) -> fun M:f/1.
+
+%% Funs like "fun M:F/A" when M is literal atom, F is variable and A
+%% is literal non-negative integer.
+
+'mFa_0_ok'(F) -> ('fun_mFa_0'(F))().
+'mFa_0_ko'(F) -> ('fun_mFa_0'(F))(1).
+'fun_mFa_0'(F) -> fun ?MODULE:F/0.
+
+'mFa_1_ok'(F) -> ('fun_mFa_1'(F))(1).
+'mFa_1_ko'(F) -> ('fun_mFa_1'(F))().
+'fun_mFa_1'(F) -> fun ?MODULE:F/1.
+
+%% Funs like "fun M:F/A" when M and F are variables and A is literal
+%% non-negative integer.
+
+'MFa_0_ok'(M, F) -> ('fun_MFa_0'(M, F))().
+'MFa_0_ko'(M, F) -> ('fun_MFa_0'(M, F))(1).
+'fun_MFa_0'(M, F) -> fun M:F/0.
+
+'MFa_1_ok'(M, F) -> ('fun_MFa_1'(M, F))(1).
+'MFa_1_ko'(M, F) -> ('fun_MFa_1'(M, F))().
+'fun_MFa_1'(M, F) -> fun M:F/1.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/higher_order_discrepancy.erl b/lib/dialyzer/test/small_SUITE_data/src/higher_order_discrepancy.erl
index ff5ee6bac4..f9547d4929 100644
--- a/lib/dialyzer/test/small_SUITE_data/src/higher_order_discrepancy.erl
+++ b/lib/dialyzer/test/small_SUITE_data/src/higher_order_discrepancy.erl
@@ -1,3 +1,8 @@
+%% With the patch introduced to avoid false warnings in
+%% user_SUITE_data/src/wpc_hlines.erl we can unfortunately no longer precisely
+%% catch problems like this one... The refinement procedure is still enough to
+%% keep some of the details, nevertheless.
+
-module(higher_order_discrepancy).
-export([test/1]).
diff --git a/lib/dialyzer/test/small_SUITE_data/src/keydel.erl b/lib/dialyzer/test/small_SUITE_data/src/keydel.erl
new file mode 100644
index 0000000000..18a5c0670c
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/keydel.erl
@@ -0,0 +1,29 @@
+-module(keydel).
+
+-export([store/3]).
+
+-record(att, {f}).
+
+-type attachment() :: list().
+
+-opaque att() :: #att{} | attachment().
+
+-spec store(atom(), any(), att()) -> att().
+store(Field, undefined, Att) when is_list(Att) ->
+ lists:keydelete(Field, 1, Att);
+store(Field, Value, Att) when is_list(Att) ->
+ lists:keystore(Field, 1, Att, {Field, Value});
+store(Field, Value, Att) ->
+ store(Field, Value, upgrade(Att)).
+
+
+-spec upgrade(#att{}) -> attachment().
+upgrade(#att{} = Att) ->
+ Map = lists:zip(
+ record_info(fields, att),
+ lists:seq(2, record_info(size, att))
+ ),
+ %% Don't store undefined elements since that is default
+ [{F, element(I, Att)} || {F, I} <- Map, element(I, Att) /= undefined];
+upgrade(Att) ->
+ Att.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/literals.erl b/lib/dialyzer/test/small_SUITE_data/src/literals.erl
index abd7033712..354a0f4cdc 100644
--- a/lib/dialyzer/test/small_SUITE_data/src/literals.erl
+++ b/lib/dialyzer/test/small_SUITE_data/src/literals.erl
@@ -2,7 +2,7 @@
%% Bad records inside structures used to be ignored. The reason:
%% v3_core:unfold() does not annotate the parts of a literal.
-%% This example does not work perfectly yet, in particular Maps.
+%% This example does not work perfectly yet.
-export([t1/0, t2/0, t3/0, t4/0, m1/1, m2/1, m3/1, m4/1]).
@@ -18,7 +18,7 @@ t3() ->
{#r{id = a}}. % violation
t4() ->
- #{a => #r{id = a}}. % violation found, but t4() returns... (bug)
+ #{a => #r{id = a}}. % violation
m1(#r{id = a}) -> % violation
ok.
@@ -29,5 +29,5 @@ m2([#r{id = a}]) -> % violation
m3({#r{id = a}}) -> % can never match; not so good
ok.
-m4(#{a := #r{id = a}}) -> % violation not found
+m4(#{a := #r{id = a}}) -> % violation
ok.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/loopy.erl b/lib/dialyzer/test/small_SUITE_data/src/loopy.erl
new file mode 100644
index 0000000000..28125ec3d9
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/loopy.erl
@@ -0,0 +1,17 @@
+%% ERL-157, OTP-13653.
+%% Would cause Dialyzer to go into an infinite loop.
+
+-module(loopy).
+
+-export([loop/1]).
+
+
+-spec loop(Args) -> ok when
+ Args :: [{Module, Args}],
+ Module :: module(),
+ Args :: any().
+loop([{Module, Args} | Rest]) ->
+ Module:init(Args),
+ loop(Rest);
+loop([]) ->
+ ok.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/maps1.erl b/lib/dialyzer/test/small_SUITE_data/src/maps1.erl
index 06ced5b69e..597358d16a 100644
--- a/lib/dialyzer/test/small_SUITE_data/src/maps1.erl
+++ b/lib/dialyzer/test/small_SUITE_data/src/maps1.erl
@@ -21,7 +21,7 @@ recv(Packet, Fun, Chan) ->
#{id := Can_id, data := Can_data} = P = decode(Packet),
Fun(P).
--spec decode(<<_:64,_:_*8>>) -> #{id => <<_:11>>,timestamp => char()}.
+-spec decode(<<_:64,_:_*8>>) -> #{id => <<_:11>>,timestamp => char(),_ => _}.
decode(<<_:12, Len:4, Timestamp:16, 0:3, Id:11/bitstring, 0:18,
Data:Len/binary, _/binary>>) ->
#{id => Id, data => Data, timestamp => Timestamp}.
@@ -39,3 +39,15 @@ t2() -> ok.
update(#{ id := Id, val := Val } = M, X) when is_integer(Id) ->
M#{ val := [Val,X] }.
+
+t3() ->
+ foo(#{greger => 3, #{arne=>anka} => 45}, 1).
+
+foo(#{} = M, b) -> %% Error
+ M#{alfa => 42, beta := 1337}.
+
+t4() ->
+ case #{} of
+ #{} -> ok;
+ Mod -> Mod:function(#{literal => map}, another_arg) %% Error
+ end.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/maps_redef.erl b/lib/dialyzer/test/small_SUITE_data/src/maps_redef.erl
deleted file mode 100644
index 70059f73b6..0000000000
--- a/lib/dialyzer/test/small_SUITE_data/src/maps_redef.erl
+++ /dev/null
@@ -1,12 +0,0 @@
--module(maps_redef).
-
--export([t/0]).
-
-%% OK in Erlang/OTP 17, at least.
-
--type map() :: atom(). % redefine built-in type
-
--spec t() -> map().
-
-t() ->
- a. % OK
diff --git a/lib/dialyzer/test/small_SUITE_data/src/ms.erl b/lib/dialyzer/test/small_SUITE_data/src/ms.erl
new file mode 100644
index 0000000000..47a5e886cf
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/ms.erl
@@ -0,0 +1,8 @@
+-module(ms).
+-export([t/0]).
+
+-include_lib("stdlib/include/ms_transform.hrl").
+
+t() ->
+ MS = dbg:fun2ms(fun(All) -> message(All) end),
+ erlang:trace_pattern({m, f, '_'}, MS).
diff --git a/lib/dialyzer/test/small_SUITE_data/src/trec.erl b/lib/dialyzer/test/small_SUITE_data/src/trec.erl
index 06706162c1..516358f7c6 100644
--- a/lib/dialyzer/test/small_SUITE_data/src/trec.erl
+++ b/lib/dialyzer/test/small_SUITE_data/src/trec.erl
@@ -8,7 +8,7 @@
-module(trec).
-export([test/0, mk_foo_exp/2]).
--record(foo, {a :: integer(), b :: [atom()]}).
+-record(foo, {a :: integer() | 'undefined', b :: [atom()]}).
%%
%% For these functions we currently get the following warnings:
diff --git a/lib/dialyzer/test/small_SUITE_data/src/tuple1.erl b/lib/dialyzer/test/small_SUITE_data/src/tuple1.erl
index d608275efe..88ac486044 100644
--- a/lib/dialyzer/test/small_SUITE_data/src/tuple1.erl
+++ b/lib/dialyzer/test/small_SUITE_data/src/tuple1.erl
@@ -2,7 +2,7 @@
%%% File : tuple1.erl
%%% Author : Tobias Lindahl <[email protected]>
%%% Description : Exposed two bugs in the analysis;
-%%% one supressed warning and one crash.
+%%% one suppressed warning and one crash.
%%%
%%% Created : 13 Nov 2006 by Tobias Lindahl <[email protected]>
%%%-------------------------------------------------------------------
diff --git a/lib/dialyzer/test/small_SUITE_data/undefined.erl b/lib/dialyzer/test/small_SUITE_data/undefined.erl
new file mode 100644
index 0000000000..8549f2e161
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/undefined.erl
@@ -0,0 +1,29 @@
+-module(undefined).
+
+-export([t/0]).
+
+%% As of OTP 19.0 'undefined' is no longer added to fields with a type
+%% declaration but without an initializer. The pretty printing of
+%% records (erl_types:t_to_string()) is updated to reflect this: if a
+%% field is of type 'undefined', it is output if 'undefined' is not in
+%% the declared type of the field. (It used to be the case that the
+%% singleton type 'undefined' was never output.)
+%%
+%% One consequence is shown by the example below: the warning about
+%% the record construction violating the declared type shows
+%% #r{..., d::'undefined', ...} which is meant to be of help to the
+%% user, who might otherwise be confused the first time they are
+%% confronted by the warning.
+
+-record(r,
+ {
+ a = {fi},
+ b = {a,b} :: list(), % violation
+ c = {a,b} :: list(),
+ d :: list(), % violation
+ e = [] :: list(),
+ f = undefined :: list() % violation
+ }).
+
+t() ->
+ #r{c = []}.
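
Spelled out, the construction in t/0 leaves the defaults in place for every field except c, so b keeps {a,b} while d and f end up as 'undefined'; none of these is a list(), and as of OTP 19 'undefined' is no longer implicitly acceptable for d just because the field lacks an initializer. A hypothetical sibling of t/0 (not in the test data) that makes the resulting tuple explicit:

    t_expanded() ->
        {r, {fi}, {a,b}, [], undefined, [], undefined} = #r{c = []},
        ok.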
diff --git a/lib/dialyzer/test/typer_SUITE.erl b/lib/dialyzer/test/typer_SUITE.erl
new file mode 100644
index 0000000000..da5b961643
--- /dev/null
+++ b/lib/dialyzer/test/typer_SUITE.erl
@@ -0,0 +1,158 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2017. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(typer_SUITE).
+
+-export([all/0,suite/0,groups/0,init_per_suite/1,end_per_suite/1,
+ init_per_group/2,end_per_group/2,
+ smoke/1]).
+
+-include_lib("common_test/include/ct.hrl").
+
+suite() -> [{ct_hooks,[ts_install_cth]}].
+
+all() ->
+ [smoke].
+
+groups() ->
+ [].
+
+init_per_suite(Config) ->
+ OutDir = proplists:get_value(priv_dir, Config),
+ case dialyzer_common:check_plt(OutDir) of
+ fail -> {skip, "Plt creation/check failed."};
+ ok -> [{dialyzer_options, []}|Config]
+ end.
+
+end_per_suite(_Config) ->
+ ok.
+
+init_per_group(_GroupName, Config) ->
+ Config.
+
+end_per_group(_GroupName, Config) ->
+ Config.
+
+smoke(Config) ->
+ Code = <<"-module(typer_test_module).
+ -compile([export_all,nowarn_export_all]).
+ a(L) ->
+ L ++ [1,2,3].">>,
+ PrivDir = proplists:get_value(priv_dir, Config),
+ Src = filename:join(PrivDir, "typer_test_module.erl"),
+ ok = file:write_file(Src, Code),
+ Args = "--plt " ++ PrivDir ++ "dialyzer_plt",
+ Res = ["^$",
+ "^%% File:",
+ "^%% ----",
+ "^-spec a",
+ "^_OK_"],
+ run(Config, Args, Src, Res),
+ ok.
+
+typer() ->
+ case os:find_executable("typer") of
+ false ->
+ ct:fail("Can't find typer");
+ Typer ->
+ Typer
+ end.
+
+%% Runs a command.
+
+run(Config, Args0, Name, Expect) ->
+ Args = Args0 ++ " " ++ Name,
+ Result = run_command(Config, Args),
+ verify_result(Result, Expect).
+
+verify_result(Result, Expect) ->
+ Messages = split(Result, [], []),
+ io:format("Result: ~p", [Messages]),
+ io:format("Expected: ~p", [Expect]),
+ match_messages(Messages, Expect).
+
+split([$\n|Rest], Current, Lines) ->
+ split(Rest, [], [lists:reverse(Current)|Lines]);
+split([$\r|Rest], Current, Lines) ->
+ split(Rest, Current, Lines);
+split([Char|Rest], Current, Lines) ->
+ split(Rest, [Char|Current], Lines);
+split([], [], Lines) ->
+ lists:reverse(Lines);
+split([], Current, Lines) ->
+ split([], [], [lists:reverse(Current)|Lines]).
+
+match_messages([Msg|Rest1], [Regexp|Rest2]) ->
+ case re:run(Msg, Regexp, [{capture,none}, unicode]) of
+ match ->
+ ok;
+ nomatch ->
+ io:format("Not matching: ~s\n", [Msg]),
+ io:format("Regexp : ~s\n", [Regexp]),
+ ct:fail(message_mismatch)
+ end,
+ match_messages(Rest1, Rest2);
+match_messages([], [Expect|Rest]) ->
+ ct:fail({too_few_messages, [Expect|Rest]});
+match_messages([Msg|Rest], []) ->
+ ct:fail({too_many_messages, [Msg|Rest]});
+match_messages([], []) ->
+ ok.
+
+%% Runs the command using os:cmd/1.
+%%
+%% Returns the output from the command (as a list of characters with
+%% embedded newlines). The very last line will indicate the
+%% exit status of the command, where _OK_ means zero, and _ERROR_
+%% a non-zero exit status.
+
+run_command(Config, Args) ->
+ TmpDir = filename:join(proplists:get_value(priv_dir, Config), "tmp"),
+ file:make_dir(TmpDir),
+ {RunFile, Run, Script} = run_command(TmpDir, os:type(), Args),
+ ok = file:write_file(filename:join(TmpDir, RunFile),
+ unicode:characters_to_binary(Script)),
+ io:format("~ts\n", [Script]),
+ os:cmd(Run).
+
+run_command(Dir, {win32, _}, Args) ->
+ BatchFile = filename:join(Dir, "run.bat"),
+ Run = re:replace(filename:rootname(BatchFile), "/", "\\",
+ [global,{return,list}]),
+ Typer = typer(),
+ {BatchFile,
+ Run,
+ ["@echo off\r\n",
+ "\"",Typer,"\" ",Args, "\r\n",
+ "if errorlevel 1 echo _ERROR_\r\n",
+ "if not errorlevel 1 echo _OK_\r\n"]};
+run_command(Dir, {unix, _}, Args) ->
+ TyperDir = filename:dirname(typer()),
+ Name = filename:join(Dir, "run"),
+ {Name,
+ "/bin/sh " ++ Name,
+ ["#!/bin/sh\n",
+ "PATH=\"",TyperDir,":$PATH\"\n",
+ "typer ",Args,"\n",
+ "case $? in\n",
+ " 0) echo '_OK_';;\n",
+ " *) echo '_ERROR_';;\n",
+ "esac\n"]};
+run_command(_Dir, Other, _Args) ->
+ ct:fail("Don't know how to test exit code for ~p", [Other]).
diff --git a/lib/dialyzer/test/unmatched_returns_SUITE_data/src/send.erl b/lib/dialyzer/test/unmatched_returns_SUITE_data/src/send.erl
new file mode 100644
index 0000000000..4d681b5cc7
--- /dev/null
+++ b/lib/dialyzer/test/unmatched_returns_SUITE_data/src/send.erl
@@ -0,0 +1,11 @@
+-module(send).
+
+-export([s/0]).
+
+s() ->
+ self() ! n(), % no warning
+ erlang:send(self(), n()), % no warning
+ ok.
+
+n() ->
+ {1, 1}.
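
Both sends above discard the value {1,1}, and the suite expects dialyzer to leave message sends alone even under -Wunmatched_returns. For contrast, a hypothetical companion module (not part of the test data) with the kind of discarded value the option is expected to flag:

    -module(send_contrast).   %% hypothetical illustration
    -export([s/0]).

    s() ->
        n(),        %% {1,1} is built and thrown away: expected to draw a warning
        ok.

    n() ->
        {1, 1}.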
diff --git a/lib/dialyzer/test/user_SUITE_data/results/wpc_hlines b/lib/dialyzer/test/user_SUITE_data/results/wpc_hlines
new file mode 100644
index 0000000000..d6e3f29ab9
--- /dev/null
+++ b/lib/dialyzer/test/user_SUITE_data/results/wpc_hlines
@@ -0,0 +1,3 @@
+
+wpc_hlines.erl:22: Function bad/1 has no local return
+wpc_hlines.erl:22: The pattern 'false' can never match the type 'true'
diff --git a/lib/dialyzer/test/user_SUITE_data/results/wsp_pdu b/lib/dialyzer/test/user_SUITE_data/results/wsp_pdu
index d1f8f4caf2..626e677524 100644
--- a/lib/dialyzer/test/user_SUITE_data/results/wsp_pdu
+++ b/lib/dialyzer/test/user_SUITE_data/results/wsp_pdu
@@ -6,7 +6,7 @@ wsp_pdu.erl:2403: The call wsp_pdu:d_date(Data1::binary()) will never return sin
wsp_pdu.erl:2406: Guard test is_integer(Sec::{[byte()] | byte() | {'long',binary()} | {'short',binary()},binary()}) can never succeed
wsp_pdu.erl:2408: The pattern {'short', Data2} can never match the type {[byte()] | byte() | {'long',binary()} | {'short',binary()},binary()}
wsp_pdu.erl:2755: Function parse_push_flag/1 has no local return
-wsp_pdu.erl:2756: The call erlang:integer_to_list(Value::[any()]) breaks the contract (Integer) -> string() when is_subtype(Integer,integer())
+wsp_pdu.erl:2756: The call erlang:integer_to_list(Value::[any()]) breaks the contract (Integer) -> string() when Integer :: integer()
wsp_pdu.erl:2875: The call wsp_pdu:d_text_string(Data::byte()) will never return since it differs in the 1st argument from the success typing arguments: (binary())
wsp_pdu.erl:2976: The call wsp_pdu:d_q_value(QData::byte()) will never return since it differs in the 1st argument from the success typing arguments: (<<_:8,_:_*8>>)
wsp_pdu.erl:3336: The call wsp_pdu:encode_typed_field(Ver::any(),'Q-value',ParamValue::any()) will never return since it differs in the 2nd argument from the success typing arguments: (any(),'Constrained-encoding' | 'Date-value' | 'No-value' | 'Short-integer' | 'Text-string' | 'Text-value' | 'Well-known-charset',any())
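
The only change in this results file is notational: dialyzer now prints spec constraints in the '::' form rather than the legacy is_subtype/2 form. The two spellings mean the same thing; an illustrative spec (not taken from wsp_pdu):

    %% Equivalent to: -spec f(I) -> string() when is_subtype(I, integer()).
    -spec f(I) -> string() when I :: integer().
    f(I) ->
        integer_to_list(I).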
diff --git a/lib/dialyzer/test/user_SUITE_data/src/wpc_hlines.erl b/lib/dialyzer/test/user_SUITE_data/src/wpc_hlines.erl
new file mode 100644
index 0000000000..8c205a8247
--- /dev/null
+++ b/lib/dialyzer/test/user_SUITE_data/src/wpc_hlines.erl
@@ -0,0 +1,22 @@
+%% Bug reported by Dan Gudmundsson, test shrunk down by Magnus Lång.
+
+%% The problem is that dialyzer_dep generates edges from the fun
+%% application to both of the functions, and then during the warning pass
+%% dialyzer_dataflow:handle_apply_or_call generates warnings for any such
+%% edge that won't return.
+
+%% Since dialyzer_dep is currently supposed to overapproximate rather than
+%% underapproximate, the fix was to modify handle_apply_or_call to not generate
+%% warnings if some of the possible funs can succeed.
+
+-module(wpc_hlines).
+
+-export([do_export/0]).
+
+do_export() ->
+ {Proj, _} = % The culprit seems to be putting the funs in a tuple
+ {fun good/1, fun bad/1},
+ Proj(true).
+
+good(_) -> ok.
+bad(false) -> ok.
diff --git a/lib/dialyzer/vsn.mk b/lib/dialyzer/vsn.mk
index 48e0830109..4a1a7c25a0 100644
--- a/lib/dialyzer/vsn.mk
+++ b/lib/dialyzer/vsn.mk
@@ -1 +1 @@
-DIALYZER_VSN = 2.8
+DIALYZER_VSN = 3.2